fix: HERMES_WEBUI_SKIP_ONBOARDING unconditional + guard against config writes + 10 skipped tests fixed — v0.50.65

Fixes two SKIP_ONBOARDING bugs and eliminates 10 permanently-skipped integration tests.

- SKIP_ONBOARDING=1 now honoured unconditionally (no longer gated on chat_ready)
- apply_onboarding_setup refuses to write config/env files when SKIP_ONBOARDING is set
- TestMediaEndpointIntegration (6) and TestOnboardingGateIntegration (4): collection-time
  skip guards removed; server reachability is now checked at runtime with fail() instead of skip()

Tests: 1327 passed, 0 skipped.

Admin merge — self-built PR, Nathan authorized full merge process in session.
This commit is contained in:
nesquena-hermes
2026-04-16 10:19:10 -07:00
committed by GitHub
parent 2efc1fb8e8
commit db1240dde5
6 changed files with 107 additions and 29 deletions

View File

@@ -144,23 +144,20 @@ class TestMediaEndpointUnit(unittest.TestCase):
# ── Integration tests: live server on TEST_PORT ───────────────────────────────
def _server_reachable() -> bool:
    """Return True iff the test server answers GET /health within 3 seconds."""
    try:
        urllib.request.urlopen(BASE + "/health", timeout=3)
    except Exception:
        return False
    return True
# No collection-time skip guard — conftest.py starts the server via its
# autouse session fixture BEFORE tests run. A collection-time check always
# sees no server and turns every test into a skip. Instead we assert
# reachability inside setUp() so failures are loud errors, not silent skips.
requires_server = unittest.skipUnless(
_server_reachable(), f"Test server not reachable at {BASE}"
)
@requires_server
class TestMediaEndpointIntegration(unittest.TestCase):
def setUp(self):
    """Fail loudly — never skip — when the session-scoped server fixture is down."""
    health_url = BASE + "/health"
    try:
        urllib.request.urlopen(health_url, timeout=5)
    except Exception as exc:
        self.fail(f"Test server at {BASE} is not reachable: {exc}")
def _get(self, path):
try:
with urllib.request.urlopen(BASE + path, timeout=10) as r:

View File

@@ -235,17 +235,21 @@ def _server_reachable() -> bool:
return False
# Mark integration tests to only run when test server is up
requires_server = pytest.mark.skipif(
not _server_reachable(),
reason="Test server on :8788 not reachable",
)
# No collection-time skip guard — conftest.py starts the server via its
# autouse session fixture BEFORE tests run. A collection-time check always
# sees no server and turns every test into a skip. Server reachability is
# asserted inside the _require_server fixture instead so failures are loud.
@requires_server
class TestOnboardingGateIntegration:
"""Live-server integration tests for the onboarding gate fix."""
@pytest.fixture(autouse=True)
def _require_server(self):
    """Runtime (not collection-time) guard: abort loudly if the server is down."""
    reachable = _server_reachable()
    if not reachable:
        pytest.fail(f"Test server at {BASE} is not reachable")
@pytest.fixture(autouse=True)
def _clean(self):
hermes_home = _server_hermes_home()
@@ -254,6 +258,16 @@ class TestOnboardingGateIntegration:
yield
for rel in ("config.yaml", ".env"):
(hermes_home / rel).unlink(missing_ok=True)
# Force the server to reload its in-memory config after file deletion.
# apply_onboarding_setup() calls reload_config() which caches provider
# state in the server process. Deleting files on disk does not clear
# that cache — the next test would see provider_configured=True.
# GET /api/personalities always calls reload_config(), giving us a
# cheap way to flush the cache without a server restart.
try:
_http_get("/api/personalities")
except Exception:
pass
def test_no_config_wizard_fires(self):
"""No config.yaml → completed=False."""

View File

@@ -2,13 +2,14 @@
Sprint 39 Tests: Skip-onboarding env var + onboarding key reload fix (PR A of issue #329).
Covers:
- HERMES_WEBUI_SKIP_ONBOARDING=1 bypasses the wizard when chat_ready is true
- HERMES_WEBUI_SKIP_ONBOARDING=1 does NOT bypass when chat_ready is false
- HERMES_WEBUI_SKIP_ONBOARDING=1 bypasses the wizard unconditionally (chat_ready not required)
- HERMES_WEBUI_SKIP_ONBOARDING unset leaves default behaviour unchanged
- apply_onboarding_setup sets os.environ synchronously when an API key is saved
- apply_onboarding_setup refuses to write config/env files when SKIP_ONBOARDING is set
"""
import os
import unittest
import unittest.mock
from unittest.mock import patch
import api.onboarding as mod
@@ -92,11 +93,11 @@ class TestSkipOnboardingEnvVar(unittest.TestCase):
status = self._run_status(_READY_RUNTIME, {"HERMES_WEBUI_SKIP_ONBOARDING": "yes"})
self.assertTrue(status["completed"])
def test_skip_env_set_but_not_chat_ready_does_not_skip(self):
"""HERMES_WEBUI_SKIP_ONBOARDING=1 + chat_ready=False → still shows wizard."""
def test_skip_env_1_works_even_when_not_chat_ready(self):
"""HERMES_WEBUI_SKIP_ONBOARDING=1 skips unconditionally — chat_ready is NOT required."""
status = self._run_status(_NOT_READY_RUNTIME, {"HERMES_WEBUI_SKIP_ONBOARDING": "1"})
self.assertFalse(status["completed"],
"completed must be False when not chat_ready even if skip env set")
self.assertTrue(status["completed"],
"completed must be True when skip env var is set, regardless of chat_ready")
def test_skip_env_unset_leaves_default_false(self):
"""Without the env var, completed is False when settings are empty."""
@@ -182,5 +183,53 @@ class TestApplyOnboardingKeySync(unittest.TestCase):
os.environ.pop("OPENAI_API_KEY", None)
class TestApplyOnboardingSkipGuard(unittest.TestCase):
"""apply_onboarding_setup must not write config/env when SKIP_ONBOARDING is set."""
def test_apply_setup_blocked_when_skip_env_set(self):
    """SKIP_ONBOARDING=1 → apply_onboarding_setup never touches disk."""
    # Spies on the two disk-writing helpers; both must stay untouched.
    yaml_writer = unittest.mock.MagicMock()
    env_writer = unittest.mock.MagicMock()
    payload = {
        "provider": "openai",
        "model": "gpt-4o",
        "api_key": "should-not-be-saved",
    }
    with patch.dict(os.environ, {"HERMES_WEBUI_SKIP_ONBOARDING": "1"}, clear=False), \
            patch("api.onboarding._save_yaml_config", yaml_writer), \
            patch("api.onboarding._write_env_file", env_writer), \
            patch("api.onboarding.save_settings"), \
            patch("api.onboarding.get_onboarding_status", return_value={"completed": True}):
        mod.apply_onboarding_setup(payload)
    yaml_writer.assert_not_called()
    env_writer.assert_not_called()
def test_apply_setup_proceeds_normally_without_skip_env(self):
    """Without SKIP_ONBOARDING, apply_onboarding_setup writes config as usual."""
    import pathlib
    yaml_writer = unittest.mock.MagicMock()
    stub_cfg = {"model": {"provider": "openai", "default": "gpt-4o"}}
    # Run with an environment guaranteed not to contain the skip flag.
    scrubbed_env = dict(os.environ)
    scrubbed_env.pop("HERMES_WEBUI_SKIP_ONBOARDING", None)
    with patch.dict(os.environ, scrubbed_env, clear=True), \
            patch("api.onboarding._load_yaml_config", return_value=stub_cfg), \
            patch("api.onboarding._save_yaml_config", yaml_writer), \
            patch("api.onboarding._write_env_file"), \
            patch("api.onboarding.reload_config"), \
            patch("api.onboarding.get_onboarding_status", return_value={"completed": True}), \
            patch("api.onboarding._get_config_path", return_value=pathlib.Path("/tmp/fake.yaml")), \
            patch("api.onboarding._load_env_file", return_value={"OPENAI_API_KEY": "existing"}), \
            patch("api.onboarding._provider_api_key_present", return_value=True), \
            patch("api.onboarding._get_active_hermes_home", return_value=pathlib.Path("/tmp")):
        mod.apply_onboarding_setup({"provider": "openai", "model": "gpt-4o"})
    yaml_writer.assert_called_once()
# Allow running this test module directly (python <file>.py) as well as via pytest.
if __name__ == "__main__":
unittest.main()