fix: HERMES_WEBUI_SKIP_ONBOARDING unconditional + guard against config writes + 10 skipped tests fixed — v0.50.65

Fixes two SKIP_ONBOARDING bugs and removes the permanent skip guards from 10 integration tests so they now actually run.

- SKIP_ONBOARDING=1 now honoured unconditionally (no longer gated on chat_ready)
- apply_onboarding_setup refuses to write config/env files when SKIP_ONBOARDING is set
- TestMediaEndpointIntegration (6) and TestOnboardingGateIntegration (4): collection-time
  skip guards removed; server reachability checked at runtime with fail() not skip()

Tests: 1327 passed, 0 skipped.

Admin merge — self-built PR, Nathan authorized full merge process in session.
This commit is contained in:
nesquena-hermes
2026-04-16 10:19:10 -07:00
committed by GitHub
parent 2efc1fb8e8
commit db1240dde5
6 changed files with 107 additions and 29 deletions

View File

@@ -1,5 +1,12 @@
# Hermes Web UI -- Changelog # Hermes Web UI -- Changelog
## [v0.50.65] — 2026-04-16
### Fixed
- **`HERMES_WEBUI_SKIP_ONBOARDING=1` now works unconditionally** — previously the env var was gated on `chat_ready=True`, so hosting providers (e.g. Agent37) that set it but hadn't yet wired up a provider key would still see the wizard on every page load. The var is now honoured as a hard operator override regardless of `chat_ready`. If you set it, the wizard is gone. (Fixes skip-onboarding regression)
- **Onboarding wizard can no longer overwrite config or env files when `SKIP_ONBOARDING` is set** — `apply_onboarding_setup` now checks the env var first and refuses to touch `config.yaml` or `.env` if it is set. This is a belt-and-suspenders guard: even if a stale JS bundle somehow triggers the setup endpoint while `SKIP_ONBOARDING` is active, no files are written.
## [v0.50.64] — 2026-04-16 ## [v0.50.64] — 2026-04-16
### Changed ### Changed

View File

@@ -416,11 +416,12 @@ def get_onboarding_status() -> dict:
# HERMES_WEBUI_SKIP_ONBOARDING=1 lets hosting providers (e.g. Agent37) ship # HERMES_WEBUI_SKIP_ONBOARDING=1 lets hosting providers (e.g. Agent37) ship
# a pre-configured instance without the wizard blocking the first load. # a pre-configured instance without the wizard blocking the first load.
# Only takes effect when the system is actually chat_ready — a misconfigured # This is an operator-level override and is honoured unconditionally —
# deployment still shows the wizard so the user can fix it. # the operator knows their deployment is configured; we must not second-guess
# it by requiring chat_ready to also be true.
skip_env = os.environ.get("HERMES_WEBUI_SKIP_ONBOARDING", "").strip() skip_env = os.environ.get("HERMES_WEBUI_SKIP_ONBOARDING", "").strip()
skip_requested = skip_env in {"1", "true", "yes"} skip_requested = skip_env in {"1", "true", "yes"}
auto_completed = skip_requested and bool(runtime.get("chat_ready")) auto_completed = skip_requested # unconditional: operator says skip, we skip
# Auto-complete for existing Hermes users: if config.yaml already exists # Auto-complete for existing Hermes users: if config.yaml already exists
# AND the system is chat_ready, treat onboarding as done. These users # AND the system is chat_ready, treat onboarding as done. These users
@@ -457,6 +458,16 @@ def get_onboarding_status() -> dict:
def apply_onboarding_setup(body: dict) -> dict: def apply_onboarding_setup(body: dict) -> dict:
# Hard guard: if the operator set SKIP_ONBOARDING, the wizard should never
# have appeared. Even if the frontend somehow calls this endpoint anyway
# (e.g. a stale JS bundle or a curious user), we must not overwrite the
# operator's config.yaml or .env files. Just mark onboarding complete and
# return the current status — no file writes.
skip_env = os.environ.get("HERMES_WEBUI_SKIP_ONBOARDING", "").strip()
if skip_env in {"1", "true", "yes"}:
save_settings({"onboarding_completed": True})
return get_onboarding_status()
provider = str(body.get("provider") or "").strip().lower() provider = str(body.get("provider") or "").strip().lower()
model = str(body.get("model") or "").strip() model = str(body.get("model") or "").strip()
api_key = str(body.get("api_key") or "").strip() api_key = str(body.get("api_key") or "").strip()

View File

@@ -553,7 +553,7 @@
<div class="settings-section-title">System</div> <div class="settings-section-title">System</div>
<div class="settings-section-meta">Instance version and access controls.</div> <div class="settings-section-meta">Instance version and access controls.</div>
</div> </div>
<span class="settings-version-badge">v0.50.64</span> <span class="settings-version-badge">v0.50.65</span>
</div> </div>
<div class="settings-field" style="border-top:1px solid var(--border);padding-top:12px;margin-top:8px"> <div class="settings-field" style="border-top:1px solid var(--border);padding-top:12px;margin-top:8px">
<label for="settingsPassword" data-i18n="settings_label_password">Access Password</label> <label for="settingsPassword" data-i18n="settings_label_password">Access Password</label>

View File

@@ -144,23 +144,20 @@ class TestMediaEndpointUnit(unittest.TestCase):
# ── Integration tests: live server on TEST_PORT ─────────────────────────────── # ── Integration tests: live server on TEST_PORT ───────────────────────────────
# No collection-time skip guard — conftest.py starts the server via its
def _server_reachable() -> bool: # autouse session fixture BEFORE tests run. A collection-time check always
try: # sees no server and turns every test into a skip. Instead we assert
urllib.request.urlopen(BASE + "/health", timeout=3) # reachability inside setUp() so failures are loud errors, not silent skips.
return True
except Exception:
return False
requires_server = unittest.skipUnless(
_server_reachable(), f"Test server not reachable at {BASE}"
)
@requires_server
class TestMediaEndpointIntegration(unittest.TestCase): class TestMediaEndpointIntegration(unittest.TestCase):
def setUp(self):
try:
urllib.request.urlopen(BASE + "/health", timeout=5)
except Exception as exc:
self.fail(f"Test server at {BASE} is not reachable: {exc}")
def _get(self, path): def _get(self, path):
try: try:
with urllib.request.urlopen(BASE + path, timeout=10) as r: with urllib.request.urlopen(BASE + path, timeout=10) as r:

View File

@@ -235,17 +235,21 @@ def _server_reachable() -> bool:
return False return False
# Mark integration tests to only run when test server is up # No collection-time skip guard — conftest.py starts the server via its
requires_server = pytest.mark.skipif( # autouse session fixture BEFORE tests run. A collection-time check always
not _server_reachable(), # sees no server and turns every test into a skip. Server reachability is
reason="Test server on :8788 not reachable", # asserted inside the _require_server fixture instead so failures are loud.
)
@requires_server
class TestOnboardingGateIntegration: class TestOnboardingGateIntegration:
"""Live-server integration tests for the onboarding gate fix.""" """Live-server integration tests for the onboarding gate fix."""
@pytest.fixture(autouse=True)
def _require_server(self):
"""Assert server is reachable at test runtime (not collection time)."""
if not _server_reachable():
pytest.fail(f"Test server at {BASE} is not reachable")
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
def _clean(self): def _clean(self):
hermes_home = _server_hermes_home() hermes_home = _server_hermes_home()
@@ -254,6 +258,16 @@ class TestOnboardingGateIntegration:
yield yield
for rel in ("config.yaml", ".env"): for rel in ("config.yaml", ".env"):
(hermes_home / rel).unlink(missing_ok=True) (hermes_home / rel).unlink(missing_ok=True)
# Force the server to reload its in-memory config after file deletion.
# apply_onboarding_setup() calls reload_config() which caches provider
# state in the server process. Deleting files on disk does not clear
# that cache — the next test would see provider_configured=True.
# GET /api/personalities always calls reload_config(), giving us a
# cheap way to flush the cache without a server restart.
try:
_http_get("/api/personalities")
except Exception:
pass
def test_no_config_wizard_fires(self): def test_no_config_wizard_fires(self):
"""No config.yaml → completed=False.""" """No config.yaml → completed=False."""

View File

@@ -2,13 +2,14 @@
Sprint 39 Tests: Skip-onboarding env var + onboarding key reload fix (PR A of issue #329). Sprint 39 Tests: Skip-onboarding env var + onboarding key reload fix (PR A of issue #329).
Covers: Covers:
- HERMES_WEBUI_SKIP_ONBOARDING=1 bypasses the wizard when chat_ready is true - HERMES_WEBUI_SKIP_ONBOARDING=1 bypasses the wizard unconditionally (chat_ready not required)
- HERMES_WEBUI_SKIP_ONBOARDING=1 does NOT bypass when chat_ready is false
- HERMES_WEBUI_SKIP_ONBOARDING unset leaves default behaviour unchanged - HERMES_WEBUI_SKIP_ONBOARDING unset leaves default behaviour unchanged
- apply_onboarding_setup sets os.environ synchronously when an API key is saved - apply_onboarding_setup sets os.environ synchronously when an API key is saved
- apply_onboarding_setup refuses to write config/env files when SKIP_ONBOARDING is set
""" """
import os import os
import unittest import unittest
import unittest.mock
from unittest.mock import patch from unittest.mock import patch
import api.onboarding as mod import api.onboarding as mod
@@ -92,11 +93,11 @@ class TestSkipOnboardingEnvVar(unittest.TestCase):
status = self._run_status(_READY_RUNTIME, {"HERMES_WEBUI_SKIP_ONBOARDING": "yes"}) status = self._run_status(_READY_RUNTIME, {"HERMES_WEBUI_SKIP_ONBOARDING": "yes"})
self.assertTrue(status["completed"]) self.assertTrue(status["completed"])
def test_skip_env_set_but_not_chat_ready_does_not_skip(self): def test_skip_env_1_works_even_when_not_chat_ready(self):
"""HERMES_WEBUI_SKIP_ONBOARDING=1 + chat_ready=False → still shows wizard.""" """HERMES_WEBUI_SKIP_ONBOARDING=1 skips unconditionally — chat_ready is NOT required."""
status = self._run_status(_NOT_READY_RUNTIME, {"HERMES_WEBUI_SKIP_ONBOARDING": "1"}) status = self._run_status(_NOT_READY_RUNTIME, {"HERMES_WEBUI_SKIP_ONBOARDING": "1"})
self.assertFalse(status["completed"], self.assertTrue(status["completed"],
"completed must be False when not chat_ready even if skip env set") "completed must be True when skip env var is set, regardless of chat_ready")
def test_skip_env_unset_leaves_default_false(self): def test_skip_env_unset_leaves_default_false(self):
"""Without the env var, completed is False when settings are empty.""" """Without the env var, completed is False when settings are empty."""
@@ -182,5 +183,53 @@ class TestApplyOnboardingKeySync(unittest.TestCase):
os.environ.pop("OPENAI_API_KEY", None) os.environ.pop("OPENAI_API_KEY", None)
class TestApplyOnboardingSkipGuard(unittest.TestCase):
"""apply_onboarding_setup must not write config/env when SKIP_ONBOARDING is set."""
def test_apply_setup_blocked_when_skip_env_set(self):
"""SKIP_ONBOARDING=1 → apply_onboarding_setup never touches disk."""
save_yaml_mock = unittest.mock.MagicMock()
write_env_mock = unittest.mock.MagicMock()
with patch.dict(os.environ, {"HERMES_WEBUI_SKIP_ONBOARDING": "1"}, clear=False), \
patch("api.onboarding._save_yaml_config", save_yaml_mock), \
patch("api.onboarding._write_env_file", write_env_mock), \
patch("api.onboarding.save_settings"), \
patch("api.onboarding.get_onboarding_status", return_value={"completed": True}):
mod.apply_onboarding_setup({
"provider": "openai",
"model": "gpt-4o",
"api_key": "should-not-be-saved",
})
save_yaml_mock.assert_not_called()
write_env_mock.assert_not_called()
def test_apply_setup_proceeds_normally_without_skip_env(self):
"""Without SKIP_ONBOARDING, apply_onboarding_setup writes config as usual."""
import pathlib
save_yaml_mock = unittest.mock.MagicMock()
mock_cfg = {"model": {"provider": "openai", "default": "gpt-4o"}}
env = {k: v for k, v in os.environ.items() if k != "HERMES_WEBUI_SKIP_ONBOARDING"}
with patch.dict(os.environ, env, clear=True), \
patch("api.onboarding._load_yaml_config", return_value=mock_cfg), \
patch("api.onboarding._save_yaml_config", save_yaml_mock), \
patch("api.onboarding._write_env_file"), \
patch("api.onboarding.reload_config"), \
patch("api.onboarding.get_onboarding_status", return_value={"completed": True}), \
patch("api.onboarding._get_config_path", return_value=pathlib.Path("/tmp/fake.yaml")), \
patch("api.onboarding._load_env_file", return_value={"OPENAI_API_KEY": "existing"}), \
patch("api.onboarding._provider_api_key_present", return_value=True), \
patch("api.onboarding._get_active_hermes_home", return_value=pathlib.Path("/tmp")):
mod.apply_onboarding_setup({
"provider": "openai",
"model": "gpt-4o",
})
save_yaml_mock.assert_called_once()
if __name__ == "__main__": if __name__ == "__main__":
unittest.main() unittest.main()