chore: update OpenRouter and provider model lists — v0.50.54

chore: update OpenRouter and provider model lists — v0.50.54
This commit is contained in:
nesquena-hermes
2026-04-15 16:04:20 -07:00
committed by GitHub
4 changed files with 60 additions and 37 deletions

View File

@@ -1,5 +1,10 @@
# Hermes Web UI -- Changelog # Hermes Web UI -- Changelog
## [v0.50.54] — 2026-04-15
### Changed
- **OpenRouter model list** — updated to 14 current models across 7 providers. All slugs verified live against the OpenRouter catalog. Removed `o4-mini`, old Gemini 2.x entries, and Llama 4. Added Claude Opus 4.6, GPT-5.4, Gemini 3.1 Pro Preview, Gemini 3 Flash Preview, DeepSeek R1, Qwen3 Coder, Qwen3.6 Plus, Grok 4.20, and Mistral Large. Both Claude 4.6 and 4.5 generations preserved. Fixed `grok-4-20` → `grok-4.20` slug and Gemini `-preview` suffixes.
## [v0.50.53] — 2026-04-15 ## [v0.50.53] — 2026-04-15
### Fixed ### Fixed

View File

@@ -404,31 +404,29 @@ CLI_TOOLSETS = get_config().get("platform_toolsets", {}).get("cli", _DEFAULT_TOO
# ── Model / provider discovery ─────────────────────────────────────────────── # ── Model / provider discovery ───────────────────────────────────────────────
# Hardcoded fallback models (used when no config.yaml or agent is available) # Hardcoded fallback models (used when no config.yaml or agent is available)
# Also used as the OpenRouter model list — keep this curated to current, widely-used models.
_FALLBACK_MODELS = [ _FALLBACK_MODELS = [
{"provider": "OpenAI", "id": "openai/gpt-5.4-mini", "label": "GPT-5.4 Mini"}, # OpenAI
{"provider": "OpenAI", "id": "openai/o4-mini", "label": "o4-mini"}, {"provider": "OpenAI", "id": "openai/gpt-5.4-mini", "label": "GPT-5.4 Mini"},
{ {"provider": "OpenAI", "id": "openai/gpt-5.4", "label": "GPT-5.4"},
"provider": "Anthropic", # Anthropic — 4.6 flagship + 4.5 generation
"id": "anthropic/claude-sonnet-4.6", {"provider": "Anthropic", "id": "anthropic/claude-opus-4.6", "label": "Claude Opus 4.6"},
"label": "Claude Sonnet 4.6", {"provider": "Anthropic", "id": "anthropic/claude-sonnet-4.6", "label": "Claude Sonnet 4.6"},
}, {"provider": "Anthropic", "id": "anthropic/claude-sonnet-4-5", "label": "Claude Sonnet 4.5"},
{ {"provider": "Anthropic", "id": "anthropic/claude-haiku-4-5", "label": "Claude Haiku 4.5"},
"provider": "Anthropic", # Google
"id": "anthropic/claude-sonnet-4-5", {"provider": "Google", "id": "google/gemini-3.1-pro-preview", "label": "Gemini 3.1 Pro Preview"},
"label": "Claude Sonnet 4.5", {"provider": "Google", "id": "google/gemini-3-flash-preview", "label": "Gemini 3 Flash Preview"},
}, # DeepSeek
{ {"provider": "DeepSeek", "id": "deepseek/deepseek-chat-v3-0324", "label": "DeepSeek V3"},
"provider": "Anthropic", {"provider": "DeepSeek", "id": "deepseek/deepseek-r1", "label": "DeepSeek R1"},
"id": "anthropic/claude-haiku-4-5", # Qwen (Alibaba) — strong coding and general models
"label": "Claude Haiku 4.5", {"provider": "Qwen", "id": "qwen/qwen3-coder", "label": "Qwen3 Coder"},
}, {"provider": "Qwen", "id": "qwen/qwen3.6-plus", "label": "Qwen3.6 Plus"},
{"provider": "Other", "id": "google/gemini-2.5-pro", "label": "Gemini 2.5 Pro"}, # xAI
{ {"provider": "xAI", "id": "x-ai/grok-4.20", "label": "Grok 4.20"},
"provider": "Other", # Mistral
"id": "deepseek/deepseek-chat-v3-0324", {"provider": "Mistral", "id": "mistralai/mistral-large-latest", "label": "Mistral Large"},
"label": "DeepSeek V3",
},
{"provider": "Other", "id": "meta-llama/llama-4-scout", "label": "Llama 4 Scout"},
] ]
# Provider display names for known Hermes provider IDs # Provider display names for known Hermes provider IDs
@@ -451,6 +449,9 @@ _PROVIDER_DISPLAY = {
"opencode-zen": "OpenCode Zen", "opencode-zen": "OpenCode Zen",
"opencode-go": "OpenCode Go", "opencode-go": "OpenCode Go",
"lmstudio": "LM Studio", "lmstudio": "LM Studio",
"mistralai": "Mistral",
"qwen": "Qwen",
"x-ai": "xAI",
} }
# Well-known models per provider (used to populate dropdown for direct API providers) # Well-known models per provider (used to populate dropdown for direct API providers)
@@ -463,7 +464,7 @@ _PROVIDER_MODELS = {
], ],
"openai": [ "openai": [
{"id": "gpt-5.4-mini", "label": "GPT-5.4 Mini"}, {"id": "gpt-5.4-mini", "label": "GPT-5.4 Mini"},
{"id": "o4-mini", "label": "o4-mini"}, {"id": "gpt-5.4", "label": "GPT-5.4"},
], ],
"openai-codex": [ "openai-codex": [
{"id": "gpt-5.4", "label": "GPT-5.4"}, {"id": "gpt-5.4", "label": "GPT-5.4"},
@@ -475,7 +476,8 @@ _PROVIDER_MODELS = {
{"id": "codex-mini-latest", "label": "Codex Mini (latest)"}, {"id": "codex-mini-latest", "label": "Codex Mini (latest)"},
], ],
"google": [ "google": [
{"id": "gemini-2.5-pro", "label": "Gemini 2.5 Pro"}, {"id": "gemini-3.1-pro-preview", "label": "Gemini 3.1 Pro Preview"},
{"id": "gemini-3-flash-preview", "label": "Gemini 3 Flash Preview"},
], ],
"deepseek": [ "deepseek": [
{"id": "deepseek-chat-v3-0324", "label": "DeepSeek V3"}, {"id": "deepseek-chat-v3-0324", "label": "DeepSeek V3"},
@@ -485,7 +487,7 @@ _PROVIDER_MODELS = {
{"id": "claude-opus-4.6", "label": "Claude Opus 4.6 (via Nous)"}, {"id": "claude-opus-4.6", "label": "Claude Opus 4.6 (via Nous)"},
{"id": "claude-sonnet-4.6", "label": "Claude Sonnet 4.6 (via Nous)"}, {"id": "claude-sonnet-4.6", "label": "Claude Sonnet 4.6 (via Nous)"},
{"id": "gpt-5.4-mini", "label": "GPT-5.4 Mini (via Nous)"}, {"id": "gpt-5.4-mini", "label": "GPT-5.4 Mini (via Nous)"},
{"id": "gemini-2.5-pro", "label": "Gemini 2.5 Pro (via Nous)"}, {"id": "gemini-3.1-pro-preview", "label": "Gemini 3.1 Pro Preview (via Nous)"},
], ],
"zai": [ "zai": [
{"id": "glm-5.1", "label": "GLM-5.1"}, {"id": "glm-5.1", "label": "GLM-5.1"},
@@ -515,7 +517,7 @@ _PROVIDER_MODELS = {
{"id": "gpt-4o", "label": "GPT-4o"}, {"id": "gpt-4o", "label": "GPT-4o"},
{"id": "claude-opus-4.6", "label": "Claude Opus 4.6"}, {"id": "claude-opus-4.6", "label": "Claude Opus 4.6"},
{"id": "claude-sonnet-4.6", "label": "Claude Sonnet 4.6"}, {"id": "claude-sonnet-4.6", "label": "Claude Sonnet 4.6"},
{"id": "gemini-2.5-pro", "label": "Gemini 2.5 Pro"}, {"id": "gemini-3.1-pro-preview", "label": "Gemini 3.1 Pro Preview"},
], ],
# OpenCode Zen — curated models via opencode.ai/zen (pay-as-you-go credits) # OpenCode Zen — curated models via opencode.ai/zen (pay-as-you-go credits)
"opencode-zen": [ "opencode-zen": [
@@ -542,8 +544,8 @@ _PROVIDER_MODELS = {
{"id": "claude-sonnet-4", "label": "Claude Sonnet 4"}, {"id": "claude-sonnet-4", "label": "Claude Sonnet 4"},
{"id": "claude-haiku-4-5", "label": "Claude Haiku 4.5"}, {"id": "claude-haiku-4-5", "label": "Claude Haiku 4.5"},
{"id": "claude-3-5-haiku", "label": "Claude 3.5 Haiku"}, {"id": "claude-3-5-haiku", "label": "Claude 3.5 Haiku"},
{"id": "gemini-3.1-pro", "label": "Gemini 3.1 Pro"}, {"id": "gemini-3.1-pro-preview", "label": "Gemini 3.1 Pro Preview"},
{"id": "gemini-3-flash", "label": "Gemini 3 Flash"}, {"id": "gemini-3-flash-preview", "label": "Gemini 3 Flash Preview"},
{"id": "glm-5.1", "label": "GLM-5.1"}, {"id": "glm-5.1", "label": "GLM-5.1"},
{"id": "glm-5", "label": "GLM-5"}, {"id": "glm-5", "label": "GLM-5"},
{"id": "kimi-k2.5", "label": "Kimi K2.5"}, {"id": "kimi-k2.5", "label": "Kimi K2.5"},
@@ -564,8 +566,22 @@ _PROVIDER_MODELS = {
], ],
# 'gemini' is the hermes_cli provider ID for Google AI Studio # 'gemini' is the hermes_cli provider ID for Google AI Studio
"gemini": [ "gemini": [
{"id": "gemini-2.5-pro", "label": "Gemini 2.5 Pro"}, {"id": "gemini-3.1-pro-preview", "label": "Gemini 3.1 Pro Preview"},
{"id": "gemini-2.0-flash", "label": "Gemini 2.0 Flash"}, {"id": "gemini-3-flash-preview", "label": "Gemini 3 Flash Preview"},
],
# Mistral — prefix used in OpenRouter model IDs (mistralai/mistral-large-latest)
"mistralai": [
{"id": "mistral-large-latest", "label": "Mistral Large"},
{"id": "mistral-small-latest", "label": "Mistral Small"},
],
# Qwen (Alibaba) — prefix used in OpenRouter model IDs (qwen/qwen3-coder)
"qwen": [
{"id": "qwen3-coder", "label": "Qwen3 Coder"},
{"id": "qwen3.6-plus", "label": "Qwen3.6 Plus"},
],
# xAI — prefix used in OpenRouter model IDs (x-ai/grok-4.20)
"x-ai": [
{"id": "grok-4.20", "label": "Grok 4.20"},
], ],
} }

View File

@@ -553,7 +553,7 @@
<div class="settings-section-title">System</div> <div class="settings-section-title">System</div>
<div class="settings-section-meta">Instance version and access controls.</div> <div class="settings-section-meta">Instance version and access controls.</div>
</div> </div>
<span class="settings-version-badge">v0.50.53</span> <span class="settings-version-badge">v0.50.54</span>
</div> </div>
<div class="settings-field" style="border-top:1px solid var(--border);padding-top:12px;margin-top:8px"> <div class="settings-field" style="border-top:1px solid var(--border);padding-top:12px;margin-top:8px">
<label for="settingsPassword" data-i18n="settings_label_password">Access Password</label> <label for="settingsPassword" data-i18n="settings_label_password">Access Password</label>

View File

@@ -127,10 +127,12 @@ class TestStaleModelListCleanup:
"_FALLBACK_MODELS must keep gpt-5.4-mini as primary OpenAI model (#374)" "_FALLBACK_MODELS must keep gpt-5.4-mini as primary OpenAI model (#374)"
) )
def test_fallback_still_has_o4_mini(self): def test_fallback_has_gpt54(self):
"""_FALLBACK_MODELS must still contain o4-mini (reasoning model).""" """_FALLBACK_MODELS must contain gpt-5.4-mini as the primary OpenAI option."""
assert "o4-mini" in CONFIG_PY, ( from api.config import _FALLBACK_MODELS
"_FALLBACK_MODELS must keep o4-mini as reasoning model (#374)" ids = [m["id"] for m in _FALLBACK_MODELS]
assert any("gpt-5.4-mini" in mid for mid in ids), (
"_FALLBACK_MODELS must include gpt-5.4-mini as the primary OpenAI option"
) )
def test_copilot_list_unchanged(self): def test_copilot_list_unchanged(self):