refactor: extract resolve_model_provider helper, fix cross-provider routing

Replace duplicated inline provider resolution in routes.py and streaming.py
with a shared resolve_model_provider() helper in config.py.

Improvements over original:
- If model ID has a prefix matching any known direct-API provider
  (not just the config provider), strip it and route correctly.
  This handles edge cases like localStorage restoring a model from
  a different provider group.
- Single source of truth for the resolution logic.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Nathan Esquenazi
2026-04-01 22:56:34 -07:00
parent 2864c2b691
commit 241357595d
3 changed files with 37 additions and 21 deletions

View File

@@ -312,6 +312,36 @@ _PROVIDER_MODELS = {
} }
def resolve_model_provider(model_id: str):
    """Map a dropdown model ID to a (bare_model, provider) pair for AIAgent.

    IDs may carry a 'provider/' prefix (e.g. 'anthropic/claude-sonnet-4.6').
    Direct-API providers expect the bare model name, while OpenRouter expects
    the full 'provider/model' path, so the prefix is stripped only when it
    names either the configured provider or a known direct-API provider.

    Returns:
        (model, provider) -- provider may be None when the config declares
        none and the ID carries no recognized prefix.
    """
    model_cfg = cfg.get('model', {})
    # Config may hold a non-dict under 'model'; treat that as "no provider".
    default_provider = model_cfg.get('provider') if isinstance(model_cfg, dict) else None

    cleaned = (model_id or '').strip()
    if not cleaned:
        return cleaned, default_provider

    prefix, sep, remainder = cleaned.partition('/')
    if sep:
        # Prefix naming the configured provider: strip it, keep that provider.
        if default_provider and prefix == default_provider:
            return remainder, default_provider
        # Prefix naming any known direct-API provider: route straight to it.
        if prefix in _PROVIDER_MODELS:
            return remainder, prefix

    # No recognized prefix: pass the ID through with the configured provider.
    return cleaned, default_provider
def get_available_models() -> dict: def get_available_models() -> dict:
""" """
Return available models grouped by provider. Return available models grouped by provider.

View File

@@ -629,15 +629,9 @@ def _handle_chat_sync(handler, body):
try: try:
from run_agent import AIAgent from run_agent import AIAgent
with CHAT_LOCK: with CHAT_LOCK:
from api.config import cfg as _hcfg from api.config import resolve_model_provider
_model = s.model or '' _model, _provider = resolve_model_provider(s.model)
_prov = None agent = AIAgent(model=_model, provider=_provider, platform='cli', quiet_mode=True,
_mc = _hcfg.get('model', {})
if isinstance(_mc, dict):
_prov = _mc.get('provider')
if _prov and '/' in _model and _model.startswith(_prov + '/'):
_model = _model.split('/', 1)[1]
agent = AIAgent(model=_model, provider=_prov, platform='cli', quiet_mode=True,
enabled_toolsets=CLI_TOOLSETS, session_id=s.session_id) enabled_toolsets=CLI_TOOLSETS, session_id=s.session_id)
workspace_ctx = f"[Workspace: {s.workspace}]\n" workspace_ctx = f"[Workspace: {s.workspace}]\n"
workspace_system_msg = ( workspace_system_msg = (

View File

@@ -13,7 +13,7 @@ from pathlib import Path
from api.config import ( from api.config import (
STREAMS, STREAMS_LOCK, CANCEL_FLAGS, CLI_TOOLSETS, STREAMS, STREAMS_LOCK, CANCEL_FLAGS, CLI_TOOLSETS,
_get_session_agent_lock, _set_thread_env, _clear_thread_env, _get_session_agent_lock, _set_thread_env, _clear_thread_env,
cfg as _hermes_cfg, resolve_model_provider,
) )
# Lazy import to avoid circular deps -- hermes-agent is on sys.path via api/config.py # Lazy import to avoid circular deps -- hermes-agent is on sys.path via api/config.py
@@ -100,18 +100,10 @@ def _run_agent_streaming(session_id, msg_text, model, workspace, stream_id, atta
if AIAgent is None: if AIAgent is None:
raise ImportError("AIAgent not available -- check that hermes-agent is on sys.path") raise ImportError("AIAgent not available -- check that hermes-agent is on sys.path")
# Resolve provider from config so agent routes to the right API resolved_model, resolved_provider = resolve_model_provider(model)
_provider = None
model_cfg = _hermes_cfg.get('model', {})
if isinstance(model_cfg, dict):
_provider = model_cfg.get('provider')
# If model has provider/ prefix matching config provider, strip it
# so AIAgent doesn't misroute to OpenRouter
if _provider and '/' in model and model.startswith(_provider + '/'):
model = model.split('/', 1)[1]
agent = AIAgent( agent = AIAgent(
model=model, model=resolved_model,
provider=_provider, provider=resolved_provider,
platform='cli', platform='cli',
quiet_mode=True, quiet_mode=True,
enabled_toolsets=CLI_TOOLSETS, enabled_toolsets=CLI_TOOLSETS,