From dc2334c5a309e32f473adbd5fb3ad3be10a90614 Mon Sep 17 00:00:00 2001
From: Hermes Agent
Date: Wed, 15 Apr 2026 22:11:15 +0000
Subject: [PATCH] fix(review): use _PROVIDER_MODELS check instead of
 custom-only guard
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

The original fix preserved full IDs only when config_provider == 'custom',
which broke existing tests expecting prefix-stripping for known namespaces
like 'openai/' and 'google/'.

The correct heuristic: strip the prefix only when it is a known provider
namespace (i.e. prefix in _PROVIDER_MODELS — 'openai', 'google',
'anthropic', etc.). Unknown prefixes like 'zai-org' are intrinsic to the
model ID and must be preserved. This satisfies both the DeepInfra use case
(#548) and the existing #433 regression tests.
---
 api/config.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/api/config.py b/api/config.py
index a7a2b84..2f83a11 100644
--- a/api/config.py
+++ b/api/config.py
@@ -637,14 +637,14 @@ def resolve_model_provider(model_id: str) -> tuple:
     # just because the model name contains a slash (e.g. google/gemma-4-26b-a4b).
     # The user has explicitly pointed at a base_url, so trust their routing config.
     if config_base_url:
-        # For explicit custom endpoints, preserve full slash-bearing model IDs
-        # (e.g. "zai-org/GLM-5.1" on DeepInfra). Stripping the prefix causes
-        # model_not_found on providers that require vendor/model format.
-        if (config_provider or "").strip().lower() == "custom":
-            return model_id, config_provider, config_base_url
-        # Non-custom providers with a base_url override can still use bare IDs.
-        bare_model = model_id.split('/', 1)[-1]
-        return bare_model, config_provider, config_base_url
+        # Only strip the provider prefix when it's a known provider namespace
+        # (e.g. "openai/gpt-5.4" → "gpt-5.4" for a custom OpenAI-compatible proxy).
+        # Unknown prefixes (e.g. "zai-org/GLM-5.1" on DeepInfra) are intrinsic to
+        # the model ID and must be preserved — stripping them causes model_not_found.
+        if prefix in _PROVIDER_MODELS:
+            return bare, config_provider, config_base_url
+        # Unknown prefix (not a named provider) — pass full model_id through.
+        return model_id, config_provider, config_base_url
     # If prefix does NOT match config provider, the user picked a cross-provider model
     # from the OpenRouter dropdown (e.g. config=anthropic but picked openai/gpt-5.4-mini).
     # In this case always route through openrouter with the full provider/model string.