diff --git a/ai-hub/app/config.py b/ai-hub/app/config.py
index 0c7c03d..5b06f56 100644
--- a/ai-hub/app/config.py
+++ b/ai-hub/app/config.py
@@ -237,6 +237,20 @@
                 "client_secret": self.OIDC_CLIENT_SECRET,
                 "server_url": self.OIDC_SERVER_URL,
                 "redirect_uri": self.OIDC_REDIRECT_URI
+            },
+            "llm_providers": {
+                "providers": self.LLM_PROVIDERS
+            },
+            "tts_provider": {
+                "provider": self.TTS_PROVIDER,
+                "model_name": self.TTS_MODEL_NAME,
+                "voice_name": self.TTS_VOICE_NAME,
+                "api_key": self.TTS_API_KEY
+            },
+            "stt_provider": {
+                "provider": self.STT_PROVIDER,
+                "model_name": self.STT_MODEL_NAME,
+                "api_key": self.STT_API_KEY
             }
         }
 
diff --git a/ai-hub/app/core/providers/factory.py b/ai-hub/app/core/providers/factory.py
index dd336c0..303cdc2 100644
--- a/ai-hub/app/core/providers/factory.py
+++ b/ai-hub/app/core/providers/factory.py
@@ -64,18 +64,37 @@
 # --- 3. The Factory Functions ---
 def get_llm_provider(provider_name: str, model_name: str = "", system_prompt: str = None, api_key_override: str = None, **kwargs) -> BaseLM:
     """Factory function to get the appropriate, pre-configured LLM provider, with optional system prompt."""
-    providerKey = api_key_override or _llm_providers.get(provider_name)
+    # helper for masked/null keys
+    def is_empty(k):
+        return not k or k in ("None", "none", "") or "*" in str(k)
+
+    # 1. Resolve Provider Key
+    providerKey = api_key_override
+    if is_empty(providerKey):
+        # Check LLM_PROVIDERS dict first (hot-loaded via admin)
+        p_info = settings.LLM_PROVIDERS.get(provider_name, {})
+        providerKey = p_info.get("api_key")
+
+    # Secondary fallback to hardcoded env settings
+    if is_empty(providerKey):
+        if provider_name == "gemini": providerKey = settings.GEMINI_API_KEY
+        elif provider_name == "deepseek": providerKey = settings.DEEPSEEK_API_KEY
+
+    # 2. Resolve Model Name
+    modelName = model_name
+    if not modelName:
+        modelName = settings.LLM_PROVIDERS.get(provider_name, {}).get("model")
+    if not modelName:
+        if provider_name == "gemini": modelName = settings.GEMINI_MODEL_NAME
+        elif provider_name == "deepseek": modelName = settings.DEEPSEEK_MODEL_NAME
+        else:
+            raise ValueError(f"No model name provided for '{provider_name}'.")
+
     # Extract base type (e.g. 'gemini_2' -> 'gemini')
     litellm_providers = [p.value for p in litellm.LlmProviders]
     base_type = kwargs.get("provider_type") or resolve_provider_info(provider_name, "llm", _llm_providers, litellm_providers)
-    modelName = model_name
-    if not modelName:
-        modelName = _llm_models.get(provider_name)
-    if not modelName:
-        raise ValueError(f"No model name provided for '{provider_name}'.")
-
     full_model = f'{base_type}/{modelName}' if '/' not in modelName else modelName
 
     # Pass the optional system_prompt and kwargs to the GeneralProvider constructor