diff --git a/ai-hub/app/core/providers/llm/general.py b/ai-hub/app/core/providers/llm/general.py
index c195c04..179232b 100644
--- a/ai-hub/app/core/providers/llm/general.py
+++ b/ai-hub/app/core/providers/llm/general.py
@@ -7,6 +7,10 @@
         self.api_key = api_key
         self.system_prompt = system_prompt
 
+        # Validate API key early: reject empty keys and masked placeholders (e.g. "sk-****")
+        if not api_key or "*" in str(api_key):
+            raise ValueError(f"Invalid or missing API key for LLM provider '{model_name}'. Please configure it in Settings.")
+
         # Determine max tokens dynamically via LiteLLM info
         max_tokens = 8000
         try:
@@ -47,7 +51,11 @@
         try:
             return litellm.completion(**request)
         except Exception as e:
-            raise RuntimeError(f"Failed to get response from LiteLLM for model '{self.model_name}': {e}")
+            # Distinguish authentication failures from other LiteLLM errors;
+            # chain with `from e` so the original traceback is preserved.
+            err_msg = str(e)
+            if "authentication" in err_msg.lower() or "401" in err_msg:
+                raise RuntimeError(f"Authentication failed for {self.model_name}. Check your API key.") from e
+            raise RuntimeError(f"LiteLLM Error ({self.model_name}): {err_msg}") from e
 
     async def aforward(self, prompt=None, messages=None, **kwargs):
         """
@@ -66,4 +74,7 @@
         try:
             return await litellm.acompletion(**request)
         except Exception as e:
-            raise RuntimeError(f"Failed to get response from LiteLLM for model '{self.model_name}': {e}")
\ No newline at end of file
+            err_msg = str(e)
+            if "authentication" in err_msg.lower() or "401" in err_msg:
+                raise RuntimeError(f"Authentication failed for {self.model_name}. Check your API key.") from e
+            raise RuntimeError(f"LiteLLM Error ({self.model_name}): {err_msg}") from e