diff --git a/ai-hub/app/core/providers/llm/general.py b/ai-hub/app/core/providers/llm/general.py
index 6221c5a..565f67c 100644
--- a/ai-hub/app/core/providers/llm/general.py
+++ b/ai-hub/app/core/providers/llm/general.py
@@ -8,7 +8,7 @@
         self.system_prompt = system_prompt
         # Call the parent constructor
         max_tokens = 8000
-        if model_name == "gemini":
+        if model_name.startswith("gemini"):
             max_tokens = 10000000
         super().__init__(model=model_name, max_tokens=max_tokens, **kwargs)
diff --git a/ai-hub/tests/core/providers/llm/test_llm_general.py b/ai-hub/tests/core/providers/llm/test_llm_general.py
index bc28acd..cd59499 100644
--- a/ai-hub/tests/core/providers/llm/test_llm_general.py
+++ b/ai-hub/tests/core/providers/llm/test_llm_general.py
@@ -50,7 +50,7 @@
             messages=[{"role": "user", "content": self.prompt}],
             api_key=self.api_key,
             temperature=0.0, # <-- Add this line
-            max_tokens=1000, # <-- Add this line
+            max_tokens=10000000, # <-- Add this line
         )
     # Run the async test function