# cortex-hub / ai-hub / app / core / llm_providers.py
"""LLM provider abstractions and factory for the AI hub.

Wires concrete provider clients (DeepSeek via the OpenAI-compatible SDK,
Gemini via its REST ``generateContent`` endpoint) from the centralized
application settings. No environment variables are read here directly.
"""
import json
import logging
from abc import ABC, abstractmethod
from typing import final

import httpx
from openai import OpenAI

from app.config import settings  # centralized settings; the single config source

# --- 1. Initialize API Clients from Central Config ---
deepseek_client = OpenAI(api_key=settings.DEEPSEEK_API_KEY, base_url="https://api.deepseek.com")
# NOTE(review): the Gemini API key travels as a URL query parameter — avoid
# logging this URL anywhere.
GEMINI_URL = (
    "https://generativelanguage.googleapis.com/v1beta/models/"
    f"{settings.GEMINI_MODEL_NAME}:generateContent?key={settings.GEMINI_API_KEY}"
)


# --- 2. Provider Interface and Implementations ---
class LLMProvider(ABC):
    """Abstract base class ('interface') for all LLM providers."""

    @abstractmethod
    async def generate_response(self, prompt: str) -> str:
        """Generate a completion for *prompt* and return the response text.

        Concrete providers propagate their own network/API errors to the
        caller after logging them.
        """
        raise NotImplementedError

@final
class DeepSeekProvider(LLMProvider):
    """Provider for the DeepSeek API (OpenAI-compatible chat completions)."""

    def __init__(self, model_name: str):
        # Model identifier (e.g. "deepseek-chat"); injected by the factory
        # below from settings.DEEPSEEK_MODEL_NAME.
        self.model = model_name

    async def generate_response(self, prompt: str) -> str:
        """Send *prompt* as a single user turn and return the assistant reply."""
        import asyncio  # local import so this block is self-contained

        messages = [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": prompt},
        ]
        try:
            # The shared OpenAI SDK client is synchronous; run the request in
            # a worker thread so the event loop is not blocked while waiting
            # on the network.
            chat_completion = await asyncio.to_thread(
                deepseek_client.chat.completions.create,
                model=self.model,
                messages=messages,
            )
            return chat_completion.choices[0].message.content
        except Exception:
            logging.error("DeepSeek Provider Error", exc_info=True)
            raise

@final
class GeminiProvider(LLMProvider):
    """Provider for the Google Gemini REST API (``generateContent``)."""

    def __init__(self, api_url: str):
        # Full endpoint URL, including model name and API key query parameter.
        self.url = api_url

    async def generate_response(self, prompt: str) -> str:
        """POST *prompt* to the generateContent endpoint and return the text
        of the first candidate.

        Raises:
            httpx.HTTPStatusError: on non-2xx responses.
            KeyError / IndexError: if the response has no candidates
                (e.g. a safety-blocked prompt) — logged, then re-raised.
        """
        payload = {"contents": [{"parts": [{"text": prompt}]}]}
        headers = {"Content-Type": "application/json"}
        try:
            # LLM generations routinely exceed httpx's 5-second default
            # timeout, so set a generous explicit one.
            async with httpx.AsyncClient(timeout=60.0) as client:
                response = await client.post(self.url, json=payload, headers=headers)
                response.raise_for_status()
                data = response.json()
                return data['candidates'][0]['content']['parts'][0]['text']
        except Exception:
            logging.error("Gemini Provider Error", exc_info=True)
            raise

# --- 3. The Factory Function ---
# Providers are constructed once at import time from the settings object.
_providers = {
    "deepseek": DeepSeekProvider(model_name=settings.DEEPSEEK_MODEL_NAME),
    "gemini": GeminiProvider(api_url=GEMINI_URL),
}


def get_llm_provider(model_name: str) -> LLMProvider:
    """Factory function to get the appropriate, pre-configured LLM provider.

    Args:
        model_name: Provider key, e.g. "deepseek" or "gemini".

    Returns:
        The shared, pre-configured provider instance.

    Raises:
        ValueError: if *model_name* is not a configured provider.
    """
    provider = _providers.get(model_name)
    if provider is None:
        raise ValueError(
            f"Unsupported model provider: '{model_name}'. "
            f"Supported providers are: {list(_providers.keys())}"
        )
    return provider