diff --git a/ai-hub/app/api/routes.py b/ai-hub/app/api/routes.py
index 6cf3deb..23bfa68 100644
--- a/ai-hub/app/api/routes.py
+++ b/ai-hub/app/api/routes.py
@@ -5,7 +5,7 @@ from typing import Literal
 from sqlalchemy.orm import Session
 
 from app.core.services import RAGService
-from app.db_setup import get_db
+from app.db.session import get_db
 
 # Pydantic Models for API requests
 class ChatRequest(BaseModel):
diff --git a/ai-hub/app/app.py b/ai-hub/app/app.py
index 7b82b4b..d18d97b 100644
--- a/ai-hub/app/app.py
+++ b/ai-hub/app/app.py
@@ -10,7 +10,7 @@
 from app.core.services import RAGService
 
 # Import the new files for database and API routes
-from app.db_setup import create_db_tables
+from app.db.session import create_db_and_tables
 from app.api.routes import create_api_router
 
 # Load environment variables from a .env file
@@ -37,7 +37,7 @@
         cleanup on shutdown.
         """
         print("Initializing application services...")
-        create_db_tables()
+        create_db_and_tables()
         yield
         print("Shutting down application services...")
         vector_store.save_index()
diff --git a/ai-hub/app/config.py b/ai-hub/app/config.py
new file mode 100644
index 0000000..ab8f8f4
--- /dev/null
+++ b/ai-hub/app/config.py
@@ -0,0 +1,29 @@
+# app/config.py
+
+import os
+from dotenv import load_dotenv
+
+# Load environment variables from a .env file
+load_dotenv()
+
+class Settings:
+    # --- Database ---
+    DB_MODE: str = os.getenv("DB_MODE", "sqlite")
+    DATABASE_URL: str = (
+        "sqlite:///./data/ai_hub.db"
+        if DB_MODE == "sqlite"
+        else os.getenv("DATABASE_URL", "postgresql://user:password@localhost/ai_hub_db")
+    )
+
+    # --- LLM API Keys & Models ---
+    DEEPSEEK_API_KEY: str = os.getenv("DEEPSEEK_API_KEY")
+    GEMINI_API_KEY: str = os.getenv("GEMINI_API_KEY")
+    DEEPSEEK_MODEL_NAME: str = os.getenv("DEEPSEEK_MODEL_NAME", "deepseek-chat")
+    GEMINI_MODEL_NAME: str = os.getenv("GEMINI_MODEL_NAME", "gemini-1.5-flash-latest")
+
+    # --- Vector Store ---
+    FAISS_INDEX_PATH: str = os.getenv("FAISS_INDEX_PATH", "data/faiss_index.bin")
+    EMBEDDING_DIMENSION: int = int(os.getenv("EMBEDDING_DIMENSION", 768))
+
+# Instantiate the settings so they can be imported and used anywhere
+settings = Settings()
\ No newline at end of file
""" print("Initializing application services...") - create_db_tables() + create_db_and_tables() yield print("Shutting down application services...") vector_store.save_index() diff --git a/ai-hub/app/config.py b/ai-hub/app/config.py new file mode 100644 index 0000000..ab8f8f4 --- /dev/null +++ b/ai-hub/app/config.py @@ -0,0 +1,29 @@ +# app/config.py + +import os +from dotenv import load_dotenv + +# Load environment variables from a .env file +load_dotenv() + +class Settings: + # --- Database --- + DB_MODE: str = os.getenv("DB_MODE", "sqlite") + DATABASE_URL: str = ( + "sqlite:///./data/ai_hub.db" + if DB_MODE == "sqlite" + else os.getenv("DATABASE_URL", "postgresql://user:password@localhost/ai_hub_db") + ) + + # --- LLM API Keys & Models --- + DEEPSEEK_API_KEY: str = os.getenv("DEEPSEEK_API_KEY") + GEMINI_API_KEY: str = os.getenv("GEMINI_API_KEY") + DEEPSEEK_MODEL_NAME: str = os.getenv("DEEPSEEK_MODEL_NAME", "deepseek-chat") + GEMINI_MODEL_NAME: str = os.getenv("GEMINI_MODEL_NAME", "gemini-1.5-flash-latest") + + # --- Vector Store --- + FAISS_INDEX_PATH: str = os.getenv("FAISS_INDEX_PATH", "data/faiss_index.bin") + EMBEDDING_DIMENSION: int = int(os.getenv("EMBEDDING_DIMENSION", 768)) + +# Instantiate the settings so they can be imported and used anywhere +settings = Settings() \ No newline at end of file diff --git a/ai-hub/app/db/database.py b/ai-hub/app/db/database.py index cbb6b13..c2dfee1 100644 --- a/ai-hub/app/db/database.py +++ b/ai-hub/app/db/database.py @@ -1,54 +1,5 @@ -import os -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker, declarative_base # <-- CORRECTED IMPORT +from sqlalchemy.orm import declarative_base -# --- Configuration --- -# Determines the database mode. Can be "postgres" or "sqlite". -# Defaults to "postgres" if not set. -DB_MODE = os.getenv("DB_MODE", "postgres").lower() - -# Default database URLs -POSTGRES_DEFAULT_URL = "postgresql://user:password@localhost/ai_hub_db" -SQLITE_DEFAULT_URL = "sqlite:///./data/ai_hub.db" - -DATABASE_URL = "" -engine_args = {} - -# --- Database Initialization --- -if DB_MODE == "sqlite": - print("✅ Initializing with SQLite in-file database.") - DATABASE_URL = SQLITE_DEFAULT_URL - # SQLite requires a specific argument to allow access from multiple threads, - # which is common in web applications. - engine_args = {"connect_args": {"check_same_thread": False}} -else: # Default to postgres - # Use the provided DATABASE_URL or fall back to the default. - DATABASE_URL = os.getenv("DATABASE_URL", POSTGRES_DEFAULT_URL) - DB_MODE = "postgres" - print(f"✅ Initializing with PostgreSQL database. URL: {DATABASE_URL}") - # pool_pre_ping checks if a connection is still alive before using it from the pool. - engine_args = {"pool_pre_ping": True} - - -# Create the SQLAlchemy engine with the determined settings -engine = create_engine(DATABASE_URL, **engine_args) - -# SessionLocal is a factory for creating new database session objects -SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) - -# Base is a class that our database model classes will inherit from. -Base = declarative_base() - - -# --- Dependency for FastAPI --- -def get_db(): - """ - FastAPI dependency that provides a database session for a single API request. - It ensures the session is always closed after the request is finished. - """ - db = SessionLocal() - try: - yield db - finally: - db.close() - +# This Base class is the foundation that all your SQLAlchemy ORM models +# (like Document, VectorMetadata, etc.) will inherit from. 
+Base = declarative_base() \ No newline at end of file diff --git a/ai-hub/app/api/routes.py b/ai-hub/app/api/routes.py index 6cf3deb..23bfa68 100644 --- a/ai-hub/app/api/routes.py +++ b/ai-hub/app/api/routes.py @@ -5,7 +5,7 @@ from typing import Literal from sqlalchemy.orm import Session from app.core.services import RAGService -from app.db_setup import get_db +from app.db.session import get_db # Pydantic Models for API requests class ChatRequest(BaseModel): diff --git a/ai-hub/app/app.py b/ai-hub/app/app.py index 7b82b4b..d18d97b 100644 --- a/ai-hub/app/app.py +++ b/ai-hub/app/app.py @@ -10,7 +10,7 @@ from app.core.services import RAGService # Import the new files for database and API routes -from app.db_setup import create_db_tables +from app.db.session import create_db_and_tables from app.api.routes import create_api_router # Load environment variables from a .env file @@ -37,7 +37,7 @@ cleanup on shutdown. """ print("Initializing application services...") - create_db_tables() + create_db_and_tables() yield print("Shutting down application services...") vector_store.save_index() diff --git a/ai-hub/app/config.py b/ai-hub/app/config.py new file mode 100644 index 0000000..ab8f8f4 --- /dev/null +++ b/ai-hub/app/config.py @@ -0,0 +1,29 @@ +# app/config.py + +import os +from dotenv import load_dotenv + +# Load environment variables from a .env file +load_dotenv() + +class Settings: + # --- Database --- + DB_MODE: str = os.getenv("DB_MODE", "sqlite") + DATABASE_URL: str = ( + "sqlite:///./data/ai_hub.db" + if DB_MODE == "sqlite" + else os.getenv("DATABASE_URL", "postgresql://user:password@localhost/ai_hub_db") + ) + + # --- LLM API Keys & Models --- + DEEPSEEK_API_KEY: str = os.getenv("DEEPSEEK_API_KEY") + GEMINI_API_KEY: str = os.getenv("GEMINI_API_KEY") + DEEPSEEK_MODEL_NAME: str = os.getenv("DEEPSEEK_MODEL_NAME", "deepseek-chat") + GEMINI_MODEL_NAME: str = os.getenv("GEMINI_MODEL_NAME", "gemini-1.5-flash-latest") + + # --- Vector Store --- + FAISS_INDEX_PATH: str = os.getenv("FAISS_INDEX_PATH", "data/faiss_index.bin") + EMBEDDING_DIMENSION: int = int(os.getenv("EMBEDDING_DIMENSION", 768)) + +# Instantiate the settings so they can be imported and used anywhere +settings = Settings() \ No newline at end of file diff --git a/ai-hub/app/db/database.py b/ai-hub/app/db/database.py index cbb6b13..c2dfee1 100644 --- a/ai-hub/app/db/database.py +++ b/ai-hub/app/db/database.py @@ -1,54 +1,5 @@ -import os -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker, declarative_base # <-- CORRECTED IMPORT +from sqlalchemy.orm import declarative_base -# --- Configuration --- -# Determines the database mode. Can be "postgres" or "sqlite". -# Defaults to "postgres" if not set. -DB_MODE = os.getenv("DB_MODE", "postgres").lower() - -# Default database URLs -POSTGRES_DEFAULT_URL = "postgresql://user:password@localhost/ai_hub_db" -SQLITE_DEFAULT_URL = "sqlite:///./data/ai_hub.db" - -DATABASE_URL = "" -engine_args = {} - -# --- Database Initialization --- -if DB_MODE == "sqlite": - print("✅ Initializing with SQLite in-file database.") - DATABASE_URL = SQLITE_DEFAULT_URL - # SQLite requires a specific argument to allow access from multiple threads, - # which is common in web applications. - engine_args = {"connect_args": {"check_same_thread": False}} -else: # Default to postgres - # Use the provided DATABASE_URL or fall back to the default. - DATABASE_URL = os.getenv("DATABASE_URL", POSTGRES_DEFAULT_URL) - DB_MODE = "postgres" - print(f"✅ Initializing with PostgreSQL database. 
URL: {DATABASE_URL}") - # pool_pre_ping checks if a connection is still alive before using it from the pool. - engine_args = {"pool_pre_ping": True} - - -# Create the SQLAlchemy engine with the determined settings -engine = create_engine(DATABASE_URL, **engine_args) - -# SessionLocal is a factory for creating new database session objects -SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) - -# Base is a class that our database model classes will inherit from. -Base = declarative_base() - - -# --- Dependency for FastAPI --- -def get_db(): - """ - FastAPI dependency that provides a database session for a single API request. - It ensures the session is always closed after the request is finished. - """ - db = SessionLocal() - try: - yield db - finally: - db.close() - +# This Base class is the foundation that all your SQLAlchemy ORM models +# (like Document, VectorMetadata, etc.) will inherit from. +Base = declarative_base() \ No newline at end of file diff --git a/ai-hub/app/db/session.py b/ai-hub/app/db/session.py new file mode 100644 index 0000000..26e8938 --- /dev/null +++ b/ai-hub/app/db/session.py @@ -0,0 +1,44 @@ +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from app.config import settings +from app.db.database import Base + +# Determine engine arguments based on the database mode from the central config +engine_args = {} +if settings.DB_MODE == "sqlite": + # This argument is required for SQLite to allow it to be used by multiple threads, + # which is the case in a web application like FastAPI. + engine_args["connect_args"] = {"check_same_thread": False} +else: + # 'pool_pre_ping' checks if a database connection is still alive before using it. + # This prevents errors from connections that have been timed out by the DB server. + engine_args["pool_pre_ping"] = True + +# Create the SQLAlchemy engine using the centralized URL and determined arguments +engine = create_engine(settings.DATABASE_URL, **engine_args) + +# SessionLocal is a factory for creating new database session objects. +# It's the standard way to interact with the database in SQLAlchemy. +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + +def create_db_and_tables(): + """ + Creates all database tables defined by models inheriting from Base. + This is typically called once on application startup. + """ + print("Creating database tables...") + # Base.metadata contains all the schema information from your models. + Base.metadata.create_all(bind=engine) + +def get_db(): + """ + FastAPI dependency that provides a database session for a single API request. + + This pattern ensures that the database session is always closed after the + request is finished, even if an error occurs. 
+ """ + db = SessionLocal() + try: + yield db + finally: + db.close() \ No newline at end of file diff --git a/ai-hub/app/api/routes.py b/ai-hub/app/api/routes.py index 6cf3deb..23bfa68 100644 --- a/ai-hub/app/api/routes.py +++ b/ai-hub/app/api/routes.py @@ -5,7 +5,7 @@ from typing import Literal from sqlalchemy.orm import Session from app.core.services import RAGService -from app.db_setup import get_db +from app.db.session import get_db # Pydantic Models for API requests class ChatRequest(BaseModel): diff --git a/ai-hub/app/app.py b/ai-hub/app/app.py index 7b82b4b..d18d97b 100644 --- a/ai-hub/app/app.py +++ b/ai-hub/app/app.py @@ -10,7 +10,7 @@ from app.core.services import RAGService # Import the new files for database and API routes -from app.db_setup import create_db_tables +from app.db.session import create_db_and_tables from app.api.routes import create_api_router # Load environment variables from a .env file @@ -37,7 +37,7 @@ cleanup on shutdown. """ print("Initializing application services...") - create_db_tables() + create_db_and_tables() yield print("Shutting down application services...") vector_store.save_index() diff --git a/ai-hub/app/config.py b/ai-hub/app/config.py new file mode 100644 index 0000000..ab8f8f4 --- /dev/null +++ b/ai-hub/app/config.py @@ -0,0 +1,29 @@ +# app/config.py + +import os +from dotenv import load_dotenv + +# Load environment variables from a .env file +load_dotenv() + +class Settings: + # --- Database --- + DB_MODE: str = os.getenv("DB_MODE", "sqlite") + DATABASE_URL: str = ( + "sqlite:///./data/ai_hub.db" + if DB_MODE == "sqlite" + else os.getenv("DATABASE_URL", "postgresql://user:password@localhost/ai_hub_db") + ) + + # --- LLM API Keys & Models --- + DEEPSEEK_API_KEY: str = os.getenv("DEEPSEEK_API_KEY") + GEMINI_API_KEY: str = os.getenv("GEMINI_API_KEY") + DEEPSEEK_MODEL_NAME: str = os.getenv("DEEPSEEK_MODEL_NAME", "deepseek-chat") + GEMINI_MODEL_NAME: str = os.getenv("GEMINI_MODEL_NAME", "gemini-1.5-flash-latest") + + # --- Vector Store --- + FAISS_INDEX_PATH: str = os.getenv("FAISS_INDEX_PATH", "data/faiss_index.bin") + EMBEDDING_DIMENSION: int = int(os.getenv("EMBEDDING_DIMENSION", 768)) + +# Instantiate the settings so they can be imported and used anywhere +settings = Settings() \ No newline at end of file diff --git a/ai-hub/app/db/database.py b/ai-hub/app/db/database.py index cbb6b13..c2dfee1 100644 --- a/ai-hub/app/db/database.py +++ b/ai-hub/app/db/database.py @@ -1,54 +1,5 @@ -import os -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker, declarative_base # <-- CORRECTED IMPORT +from sqlalchemy.orm import declarative_base -# --- Configuration --- -# Determines the database mode. Can be "postgres" or "sqlite". -# Defaults to "postgres" if not set. -DB_MODE = os.getenv("DB_MODE", "postgres").lower() - -# Default database URLs -POSTGRES_DEFAULT_URL = "postgresql://user:password@localhost/ai_hub_db" -SQLITE_DEFAULT_URL = "sqlite:///./data/ai_hub.db" - -DATABASE_URL = "" -engine_args = {} - -# --- Database Initialization --- -if DB_MODE == "sqlite": - print("✅ Initializing with SQLite in-file database.") - DATABASE_URL = SQLITE_DEFAULT_URL - # SQLite requires a specific argument to allow access from multiple threads, - # which is common in web applications. - engine_args = {"connect_args": {"check_same_thread": False}} -else: # Default to postgres - # Use the provided DATABASE_URL or fall back to the default. 
+ """ + db = SessionLocal() + try: + yield db + finally: + db.close() \ No newline at end of file diff --git a/ai-hub/app/db_setup.py b/ai-hub/app/db_setup.py deleted file mode 100644 index 63a64a6..0000000 --- a/ai-hub/app/db_setup.py +++ /dev/null @@ -1,36 +0,0 @@ -import os -from dotenv import load_dotenv -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker, Session -from app.db.database import Base # Assuming `Base` is in this file - -# Load environment variables from a .env file -load_dotenv() - -# --- Database Connection Setup --- -# This configuration allows for easy switching between SQLite and PostgreSQL. -DB_MODE = os.getenv("DB_MODE", "sqlite") -if DB_MODE == "sqlite": - DATABASE_URL = "sqlite:///./data/ai_hub.db" - # The connect_args are needed for SQLite to work with FastAPI's multiple threads - engine = create_engine(DATABASE_URL, connect_args={"check_same_thread": False}) -else: - DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://user:password@localhost/ai_hub_db") - engine = create_engine(DATABASE_URL) - -# Create a database session class -SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) - -def create_db_tables(): - """Create all database tables based on the models.""" - print("Creating database tables...") - Base.metadata.create_all(bind=engine) - -# The dependency to get a database session -def get_db(): - """Dependency that provides a database session.""" - db = SessionLocal() - try: - yield db - finally: - db.close() \ No newline at end of file diff --git a/ai-hub/app/api/routes.py b/ai-hub/app/api/routes.py index 6cf3deb..23bfa68 100644 --- a/ai-hub/app/api/routes.py +++ b/ai-hub/app/api/routes.py @@ -5,7 +5,7 @@ from typing import Literal from sqlalchemy.orm import Session from app.core.services import RAGService -from app.db_setup import get_db +from app.db.session import get_db # Pydantic Models for API requests class ChatRequest(BaseModel): diff --git a/ai-hub/app/app.py b/ai-hub/app/app.py index 7b82b4b..d18d97b 100644 --- a/ai-hub/app/app.py +++ b/ai-hub/app/app.py @@ -10,7 +10,7 @@ from app.core.services import RAGService # Import the new files for database and API routes -from app.db_setup import create_db_tables +from app.db.session import create_db_and_tables from app.api.routes import create_api_router # Load environment variables from a .env file @@ -37,7 +37,7 @@ cleanup on shutdown. 
""" print("Initializing application services...") - create_db_tables() + create_db_and_tables() yield print("Shutting down application services...") vector_store.save_index() diff --git a/ai-hub/app/config.py b/ai-hub/app/config.py new file mode 100644 index 0000000..ab8f8f4 --- /dev/null +++ b/ai-hub/app/config.py @@ -0,0 +1,29 @@ +# app/config.py + +import os +from dotenv import load_dotenv + +# Load environment variables from a .env file +load_dotenv() + +class Settings: + # --- Database --- + DB_MODE: str = os.getenv("DB_MODE", "sqlite") + DATABASE_URL: str = ( + "sqlite:///./data/ai_hub.db" + if DB_MODE == "sqlite" + else os.getenv("DATABASE_URL", "postgresql://user:password@localhost/ai_hub_db") + ) + + # --- LLM API Keys & Models --- + DEEPSEEK_API_KEY: str = os.getenv("DEEPSEEK_API_KEY") + GEMINI_API_KEY: str = os.getenv("GEMINI_API_KEY") + DEEPSEEK_MODEL_NAME: str = os.getenv("DEEPSEEK_MODEL_NAME", "deepseek-chat") + GEMINI_MODEL_NAME: str = os.getenv("GEMINI_MODEL_NAME", "gemini-1.5-flash-latest") + + # --- Vector Store --- + FAISS_INDEX_PATH: str = os.getenv("FAISS_INDEX_PATH", "data/faiss_index.bin") + EMBEDDING_DIMENSION: int = int(os.getenv("EMBEDDING_DIMENSION", 768)) + +# Instantiate the settings so they can be imported and used anywhere +settings = Settings() \ No newline at end of file diff --git a/ai-hub/app/db/database.py b/ai-hub/app/db/database.py index cbb6b13..c2dfee1 100644 --- a/ai-hub/app/db/database.py +++ b/ai-hub/app/db/database.py @@ -1,54 +1,5 @@ -import os -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker, declarative_base # <-- CORRECTED IMPORT +from sqlalchemy.orm import declarative_base -# --- Configuration --- -# Determines the database mode. Can be "postgres" or "sqlite". -# Defaults to "postgres" if not set. -DB_MODE = os.getenv("DB_MODE", "postgres").lower() - -# Default database URLs -POSTGRES_DEFAULT_URL = "postgresql://user:password@localhost/ai_hub_db" -SQLITE_DEFAULT_URL = "sqlite:///./data/ai_hub.db" - -DATABASE_URL = "" -engine_args = {} - -# --- Database Initialization --- -if DB_MODE == "sqlite": - print("✅ Initializing with SQLite in-file database.") - DATABASE_URL = SQLITE_DEFAULT_URL - # SQLite requires a specific argument to allow access from multiple threads, - # which is common in web applications. - engine_args = {"connect_args": {"check_same_thread": False}} -else: # Default to postgres - # Use the provided DATABASE_URL or fall back to the default. - DATABASE_URL = os.getenv("DATABASE_URL", POSTGRES_DEFAULT_URL) - DB_MODE = "postgres" - print(f"✅ Initializing with PostgreSQL database. URL: {DATABASE_URL}") - # pool_pre_ping checks if a connection is still alive before using it from the pool. - engine_args = {"pool_pre_ping": True} - - -# Create the SQLAlchemy engine with the determined settings -engine = create_engine(DATABASE_URL, **engine_args) - -# SessionLocal is a factory for creating new database session objects -SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) - -# Base is a class that our database model classes will inherit from. -Base = declarative_base() - - -# --- Dependency for FastAPI --- -def get_db(): - """ - FastAPI dependency that provides a database session for a single API request. - It ensures the session is always closed after the request is finished. - """ - db = SessionLocal() - try: - yield db - finally: - db.close() - +# This Base class is the foundation that all your SQLAlchemy ORM models +# (like Document, VectorMetadata, etc.) will inherit from. 
+Base = declarative_base() \ No newline at end of file diff --git a/ai-hub/app/db/session.py b/ai-hub/app/db/session.py new file mode 100644 index 0000000..26e8938 --- /dev/null +++ b/ai-hub/app/db/session.py @@ -0,0 +1,44 @@ +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from app.config import settings +from app.db.database import Base + +# Determine engine arguments based on the database mode from the central config +engine_args = {} +if settings.DB_MODE == "sqlite": + # This argument is required for SQLite to allow it to be used by multiple threads, + # which is the case in a web application like FastAPI. + engine_args["connect_args"] = {"check_same_thread": False} +else: + # 'pool_pre_ping' checks if a database connection is still alive before using it. + # This prevents errors from connections that have been timed out by the DB server. + engine_args["pool_pre_ping"] = True + +# Create the SQLAlchemy engine using the centralized URL and determined arguments +engine = create_engine(settings.DATABASE_URL, **engine_args) + +# SessionLocal is a factory for creating new database session objects. +# It's the standard way to interact with the database in SQLAlchemy. +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + +def create_db_and_tables(): + """ + Creates all database tables defined by models inheriting from Base. + This is typically called once on application startup. + """ + print("Creating database tables...") + # Base.metadata contains all the schema information from your models. + Base.metadata.create_all(bind=engine) + +def get_db(): + """ + FastAPI dependency that provides a database session for a single API request. + + This pattern ensures that the database session is always closed after the + request is finished, even if an error occurs. + """ + db = SessionLocal() + try: + yield db + finally: + db.close() \ No newline at end of file diff --git a/ai-hub/app/db_setup.py b/ai-hub/app/db_setup.py deleted file mode 100644 index 63a64a6..0000000 --- a/ai-hub/app/db_setup.py +++ /dev/null @@ -1,36 +0,0 @@ -import os -from dotenv import load_dotenv -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker, Session -from app.db.database import Base # Assuming `Base` is in this file - -# Load environment variables from a .env file -load_dotenv() - -# --- Database Connection Setup --- -# This configuration allows for easy switching between SQLite and PostgreSQL. 
-DB_MODE = os.getenv("DB_MODE", "sqlite") -if DB_MODE == "sqlite": - DATABASE_URL = "sqlite:///./data/ai_hub.db" - # The connect_args are needed for SQLite to work with FastAPI's multiple threads - engine = create_engine(DATABASE_URL, connect_args={"check_same_thread": False}) -else: - DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://user:password@localhost/ai_hub_db") - engine = create_engine(DATABASE_URL) - -# Create a database session class -SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) - -def create_db_tables(): - """Create all database tables based on the models.""" - print("Creating database tables...") - Base.metadata.create_all(bind=engine) - -# The dependency to get a database session -def get_db(): - """Dependency that provides a database session.""" - db = SessionLocal() - try: - yield db - finally: - db.close() \ No newline at end of file diff --git a/ai-hub/tests/api/test_routes.py b/ai-hub/tests/api/test_routes.py index b98fdbf..fdee764 100644 --- a/ai-hub/tests/api/test_routes.py +++ b/ai-hub/tests/api/test_routes.py @@ -8,7 +8,7 @@ # Import the dependencies and router factory from app.core.services import RAGService -from app.db_setup import get_db +from app.db.session import get_db from app.api.routes import create_api_router @pytest.fixture diff --git a/ai-hub/app/api/routes.py b/ai-hub/app/api/routes.py index 6cf3deb..23bfa68 100644 --- a/ai-hub/app/api/routes.py +++ b/ai-hub/app/api/routes.py @@ -5,7 +5,7 @@ from typing import Literal from sqlalchemy.orm import Session from app.core.services import RAGService -from app.db_setup import get_db +from app.db.session import get_db # Pydantic Models for API requests class ChatRequest(BaseModel): diff --git a/ai-hub/app/app.py b/ai-hub/app/app.py index 7b82b4b..d18d97b 100644 --- a/ai-hub/app/app.py +++ b/ai-hub/app/app.py @@ -10,7 +10,7 @@ from app.core.services import RAGService # Import the new files for database and API routes -from app.db_setup import create_db_tables +from app.db.session import create_db_and_tables from app.api.routes import create_api_router # Load environment variables from a .env file @@ -37,7 +37,7 @@ cleanup on shutdown. 
""" print("Initializing application services...") - create_db_tables() + create_db_and_tables() yield print("Shutting down application services...") vector_store.save_index() diff --git a/ai-hub/app/config.py b/ai-hub/app/config.py new file mode 100644 index 0000000..ab8f8f4 --- /dev/null +++ b/ai-hub/app/config.py @@ -0,0 +1,29 @@ +# app/config.py + +import os +from dotenv import load_dotenv + +# Load environment variables from a .env file +load_dotenv() + +class Settings: + # --- Database --- + DB_MODE: str = os.getenv("DB_MODE", "sqlite") + DATABASE_URL: str = ( + "sqlite:///./data/ai_hub.db" + if DB_MODE == "sqlite" + else os.getenv("DATABASE_URL", "postgresql://user:password@localhost/ai_hub_db") + ) + + # --- LLM API Keys & Models --- + DEEPSEEK_API_KEY: str = os.getenv("DEEPSEEK_API_KEY") + GEMINI_API_KEY: str = os.getenv("GEMINI_API_KEY") + DEEPSEEK_MODEL_NAME: str = os.getenv("DEEPSEEK_MODEL_NAME", "deepseek-chat") + GEMINI_MODEL_NAME: str = os.getenv("GEMINI_MODEL_NAME", "gemini-1.5-flash-latest") + + # --- Vector Store --- + FAISS_INDEX_PATH: str = os.getenv("FAISS_INDEX_PATH", "data/faiss_index.bin") + EMBEDDING_DIMENSION: int = int(os.getenv("EMBEDDING_DIMENSION", 768)) + +# Instantiate the settings so they can be imported and used anywhere +settings = Settings() \ No newline at end of file diff --git a/ai-hub/app/db/database.py b/ai-hub/app/db/database.py index cbb6b13..c2dfee1 100644 --- a/ai-hub/app/db/database.py +++ b/ai-hub/app/db/database.py @@ -1,54 +1,5 @@ -import os -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker, declarative_base # <-- CORRECTED IMPORT +from sqlalchemy.orm import declarative_base -# --- Configuration --- -# Determines the database mode. Can be "postgres" or "sqlite". -# Defaults to "postgres" if not set. -DB_MODE = os.getenv("DB_MODE", "postgres").lower() - -# Default database URLs -POSTGRES_DEFAULT_URL = "postgresql://user:password@localhost/ai_hub_db" -SQLITE_DEFAULT_URL = "sqlite:///./data/ai_hub.db" - -DATABASE_URL = "" -engine_args = {} - -# --- Database Initialization --- -if DB_MODE == "sqlite": - print("✅ Initializing with SQLite in-file database.") - DATABASE_URL = SQLITE_DEFAULT_URL - # SQLite requires a specific argument to allow access from multiple threads, - # which is common in web applications. - engine_args = {"connect_args": {"check_same_thread": False}} -else: # Default to postgres - # Use the provided DATABASE_URL or fall back to the default. - DATABASE_URL = os.getenv("DATABASE_URL", POSTGRES_DEFAULT_URL) - DB_MODE = "postgres" - print(f"✅ Initializing with PostgreSQL database. URL: {DATABASE_URL}") - # pool_pre_ping checks if a connection is still alive before using it from the pool. - engine_args = {"pool_pre_ping": True} - - -# Create the SQLAlchemy engine with the determined settings -engine = create_engine(DATABASE_URL, **engine_args) - -# SessionLocal is a factory for creating new database session objects -SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) - -# Base is a class that our database model classes will inherit from. -Base = declarative_base() - - -# --- Dependency for FastAPI --- -def get_db(): - """ - FastAPI dependency that provides a database session for a single API request. - It ensures the session is always closed after the request is finished. - """ - db = SessionLocal() - try: - yield db - finally: - db.close() - +# This Base class is the foundation that all your SQLAlchemy ORM models +# (like Document, VectorMetadata, etc.) will inherit from. 
+Base = declarative_base() \ No newline at end of file diff --git a/ai-hub/app/db/session.py b/ai-hub/app/db/session.py new file mode 100644 index 0000000..26e8938 --- /dev/null +++ b/ai-hub/app/db/session.py @@ -0,0 +1,44 @@ +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from app.config import settings +from app.db.database import Base + +# Determine engine arguments based on the database mode from the central config +engine_args = {} +if settings.DB_MODE == "sqlite": + # This argument is required for SQLite to allow it to be used by multiple threads, + # which is the case in a web application like FastAPI. + engine_args["connect_args"] = {"check_same_thread": False} +else: + # 'pool_pre_ping' checks if a database connection is still alive before using it. + # This prevents errors from connections that have been timed out by the DB server. + engine_args["pool_pre_ping"] = True + +# Create the SQLAlchemy engine using the centralized URL and determined arguments +engine = create_engine(settings.DATABASE_URL, **engine_args) + +# SessionLocal is a factory for creating new database session objects. +# It's the standard way to interact with the database in SQLAlchemy. +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + +def create_db_and_tables(): + """ + Creates all database tables defined by models inheriting from Base. + This is typically called once on application startup. + """ + print("Creating database tables...") + # Base.metadata contains all the schema information from your models. + Base.metadata.create_all(bind=engine) + +def get_db(): + """ + FastAPI dependency that provides a database session for a single API request. + + This pattern ensures that the database session is always closed after the + request is finished, even if an error occurs. + """ + db = SessionLocal() + try: + yield db + finally: + db.close() \ No newline at end of file diff --git a/ai-hub/app/db_setup.py b/ai-hub/app/db_setup.py deleted file mode 100644 index 63a64a6..0000000 --- a/ai-hub/app/db_setup.py +++ /dev/null @@ -1,36 +0,0 @@ -import os -from dotenv import load_dotenv -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker, Session -from app.db.database import Base # Assuming `Base` is in this file - -# Load environment variables from a .env file -load_dotenv() - -# --- Database Connection Setup --- -# This configuration allows for easy switching between SQLite and PostgreSQL. 
-DB_MODE = os.getenv("DB_MODE", "sqlite") -if DB_MODE == "sqlite": - DATABASE_URL = "sqlite:///./data/ai_hub.db" - # The connect_args are needed for SQLite to work with FastAPI's multiple threads - engine = create_engine(DATABASE_URL, connect_args={"check_same_thread": False}) -else: - DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://user:password@localhost/ai_hub_db") - engine = create_engine(DATABASE_URL) - -# Create a database session class -SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) - -def create_db_tables(): - """Create all database tables based on the models.""" - print("Creating database tables...") - Base.metadata.create_all(bind=engine) - -# The dependency to get a database session -def get_db(): - """Dependency that provides a database session.""" - db = SessionLocal() - try: - yield db - finally: - db.close() \ No newline at end of file diff --git a/ai-hub/tests/api/test_routes.py b/ai-hub/tests/api/test_routes.py index b98fdbf..fdee764 100644 --- a/ai-hub/tests/api/test_routes.py +++ b/ai-hub/tests/api/test_routes.py @@ -8,7 +8,7 @@ # Import the dependencies and router factory from app.core.services import RAGService -from app.db_setup import get_db +from app.db.session import get_db from app.api.routes import create_api_router @pytest.fixture diff --git a/ai-hub/tests/app.py b/ai-hub/tests/app.py new file mode 100644 index 0000000..b7e6fb3 --- /dev/null +++ b/ai-hub/tests/app.py @@ -0,0 +1,56 @@ +import os +from contextlib import asynccontextmanager +from fastapi import FastAPI +from dotenv import load_dotenv +from typing import List + +# Import core application logic +from app.core.vector_store import FaissVectorStore +from app.core.retrievers import FaissDBRetriever, Retriever +from app.core.services import RAGService + +# Import the new files for database and API routes +from app.db.session import create_db_tables +from app.api.routes import create_api_router + +# Load environment variables from a .env file +load_dotenv() + +# --- Application Factory Function --- +def create_app() -> FastAPI: + """ + Factory function to create and configure the FastAPI application. + This encapsulates all setup logic, making the main entry point clean. + """ + # Initialize core services for RAG + # CORRECTED: Now passing the required arguments to FaissVectorStore + vector_store = FaissVectorStore(index_file_path="data/faiss_index.bin", dimension=768) + retrievers: List[Retriever] = [ + FaissDBRetriever(vector_store=vector_store), + ] + rag_service = RAGService(vector_store=vector_store, retrievers=retrievers) + + @asynccontextmanager + async def lifespan(app: FastAPI): + """ + Initializes the database and vector store on startup and handles + cleanup on shutdown. 
+ """ + print("Initializing application services...") + create_db_tables() + yield + print("Shutting down application services...") + vector_store.save_index() + + app = FastAPI( + title="AI Model Hub Service", + description="A extensible hub to route requests to various LLMs with RAG capabilities.", + version="0.0.0", + lifespan=lifespan + ) + + # Create and include the API router + api_router = create_api_router(rag_service=rag_service) + app.include_router(api_router) + + return app diff --git a/ai-hub/app/api/routes.py b/ai-hub/app/api/routes.py index 6cf3deb..23bfa68 100644 --- a/ai-hub/app/api/routes.py +++ b/ai-hub/app/api/routes.py @@ -5,7 +5,7 @@ from typing import Literal from sqlalchemy.orm import Session from app.core.services import RAGService -from app.db_setup import get_db +from app.db.session import get_db # Pydantic Models for API requests class ChatRequest(BaseModel): diff --git a/ai-hub/app/app.py b/ai-hub/app/app.py index 7b82b4b..d18d97b 100644 --- a/ai-hub/app/app.py +++ b/ai-hub/app/app.py @@ -10,7 +10,7 @@ from app.core.services import RAGService # Import the new files for database and API routes -from app.db_setup import create_db_tables +from app.db.session import create_db_and_tables from app.api.routes import create_api_router # Load environment variables from a .env file @@ -37,7 +37,7 @@ cleanup on shutdown. """ print("Initializing application services...") - create_db_tables() + create_db_and_tables() yield print("Shutting down application services...") vector_store.save_index() diff --git a/ai-hub/app/config.py b/ai-hub/app/config.py new file mode 100644 index 0000000..ab8f8f4 --- /dev/null +++ b/ai-hub/app/config.py @@ -0,0 +1,29 @@ +# app/config.py + +import os +from dotenv import load_dotenv + +# Load environment variables from a .env file +load_dotenv() + +class Settings: + # --- Database --- + DB_MODE: str = os.getenv("DB_MODE", "sqlite") + DATABASE_URL: str = ( + "sqlite:///./data/ai_hub.db" + if DB_MODE == "sqlite" + else os.getenv("DATABASE_URL", "postgresql://user:password@localhost/ai_hub_db") + ) + + # --- LLM API Keys & Models --- + DEEPSEEK_API_KEY: str = os.getenv("DEEPSEEK_API_KEY") + GEMINI_API_KEY: str = os.getenv("GEMINI_API_KEY") + DEEPSEEK_MODEL_NAME: str = os.getenv("DEEPSEEK_MODEL_NAME", "deepseek-chat") + GEMINI_MODEL_NAME: str = os.getenv("GEMINI_MODEL_NAME", "gemini-1.5-flash-latest") + + # --- Vector Store --- + FAISS_INDEX_PATH: str = os.getenv("FAISS_INDEX_PATH", "data/faiss_index.bin") + EMBEDDING_DIMENSION: int = int(os.getenv("EMBEDDING_DIMENSION", 768)) + +# Instantiate the settings so they can be imported and used anywhere +settings = Settings() \ No newline at end of file diff --git a/ai-hub/app/db/database.py b/ai-hub/app/db/database.py index cbb6b13..c2dfee1 100644 --- a/ai-hub/app/db/database.py +++ b/ai-hub/app/db/database.py @@ -1,54 +1,5 @@ -import os -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker, declarative_base # <-- CORRECTED IMPORT +from sqlalchemy.orm import declarative_base -# --- Configuration --- -# Determines the database mode. Can be "postgres" or "sqlite". -# Defaults to "postgres" if not set. 
-DB_MODE = os.getenv("DB_MODE", "postgres").lower() - -# Default database URLs -POSTGRES_DEFAULT_URL = "postgresql://user:password@localhost/ai_hub_db" -SQLITE_DEFAULT_URL = "sqlite:///./data/ai_hub.db" - -DATABASE_URL = "" -engine_args = {} - -# --- Database Initialization --- -if DB_MODE == "sqlite": - print("✅ Initializing with SQLite in-file database.") - DATABASE_URL = SQLITE_DEFAULT_URL - # SQLite requires a specific argument to allow access from multiple threads, - # which is common in web applications. - engine_args = {"connect_args": {"check_same_thread": False}} -else: # Default to postgres - # Use the provided DATABASE_URL or fall back to the default. - DATABASE_URL = os.getenv("DATABASE_URL", POSTGRES_DEFAULT_URL) - DB_MODE = "postgres" - print(f"✅ Initializing with PostgreSQL database. URL: {DATABASE_URL}") - # pool_pre_ping checks if a connection is still alive before using it from the pool. - engine_args = {"pool_pre_ping": True} - - -# Create the SQLAlchemy engine with the determined settings -engine = create_engine(DATABASE_URL, **engine_args) - -# SessionLocal is a factory for creating new database session objects -SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) - -# Base is a class that our database model classes will inherit from. -Base = declarative_base() - - -# --- Dependency for FastAPI --- -def get_db(): - """ - FastAPI dependency that provides a database session for a single API request. - It ensures the session is always closed after the request is finished. - """ - db = SessionLocal() - try: - yield db - finally: - db.close() - +# This Base class is the foundation that all your SQLAlchemy ORM models +# (like Document, VectorMetadata, etc.) will inherit from. +Base = declarative_base() \ No newline at end of file diff --git a/ai-hub/app/db/session.py b/ai-hub/app/db/session.py new file mode 100644 index 0000000..26e8938 --- /dev/null +++ b/ai-hub/app/db/session.py @@ -0,0 +1,44 @@ +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from app.config import settings +from app.db.database import Base + +# Determine engine arguments based on the database mode from the central config +engine_args = {} +if settings.DB_MODE == "sqlite": + # This argument is required for SQLite to allow it to be used by multiple threads, + # which is the case in a web application like FastAPI. + engine_args["connect_args"] = {"check_same_thread": False} +else: + # 'pool_pre_ping' checks if a database connection is still alive before using it. + # This prevents errors from connections that have been timed out by the DB server. + engine_args["pool_pre_ping"] = True + +# Create the SQLAlchemy engine using the centralized URL and determined arguments +engine = create_engine(settings.DATABASE_URL, **engine_args) + +# SessionLocal is a factory for creating new database session objects. +# It's the standard way to interact with the database in SQLAlchemy. +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + +def create_db_and_tables(): + """ + Creates all database tables defined by models inheriting from Base. + This is typically called once on application startup. + """ + print("Creating database tables...") + # Base.metadata contains all the schema information from your models. + Base.metadata.create_all(bind=engine) + +def get_db(): + """ + FastAPI dependency that provides a database session for a single API request. 
+ + This pattern ensures that the database session is always closed after the + request is finished, even if an error occurs. + """ + db = SessionLocal() + try: + yield db + finally: + db.close() \ No newline at end of file diff --git a/ai-hub/app/db_setup.py b/ai-hub/app/db_setup.py deleted file mode 100644 index 63a64a6..0000000 --- a/ai-hub/app/db_setup.py +++ /dev/null @@ -1,36 +0,0 @@ -import os -from dotenv import load_dotenv -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker, Session -from app.db.database import Base # Assuming `Base` is in this file - -# Load environment variables from a .env file -load_dotenv() - -# --- Database Connection Setup --- -# This configuration allows for easy switching between SQLite and PostgreSQL. -DB_MODE = os.getenv("DB_MODE", "sqlite") -if DB_MODE == "sqlite": - DATABASE_URL = "sqlite:///./data/ai_hub.db" - # The connect_args are needed for SQLite to work with FastAPI's multiple threads - engine = create_engine(DATABASE_URL, connect_args={"check_same_thread": False}) -else: - DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://user:password@localhost/ai_hub_db") - engine = create_engine(DATABASE_URL) - -# Create a database session class -SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) - -def create_db_tables(): - """Create all database tables based on the models.""" - print("Creating database tables...") - Base.metadata.create_all(bind=engine) - -# The dependency to get a database session -def get_db(): - """Dependency that provides a database session.""" - db = SessionLocal() - try: - yield db - finally: - db.close() \ No newline at end of file diff --git a/ai-hub/tests/api/test_routes.py b/ai-hub/tests/api/test_routes.py index b98fdbf..fdee764 100644 --- a/ai-hub/tests/api/test_routes.py +++ b/ai-hub/tests/api/test_routes.py @@ -8,7 +8,7 @@ # Import the dependencies and router factory from app.core.services import RAGService -from app.db_setup import get_db +from app.db.session import get_db from app.api.routes import create_api_router @pytest.fixture diff --git a/ai-hub/tests/app.py b/ai-hub/tests/app.py new file mode 100644 index 0000000..b7e6fb3 --- /dev/null +++ b/ai-hub/tests/app.py @@ -0,0 +1,56 @@ +import os +from contextlib import asynccontextmanager +from fastapi import FastAPI +from dotenv import load_dotenv +from typing import List + +# Import core application logic +from app.core.vector_store import FaissVectorStore +from app.core.retrievers import FaissDBRetriever, Retriever +from app.core.services import RAGService + +# Import the new files for database and API routes +from app.db.session import create_db_tables +from app.api.routes import create_api_router + +# Load environment variables from a .env file +load_dotenv() + +# --- Application Factory Function --- +def create_app() -> FastAPI: + """ + Factory function to create and configure the FastAPI application. + This encapsulates all setup logic, making the main entry point clean. + """ + # Initialize core services for RAG + # CORRECTED: Now passing the required arguments to FaissVectorStore + vector_store = FaissVectorStore(index_file_path="data/faiss_index.bin", dimension=768) + retrievers: List[Retriever] = [ + FaissDBRetriever(vector_store=vector_store), + ] + rag_service = RAGService(vector_store=vector_store, retrievers=retrievers) + + @asynccontextmanager + async def lifespan(app: FastAPI): + """ + Initializes the database and vector store on startup and handles + cleanup on shutdown. 
+ """ + print("Initializing application services...") + create_db_tables() + yield + print("Shutting down application services...") + vector_store.save_index() + + app = FastAPI( + title="AI Model Hub Service", + description="A extensible hub to route requests to various LLMs with RAG capabilities.", + version="0.0.0", + lifespan=lifespan + ) + + # Create and include the API router + api_router = create_api_router(rag_service=rag_service) + app.include_router(api_router) + + return app diff --git a/ai-hub/tests/db/test_database.py b/ai-hub/tests/db/test_database.py index 8287067..a070625 100644 --- a/ai-hub/tests/db/test_database.py +++ b/ai-hub/tests/db/test_database.py @@ -1,91 +1,13 @@ -import os import pytest -import importlib -from sqlalchemy.orm import Session -from sqlalchemy.exc import ResourceClosedError -from sqlalchemy import text -from unittest.mock import patch +from sqlalchemy.orm import declarative_base -def test_sqlite_mode_initialization(monkeypatch): +def test_base_is_declarative_base(): """ - Tests if the database initializes in SQLite mode correctly. + Tests if the Base object in app.db.database is a SQLAlchemy declarative_base. """ - # Arrange: Set environment variable for SQLite mode - monkeypatch.setenv("DB_MODE", "sqlite") + # Arrange: Import the Base object from the module + from app.db.database import Base - # Act: Reload the module to apply the monkeypatched env vars - from app.db import database - importlib.reload(database) - - # Assert: Check if the configuration is correct for SQLite - assert database.DB_MODE == "sqlite" - assert "sqlite:///./data/ai_hub.db" in database.DATABASE_URL - assert "connect_args" in database.engine_args - assert database.engine_args["connect_args"] == {"check_same_thread": False} - - # Cleanup the created SQLite file after test, if it exists - if os.path.exists("ai_hub.db"): - os.remove("ai_hub.db") - -def test_postgres_mode_initialization(monkeypatch): - """ - Tests if the database initializes in PostgreSQL mode with a custom URL. - """ - # Arrange: Set env vars for PostgreSQL mode and a specific URL - monkeypatch.setenv("DB_MODE", "postgres") - monkeypatch.setenv("DATABASE_URL", "postgresql://test_user:test_password@testhost/test_db") - - # Act: Reload the module to apply the monkeypatched env vars - from app.db import database - importlib.reload(database) - - # Assert: Check if the configuration is correct for PostgreSQL - assert database.DB_MODE == "postgres" - assert database.DATABASE_URL == "postgresql://test_user:test_password@testhost/test_db" - assert "pool_pre_ping" in database.engine_args - -def test_default_to_postgres_mode(monkeypatch): - """ - Tests if the system defaults to PostgreSQL mode when DB_MODE is not set. - """ - # Arrange: Ensure DB_MODE is not set - monkeypatch.delenv("DB_MODE", raising=False) - - # Act: Reload the module to apply the monkeypatched env vars - from app.db import database - importlib.reload(database) - - # Assert: Check that it defaulted to postgres - assert database.DB_MODE == "postgres" - assert "postgresql://user:password@localhost/ai_hub_db" in database.DATABASE_URL - -@patch('app.db.database.SessionLocal') -def test_get_db_yields_and_closes_session(mock_session_local, monkeypatch): - """ - Tests if the get_db() dependency function yields a valid, active session - and correctly closes it afterward by mocking the session object. 
- """ - # Arrange: Get the actual get_db function from the module - from app.db import database - - # Configure the mock session returned by SessionLocal() - mock_session = mock_session_local.return_value - - db_generator = database.get_db() - - # Act - # 1. Get the session object from the generator - db_session_instance = next(db_generator) - - # Assert - # 2. Check that the yielded object is our mock session - assert db_session_instance is mock_session - mock_session.close.assert_not_called() # The session should not be closed yet - - # 3. Exhaust the generator to trigger the 'finally' block - with pytest.raises(StopIteration): - next(db_generator) - - # 4. Assert that the close() method was called exactly once. - mock_session.close.assert_called_once() - + # Assert: Check that the Base object's metaclass is the same as a new declarative_base's metaclass. + # This confirms it's the correct type of object for SQLAlchemy models to inherit from. + assert type(Base) == type(declarative_base()) \ No newline at end of file diff --git a/ai-hub/app/api/routes.py b/ai-hub/app/api/routes.py index 6cf3deb..23bfa68 100644 --- a/ai-hub/app/api/routes.py +++ b/ai-hub/app/api/routes.py @@ -5,7 +5,7 @@ from typing import Literal from sqlalchemy.orm import Session from app.core.services import RAGService -from app.db_setup import get_db +from app.db.session import get_db # Pydantic Models for API requests class ChatRequest(BaseModel): diff --git a/ai-hub/app/app.py b/ai-hub/app/app.py index 7b82b4b..d18d97b 100644 --- a/ai-hub/app/app.py +++ b/ai-hub/app/app.py @@ -10,7 +10,7 @@ from app.core.services import RAGService # Import the new files for database and API routes -from app.db_setup import create_db_tables +from app.db.session import create_db_and_tables from app.api.routes import create_api_router # Load environment variables from a .env file @@ -37,7 +37,7 @@ cleanup on shutdown. 
""" print("Initializing application services...") - create_db_tables() + create_db_and_tables() yield print("Shutting down application services...") vector_store.save_index() diff --git a/ai-hub/app/config.py b/ai-hub/app/config.py new file mode 100644 index 0000000..ab8f8f4 --- /dev/null +++ b/ai-hub/app/config.py @@ -0,0 +1,29 @@ +# app/config.py + +import os +from dotenv import load_dotenv + +# Load environment variables from a .env file +load_dotenv() + +class Settings: + # --- Database --- + DB_MODE: str = os.getenv("DB_MODE", "sqlite") + DATABASE_URL: str = ( + "sqlite:///./data/ai_hub.db" + if DB_MODE == "sqlite" + else os.getenv("DATABASE_URL", "postgresql://user:password@localhost/ai_hub_db") + ) + + # --- LLM API Keys & Models --- + DEEPSEEK_API_KEY: str = os.getenv("DEEPSEEK_API_KEY") + GEMINI_API_KEY: str = os.getenv("GEMINI_API_KEY") + DEEPSEEK_MODEL_NAME: str = os.getenv("DEEPSEEK_MODEL_NAME", "deepseek-chat") + GEMINI_MODEL_NAME: str = os.getenv("GEMINI_MODEL_NAME", "gemini-1.5-flash-latest") + + # --- Vector Store --- + FAISS_INDEX_PATH: str = os.getenv("FAISS_INDEX_PATH", "data/faiss_index.bin") + EMBEDDING_DIMENSION: int = int(os.getenv("EMBEDDING_DIMENSION", 768)) + +# Instantiate the settings so they can be imported and used anywhere +settings = Settings() \ No newline at end of file diff --git a/ai-hub/app/db/database.py b/ai-hub/app/db/database.py index cbb6b13..c2dfee1 100644 --- a/ai-hub/app/db/database.py +++ b/ai-hub/app/db/database.py @@ -1,54 +1,5 @@ -import os -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker, declarative_base # <-- CORRECTED IMPORT +from sqlalchemy.orm import declarative_base -# --- Configuration --- -# Determines the database mode. Can be "postgres" or "sqlite". -# Defaults to "postgres" if not set. -DB_MODE = os.getenv("DB_MODE", "postgres").lower() - -# Default database URLs -POSTGRES_DEFAULT_URL = "postgresql://user:password@localhost/ai_hub_db" -SQLITE_DEFAULT_URL = "sqlite:///./data/ai_hub.db" - -DATABASE_URL = "" -engine_args = {} - -# --- Database Initialization --- -if DB_MODE == "sqlite": - print("✅ Initializing with SQLite in-file database.") - DATABASE_URL = SQLITE_DEFAULT_URL - # SQLite requires a specific argument to allow access from multiple threads, - # which is common in web applications. - engine_args = {"connect_args": {"check_same_thread": False}} -else: # Default to postgres - # Use the provided DATABASE_URL or fall back to the default. - DATABASE_URL = os.getenv("DATABASE_URL", POSTGRES_DEFAULT_URL) - DB_MODE = "postgres" - print(f"✅ Initializing with PostgreSQL database. URL: {DATABASE_URL}") - # pool_pre_ping checks if a connection is still alive before using it from the pool. - engine_args = {"pool_pre_ping": True} - - -# Create the SQLAlchemy engine with the determined settings -engine = create_engine(DATABASE_URL, **engine_args) - -# SessionLocal is a factory for creating new database session objects -SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) - -# Base is a class that our database model classes will inherit from. -Base = declarative_base() - - -# --- Dependency for FastAPI --- -def get_db(): - """ - FastAPI dependency that provides a database session for a single API request. - It ensures the session is always closed after the request is finished. - """ - db = SessionLocal() - try: - yield db - finally: - db.close() - +# This Base class is the foundation that all your SQLAlchemy ORM models +# (like Document, VectorMetadata, etc.) will inherit from. 
+Base = declarative_base() \ No newline at end of file diff --git a/ai-hub/app/db/session.py b/ai-hub/app/db/session.py new file mode 100644 index 0000000..26e8938 --- /dev/null +++ b/ai-hub/app/db/session.py @@ -0,0 +1,44 @@ +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from app.config import settings +from app.db.database import Base + +# Determine engine arguments based on the database mode from the central config +engine_args = {} +if settings.DB_MODE == "sqlite": + # This argument is required for SQLite to allow it to be used by multiple threads, + # which is the case in a web application like FastAPI. + engine_args["connect_args"] = {"check_same_thread": False} +else: + # 'pool_pre_ping' checks if a database connection is still alive before using it. + # This prevents errors from connections that have been timed out by the DB server. + engine_args["pool_pre_ping"] = True + +# Create the SQLAlchemy engine using the centralized URL and determined arguments +engine = create_engine(settings.DATABASE_URL, **engine_args) + +# SessionLocal is a factory for creating new database session objects. +# It's the standard way to interact with the database in SQLAlchemy. +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + +def create_db_and_tables(): + """ + Creates all database tables defined by models inheriting from Base. + This is typically called once on application startup. + """ + print("Creating database tables...") + # Base.metadata contains all the schema information from your models. + Base.metadata.create_all(bind=engine) + +def get_db(): + """ + FastAPI dependency that provides a database session for a single API request. + + This pattern ensures that the database session is always closed after the + request is finished, even if an error occurs. + """ + db = SessionLocal() + try: + yield db + finally: + db.close() \ No newline at end of file diff --git a/ai-hub/app/db_setup.py b/ai-hub/app/db_setup.py deleted file mode 100644 index 63a64a6..0000000 --- a/ai-hub/app/db_setup.py +++ /dev/null @@ -1,36 +0,0 @@ -import os -from dotenv import load_dotenv -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker, Session -from app.db.database import Base # Assuming `Base` is in this file - -# Load environment variables from a .env file -load_dotenv() - -# --- Database Connection Setup --- -# This configuration allows for easy switching between SQLite and PostgreSQL. 
-DB_MODE = os.getenv("DB_MODE", "sqlite") -if DB_MODE == "sqlite": - DATABASE_URL = "sqlite:///./data/ai_hub.db" - # The connect_args are needed for SQLite to work with FastAPI's multiple threads - engine = create_engine(DATABASE_URL, connect_args={"check_same_thread": False}) -else: - DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://user:password@localhost/ai_hub_db") - engine = create_engine(DATABASE_URL) - -# Create a database session class -SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) - -def create_db_tables(): - """Create all database tables based on the models.""" - print("Creating database tables...") - Base.metadata.create_all(bind=engine) - -# The dependency to get a database session -def get_db(): - """Dependency that provides a database session.""" - db = SessionLocal() - try: - yield db - finally: - db.close() \ No newline at end of file diff --git a/ai-hub/tests/api/test_routes.py b/ai-hub/tests/api/test_routes.py index b98fdbf..fdee764 100644 --- a/ai-hub/tests/api/test_routes.py +++ b/ai-hub/tests/api/test_routes.py @@ -8,7 +8,7 @@ # Import the dependencies and router factory from app.core.services import RAGService -from app.db_setup import get_db +from app.db.session import get_db from app.api.routes import create_api_router @pytest.fixture diff --git a/ai-hub/tests/app.py b/ai-hub/tests/app.py new file mode 100644 index 0000000..b7e6fb3 --- /dev/null +++ b/ai-hub/tests/app.py @@ -0,0 +1,56 @@ +import os +from contextlib import asynccontextmanager +from fastapi import FastAPI +from dotenv import load_dotenv +from typing import List + +# Import core application logic +from app.core.vector_store import FaissVectorStore +from app.core.retrievers import FaissDBRetriever, Retriever +from app.core.services import RAGService + +# Import the new files for database and API routes +from app.db.session import create_db_tables +from app.api.routes import create_api_router + +# Load environment variables from a .env file +load_dotenv() + +# --- Application Factory Function --- +def create_app() -> FastAPI: + """ + Factory function to create and configure the FastAPI application. + This encapsulates all setup logic, making the main entry point clean. + """ + # Initialize core services for RAG + # CORRECTED: Now passing the required arguments to FaissVectorStore + vector_store = FaissVectorStore(index_file_path="data/faiss_index.bin", dimension=768) + retrievers: List[Retriever] = [ + FaissDBRetriever(vector_store=vector_store), + ] + rag_service = RAGService(vector_store=vector_store, retrievers=retrievers) + + @asynccontextmanager + async def lifespan(app: FastAPI): + """ + Initializes the database and vector store on startup and handles + cleanup on shutdown. 
+ """ + print("Initializing application services...") + create_db_tables() + yield + print("Shutting down application services...") + vector_store.save_index() + + app = FastAPI( + title="AI Model Hub Service", + description="A extensible hub to route requests to various LLMs with RAG capabilities.", + version="0.0.0", + lifespan=lifespan + ) + + # Create and include the API router + api_router = create_api_router(rag_service=rag_service) + app.include_router(api_router) + + return app diff --git a/ai-hub/tests/db/test_database.py b/ai-hub/tests/db/test_database.py index 8287067..a070625 100644 --- a/ai-hub/tests/db/test_database.py +++ b/ai-hub/tests/db/test_database.py @@ -1,91 +1,13 @@ -import os import pytest -import importlib -from sqlalchemy.orm import Session -from sqlalchemy.exc import ResourceClosedError -from sqlalchemy import text -from unittest.mock import patch +from sqlalchemy.orm import declarative_base -def test_sqlite_mode_initialization(monkeypatch): +def test_base_is_declarative_base(): """ - Tests if the database initializes in SQLite mode correctly. + Tests if the Base object in app.db.database is a SQLAlchemy declarative_base. """ - # Arrange: Set environment variable for SQLite mode - monkeypatch.setenv("DB_MODE", "sqlite") + # Arrange: Import the Base object from the module + from app.db.database import Base - # Act: Reload the module to apply the monkeypatched env vars - from app.db import database - importlib.reload(database) - - # Assert: Check if the configuration is correct for SQLite - assert database.DB_MODE == "sqlite" - assert "sqlite:///./data/ai_hub.db" in database.DATABASE_URL - assert "connect_args" in database.engine_args - assert database.engine_args["connect_args"] == {"check_same_thread": False} - - # Cleanup the created SQLite file after test, if it exists - if os.path.exists("ai_hub.db"): - os.remove("ai_hub.db") - -def test_postgres_mode_initialization(monkeypatch): - """ - Tests if the database initializes in PostgreSQL mode with a custom URL. - """ - # Arrange: Set env vars for PostgreSQL mode and a specific URL - monkeypatch.setenv("DB_MODE", "postgres") - monkeypatch.setenv("DATABASE_URL", "postgresql://test_user:test_password@testhost/test_db") - - # Act: Reload the module to apply the monkeypatched env vars - from app.db import database - importlib.reload(database) - - # Assert: Check if the configuration is correct for PostgreSQL - assert database.DB_MODE == "postgres" - assert database.DATABASE_URL == "postgresql://test_user:test_password@testhost/test_db" - assert "pool_pre_ping" in database.engine_args - -def test_default_to_postgres_mode(monkeypatch): - """ - Tests if the system defaults to PostgreSQL mode when DB_MODE is not set. - """ - # Arrange: Ensure DB_MODE is not set - monkeypatch.delenv("DB_MODE", raising=False) - - # Act: Reload the module to apply the monkeypatched env vars - from app.db import database - importlib.reload(database) - - # Assert: Check that it defaulted to postgres - assert database.DB_MODE == "postgres" - assert "postgresql://user:password@localhost/ai_hub_db" in database.DATABASE_URL - -@patch('app.db.database.SessionLocal') -def test_get_db_yields_and_closes_session(mock_session_local, monkeypatch): - """ - Tests if the get_db() dependency function yields a valid, active session - and correctly closes it afterward by mocking the session object. 
- """ - # Arrange: Get the actual get_db function from the module - from app.db import database - - # Configure the mock session returned by SessionLocal() - mock_session = mock_session_local.return_value - - db_generator = database.get_db() - - # Act - # 1. Get the session object from the generator - db_session_instance = next(db_generator) - - # Assert - # 2. Check that the yielded object is our mock session - assert db_session_instance is mock_session - mock_session.close.assert_not_called() # The session should not be closed yet - - # 3. Exhaust the generator to trigger the 'finally' block - with pytest.raises(StopIteration): - next(db_generator) - - # 4. Assert that the close() method was called exactly once. - mock_session.close.assert_called_once() - + # Assert: Check that the Base object's metaclass is the same as a new declarative_base's metaclass. + # This confirms it's the correct type of object for SQLAlchemy models to inherit from. + assert type(Base) == type(declarative_base()) \ No newline at end of file diff --git a/ai-hub/tests/db/test_session.py b/ai-hub/tests/db/test_session.py new file mode 100644 index 0000000..17b020a --- /dev/null +++ b/ai-hub/tests/db/test_session.py @@ -0,0 +1,68 @@ +import pytest +import importlib +from unittest.mock import patch + +# --- Test Suite for app.db.session --- + +def test_sqlite_mode_initialization(monkeypatch): + """ + Tests if the session module correctly configures the SQLAlchemy engine for SQLite. + """ + # Arrange + monkeypatch.setenv("DB_MODE", "sqlite") + + # Act + from app import config + from app.db import session + importlib.reload(config) + importlib.reload(session) + + # Assert + assert session.engine.dialect.name == "sqlite" + assert session.engine.url.database == "./data/ai_hub.db" + +def test_postgres_mode_initialization(monkeypatch): + """ + Tests if the session module correctly configures the SQLAlchemy engine for PostgreSQL. + """ + # Arrange + monkeypatch.setenv("DB_MODE", "postgres") + custom_url = "postgresql://test_user:test_password@testhost/test_db" + monkeypatch.setenv("DATABASE_URL", custom_url) + + # Act + from app import config + from app.db import session + importlib.reload(config) + importlib.reload(session) + + # Assert + assert session.engine.url.drivername == "postgresql" + assert session.engine.url.username == "test_user" + assert session.engine.url.host == "testhost" + assert session.engine.url.database == "test_db" + +@patch('app.db.session.SessionLocal') +def test_get_db_yields_and_closes_session(mock_session_local): + """ + Tests if the get_db() dependency function yields a session and then closes it. 
+ """ + # Arrange + from app.db.session import get_db + # FIX: Correctly assign the mock session from the mock factory's return_value + mock_session = mock_session_local.return_value + db_generator = get_db() + + # Act (Yield) + db_session_instance = next(db_generator) + + # Assert (Yield) + assert db_session_instance is mock_session + mock_session.close.assert_not_called() + + # Act (Close) + with pytest.raises(StopIteration): + next(db_generator) + + # Assert (Close) + mock_session.close.assert_called_once() \ No newline at end of file diff --git a/ai-hub/app/api/routes.py b/ai-hub/app/api/routes.py index 6cf3deb..23bfa68 100644 --- a/ai-hub/app/api/routes.py +++ b/ai-hub/app/api/routes.py @@ -5,7 +5,7 @@ from typing import Literal from sqlalchemy.orm import Session from app.core.services import RAGService -from app.db_setup import get_db +from app.db.session import get_db # Pydantic Models for API requests class ChatRequest(BaseModel): diff --git a/ai-hub/app/app.py b/ai-hub/app/app.py index 7b82b4b..d18d97b 100644 --- a/ai-hub/app/app.py +++ b/ai-hub/app/app.py @@ -10,7 +10,7 @@ from app.core.services import RAGService # Import the new files for database and API routes -from app.db_setup import create_db_tables +from app.db.session import create_db_and_tables from app.api.routes import create_api_router # Load environment variables from a .env file @@ -37,7 +37,7 @@ cleanup on shutdown. """ print("Initializing application services...") - create_db_tables() + create_db_and_tables() yield print("Shutting down application services...") vector_store.save_index() diff --git a/ai-hub/app/config.py b/ai-hub/app/config.py new file mode 100644 index 0000000..ab8f8f4 --- /dev/null +++ b/ai-hub/app/config.py @@ -0,0 +1,29 @@ +# app/config.py + +import os +from dotenv import load_dotenv + +# Load environment variables from a .env file +load_dotenv() + +class Settings: + # --- Database --- + DB_MODE: str = os.getenv("DB_MODE", "sqlite") + DATABASE_URL: str = ( + "sqlite:///./data/ai_hub.db" + if DB_MODE == "sqlite" + else os.getenv("DATABASE_URL", "postgresql://user:password@localhost/ai_hub_db") + ) + + # --- LLM API Keys & Models --- + DEEPSEEK_API_KEY: str = os.getenv("DEEPSEEK_API_KEY") + GEMINI_API_KEY: str = os.getenv("GEMINI_API_KEY") + DEEPSEEK_MODEL_NAME: str = os.getenv("DEEPSEEK_MODEL_NAME", "deepseek-chat") + GEMINI_MODEL_NAME: str = os.getenv("GEMINI_MODEL_NAME", "gemini-1.5-flash-latest") + + # --- Vector Store --- + FAISS_INDEX_PATH: str = os.getenv("FAISS_INDEX_PATH", "data/faiss_index.bin") + EMBEDDING_DIMENSION: int = int(os.getenv("EMBEDDING_DIMENSION", 768)) + +# Instantiate the settings so they can be imported and used anywhere +settings = Settings() \ No newline at end of file diff --git a/ai-hub/app/db/database.py b/ai-hub/app/db/database.py index cbb6b13..c2dfee1 100644 --- a/ai-hub/app/db/database.py +++ b/ai-hub/app/db/database.py @@ -1,54 +1,5 @@ -import os -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker, declarative_base # <-- CORRECTED IMPORT +from sqlalchemy.orm import declarative_base -# --- Configuration --- -# Determines the database mode. Can be "postgres" or "sqlite". -# Defaults to "postgres" if not set. 
-DB_MODE = os.getenv("DB_MODE", "postgres").lower() - -# Default database URLs -POSTGRES_DEFAULT_URL = "postgresql://user:password@localhost/ai_hub_db" -SQLITE_DEFAULT_URL = "sqlite:///./data/ai_hub.db" - -DATABASE_URL = "" -engine_args = {} - -# --- Database Initialization --- -if DB_MODE == "sqlite": - print("✅ Initializing with SQLite in-file database.") - DATABASE_URL = SQLITE_DEFAULT_URL - # SQLite requires a specific argument to allow access from multiple threads, - # which is common in web applications. - engine_args = {"connect_args": {"check_same_thread": False}} -else: # Default to postgres - # Use the provided DATABASE_URL or fall back to the default. - DATABASE_URL = os.getenv("DATABASE_URL", POSTGRES_DEFAULT_URL) - DB_MODE = "postgres" - print(f"✅ Initializing with PostgreSQL database. URL: {DATABASE_URL}") - # pool_pre_ping checks if a connection is still alive before using it from the pool. - engine_args = {"pool_pre_ping": True} - - -# Create the SQLAlchemy engine with the determined settings -engine = create_engine(DATABASE_URL, **engine_args) - -# SessionLocal is a factory for creating new database session objects -SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) - -# Base is a class that our database model classes will inherit from. -Base = declarative_base() - - -# --- Dependency for FastAPI --- -def get_db(): - """ - FastAPI dependency that provides a database session for a single API request. - It ensures the session is always closed after the request is finished. - """ - db = SessionLocal() - try: - yield db - finally: - db.close() - +# This Base class is the foundation that all your SQLAlchemy ORM models +# (like Document, VectorMetadata, etc.) will inherit from. +Base = declarative_base() \ No newline at end of file diff --git a/ai-hub/app/db/session.py b/ai-hub/app/db/session.py new file mode 100644 index 0000000..26e8938 --- /dev/null +++ b/ai-hub/app/db/session.py @@ -0,0 +1,44 @@ +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from app.config import settings +from app.db.database import Base + +# Determine engine arguments based on the database mode from the central config +engine_args = {} +if settings.DB_MODE == "sqlite": + # This argument is required for SQLite to allow it to be used by multiple threads, + # which is the case in a web application like FastAPI. + engine_args["connect_args"] = {"check_same_thread": False} +else: + # 'pool_pre_ping' checks if a database connection is still alive before using it. + # This prevents errors from connections that have been timed out by the DB server. + engine_args["pool_pre_ping"] = True + +# Create the SQLAlchemy engine using the centralized URL and determined arguments +engine = create_engine(settings.DATABASE_URL, **engine_args) + +# SessionLocal is a factory for creating new database session objects. +# It's the standard way to interact with the database in SQLAlchemy. +SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + +def create_db_and_tables(): + """ + Creates all database tables defined by models inheriting from Base. + This is typically called once on application startup. + """ + print("Creating database tables...") + # Base.metadata contains all the schema information from your models. + Base.metadata.create_all(bind=engine) + +def get_db(): + """ + FastAPI dependency that provides a database session for a single API request. 
+ """ + print("Initializing application services...") + create_db_tables() + yield + print("Shutting down application services...") + vector_store.save_index() + + app = FastAPI( + title="AI Model Hub Service", + description="A extensible hub to route requests to various LLMs with RAG capabilities.", + version="0.0.0", + lifespan=lifespan + ) + + # Create and include the API router + api_router = create_api_router(rag_service=rag_service) + app.include_router(api_router) + + return app diff --git a/ai-hub/tests/db/test_database.py b/ai-hub/tests/db/test_database.py index 8287067..a070625 100644 --- a/ai-hub/tests/db/test_database.py +++ b/ai-hub/tests/db/test_database.py @@ -1,91 +1,13 @@ -import os import pytest -import importlib -from sqlalchemy.orm import Session -from sqlalchemy.exc import ResourceClosedError -from sqlalchemy import text -from unittest.mock import patch +from sqlalchemy.orm import declarative_base -def test_sqlite_mode_initialization(monkeypatch): +def test_base_is_declarative_base(): """ - Tests if the database initializes in SQLite mode correctly. + Tests if the Base object in app.db.database is a SQLAlchemy declarative_base. """ - # Arrange: Set environment variable for SQLite mode - monkeypatch.setenv("DB_MODE", "sqlite") + # Arrange: Import the Base object from the module + from app.db.database import Base - # Act: Reload the module to apply the monkeypatched env vars - from app.db import database - importlib.reload(database) - - # Assert: Check if the configuration is correct for SQLite - assert database.DB_MODE == "sqlite" - assert "sqlite:///./data/ai_hub.db" in database.DATABASE_URL - assert "connect_args" in database.engine_args - assert database.engine_args["connect_args"] == {"check_same_thread": False} - - # Cleanup the created SQLite file after test, if it exists - if os.path.exists("ai_hub.db"): - os.remove("ai_hub.db") - -def test_postgres_mode_initialization(monkeypatch): - """ - Tests if the database initializes in PostgreSQL mode with a custom URL. - """ - # Arrange: Set env vars for PostgreSQL mode and a specific URL - monkeypatch.setenv("DB_MODE", "postgres") - monkeypatch.setenv("DATABASE_URL", "postgresql://test_user:test_password@testhost/test_db") - - # Act: Reload the module to apply the monkeypatched env vars - from app.db import database - importlib.reload(database) - - # Assert: Check if the configuration is correct for PostgreSQL - assert database.DB_MODE == "postgres" - assert database.DATABASE_URL == "postgresql://test_user:test_password@testhost/test_db" - assert "pool_pre_ping" in database.engine_args - -def test_default_to_postgres_mode(monkeypatch): - """ - Tests if the system defaults to PostgreSQL mode when DB_MODE is not set. - """ - # Arrange: Ensure DB_MODE is not set - monkeypatch.delenv("DB_MODE", raising=False) - - # Act: Reload the module to apply the monkeypatched env vars - from app.db import database - importlib.reload(database) - - # Assert: Check that it defaulted to postgres - assert database.DB_MODE == "postgres" - assert "postgresql://user:password@localhost/ai_hub_db" in database.DATABASE_URL - -@patch('app.db.database.SessionLocal') -def test_get_db_yields_and_closes_session(mock_session_local, monkeypatch): - """ - Tests if the get_db() dependency function yields a valid, active session - and correctly closes it afterward by mocking the session object. 
- """ - # Arrange: Get the actual get_db function from the module - from app.db import database - - # Configure the mock session returned by SessionLocal() - mock_session = mock_session_local.return_value - - db_generator = database.get_db() - - # Act - # 1. Get the session object from the generator - db_session_instance = next(db_generator) - - # Assert - # 2. Check that the yielded object is our mock session - assert db_session_instance is mock_session - mock_session.close.assert_not_called() # The session should not be closed yet - - # 3. Exhaust the generator to trigger the 'finally' block - with pytest.raises(StopIteration): - next(db_generator) - - # 4. Assert that the close() method was called exactly once. - mock_session.close.assert_called_once() - + # Assert: Check that the Base object's metaclass is the same as a new declarative_base's metaclass. + # This confirms it's the correct type of object for SQLAlchemy models to inherit from. + assert type(Base) == type(declarative_base()) \ No newline at end of file diff --git a/ai-hub/tests/db/test_session.py b/ai-hub/tests/db/test_session.py new file mode 100644 index 0000000..17b020a --- /dev/null +++ b/ai-hub/tests/db/test_session.py @@ -0,0 +1,68 @@ +import pytest +import importlib +from unittest.mock import patch + +# --- Test Suite for app.db.session --- + +def test_sqlite_mode_initialization(monkeypatch): + """ + Tests if the session module correctly configures the SQLAlchemy engine for SQLite. + """ + # Arrange + monkeypatch.setenv("DB_MODE", "sqlite") + + # Act + from app import config + from app.db import session + importlib.reload(config) + importlib.reload(session) + + # Assert + assert session.engine.dialect.name == "sqlite" + assert session.engine.url.database == "./data/ai_hub.db" + +def test_postgres_mode_initialization(monkeypatch): + """ + Tests if the session module correctly configures the SQLAlchemy engine for PostgreSQL. + """ + # Arrange + monkeypatch.setenv("DB_MODE", "postgres") + custom_url = "postgresql://test_user:test_password@testhost/test_db" + monkeypatch.setenv("DATABASE_URL", custom_url) + + # Act + from app import config + from app.db import session + importlib.reload(config) + importlib.reload(session) + + # Assert + assert session.engine.url.drivername == "postgresql" + assert session.engine.url.username == "test_user" + assert session.engine.url.host == "testhost" + assert session.engine.url.database == "test_db" + +@patch('app.db.session.SessionLocal') +def test_get_db_yields_and_closes_session(mock_session_local): + """ + Tests if the get_db() dependency function yields a session and then closes it. 
+ """ + # Arrange + from app.db.session import get_db + # FIX: Correctly assign the mock session from the mock factory's return_value + mock_session = mock_session_local.return_value + db_generator = get_db() + + # Act (Yield) + db_session_instance = next(db_generator) + + # Assert (Yield) + assert db_session_instance is mock_session + mock_session.close.assert_not_called() + + # Act (Close) + with pytest.raises(StopIteration): + next(db_generator) + + # Assert (Close) + mock_session.close.assert_called_once() \ No newline at end of file diff --git a/ai-hub/tests/test_app.py b/ai-hub/tests/test_app.py index 6e08da0..6b25640 100644 --- a/ai-hub/tests/test_app.py +++ b/ai-hub/tests/test_app.py @@ -3,7 +3,7 @@ from sqlalchemy.orm import Session from app.app import create_app -from app.db_setup import get_db +from app.db.session import get_db # --- Dependency Override for Testing --- mock_db = MagicMock(spec=Session)