diff --git a/ai-hub/app/api/dependencies.py b/ai-hub/app/api/dependencies.py
index 0023ac0..783f8f2 100644
--- a/ai-hub/app/api/dependencies.py
+++ b/ai-hub/app/api/dependencies.py
@@ -37,6 +37,17 @@
return user
+async def get_current_admin(
+ current_user: models.User = Depends(get_current_user)
+) -> models.User:
+ if current_user.role != "admin":
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="Admin role required"
+ )
+ return current_user
+
+
class ServiceContainer:
"""
A flexible container for managing and providing various application services.
diff --git a/ai-hub/app/api/routes/admin.py b/ai-hub/app/api/routes/admin.py
new file mode 100644
index 0000000..1ed915c
--- /dev/null
+++ b/ai-hub/app/api/routes/admin.py
@@ -0,0 +1,120 @@
+from fastapi import APIRouter, Depends, HTTPException
+from app.api import schemas
+from app.api.dependencies import get_current_admin
+from app.config import settings
+
+def create_admin_router() -> APIRouter:
+ router = APIRouter()
+
+ @router.put("/config/oidc", summary="Update OIDC Configuration")
+ async def update_oidc_config(
+ update: schemas.OIDCConfigUpdate,
+ admin = Depends(get_current_admin)
+ ):
+ if update.enabled is not None:
+ settings.OIDC_ENABLED = update.enabled
+ if update.client_id is not None:
+ settings.OIDC_CLIENT_ID = update.client_id
+ if update.client_secret is not None:
+ settings.OIDC_CLIENT_SECRET = update.client_secret
+ if update.server_url is not None:
+ settings.OIDC_SERVER_URL = update.server_url
+ if update.redirect_uri is not None:
+ settings.OIDC_REDIRECT_URI = update.redirect_uri
+ if update.allow_oidc_login is not None:
+ if not update.allow_oidc_login and not settings.ALLOW_PASSWORD_LOGIN:
+ raise HTTPException(status_code=400, detail="Cannot disable OIDC login while password login is also disabled.")
+ settings.ALLOW_OIDC_LOGIN = update.allow_oidc_login
+
+ settings.save_to_yaml()
+ return {"message": "OIDC configuration updated successfully"}
+
+ @router.put("/config/app", summary="Update Application Configuration")
+ async def update_app_config(
+ update: schemas.AppConfigUpdate,
+ admin = Depends(get_current_admin)
+ ):
+ if update.allow_password_login is not None:
+ if not update.allow_password_login and not settings.ALLOW_OIDC_LOGIN:
+ raise HTTPException(status_code=400, detail="Cannot disable password login while OIDC login is also disabled.")
+ settings.ALLOW_PASSWORD_LOGIN = update.allow_password_login
+
+ settings.save_to_yaml()
+ return {"message": "Application configuration updated successfully"}
+
+ @router.post("/config/oidc/test", summary="Test OIDC Discovery")
+ async def test_oidc_connection(
+ update: schemas.OIDCConfigUpdate,
+ admin = Depends(get_current_admin)
+ ):
+ if not update.server_url:
+ raise HTTPException(status_code=400, detail="Server URL is required for testing.")
+
+ import httpx
+ try:
+ discovery_url = update.server_url.rstrip("/") + "/.well-known/openid-configuration"
+ async with httpx.AsyncClient() as client:
+ response = await client.get(discovery_url, timeout=5.0)
+ if response.status_code == 200:
+ return {"success": True, "message": "OIDC Identity Provider discovered successfully!"}
+ else:
+ return {"success": False, "message": f"Discovery failed with status {response.status_code}"}
+ except Exception as e:
+ return {"success": False, "message": f"Failed to reach OIDC provider: {str(e)}"}
+
+ @router.post("/config/swarm/test", summary="Test Swarm Connection")
+ async def test_swarm_connection(
+ update: schemas.SwarmConfigUpdate,
+ admin = Depends(get_current_admin)
+ ):
+ if not update.external_endpoint:
+ raise HTTPException(status_code=400, detail="External endpoint is required for testing.")
+
+ import httpx
+ try:
+ # We try to reach the endpoint. Since it's gRPC, we might just do a TCP check
+ # or a basic GET if it's behind a proxy that handles health checks.
+ # For simplicity, we'll check if the protocol is valid and we can reach it.
+ async with httpx.AsyncClient() as client:
+ # Most swarm proxies will have a /health or just return 404/405 for GET on root
+ response = await client.get(update.external_endpoint, timeout=5.0)
+ return {"success": True, "message": f"Reached endpoint with status {response.status_code}"}
+ except Exception as e:
+ return {"success": False, "message": f"Failed to connect: {str(e)}"}
+
+ @router.put("/config/swarm", summary="Update Swarm Configuration")
+ async def update_swarm_config(
+ update: schemas.SwarmConfigUpdate,
+ admin = Depends(get_current_admin)
+ ):
+ if update.external_endpoint is not None:
+ settings.GRPC_EXTERNAL_ENDPOINT = update.external_endpoint
+ # Derived TLS enabled from endpoint protocol
+ endpoint = update.external_endpoint.lower()
+            settings.GRPC_TLS_ENABLED = endpoint.startswith("https://") or endpoint.endswith(":443")
+
+ settings.save_to_yaml()
+ return {"message": "Swarm configuration updated successfully"}
+
+ @router.get("/config", summary="Get Admin Configuration")
+ async def get_admin_config(
+ admin = Depends(get_current_admin)
+ ):
+ return {
+ "app": {
+ "allow_password_login": settings.ALLOW_PASSWORD_LOGIN
+ },
+ "oidc": {
+ "enabled": settings.OIDC_ENABLED,
+ "client_id": settings.OIDC_CLIENT_ID,
+ "client_secret": settings.OIDC_CLIENT_SECRET,
+ "server_url": settings.OIDC_SERVER_URL,
+ "redirect_uri": settings.OIDC_REDIRECT_URI,
+ "allow_oidc_login": settings.ALLOW_OIDC_LOGIN
+ },
+ "swarm": {
+ "external_endpoint": settings.GRPC_EXTERNAL_ENDPOINT
+ }
+ }
+
+ return router
diff --git a/ai-hub/app/api/routes/api.py b/ai-hub/app/api/routes/api.py
index 529e771..3e27520 100644
--- a/ai-hub/app/api/routes/api.py
+++ b/ai-hub/app/api/routes/api.py
@@ -11,6 +11,7 @@
from .nodes import create_nodes_router
from .skills import create_skills_router
from .agent_update import create_agent_update_router
+from .admin import create_admin_router
def create_api_router(services: ServiceContainer) -> APIRouter:
"""
@@ -29,5 +30,6 @@
router.include_router(create_nodes_router(services))
router.include_router(create_skills_router(services))
router.include_router(create_agent_update_router())
+ router.include_router(create_admin_router(), prefix="/admin")
return router
\ No newline at end of file
diff --git a/ai-hub/app/api/routes/general.py b/ai-hub/app/api/routes/general.py
index dd6902d..ece0ba0 100644
--- a/ai-hub/app/api/routes/general.py
+++ b/ai-hub/app/api/routes/general.py
@@ -1,5 +1,6 @@
from fastapi import APIRouter
from app.api.dependencies import ServiceContainer
+from app.api.schemas import SystemStatus
def create_general_router(services: ServiceContainer) -> APIRouter:
router = APIRouter(tags=["General"])
@@ -7,5 +8,16 @@
@router.get("/", summary="Check Service Status")
def read_root():
return {"status": "AI Model Hub is running!"}
+
+ @router.get("/status", response_model=SystemStatus, summary="Get Full System Status")
+ def get_status():
+ settings = services.settings()
+ return SystemStatus(
+ status="running",
+ oidc_enabled=settings.OIDC_ENABLED,
+ tls_enabled=settings.GRPC_TLS_ENABLED,
+ external_endpoint=settings.GRPC_EXTERNAL_ENDPOINT,
+ version=settings.VERSION
+ )
return router
\ No newline at end of file
diff --git a/ai-hub/app/api/routes/nodes.py b/ai-hub/app/api/routes/nodes.py
index 70e081b..479b7a0 100644
--- a/ai-hub/app/api/routes/nodes.py
+++ b/ai-hub/app/api/routes/nodes.py
@@ -251,26 +251,28 @@
if not user:
raise HTTPException(status_code=404, detail="User not found.")
- # Both admins and users only see nodes explicitly granted to their group in this user-facing list.
- # This prevents the 'Personal Preferences' and 'Mesh Explorer' from showing ungranted nodes.
-
- # Nodes accessible via user's group (relational)
- accesses = db.query(models.NodeGroupAccess).filter(
- models.NodeGroupAccess.group_id == user.group_id
- ).all()
- node_ids = set([a.node_id for a in accesses])
-
- # Nodes accessible via group policy whitelist
- if user.group and user.group.policy:
- policy_nodes = user.group.policy.get("nodes", [])
- if isinstance(policy_nodes, list):
- for nid in policy_nodes:
- node_ids.add(nid)
+ # Admins see all active nodes for management/configuration purposes.
+ # Regular users only see nodes explicitly granted to their group.
+ if user.role == "admin":
+ nodes = db.query(models.AgentNode).filter(models.AgentNode.is_active == True).all()
+ else:
+ # Nodes accessible via user's group (relational)
+ accesses = db.query(models.NodeGroupAccess).filter(
+ models.NodeGroupAccess.group_id == user.group_id
+ ).all()
+ node_ids = set([a.node_id for a in accesses])
+
+ # Nodes accessible via group policy whitelist
+ if user.group and user.group.policy:
+ policy_nodes = user.group.policy.get("nodes", [])
+ if isinstance(policy_nodes, list):
+ for nid in policy_nodes:
+ node_ids.add(nid)
- nodes = db.query(models.AgentNode).filter(
- models.AgentNode.node_id.in_(list(node_ids)),
- models.AgentNode.is_active == True
- ).all()
+ nodes = db.query(models.AgentNode).filter(
+ models.AgentNode.node_id.in_(list(node_ids)),
+ models.AgentNode.is_active == True
+ ).all()
registry = _registry()
return [services.mesh_service.node_to_user_view(n, registry) for n in nodes]
@@ -702,19 +704,22 @@
logger.warning(f"[📶] User {user_id} not found for global stream.")
return
- # Nodes accessible via user's group
- accesses = db.query(models.NodeGroupAccess).filter(
- models.NodeGroupAccess.group_id == user.group_id
- ).all()
- accessible_ids = [a.node_id for a in accesses]
-
- # Nodes in group policy
- if user.group and user.group.policy:
- policy_nodes = user.group.policy.get("nodes", [])
- if isinstance(policy_nodes, list):
- accessible_ids.extend(policy_nodes)
-
- accessible_ids = list(set(accessible_ids))
+ if user.role == "admin":
+ accessible_ids = [n.node_id for n in db.query(models.AgentNode).filter(models.AgentNode.is_active == True).all()]
+ else:
+ # Nodes accessible via user's group
+ accesses = db.query(models.NodeGroupAccess).filter(
+ models.NodeGroupAccess.group_id == user.group_id
+ ).all()
+ accessible_ids = [a.node_id for a in accesses]
+
+ # Nodes in group policy
+ if user.group and user.group.policy:
+ policy_nodes = user.group.policy.get("nodes", [])
+ if isinstance(policy_nodes, list):
+ accessible_ids.extend(policy_nodes)
+
+ accessible_ids = list(set(accessible_ids))
try:
await websocket.accept()
diff --git a/ai-hub/app/api/routes/skills.py b/ai-hub/app/api/routes/skills.py
index a6aa2a6..8020b3e 100644
--- a/ai-hub/app/api/routes/skills.py
+++ b/ai-hub/app/api/routes/skills.py
@@ -17,19 +17,23 @@
"""List all skills accessible to the user."""
# Start queries
system_query = db.query(models.Skill).filter(models.Skill.is_system == True)
- user_query = db.query(models.Skill).filter(
- models.Skill.owner_id == current_user.id,
- models.Skill.is_system == False
- )
+
+ if current_user.role == 'admin':
+ # Admins see ALL skills (system + user-owned for management)
+ user_query = db.query(models.Skill).filter(models.Skill.is_system == False)
+ else:
+ user_query = db.query(models.Skill).filter(
+ models.Skill.owner_id == current_user.id,
+ models.Skill.is_system == False
+ )
# Policy: Only show enabled skills to non-admins
if current_user.role != 'admin':
system_query = system_query.filter(models.Skill.is_enabled == True)
user_query = user_query.filter(models.Skill.is_enabled == True)
- # Target feature filtering (PostgreSQL JSONB contains or standard JSON)
+ # Target feature filtering
if feature:
- # Using standard list comparison as fallback or JSONB contains
system_skills = [s for s in system_query.all() if feature in (s.features or [])]
user_skills = [s for s in user_query.all() if feature in (s.features or [])]
else:
@@ -38,7 +42,7 @@
# Skills shared with the user's group via Group Policy
group_skills = []
- if current_user.group and current_user.group.policy:
+ if current_user.role != 'admin' and current_user.group and current_user.group.policy:
group_skill_names = current_user.group.policy.get("skills", [])
if group_skill_names:
g_query = db.query(models.Skill).filter(
diff --git a/ai-hub/app/api/routes/user.py b/ai-hub/app/api/routes/user.py
index d8c394c..87dd010 100644
--- a/ai-hub/app/api/routes/user.py
+++ b/ai-hub/app/api/routes/user.py
@@ -1,3 +1,4 @@
+from datetime import datetime
from fastapi import APIRouter, HTTPException, Depends, Header, Query, Request, UploadFile, File
from fastapi.responses import RedirectResponse as redirect
from sqlalchemy.orm import Session
@@ -34,7 +35,7 @@
Retrieves the user ID from the X-User-ID header.
This simulates an authentication system and is used by the login_required decorator.
"""
- return x_user_id
+ return x_user_id or "anonymous"
def create_users_router(services: ServiceContainer) -> APIRouter:
@@ -63,8 +64,13 @@
"""
result = await services.auth_service.handle_callback(code, db)
user_id = result["user_id"]
+ linked = result.get("linked", False)
+ # Pass linked flag to frontend for notification
frontend_redirect_url = f"{state}?user_id={user_id}"
+ if linked:
+ frontend_redirect_url += "&linked=true"
+
return redirect(url=frontend_redirect_url)
@router.get("/me", response_model=schemas.UserStatus, summary="Get Current User Status")
@@ -100,6 +106,9 @@
db: Session = Depends(get_db)
):
"""Day 1: Local Username/Password Login."""
+ if not settings.ALLOW_PASSWORD_LOGIN:
+ raise HTTPException(status_code=403, detail="Password-based login is disabled. Please use OIDC/SSO.")
+
user = db.query(models.User).filter(models.User.email == request.email).first()
if not user or not user.password_hash:
raise HTTPException(status_code=401, detail="Invalid email or password")
diff --git a/ai-hub/app/api/schemas.py b/ai-hub/app/api/schemas.py
index 99a9980..d0127ed 100644
--- a/ai-hub/app/api/schemas.py
+++ b/ai-hub/app/api/schemas.py
@@ -21,7 +21,7 @@
class UserStatus(BaseModel):
"""Schema for the response when checking a user's status."""
id: str = Field(..., description="The internal user ID.")
- email: str = Field(..., description="The user's email address.")
+ email: Optional[str] = Field(None, description="The user's email address.")
is_logged_in: bool = Field(True, description="Indicates if the user is currently authenticated.")
is_anonymous: bool = Field(False, description="Indicates if the user is an anonymous user.")
oidc_configured: bool = Field(False, description="Whether OIDC SSO is enabled on the server.")
@@ -84,6 +84,28 @@
preferences: UserPreferences
effective: dict = Field(default_factory=dict)
+class SystemStatus(BaseModel):
+ """Schema for overall system status, including TLS and OIDC state."""
+ status: str
+ oidc_enabled: bool
+ tls_enabled: bool
+ external_endpoint: Optional[str] = None
+ version: str
+
+class OIDCConfigUpdate(BaseModel):
+ enabled: Optional[bool] = None
+ client_id: Optional[str] = None
+ client_secret: Optional[str] = None
+ server_url: Optional[str] = None
+ redirect_uri: Optional[str] = None
+ allow_oidc_login: Optional[bool] = None
+
+class SwarmConfigUpdate(BaseModel):
+ external_endpoint: Optional[str] = None
+
+class AppConfigUpdate(BaseModel):
+ allow_password_login: Optional[bool] = None
+
# --- Skill Schemas ---
class SkillBase(BaseModel):
name: str
diff --git a/ai-hub/app/app.py b/ai-hub/app/app.py
index 0e6f5fd..3ea1803 100644
--- a/ai-hub/app/app.py
+++ b/ai-hub/app/app.py
@@ -181,6 +181,7 @@
prompt_service = PromptService()
# 9. Initialize the Service Container with all initialized services
services = ServiceContainer()
+ services.with_service("settings", service=lambda: settings)
services.with_document_service(vector_store=vector_store)
node_registry_service = NodeRegistryService()
diff --git a/ai-hub/app/config.py b/ai-hub/app/config.py
index da2ce5d..04d4216 100644
--- a/ai-hub/app/config.py
+++ b/ai-hub/app/config.py
@@ -15,6 +15,7 @@
version: str = "1.0.0"
log_level: str = "INFO"
super_admins: list[str] = Field(default_factory=list)
+ allow_password_login: bool = True
class OIDCSettings(BaseModel):
enabled: bool = False
@@ -22,6 +23,7 @@
client_secret: str = ""
server_url: str = ""
redirect_uri: str = ""
+ allow_oidc_login: bool = False
class DatabaseSettings(BaseModel):
mode: str = "sqlite"
@@ -50,16 +52,23 @@
index_path: str = "data/faiss_index.bin"
embedding_dimension: int = 768
+class SwarmSettings(BaseModel):
+ external_endpoint: Optional[str] = None
+
class AppConfig(BaseModel):
"""Top-level Pydantic model for application configuration."""
application: ApplicationSettings = Field(default_factory=ApplicationSettings)
database: DatabaseSettings = Field(default_factory=DatabaseSettings)
- llm_providers: LLMProvidersSettings = Field(default_factory=LLMProvidersSettings)
+ llm_providers: dict[str, dict] = Field(default_factory=dict)
+ active_llm_provider: Optional[str] = None
vector_store: VectorStoreSettings = Field(default_factory=VectorStoreSettings)
embedding_provider: EmbeddingProviderSettings = Field(default_factory=EmbeddingProviderSettings)
- tts_provider: ProviderSettings = Field(default_factory=ProviderSettings)
- stt_provider: ProviderSettings = Field(default_factory=ProviderSettings)
+ tts_providers: dict[str, dict] = Field(default_factory=dict)
+ active_tts_provider: Optional[str] = None
+ stt_providers: dict[str, dict] = Field(default_factory=dict)
+ active_stt_provider: Optional[str] = None
oidc: OIDCSettings = Field(default_factory=OIDCSettings)
+ swarm: SwarmSettings = Field(default_factory=SwarmSettings)
# --- 2. Create the Final Settings Object ---
@@ -97,11 +106,14 @@
self.SUPER_ADMINS: list[str] = [x.strip() for x in super_admins_env.split(",")] if super_admins_env else \
get_from_yaml(["application", "super_admins"]) or \
config_from_pydantic.application.super_admins
+ self.ALLOW_PASSWORD_LOGIN: bool = str(os.getenv("ALLOW_PASSWORD_LOGIN") if os.getenv("ALLOW_PASSWORD_LOGIN") is not None else
+ get_from_yaml(["application", "allow_password_login"]) if get_from_yaml(["application", "allow_password_login"]) is not None else
+ config_from_pydantic.application.allow_password_login).lower() == "true"
# --- OIDC Settings ---
- self.OIDC_ENABLED: bool = os.getenv("OIDC_ENABLED", "false").lower() == "true" or \
- get_from_yaml(["oidc", "enabled"]) or \
- config_from_pydantic.oidc.enabled
+ self.OIDC_ENABLED: bool = str(os.getenv("OIDC_ENABLED") if os.getenv("OIDC_ENABLED") is not None else
+ get_from_yaml(["oidc", "enabled"]) if get_from_yaml(["oidc", "enabled"]) is not None else
+ config_from_pydantic.oidc.enabled).lower() == "true"
self.OIDC_CLIENT_ID: str = os.getenv("OIDC_CLIENT_ID") or \
get_from_yaml(["oidc", "client_id"]) or \
config_from_pydantic.oidc.client_id
@@ -114,6 +126,22 @@
self.OIDC_REDIRECT_URI: str = os.getenv("OIDC_REDIRECT_URI") or \
get_from_yaml(["oidc", "redirect_uri"]) or \
config_from_pydantic.oidc.redirect_uri
+ self.ALLOW_OIDC_LOGIN: bool = str(os.getenv("ALLOW_OIDC_LOGIN") if os.getenv("ALLOW_OIDC_LOGIN") is not None else
+ get_from_yaml(["oidc", "allow_oidc_login"]) if get_from_yaml(["oidc", "allow_oidc_login"]) is not None else
+ config_from_pydantic.oidc.allow_oidc_login).lower() == "true"
+
+ # --- Swarm Settings ---
+ self.GRPC_EXTERNAL_ENDPOINT: Optional[str] = os.getenv("GRPC_EXTERNAL_ENDPOINT") or \
+ get_from_yaml(["swarm", "external_endpoint"]) or \
+ config_from_pydantic.swarm.external_endpoint
+
+ # Infer TLS from endpoint
+ protocol = self.GRPC_EXTERNAL_ENDPOINT.split("://")[0] if self.GRPC_EXTERNAL_ENDPOINT and "://" in self.GRPC_EXTERNAL_ENDPOINT else "http"
+        self.GRPC_TLS_ENABLED: bool = (protocol == "https") or bool(self.GRPC_EXTERNAL_ENDPOINT and self.GRPC_EXTERNAL_ENDPOINT.lower().endswith(":443"))
+
+ # Legacy paths (no longer in UI, but kept for env var parity if needed)
+ self.GRPC_CERT_PATH: Optional[str] = os.getenv("GRPC_CERT_PATH") or get_from_yaml(["swarm", "cert_path"])
+ self.GRPC_KEY_PATH: Optional[str] = os.getenv("GRPC_KEY_PATH") or get_from_yaml(["swarm", "key_path"])
self.SECRET_KEY: str = os.getenv("SECRET_KEY") or \
get_from_yaml(["application", "secret_key"]) or \
@@ -141,8 +169,7 @@
# We store everything in a flat map for the legacy settings getters,
# but also provide a dynamic map.
- # 1. Resolve LLM Providers
- self.LLM_PROVIDERS = config_from_pydantic.llm_providers.providers or {}
+ self.LLM_PROVIDERS = config_from_pydantic.llm_providers or {}
# Support legacy environment variables and merge them into the providers map
for env_key, env_val in os.environ.items():
if env_key.endswith("_API_KEY") and not any(x in env_key for x in ["TTS", "STT", "EMBEDDING"]):
@@ -186,31 +213,44 @@
self.GEMINI_API_KEY
# 3. Resolve TTS (Agnostic)
- self.TTS_PROVIDER: str = os.getenv("TTS_PROVIDER") or \
- get_from_yaml(["tts_provider", "provider"]) or \
- config_from_pydantic.tts_provider.active_provider or "google_gemini"
+ self.TTS_PROVIDERS = config_from_pydantic.tts_providers or {}
+ # Legacy single-provider from YAML/Env
+ legacy_tts_provider = os.getenv("TTS_PROVIDER") or get_from_yaml(["tts_provider", "provider"])
+ if legacy_tts_provider and legacy_tts_provider not in self.TTS_PROVIDERS:
+ self.TTS_PROVIDERS[legacy_tts_provider] = {
+ "provider": legacy_tts_provider,
+ "model_name": os.getenv("TTS_MODEL_NAME") or get_from_yaml(["tts_provider", "model_name"]),
+ "voice_name": os.getenv("TTS_VOICE_NAME") or get_from_yaml(["tts_provider", "voice_name"]),
+ "api_key": os.getenv("TTS_API_KEY") or get_from_yaml(["tts_provider", "api_key"]),
+ }
- # Legacy back-compat fields
- self.TTS_VOICE_NAME: str = os.getenv("TTS_VOICE_NAME") or \
- get_from_yaml(["tts_provider", "voice_name"]) or \
- config_from_pydantic.tts_provider.voice_name or "Kore"
- self.TTS_MODEL_NAME: str = os.getenv("TTS_MODEL_NAME") or \
- get_from_yaml(["tts_provider", "model_name"]) or \
- config_from_pydantic.tts_provider.model_name or "gemini-2.5-flash-preview-tts"
- self.TTS_API_KEY: Optional[str] = os.getenv("TTS_API_KEY") or \
- get_from_yaml(["tts_provider", "api_key"]) or \
- self.GEMINI_API_KEY
+ self.TTS_PROVIDER: str = legacy_tts_provider or \
+ config_from_pydantic.active_tts_provider or \
+ (list(self.TTS_PROVIDERS.keys())[0] if self.TTS_PROVIDERS else "google_gemini")
+
+ # Legacy back-compat fields for the active one
+ active_tts = self.TTS_PROVIDERS.get(self.TTS_PROVIDER, {})
+ self.TTS_VOICE_NAME: str = active_tts.get("voice_name") or "Kore"
+ self.TTS_MODEL_NAME: str = active_tts.get("model_name") or "gemini-2.5-flash-preview-tts"
+ self.TTS_API_KEY: Optional[str] = active_tts.get("api_key") or self.GEMINI_API_KEY
# 4. Resolve STT (Agnostic)
- self.STT_PROVIDER: str = os.getenv("STT_PROVIDER") or \
- get_from_yaml(["stt_provider", "provider"]) or \
- config_from_pydantic.stt_provider.active_provider or "google_gemini"
+ self.STT_PROVIDERS = config_from_pydantic.stt_providers or {}
+ legacy_stt_provider = os.getenv("STT_PROVIDER") or get_from_yaml(["stt_provider", "provider"])
+ if legacy_stt_provider and legacy_stt_provider not in self.STT_PROVIDERS:
+ self.STT_PROVIDERS[legacy_stt_provider] = {
+ "provider": legacy_stt_provider,
+ "model_name": os.getenv("STT_MODEL_NAME") or get_from_yaml(["stt_provider", "model_name"]),
+ "api_key": os.getenv("STT_API_KEY") or get_from_yaml(["stt_provider", "api_key"]),
+ }
+
+ self.STT_PROVIDER: str = legacy_stt_provider or \
+ config_from_pydantic.active_stt_provider or \
+ (list(self.STT_PROVIDERS.keys())[0] if self.STT_PROVIDERS else "google_gemini")
- self.STT_MODEL_NAME: str = os.getenv("STT_MODEL_NAME") or \
- get_from_yaml(["stt_provider", "model_name"]) or \
- config_from_pydantic.stt_provider.model_name or "gemini-2.5-flash"
- self.STT_API_KEY: Optional[str] = os.getenv("STT_API_KEY") or \
- get_from_yaml(["stt_provider", "api_key"]) or \
+ active_stt = self.STT_PROVIDERS.get(self.STT_PROVIDER, {})
+ self.STT_MODEL_NAME: str = active_stt.get("model_name") or "gemini-2.5-flash"
+ self.STT_API_KEY: Optional[str] = active_stt.get("api_key") or \
(self.OPENAI_API_KEY if self.STT_PROVIDER == "openai" else self.GEMINI_API_KEY)
def save_to_yaml(self):
@@ -230,7 +270,8 @@
"project_name": self.PROJECT_NAME,
"version": self.VERSION,
"log_level": self.LOG_LEVEL,
- "super_admins": self.SUPER_ADMINS
+ "super_admins": self.SUPER_ADMINS,
+ "allow_password_login": self.ALLOW_PASSWORD_LOGIN
},
"database": {
"mode": self.DB_MODE,
@@ -245,21 +286,17 @@
"client_id": self.OIDC_CLIENT_ID,
"client_secret": self.OIDC_CLIENT_SECRET,
"server_url": self.OIDC_SERVER_URL,
- "redirect_uri": self.OIDC_REDIRECT_URI
+ "redirect_uri": self.OIDC_REDIRECT_URI,
+ "allow_oidc_login": self.ALLOW_OIDC_LOGIN
},
- "llm_providers": {
- "providers": self.LLM_PROVIDERS
- },
- "tts_provider": {
- "provider": self.TTS_PROVIDER,
- "model_name": self.TTS_MODEL_NAME,
- "voice_name": self.TTS_VOICE_NAME,
- "api_key": self.TTS_API_KEY
- },
- "stt_provider": {
- "provider": self.STT_PROVIDER,
- "model_name": self.STT_MODEL_NAME,
- "api_key": self.STT_API_KEY
+ "llm_providers": self.LLM_PROVIDERS,
+ "active_llm_provider": getattr(self, "ACTIVE_LLM_PROVIDER", list(self.LLM_PROVIDERS.keys())[0] if self.LLM_PROVIDERS else None),
+ "tts_providers": self.TTS_PROVIDERS,
+ "active_tts_provider": self.TTS_PROVIDER,
+ "stt_providers": self.STT_PROVIDERS,
+ "active_stt_provider": self.STT_PROVIDER,
+ "swarm": {
+ "external_endpoint": self.GRPC_EXTERNAL_ENDPOINT
}
}
diff --git a/ai-hub/app/core/services/auth.py b/ai-hub/app/core/services/auth.py
index 4657094..9139be7 100644
--- a/ai-hub/app/core/services/auth.py
+++ b/ai-hub/app/core/services/auth.py
@@ -69,10 +69,10 @@
if not all([oidc_id, email]):
raise HTTPException(status_code=400, detail="Essential user data missing from ID token (sub and email required).")
- user_id = self.services.user_service.save_user(
+ user_id, linked = self.services.user_service.save_user(
db=db,
oidc_id=oidc_id,
email=email,
username=username
)
- return {"user_id": user_id}
+ return {"user_id": user_id, "linked": linked}
diff --git a/ai-hub/app/core/services/preference.py b/ai-hub/app/core/services/preference.py
index 80dc802..e4e22c4 100644
--- a/ai-hub/app/core/services/preference.py
+++ b/ai-hub/app/core/services/preference.py
@@ -16,11 +16,39 @@
if len(k) <= 8: return "****"
return k[:4] + "*" * (len(k)-8) + k[-4:]
- def merge_user_config(self, user, db) -> Dict[str, Any]:
+ def merge_user_config(self, user, db) -> schemas.ConfigResponse:
prefs_dict = user.preferences or {}
- llm_prefs = prefs_dict.get("llm", {})
- tts_prefs = prefs_dict.get("tts", {})
- stt_prefs = prefs_dict.get("stt", {})
+
+ def normalize_section(section_name, default_active):
+            section = prefs_dict.get(section_name) if isinstance(prefs_dict.get(section_name), dict) else {}
+ # If already new style, just return a copy
+ if isinstance(section, dict) and "providers" in section:
+ return copy.deepcopy(section)
+
+ # Legacy transformation
+ providers = {}
+ active = section.get("active_provider") or section.get("provider") or default_active
+
+ # Known providers to check for legacy transformation
+ legacy_keys = ["openai", "gemini", "deepseek", "gcloud_tts", "azure", "google", "elevenlabs"]
+ for p in legacy_keys:
+ if p in section:
+ providers[p] = section[p]
+
+ # If still no providers found but it's not empty, it might be a flat dict of other providers
+ if not providers and section and isinstance(section, dict):
+ for k, v in section.items():
+ if k not in ["active_provider", "provider", "providers"] and isinstance(v, dict):
+ providers[k] = v
+
+ return {
+ "active_provider": str(active) if active else default_active,
+ "providers": providers
+ }
+
+ llm_prefs = normalize_section("llm", "deepseek")
+ tts_prefs = normalize_section("tts", settings.TTS_PROVIDER)
+ stt_prefs = normalize_section("stt", settings.STT_PROVIDER)
system_prefs = self.services.user_service.get_system_settings(db)
system_statuses = system_prefs.get("statuses", {})
@@ -32,93 +60,79 @@
has_key = p_data and p_data.get("api_key") and p_data.get("api_key") not in ("None", "none", "")
return is_success or bool(has_key)
- # Build effective providers map
- # ... simplifying the code from user.py
- user_providers = llm_prefs.get("providers", {})
- if not user_providers:
- system_llm = system_prefs.get("llm", {}).get("providers", {})
- user_providers = system_llm if system_llm else {
- "deepseek": {"api_key": settings.DEEPSEEK_API_KEY, "model": settings.DEEPSEEK_MODEL_NAME},
- "gemini": {"api_key": settings.GEMINI_API_KEY, "model": settings.GEMINI_MODEL_NAME},
- }
+ # Build effective combined config for processing
+ def get_effective_providers(section_name, user_section_providers, sys_defaults):
+ # Start with system defaults if user has none
+ effective_providers = {}
+ if not user_section_providers:
+ effective_providers = copy.deepcopy(sys_defaults)
+ else:
+ effective_providers = copy.deepcopy(user_section_providers)
+
+ # Filter by health and mask keys
+ res = {}
+ for p, p_data in effective_providers.items():
+ if p_data and is_provider_healthy(section_name, p, p_data):
+ masked_data = copy.deepcopy(p_data)
+ masked_data["api_key"] = self.mask_key(p_data.get("api_key"))
+ res[p] = masked_data
+ return res
- llm_providers_effective = {
- p: {"api_key": self.mask_key(p_p.get("api_key")), "model": p_p.get("model")}
- for p, p_p in user_providers.items() if p_p and is_provider_healthy("llm", p, p_p)
- }
+ system_llm = system_prefs.get("llm", {}).get("providers", {
+ "deepseek": {"api_key": settings.DEEPSEEK_API_KEY, "model": settings.DEEPSEEK_MODEL_NAME},
+ "gemini": {"api_key": settings.GEMINI_API_KEY, "model": settings.GEMINI_MODEL_NAME},
+ })
+ llm_providers_effective = get_effective_providers("llm", llm_prefs["providers"], system_llm)
- user_tts_providers = tts_prefs.get("providers", {})
- if not user_tts_providers:
- system_tts = system_prefs.get("tts", {}).get("providers", {})
- user_tts_providers = system_tts if system_tts else {
- settings.TTS_PROVIDER: {
- "api_key": settings.TTS_API_KEY,
- "model": settings.TTS_MODEL_NAME,
- "voice": settings.TTS_VOICE_NAME
- }
+ system_tts = system_prefs.get("tts", {}).get("providers", {
+ settings.TTS_PROVIDER: {
+ "api_key": settings.TTS_API_KEY,
+ "model": settings.TTS_MODEL_NAME,
+ "voice": settings.TTS_VOICE_NAME
}
-
- tts_providers_effective = {
- p: {
- "api_key": self.mask_key(p_p.get("api_key")),
- "model": p_p.get("model"),
- "voice": p_p.get("voice")
- }
- for p, p_p in user_tts_providers.items() if p_p and is_provider_healthy("tts", p, p_p)
- }
+ })
+ tts_providers_effective = get_effective_providers("tts", tts_prefs["providers"], system_tts)
- user_stt_providers = stt_prefs.get("stt", {}).get("providers", {}) or stt_prefs.get("providers", {})
- if not user_stt_providers:
- system_stt = system_prefs.get("stt", {}).get("providers", {})
- user_stt_providers = system_stt if system_stt else {
- settings.STT_PROVIDER: {"api_key": settings.STT_API_KEY, "model": settings.STT_MODEL_NAME}
- }
-
- stt_providers_effective = {
- p: {"api_key": self.mask_key(p_p.get("api_key")), "model": p_p.get("model")}
- for p, p_p in user_stt_providers.items() if p_p and is_provider_healthy("stt", p, p_p)
- }
+ system_stt = system_prefs.get("stt", {}).get("providers", {
+ settings.STT_PROVIDER: {"api_key": settings.STT_API_KEY, "model": settings.STT_MODEL_NAME}
+ })
+ stt_providers_effective = get_effective_providers("stt", stt_prefs["providers"], system_stt)
effective = {
"llm": {
- "active_provider": llm_prefs.get("active_provider") or (next(iter(llm_providers_effective), None)) or "deepseek",
+ "active_provider": llm_prefs.get("active_provider") or (next(iter(llm_providers_effective), "deepseek")),
"providers": llm_providers_effective
},
"tts": {
- "active_provider": tts_prefs.get("active_provider") or (next(iter(tts_providers_effective), None)) or settings.TTS_PROVIDER,
+ "active_provider": tts_prefs.get("active_provider") or (next(iter(tts_providers_effective), settings.TTS_PROVIDER)),
"providers": tts_providers_effective
},
"stt": {
- "active_provider": stt_prefs.get("active_provider") or (next(iter(stt_providers_effective), None)) or settings.STT_PROVIDER,
+ "active_provider": stt_prefs.get("active_provider") or (next(iter(stt_providers_effective), settings.STT_PROVIDER)),
"providers": stt_providers_effective
}
}
group = user.group or self.services.user_service.get_or_create_default_group(db)
- if group and user.role != "admin":
+ if group:
policy = group.policy or {}
- def apply_policy(section_key, policy_key, p_dict):
+ def apply_policy(section_key, policy_key):
allowed = policy.get(policy_key, [])
if not allowed:
effective[section_key]["providers"] = {}
- if p_dict and "providers" in p_dict: p_dict["providers"] = {}
effective[section_key]["active_provider"] = ""
- return p_dict
+ return
providers = effective[section_key]["providers"]
filtered_eff = {k: v for k, v in providers.items() if k in allowed}
effective[section_key]["providers"] = filtered_eff
- if p_dict and "providers" in p_dict:
- p_dict["providers"] = {k: v for k, v in p_dict["providers"].items() if k in allowed}
-
if effective[section_key].get("active_provider") not in allowed:
effective[section_key]["active_provider"] = next(iter(filtered_eff), None) or ""
- return p_dict
- llm_prefs = apply_policy("llm", "llm", llm_prefs)
- tts_prefs = apply_policy("tts", "tts", tts_prefs)
- stt_prefs = apply_policy("stt", "stt", stt_prefs)
+ apply_policy("llm", "llm")
+ apply_policy("tts", "tts")
+ apply_policy("stt", "stt")
def mask_section_prefs(section_dict):
if not section_dict: return {}
@@ -139,47 +153,42 @@
effective=effective
)
+
def update_user_config(self, user, prefs: schemas.UserPreferences, db) -> schemas.UserPreferences:
# When saving, if the api_key contains ****, we must retain the old one from the DB
old_prefs = user.preferences or {}
-
+
+ def get_old_providers(section_name):
+ section = old_prefs.get(section_name, {})
+ if isinstance(section, dict) and "providers" in section:
+ return section["providers"]
+
+ # Legacy extraction
+ providers = {}
+ legacy_keys = ["openai", "gemini", "deepseek", "gcloud_tts", "azure", "google", "elevenlabs"]
+ for p in legacy_keys:
+ if p in section:
+ providers[p] = section[p]
+
+ if not providers and section and isinstance(section, dict):
+ for k, v in section.items():
+ if k not in ["active_provider", "provider", "providers"] and isinstance(v, dict):
+ providers[k] = v
+ return providers
+
def preserve_masked_keys(section_name, new_section):
if not new_section or "providers" not in new_section:
return
- old_section = old_prefs.get(section_name, {}).get("providers", {})
+ old_section_providers = get_old_providers(section_name)
for p_name, p_data in new_section["providers"].items():
- if p_data.get("api_key") and "***" in p_data["api_key"]:
- if p_name in old_section:
- p_data["api_key"] = old_section[p_name].get("api_key")
-
- def resolve_clone_from(section_name, new_section):
- if not new_section or "providers" not in new_section:
- return
- old_section = old_prefs.get(section_name, {}).get("providers", {})
- system_prefs = self.services.user_service.get_system_settings(db)
- system_section = system_prefs.get(section_name, {}).get("providers", {})
-
- for p_name, p_data in new_section["providers"].items():
- clone_source = p_data.pop("_clone_from", None)
- if not clone_source:
- continue
- real_key = (
- old_section.get(clone_source, {}).get("api_key")
- or system_section.get(clone_source, {}).get("api_key")
- )
- if real_key and "***" not in str(real_key):
- p_data["api_key"] = real_key
- logger.info(f"Resolved _clone_from: {p_name} inherited api_key from {clone_source} [{section_name}]")
- else:
- logger.warning(f"Could not resolve _clone_from for {p_name}: source '{clone_source}' key not found or masked.")
+ if p_data.get("api_key") and "***" in str(p_data["api_key"]):
+ if p_name in old_section_providers:
+ p_data["api_key"] = old_section_providers[p_name].get("api_key")
if prefs.llm: preserve_masked_keys("llm", prefs.llm)
if prefs.tts: preserve_masked_keys("tts", prefs.tts)
if prefs.stt: preserve_masked_keys("stt", prefs.stt)
- if prefs.llm: resolve_clone_from("llm", prefs.llm)
- if prefs.tts: resolve_clone_from("tts", prefs.tts)
- if prefs.stt: resolve_clone_from("stt", prefs.stt)
current_prefs = dict(user.preferences or {})
current_prefs.update({
diff --git a/ai-hub/app/core/services/user.py b/ai-hub/app/core/services/user.py
index e20e51d..beef381 100644
--- a/ai-hub/app/core/services/user.py
+++ b/ai-hub/app/core/services/user.py
@@ -80,53 +80,60 @@
db.rollback()
print(f"Failed to bootstrap local admin: {e}")
- def save_user(self, db: Session, oidc_id: str, email: str, username: str) -> str:
+ def save_user(self, db: Session, oidc_id: str, email: str, username: str) -> tuple[str, bool]:
"""
- Saves or updates a user record based on their OIDC ID.
- If a user with this OIDC ID exists, it returns their existing ID.
- Otherwise, it creates a new user record.
- The first user to register will be granted the 'admin' role.
+ Saves or updates a user record based on their OIDC ID or Email.
+ Returns (user_id, linked_flag)
"""
try:
- # Check if a user with this OIDC ID already exists
- existing_user = db.query(models.User).filter(models.User.oidc_id == oidc_id).first()
+ # 1. Check if a user with this OIDC ID already exists
+ user_by_oidc = db.query(models.User).filter(models.User.oidc_id == oidc_id).first()
- if existing_user:
- # Update the user's information and login activity
- existing_user.email = email
- existing_user.username = username
- existing_user.last_login_at = datetime.utcnow()
+ if user_by_oidc:
+ user_by_oidc.email = email
+ user_by_oidc.username = username
+ user_by_oidc.last_login_at = datetime.utcnow()
# Check if user should be promoted to admin based on config
from app.config import settings
- if email in settings.SUPER_ADMINS and existing_user.role != "admin":
- existing_user.role = "admin"
+ if email in settings.SUPER_ADMINS and user_by_oidc.role != "admin":
+ user_by_oidc.role = "admin"
db.commit()
- return existing_user.id
- else:
- # Ensure default group exists
- default_group = self.get_or_create_default_group(db)
-
- # Determine role based on SUPER_ADMINS or fallback to user
- from app.config import settings
- role = "admin" if email in settings.SUPER_ADMINS else "user"
+ return user_by_oidc.id, False
- # Create a new user record
- new_user = models.User(
- id=str(uuid.uuid4()), # Generate a unique ID for the user
- oidc_id=oidc_id,
- email=email,
- username=username,
- role=role,
- group_id=default_group.id,
- created_at=datetime.utcnow(),
- last_login_at=datetime.utcnow()
- )
- db.add(new_user)
+ # 2. Check if a user with this email already exists (Local -> OIDC Linking)
+ user_by_email = db.query(models.User).filter(models.User.email == email).first()
+ if user_by_email:
+ # Link the OIDC ID to the existing local account
+ user_by_email.oidc_id = oidc_id
+ user_by_email.username = username # Prefer OIDC display name
+ user_by_email.last_login_at = datetime.utcnow()
+
db.commit()
- db.refresh(new_user)
- return new_user.id
+ print(f"[Day 2] Linked OIDC identity {oidc_id} to existing account {email}")
+ return user_by_email.id, True
+
+ # 3. Create a new user record
+ default_group = self.get_or_create_default_group(db)
+ from app.config import settings
+ role = "admin" if email in settings.SUPER_ADMINS else "user"
+
+ new_user = models.User(
+ id=str(uuid.uuid4()),
+ oidc_id=oidc_id,
+ email=email,
+ username=username,
+ role=role,
+ group_id=default_group.id,
+ created_at=datetime.utcnow(),
+ last_login_at=datetime.utcnow()
+ )
+ db.add(new_user)
+ db.commit()
+ db.refresh(new_user)
+ return new_user.id, False
+
except SQLAlchemyError as e:
db.rollback()
raise
diff --git a/ai-hub/app/protos/browser_pb2_grpc.py b/ai-hub/app/protos/browser_pb2_grpc.py
index c69dabb..276488e 100644
--- a/ai-hub/app/protos/browser_pb2_grpc.py
+++ b/ai-hub/app/protos/browser_pb2_grpc.py
@@ -2,7 +2,7 @@
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
-from protos import browser_pb2 as protos_dot_browser__pb2
+from app.protos import browser_pb2 as protos_dot_browser__pb2
class BrowserServiceStub(object):
diff --git a/docker-compose.yml b/docker-compose.yml
index 838391d..1bfaf84 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,4 +1,3 @@
-version: '3.8'
services:
# Unified Frontend and Nginx Gateway
@@ -37,6 +36,7 @@
volumes:
- ai_hub_data:/app/data:rw
- ./config.yaml:/app/config.yaml:rw
+ - ./ai-hub/app:/app/app:rw
- ./agent-node:/app/agent-node-source:ro
- ./skills:/app/skills:ro
- browser_shm:/dev/shm:rw
@@ -53,7 +53,7 @@
container_name: cortex_browser_service
restart: always
ports:
- - "50052:50052"
+ - "50053:50052"
environment:
- SHM_PATH=/dev/shm/cortex_browser
volumes:
diff --git a/docs/auth_tls_todo.md b/docs/auth_tls_todo.md
index fc10662..17581f2 100644
--- a/docs/auth_tls_todo.md
+++ b/docs/auth_tls_todo.md
@@ -14,17 +14,17 @@
- [x] **Configuration**: Update `Settings` (`app/config.py`) to make OIDC settings optional and add an `oidc_enabled: bool` flag.
- [x] **Backend Initialization**: If `CORTEX_ADMIN_PASSWORD` is present in the environment for the `SUPER_ADMINS` initialization, hash it and assign it to the admin account.
- [x] **API Routes**: Create local login endpoints (`POST /api/v1/users/login/local` to issue JWTs) and (`PUT /api/v1/users/password` for password resets).
-- [ ] **Frontend**: Redesign the Auth/Login page to display a Username/Password default form.
+- [x] **Frontend**: Redesign the Auth/Login page to display a Username/Password default form.
## Phase 3: Day 1 Swarm Control (Insecure/Local Status)
Support running the mesh over internal loopbacks but strictly warn the end-user.
-- [ ] **Backend Configuration**: Add `GRPC_TLS_ENABLED`, `GRPC_EXTERNAL_ENDPOINT` to `config.py`.
-- [ ] **Backend API**: Expose a `/api/v1/status` or equivalent endpoint providing the current TLS/Hostname state to the frontend.
-- [ ] **Frontend UI**: Add persistent "Insecure Mode" and "Missing External Hostname" warning banners to the Swarm Dashboard frontend when running in Day 1 mode.
+- [x] **Backend Configuration**: Add `GRPC_TLS_ENABLED`, `GRPC_EXTERNAL_ENDPOINT` to `config.py`.
+- [x] **Backend API**: Expose a `/api/v1/status` or equivalent endpoint providing the current TLS/Hostname state to the frontend.
+- [x] **Frontend UI**: Add persistent "Insecure Mode" and "Missing External Hostname" warning banners to the Swarm Dashboard frontend when running in Day 1 mode.
## Phase 4: Day 2 Single Sign-On (OIDC Linking)
Allow transition to Enterprise SSO without breaking or duplicate accounting.
-- [ ] **Backend Service**: Update `app/core/services/auth.py` (`handle_callback`) to search for existing local users via `email` and safely link the incoming OIDC `sub` payload.
+- [x] **Backend Service**: Update `app/core/services/auth.py` (`handle_callback`) to search for existing local users via `email` and safely link the incoming OIDC `sub` payload.
- [ ] **Admin API**: Create `PUT /api/v1/admin/config/oidc` for UI-based toggling and configuration of SSO parameters without restarting.
- [ ] **Frontend Login**: Dynamically query `/api/v1/auth/config`. If enabled, render the "Log in with SSO" button instead of or alongside local Auth.
- [ ] **Frontend Settings**: Create an Admin Settings UI panel for OIDC Configuration.
diff --git a/docs/refactor_tracking.md b/docs/refactor_tracking.md
new file mode 100644
index 0000000..c224a39
--- /dev/null
+++ b/docs/refactor_tracking.md
@@ -0,0 +1,18 @@
+# Refactor Tracking: Settings & Persistence
+
+## 1. Open Issues & Future Improvements
+- [x] **UI Modernization (Modals)**: Replaced all native browser pop-outs (`alert()`, `confirm()`, `prompt()`) with custom UI Modals across Nodes, Skills, and Settings features for a persistent premium experience.
+
+## 2. Completed Items (Recent)
+- [x] **Nodes feature Modals Refactor**: Replaced native browser popups with custom Error and Success modals.
+- [x] **Skills feature Modals Refactor**: Replaced native browser popups with custom Error and Confirmation modals.
+- [x] **Settings feature Modals Refactor**: Transitioned group/provider deletion confirmation to custom UI modals.
+- [x] **Chrome Dark Mode Fixes**: Applied comprehensive dark mode visibility fixes to `SwarmControlPage`, `VoiceChatPage`, `ProfilePage`, and all settings cards.
+- [x] **Login Flow Improvement**: Implemented automatic redirect to the home page upon successful local and OIDC login.
+- [x] **User Preference Relocation**: Moved individual user settings (voice chat experience, AI defaults, silences sensitivity) to the Profile page.
+- [x] **Export/Import Relocation**: Moved system-wide Export/Import features to a prominent "System Maintenance & Portability" card in the Settings page.
+- [x] **Swarm Control Structural Fix**: Resolved JSX nesting errors and balanced tags in `SwarmControlPage.js`.
+- [x] **UI Modernization (Modal Triage)**: Replaced several native alerts in core pages with a custom `ErrorModal`.
+- [x] **SettingsPageContent.js Refactoring**: Modularized the settings page into domain-specific cards.
+- [x] **apiService.js Refactoring**: Split monolithic API service into domain-driven modules.
+- [x] **Multi-Provider Refactor**: Successfully transitioned STT and TTS to a multi-provider structure.
diff --git a/frontend/src/features/auth/pages/LoginPage.js b/frontend/src/features/auth/pages/LoginPage.js
index 1e53c14..f8ff809 100644
--- a/frontend/src/features/auth/pages/LoginPage.js
+++ b/frontend/src/features/auth/pages/LoginPage.js
@@ -1,35 +1,53 @@
import React, { useState, useEffect } from 'react';
-import { login, getUserStatus, logout } from '../../../services/apiService';
+import { login, loginLocal, getUserStatus, logout, getAuthConfig } from '../../../services/apiService';
const LoginPage = () => {
const [user, setUser] = useState(null);
const [isLoading, setIsLoading] = useState(false);
const [error, setError] = useState(null);
+ const [oidcEnabled, setOidcEnabled] = useState(false);
+
+ // Local login state
+ const [email, setEmail] = useState('');
+ const [password, setPassword] = useState('');
+ const [successMessage, setSuccessMessage] = useState('');
useEffect(() => {
- // We now look for a 'user_id' in the URL, which is provided by the backend
- // after a successful OIDC login and callback.
+ // 1. Check if OIDC is enabled
+ const checkConfig = async () => {
+ try {
+ const config = await getAuthConfig();
+ setOidcEnabled(config.oidc_configured);
+ } catch (err) {
+ console.error("Failed to fetch auth config", err);
+ }
+ };
+ checkConfig();
+
+ // 2. Handle OIDC callback or persistent session
const params = new URLSearchParams(window.location.search);
const userIdFromUrl = params.get('user_id');
-
- // First, check localStorage for a saved user ID for persistent login
const storedUserId = localStorage.getItem('userId');
const userId = userIdFromUrl || storedUserId;
+ const isLinked = params.get("linked") === "true";
if (userId) {
setIsLoading(true);
- // Fetch the full user details using the user ID from the URL.
- // This is a more secure and robust way to handle the final callback.
const fetchUserDetails = async () => {
try {
const userStatus = await getUserStatus(userId);
setUser(userStatus);
- // Store the user ID for future requests (e.g., in localStorage)
localStorage.setItem('userId', userStatus.id);
- // Clean up the URL by removing the query parameter
window.history.replaceState({}, document.title, window.location.pathname);
+ if (isLinked) {
+ setSuccessMessage("Social identity successfully linked to your existing account!");
+ }
+ setTimeout(() => {
+ window.location.href = "/swarm-control";
+ }, 1500);
} catch (err) {
setError('Failed to get user status. Please try again.');
+ localStorage.removeItem('userId'); // Clear invalid ID
console.error(err);
} finally {
setIsLoading(false);
@@ -39,12 +57,29 @@
}
}, []);
- const handleLogin = () => {
- // Redirect to the backend's /users/login endpoint
- // The backend handles the OIDC redirect from there.
+ const handleOidcLogin = () => {
login();
};
+ const handleLocalLogin = async (e) => {
+ e.preventDefault();
+ setIsLoading(true);
+ setError(null);
+ try {
+ const result = await loginLocal(email, password);
+ setUser({ id: result.user_id, email: result.email });
+ localStorage.setItem('userId', result.user_id);
+ // Redirect to home page after successful local login
+ setTimeout(() => {
+ window.location.href = "/";
+ }, 1000);
+ } catch (err) {
+ setError(err.message || 'Login failed. Please check your credentials.');
+ } finally {
+ setIsLoading(false);
+ }
+ };
+
const handleLogout = async () => {
setIsLoading(true);
try {
@@ -63,60 +98,125 @@
const renderContent = () => {
if (isLoading) {
return (
-
-