diff --git a/.agent/utils/env_loader.sh b/.agent/utils/env_loader.sh index dce587a..c9ac4b0 100644 --- a/.agent/utils/env_loader.sh +++ b/.agent/utils/env_loader.sh @@ -2,8 +2,10 @@ # Load production credentials from GitBucket if they are missing from the environment if [ -z "$REMOTE_PASSWORD" ] || [ -z "$REMOTE_HOST" ] || [ -z "$REMOTE_USER" ]; then # Load GitBucket token if present - if [ -f "/app/.env.gitbucket" ]; then - source "/app/.env.gitbucket" + if [ -f "$LOCAL_APP_DIR/.env.gitbucket" ]; then + source "$LOCAL_APP_DIR/.env.gitbucket" + elif [ -f "./.env.gitbucket" ]; then + source "./.env.gitbucket" fi if [ -n "$GITBUCKET_TOKEN" ] && [ -n "$DEPLOYMENT_SNIPPET_ID" ]; then diff --git a/ai-hub/app/api/routes/user.py b/ai-hub/app/api/routes/user.py index b1bcff7..9306ec5 100644 --- a/ai-hub/app/api/routes/user.py +++ b/ai-hub/app/api/routes/user.py @@ -15,6 +15,7 @@ from app.api.dependencies import ServiceContainer, get_db from app.api import schemas from app.core.services.user import login_required, verify_password, hash_password +from app.core.grpc.utils.crypto import encrypt_value, decrypt_value # Setup logging logging.basicConfig(level=logging.INFO) @@ -488,76 +489,70 @@ if not user or user.role != "admin": raise HTTPException(status_code=403, detail="Forbidden: Admin only") - prefs_dict = user.preferences or {} + import copy + prefs_dict = copy.deepcopy(user.preferences) if user.preferences else {} from app.config import settings import yaml - - def mask_secret(value): - if not value: - return None - if reveal_secrets: - return value - return "***" - - llm_prefs = prefs_dict.get("llm", {}) - tts_prefs = prefs_dict.get("tts", {}) - stt_prefs = prefs_dict.get("stt", {}) - llm_providers_export = {} - user_providers = llm_prefs.get("providers", {}) - if not user_providers: - # Fallback to system defaults if no user config exists - llm_providers_export = { - "deepseek_api_key": mask_secret(settings.DEEPSEEK_API_KEY), - "deepseek_model_name":
settings.DEEPSEEK_MODEL_NAME, - "gemini_api_key": mask_secret(settings.GEMINI_API_KEY), - "gemini_model_name": settings.GEMINI_MODEL_NAME, - "openai_api_key": mask_secret(settings.OPENAI_API_KEY) - } - else: - for p, p_data in user_providers.items(): - llm_providers_export[f"{p}_api_key"] = mask_secret(p_data.get("api_key")) - llm_providers_export[f"{p}_model_name"] = p_data.get("model") + # Sensitive keys that should be encrypted + SENSITIVE_KEYS = ["api_key", "client_secret", "webhook_secret", "password", "key_content", "key_file"] - def get_provider_export(section_prefs, fallback_provider, fallback_model, fallback_api_key, fallback_voice=None): - active_p = section_prefs.get("active_provider") - providers = section_prefs.get("providers", {}) - if active_p and active_p in providers: - p_data = providers[active_p] - return { - "provider": active_p, - "model_name": p_data.get("model"), - "voice_name": p_data.get("voice"), - "api_key": mask_secret(p_data.get("api_key")) - } - # Fallback to system settings - return { - "provider": fallback_provider, - "model_name": fallback_model, - "voice_name": fallback_voice, - "api_key": mask_secret(fallback_api_key) - } - - # Layer 2 (Day 2) Export: Only LLM, TTS, STT - yaml_data = { - "llm_providers": llm_providers_export, - "tts_provider": get_provider_export(tts_prefs, settings.TTS_PROVIDER, settings.TTS_MODEL_NAME, settings.TTS_API_KEY, settings.TTS_VOICE_NAME), - "stt_provider": get_provider_export(stt_prefs, settings.STT_PROVIDER, settings.STT_MODEL_NAME, settings.STT_API_KEY) - } - - # Filter out None values recursively - def remove_none(obj): + def process_export(obj): if isinstance(obj, dict): - return {k: remove_none(v) for k, v in obj.items() if v is not None} + res = {} + for k, v in obj.items(): + if v is None: continue + if k in SENSITIVE_KEYS and v: + if reveal_secrets: + res[k] = v + else: + res[k] = encrypt_value(v) + else: + res[k] = process_export(v) + return res + elif isinstance(obj, list): + return 
[process_export(x) for x in obj] return obj - - clean_yaml_data = remove_none(yaml_data) + + # Ensure we have the base sections even if empty in prefs + export_data = { + "llm": prefs_dict.get("llm", {"providers": {}, "active_provider": "deepseek"}), + "tts": prefs_dict.get("tts", {"providers": {}, "active_provider": settings.TTS_PROVIDER}), + "stt": prefs_dict.get("stt", {"providers": {}, "active_provider": settings.STT_PROVIDER}) + } + + # Backfill from system settings if sections are empty + if not export_data["llm"].get("providers"): + export_data["llm"]["providers"] = { + "deepseek": {"api_key": settings.DEEPSEEK_API_KEY, "model": settings.DEEPSEEK_MODEL_NAME}, + "gemini": {"api_key": settings.GEMINI_API_KEY, "model": settings.GEMINI_MODEL_NAME}, + "openai": {"api_key": settings.OPENAI_API_KEY} + } + + if not export_data["tts"].get("providers"): + export_data["tts"]["providers"] = { + settings.TTS_PROVIDER: { + "api_key": settings.TTS_API_KEY, + "model": settings.TTS_MODEL_NAME, + "voice": settings.TTS_VOICE_NAME + } + } + + if not export_data["stt"].get("providers"): + export_data["stt"]["providers"] = { + settings.STT_PROVIDER: { + "api_key": settings.STT_API_KEY, + "model": settings.STT_MODEL_NAME + } + } + + clean_yaml_data = process_export(export_data) yaml_str = yaml.dump(clean_yaml_data, sort_keys=False, default_flow_style=False) return PlainTextResponse( content=yaml_str, media_type="application/x-yaml", - headers={"Content-Disposition": "attachment; filename=\"day2_config.yaml\""} + headers={"Content-Disposition": "attachment; filename=\"cortex_config.yaml\""} ) @router.post("/me/config/import", response_model=schemas.UserPreferences, summary="Import Configurations from YAML") @@ -580,17 +575,27 @@ except Exception as e: raise HTTPException(status_code=400, detail=f"Invalid YAML file: {e}") - # Reverse mapping: YAML -> UserPreferences structure - new_llm = { "providers": {}, "active_provider": None } - new_tts = { "providers": {}, "active_provider": 
None } - new_stt = { "providers": {}, "active_provider": None } + def process_import(obj): + if isinstance(obj, dict): + return {k: process_import(v) for k, v in obj.items()} + elif isinstance(obj, str): + return decrypt_value(obj) + elif isinstance(obj, list): + return [process_import(x) for x in obj] + return obj - # --- LLM --- - llm_data = data.get("llm_providers", {}) - if "providers" in llm_data: # Structured - new_llm["providers"] = llm_data["providers"] - else: # Flattened (as exported) - for k, v in llm_data.items(): + data = process_import(data) + + # Map to UserPreferences structure + new_llm = data.get("llm", {}) + new_tts = data.get("tts", {}) + new_stt = data.get("stt", {}) + + # Handle legacy flat structure if imported from old version + if not new_llm and "llm_providers" in data: + llm_flat = data.get("llm_providers", {}) + new_llm = {"providers": {}} + for k, v in llm_flat.items(): if k.endswith("_api_key"): p = k.replace("_api_key", "") if p not in new_llm["providers"]: new_llm["providers"][p] = {} @@ -599,29 +604,27 @@ p = k.replace("_model_name", "") if p not in new_llm["providers"]: new_llm["providers"][p] = {} new_llm["providers"][p]["model"] = v - - if new_llm["providers"]: - new_llm["active_provider"] = next(iter(new_llm["providers"]), None) + if new_llm["providers"]: + new_llm["active_provider"] = next(iter(new_llm["providers"]), None) - # --- TTS --- - tts_data = data.get("tts_provider", {}) - if tts_data: - p = tts_data.get("provider") or "google_gemini" - new_tts["active_provider"] = p - new_tts["providers"][p] = { - "api_key": tts_data.get("api_key"), - "model": tts_data.get("model_name"), - "voice": tts_data.get("voice_name") + if not new_tts and "tts_provider" in data: + tts_flat = data.get("tts_provider", {}) + p = tts_flat.get("provider") or "google_gemini" + new_tts = { + "active_provider": p, + "providers": { + p: {"api_key": tts_flat.get("api_key"), "model": tts_flat.get("model_name"), "voice": tts_flat.get("voice_name")} + } } - 
# --- STT --- - stt_data = data.get("stt_provider", {}) - if stt_data: - p = stt_data.get("provider") or "google_gemini" - new_stt["active_provider"] = p - new_stt["providers"][p] = { - "api_key": stt_data.get("api_key"), - "model": stt_data.get("model_name") + if not new_stt and "stt_provider" in data: + stt_flat = data.get("stt_provider", {}) + p = stt_flat.get("provider") or "google_gemini" + new_stt = { + "active_provider": p, + "providers": { + p: {"api_key": stt_flat.get("api_key"), "model": stt_flat.get("model_name")} + } } user.preferences = { @@ -633,36 +636,39 @@ from sqlalchemy.orm.attributes import flag_modified flag_modified(user, "preferences") - # --- Day 2 Sync --- - from app.config import settings as global_settings - if new_llm.get("providers"): - global_settings.LLM_PROVIDERS.update(new_llm["providers"]) - if new_tts.get("active_provider"): - p = new_tts["active_provider"] - p_data = new_tts["providers"].get(p, {}) - if p_data: - global_settings.TTS_PROVIDER = p - global_settings.TTS_MODEL_NAME = p_data.get("model") or global_settings.TTS_MODEL_NAME - global_settings.TTS_VOICE_NAME = p_data.get("voice") or global_settings.TTS_VOICE_NAME - global_settings.TTS_API_KEY = p_data.get("api_key") or global_settings.TTS_API_KEY - if new_stt.get("active_provider"): - p = new_stt["active_provider"] - p_data = new_stt["providers"].get(p, {}) - if p_data: - global_settings.STT_PROVIDER = p - global_settings.STT_MODEL_NAME = p_data.get("model") or global_settings.STT_MODEL_NAME - global_settings.STT_API_KEY = p_data.get("api_key") or global_settings.STT_API_KEY - - try: - global_settings.save_to_yaml() - except Exception as ey: - logger.error(f"Failed to sync settings to YAML on import: {ey}") + # Sync to global settings (only for admin) + if user.role == "admin": + from app.config import settings as global_settings + if new_llm.get("providers"): + global_settings.LLM_PROVIDERS.update(new_llm["providers"]) + if new_tts.get("active_provider"): + p = 
new_tts["active_provider"] + p_data = new_tts["providers"].get(p, {}) + if p_data: + global_settings.TTS_PROVIDER = p + global_settings.TTS_MODEL_NAME = p_data.get("model") or global_settings.TTS_MODEL_NAME + global_settings.TTS_VOICE_NAME = p_data.get("voice") or global_settings.TTS_VOICE_NAME + global_settings.TTS_API_KEY = p_data.get("api_key") or global_settings.TTS_API_KEY + if new_stt.get("active_provider"): + p = new_stt["active_provider"] + p_data = new_stt["providers"].get(p, {}) + if p_data: + global_settings.STT_PROVIDER = p + global_settings.STT_MODEL_NAME = p_data.get("model") or global_settings.STT_MODEL_NAME + global_settings.STT_API_KEY = p_data.get("api_key") or global_settings.STT_API_KEY + + try: + global_settings.save_to_yaml() + except Exception as ey: + logger.error(f"Failed to sync settings to YAML on import: {ey}") db.add(user) db.commit() db.refresh(user) return schemas.UserPreferences(llm=user.preferences.get("llm", {}), tts=user.preferences.get("tts", {}), stt=user.preferences.get("stt", {})) + + # --- NEW ADMIN ROUTES --- @router.get("/admin/users", response_model=list[schemas.UserProfile], summary="List All Users (Admin Only)") diff --git a/ai-hub/app/core/grpc/utils/crypto.py b/ai-hub/app/core/grpc/utils/crypto.py index 3b6dd5c..f5df074 100644 --- a/ai-hub/app/core/grpc/utils/crypto.py +++ b/ai-hub/app/core/grpc/utils/crypto.py @@ -1,5 +1,7 @@ import hmac import hashlib +import base64 +from cryptography.fernet import Fernet from app.config import settings def sign_payload(payload: str) -> str: @@ -19,3 +21,30 @@ """Verifies a signature against bytes using HMAC-SHA256.""" expected = sign_bytes(data) return hmac.compare_digest(signature, expected) + +def get_fernet() -> Fernet: + """Derives a Fernet key from the system SECRET_KEY.""" + key = base64.urlsafe_b64encode(hashlib.sha256(settings.SECRET_KEY.encode()).digest()) + return Fernet(key) + +def encrypt_value(value: str) -> str: + """Encrypts a string value and returns it with an ENC() 
wrapper.""" + if not value or value == "***": return value + try: + f = get_fernet() + token = f.encrypt(value.encode()).decode() + return f"ENC({token})" + except Exception: + return value + +def decrypt_value(enc_value: str) -> str: + """Decrypts a value if it is wrapped in ENC(), otherwise returns as-is.""" + if not isinstance(enc_value, str) or not enc_value.startswith("ENC("): + return enc_value + + token = enc_value[4:-1] + try: + f = get_fernet() + return f.decrypt(token.encode()).decode() + except Exception: + return enc_value diff --git a/docker-compose.yml b/docker-compose.yml index d35b263..508be70 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -33,9 +33,10 @@ - CORTEX_ADMIN_PASSWORD=${CORTEX_ADMIN_PASSWORD} - SECRET_KEY=${SECRET_KEY:-default-insecure-key} - DEBUG_GRPC=true - - DATABASE_URL=sqlite:////tmp/ai-hub.db + - DATABASE_URL=sqlite:////app/data/ai-hub.db + - CONFIG_PATH=/app/config.yaml volumes: -# - ai_hub_data:/app/data:rw + - ./data:/app/data:rw + - ./config.yaml:/app/config.yaml:rw + - ./ai-hub/app:/app/app:rw + - ./agent-node:/app/agent-node-source:ro diff --git a/scripts/remote_deploy.sh b/scripts/remote_deploy.sh index 3eac690..6351c45 100755 --- a/scripts/remote_deploy.sh +++ b/scripts/remote_deploy.sh @@ -58,7 +58,19 @@ echo "Checking if sshpass is installed..." if ! command -v sshpass &> /dev/null; then echo "sshpass could not be found, installing..." - sudo apt-get update && sudo apt-get install -y sshpass + if [[ "$OSTYPE" == "darwin"* ]]; then + # Check if brew exists (sshpass is not in homebrew-core; use the common third-party tap) + if command -v brew &> /dev/null; then + brew install hudochenkov/sshpass/sshpass + elif [ -f "/opt/homebrew/bin/brew" ]; then + /opt/homebrew/bin/brew install hudochenkov/sshpass/sshpass + else + echo "Error: Homebrew not found. Please install sshpass manually." + exit 1 + fi + else + sudo apt-get update && sudo apt-get install -y sshpass + fi fi # 1. Sync local codebase to temporary directory on remote server