diff --git a/agent-node/bootstrap_installer.py b/agent-node/bootstrap_installer.py
index 9aee2db..c3b5691 100644
--- a/agent-node/bootstrap_installer.py
+++ b/agent-node/bootstrap_installer.py
@@ -113,44 +113,52 @@
_print("No requirements.txt found — skipping dependency install.")
return
- _print("Checking for pip...")
- pip_found = False
- try:
- subprocess.check_call([sys.executable, "-m", "pip", "--version"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
- pip_found = True
- except subprocess.CalledProcessError:
- _print("pip not found. Attempting to bootstrap pip via ensurepip...")
+ _print("Looking for pip...")
+ pip_cmd = None
+
+ # Try multiple ways to find pip
+ for cmd in [[sys.executable, "-m", "pip"], ["pip3"], ["pip"]]:
try:
- subprocess.check_call([sys.executable, "-m", "ensurepip", "--default-pip"], stdout=subprocess.DEVNULL)
- pip_found = True
- _print("pip bootstrapped successfully.")
+ subprocess.check_call(cmd + ["--version"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+ pip_cmd = cmd
+ break
+ except:
+ continue
+
+ if not pip_cmd:
+ _print("pip not found as module or command. Attempting to bootstrap pip via ensurepip...")
+ try:
+ # Try ensurepip with --user to avoid permission errors on system paths
+ subprocess.check_call([sys.executable, "-m", "ensurepip", "--user", "--default-pip"], stdout=subprocess.DEVNULL)
+ pip_cmd = [sys.executable, "-m", "pip"]
+ _print("pip bootstrapped successfully via ensurepip.")
except Exception as e:
- _print(f"Warning: Failed to bootstrap pip: {e}. If dependencies fail, please install python3-pip manually.")
+ _print(f"Warning: Failed to bootstrap pip via ensurepip: {e}")
+ _print("Attempting to download get-pip.py as last resort...")
+ try:
+ get_pip_url = "https://bootstrap.pypa.io/get-pip.py"
+ tmp_pip = os.path.join(tempfile.gettempdir(), "get-pip.py")
+ urllib.request.urlretrieve(get_pip_url, tmp_pip)
+ subprocess.check_call([sys.executable, tmp_pip, "--user"], stdout=subprocess.DEVNULL)
+ pip_cmd = [sys.executable, "-m", "pip"]
+ _print("pip installed successfully via get-pip.py.")
+ except Exception as e2:
+ _print(f"Error: All pip bootstrap attempts failed: {e2}")
- install_req_file = req_file
+ if not pip_cmd:
+ _print("-----------------------------------------------------------------------")
+ _print("ERROR: Could not find or install pip. Please install python3-pip manually.")
+ _print("-----------------------------------------------------------------------")
+ return
- _print("Installing Python dependencies (resilient mode) ...")
+ _print(f"Installing Python dependencies using {' '.join(pip_cmd)} ...")
try:
- # Using --ignore-installed to bypass "no RECORD file found" metadata errors common on Mac/Anaconda
- # and --user if we don't have root (though usually we do on NAS)
- args = [sys.executable, "-m", "pip", "install", "-r", install_req_file, "--quiet", "--ignore-installed"]
-
- # Try a quick check for root/write access to site-packages
- try:
- subprocess.check_call(args, cwd=install_dir)
- except subprocess.CalledProcessError as e:
- _print(f"Standard install failed (exit {e.returncode}). Trying --user install...")
- args_user = args + ["--user"]
- subprocess.check_call(args_user, cwd=install_dir)
-
+ # Use --user and --ignore-installed for maximum resilience on restricted environments like NAS
+ args = pip_cmd + ["install", "-r", req_file, "--quiet", "--ignore-installed", "--user"]
+ subprocess.check_call(args, cwd=install_dir)
_print("Dependencies installed successfully.")
-
except Exception as e:
_print(f"ERROR: Failed to install dependencies: {e}")
- _print("-----------------------------------------------------------------------")
- _print("HINT: If you are on Raspberry Pi / ARM and 'protobuf' or 'grpcio' fails:")
- _print(" Try manual install: sudo apt-get install python3-protobuf python3-psutil python3-grpcio")
- _print("-----------------------------------------------------------------------")
_print("The agent might fail to start if core libraries (grpcio, psutil) are missing.")
@@ -172,8 +180,8 @@
with open(config_path, "w") as f:
yaml.dump(config, f, default_flow_style=False)
_print(f"Config written to {config_path}")
- except ImportError:
- # yaml not yet installed — write manually
+ except (ImportError, AttributeError):
+ # yaml not yet installed or broken (e.g. Synology pre-installed yaml) — write manually
lines = [f"{k}: {v}\n" for k, v in config.items()]
with open(config_path, "w") as f:
f.writelines(lines)
@@ -244,7 +252,7 @@
existing_config = yaml.safe_load(f) or {}
_print(f"Loaded existing config from {config_path}")
break
- except ImportError:
+ except (ImportError, AttributeError):
# Resilient fallback for fresh environments: manual parsing
try:
with open(config_path) as f:
diff --git a/ai-hub/app/api/routes/nodes.py b/ai-hub/app/api/routes/nodes.py
index 41cb38e..144cb41 100644
--- a/ai-hub/app/api/routes/nodes.py
+++ b/ai-hub/app/api/routes/nodes.py
@@ -1149,8 +1149,8 @@
skill_cfg[skill].update(cfg)
lines = [
- "# Cortex Hub — Agent Node Configuration",
- f"# Generated for node '{node.node_id}' — keep this file secret.",
+ "# Cortex Hub - Agent Node Configuration",
+ f"# Generated for node '{node.node_id}' - keep this file secret.",
"",
f"node_id: \"{node.node_id}\"",
f"node_description: \"{node.display_name}\"",
diff --git a/ai-hub/app/core/orchestration/scheduler.py b/ai-hub/app/core/orchestration/scheduler.py
index bd1fb18..28ce3f1 100644
--- a/ai-hub/app/core/orchestration/scheduler.py
+++ b/ai-hub/app/core/orchestration/scheduler.py
@@ -67,32 +67,36 @@
if not cron_expr:
continue
+ instance = db.query(AgentInstance).filter(AgentInstance.id == instance_id).first()
+ if not instance or instance.status != 'idle':
+ continue
+
should_fire = False
try:
+ # Fallback to persistent last_heartbeat if memory map is empty (e.g. after restart)
+ last_run = self._last_run_map.get(instance_id, instance.last_heartbeat or (now - timedelta(minutes=10)))
+
if cron_expr.isdigit():
interval = int(cron_expr)
- last_run = self._last_run_map.get(instance_id, datetime.min)
if (now - last_run).total_seconds() >= interval:
should_fire = True
else:
- iter = croniter.croniter(cron_expr, now)
- last_run = self._last_run_map.get(instance_id, now - timedelta(seconds=35))
- if iter.get_next(datetime) <= now:
+ iter = croniter.croniter(cron_expr, last_run)
+ next_fire = iter.get_next(datetime)
+ if next_fire <= now:
should_fire = True
except Exception as ce:
logger.error(f"[Scheduler] Invalid cron expression '{cron_expr}' for agent {instance_id}: {ce}")
continue
if should_fire:
- instance = db.query(AgentInstance).filter(AgentInstance.id == instance_id).first()
- if instance and instance.status == 'idle':
- prompt = trigger.default_prompt or "SYSTEM: CRON WAKEUP"
- logger.info(f"[Scheduler] CRON WAKEUP: Triggering Agent {instance_id} (Cron: {cron_expr})")
- self._last_run_map[instance_id] = now
- asyncio.create_task(AgentExecutor.run(
- instance_id, prompt,
- self.services.rag_service, self.services.user_service
- ))
+ prompt = trigger.default_prompt or "SYSTEM: CRON WAKEUP"
+ logger.info(f"[Scheduler] CRON WAKEUP: Triggering Agent {instance_id} (Cron: {cron_expr})")
+ self._last_run_map[instance_id] = now
+ asyncio.create_task(AgentExecutor.run(
+ instance_id, prompt,
+ self.services.rag_service, self.services.user_service
+ ))
# --- Handle INTERVAL triggers ---
interval_triggers = db.query(AgentTrigger).filter(AgentTrigger.trigger_type == 'interval').all()
@@ -101,14 +105,10 @@
wait_seconds = trigger.interval_seconds or 60
instance = db.query(AgentInstance).filter(AgentInstance.id == instance_id).first()
- if not instance:
+ if not instance or instance.status != 'idle':
continue
- # Only fire if agent is idle (finished previous run and not suspended/paused)
- if instance.status != 'idle':
- continue
-
- last_run = self._last_run_map.get(instance_id, datetime.min)
+ last_run = self._last_run_map.get(instance_id, instance.last_heartbeat or datetime.min)
elapsed = (now - last_run).total_seconds()
if elapsed >= wait_seconds:
diff --git a/ai-hub/app/core/providers/tts/gemini.py b/ai-hub/app/core/providers/tts/gemini.py
index 20c05bc..632d077 100644
--- a/ai-hub/app/core/providers/tts/gemini.py
+++ b/ai-hub/app/core/providers/tts/gemini.py
@@ -42,9 +42,9 @@
raw_model = model_name or settings.TTS_MODEL_NAME
# Strip any provider prefix (e.g. "vertex_ai/model" or "gemini/model") → keep only the model id
model_id = raw_model.split("/")[-1]
- # Normalise short names: "gemini-2-flash-tts" → "gemini-2.5-flash-preview-tts"
- if model_id in ("gemini-2-flash-tts", "gemini-2.5-flash-tts", "flash-tts", "gemini-2.5-flash"):
- model_id = "gemini-2.5-flash-preview-tts"
+ # Normalise short names: "flash-tts" → "gemini-1.5-flash-preview-tts"
+ if model_id in ("gemini-2-flash-tts", "gemini-2.5-flash-tts", "flash-tts", "gemini-1.5-flash", "gemini-1.5-flash-preview-tts"):
+ model_id = "gemini-1.5-flash-preview-tts"
logger.info(f"Normalised model name to: {model_id}")
# Route to Vertex AI ONLY when the key is a Vertex service-account key (starting with "AQ.")
diff --git a/ai-hub/app/core/templates/provisioning/provision.py.j2 b/ai-hub/app/core/templates/provisioning/provision.py.j2
index e507398..8138514 100644
--- a/ai-hub/app/core/templates/provisioning/provision.py.j2
+++ b/ai-hub/app/core/templates/provisioning/provision.py.j2
@@ -13,14 +13,20 @@
# 2. Write agent_config.yaml
print("[*] Writing configuration...")
-with open("agent_config.yaml", "w") as f:
- f.write("""{{ config_yaml }}""")
+config_content = """{{ config_yaml }}"""
+with open("agent_config.yaml", "wb") as f:
+ f.write(config_content.encode("utf-8"))
+ f.flush()
+ os.fsync(f.fileno())
# 3. Download bootstrap_installer.py
installer_url = "{{ base_url }}/api/v1/agent/installer"
print(f"[*] Downloading installer from {installer_url} ...")
try:
urllib.request.urlretrieve(installer_url, "bootstrap_installer.py")
+ # Force sync to avoid SIGBUS/Bus Error on some NAS filesystems during next step
+ if hasattr(os, 'sync'):
+ os.sync()
except Exception as e:
print(f"❌ Failed to download installer: {e}")
sys.exit(1)
@@ -31,8 +37,8 @@
print(f"❌ Downloaded file is too small or corrupt ({size} bytes): {content}")
sys.exit(1)
-# 4. Import and run installer natively to avoid memory Map/SIGBUS errors from fork()
-print("[*] Bootstrapping agent directly in-process...")
+# 4. Run installer. Use exec() to avoid mmap issues on some NAS with normal import.
+print("[*] Bootstrapping agent...")
sys.argv = [
"bootstrap_installer.py",
"--daemon",
@@ -42,13 +48,22 @@
]
try:
- sys.path.insert(0, install_dir)
- import bootstrap_installer
- bootstrap_installer.main()
+ with open("bootstrap_installer.py", "rb") as f:
+ code = f.read()
+
+ # Define a clean globals dict for execution
+ globs = {
+ "__name__": "__main__",
+ "__file__": os.path.abspath("bootstrap_installer.py"),
+ "__builtins__": __builtins__
+ }
+ exec(code, globs)
except SystemExit as e:
if str(e) != "0" and e.code != 0:
print(f"❌ Provisioning failed! Installer exited with code {e}")
sys.exit(e.code if isinstance(e.code, int) else 1)
except Exception as e:
+ import traceback
print(f"❌ Provisioning crashed: {e}")
+ traceback.print_exc()
sys.exit(1)
diff --git a/ai-hub/app/core/templates/provisioning/provision.sh.j2 b/ai-hub/app/core/templates/provisioning/provision.sh.j2
index 0ce2b0e..34234aa 100644
--- a/ai-hub/app/core/templates/provisioning/provision.sh.j2
+++ b/ai-hub/app/core/templates/provisioning/provision.sh.j2
@@ -61,18 +61,31 @@
if [ "$USE_SOURCE" = true ]; then
echo "[*] Falling back to Python Source + Virtualenv execution..."
- SOURCE_URL="{{ base_url }}/api/v1/nodes/provision/{{ node_id }}?token={{ invite_token }}"
- mkdir -p "$INSTALL_DIR/tmp"
- curl -sSLf "$SOURCE_URL" -o provision.py
-
- # Run with -B (no .pyc) and dedicated TMPDIR
- if ! PYTHONDONTWRITEBYTECODE=1 TMPDIR="$INSTALL_DIR/tmp" python3 -B provision.py; then
- echo "❌ Provisioning failed via python bootstrap fallback."
+ # Use system /tmp for bootstrapping on NAS to avoid memory mapping issues on Volume shares
+ export TMPDIR="/tmp"
+ mkdir -p "$TMPDIR"
+
+ # 1. Download bootstrap_installer.py
+ echo "[*] Downloading installer from {{ base_url }}/api/v1/agent/installer ..."
+ if ! curl -sSLf "{{ base_url }}/api/v1/agent/installer" -o bootstrap_installer.py; then
+ echo "❌ Failed to download installer."
+ exit 1
+ fi
+
+ # 2. Run the installer directly
+ echo "[*] Bootstrapping agent via python3..."
+ # Force sync before execution to avoid Bus Error on recently written file handles
+ sync && sleep 1
+ if ! python3 -B bootstrap_installer.py \
+ --hub "{{ base_url }}" \
+ --token "{{ invite_token }}" \
+ --node-id "{{ node_id }}" \
+ --daemon; then
+ echo "❌ Provisioning failed via python bootstrap."
exit 1
fi
- # If the python script ran correctly, it would have already handled the rest of the install!
echo "✅ Python-based bootstrap provisioning successful!"
exit 0
fi
diff --git a/ai-hub/integration_tests/test_agents.py b/ai-hub/integration_tests/test_agents.py
index 0d6c89d..1a87dd4 100644
--- a/ai-hub/integration_tests/test_agents.py
+++ b/ai-hub/integration_tests/test_agents.py
@@ -71,16 +71,19 @@
# 6. Verify Agent Periodical Execution
- print("\\n[test] Waiting 15 seconds to allow background interval scheduler to wake the agent...")
+ print("\n[test] Waiting for background interval scheduler to wake the agent (timeout 60s)...")
import time
- time.sleep(15)
+ messages = []
+ for _ in range(30): # 30 * 2s = 60s
+ r_msgs = client.get(f"{BASE_URL}/sessions/{session_id}/messages", headers=_headers())
+ assert r_msgs.status_code == 200, f"Failed to fetch session messages: {r_msgs.text}"
+ messages = r_msgs.json()["messages"]
+ if any(m["sender"] == "assistant" for m in messages):
+ break
+ time.sleep(2)
- r_msgs = client.get(f"{BASE_URL}/sessions/{session_id}/messages", headers=_headers())
- assert r_msgs.status_code == 200, f"Failed to fetch session messages: {r_msgs.text}"
- messages = r_msgs.json()["messages"]
- print(f"\\n[test] Agent Messages Count: {len(messages)}")
- assert len(messages) > 0, "The agent failed to generate any response during its execution loop! It was not invoked or crashed silently."
- assert any(m["sender"] == "assistant" for m in messages), "No assistant (agent) messages generated in history!"
+ print(f"\n[test] Agent Messages Count: {len(messages)}")
+ assert any(m["sender"] == "assistant" for m in messages), f"The agent failed to generate any response within 60s! History: {messages}"
# 7. Test if agent is in the active list
r_list = client.get(f"{BASE_URL}/agents", headers=_headers())
diff --git a/ai-hub/integration_tests/test_file_sync.py b/ai-hub/integration_tests/test_file_sync.py
index b00fa69..cae4057 100644
--- a/ai-hub/integration_tests/test_file_sync.py
+++ b/ai-hub/integration_tests/test_file_sync.py
@@ -34,7 +34,7 @@
import httpx
# ── Configuration ──────────────────────────────────────────────────────────────
-BASE_URL = os.getenv("SYNC_TEST_BASE_URL", "http://127.0.0.1:8002/api/v1")
+BASE_URL = os.getenv("SYNC_TEST_BASE_URL", "http://127.0.0.1:8002/api/v1/")
USER_ID = os.getenv("SYNC_TEST_USER_ID", "c4401d34-8784-4d6e-93a0-c702bd202b66")
NODE_1 = os.getenv("SYNC_TEST_NODE1", "test-node-1")
NODE_2 = os.getenv("SYNC_TEST_NODE2", "test-node-2")
@@ -44,9 +44,9 @@
LARGE_FILE_SIZE_MB = 20
POLL_INTERVAL = 0.5 # seconds
-# Paths — relative to BASE_URL
-SESSIONS_PATH = "/sessions"
-NODES_PATH = "/nodes"
+# Paths — relative to BASE_URL (NO leading slash expected for correct joining)
+SESSIONS_PATH = "sessions"
+NODES_PATH = "nodes"
# ── Module-level: skip the whole file if nodes are not online ──────────────────
@@ -934,10 +934,31 @@
else:
# (Since the test runner is executed on host but ai_hub is Docker container, we can use docker exec)
cmd = ["docker", "exec", "ai_hub_service", "stat", f"/app/data/mirrors/{workspace_id}"]
- # This should fail if it doesn't exist.
- res_hub = subprocess.run(cmd, capture_output=True, text=True)
- assert res_hub.returncode != 0, f"Server mirror folder still physically exists! stat matched: {res_hub.stdout}"
- assert "No such file or directory" in res_hub.stderr, f"Unexpected error during server stat: {res_hub.stderr}"
+
+ # Smart check: if we are ALREADY inside the container, or docker is missing/failing, check directly
+ container_path = f"/app/data/mirrors/{workspace_id}"
+ if os.path.exists("/.dockerenv") and os.path.exists("/app/data"):
+ # We are likely inside the container
+ assert not os.path.exists(container_path), f"Server mirror folder still physically exists inside container: {container_path}"
+ else:
+ try:
+ res_hub = subprocess.run(cmd, capture_output=True, text=True, timeout=10)
+ if res_hub.returncode == 0:
+ # It exists, so it's a failure
+ assert False, f"Server mirror folder still physically exists on host: {res_hub.stdout}"
+ else:
+ # It should fail with "No such file or directory"
+ assert "No such file or directory" in res_hub.stderr or "No such file or directory" in res_hub.stdout, \
+ f"Unexpected error during server stat: {res_hub.stderr or res_hub.stdout}"
+ except Exception as e:
+ # Fallback — if docker command itself fails, and we are not sure if we are in container,
+ # try a direct path check if the path looks accessible
+ host_mirror_path = "/app/data/mirrors" # If test is run from same mount root
+ if os.path.exists(host_mirror_path):
+ assert not os.path.exists(os.path.join(host_mirror_path, workspace_id)), \
+ f"Server mirror folder still exists via fallback path: {os.path.join(host_mirror_path, workspace_id)}"
+ else:
+ print(f"[⚠️] Could not verify server-side purge (Docker failed and path not found: {e})")
print("[Case Purge] ✅ Server-side physical mirror folder proactively erased")
diff --git a/ai-hub/test.db-shm b/ai-hub/test.db-shm
index c5e2b70..63248ae 100644
--- a/ai-hub/test.db-shm
+++ b/ai-hub/test.db-shm
Binary files differ
diff --git a/ai-hub/test.db-wal b/ai-hub/test.db-wal
index 799be6f..4875607 100644
--- a/ai-hub/test.db-wal
+++ b/ai-hub/test.db-wal
Binary files differ
diff --git a/frontend/build.log b/frontend/build.log
new file mode 100644
index 0000000..67e0258
--- /dev/null
+++ b/frontend/build.log
@@ -0,0 +1,101 @@
+
+> cortex-frontend@0.1.0 build
+> react-scripts build
+
+Creating an optimized production build...
+Browserslist: browsers data (caniuse-lite) is 7 months old. Please run:
+ npx update-browserslist-db@latest
+ Why you should do it regularly: https://github.com/browserslist/update-db#readme
+Browserslist: browsers data (caniuse-lite) is 7 months old. Please run:
+ npx update-browserslist-db@latest
+ Why you should do it regularly: https://github.com/browserslist/update-db#readme
+Compiled with warnings.
+
+[eslint]
+src/App.js
+ Line 35:10: 'userId' is assigned a value but never used no-unused-vars
+ Line 71:6: React Hook useEffect has missing dependencies: 'currentPage' and 'pathToPage'. Either include them or remove the dependency array react-hooks/exhaustive-deps
+ Line 132:6: React Hook useEffect has a missing dependency: 'authenticatedPages'. Either include it or remove the dependency array react-hooks/exhaustive-deps
+
+src/features/agents/components/AgentDrillDown.js
+ Line 135:8: React Hook useEffect has a missing dependency: 'fetchData'. Either include it or remove the dependency array react-hooks/exhaustive-deps
+
+src/features/agents/components/AgentHarnessPage.js
+ Line 2:21: 'getAgentTelemetry' is defined but never used no-unused-vars
+ Line 3:10: 'AreaChart' is defined but never used no-unused-vars
+ Line 3:21: 'Area' is defined but never used no-unused-vars
+ Line 3:27: 'XAxis' is defined but never used no-unused-vars
+ Line 3:34: 'YAxis' is defined but never used no-unused-vars
+ Line 3:41: 'Tooltip' is defined but never used no-unused-vars
+ Line 3:50: 'ResponsiveContainer' is defined but never used no-unused-vars
+
+src/features/chat/components/ChatWindow.js
+ Line 43:18: The ref value 'audioRef.current' will likely have changed by the time this effect cleanup function runs. If this ref points to a node rendered by React, copy 'audioRef.current' to a variable inside the effect, and use that variable in the cleanup function react-hooks/exhaustive-deps
+
+src/features/nodes/pages/NodesPage.js
+ Line 12:12: 'groups' is assigned a value but never used no-unused-vars
+
+src/features/profile/pages/ProfilePage.js
+ Line 18:12: 'providerStatuses' is assigned a value but never used no-unused-vars
+ Line 153:11: 'handleGeneralPreferenceUpdate' is assigned a value but never used no-unused-vars
+
+src/features/settings/components/cards/IdentityGovernanceCard.js
+ Line 12:7: 'loadGroups' is assigned a value but never used no-unused-vars
+
+src/features/settings/components/cards/NetworkIdentityCard.js
+ Line 11:7: 'fileInputRef' is assigned a value but never used no-unused-vars
+
+src/features/settings/pages/SettingsPage.js
+ Line 113:8: React Hook useEffect has a missing dependency: 'loadUserProfile'. Either include it or remove the dependency array react-hooks/exhaustive-deps
+
+src/features/swarm/hooks/useSwarmControl.js
+ Line 110:6: React Hook useEffect has a missing dependency: 'onNewSessionCreated'. Either include it or remove the dependency array. If 'onNewSessionCreated' changes too often, find the parent component that defines it and wrap that definition in useCallback react-hooks/exhaustive-deps
+ Line 194:6: React Hook useCallback has missing dependencies: 'onNewSessionCreated' and 'userConfigData?.effective?.llm?.active_provider'. Either include them or remove the dependency array. If 'onNewSessionCreated' changes too often, find the parent component that defines it and wrap that definition in useCallback react-hooks/exhaustive-deps
+
+src/features/swarm/pages/SwarmControlPage.js
+ Line 7:3: 'detachNodeFromSession' is defined but never used no-unused-vars
+ Line 106:10: 'sessionNodeStatus' is assigned a value but never used no-unused-vars
+ Line 249:6: React Hook useEffect has a missing dependency: 'fetchNodeInfo'. Either include it or remove the dependency array react-hooks/exhaustive-deps
+
+src/features/voice/hooks/useVoiceChat.js
+ Line 8:3: 'createSession' is defined but never used no-unused-vars
+ Line 213:6: React Hook useEffect has a missing dependency: 'fetchTokenUsage'. Either include it or remove the dependency array react-hooks/exhaustive-deps
+
+src/services/api/userService.js
+ Line 4:7: 'USERS_LOGOUT_ENDPOINT' is assigned a value but never used no-unused-vars
+ Line 5:7: 'USERS_ME_ENDPOINT' is assigned a value but never used no-unused-vars
+
+src/shared/components/FileSystemNavigator.js
+ Line 114:8: React Hook useEffect has a missing dependency: 'handleView'. Either include it or remove the dependency array react-hooks/exhaustive-deps
+
+src/shared/components/MultiNodeConsole.js
+ Line 60:8: React Hook useEffect has missing dependencies: 'isAIProcessing', 'onMount', and 'onUnmount'. Either include them or remove the dependency array. If 'onMount' changes too often, find the parent component that defines it and wrap that definition in useCallback react-hooks/exhaustive-deps
+ Line 208:25: Expected a default case default-case
+ Line 251:8: React Hook useEffect has a missing dependency: 'attachedNodeIds'. Either include it or remove the dependency array react-hooks/exhaustive-deps
+ Line 251:9: React Hook useEffect has a complex expression in the dependency array. Extract it to a separate variable so it can be statically checked react-hooks/exhaustive-deps
+
+src/shared/components/SessionSidebar.js
+ Line 19:8: React Hook useEffect has a missing dependency: 'fetchSessions'. Either include it or remove the dependency array react-hooks/exhaustive-deps
+
+Search for the keywords to learn more about each warning.
+To ignore, add // eslint-disable-next-line to the line before.
+
+File sizes after gzip:
+
+ 324.01 kB (+11.4 kB) build/static/js/main.cbb7862e.js
+ 16.42 kB build/static/css/main.cc910544.css
+ 1.78 kB build/static/js/453.23d913eb.chunk.js
+
+The project was built assuming it is hosted at /.
+You can control this with the homepage field in your package.json.
+
+The build folder is ready to be deployed.
+You may serve it with a static server:
+
+ npm install -g serve
+ serve -s build
+
+Find out more about deployment here:
+
+ https://cra.link/deployment
+
diff --git a/frontend/src/App.js b/frontend/src/App.js
index be947d0..0029644 100644
--- a/frontend/src/App.js
+++ b/frontend/src/App.js
@@ -1,15 +1,15 @@
// App.js
import React, { useState, useEffect } from "react";
import { Navbar } from "./shared/components";
-import { HomePage } from "./features/chat";
+import HomePage from "./features/chat/pages/HomePage";
import { VoiceChatPage } from "./features/voice";
import { SwarmControlPage } from "./features/swarm";
-import { LoginPage } from "./features/auth";
-import { SettingsPage } from "./features/settings";
-import { ProfilePage } from "./features/profile";
-import { NodesPage } from "./features/nodes";
-import { SkillsPage } from "./features/skills";
import { AgentHarnessPage, AgentDrillDown } from "./features/agents";
+import LoginPage from "./features/auth/pages/LoginPage";
+import SettingsPage from "./features/settings/pages/SettingsPage";
+import ProfilePage from "./features/profile/pages/ProfilePage";
+import { NodesPage } from "./features/nodes";
+import SkillsPage from "./features/skills/pages/SkillsPage";
import { getUserStatus, logout, getUserProfile } from "./services/apiService";
const Icon = ({ path, onClick, className }) => (
@@ -35,38 +35,44 @@
const [userId, setUserId] = useState(null);
const [userProfile, setUserProfile] = useState(null);
- const authenticatedPages = ["voice-chat", "swarm-control", "settings", "profile", "nodes", "skills", "agents-harness", "agents-drilldown"];
+ const authenticatedPages = ["voice-chat", "swarm-control", "agents", "agents-drilldown", "settings", "profile", "nodes", "skills"];
const pageToPath = {
"home": "/",
"voice-chat": "/voice",
"swarm-control": "/swarm",
+ "agents": "/agents",
+ "agents-drilldown": "/agents-drilldown",
"settings": "/settings",
"profile": "/profile",
"nodes": "/nodes",
"skills": "/skills",
- "login": "/login",
- "agents-harness": "/agents"
+ "login": "/login"
};
-
- const getPageFromPath = (path) => {
- if (path === "/agents") return "agents-harness";
- if (path.startsWith("/agents/drilldown/")) return "agents-drilldown";
- const pathToPage = Object.fromEntries(Object.entries(pageToPath).map(([pk, pv]) => [pv, pk]));
- return pathToPage[path] || "home";
- };
+ const pathToPage = Object.fromEntries(Object.entries(pageToPath).map(([pk, pv]) => [pv, pk]));
// Sync state with URL on mount and handle popstate
useEffect(() => {
const handlePopState = () => {
const path = window.location.pathname;
- setCurrentPage(getPageFromPath(path));
+ if (path.startsWith("/agents/drilldown/")) {
+ setCurrentPage("agents-drilldown");
+ } else {
+ const page = pathToPage[path] || "home";
+ setCurrentPage(page);
+ }
};
window.addEventListener("popstate", handlePopState);
// Initial sync
const initialPath = window.location.pathname;
- const initialPage = getPageFromPath(initialPath);
+ let initialPage = "home";
+ if (initialPath.startsWith("/agents/drilldown/")) {
+ initialPage = "agents-drilldown";
+ } else {
+ initialPage = pathToPage[initialPath] || "home";
+ }
+
if (initialPage !== currentPage) {
setCurrentPage(initialPage);
}
@@ -148,22 +154,25 @@
}
};
- const handleNavigate = (pageOrPath) => {
- let targetPage = pageOrPath;
- let targetPath = pageToPath[pageOrPath];
-
- // If it's a raw path like /agents/drilldown/123
- if (pageOrPath.startsWith('/')) {
- targetPath = pageOrPath;
- targetPage = getPageFromPath(pageOrPath);
+ const handleNavigate = (page) => {
+ // Handle parameterized paths (paths starting with '/')
+ if (page.startsWith("/")) {
+ const path = page;
+ if (path.startsWith("/agents/drilldown/")) {
+ setCurrentPage("agents-drilldown");
+ } else {
+ setCurrentPage(pathToPage[path] || "home");
+ }
+ window.history.pushState({}, "", path);
+ return;
}
- if (authenticatedPages.includes(targetPage) && !isLoggedIn) {
+ if (authenticatedPages.includes(page) && !isLoggedIn) {
setCurrentPage("login");
window.history.pushState({}, "", pageToPath["login"]);
} else {
- setCurrentPage(targetPage);
- window.history.pushState({}, "", targetPath || "/");
+ setCurrentPage(page);
+ window.history.pushState({}, "", pageToPath[page] || "/");
}
};
@@ -192,11 +201,12 @@
return
- curl -sSL '{window.location.origin}/api/v1/nodes/provision/sh/{node.node_id}?token={node.invite_token}' | bash
-
-
+
+
+ curl -sSL '{window.location.origin}/api/v1/nodes/provision/{node.node_id}?token={node.invite_token}' | python3
+
Installs self-contained Cortex Agent daemon with no dependencies required (Linux Native).
+Best for terminal-only servers. Installs agent as a persistent service.
{errorMessage}
- -{successMessage}
- -