diff --git a/agent-node/agent_node/config.py b/agent-node/agent_node/config.py index cdc367c..ba29271 100644 --- a/agent-node/agent_node/config.py +++ b/agent-node/agent_node/config.py @@ -40,6 +40,7 @@ HEALTH_REPORT_INTERVAL = int(os.getenv("HEALTH_REPORT_INTERVAL", _config["health_report_interval"])) MAX_SKILL_WORKERS = int(os.getenv("MAX_SKILL_WORKERS", _config["max_skill_workers"])) +DEBUG_GRPC = os.getenv("DEBUG_GRPC", "false").lower() == "true" SECRET_KEY = os.getenv("AGENT_SECRET_KEY", _config.get("secret_key", "dev-secret-key-1337")) # These are still available but likely replaced by AUTH_TOKEN / TLS_ENABLED logic diff --git a/agent-node/agent_node/node.py b/agent-node/agent_node/node.py index 45a88a8..f061dd0 100644 --- a/agent-node/agent_node/node.py +++ b/agent-node/agent_node/node.py @@ -15,7 +15,7 @@ from agent_node.core.watcher import WorkspaceWatcher from agent_node.utils.auth import verify_task_signature from agent_node.utils.network import get_secure_stub -from agent_node.config import NODE_ID, NODE_DESC, AUTH_TOKEN, HEALTH_REPORT_INTERVAL, MAX_SKILL_WORKERS +from agent_node.config import NODE_ID, NODE_DESC, AUTH_TOKEN, HEALTH_REPORT_INTERVAL, MAX_SKILL_WORKERS, DEBUG_GRPC class AgentNode: @@ -154,11 +154,24 @@ try: def _gen(): # Initial announcement for routing identity - yield agent_pb2.ClientTaskMessage( + announce_msg = agent_pb2.ClientTaskMessage( announce=agent_pb2.NodeAnnounce(node_id=self.node_id) ) + if DEBUG_GRPC: + print(f"[*] [DEBUG-gRPC] OUTBOUND: announce | {announce_msg}", flush=True) + yield announce_msg + while True: - yield self.task_queue.get() + out_msg = self.task_queue.get() + if DEBUG_GRPC: + kind = out_msg.WhichOneof('payload') + if kind == 'file_sync' and out_msg.file_sync.HasField('file_data'): + print(f"[*] [DEBUG-gRPC] OUTBOUND: {kind} (file_data, path={out_msg.file_sync.file_data.path}, size={len(out_msg.file_sync.file_data.chunk)})", flush=True) + elif kind == 'skill_event' and out_msg.skill_event.WhichOneof('data') == 
'terminal_out': + print(f"[*] [DEBUG-gRPC] OUTBOUND: {kind} (terminal_out, size={len(out_msg.skill_event.terminal_out)})", flush=True) + else: + print(f"[*] [DEBUG-gRPC] OUTBOUND: {kind} | {out_msg}", flush=True) + yield out_msg responses = self.stub.TaskStream(_gen()) print(f"[*] Task Stream Online: {self.node_id}", flush=True) @@ -174,7 +187,10 @@ def _process_server_message(self, msg): kind = msg.WhichOneof('payload') - print(f"[*] Inbound: {kind}", flush=True) + if DEBUG_GRPC: + print(f"[*] [DEBUG-gRPC] INBOUND: {kind} | {msg}", flush=True) + else: + print(f"[*] Inbound: {kind}", flush=True) if kind == 'task_request': self._handle_task(msg.task_request) @@ -202,6 +218,10 @@ elif kind == 'file_sync': self._handle_file_sync(msg.file_sync) + + elif kind == 'policy_update': + print(f" [🔒] Live Sandbox Policy Update Received.") + self.sandbox.sync(msg.policy_update) def _on_sync_delta(self, session_id, file_payload): """Callback from watcher to push local changes to server.""" diff --git a/agent-node/agent_node/skills/shell.py b/agent-node/agent_node/skills/shell.py index 1740702..d267657 100644 --- a/agent-node/agent_node/skills/shell.py +++ b/agent-node/agent_node/skills/shell.py @@ -2,6 +2,7 @@ import pty import select import threading +import time import termios import struct import fcntl @@ -47,11 +48,43 @@ if fd in r: data = os.read(fd, 4096) if not data: break + + decoded = data.decode("utf-8", errors="replace") + + # Blocking/Sync logic + with self.lock: + active_tid = sess.get("active_task") + marker = sess.get("marker") + if active_tid and marker: + sess["buffer"] += decoded + if marker in decoded: + # Marker found! 
Extract exit code + # Format: ...marker [exit_code]\n + try: + parts = sess["buffer"].split(marker) + # The pure stdout is everything before the marker + pure_stdout = parts[0] + # The exit code is right after the marker + after_marker = parts[1].strip().split() + exit_code = int(after_marker[0]) if after_marker else 0 + + sess["result"]["stdout"] = pure_stdout + sess["result"]["status"] = 1 if exit_code == 0 else 2 # Success=1 for Skill mgr + sess["event"].set() + + # We don't want the marker itself to spam the UI stream + # So we only send the part before the marker + decoded = pure_stdout + except Exception as e: + print(f" [🐚⚠️] Marker parsing failed: {e}") + sess["event"].set() + # Stream raw terminal output back if on_event: event = agent_pb2.SkillEvent( session_id=session_id, - terminal_out=data.decode("utf-8", errors="replace") + task_id=sess.get("active_task") or "", + terminal_out=decoded ) on_event(agent_pb2.ClientTaskMessage(skill_event=event)) except (EOFError, OSError): @@ -80,7 +113,7 @@ raw_bytes = raw_payload["tty"] sess = self._ensure_session(session_id, None, on_event) os.write(sess["fd"], raw_bytes.encode("utf-8")) - on_complete(task.task_id, {"stdout": "", "status": 1}, task.trace_id) + on_complete(task.task_id, {"stdout": "", "status": 0}, task.trace_id) return True # 2. 
Window Resize @@ -92,23 +125,31 @@ s = struct.pack('HHHH', rows, cols, 0, 0) fcntl.ioctl(sess["fd"], termios.TIOCSWINSZ, s) print(f" [🐚] Terminal Resized to {cols}x{rows}") - on_complete(task.task_id, {"stdout": f"resized to {cols}x{rows}", "status": 1}, task.trace_id) + on_complete(task.task_id, {"stdout": f"resized to {cols}x{rows}", "status": 0}, task.trace_id) return True except Exception as pe: print(f" [🐚] Transparent TTY Fail: {pe}") return False def execute(self, task, sandbox, on_complete, on_event=None): - """Dispatches command string to the persistent PTY shell.""" + """Dispatches command string to the persistent PTY shell and WAITS for completion.""" + session_id = task.session_id or "default-session" + tid = task.task_id try: cmd = task.payload_json - session_id = task.session_id or "default-session" - + # --- Legacy Full-Command Execution (Sandboxed) --- allowed, status_msg = sandbox.verify(cmd) if not allowed: - err_msg = f"SANDBOX_VIOLATION: {status_msg}" - return on_complete(task.task_id, {"stderr": err_msg, "status": 2}, task.trace_id) + err_msg = f"\r\n[System] Command blocked: {status_msg}\r\n" + if on_event: + event = agent_pb2.SkillEvent( + session_id=session_id, task_id=tid, + terminal_out=err_msg + ) + on_event(agent_pb2.ClientTaskMessage(skill_event=event)) + + return on_complete(tid, {"stderr": f"SANDBOX_VIOLATION: {status_msg}", "status": 2}, task.trace_id) # Resolve CWD jail cwd = None @@ -123,17 +164,47 @@ # Handle Session Persistent Process sess = self._ensure_session(session_id, cwd, on_event) - # Input injection - print(f" [🐚] Shell In (Legacy): {cmd}") - full_cmd = cmd if cmd.endswith("\n") else cmd + "\n" - os.write(sess["fd"], full_cmd.encode("utf-8")) + # --- Blocking Wait Logic --- + marker = f"__CORTEX_FIN_SH_{int(time.time())}__" + event = threading.Event() + result_container = {"stdout": "", "status": 1} # 1 = Error/Fail by default + + # Register waiter in session state + with self.lock: + sess["active_task"] = tid + 
sess["marker"] = marker + sess["event"] = event + sess["buffer"] = "" + sess["result"] = result_container - # Return success immediately - output will stream via on_event - on_complete(task.task_id, {"stdout": "", "status": 1}, task.trace_id) + # Input injection: execute command then echo marker and exit code + print(f" [🐚] Executing (Blocking): {cmd}") + # We use a trick: execute command, then echo marker and return code. + # We use ';' to chain even if first fails, unless it's a structural error. + # 12-factor bash: ( cmd ) ; echo marker $? + full_input = f"({cmd}) ; echo \"{marker} $?\"\n" + os.write(sess["fd"], full_input.encode("utf-8")) + + # Wait for completion (triggered by reader) + # Use a slightly longer timeout than the Hub's limit to avoid race, + # though the Hub will cancel us if it gets tired first. + timeout = (task.timeout_ms / 1000.0) if task.timeout_ms > 0 else 60.0 + if event.wait(timeout): + # Success! reader found the marker + on_complete(tid, result_container, task.trace_id) + else: + # Timeout on node side + print(f" [🐚⚠️] Task {tid} timed out on node.") + on_complete(tid, {"stdout": sess["buffer"], "stderr": "TIMEOUT", "status": 2}, task.trace_id) + + # Cleanup session task state + with self.lock: + if sess.get("active_task") == tid: + sess["active_task"] = None except Exception as e: - print(f" [🐚] Execute Error: {e}") - on_complete(task.task_id, {"stderr": str(e), "status": 2}, task.trace_id) + print(f" [🐚❌] Execute Error for {tid}: {e}") + on_complete(tid, {"stderr": str(e), "status": 2}, task.trace_id) def cancel(self, task_id: str): """Cancels an active task — for persistent shell, this sends a SIGINT (Ctrl+C).""" diff --git a/agent-node/protos/agent.proto b/agent-node/protos/agent.proto index 50992ef..46b74fa 100644 --- a/agent-node/protos/agent.proto +++ b/agent-node/protos/agent.proto @@ -83,6 +83,7 @@ TaskClaimResponse claim_status = 3; TaskCancelRequest task_cancel = 4; FileSyncMessage file_sync = 5; // NEW: Ghost Mirror Sync + 
SandboxPolicy policy_update = 6; // NEW: Live Policy Update } } diff --git a/agent-node/protos/agent_pb2.py b/agent-node/protos/agent_pb2.py index b02c094..3075f56 100644 --- a/agent-node/protos/agent_pb2.py +++ b/agent-node/protos/agent_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! -# source: agent.proto +# source: protos/agent.proto # Protobuf Python Version: 4.25.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor @@ -14,83 +14,83 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x61gent.proto\x12\x05\x61gent\"\xde\x01\n\x13RegistrationRequest\x12\x0f\n\x07node_id\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x12\n\nauth_token\x18\x03 \x01(\t\x12\x18\n\x10node_description\x18\x04 \x01(\t\x12\x42\n\x0c\x63\x61pabilities\x18\x05 \x03(\x0b\x32,.agent.RegistrationRequest.CapabilitiesEntry\x1a\x33\n\x11\x43\x61pabilitiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc5\x01\n\rSandboxPolicy\x12\'\n\x04mode\x18\x01 \x01(\x0e\x32\x19.agent.SandboxPolicy.Mode\x12\x18\n\x10\x61llowed_commands\x18\x02 \x03(\t\x12\x17\n\x0f\x64\x65nied_commands\x18\x03 \x03(\t\x12\x1a\n\x12sensitive_commands\x18\x04 \x03(\t\x12\x18\n\x10working_dir_jail\x18\x05 \x01(\t\"\"\n\x04Mode\x12\n\n\x06STRICT\x10\x00\x12\x0e\n\nPERMISSIVE\x10\x01\"x\n\x14RegistrationResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12\x15\n\rerror_message\x18\x02 \x01(\t\x12\x12\n\nsession_id\x18\x03 \x01(\t\x12$\n\x06policy\x18\x04 \x01(\x0b\x32\x14.agent.SandboxPolicy\"\xa9\x02\n\x11\x43lientTaskMessage\x12,\n\rtask_response\x18\x01 \x01(\x0b\x32\x13.agent.TaskResponseH\x00\x12-\n\ntask_claim\x18\x02 \x01(\x0b\x32\x17.agent.TaskClaimRequestH\x00\x12,\n\rbrowser_event\x18\x03 \x01(\x0b\x32\x13.agent.BrowserEventH\x00\x12\'\n\x08\x61nnounce\x18\x04 \x01(\x0b\x32\x13.agent.NodeAnnounceH\x00\x12+\n\tfile_sync\x18\x05 
\x01(\x0b\x32\x16.agent.FileSyncMessageH\x00\x12(\n\x0bskill_event\x18\x06 \x01(\x0b\x32\x11.agent.SkillEventH\x00\x42\t\n\x07payload\"y\n\nSkillEvent\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x16\n\x0cterminal_out\x18\x03 \x01(\tH\x00\x12\x10\n\x06prompt\x18\x04 \x01(\tH\x00\x12\x14\n\nkeep_alive\x18\x05 \x01(\x08H\x00\x42\x06\n\x04\x64\x61ta\"\x1f\n\x0cNodeAnnounce\x12\x0f\n\x07node_id\x18\x01 \x01(\t\"\x87\x01\n\x0c\x42rowserEvent\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12,\n\x0b\x63onsole_msg\x18\x02 \x01(\x0b\x32\x15.agent.ConsoleMessageH\x00\x12,\n\x0bnetwork_req\x18\x03 \x01(\x0b\x32\x15.agent.NetworkRequestH\x00\x42\x07\n\x05\x65vent\"\x8d\x02\n\x11ServerTaskMessage\x12*\n\x0ctask_request\x18\x01 \x01(\x0b\x32\x12.agent.TaskRequestH\x00\x12\x31\n\x10work_pool_update\x18\x02 \x01(\x0b\x32\x15.agent.WorkPoolUpdateH\x00\x12\x30\n\x0c\x63laim_status\x18\x03 \x01(\x0b\x32\x18.agent.TaskClaimResponseH\x00\x12/\n\x0btask_cancel\x18\x04 \x01(\x0b\x32\x18.agent.TaskCancelRequestH\x00\x12+\n\tfile_sync\x18\x05 \x01(\x0b\x32\x16.agent.FileSyncMessageH\x00\x42\t\n\x07payload\"$\n\x11TaskCancelRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\"\xd1\x01\n\x0bTaskRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x11\n\ttask_type\x18\x02 \x01(\t\x12\x16\n\x0cpayload_json\x18\x03 \x01(\tH\x00\x12.\n\x0e\x62rowser_action\x18\x07 \x01(\x0b\x32\x14.agent.BrowserActionH\x00\x12\x12\n\ntimeout_ms\x18\x04 \x01(\x05\x12\x10\n\x08trace_id\x18\x05 \x01(\t\x12\x11\n\tsignature\x18\x06 \x01(\t\x12\x12\n\nsession_id\x18\x08 \x01(\tB\t\n\x07payload\"\xa0\x02\n\rBrowserAction\x12/\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x1f.agent.BrowserAction.ActionType\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x10\n\x08selector\x18\x03 \x01(\t\x12\x0c\n\x04text\x18\x04 \x01(\t\x12\x12\n\nsession_id\x18\x05 \x01(\t\x12\t\n\x01x\x18\x06 \x01(\x05\x12\t\n\x01y\x18\x07 
\x01(\x05\"\x86\x01\n\nActionType\x12\x0c\n\x08NAVIGATE\x10\x00\x12\t\n\x05\x43LICK\x10\x01\x12\x08\n\x04TYPE\x10\x02\x12\x0e\n\nSCREENSHOT\x10\x03\x12\x0b\n\x07GET_DOM\x10\x04\x12\t\n\x05HOVER\x10\x05\x12\n\n\x06SCROLL\x10\x06\x12\t\n\x05\x43LOSE\x10\x07\x12\x08\n\x04\x45VAL\x10\x08\x12\x0c\n\x08GET_A11Y\x10\t\"\xe0\x02\n\x0cTaskResponse\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12*\n\x06status\x18\x02 \x01(\x0e\x32\x1a.agent.TaskResponse.Status\x12\x0e\n\x06stdout\x18\x03 \x01(\t\x12\x0e\n\x06stderr\x18\x04 \x01(\t\x12\x10\n\x08trace_id\x18\x05 \x01(\t\x12\x35\n\tartifacts\x18\x06 \x03(\x0b\x32\".agent.TaskResponse.ArtifactsEntry\x12\x30\n\x0e\x62rowser_result\x18\x07 \x01(\x0b\x32\x16.agent.BrowserResponseH\x00\x1a\x30\n\x0e\x41rtifactsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\"<\n\x06Status\x12\x0b\n\x07SUCCESS\x10\x00\x12\t\n\x05\x45RROR\x10\x01\x12\x0b\n\x07TIMEOUT\x10\x02\x12\r\n\tCANCELLED\x10\x03\x42\x08\n\x06result\"\xdc\x01\n\x0f\x42rowserResponse\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x10\n\x08snapshot\x18\x03 \x01(\x0c\x12\x13\n\x0b\x64om_content\x18\x04 \x01(\t\x12\x11\n\ta11y_tree\x18\x05 \x01(\t\x12\x13\n\x0b\x65val_result\x18\x06 \x01(\t\x12.\n\x0f\x63onsole_history\x18\x07 \x03(\x0b\x32\x15.agent.ConsoleMessage\x12.\n\x0fnetwork_history\x18\x08 \x03(\x0b\x32\x15.agent.NetworkRequest\"C\n\x0e\x43onsoleMessage\x12\r\n\x05level\x18\x01 \x01(\t\x12\x0c\n\x04text\x18\x02 \x01(\t\x12\x14\n\x0ctimestamp_ms\x18\x03 \x01(\x03\"h\n\x0eNetworkRequest\x12\x0e\n\x06method\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\x05\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x12\n\nlatency_ms\x18\x05 \x01(\x03\",\n\x0eWorkPoolUpdate\x12\x1a\n\x12\x61vailable_task_ids\x18\x01 \x03(\t\"4\n\x10TaskClaimRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x0f\n\x07node_id\x18\x02 \x01(\t\"E\n\x11TaskClaimResponse\x12\x0f\n\x07task_id\x18\x01 
\x01(\t\x12\x0f\n\x07granted\x18\x02 \x01(\x08\x12\x0e\n\x06reason\x18\x03 \x01(\t\"\xe6\x02\n\tHeartbeat\x12\x0f\n\x07node_id\x18\x01 \x01(\t\x12\x19\n\x11\x63pu_usage_percent\x18\x02 \x01(\x02\x12\x1c\n\x14memory_usage_percent\x18\x03 \x01(\x02\x12\x1b\n\x13\x61\x63tive_worker_count\x18\x04 \x01(\x05\x12\x1b\n\x13max_worker_capacity\x18\x05 \x01(\x05\x12\x16\n\x0estatus_message\x18\x06 \x01(\t\x12\x18\n\x10running_task_ids\x18\x07 \x03(\t\x12\x11\n\tcpu_count\x18\x08 \x01(\x05\x12\x16\n\x0ememory_used_gb\x18\t \x01(\x02\x12\x17\n\x0fmemory_total_gb\x18\n \x01(\x02\x12\x1a\n\x12\x63pu_usage_per_core\x18\x0b \x03(\x02\x12\x14\n\x0c\x63pu_freq_mhz\x18\x0c \x01(\x02\x12\x1b\n\x13memory_available_gb\x18\r \x01(\x02\x12\x10\n\x08load_avg\x18\x0e \x03(\x02\"-\n\x13HealthCheckResponse\x12\x16\n\x0eserver_time_ms\x18\x01 \x01(\x03\"\xe4\x01\n\x0f\x46ileSyncMessage\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12,\n\x08manifest\x18\x02 \x01(\x0b\x32\x18.agent.DirectoryManifestH\x00\x12\'\n\tfile_data\x18\x03 \x01(\x0b\x32\x12.agent.FilePayloadH\x00\x12#\n\x06status\x18\x04 \x01(\x0b\x32\x11.agent.SyncStatusH\x00\x12%\n\x07\x63ontrol\x18\x05 \x01(\x0b\x32\x12.agent.SyncControlH\x00\x12\x0f\n\x07task_id\x18\x06 \x01(\tB\t\n\x07payload\"\xab\x02\n\x0bSyncControl\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.agent.SyncControl.Action\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\x15\n\rrequest_paths\x18\x03 \x03(\t\x12\x0f\n\x07\x63ontent\x18\x04 \x01(\x0c\x12\x0e\n\x06is_dir\x18\x05 \x01(\x08\"\xaa\x01\n\x06\x41\x63tion\x12\x12\n\x0eSTART_WATCHING\x10\x00\x12\x11\n\rSTOP_WATCHING\x10\x01\x12\x08\n\x04LOCK\x10\x02\x12\n\n\x06UNLOCK\x10\x03\x12\x14\n\x10REFRESH_MANIFEST\x10\x04\x12\n\n\x06RESYNC\x10\x05\x12\x08\n\x04LIST\x10\x06\x12\x08\n\x04READ\x10\x07\x12\t\n\x05WRITE\x10\x08\x12\n\n\x06\x44\x45LETE\x10\t\x12\t\n\x05PURGE\x10\n\x12\x0b\n\x07\x43LEANUP\x10\x0b\"F\n\x11\x44irectoryManifest\x12\x11\n\troot_path\x18\x01 \x01(\t\x12\x1e\n\x05\x66iles\x18\x02 
\x03(\x0b\x32\x0f.agent.FileInfo\"D\n\x08\x46ileInfo\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x0c\n\x04size\x18\x02 \x01(\x03\x12\x0c\n\x04hash\x18\x03 \x01(\t\x12\x0e\n\x06is_dir\x18\x04 \x01(\x08\"_\n\x0b\x46ilePayload\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\r\n\x05\x63hunk\x18\x02 \x01(\x0c\x12\x13\n\x0b\x63hunk_index\x18\x03 \x01(\x05\x12\x10\n\x08is_final\x18\x04 \x01(\x08\x12\x0c\n\x04hash\x18\x05 \x01(\t\"\xa0\x01\n\nSyncStatus\x12$\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x16.agent.SyncStatus.Code\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x17\n\x0freconcile_paths\x18\x03 \x03(\t\"B\n\x04\x43ode\x12\x06\n\x02OK\x10\x00\x12\t\n\x05\x45RROR\x10\x01\x12\x16\n\x12RECONCILE_REQUIRED\x10\x02\x12\x0f\n\x0bIN_PROGRESS\x10\x03\x32\xe9\x01\n\x11\x41gentOrchestrator\x12L\n\x11SyncConfiguration\x12\x1a.agent.RegistrationRequest\x1a\x1b.agent.RegistrationResponse\x12\x44\n\nTaskStream\x12\x18.agent.ClientTaskMessage\x1a\x18.agent.ServerTaskMessage(\x01\x30\x01\x12@\n\x0cReportHealth\x12\x10.agent.Heartbeat\x1a\x1a.agent.HealthCheckResponse(\x01\x30\x01\x62\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x12protos/agent.proto\x12\x05\x61gent\"\xde\x01\n\x13RegistrationRequest\x12\x0f\n\x07node_id\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x12\n\nauth_token\x18\x03 \x01(\t\x12\x18\n\x10node_description\x18\x04 \x01(\t\x12\x42\n\x0c\x63\x61pabilities\x18\x05 \x03(\x0b\x32,.agent.RegistrationRequest.CapabilitiesEntry\x1a\x33\n\x11\x43\x61pabilitiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc5\x01\n\rSandboxPolicy\x12\'\n\x04mode\x18\x01 \x01(\x0e\x32\x19.agent.SandboxPolicy.Mode\x12\x18\n\x10\x61llowed_commands\x18\x02 \x03(\t\x12\x17\n\x0f\x64\x65nied_commands\x18\x03 \x03(\t\x12\x1a\n\x12sensitive_commands\x18\x04 \x03(\t\x12\x18\n\x10working_dir_jail\x18\x05 \x01(\t\"\"\n\x04Mode\x12\n\n\x06STRICT\x10\x00\x12\x0e\n\nPERMISSIVE\x10\x01\"x\n\x14RegistrationResponse\x12\x0f\n\x07success\x18\x01 
\x01(\x08\x12\x15\n\rerror_message\x18\x02 \x01(\t\x12\x12\n\nsession_id\x18\x03 \x01(\t\x12$\n\x06policy\x18\x04 \x01(\x0b\x32\x14.agent.SandboxPolicy\"\xa9\x02\n\x11\x43lientTaskMessage\x12,\n\rtask_response\x18\x01 \x01(\x0b\x32\x13.agent.TaskResponseH\x00\x12-\n\ntask_claim\x18\x02 \x01(\x0b\x32\x17.agent.TaskClaimRequestH\x00\x12,\n\rbrowser_event\x18\x03 \x01(\x0b\x32\x13.agent.BrowserEventH\x00\x12\'\n\x08\x61nnounce\x18\x04 \x01(\x0b\x32\x13.agent.NodeAnnounceH\x00\x12+\n\tfile_sync\x18\x05 \x01(\x0b\x32\x16.agent.FileSyncMessageH\x00\x12(\n\x0bskill_event\x18\x06 \x01(\x0b\x32\x11.agent.SkillEventH\x00\x42\t\n\x07payload\"y\n\nSkillEvent\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x16\n\x0cterminal_out\x18\x03 \x01(\tH\x00\x12\x10\n\x06prompt\x18\x04 \x01(\tH\x00\x12\x14\n\nkeep_alive\x18\x05 \x01(\x08H\x00\x42\x06\n\x04\x64\x61ta\"\x1f\n\x0cNodeAnnounce\x12\x0f\n\x07node_id\x18\x01 \x01(\t\"\x87\x01\n\x0c\x42rowserEvent\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12,\n\x0b\x63onsole_msg\x18\x02 \x01(\x0b\x32\x15.agent.ConsoleMessageH\x00\x12,\n\x0bnetwork_req\x18\x03 \x01(\x0b\x32\x15.agent.NetworkRequestH\x00\x42\x07\n\x05\x65vent\"\xbc\x02\n\x11ServerTaskMessage\x12*\n\x0ctask_request\x18\x01 \x01(\x0b\x32\x12.agent.TaskRequestH\x00\x12\x31\n\x10work_pool_update\x18\x02 \x01(\x0b\x32\x15.agent.WorkPoolUpdateH\x00\x12\x30\n\x0c\x63laim_status\x18\x03 \x01(\x0b\x32\x18.agent.TaskClaimResponseH\x00\x12/\n\x0btask_cancel\x18\x04 \x01(\x0b\x32\x18.agent.TaskCancelRequestH\x00\x12+\n\tfile_sync\x18\x05 \x01(\x0b\x32\x16.agent.FileSyncMessageH\x00\x12-\n\rpolicy_update\x18\x06 \x01(\x0b\x32\x14.agent.SandboxPolicyH\x00\x42\t\n\x07payload\"$\n\x11TaskCancelRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\"\xd1\x01\n\x0bTaskRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x11\n\ttask_type\x18\x02 \x01(\t\x12\x16\n\x0cpayload_json\x18\x03 \x01(\tH\x00\x12.\n\x0e\x62rowser_action\x18\x07 
\x01(\x0b\x32\x14.agent.BrowserActionH\x00\x12\x12\n\ntimeout_ms\x18\x04 \x01(\x05\x12\x10\n\x08trace_id\x18\x05 \x01(\t\x12\x11\n\tsignature\x18\x06 \x01(\t\x12\x12\n\nsession_id\x18\x08 \x01(\tB\t\n\x07payload\"\xa0\x02\n\rBrowserAction\x12/\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x1f.agent.BrowserAction.ActionType\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x10\n\x08selector\x18\x03 \x01(\t\x12\x0c\n\x04text\x18\x04 \x01(\t\x12\x12\n\nsession_id\x18\x05 \x01(\t\x12\t\n\x01x\x18\x06 \x01(\x05\x12\t\n\x01y\x18\x07 \x01(\x05\"\x86\x01\n\nActionType\x12\x0c\n\x08NAVIGATE\x10\x00\x12\t\n\x05\x43LICK\x10\x01\x12\x08\n\x04TYPE\x10\x02\x12\x0e\n\nSCREENSHOT\x10\x03\x12\x0b\n\x07GET_DOM\x10\x04\x12\t\n\x05HOVER\x10\x05\x12\n\n\x06SCROLL\x10\x06\x12\t\n\x05\x43LOSE\x10\x07\x12\x08\n\x04\x45VAL\x10\x08\x12\x0c\n\x08GET_A11Y\x10\t\"\xe0\x02\n\x0cTaskResponse\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12*\n\x06status\x18\x02 \x01(\x0e\x32\x1a.agent.TaskResponse.Status\x12\x0e\n\x06stdout\x18\x03 \x01(\t\x12\x0e\n\x06stderr\x18\x04 \x01(\t\x12\x10\n\x08trace_id\x18\x05 \x01(\t\x12\x35\n\tartifacts\x18\x06 \x03(\x0b\x32\".agent.TaskResponse.ArtifactsEntry\x12\x30\n\x0e\x62rowser_result\x18\x07 \x01(\x0b\x32\x16.agent.BrowserResponseH\x00\x1a\x30\n\x0e\x41rtifactsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\"<\n\x06Status\x12\x0b\n\x07SUCCESS\x10\x00\x12\t\n\x05\x45RROR\x10\x01\x12\x0b\n\x07TIMEOUT\x10\x02\x12\r\n\tCANCELLED\x10\x03\x42\x08\n\x06result\"\xdc\x01\n\x0f\x42rowserResponse\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x10\n\x08snapshot\x18\x03 \x01(\x0c\x12\x13\n\x0b\x64om_content\x18\x04 \x01(\t\x12\x11\n\ta11y_tree\x18\x05 \x01(\t\x12\x13\n\x0b\x65val_result\x18\x06 \x01(\t\x12.\n\x0f\x63onsole_history\x18\x07 \x03(\x0b\x32\x15.agent.ConsoleMessage\x12.\n\x0fnetwork_history\x18\x08 \x03(\x0b\x32\x15.agent.NetworkRequest\"C\n\x0e\x43onsoleMessage\x12\r\n\x05level\x18\x01 \x01(\t\x12\x0c\n\x04text\x18\x02 
\x01(\t\x12\x14\n\x0ctimestamp_ms\x18\x03 \x01(\x03\"h\n\x0eNetworkRequest\x12\x0e\n\x06method\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\x05\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x12\n\nlatency_ms\x18\x05 \x01(\x03\",\n\x0eWorkPoolUpdate\x12\x1a\n\x12\x61vailable_task_ids\x18\x01 \x03(\t\"4\n\x10TaskClaimRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x0f\n\x07node_id\x18\x02 \x01(\t\"E\n\x11TaskClaimResponse\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x0f\n\x07granted\x18\x02 \x01(\x08\x12\x0e\n\x06reason\x18\x03 \x01(\t\"\xe6\x02\n\tHeartbeat\x12\x0f\n\x07node_id\x18\x01 \x01(\t\x12\x19\n\x11\x63pu_usage_percent\x18\x02 \x01(\x02\x12\x1c\n\x14memory_usage_percent\x18\x03 \x01(\x02\x12\x1b\n\x13\x61\x63tive_worker_count\x18\x04 \x01(\x05\x12\x1b\n\x13max_worker_capacity\x18\x05 \x01(\x05\x12\x16\n\x0estatus_message\x18\x06 \x01(\t\x12\x18\n\x10running_task_ids\x18\x07 \x03(\t\x12\x11\n\tcpu_count\x18\x08 \x01(\x05\x12\x16\n\x0ememory_used_gb\x18\t \x01(\x02\x12\x17\n\x0fmemory_total_gb\x18\n \x01(\x02\x12\x1a\n\x12\x63pu_usage_per_core\x18\x0b \x03(\x02\x12\x14\n\x0c\x63pu_freq_mhz\x18\x0c \x01(\x02\x12\x1b\n\x13memory_available_gb\x18\r \x01(\x02\x12\x10\n\x08load_avg\x18\x0e \x03(\x02\"-\n\x13HealthCheckResponse\x12\x16\n\x0eserver_time_ms\x18\x01 \x01(\x03\"\xe4\x01\n\x0f\x46ileSyncMessage\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12,\n\x08manifest\x18\x02 \x01(\x0b\x32\x18.agent.DirectoryManifestH\x00\x12\'\n\tfile_data\x18\x03 \x01(\x0b\x32\x12.agent.FilePayloadH\x00\x12#\n\x06status\x18\x04 \x01(\x0b\x32\x11.agent.SyncStatusH\x00\x12%\n\x07\x63ontrol\x18\x05 \x01(\x0b\x32\x12.agent.SyncControlH\x00\x12\x0f\n\x07task_id\x18\x06 \x01(\tB\t\n\x07payload\"\xab\x02\n\x0bSyncControl\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.agent.SyncControl.Action\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\x15\n\rrequest_paths\x18\x03 \x03(\t\x12\x0f\n\x07\x63ontent\x18\x04 \x01(\x0c\x12\x0e\n\x06is_dir\x18\x05 
\x01(\x08\"\xaa\x01\n\x06\x41\x63tion\x12\x12\n\x0eSTART_WATCHING\x10\x00\x12\x11\n\rSTOP_WATCHING\x10\x01\x12\x08\n\x04LOCK\x10\x02\x12\n\n\x06UNLOCK\x10\x03\x12\x14\n\x10REFRESH_MANIFEST\x10\x04\x12\n\n\x06RESYNC\x10\x05\x12\x08\n\x04LIST\x10\x06\x12\x08\n\x04READ\x10\x07\x12\t\n\x05WRITE\x10\x08\x12\n\n\x06\x44\x45LETE\x10\t\x12\t\n\x05PURGE\x10\n\x12\x0b\n\x07\x43LEANUP\x10\x0b\"F\n\x11\x44irectoryManifest\x12\x11\n\troot_path\x18\x01 \x01(\t\x12\x1e\n\x05\x66iles\x18\x02 \x03(\x0b\x32\x0f.agent.FileInfo\"D\n\x08\x46ileInfo\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x0c\n\x04size\x18\x02 \x01(\x03\x12\x0c\n\x04hash\x18\x03 \x01(\t\x12\x0e\n\x06is_dir\x18\x04 \x01(\x08\"_\n\x0b\x46ilePayload\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\r\n\x05\x63hunk\x18\x02 \x01(\x0c\x12\x13\n\x0b\x63hunk_index\x18\x03 \x01(\x05\x12\x10\n\x08is_final\x18\x04 \x01(\x08\x12\x0c\n\x04hash\x18\x05 \x01(\t\"\xa0\x01\n\nSyncStatus\x12$\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x16.agent.SyncStatus.Code\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x17\n\x0freconcile_paths\x18\x03 \x03(\t\"B\n\x04\x43ode\x12\x06\n\x02OK\x10\x00\x12\t\n\x05\x45RROR\x10\x01\x12\x16\n\x12RECONCILE_REQUIRED\x10\x02\x12\x0f\n\x0bIN_PROGRESS\x10\x03\x32\xe9\x01\n\x11\x41gentOrchestrator\x12L\n\x11SyncConfiguration\x12\x1a.agent.RegistrationRequest\x1a\x1b.agent.RegistrationResponse\x12\x44\n\nTaskStream\x12\x18.agent.ClientTaskMessage\x1a\x18.agent.ServerTaskMessage(\x01\x30\x01\x12@\n\x0cReportHealth\x12\x10.agent.Heartbeat\x1a\x1a.agent.HealthCheckResponse(\x01\x30\x01\x62\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'agent_pb2', _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'protos.agent_pb2', _globals) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None _globals['_REGISTRATIONREQUEST_CAPABILITIESENTRY']._options = None _globals['_REGISTRATIONREQUEST_CAPABILITIESENTRY']._serialized_options 
= b'8\001' _globals['_TASKRESPONSE_ARTIFACTSENTRY']._options = None _globals['_TASKRESPONSE_ARTIFACTSENTRY']._serialized_options = b'8\001' - _globals['_REGISTRATIONREQUEST']._serialized_start=23 - _globals['_REGISTRATIONREQUEST']._serialized_end=245 - _globals['_REGISTRATIONREQUEST_CAPABILITIESENTRY']._serialized_start=194 - _globals['_REGISTRATIONREQUEST_CAPABILITIESENTRY']._serialized_end=245 - _globals['_SANDBOXPOLICY']._serialized_start=248 - _globals['_SANDBOXPOLICY']._serialized_end=445 - _globals['_SANDBOXPOLICY_MODE']._serialized_start=411 - _globals['_SANDBOXPOLICY_MODE']._serialized_end=445 - _globals['_REGISTRATIONRESPONSE']._serialized_start=447 - _globals['_REGISTRATIONRESPONSE']._serialized_end=567 - _globals['_CLIENTTASKMESSAGE']._serialized_start=570 - _globals['_CLIENTTASKMESSAGE']._serialized_end=867 - _globals['_SKILLEVENT']._serialized_start=869 - _globals['_SKILLEVENT']._serialized_end=990 - _globals['_NODEANNOUNCE']._serialized_start=992 - _globals['_NODEANNOUNCE']._serialized_end=1023 - _globals['_BROWSEREVENT']._serialized_start=1026 - _globals['_BROWSEREVENT']._serialized_end=1161 - _globals['_SERVERTASKMESSAGE']._serialized_start=1164 - _globals['_SERVERTASKMESSAGE']._serialized_end=1433 - _globals['_TASKCANCELREQUEST']._serialized_start=1435 - _globals['_TASKCANCELREQUEST']._serialized_end=1471 - _globals['_TASKREQUEST']._serialized_start=1474 - _globals['_TASKREQUEST']._serialized_end=1683 - _globals['_BROWSERACTION']._serialized_start=1686 - _globals['_BROWSERACTION']._serialized_end=1974 - _globals['_BROWSERACTION_ACTIONTYPE']._serialized_start=1840 - _globals['_BROWSERACTION_ACTIONTYPE']._serialized_end=1974 - _globals['_TASKRESPONSE']._serialized_start=1977 - _globals['_TASKRESPONSE']._serialized_end=2329 - _globals['_TASKRESPONSE_ARTIFACTSENTRY']._serialized_start=2209 - _globals['_TASKRESPONSE_ARTIFACTSENTRY']._serialized_end=2257 - _globals['_TASKRESPONSE_STATUS']._serialized_start=2259 - 
_globals['_TASKRESPONSE_STATUS']._serialized_end=2319 - _globals['_BROWSERRESPONSE']._serialized_start=2332 - _globals['_BROWSERRESPONSE']._serialized_end=2552 - _globals['_CONSOLEMESSAGE']._serialized_start=2554 - _globals['_CONSOLEMESSAGE']._serialized_end=2621 - _globals['_NETWORKREQUEST']._serialized_start=2623 - _globals['_NETWORKREQUEST']._serialized_end=2727 - _globals['_WORKPOOLUPDATE']._serialized_start=2729 - _globals['_WORKPOOLUPDATE']._serialized_end=2773 - _globals['_TASKCLAIMREQUEST']._serialized_start=2775 - _globals['_TASKCLAIMREQUEST']._serialized_end=2827 - _globals['_TASKCLAIMRESPONSE']._serialized_start=2829 - _globals['_TASKCLAIMRESPONSE']._serialized_end=2898 - _globals['_HEARTBEAT']._serialized_start=2901 - _globals['_HEARTBEAT']._serialized_end=3259 - _globals['_HEALTHCHECKRESPONSE']._serialized_start=3261 - _globals['_HEALTHCHECKRESPONSE']._serialized_end=3306 - _globals['_FILESYNCMESSAGE']._serialized_start=3309 - _globals['_FILESYNCMESSAGE']._serialized_end=3537 - _globals['_SYNCCONTROL']._serialized_start=3540 - _globals['_SYNCCONTROL']._serialized_end=3839 - _globals['_SYNCCONTROL_ACTION']._serialized_start=3669 - _globals['_SYNCCONTROL_ACTION']._serialized_end=3839 - _globals['_DIRECTORYMANIFEST']._serialized_start=3841 - _globals['_DIRECTORYMANIFEST']._serialized_end=3911 - _globals['_FILEINFO']._serialized_start=3913 - _globals['_FILEINFO']._serialized_end=3981 - _globals['_FILEPAYLOAD']._serialized_start=3983 - _globals['_FILEPAYLOAD']._serialized_end=4078 - _globals['_SYNCSTATUS']._serialized_start=4081 - _globals['_SYNCSTATUS']._serialized_end=4241 - _globals['_SYNCSTATUS_CODE']._serialized_start=4175 - _globals['_SYNCSTATUS_CODE']._serialized_end=4241 - _globals['_AGENTORCHESTRATOR']._serialized_start=4244 - _globals['_AGENTORCHESTRATOR']._serialized_end=4477 + _globals['_REGISTRATIONREQUEST']._serialized_start=30 + _globals['_REGISTRATIONREQUEST']._serialized_end=252 + 
_globals['_REGISTRATIONREQUEST_CAPABILITIESENTRY']._serialized_start=201 + _globals['_REGISTRATIONREQUEST_CAPABILITIESENTRY']._serialized_end=252 + _globals['_SANDBOXPOLICY']._serialized_start=255 + _globals['_SANDBOXPOLICY']._serialized_end=452 + _globals['_SANDBOXPOLICY_MODE']._serialized_start=418 + _globals['_SANDBOXPOLICY_MODE']._serialized_end=452 + _globals['_REGISTRATIONRESPONSE']._serialized_start=454 + _globals['_REGISTRATIONRESPONSE']._serialized_end=574 + _globals['_CLIENTTASKMESSAGE']._serialized_start=577 + _globals['_CLIENTTASKMESSAGE']._serialized_end=874 + _globals['_SKILLEVENT']._serialized_start=876 + _globals['_SKILLEVENT']._serialized_end=997 + _globals['_NODEANNOUNCE']._serialized_start=999 + _globals['_NODEANNOUNCE']._serialized_end=1030 + _globals['_BROWSEREVENT']._serialized_start=1033 + _globals['_BROWSEREVENT']._serialized_end=1168 + _globals['_SERVERTASKMESSAGE']._serialized_start=1171 + _globals['_SERVERTASKMESSAGE']._serialized_end=1487 + _globals['_TASKCANCELREQUEST']._serialized_start=1489 + _globals['_TASKCANCELREQUEST']._serialized_end=1525 + _globals['_TASKREQUEST']._serialized_start=1528 + _globals['_TASKREQUEST']._serialized_end=1737 + _globals['_BROWSERACTION']._serialized_start=1740 + _globals['_BROWSERACTION']._serialized_end=2028 + _globals['_BROWSERACTION_ACTIONTYPE']._serialized_start=1894 + _globals['_BROWSERACTION_ACTIONTYPE']._serialized_end=2028 + _globals['_TASKRESPONSE']._serialized_start=2031 + _globals['_TASKRESPONSE']._serialized_end=2383 + _globals['_TASKRESPONSE_ARTIFACTSENTRY']._serialized_start=2263 + _globals['_TASKRESPONSE_ARTIFACTSENTRY']._serialized_end=2311 + _globals['_TASKRESPONSE_STATUS']._serialized_start=2313 + _globals['_TASKRESPONSE_STATUS']._serialized_end=2373 + _globals['_BROWSERRESPONSE']._serialized_start=2386 + _globals['_BROWSERRESPONSE']._serialized_end=2606 + _globals['_CONSOLEMESSAGE']._serialized_start=2608 + _globals['_CONSOLEMESSAGE']._serialized_end=2675 + 
_globals['_NETWORKREQUEST']._serialized_start=2677 + _globals['_NETWORKREQUEST']._serialized_end=2781 + _globals['_WORKPOOLUPDATE']._serialized_start=2783 + _globals['_WORKPOOLUPDATE']._serialized_end=2827 + _globals['_TASKCLAIMREQUEST']._serialized_start=2829 + _globals['_TASKCLAIMREQUEST']._serialized_end=2881 + _globals['_TASKCLAIMRESPONSE']._serialized_start=2883 + _globals['_TASKCLAIMRESPONSE']._serialized_end=2952 + _globals['_HEARTBEAT']._serialized_start=2955 + _globals['_HEARTBEAT']._serialized_end=3313 + _globals['_HEALTHCHECKRESPONSE']._serialized_start=3315 + _globals['_HEALTHCHECKRESPONSE']._serialized_end=3360 + _globals['_FILESYNCMESSAGE']._serialized_start=3363 + _globals['_FILESYNCMESSAGE']._serialized_end=3591 + _globals['_SYNCCONTROL']._serialized_start=3594 + _globals['_SYNCCONTROL']._serialized_end=3893 + _globals['_SYNCCONTROL_ACTION']._serialized_start=3723 + _globals['_SYNCCONTROL_ACTION']._serialized_end=3893 + _globals['_DIRECTORYMANIFEST']._serialized_start=3895 + _globals['_DIRECTORYMANIFEST']._serialized_end=3965 + _globals['_FILEINFO']._serialized_start=3967 + _globals['_FILEINFO']._serialized_end=4035 + _globals['_FILEPAYLOAD']._serialized_start=4037 + _globals['_FILEPAYLOAD']._serialized_end=4132 + _globals['_SYNCSTATUS']._serialized_start=4135 + _globals['_SYNCSTATUS']._serialized_end=4295 + _globals['_SYNCSTATUS_CODE']._serialized_start=4229 + _globals['_SYNCSTATUS_CODE']._serialized_end=4295 + _globals['_AGENTORCHESTRATOR']._serialized_start=4298 + _globals['_AGENTORCHESTRATOR']._serialized_end=4531 # @@protoc_insertion_point(module_scope) diff --git a/agent-node/protos/agent_pb2_grpc.py b/agent-node/protos/agent_pb2_grpc.py index 932d45e..f551b0b 100644 --- a/agent-node/protos/agent_pb2_grpc.py +++ b/agent-node/protos/agent_pb2_grpc.py @@ -2,7 +2,7 @@ """Client and server classes corresponding to protobuf-defined services.""" import grpc -import agent_pb2 as agent__pb2 +from protos import agent_pb2 as protos_dot_agent__pb2 class 
AgentOrchestratorStub(object): @@ -17,18 +17,18 @@ """ self.SyncConfiguration = channel.unary_unary( '/agent.AgentOrchestrator/SyncConfiguration', - request_serializer=agent__pb2.RegistrationRequest.SerializeToString, - response_deserializer=agent__pb2.RegistrationResponse.FromString, + request_serializer=protos_dot_agent__pb2.RegistrationRequest.SerializeToString, + response_deserializer=protos_dot_agent__pb2.RegistrationResponse.FromString, ) self.TaskStream = channel.stream_stream( '/agent.AgentOrchestrator/TaskStream', - request_serializer=agent__pb2.ClientTaskMessage.SerializeToString, - response_deserializer=agent__pb2.ServerTaskMessage.FromString, + request_serializer=protos_dot_agent__pb2.ClientTaskMessage.SerializeToString, + response_deserializer=protos_dot_agent__pb2.ServerTaskMessage.FromString, ) self.ReportHealth = channel.stream_stream( '/agent.AgentOrchestrator/ReportHealth', - request_serializer=agent__pb2.Heartbeat.SerializeToString, - response_deserializer=agent__pb2.HealthCheckResponse.FromString, + request_serializer=protos_dot_agent__pb2.Heartbeat.SerializeToString, + response_deserializer=protos_dot_agent__pb2.HealthCheckResponse.FromString, ) @@ -62,18 +62,18 @@ rpc_method_handlers = { 'SyncConfiguration': grpc.unary_unary_rpc_method_handler( servicer.SyncConfiguration, - request_deserializer=agent__pb2.RegistrationRequest.FromString, - response_serializer=agent__pb2.RegistrationResponse.SerializeToString, + request_deserializer=protos_dot_agent__pb2.RegistrationRequest.FromString, + response_serializer=protos_dot_agent__pb2.RegistrationResponse.SerializeToString, ), 'TaskStream': grpc.stream_stream_rpc_method_handler( servicer.TaskStream, - request_deserializer=agent__pb2.ClientTaskMessage.FromString, - response_serializer=agent__pb2.ServerTaskMessage.SerializeToString, + request_deserializer=protos_dot_agent__pb2.ClientTaskMessage.FromString, + response_serializer=protos_dot_agent__pb2.ServerTaskMessage.SerializeToString, ), 
'ReportHealth': grpc.stream_stream_rpc_method_handler( servicer.ReportHealth, - request_deserializer=agent__pb2.Heartbeat.FromString, - response_serializer=agent__pb2.HealthCheckResponse.SerializeToString, + request_deserializer=protos_dot_agent__pb2.Heartbeat.FromString, + response_serializer=protos_dot_agent__pb2.HealthCheckResponse.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( @@ -98,8 +98,8 @@ timeout=None, metadata=None): return grpc.experimental.unary_unary(request, target, '/agent.AgentOrchestrator/SyncConfiguration', - agent__pb2.RegistrationRequest.SerializeToString, - agent__pb2.RegistrationResponse.FromString, + protos_dot_agent__pb2.RegistrationRequest.SerializeToString, + protos_dot_agent__pb2.RegistrationResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @@ -115,8 +115,8 @@ timeout=None, metadata=None): return grpc.experimental.stream_stream(request_iterator, target, '/agent.AgentOrchestrator/TaskStream', - agent__pb2.ClientTaskMessage.SerializeToString, - agent__pb2.ServerTaskMessage.FromString, + protos_dot_agent__pb2.ClientTaskMessage.SerializeToString, + protos_dot_agent__pb2.ServerTaskMessage.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @@ -132,7 +132,7 @@ timeout=None, metadata=None): return grpc.experimental.stream_stream(request_iterator, target, '/agent.AgentOrchestrator/ReportHealth', - agent__pb2.Heartbeat.SerializeToString, - agent__pb2.HealthCheckResponse.FromString, + protos_dot_agent__pb2.Heartbeat.SerializeToString, + protos_dot_agent__pb2.HealthCheckResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/ai-hub/app/api/dependencies.py b/ai-hub/app/api/dependencies.py index 82477c5..0023ac0 100644 --- a/ai-hub/app/api/dependencies.py +++ b/ai-hub/app/api/dependencies.py @@ 
-67,11 +67,11 @@ self.document_service = DocumentService(vector_store=vector_store) return self - def with_rag_service(self, retrievers: List[Retriever], prompt_service = None, tool_service = None): + def with_rag_service(self, retrievers: List[Retriever], prompt_service = None, tool_service = None, node_registry_service = None): """ Adds a RAGService instance to the container. """ - self.rag_service = RAGService(retrievers=retrievers, prompt_service=prompt_service, tool_service=tool_service) + self.rag_service = RAGService(retrievers=retrievers, prompt_service=prompt_service, tool_service=tool_service, node_registry_service=node_registry_service) return self def __getattr__(self, name: str) -> Any: diff --git a/ai-hub/app/api/routes/nodes.py b/ai-hub/app/api/routes/nodes.py index 2558af5..154ceb3 100644 --- a/ai-hub/app/api/routes/nodes.py +++ b/ai-hub/app/api/routes/nodes.py @@ -124,6 +124,12 @@ node.description = update.description if update.skill_config is not None: node.skill_config = update.skill_config.model_dump() + # M6: Push policy live to the node if it's connected + try: + services.orchestrator.push_policy(node_id, node.skill_config) + except Exception as e: + logger.warning(f"Could not push live policy to {node_id}: {e}") + if update.is_active is not None: node.is_active = update.is_active @@ -199,6 +205,26 @@ db.commit() return {"message": f"Access revoked for group '{group_id}' on node '{node_id}'."} + @router.post("/admin/mesh/reset", summary="[Admin] Emergency Mesh Reset") + def admin_reset_mesh( + admin_id: str, + db: Session = Depends(get_db) + ): + """ + DANGEROUS: Clears ALL live node collections from memory and resets DB statuses to offline. + Use this to resolve 'zombie' nodes or flapping connections. + """ + _require_admin(admin_id, db) + + # 1. Reset DB + _registry().reset_all_statuses() + + # 2. Clear Memory + count = _registry().clear_memory_cache() + + logger.warning(f"[Admin] Mesh Reset triggered by {admin_id}. 
Cleared {count} live nodes.") + return {"status": "success", "cleared_count": count} + # ================================================================== # USER-FACING ENDPOINTS # ================================================================== @@ -244,6 +270,22 @@ if not live: return {"node_id": node_id, "status": "offline"} return {"node_id": node_id, "status": live._compute_status(), "stats": live.stats} + + @router.get("/{node_id}/terminal", summary="Read Node Terminal History (AI Use Case)") + def get_node_terminal(node_id: str): + """ + AI-Specific: Returns the most recent 150 terminal interaction chunks for a live node. + This provides context for the AI reasoning agent. + """ + live = _registry().get_node(node_id) + if not live: + return {"node_id": node_id, "status": "offline", "terminal": []} + return { + "node_id": node_id, + "status": live._compute_status(), + "terminal": live.terminal_history, + "session_id": live.session_id + } @router.post("/{node_id}/dispatch", response_model=schemas.NodeDispatchResponse, summary="Dispatch Task to Node") def dispatch_to_node(node_id: str, request: schemas.NodeDispatchRequest): @@ -562,44 +604,106 @@ """ Multi-node global event bus for a user. Powers the split-window multi-pane execution UI. - - On connect: sends initial_snapshot with all live nodes. - Ongoing: streams all events from all user's nodes (disambiguated by node_id). - Every 5s: sends a mesh_heartbeat summary across all nodes. + High-performance edition: streams events with millisecond latency. 
""" - await websocket.accept() - registry = _registry() - - all_live = registry.list_nodes(user_id=user_id) - await websocket.send_json({ - "event": "initial_snapshot", - "user_id": user_id, - "timestamp": _now(), - "data": {"nodes": [n.to_dict() for n in all_live], "count": len(all_live)}, - }) - - q: queue.Queue = queue.Queue() - registry.subscribe_user(user_id, q) try: - while True: - await _drain(q, websocket) - await asyncio.sleep(HEARTBEAT_INTERVAL_S) - live_nodes = registry.list_nodes(user_id=user_id) - await websocket.send_json({ - "event": "mesh_heartbeat", + await websocket.accept() + except Exception as e: + logger.error(f"[📶] WebSocket accept failed for user={user_id}: {e}") + return + + registry = _registry() + logger.info(f"[📶] Multi-node stream connected for user={user_id}") + + try: + # 1. Send initial snapshot immediately + try: + all_live = registry.list_nodes(user_id=user_id) + logger.info(f"[📶] Sending initial snapshot for user={user_id} with {len(all_live)} nodes") + snapshot_data = { + "event": "initial_snapshot", "user_id": user_id, "timestamp": _now(), - "data": { - "nodes": [{"node_id": n.node_id, "status": n._compute_status(), "stats": n.stats} - for n in live_nodes] - }, - }) - except WebSocketDisconnect: - pass + "data": {"nodes": [n.to_dict() for n in all_live], "count": len(all_live)}, + } + await websocket.send_json(snapshot_data) + logger.info(f"[📶] Initial snapshot sent successfully for user={user_id}") + except Exception as e: + logger.error(f"[📶] Failed to send initial snapshot for user={user_id}: {e}", exc_info=True) + await websocket.close(code=1011) # Internal Error + return + + q: queue.Queue = queue.Queue() + registry.subscribe_user(user_id, q) + + async def send_events(): + import time + last_heartbeat = 0 + try: + while True: + # Drain all events from queue and send + await _drain(q, websocket) + + now = time.time() + if now - last_heartbeat > HEARTBEAT_INTERVAL_S: + live_nodes = registry.list_nodes(user_id=user_id) + await 
websocket.send_json({ + "event": "mesh_heartbeat", + "user_id": user_id, + "timestamp": _now(), + "data": { + "nodes": [{"node_id": n.node_id, "status": n._compute_status(), "stats": n.stats} + for n in live_nodes] + }, + }) + last_heartbeat = now + + # High-frequency polling (20Hz) for gaming-fast UI updates + await asyncio.sleep(0.05) + except WebSocketDisconnect: + logger.info(f"[📶] Sender disconnected for user={user_id}") + except Exception as e: + logger.error(f"[nodes/stream/all_sender] CRASH for user={user_id}: {e}", exc_info=True) + + async def receive_events(): + """Keep connection alive and handle client-initiated pings/close.""" + try: + while True: + # Consume client messages to prevent buffer bloat + data = await websocket.receive_json() + if data.get("action") == "ping": + await websocket.send_json({ + "event": "pong", + "user_id": user_id, + "timestamp": _now(), + "client_ts": data.get("ts") + }) + except WebSocketDisconnect: + logger.info(f"[📶] Receiver disconnected for user={user_id}") + except Exception as e: + logger.error(f"[nodes/stream/all_receiver] CRASH for user={user_id}: {e}", exc_info=True) + + # Run sender and receiver concurrently + sender_task = asyncio.create_task(send_events()) + receiver_task = asyncio.create_task(receive_events()) + + try: + done, pending = await asyncio.wait( + [sender_task, receiver_task], + return_when=asyncio.FIRST_COMPLETED + ) + for t in pending: + t.cancel() + except asyncio.CancelledError: + sender_task.cancel() + receiver_task.cancel() + finally: + registry.unsubscribe_user(user_id, q) + logger.info(f"[📶] Multi-node stream disconnected for user={user_id}") + except Exception as e: - logger.error(f"[nodes/stream/all] user={user_id}: {e}") - finally: - registry.unsubscribe_user(user_id, q) + logger.error(f"[nodes/stream/all] Error in stream handler for user={user_id}: {e}", exc_info=True) + # Socket will be closed by FastAPI on uncaught exception if not already closed # 
================================================================== # FS EXPLORER ENDPOINTS (Modular Navigator) diff --git a/ai-hub/app/app.py b/ai-hub/app/app.py index c71d329..6f1ee5a 100644 --- a/ai-hub/app/app.py +++ b/ai-hub/app/app.py @@ -153,12 +153,17 @@ services = ServiceContainer() services.with_document_service(vector_store=vector_store) - # Core orchestration first - services.with_service("node_registry_service", service=NodeRegistryService()) + node_registry_service = NodeRegistryService() + services.with_service("node_registry_service", service=node_registry_service) tool_service = ToolService(services=services) services.with_service("tool_service", service=tool_service) - services.with_rag_service(retrievers=retrievers, prompt_service=prompt_service, tool_service=tool_service) + services.with_rag_service( + retrievers=retrievers, + prompt_service=prompt_service, + tool_service=tool_service, + node_registry_service=node_registry_service + ) services.with_service("stt_service", service=STTService(stt_provider=stt_provider)) services.with_service("tts_service", service=TTSService(tts_provider=tts_provider)) diff --git a/ai-hub/app/core/grpc/core/journal.py b/ai-hub/app/core/grpc/core/journal.py index b223f2f..566c85c 100644 --- a/ai-hub/app/core/grpc/core/journal.py +++ b/ai-hub/app/core/grpc/core/journal.py @@ -10,23 +10,53 @@ """Initializes state for a new task and returns its notification event.""" event = threading.Event() with self.lock: - self.tasks[task_id] = {"event": event, "result": None, "node_id": node_id} + self.tasks[task_id] = { + "event": event, + "result": None, + "node_id": node_id, + "stream_buffer": "" # NEW: Accumulates stdout/stderr chunks live + } return event + def append_stream(self, task_id, chunk): + """Appends a real-time output chunk to the task's buffer.""" + with self.lock: + if task_id in self.tasks: + self.tasks[task_id]["stream_buffer"] += chunk + return True + return False + def fulfill(self, task_id, result): 
"""Processes a result from a node and triggers the waiting thread.""" with self.lock: if task_id in self.tasks: + # Merge stream buffer into the final result if not already present + if isinstance(result, dict) and "stdout" in result: + # If result already has stdout, we prefer the final one + # but ensure we don't lose the early chunks if they weren't cumulative + pass + self.tasks[task_id]["result"] = result self.tasks[task_id]["event"].set() return True return False def get_result(self, task_id): - """Returns the result associated with the given task ID.""" + """Returns the result associated with the given task ID, including the live stream buffer.""" with self.lock: data = self.tasks.get(task_id) - return data["result"] if data else None + if not data: return None + + res = data["result"] + # If no final result yet (timeout case), return the stream buffer as stdout + if res is None: + return { + "stdout": data["stream_buffer"], + "stderr": "", + "status": "TIMEOUT", + "partial": True + } + return res def pop(self, task_id): """Removes the task's state from the journal.""" diff --git a/ai-hub/app/core/grpc/core/mirror.py b/ai-hub/app/core/grpc/core/mirror.py index 08faf9d..d32a569 100644 --- a/ai-hub/app/core/grpc/core/mirror.py +++ b/ai-hub/app/core/grpc/core/mirror.py @@ -39,8 +39,9 @@ print(f" [📁🚷] Ignoring write to {file_payload.path}") return - # Prevent path traversal - safe_path = os.path.normpath(os.path.join(workspace, file_payload.path)) + # Prevent path traversal — strip leading slash to ensure it's relative to workspace + path_safe = file_payload.path.lstrip("/") + safe_path = os.path.normpath(os.path.join(workspace, path_safe)) if not safe_path.startswith(workspace): raise ValueError(f"Malicious path detected: {file_payload.path}") diff --git a/ai-hub/app/core/grpc/services/assistant.py b/ai-hub/app/core/grpc/services/assistant.py index c9d9ca8..feab7b1 100644 --- a/ai-hub/app/core/grpc/services/assistant.py +++ 
b/ai-hub/app/core/grpc/services/assistant.py @@ -340,17 +340,30 @@ # 12-Factor Signing Logic sig = sign_payload(cmd) req = agent_pb2.ServerTaskMessage(task_request=agent_pb2.TaskRequest( - task_id=tid, payload_json=cmd, signature=sig, session_id=session_id)) + task_id=tid, payload_json=cmd, signature=sig, session_id=session_id, + timeout_ms=timeout * 1000)) logger.info(f"[📤] Dispatching shell {tid} to {node_id}") + self.registry.emit(node_id, "task_assigned", {"command": cmd, "session_id": session_id}, task_id=tid) node.queue.put(req) + self.registry.emit(node_id, "task_start", {"command": cmd}, task_id=tid) if event.wait(timeout): res = self.journal.get_result(tid) self.journal.pop(tid) return res + + # M6: Timeout recovery. If command exceeds AI's defined expectation, we attempt to abort it + # and return whatever was captured in the stream buffer. + logger.warning(f"[⚠️] Shell task {tid} TIMEOUT after {timeout}s on {node_id}. Sending ABORT.") + try: + node.queue.put(agent_pb2.ServerTaskMessage(task_cancel=agent_pb2.TaskCancelRequest(task_id=tid))) + except: pass + + # Return partial result captured in buffer before popping + res = self.journal.get_result(tid) self.journal.pop(tid) - return {"error": "Timeout"} + return res if res else {"error": "Timeout", "stdout": "", "stderr": "", "status": "TIMEOUT"} def dispatch_browser(self, node_id, action, timeout=60, session_id=None): """Dispatches a browser action to a directed session node.""" @@ -371,7 +384,9 @@ task_id=tid, browser_action=action, signature=sig, session_id=session_id)) logger.info(f"[🌐📤] Dispatching browser {tid} to {node_id}") + self.registry.emit(node_id, "task_assigned", {"browser_action": action.action, "url": action.url}, task_id=tid) node.queue.put(req) + self.registry.emit(node_id, "task_start", {"browser_action": action.action}, task_id=tid) if event.wait(timeout): res = self.journal.get_result(tid) diff --git a/ai-hub/app/core/grpc/services/grpc_server.py 
b/ai-hub/app/core/grpc/services/grpc_server.py index 1b1b0d4..9cf6e4f 100644 --- a/ai-hub/app/core/grpc/services/grpc_server.py +++ b/ai-hub/app/core/grpc/services/grpc_server.py @@ -98,27 +98,84 @@ except Exception as e: logger.error(f"[⚠️] Hub token validation unavailable ({e}); proceeding as 'default' user.") - # Build allowed_commands from skill_config - # Build Sandbox Policy from skill_config['shell'] + def _build_sandbox_policy(self, skill_cfg): + """Builds a SandboxPolicy protobuf from raw skill configuration.""" shell_cfg = skill_cfg.get("shell", {}) if isinstance(skill_cfg, dict) else {} if shell_cfg is None: shell_cfg = {} + # Get sandbox config sandbox_cfg = shell_cfg.get("sandbox", {}) if isinstance(shell_cfg, dict) else {} if sandbox_cfg is None: sandbox_cfg = {} # 1. Resolve Mode - mode_str = sandbox_cfg.get("mode", "STRICT").upper() + mode_str = (sandbox_cfg.get("mode") or "STRICT").upper() grpc_mode = agent_pb2.SandboxPolicy.STRICT if mode_str == "STRICT" else agent_pb2.SandboxPolicy.PERMISSIVE # 2. 
Resolve Command Lists (fallback to some safe defaults if enabled but empty) allowed = sandbox_cfg.get("allowed_commands", []) or [] if not allowed and shell_cfg.get("enabled", True): - allowed = ["ls", "cat", "echo", "pwd", "uname", "curl", "python3", "git"] + allowed = [ + "ls", "cat", "echo", "pwd", "uname", "curl", "python3", "git", + "lscpu", "free", "df", "uptime", "nproc", "grep", "awk", "sed", + "hostname", "id", "whoami", "ip", "ping", "ps", "top" + ] - denied = sandbox_cfg.get("denied_commands", []) - sensitive = sandbox_cfg.get("sensitive_commands", []) + denied = sandbox_cfg.get("denied_commands", []) or [] + sensitive = sandbox_cfg.get("sensitive_commands", []) or [] jail = shell_cfg.get("cwd_jail", "") + return agent_pb2.SandboxPolicy( + mode=grpc_mode, + allowed_commands=allowed, + denied_commands=denied, + sensitive_commands=sensitive, + working_dir_jail=jail + ) + + def push_policy(self, node_id, skill_config): + """Pushes a live policy update to a connected node.""" + node = self.registry.get_node(node_id) + if not node: + logger.warning(f"[🔒] Cannot push policy to {node_id}: Node Offline") + return + + policy = self._build_sandbox_policy(skill_config) + logger.info(f"[🔒] Pushing LIVE policy update to {node_id} (Mode: {policy.mode}, Allowed: {len(policy.allowed_commands)})") + + node.queue.put(agent_pb2.ServerTaskMessage( + policy_update=policy + )) + + def SyncConfiguration(self, request, context): + """M4 Authenticated Handshake: Validate invite_token via Hub DB, then send policy.""" + node_id = request.node_id + invite_token = request.auth_token + + # --- M4: Token validation via Hub API --- + user_id = "default" + skill_cfg = {} + + if HUB_API_URL and _requests: + try: + resp = _requests.post( + f"{HUB_API_URL}{HUB_API_PATH}", + params={"node_id": node_id, "token": invite_token}, + timeout=5, + ) + payload = resp.json() or {} + if not payload.get("valid"): + reason = payload.get("reason", "Token rejected") or "Token rejected" + 
logger.warning(f"[🔒] SyncConfiguration REJECTED {node_id}: {reason}") + return agent_pb2.RegistrationResponse(success=False, error_message=reason) + + user_id = payload.get("user_id", "default") + skill_cfg = payload.get("skill_config", {}) + logger.info(f"[🔑] Token validated for {node_id} (owner: {user_id})") + except Exception as e: + logger.error(f"[⚠️] Hub token validation unavailable ({e}); proceeding as 'default' user.") + + policy = self._build_sandbox_policy(skill_cfg) + # Register the node in the centralized AI Hub registry self.registry.register(request.node_id, user_id, { "desc": request.node_description, @@ -127,18 +184,13 @@ return agent_pb2.RegistrationResponse( success=True, - policy=agent_pb2.SandboxPolicy( - mode=grpc_mode, - allowed_commands=allowed, - denied_commands=denied, - sensitive_commands=sensitive, - working_dir_jail=jail - ) + policy=policy ) def TaskStream(self, request_iterator, context): """Persistent Bi-directional Stream for Command & Control.""" node_id = "unknown" + node = None try: # 1. Blocking wait for Node Identity first_msg = next(request_iterator) @@ -150,6 +202,7 @@ node = self.registry.get_node(node_id) if not node: logger.error(f"[!] Stream rejected: Node {node_id} not registered") + node_id = "unknown" # Don't deregister if we didn't find the node return logger.info(f"[📶] gRPC Stream Online: {node_id}") @@ -183,10 +236,20 @@ except Exception as e: logger.error(f"[!] 
TaskStream Error for {node_id}: {e}") finally: - self.registry.deregister(node_id) + if node_id != "unknown": + self.registry.deregister(node_id, record=node) def _handle_client_message(self, msg, node_id, node): kind = msg.WhichOneof('payload') + if os.getenv("DEBUG_GRPC"): + # Exclude large blobs like terminal_out or file_data from full log unless needed + if kind == 'skill_event' and msg.skill_event.WhichOneof('data') == 'terminal_out': + logger.info(f"[DEBUG-gRPC] INBOUND from {node_id}: {kind} (terminal_out, size={len(msg.skill_event.terminal_out)})") + elif kind == 'file_sync' and msg.file_sync.HasField('file_data'): + logger.info(f"[DEBUG-gRPC] INBOUND from {node_id}: {kind} (file_data, path={msg.file_sync.file_data.path}, size={len(msg.file_sync.file_data.chunk)})") + else: + logger.info(f"[DEBUG-gRPC] INBOUND from {node_id}: {kind} | {msg}") + if kind == 'task_claim': task_id = msg.task_claim.task_id success, payload = self.pool.claim(task_id, node_id) @@ -213,7 +276,7 @@ elif kind == 'task_response': tr = msg.task_response - res_obj = {"stdout": tr.stdout, "status": tr.status} + res_obj = {"stdout": tr.stdout, "stderr": tr.stderr, "status": tr.status} if tr.HasField("browser_result"): br = tr.browser_result res_obj["browser"] = { @@ -234,6 +297,9 @@ if data_kind == 'terminal_out': event_data["data"] = se.terminal_out event_data["type"] = "output" + # NEW: Capture stream buffer in journal for tool callback/timeout + if se.task_id: + self.journal.append_stream(se.task_id, se.terminal_out) elif data_kind == 'prompt': event_data["data"] = se.prompt event_data["type"] = "prompt" @@ -277,7 +343,6 @@ # M6: Handle interactive 'ls' result correlation if task_id and task_id.startswith("fs-ls-"): - import os files = [ {"path": f.path, "name": os.path.basename(f.path) or f.path, "is_dir": f.is_dir, "size": f.size} for f in fs.manifest.files diff --git a/ai-hub/app/core/pipelines/code_changer.py b/ai-hub/app/core/pipelines/code_changer.py deleted file mode 100644 
index bdebed0..0000000 --- a/ai-hub/app/core/pipelines/code_changer.py +++ /dev/null @@ -1,88 +0,0 @@ -import json -import os -from typing import List, Dict, Any, Tuple, Optional, Callable - -PROMPT_TEMPLATE = """ -### 🧠 Core Directives - -You are a code generation assistant specialized in producing **one precise and complete code change** per instruction. Your output must be a strict JSON object containing: - -- `reasoning`: A concise explanation of the change. -- `content`: The **full content of the file** (or an empty string for deletions). - ---- - -### 1. Input Structure - -- `overall_plan`: {overall_plan} -- `instruction`: {instruction} -- `filepath`: {filepath} -- `original_files`: {original_files} -- `updated_files`: {updated_files} - ------ - -### 2. 💻 Code Generation Rules - -Please provide **one complete and functional code file** per request, for the specified `file_path`. You must output the **entire, modified file**. - -* **Identical Code Sections:** Use the `#[unchanged_section]|||` syntax for large, sequential blocks of code that are not being modified. -* **Complete File Output:** Always provide the **full file contents** in the `content` block. Do not use placeholders like `...`. -* **Imports:** Ensure all required imports are included. - ---- - -### 3. 
Output Format - -Return exactly one JSON object: -{{ - "reasoning": "Brief explanation.", - "content": "Full file content" -}} -""" - -class CodeRagCodeChanger: - def __init__(self): - pass - - async def forward( - self, - overall_plan: str, - instruction: str, - filepath: str, - original_files: List[Dict[str, Any]], - updated_files: List[Dict[str, Any]], - llm_provider = None, - prompt_service = None, - db: Optional[Session] = None, - user_id: Optional[str] = None, - prompt_slug: str = "code-changer" - ) -> Tuple[str, str]: - if not llm_provider: - raise ValueError("LLM Provider is required.") - - original_json = json.dumps(original_files) - updated_json = json.dumps(updated_files) - - template = PROMPT_TEMPLATE - if prompt_service and db and user_id: - db_prompt = prompt_service.get_prompt_by_slug(db, prompt_slug, user_id) - if db_prompt: - template = db_prompt.content - - prompt = template.format( - overall_plan=overall_plan, - instruction=instruction, - filepath=filepath, - original_files=original_json, - updated_files=updated_json - ) - - response = await llm_provider.acompletion(prompt=prompt, response_format={"type": "json_object"}) - content = response.choices[0].message.content - - try: - data = json.loads(content) - return data.get("content", ""), data.get("reasoning", "") - except json.JSONDecodeError: - return "", f"Failed to parse JSON: {content}" \ No newline at end of file diff --git a/ai-hub/app/core/pipelines/code_reviewer.py b/ai-hub/app/core/pipelines/code_reviewer.py deleted file mode 100644 index 095ac0e..0000000 --- a/ai-hub/app/core/pipelines/code_reviewer.py +++ /dev/null @@ -1,77 +0,0 @@ -import json -from typing import List, Dict, Any, Tuple, Optional, Callable -from sqlalchemy.orm import Session - -PROMPT_TEMPLATE = """ -### 🧠 Core Directives - -### **Code Review Directives** -Your role is a specialized code review AI. 
Your primary task is to review a set of code changes and confirm they **fully and accurately address the user's original request**. - ---- -### **Critical Constraints** -Your review is strictly limited to **code content completeness**. Do not suggest or perform any file splits, moves, or large-scale refactoring. -Identify and resolve any missing logic, placeholders (like "unchanged," "same as original," "to-do"), or incomplete code. - -Return exactly one JSON object: -{{ - "reasoning": "A detailed explanation of why the decision was made.", - "decision": "Either 'complete' or 'modify'.", - "answer": "If 'complete', an empty string. If 'modify', the new execution plan instructions in JSON." -}} - -Input: -- `original_question`: {original_question} -- `execution_plan`: {execution_plan} -- `final_code_changes`: {final_code_changes} -- `original_files`: {original_files} -""" - -class CodeReviewer: - def __init__(self): - pass - - async def forward( - self, - original_question: str, - execution_plan: str, - final_code_changes: List[Dict[str, Any]], - original_files: List[Dict[str, Any]], - llm_provider = None, - prompt_service = None, - db: Optional[Session] = None, - user_id: Optional[str] = None, - prompt_slug: str = "code-reviewer" - ) -> Tuple[str, str, str]: - if not llm_provider: - raise ValueError("LLM Provider is required.") - - final_code_changes_json = json.dumps(final_code_changes) - original_files_json = json.dumps(original_files) - - template = PROMPT_TEMPLATE - if prompt_service and db and user_id: - db_prompt = prompt_service.get_prompt_by_slug(db, prompt_slug, user_id) - if db_prompt: - template = db_prompt.content - - prompt = template.format( - original_question=original_question, - execution_plan=execution_plan, - final_code_changes=final_code_changes_json, - original_files=original_files_json - ) - - response = await llm_provider.acompletion(prompt=prompt, response_format={"type": "json_object"}) - content = response.choices[0].message.content - - 
try: - data = json.loads(content) - decision = data.get("decision", "modify") - reasoning = data.get("reasoning", "") - answer = data.get("answer", "") - if isinstance(answer, list): - answer = json.dumps(answer) - return decision, reasoning, answer - except json.JSONDecodeError: - return "modify", f"Failed to parse JSON: {content}", "" \ No newline at end of file diff --git a/ai-hub/app/core/pipelines/file_selector.py b/ai-hub/app/core/pipelines/file_selector.py deleted file mode 100644 index e03ca5d..0000000 --- a/ai-hub/app/core/pipelines/file_selector.py +++ /dev/null @@ -1,85 +0,0 @@ -import json -from app.db import models -from typing import List, Dict, Any, Tuple, Optional -from sqlalchemy.orm import Session - -PROMPT_TEMPLATE = """ -You're an **expert file navigator** for a large codebase. Your task is to select the most critical and relevant file paths to answer a user's question. All file paths you select must exist within the provided `retrieved_files` list. - ---- - -### File Selection Criteria - -1. **Prioritize Core Files:** Identify files that contain the central logic. -2. **Be Selective:** Aim for **2 to 4 files**. -3. **Exclude Irrelevant and Unreadable Files:** Ignore binaries, images, etc. -4. **Infer User Intent:** Return only file paths that exist in the `retrieved_files` list. 
- ---- - -### Output Format - -Return exactly one JSON array of strings: -[ - "/path/to/file1", - "/path/to/file2" -] - -Input: -- `question`: {question} -- `chat_history`: {chat_history} -- `retrieved_files`: {retrieved_files} -""" - -class CodeRagFileSelector: - def __init__(self): - pass - - def _default_history_formatter(self, history: List[models.Message]) -> str: - return "\n".join( - f"{'Human' if msg.sender == 'user' else 'Assistant'}: {msg.content}" - for msg in history - ) - - async def forward( - self, - question: str, - retrieved_data: List[str], - history: List[models.Message], - llm_provider = None, - prompt_service = None, - db: Optional[Session] = None, - user_id: Optional[str] = None, - prompt_slug: str = "file-selector" - ) -> Tuple[List[str], str]: - if not llm_provider: - raise ValueError("LLM Provider is required.") - - retrieved_json = json.dumps(retrieved_data) - history_text = self._default_history_formatter(history) - - template = PROMPT_TEMPLATE - if prompt_service and db and user_id: - db_prompt = prompt_service.get_prompt_by_slug(db, prompt_slug, user_id) - if db_prompt: - template = db_prompt.content - - prompt = template.format( - question=question, - chat_history=history_text, - retrieved_files=retrieved_json - ) - - response = await llm_provider.acompletion(prompt=prompt, response_format={"type": "json_object"}) - content = response.choices[0].message.content - - try: - data = json.loads(content) - # If the LLM returned a JSON object instead of a list (due to response_format constraint or hallucination) - if isinstance(data, dict) and "files" in data: - return data["files"], "Selected files" - if isinstance(data, list): - return data, "Selected files" - return [], "No files selected" - except json.JSONDecodeError: - return [], f"Failed to parse JSON: {content}" \ No newline at end of file diff --git a/ai-hub/app/core/pipelines/question_decider.py b/ai-hub/app/core/pipelines/question_decider.py deleted file mode 100644 index 
4b0ffbe..0000000 --- a/ai-hub/app/core/pipelines/question_decider.py +++ /dev/null @@ -1,131 +0,0 @@ -import json -import os -from app.core.pipelines.validator import Validator, TokenLimitExceededError -from app.db import models -from typing import List, Dict, Any, Tuple, Optional, Callable -from sqlalchemy.orm import Session - -PROMPT_TEMPLATE = """ -### 🧠 **Core Directives** - -You are a specialized AI assistant for software engineering tasks. Your responses—providing an answer, suggesting a code change, or requesting more files—must be based **exclusively** on the provided codebase content. Your primary goal is to be helpful and accurate while adhering strictly to the following directives. - ------ - -## 1. Data Analysis and Availability - -* **Analyze the User's Request:** Carefully examine the **`question`** and **`chat_history`** to understand what the user wants. -* **Source of Information:** The only information you can use to generate a code-related answer comes from the files provided in the **`retrieved_paths_with_content`** list. - -* **File Data & Availability** -* **`retrieved_paths_with_content`**: Files with content available. -* **`retrieved_paths_without_content`**: Files that exist but content is not loaded. - ------ - -## 2. Decision Logic - -You must choose one of three mutually exclusive decisions: `answer`, `code_change`, or `files`. - -### `decision='answer'` -* Choose this if you have all necessary info to explain a non-code-modification question. - -### `decision='code_change'` -* Choose this for any code manipulation (modify, create, delete). -* Provide a high-level strategy plan in the `answer` field as a numbered list. -* Provide the actual code instructions in a valid JSON list format. - -### `decision='files'` -* Request more files from `retrieved_paths_without_content`. - ------ - -## 3. Output Format - -You MUST respond in valid JSON format with the following fields: -- `reasoning`: Your step-by-step logic. 
-- `decision`: Either 'answer', 'files', or 'code_change'. -- `answer`: Depending on decision (Markdown text, file list, or high-level plan). -- `instructions`: (Only for 'code_change') The JSON list of file operations. - -User Question: {question} -Chat History: {chat_history} -Available Content: {retrieved_paths_with_content} -Missing Content: {retrieved_paths_without_content} - -Strict JSON Output:""" - -class CodeRagQuestionDecider: - def __init__(self, log_dir: str = "ai_payloads", history_formatter: Optional[Callable[[List[models.Message]], str]] = None): - self.log_dir = log_dir - self.history_formatter = history_formatter or self._default_history_formatter - self.validator = Validator() - - def _default_history_formatter(self, history: List[models.Message]) -> str: - return "\n".join( - f"{'Human' if msg.sender == 'user' else 'Assistant'}: {msg.content}" - for msg in history - ) - - async def forward( - self, - question: str, - history: List[models.Message], - retrieved_data: Dict[str, Any], - llm_provider = None, - prompt_service = None, - db: Optional[Session] = None, - user_id: Optional[str] = None, - prompt_slug: str = "question-decider" - ) -> Tuple[str, str, str, Optional[List[Dict]]]: - if not llm_provider: - raise ValueError("LLM Provider is required.") - - with_content = [] - without_content = [] - files_to_process = retrieved_data.get("retrieved_files", []) - - for file in files_to_process: - if isinstance(file, dict): - file_path = file.get("file_path") - file_content = file.get("content") - if file_content and isinstance(file_content, str): - with_content.append({"file_path": file_path, "content": file_content}) - elif file_path: - without_content.append({"file_path": file_path}) - - history_text = self.history_formatter(history) - - # Decide which prompt template to use - template = PROMPT_TEMPLATE - if prompt_service and db and user_id: - db_prompt = prompt_service.get_prompt_by_slug(db, prompt_slug, user_id) - if db_prompt: - template = 
db_prompt.content - - prompt = template.format( - question=question, - chat_history=history_text, - retrieved_paths_with_content=json.dumps(with_content, indent=2), - retrieved_paths_without_content=json.dumps(without_content, indent=2) - ) - - try: - self.validator.precheck_tokensize({"prompt": prompt}) - except TokenLimitExceededError as e: - raise e - - # Call LLM - response = await llm_provider.acompletion(prompt=prompt, response_format={"type": "json_object"}) - content = response.choices[0].message.content - - try: - data = json.loads(content) - answer = data.get("answer", "") - reasoning = data.get("reasoning", "") - decision = data.get("decision", "answer").lower() - instructions = data.get("instructions") - return answer, reasoning, decision, instructions - except json.JSONDecodeError: - # Fallback if LLM fails to provide valid JSON despite instructions - return content, "Failed to parse JSON", "answer", None \ No newline at end of file diff --git a/ai-hub/app/core/pipelines/rag_pipeline.py b/ai-hub/app/core/pipelines/rag_pipeline.py index c63107c..9dcfaee 100644 --- a/ai-hub/app/core/pipelines/rag_pipeline.py +++ b/ai-hub/app/core/pipelines/rag_pipeline.py @@ -102,13 +102,44 @@ ] # 2. Agentic Tool Loop (Max 5 turns to prevent infinite loops) - for _ in range(5): + for turn in range(5): request_kwargs = {} if tools: request_kwargs["tools"] = tools request_kwargs["tool_choice"] = "auto" + model = getattr(llm_provider, "model_name", "unknown") + # Safely calculate total characters in messages, handling None content + msg_lens = [] + for m in messages: + content = "" + if hasattr(m, "content") and m.content is not None: + content = m.content + elif isinstance(m, dict): + content = m.get("content") or "" + msg_lens.append(len(content)) + + total_chars = sum(msg_lens) + tool_count = len(tools) if tools else 0 + + logging.info(f"[RagPipeline] Turn {turn+1} starting. 
Model: {model}, Messages: {len(messages)}, Total Chars: {total_chars}, Tools: {tool_count}") + # Log the specific turn metadata for diagnostics + logging.debug(f"[RagPipeline] Turn {turn+1} Payload Metadata: {messages}") + prediction = await llm_provider.acompletion(messages=messages, **request_kwargs) + + choices = getattr(prediction, "choices", None) + if not choices or len(choices) == 0: + finish_reason = getattr(prediction, "finish_reason", "unknown") + if choices is not None and len(choices) == 0: + # Some providers return empty list for safety filters + logging.warning(f"[RagPipeline.forward] LLM ({model}) returned 0 choices. Turn: {turn+1}. Filter/Safety Trigger likely.") + + logging.error(f"[RagPipeline.forward] LLM ({model}) failed at Turn {turn+1}. Reason: {finish_reason}. Full Response: {prediction}") + return (f"The AI provider ({model}) returned an empty response (Turn {turn+1}). " + f"Reason: {finish_reason}. Context: {total_chars} chars, {tool_count} tools. " + "This is often a safety filter blocking the prompt.") + message = prediction.choices[0].message # If no tool calls, we are done diff --git a/ai-hub/app/core/services/node_registry.py b/ai-hub/app/core/services/node_registry.py index fdbb790..36d9c4d 100644 --- a/ai-hub/app/core/services/node_registry.py +++ b/ai-hub/app/core/services/node_registry.py @@ -12,6 +12,8 @@ import threading import queue import logging +import re +import uuid from datetime import datetime from typing import Dict, Optional, List, Any @@ -56,6 +58,9 @@ } self.connected_at: datetime = datetime.utcnow() self.last_heartbeat_at: datetime = datetime.utcnow() + import uuid + self.session_id: str = str(uuid.uuid4()) + self.terminal_history: List[str] = [] # Recent PTY lines for AI reading def update_stats(self, stats: dict): self.stats.update(stats) @@ -106,6 +111,10 @@ self._node_listeners: Dict[str, List[queue.Queue]] = {} # Per-user WS subscribers: user_id -> [queue, ...] 
(ALL nodes for that user) self._user_listeners: Dict[str, List[queue.Queue]] = {} + # Connection history for flapping detection: node_id -> [timestamp, ...] + self._connection_history: Dict[str, List[datetime]] = {} + self._FLAP_THRESHOLD = 3 # Max connections + self._FLAP_WINDOW_S = 30 # In 30 seconds # ------------------------------------------------------------------ # # DB Helpers # @@ -169,6 +178,15 @@ except Exception as e: logger.error(f"[NodeRegistry] Failed to reset DB statuses: {e}") + def clear_memory_cache(self): + """DANGEROUS: Clears all live connections from memory. Use for emergency resets.""" + with self._lock: + count = len(self._nodes) + self._nodes.clear() + self._connection_history.clear() + logger.info(f"[NodeRegistry] EMERGENCY: Cleared {count} nodes from memory cache.") + return count + # ------------------------------------------------------------------ # # Registration # @@ -179,8 +197,21 @@ Register or re-register a node. Called from gRPC SyncConfiguration on every node connect/reconnect. Persists to DB so the node survives Hub restarts as a known entity. + Includes a flapping detection check to warn about unstable containers. """ with self._lock: + # 1. Flapping Detection + now = datetime.utcnow() + history = self._connection_history.get(node_id, []) + # Filter history to only include events within the window + history = [t for t in history if (now - t).total_seconds() < self._FLAP_WINDOW_S] + history.append(now) + self._connection_history[node_id] = history + + if len(history) > self._FLAP_THRESHOLD: + logger.warning(f"[⚠️] FLAPPING DETECTED for node '{node_id}': {len(history)} connects in {self._FLAP_WINDOW_S}s.") + + # 2. 
Register the live connection record = LiveNodeRecord(node_id=node_id, user_id=user_id, metadata=metadata) self._nodes[node_id] = record @@ -191,19 +222,23 @@ self.emit(node_id, "node_online", record.to_dict()) return record - def deregister(self, node_id: str): + def deregister(self, node_id: str, record: Optional[LiveNodeRecord] = None): """ Remove a node from live_registry when its gRPC stream closes. - The DB record is kept with last_status='offline' so the user can - still see the node in their list (as disconnected). + Safely only removes if the passed record matches the current live registration. """ with self._lock: + current = self._nodes.get(node_id) + if record and current != record: + logger.debug(f"[📋] NodeRegistry: Ignoring deregister for {node_id} (session mismatch)") + return + node = self._nodes.pop(node_id, None) user_id = node.user_id if node else None self._db_mark_offline(node_id) self.emit(node_id, "node_offline", {"node_id": node_id, "user_id": user_id}) - print(f"[📋] NodeRegistry: Deregistered {node_id}") + logger.info(f"[📋] NodeRegistry: Deregistered {node_id}") # ------------------------------------------------------------------ # # Queries # @@ -218,7 +253,10 @@ """Returns only currently LIVE nodes (use the DB for the full list).""" with self._lock: if user_id: - return [n for n in self._nodes.values() if n.user_id == user_id] + # Use robust string comparison to handle any object/string mismatch (e.g. UUID) + live = [n for n in self._nodes.values() if str(n.user_id) == str(user_id)] + # logger.debug(f"[Registry] list_nodes(user_id={user_id}): found {len(live)} nodes.") + return live return list(self._nodes.values()) def get_best(self, user_id: Optional[str] = None) -> Optional[str]: @@ -267,6 +305,33 @@ "timestamp": datetime.utcnow().isoformat(), "data": data, } + + # M6: Store terminal history locally for AI reading + # We only store raw shell output and the commands themselves to keep the context clean. 
+ if node: + if event_type == "task_assigned" and isinstance(data, dict): + cmd = data.get("command") + if cmd: + # Skip TTY keypress echos (manual typing) to keep AI context clean + # We usually only care about the final result or purposeful command execution + # If it's a JSON dict for tty, it's likely a character-by-character input + is_tty_char = isinstance(cmd, str) and cmd.startswith('{"tty"') + if not is_tty_char: + node.terminal_history.append(f"$ {cmd}\n") + elif event_type == "task_stdout" and isinstance(data, str): + node.terminal_history.append(data) + elif event_type == "skill_event" and isinstance(data, dict): + if data.get("type") == "output": + output_data = data.get("data", "") + # Strip ANSI codes for AI readability + ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])') + clean_output = ansi_escape.sub('', output_data) + node.terminal_history.append(clean_output) + + # Keep a rolling buffer of 150 terminal interaction chunks + if len(node.terminal_history) > 150: + node.terminal_history = node.terminal_history[-150:] + seen = set() for q in node_qs + user_qs: if id(q) not in seen: diff --git a/ai-hub/app/core/services/rag.py b/ai-hub/app/core/services/rag.py index 08eba39..949baf2 100644 --- a/ai-hub/app/core/services/rag.py +++ b/ai-hub/app/core/services/rag.py @@ -12,10 +12,11 @@ Service for orchestrating conversational RAG pipelines. Manages chat interactions and message history for a session. 
""" - def __init__(self, retrievers: List[Retriever], prompt_service = None, tool_service = None): + def __init__(self, retrievers: List[Retriever], prompt_service = None, tool_service = None, node_registry_service = None): self.retrievers = retrievers self.prompt_service = prompt_service self.tool_service = tool_service + self.node_registry_service = node_registry_service self.faiss_retriever = next((r for r in retrievers if isinstance(r, FaissDBRetriever)), None) async def chat_with_rag( @@ -108,6 +109,45 @@ mesh_context += f" Name: {node.display_name}\n" mesh_context += f" Description: {node.description or 'No description provided.'}\n" mesh_context += f" Status: {node.last_status}\n" + + # M6: Sandbox Configuration - Expose permissions to AI + shell_config = (node.skill_config or {}).get("shell", {}) + if shell_config.get("enabled"): + sandbox = shell_config.get("sandbox", {}) + mode = sandbox.get("mode", "PERMISSIVE") + allowed = sandbox.get("allowed_commands", []) + denied = sandbox.get("denied_commands", []) + + mesh_context += f" Terminal Sandbox Mode: {mode}\n" + if mode == "STRICT": + mesh_context += f" AI Permitted Commands (Allow-list): {', '.join(allowed) if allowed else 'None'}\n" + elif mode == "PERMISSIVE": + mesh_context += f" AI Restricted Commands (Blacklist): {', '.join(denied) if denied else 'None'}\n" + + # AI advice + if mode == "STRICT" and not allowed: + mesh_context += " ⚠️ Warning: All shell commands are currently blocked by sandbox policy.\n" + + # AI Visibility: Include recent terminal history + if user_service and hasattr(user_service, "node_registry_service") or True: # Use registry if available + # We try to get the registry from the user_service or a global if needed + # But simpler: we passed it in (optional arg) + registry = getattr(self, "node_registry_service", None) + if not registry and user_service: + registry = getattr(user_service, "node_registry_service", None) + + if registry: + live = registry.get_node(node.node_id) + if 
live and live.terminal_history: + # Show last 40 lines (approx) to avoid context bloat + history = live.terminal_history[-40:] + mesh_context += " Recent Terminal Output:\n" + mesh_context += " ```\n" + for line in history: + mesh_context += f" {line}" + if not history[-1].endswith('\n'): mesh_context += "\n" + mesh_context += " ```\n" + mesh_context += "\n" answer_text = await rag_pipeline.forward( diff --git a/ai-hub/app/core/services/tool.py b/ai-hub/app/core/services/tool.py index c76c4f2..77d9ba8 100644 --- a/ai-hub/app/core/services/tool.py +++ b/ai-hub/app/core/services/tool.py @@ -84,7 +84,7 @@ assistant = orchestrator.assistant node_id = args.get("node_id") - + if not node_id: return {"success": False, "error": "node_id is required"} @@ -92,8 +92,22 @@ if skill.name == "mesh_terminal_control": # Maps to TaskAssistant.dispatch_single cmd = args.get("command") - res = assistant.dispatch_single(node_id, cmd) - return {"success": True, "output": res} + timeout = int(args.get("timeout", 30)) + res = assistant.dispatch_single(node_id, cmd, timeout=timeout) + + # IMPORTANT: Shell commands run in a persistent PTY. + # The primary output is streamed live through the MultiNodeConsole. + # To fulfill the requirement that AI waits for the result, we now + # return the actual stdout/stderr to the AI context. 
+ if isinstance(res, dict) and "error" in res: + return {"success": False, "error": res["error"]} + + return { + "success": True, + "stdout": res.get("stdout", ""), + "stderr": res.get("stderr", ""), + "status": res.get("status", "UNKNOWN") + } elif skill.name == "browser_automation_agent": # Maps to TaskAssistant.dispatch_browser @@ -115,6 +129,23 @@ if action == "list": res = assistant.ls(node_id, path) + if isinstance(res, dict) and "files" in res: + # Format for readability and token efficiency + formatted = f"Directory listing for '{res.get('path', path)}' on node {node_id}:\n" + files = res.get("files") + if not files: # Handles None or empty list + formatted += "(Empty directory or failed to list files)" + else: + # Sort: directories first, then alphabetically + files.sort(key=lambda x: (not x.get("is_dir"), x.get("name", "").lower())) + limit = 100 + for f in files[:limit]: + icon = "📁" if f.get("is_dir") else "📄" + size_str = f" ({f.get('size')} bytes)" if not f.get("is_dir") else "" + formatted += f"{icon} {f.get('name')}{size_str}\n" + if len(files) > limit: + formatted += f"... and {len(files) - limit} more items." + res = formatted elif action == "read": res = assistant.cat(node_id, path) elif action == "write": diff --git a/ai-hub/app/core/skills/definitions.py b/ai-hub/app/core/skills/definitions.py index 766549f..d7d642d 100644 --- a/ai-hub/app/core/skills/definitions.py +++ b/ai-hub/app/core/skills/definitions.py @@ -7,19 +7,20 @@ "system_prompt": "You are an expert linux terminal operator. When using this skill, you have direct access to a PTY. 
Format commands clearly and wait for confirmation if they are destructive.", "skill_type": "remote_grpc", "is_enabled": True, - "features": ["chat"], + "features": ["chat", "swarm_control"], "config": { "service": "TerminalService", "method": "Execute", "capabilities": ["shell", "pty", "interactive"], - "parameters": { - "type": "object", - "properties": { - "command": {"type": "string", "description": "The shell command to execute."}, - "node_id": {"type": "string", "description": "The target node ID within the mesh."} - }, - "required": ["command", "node_id"] - } + "parameters": { + "type": "object", + "properties": { + "command": {"type": "string", "description": "The shell command to execute."}, + "node_id": {"type": "string", "description": "The target node ID within the mesh."}, + "timeout": {"type": "integer", "description": "The max seconds to wait for result. Default 30. Use for long-running tasks."} + }, + "required": ["command", "node_id"] + } }, "is_system": True }, @@ -29,7 +30,7 @@ "system_prompt": "You are an AI browsing assistant. Use the Playwright tool to navigate pages, extract information, and interact with web elements. Always provide reasoning for your actions.", "skill_type": "remote_grpc", "is_enabled": True, - "features": ["chat", "workflow"], + "features": ["chat", "workflow", "swarm_control"], "config": { "service": "BrowserService", "method": "Navigate", @@ -71,7 +72,7 @@ "system_prompt": "You are a file management assistant. 
You can browse and synchronize files across different agent nodes.", "skill_type": "local", "is_enabled": True, - "features": ["chat", "workflow"], + "features": ["chat", "workflow", "swarm_control"], "config": { "internal_module": "app.core.grpc.core.mirror", "actions": ["list", "read", "write", "delete"], diff --git a/ai-hub/app/db/models.py b/ai-hub/app/db/models.py index 7008875..d565feb 100644 --- a/ai-hub/app/db/models.py +++ b/ai-hub/app/db/models.py @@ -89,7 +89,7 @@ # Track STT and TTS providers used in this session context stt_provider_name = Column(String, nullable=True) tts_provider_name = Column(String, nullable=True) - # The feature namespace this session belongs to (e.g., "coding_assistant"). + # The feature namespace this session belongs to (e.g., "swarm_control"). feature_name = Column(String, default="default", nullable=False) # Timestamp for when the session was created. created_at = Column(DateTime, default=datetime.utcnow, nullable=False) diff --git a/ai-hub/app/protos/agent.proto b/ai-hub/app/protos/agent.proto index 50992ef..46b74fa 100644 --- a/ai-hub/app/protos/agent.proto +++ b/ai-hub/app/protos/agent.proto @@ -83,6 +83,7 @@ TaskClaimResponse claim_status = 3; TaskCancelRequest task_cancel = 4; FileSyncMessage file_sync = 5; // NEW: Ghost Mirror Sync + SandboxPolicy policy_update = 6; // NEW: Live Policy Update } } diff --git a/ai-hub/app/protos/agent_pb2.py b/ai-hub/app/protos/agent_pb2.py index b02c094..da2f584 100644 --- a/ai-hub/app/protos/agent_pb2.py +++ b/ai-hub/app/protos/agent_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: agent.proto +# source: app/protos/agent.proto # Protobuf Python Version: 4.25.1 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor @@ -14,83 +14,83 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x61gent.proto\x12\x05\x61gent\"\xde\x01\n\x13RegistrationRequest\x12\x0f\n\x07node_id\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x12\n\nauth_token\x18\x03 \x01(\t\x12\x18\n\x10node_description\x18\x04 \x01(\t\x12\x42\n\x0c\x63\x61pabilities\x18\x05 \x03(\x0b\x32,.agent.RegistrationRequest.CapabilitiesEntry\x1a\x33\n\x11\x43\x61pabilitiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc5\x01\n\rSandboxPolicy\x12\'\n\x04mode\x18\x01 \x01(\x0e\x32\x19.agent.SandboxPolicy.Mode\x12\x18\n\x10\x61llowed_commands\x18\x02 \x03(\t\x12\x17\n\x0f\x64\x65nied_commands\x18\x03 \x03(\t\x12\x1a\n\x12sensitive_commands\x18\x04 \x03(\t\x12\x18\n\x10working_dir_jail\x18\x05 \x01(\t\"\"\n\x04Mode\x12\n\n\x06STRICT\x10\x00\x12\x0e\n\nPERMISSIVE\x10\x01\"x\n\x14RegistrationResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12\x15\n\rerror_message\x18\x02 \x01(\t\x12\x12\n\nsession_id\x18\x03 \x01(\t\x12$\n\x06policy\x18\x04 \x01(\x0b\x32\x14.agent.SandboxPolicy\"\xa9\x02\n\x11\x43lientTaskMessage\x12,\n\rtask_response\x18\x01 \x01(\x0b\x32\x13.agent.TaskResponseH\x00\x12-\n\ntask_claim\x18\x02 \x01(\x0b\x32\x17.agent.TaskClaimRequestH\x00\x12,\n\rbrowser_event\x18\x03 \x01(\x0b\x32\x13.agent.BrowserEventH\x00\x12\'\n\x08\x61nnounce\x18\x04 \x01(\x0b\x32\x13.agent.NodeAnnounceH\x00\x12+\n\tfile_sync\x18\x05 \x01(\x0b\x32\x16.agent.FileSyncMessageH\x00\x12(\n\x0bskill_event\x18\x06 \x01(\x0b\x32\x11.agent.SkillEventH\x00\x42\t\n\x07payload\"y\n\nSkillEvent\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x16\n\x0cterminal_out\x18\x03 \x01(\tH\x00\x12\x10\n\x06prompt\x18\x04 \x01(\tH\x00\x12\x14\n\nkeep_alive\x18\x05 
\x01(\x08H\x00\x42\x06\n\x04\x64\x61ta\"\x1f\n\x0cNodeAnnounce\x12\x0f\n\x07node_id\x18\x01 \x01(\t\"\x87\x01\n\x0c\x42rowserEvent\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12,\n\x0b\x63onsole_msg\x18\x02 \x01(\x0b\x32\x15.agent.ConsoleMessageH\x00\x12,\n\x0bnetwork_req\x18\x03 \x01(\x0b\x32\x15.agent.NetworkRequestH\x00\x42\x07\n\x05\x65vent\"\x8d\x02\n\x11ServerTaskMessage\x12*\n\x0ctask_request\x18\x01 \x01(\x0b\x32\x12.agent.TaskRequestH\x00\x12\x31\n\x10work_pool_update\x18\x02 \x01(\x0b\x32\x15.agent.WorkPoolUpdateH\x00\x12\x30\n\x0c\x63laim_status\x18\x03 \x01(\x0b\x32\x18.agent.TaskClaimResponseH\x00\x12/\n\x0btask_cancel\x18\x04 \x01(\x0b\x32\x18.agent.TaskCancelRequestH\x00\x12+\n\tfile_sync\x18\x05 \x01(\x0b\x32\x16.agent.FileSyncMessageH\x00\x42\t\n\x07payload\"$\n\x11TaskCancelRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\"\xd1\x01\n\x0bTaskRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x11\n\ttask_type\x18\x02 \x01(\t\x12\x16\n\x0cpayload_json\x18\x03 \x01(\tH\x00\x12.\n\x0e\x62rowser_action\x18\x07 \x01(\x0b\x32\x14.agent.BrowserActionH\x00\x12\x12\n\ntimeout_ms\x18\x04 \x01(\x05\x12\x10\n\x08trace_id\x18\x05 \x01(\t\x12\x11\n\tsignature\x18\x06 \x01(\t\x12\x12\n\nsession_id\x18\x08 \x01(\tB\t\n\x07payload\"\xa0\x02\n\rBrowserAction\x12/\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x1f.agent.BrowserAction.ActionType\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x10\n\x08selector\x18\x03 \x01(\t\x12\x0c\n\x04text\x18\x04 \x01(\t\x12\x12\n\nsession_id\x18\x05 \x01(\t\x12\t\n\x01x\x18\x06 \x01(\x05\x12\t\n\x01y\x18\x07 \x01(\x05\"\x86\x01\n\nActionType\x12\x0c\n\x08NAVIGATE\x10\x00\x12\t\n\x05\x43LICK\x10\x01\x12\x08\n\x04TYPE\x10\x02\x12\x0e\n\nSCREENSHOT\x10\x03\x12\x0b\n\x07GET_DOM\x10\x04\x12\t\n\x05HOVER\x10\x05\x12\n\n\x06SCROLL\x10\x06\x12\t\n\x05\x43LOSE\x10\x07\x12\x08\n\x04\x45VAL\x10\x08\x12\x0c\n\x08GET_A11Y\x10\t\"\xe0\x02\n\x0cTaskResponse\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12*\n\x06status\x18\x02 
\x01(\x0e\x32\x1a.agent.TaskResponse.Status\x12\x0e\n\x06stdout\x18\x03 \x01(\t\x12\x0e\n\x06stderr\x18\x04 \x01(\t\x12\x10\n\x08trace_id\x18\x05 \x01(\t\x12\x35\n\tartifacts\x18\x06 \x03(\x0b\x32\".agent.TaskResponse.ArtifactsEntry\x12\x30\n\x0e\x62rowser_result\x18\x07 \x01(\x0b\x32\x16.agent.BrowserResponseH\x00\x1a\x30\n\x0e\x41rtifactsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\"<\n\x06Status\x12\x0b\n\x07SUCCESS\x10\x00\x12\t\n\x05\x45RROR\x10\x01\x12\x0b\n\x07TIMEOUT\x10\x02\x12\r\n\tCANCELLED\x10\x03\x42\x08\n\x06result\"\xdc\x01\n\x0f\x42rowserResponse\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x10\n\x08snapshot\x18\x03 \x01(\x0c\x12\x13\n\x0b\x64om_content\x18\x04 \x01(\t\x12\x11\n\ta11y_tree\x18\x05 \x01(\t\x12\x13\n\x0b\x65val_result\x18\x06 \x01(\t\x12.\n\x0f\x63onsole_history\x18\x07 \x03(\x0b\x32\x15.agent.ConsoleMessage\x12.\n\x0fnetwork_history\x18\x08 \x03(\x0b\x32\x15.agent.NetworkRequest\"C\n\x0e\x43onsoleMessage\x12\r\n\x05level\x18\x01 \x01(\t\x12\x0c\n\x04text\x18\x02 \x01(\t\x12\x14\n\x0ctimestamp_ms\x18\x03 \x01(\x03\"h\n\x0eNetworkRequest\x12\x0e\n\x06method\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\x05\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x12\n\nlatency_ms\x18\x05 \x01(\x03\",\n\x0eWorkPoolUpdate\x12\x1a\n\x12\x61vailable_task_ids\x18\x01 \x03(\t\"4\n\x10TaskClaimRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x0f\n\x07node_id\x18\x02 \x01(\t\"E\n\x11TaskClaimResponse\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x0f\n\x07granted\x18\x02 \x01(\x08\x12\x0e\n\x06reason\x18\x03 \x01(\t\"\xe6\x02\n\tHeartbeat\x12\x0f\n\x07node_id\x18\x01 \x01(\t\x12\x19\n\x11\x63pu_usage_percent\x18\x02 \x01(\x02\x12\x1c\n\x14memory_usage_percent\x18\x03 \x01(\x02\x12\x1b\n\x13\x61\x63tive_worker_count\x18\x04 \x01(\x05\x12\x1b\n\x13max_worker_capacity\x18\x05 \x01(\x05\x12\x16\n\x0estatus_message\x18\x06 
\x01(\t\x12\x18\n\x10running_task_ids\x18\x07 \x03(\t\x12\x11\n\tcpu_count\x18\x08 \x01(\x05\x12\x16\n\x0ememory_used_gb\x18\t \x01(\x02\x12\x17\n\x0fmemory_total_gb\x18\n \x01(\x02\x12\x1a\n\x12\x63pu_usage_per_core\x18\x0b \x03(\x02\x12\x14\n\x0c\x63pu_freq_mhz\x18\x0c \x01(\x02\x12\x1b\n\x13memory_available_gb\x18\r \x01(\x02\x12\x10\n\x08load_avg\x18\x0e \x03(\x02\"-\n\x13HealthCheckResponse\x12\x16\n\x0eserver_time_ms\x18\x01 \x01(\x03\"\xe4\x01\n\x0f\x46ileSyncMessage\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12,\n\x08manifest\x18\x02 \x01(\x0b\x32\x18.agent.DirectoryManifestH\x00\x12\'\n\tfile_data\x18\x03 \x01(\x0b\x32\x12.agent.FilePayloadH\x00\x12#\n\x06status\x18\x04 \x01(\x0b\x32\x11.agent.SyncStatusH\x00\x12%\n\x07\x63ontrol\x18\x05 \x01(\x0b\x32\x12.agent.SyncControlH\x00\x12\x0f\n\x07task_id\x18\x06 \x01(\tB\t\n\x07payload\"\xab\x02\n\x0bSyncControl\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.agent.SyncControl.Action\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\x15\n\rrequest_paths\x18\x03 \x03(\t\x12\x0f\n\x07\x63ontent\x18\x04 \x01(\x0c\x12\x0e\n\x06is_dir\x18\x05 \x01(\x08\"\xaa\x01\n\x06\x41\x63tion\x12\x12\n\x0eSTART_WATCHING\x10\x00\x12\x11\n\rSTOP_WATCHING\x10\x01\x12\x08\n\x04LOCK\x10\x02\x12\n\n\x06UNLOCK\x10\x03\x12\x14\n\x10REFRESH_MANIFEST\x10\x04\x12\n\n\x06RESYNC\x10\x05\x12\x08\n\x04LIST\x10\x06\x12\x08\n\x04READ\x10\x07\x12\t\n\x05WRITE\x10\x08\x12\n\n\x06\x44\x45LETE\x10\t\x12\t\n\x05PURGE\x10\n\x12\x0b\n\x07\x43LEANUP\x10\x0b\"F\n\x11\x44irectoryManifest\x12\x11\n\troot_path\x18\x01 \x01(\t\x12\x1e\n\x05\x66iles\x18\x02 \x03(\x0b\x32\x0f.agent.FileInfo\"D\n\x08\x46ileInfo\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x0c\n\x04size\x18\x02 \x01(\x03\x12\x0c\n\x04hash\x18\x03 \x01(\t\x12\x0e\n\x06is_dir\x18\x04 \x01(\x08\"_\n\x0b\x46ilePayload\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\r\n\x05\x63hunk\x18\x02 \x01(\x0c\x12\x13\n\x0b\x63hunk_index\x18\x03 \x01(\x05\x12\x10\n\x08is_final\x18\x04 \x01(\x08\x12\x0c\n\x04hash\x18\x05 
\x01(\t\"\xa0\x01\n\nSyncStatus\x12$\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x16.agent.SyncStatus.Code\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x17\n\x0freconcile_paths\x18\x03 \x03(\t\"B\n\x04\x43ode\x12\x06\n\x02OK\x10\x00\x12\t\n\x05\x45RROR\x10\x01\x12\x16\n\x12RECONCILE_REQUIRED\x10\x02\x12\x0f\n\x0bIN_PROGRESS\x10\x03\x32\xe9\x01\n\x11\x41gentOrchestrator\x12L\n\x11SyncConfiguration\x12\x1a.agent.RegistrationRequest\x1a\x1b.agent.RegistrationResponse\x12\x44\n\nTaskStream\x12\x18.agent.ClientTaskMessage\x1a\x18.agent.ServerTaskMessage(\x01\x30\x01\x12@\n\x0cReportHealth\x12\x10.agent.Heartbeat\x1a\x1a.agent.HealthCheckResponse(\x01\x30\x01\x62\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x16\x61pp/protos/agent.proto\x12\x05\x61gent\"\xde\x01\n\x13RegistrationRequest\x12\x0f\n\x07node_id\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x12\n\nauth_token\x18\x03 \x01(\t\x12\x18\n\x10node_description\x18\x04 \x01(\t\x12\x42\n\x0c\x63\x61pabilities\x18\x05 \x03(\x0b\x32,.agent.RegistrationRequest.CapabilitiesEntry\x1a\x33\n\x11\x43\x61pabilitiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc5\x01\n\rSandboxPolicy\x12\'\n\x04mode\x18\x01 \x01(\x0e\x32\x19.agent.SandboxPolicy.Mode\x12\x18\n\x10\x61llowed_commands\x18\x02 \x03(\t\x12\x17\n\x0f\x64\x65nied_commands\x18\x03 \x03(\t\x12\x1a\n\x12sensitive_commands\x18\x04 \x03(\t\x12\x18\n\x10working_dir_jail\x18\x05 \x01(\t\"\"\n\x04Mode\x12\n\n\x06STRICT\x10\x00\x12\x0e\n\nPERMISSIVE\x10\x01\"x\n\x14RegistrationResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12\x15\n\rerror_message\x18\x02 \x01(\t\x12\x12\n\nsession_id\x18\x03 \x01(\t\x12$\n\x06policy\x18\x04 \x01(\x0b\x32\x14.agent.SandboxPolicy\"\xa9\x02\n\x11\x43lientTaskMessage\x12,\n\rtask_response\x18\x01 \x01(\x0b\x32\x13.agent.TaskResponseH\x00\x12-\n\ntask_claim\x18\x02 \x01(\x0b\x32\x17.agent.TaskClaimRequestH\x00\x12,\n\rbrowser_event\x18\x03 
\x01(\x0b\x32\x13.agent.BrowserEventH\x00\x12\'\n\x08\x61nnounce\x18\x04 \x01(\x0b\x32\x13.agent.NodeAnnounceH\x00\x12+\n\tfile_sync\x18\x05 \x01(\x0b\x32\x16.agent.FileSyncMessageH\x00\x12(\n\x0bskill_event\x18\x06 \x01(\x0b\x32\x11.agent.SkillEventH\x00\x42\t\n\x07payload\"y\n\nSkillEvent\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x16\n\x0cterminal_out\x18\x03 \x01(\tH\x00\x12\x10\n\x06prompt\x18\x04 \x01(\tH\x00\x12\x14\n\nkeep_alive\x18\x05 \x01(\x08H\x00\x42\x06\n\x04\x64\x61ta\"\x1f\n\x0cNodeAnnounce\x12\x0f\n\x07node_id\x18\x01 \x01(\t\"\x87\x01\n\x0c\x42rowserEvent\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12,\n\x0b\x63onsole_msg\x18\x02 \x01(\x0b\x32\x15.agent.ConsoleMessageH\x00\x12,\n\x0bnetwork_req\x18\x03 \x01(\x0b\x32\x15.agent.NetworkRequestH\x00\x42\x07\n\x05\x65vent\"\xbc\x02\n\x11ServerTaskMessage\x12*\n\x0ctask_request\x18\x01 \x01(\x0b\x32\x12.agent.TaskRequestH\x00\x12\x31\n\x10work_pool_update\x18\x02 \x01(\x0b\x32\x15.agent.WorkPoolUpdateH\x00\x12\x30\n\x0c\x63laim_status\x18\x03 \x01(\x0b\x32\x18.agent.TaskClaimResponseH\x00\x12/\n\x0btask_cancel\x18\x04 \x01(\x0b\x32\x18.agent.TaskCancelRequestH\x00\x12+\n\tfile_sync\x18\x05 \x01(\x0b\x32\x16.agent.FileSyncMessageH\x00\x12-\n\rpolicy_update\x18\x06 \x01(\x0b\x32\x14.agent.SandboxPolicyH\x00\x42\t\n\x07payload\"$\n\x11TaskCancelRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\"\xd1\x01\n\x0bTaskRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x11\n\ttask_type\x18\x02 \x01(\t\x12\x16\n\x0cpayload_json\x18\x03 \x01(\tH\x00\x12.\n\x0e\x62rowser_action\x18\x07 \x01(\x0b\x32\x14.agent.BrowserActionH\x00\x12\x12\n\ntimeout_ms\x18\x04 \x01(\x05\x12\x10\n\x08trace_id\x18\x05 \x01(\t\x12\x11\n\tsignature\x18\x06 \x01(\t\x12\x12\n\nsession_id\x18\x08 \x01(\tB\t\n\x07payload\"\xa0\x02\n\rBrowserAction\x12/\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x1f.agent.BrowserAction.ActionType\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x10\n\x08selector\x18\x03 
\x01(\t\x12\x0c\n\x04text\x18\x04 \x01(\t\x12\x12\n\nsession_id\x18\x05 \x01(\t\x12\t\n\x01x\x18\x06 \x01(\x05\x12\t\n\x01y\x18\x07 \x01(\x05\"\x86\x01\n\nActionType\x12\x0c\n\x08NAVIGATE\x10\x00\x12\t\n\x05\x43LICK\x10\x01\x12\x08\n\x04TYPE\x10\x02\x12\x0e\n\nSCREENSHOT\x10\x03\x12\x0b\n\x07GET_DOM\x10\x04\x12\t\n\x05HOVER\x10\x05\x12\n\n\x06SCROLL\x10\x06\x12\t\n\x05\x43LOSE\x10\x07\x12\x08\n\x04\x45VAL\x10\x08\x12\x0c\n\x08GET_A11Y\x10\t\"\xe0\x02\n\x0cTaskResponse\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12*\n\x06status\x18\x02 \x01(\x0e\x32\x1a.agent.TaskResponse.Status\x12\x0e\n\x06stdout\x18\x03 \x01(\t\x12\x0e\n\x06stderr\x18\x04 \x01(\t\x12\x10\n\x08trace_id\x18\x05 \x01(\t\x12\x35\n\tartifacts\x18\x06 \x03(\x0b\x32\".agent.TaskResponse.ArtifactsEntry\x12\x30\n\x0e\x62rowser_result\x18\x07 \x01(\x0b\x32\x16.agent.BrowserResponseH\x00\x1a\x30\n\x0e\x41rtifactsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\"<\n\x06Status\x12\x0b\n\x07SUCCESS\x10\x00\x12\t\n\x05\x45RROR\x10\x01\x12\x0b\n\x07TIMEOUT\x10\x02\x12\r\n\tCANCELLED\x10\x03\x42\x08\n\x06result\"\xdc\x01\n\x0f\x42rowserResponse\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x10\n\x08snapshot\x18\x03 \x01(\x0c\x12\x13\n\x0b\x64om_content\x18\x04 \x01(\t\x12\x11\n\ta11y_tree\x18\x05 \x01(\t\x12\x13\n\x0b\x65val_result\x18\x06 \x01(\t\x12.\n\x0f\x63onsole_history\x18\x07 \x03(\x0b\x32\x15.agent.ConsoleMessage\x12.\n\x0fnetwork_history\x18\x08 \x03(\x0b\x32\x15.agent.NetworkRequest\"C\n\x0e\x43onsoleMessage\x12\r\n\x05level\x18\x01 \x01(\t\x12\x0c\n\x04text\x18\x02 \x01(\t\x12\x14\n\x0ctimestamp_ms\x18\x03 \x01(\x03\"h\n\x0eNetworkRequest\x12\x0e\n\x06method\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\x05\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x12\n\nlatency_ms\x18\x05 \x01(\x03\",\n\x0eWorkPoolUpdate\x12\x1a\n\x12\x61vailable_task_ids\x18\x01 
\x03(\t\"4\n\x10TaskClaimRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x0f\n\x07node_id\x18\x02 \x01(\t\"E\n\x11TaskClaimResponse\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x0f\n\x07granted\x18\x02 \x01(\x08\x12\x0e\n\x06reason\x18\x03 \x01(\t\"\xe6\x02\n\tHeartbeat\x12\x0f\n\x07node_id\x18\x01 \x01(\t\x12\x19\n\x11\x63pu_usage_percent\x18\x02 \x01(\x02\x12\x1c\n\x14memory_usage_percent\x18\x03 \x01(\x02\x12\x1b\n\x13\x61\x63tive_worker_count\x18\x04 \x01(\x05\x12\x1b\n\x13max_worker_capacity\x18\x05 \x01(\x05\x12\x16\n\x0estatus_message\x18\x06 \x01(\t\x12\x18\n\x10running_task_ids\x18\x07 \x03(\t\x12\x11\n\tcpu_count\x18\x08 \x01(\x05\x12\x16\n\x0ememory_used_gb\x18\t \x01(\x02\x12\x17\n\x0fmemory_total_gb\x18\n \x01(\x02\x12\x1a\n\x12\x63pu_usage_per_core\x18\x0b \x03(\x02\x12\x14\n\x0c\x63pu_freq_mhz\x18\x0c \x01(\x02\x12\x1b\n\x13memory_available_gb\x18\r \x01(\x02\x12\x10\n\x08load_avg\x18\x0e \x03(\x02\"-\n\x13HealthCheckResponse\x12\x16\n\x0eserver_time_ms\x18\x01 \x01(\x03\"\xe4\x01\n\x0f\x46ileSyncMessage\x12\x12\n\nsession_id\x18\x01 \x01(\t\x12,\n\x08manifest\x18\x02 \x01(\x0b\x32\x18.agent.DirectoryManifestH\x00\x12\'\n\tfile_data\x18\x03 \x01(\x0b\x32\x12.agent.FilePayloadH\x00\x12#\n\x06status\x18\x04 \x01(\x0b\x32\x11.agent.SyncStatusH\x00\x12%\n\x07\x63ontrol\x18\x05 \x01(\x0b\x32\x12.agent.SyncControlH\x00\x12\x0f\n\x07task_id\x18\x06 \x01(\tB\t\n\x07payload\"\xab\x02\n\x0bSyncControl\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.agent.SyncControl.Action\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\x15\n\rrequest_paths\x18\x03 \x03(\t\x12\x0f\n\x07\x63ontent\x18\x04 \x01(\x0c\x12\x0e\n\x06is_dir\x18\x05 
\x01(\x08\"\xaa\x01\n\x06\x41\x63tion\x12\x12\n\x0eSTART_WATCHING\x10\x00\x12\x11\n\rSTOP_WATCHING\x10\x01\x12\x08\n\x04LOCK\x10\x02\x12\n\n\x06UNLOCK\x10\x03\x12\x14\n\x10REFRESH_MANIFEST\x10\x04\x12\n\n\x06RESYNC\x10\x05\x12\x08\n\x04LIST\x10\x06\x12\x08\n\x04READ\x10\x07\x12\t\n\x05WRITE\x10\x08\x12\n\n\x06\x44\x45LETE\x10\t\x12\t\n\x05PURGE\x10\n\x12\x0b\n\x07\x43LEANUP\x10\x0b\"F\n\x11\x44irectoryManifest\x12\x11\n\troot_path\x18\x01 \x01(\t\x12\x1e\n\x05\x66iles\x18\x02 \x03(\x0b\x32\x0f.agent.FileInfo\"D\n\x08\x46ileInfo\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x0c\n\x04size\x18\x02 \x01(\x03\x12\x0c\n\x04hash\x18\x03 \x01(\t\x12\x0e\n\x06is_dir\x18\x04 \x01(\x08\"_\n\x0b\x46ilePayload\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\r\n\x05\x63hunk\x18\x02 \x01(\x0c\x12\x13\n\x0b\x63hunk_index\x18\x03 \x01(\x05\x12\x10\n\x08is_final\x18\x04 \x01(\x08\x12\x0c\n\x04hash\x18\x05 \x01(\t\"\xa0\x01\n\nSyncStatus\x12$\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x16.agent.SyncStatus.Code\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x17\n\x0freconcile_paths\x18\x03 \x03(\t\"B\n\x04\x43ode\x12\x06\n\x02OK\x10\x00\x12\t\n\x05\x45RROR\x10\x01\x12\x16\n\x12RECONCILE_REQUIRED\x10\x02\x12\x0f\n\x0bIN_PROGRESS\x10\x03\x32\xe9\x01\n\x11\x41gentOrchestrator\x12L\n\x11SyncConfiguration\x12\x1a.agent.RegistrationRequest\x1a\x1b.agent.RegistrationResponse\x12\x44\n\nTaskStream\x12\x18.agent.ClientTaskMessage\x1a\x18.agent.ServerTaskMessage(\x01\x30\x01\x12@\n\x0cReportHealth\x12\x10.agent.Heartbeat\x1a\x1a.agent.HealthCheckResponse(\x01\x30\x01\x62\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'agent_pb2', _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'app.protos.agent_pb2', _globals) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None _globals['_REGISTRATIONREQUEST_CAPABILITIESENTRY']._options = None 
_globals['_REGISTRATIONREQUEST_CAPABILITIESENTRY']._serialized_options = b'8\001' _globals['_TASKRESPONSE_ARTIFACTSENTRY']._options = None _globals['_TASKRESPONSE_ARTIFACTSENTRY']._serialized_options = b'8\001' - _globals['_REGISTRATIONREQUEST']._serialized_start=23 - _globals['_REGISTRATIONREQUEST']._serialized_end=245 - _globals['_REGISTRATIONREQUEST_CAPABILITIESENTRY']._serialized_start=194 - _globals['_REGISTRATIONREQUEST_CAPABILITIESENTRY']._serialized_end=245 - _globals['_SANDBOXPOLICY']._serialized_start=248 - _globals['_SANDBOXPOLICY']._serialized_end=445 - _globals['_SANDBOXPOLICY_MODE']._serialized_start=411 - _globals['_SANDBOXPOLICY_MODE']._serialized_end=445 - _globals['_REGISTRATIONRESPONSE']._serialized_start=447 - _globals['_REGISTRATIONRESPONSE']._serialized_end=567 - _globals['_CLIENTTASKMESSAGE']._serialized_start=570 - _globals['_CLIENTTASKMESSAGE']._serialized_end=867 - _globals['_SKILLEVENT']._serialized_start=869 - _globals['_SKILLEVENT']._serialized_end=990 - _globals['_NODEANNOUNCE']._serialized_start=992 - _globals['_NODEANNOUNCE']._serialized_end=1023 - _globals['_BROWSEREVENT']._serialized_start=1026 - _globals['_BROWSEREVENT']._serialized_end=1161 - _globals['_SERVERTASKMESSAGE']._serialized_start=1164 - _globals['_SERVERTASKMESSAGE']._serialized_end=1433 - _globals['_TASKCANCELREQUEST']._serialized_start=1435 - _globals['_TASKCANCELREQUEST']._serialized_end=1471 - _globals['_TASKREQUEST']._serialized_start=1474 - _globals['_TASKREQUEST']._serialized_end=1683 - _globals['_BROWSERACTION']._serialized_start=1686 - _globals['_BROWSERACTION']._serialized_end=1974 - _globals['_BROWSERACTION_ACTIONTYPE']._serialized_start=1840 - _globals['_BROWSERACTION_ACTIONTYPE']._serialized_end=1974 - _globals['_TASKRESPONSE']._serialized_start=1977 - _globals['_TASKRESPONSE']._serialized_end=2329 - _globals['_TASKRESPONSE_ARTIFACTSENTRY']._serialized_start=2209 - _globals['_TASKRESPONSE_ARTIFACTSENTRY']._serialized_end=2257 - 
_globals['_TASKRESPONSE_STATUS']._serialized_start=2259 - _globals['_TASKRESPONSE_STATUS']._serialized_end=2319 - _globals['_BROWSERRESPONSE']._serialized_start=2332 - _globals['_BROWSERRESPONSE']._serialized_end=2552 - _globals['_CONSOLEMESSAGE']._serialized_start=2554 - _globals['_CONSOLEMESSAGE']._serialized_end=2621 - _globals['_NETWORKREQUEST']._serialized_start=2623 - _globals['_NETWORKREQUEST']._serialized_end=2727 - _globals['_WORKPOOLUPDATE']._serialized_start=2729 - _globals['_WORKPOOLUPDATE']._serialized_end=2773 - _globals['_TASKCLAIMREQUEST']._serialized_start=2775 - _globals['_TASKCLAIMREQUEST']._serialized_end=2827 - _globals['_TASKCLAIMRESPONSE']._serialized_start=2829 - _globals['_TASKCLAIMRESPONSE']._serialized_end=2898 - _globals['_HEARTBEAT']._serialized_start=2901 - _globals['_HEARTBEAT']._serialized_end=3259 - _globals['_HEALTHCHECKRESPONSE']._serialized_start=3261 - _globals['_HEALTHCHECKRESPONSE']._serialized_end=3306 - _globals['_FILESYNCMESSAGE']._serialized_start=3309 - _globals['_FILESYNCMESSAGE']._serialized_end=3537 - _globals['_SYNCCONTROL']._serialized_start=3540 - _globals['_SYNCCONTROL']._serialized_end=3839 - _globals['_SYNCCONTROL_ACTION']._serialized_start=3669 - _globals['_SYNCCONTROL_ACTION']._serialized_end=3839 - _globals['_DIRECTORYMANIFEST']._serialized_start=3841 - _globals['_DIRECTORYMANIFEST']._serialized_end=3911 - _globals['_FILEINFO']._serialized_start=3913 - _globals['_FILEINFO']._serialized_end=3981 - _globals['_FILEPAYLOAD']._serialized_start=3983 - _globals['_FILEPAYLOAD']._serialized_end=4078 - _globals['_SYNCSTATUS']._serialized_start=4081 - _globals['_SYNCSTATUS']._serialized_end=4241 - _globals['_SYNCSTATUS_CODE']._serialized_start=4175 - _globals['_SYNCSTATUS_CODE']._serialized_end=4241 - _globals['_AGENTORCHESTRATOR']._serialized_start=4244 - _globals['_AGENTORCHESTRATOR']._serialized_end=4477 + _globals['_REGISTRATIONREQUEST']._serialized_start=34 + _globals['_REGISTRATIONREQUEST']._serialized_end=256 + 
_globals['_REGISTRATIONREQUEST_CAPABILITIESENTRY']._serialized_start=205 + _globals['_REGISTRATIONREQUEST_CAPABILITIESENTRY']._serialized_end=256 + _globals['_SANDBOXPOLICY']._serialized_start=259 + _globals['_SANDBOXPOLICY']._serialized_end=456 + _globals['_SANDBOXPOLICY_MODE']._serialized_start=422 + _globals['_SANDBOXPOLICY_MODE']._serialized_end=456 + _globals['_REGISTRATIONRESPONSE']._serialized_start=458 + _globals['_REGISTRATIONRESPONSE']._serialized_end=578 + _globals['_CLIENTTASKMESSAGE']._serialized_start=581 + _globals['_CLIENTTASKMESSAGE']._serialized_end=878 + _globals['_SKILLEVENT']._serialized_start=880 + _globals['_SKILLEVENT']._serialized_end=1001 + _globals['_NODEANNOUNCE']._serialized_start=1003 + _globals['_NODEANNOUNCE']._serialized_end=1034 + _globals['_BROWSEREVENT']._serialized_start=1037 + _globals['_BROWSEREVENT']._serialized_end=1172 + _globals['_SERVERTASKMESSAGE']._serialized_start=1175 + _globals['_SERVERTASKMESSAGE']._serialized_end=1491 + _globals['_TASKCANCELREQUEST']._serialized_start=1493 + _globals['_TASKCANCELREQUEST']._serialized_end=1529 + _globals['_TASKREQUEST']._serialized_start=1532 + _globals['_TASKREQUEST']._serialized_end=1741 + _globals['_BROWSERACTION']._serialized_start=1744 + _globals['_BROWSERACTION']._serialized_end=2032 + _globals['_BROWSERACTION_ACTIONTYPE']._serialized_start=1898 + _globals['_BROWSERACTION_ACTIONTYPE']._serialized_end=2032 + _globals['_TASKRESPONSE']._serialized_start=2035 + _globals['_TASKRESPONSE']._serialized_end=2387 + _globals['_TASKRESPONSE_ARTIFACTSENTRY']._serialized_start=2267 + _globals['_TASKRESPONSE_ARTIFACTSENTRY']._serialized_end=2315 + _globals['_TASKRESPONSE_STATUS']._serialized_start=2317 + _globals['_TASKRESPONSE_STATUS']._serialized_end=2377 + _globals['_BROWSERRESPONSE']._serialized_start=2390 + _globals['_BROWSERRESPONSE']._serialized_end=2610 + _globals['_CONSOLEMESSAGE']._serialized_start=2612 + _globals['_CONSOLEMESSAGE']._serialized_end=2679 + 
_globals['_NETWORKREQUEST']._serialized_start=2681 + _globals['_NETWORKREQUEST']._serialized_end=2785 + _globals['_WORKPOOLUPDATE']._serialized_start=2787 + _globals['_WORKPOOLUPDATE']._serialized_end=2831 + _globals['_TASKCLAIMREQUEST']._serialized_start=2833 + _globals['_TASKCLAIMREQUEST']._serialized_end=2885 + _globals['_TASKCLAIMRESPONSE']._serialized_start=2887 + _globals['_TASKCLAIMRESPONSE']._serialized_end=2956 + _globals['_HEARTBEAT']._serialized_start=2959 + _globals['_HEARTBEAT']._serialized_end=3317 + _globals['_HEALTHCHECKRESPONSE']._serialized_start=3319 + _globals['_HEALTHCHECKRESPONSE']._serialized_end=3364 + _globals['_FILESYNCMESSAGE']._serialized_start=3367 + _globals['_FILESYNCMESSAGE']._serialized_end=3595 + _globals['_SYNCCONTROL']._serialized_start=3598 + _globals['_SYNCCONTROL']._serialized_end=3897 + _globals['_SYNCCONTROL_ACTION']._serialized_start=3727 + _globals['_SYNCCONTROL_ACTION']._serialized_end=3897 + _globals['_DIRECTORYMANIFEST']._serialized_start=3899 + _globals['_DIRECTORYMANIFEST']._serialized_end=3969 + _globals['_FILEINFO']._serialized_start=3971 + _globals['_FILEINFO']._serialized_end=4039 + _globals['_FILEPAYLOAD']._serialized_start=4041 + _globals['_FILEPAYLOAD']._serialized_end=4136 + _globals['_SYNCSTATUS']._serialized_start=4139 + _globals['_SYNCSTATUS']._serialized_end=4299 + _globals['_SYNCSTATUS_CODE']._serialized_start=4233 + _globals['_SYNCSTATUS_CODE']._serialized_end=4299 + _globals['_AGENTORCHESTRATOR']._serialized_start=4302 + _globals['_AGENTORCHESTRATOR']._serialized_end=4535 # @@protoc_insertion_point(module_scope) diff --git a/ai-hub/app/protos/agent_pb2_grpc.py b/ai-hub/app/protos/agent_pb2_grpc.py index 932d45e..90b55ee 100644 --- a/ai-hub/app/protos/agent_pb2_grpc.py +++ b/ai-hub/app/protos/agent_pb2_grpc.py @@ -2,7 +2,7 @@ """Client and server classes corresponding to protobuf-defined services.""" import grpc -import agent_pb2 as agent__pb2 +from app.protos import agent_pb2 as 
app_dot_protos_dot_agent__pb2 class AgentOrchestratorStub(object): @@ -17,18 +17,18 @@ """ self.SyncConfiguration = channel.unary_unary( '/agent.AgentOrchestrator/SyncConfiguration', - request_serializer=agent__pb2.RegistrationRequest.SerializeToString, - response_deserializer=agent__pb2.RegistrationResponse.FromString, + request_serializer=app_dot_protos_dot_agent__pb2.RegistrationRequest.SerializeToString, + response_deserializer=app_dot_protos_dot_agent__pb2.RegistrationResponse.FromString, ) self.TaskStream = channel.stream_stream( '/agent.AgentOrchestrator/TaskStream', - request_serializer=agent__pb2.ClientTaskMessage.SerializeToString, - response_deserializer=agent__pb2.ServerTaskMessage.FromString, + request_serializer=app_dot_protos_dot_agent__pb2.ClientTaskMessage.SerializeToString, + response_deserializer=app_dot_protos_dot_agent__pb2.ServerTaskMessage.FromString, ) self.ReportHealth = channel.stream_stream( '/agent.AgentOrchestrator/ReportHealth', - request_serializer=agent__pb2.Heartbeat.SerializeToString, - response_deserializer=agent__pb2.HealthCheckResponse.FromString, + request_serializer=app_dot_protos_dot_agent__pb2.Heartbeat.SerializeToString, + response_deserializer=app_dot_protos_dot_agent__pb2.HealthCheckResponse.FromString, ) @@ -62,18 +62,18 @@ rpc_method_handlers = { 'SyncConfiguration': grpc.unary_unary_rpc_method_handler( servicer.SyncConfiguration, - request_deserializer=agent__pb2.RegistrationRequest.FromString, - response_serializer=agent__pb2.RegistrationResponse.SerializeToString, + request_deserializer=app_dot_protos_dot_agent__pb2.RegistrationRequest.FromString, + response_serializer=app_dot_protos_dot_agent__pb2.RegistrationResponse.SerializeToString, ), 'TaskStream': grpc.stream_stream_rpc_method_handler( servicer.TaskStream, - request_deserializer=agent__pb2.ClientTaskMessage.FromString, - response_serializer=agent__pb2.ServerTaskMessage.SerializeToString, + 
request_deserializer=app_dot_protos_dot_agent__pb2.ClientTaskMessage.FromString, + response_serializer=app_dot_protos_dot_agent__pb2.ServerTaskMessage.SerializeToString, ), 'ReportHealth': grpc.stream_stream_rpc_method_handler( servicer.ReportHealth, - request_deserializer=agent__pb2.Heartbeat.FromString, - response_serializer=agent__pb2.HealthCheckResponse.SerializeToString, + request_deserializer=app_dot_protos_dot_agent__pb2.Heartbeat.FromString, + response_serializer=app_dot_protos_dot_agent__pb2.HealthCheckResponse.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( @@ -98,8 +98,8 @@ timeout=None, metadata=None): return grpc.experimental.unary_unary(request, target, '/agent.AgentOrchestrator/SyncConfiguration', - agent__pb2.RegistrationRequest.SerializeToString, - agent__pb2.RegistrationResponse.FromString, + app_dot_protos_dot_agent__pb2.RegistrationRequest.SerializeToString, + app_dot_protos_dot_agent__pb2.RegistrationResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @@ -115,8 +115,8 @@ timeout=None, metadata=None): return grpc.experimental.stream_stream(request_iterator, target, '/agent.AgentOrchestrator/TaskStream', - agent__pb2.ClientTaskMessage.SerializeToString, - agent__pb2.ServerTaskMessage.FromString, + app_dot_protos_dot_agent__pb2.ClientTaskMessage.SerializeToString, + app_dot_protos_dot_agent__pb2.ServerTaskMessage.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @@ -132,7 +132,7 @@ timeout=None, metadata=None): return grpc.experimental.stream_stream(request_iterator, target, '/agent.AgentOrchestrator/ReportHealth', - agent__pb2.Heartbeat.SerializeToString, - agent__pb2.HealthCheckResponse.FromString, + app_dot_protos_dot_agent__pb2.Heartbeat.SerializeToString, + app_dot_protos_dot_agent__pb2.HealthCheckResponse.FromString, options, channel_credentials, insecure, 
call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/docker-compose.test-nodes.yml b/docker-compose.test-nodes.yml index cb7dbc5..722f2fa 100644 --- a/docker-compose.test-nodes.yml +++ b/docker-compose.test-nodes.yml @@ -13,8 +13,7 @@ - AGENT_SECRET_KEY=aYc2j1lYUUZXkBFFUndnleZI - AGENT_AUTH_TOKEN=cortex-secret-shared-key - AGENT_TLS_ENABLED=false - networks: - - cortex-hub_default + - DEBUG_GRPC=true restart: unless-stopped test-node-2: @@ -28,10 +27,5 @@ - AGENT_SECRET_KEY=aYc2j1lYUUZXkBFFUndnleZI - AGENT_AUTH_TOKEN=ysHjZIRXeWo-YYK6EWtBsIgJ4uNBihSnZMtt0BQW3eI - AGENT_TLS_ENABLED=false - networks: - - cortex-hub_default + - DEBUG_GRPC=true restart: unless-stopped - -networks: - cortex-hub_default: - external: true diff --git a/docker-compose.yml b/docker-compose.yml index d474455..8e641d8 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -39,6 +39,7 @@ - SUPER_ADMINS=axieyangb@gmail.com,jerxie.app@gmail.com # IMPORTANT: Agent nodes must have AGENT_SECRET_KEY set to this same value - SECRET_KEY=aYc2j1lYUUZXkBFFUndnleZI + - DEBUG_GRPC=true volumes: - ai_hub_data:/app/data:rw - ./agent-node:/app/agent-node-source:ro diff --git a/docs/features/coding_assistant.md b/docs/features/coding_assistant.md deleted file mode 100644 index b077281..0000000 --- a/docs/features/coding_assistant.md +++ /dev/null @@ -1,96 +0,0 @@ -# Feature Reference: Coding Assistant Interface - -This document serves as the comprehensive reference for the **Coding Assistant** interface in the Cortex platform (`CodingAssistantPage.js`). It describes the UI structure, design philosophy, user journeys, and technical integration with the Agent Node Mesh. - ---- - -## 1. UI Overview & Layout Structure - -The Coding Assistant page is the centralized workspace for AI-assisted software development. It features a responsive flex layout combining session management, conversational AI, and distributed node orchestration within a single unified view. - -### A. 
Global Layout Array -- **Sidebar (`SessionSidebar.js`)**: Located on the left, handles session history, switching, and creating new conversations. -- **Main Workspace (`CodingAssistantPage.js`)**: Occupies the center/right area, framing the main conversation and control elements. - -### B. Workspace Header -- **Title Block**: Displays the "Coding Assistant" title and an at-a-glance mesh summary (e.g., `Mesh: 2 Online / 5 Total`). -- **Node Attachment Bar (M3/M6)**: - - **Compact Display**: Instead of listing all accessible nodes, the UI displays a compact visualization of currently *attached* nodes (e.g., overlapping indicators or a single summary pill). - - **File Sync Indicator**: If nodes are attached and a sync strategy is active (i.e., not 'empty'), an icon indicates that file sync is enabled. Hovering over this indicator reveals a tooltip with the exact mounting source (e.g., 'Server Sync' or 'Local Node Path: /workspace'). - - **Quick Action Icons**: - - **Manage Mesh Nodes**: Opens the detailed Node Selector modal to attach/detach compute resources. - - **Toggle Execution Console**: Reveals/hides the bottom `MultiNodeConsole` (only visible when nodes are attached). - - **Session Engine (Gear)**: Opens LLM provider configurations. -- **Resource Monitors**: - - **Token Usage Bar**: Gradient progress bar (Indigo to Red) tracking context window saturation. - - **New Chat Button**: Instantly provisions a fresh session. -- **Configuration Warnings**: Highlights missing keys or LLM configuration requirements via tooltips. - -### C. Conversational Arena (`ChatArea.js` & `ChatWindow.js`) -- **Scrollable Chat Window**: Renders the conversation history between the user and the AI. -- **Rich Message Rendering (`ChatMessage`)**: - - **Markdown Support**: Full GitHub-flavored markdown rendering. - - **Collapsible Reasoning**: AI "thought processes" can be toggled via "Show Reasoning ▼" to reduce visual clutter. 
- - **Utilities**: One-click copy, timestamp display, and playback tools (primarily leveraged if Voice Chat features overlap). -- **Sticky Input Bar**: Standard auto-resizing text area for sending instructions to the active LLM. - -### D. Multi-Node Execution Console (`MultiNodeConsole.js`) -- **Position**: Anchored at the bottom of the workspace (when toggled active). -- **Purpose**: A live WebSocket-powered gRPC stream multiplexer. -- **Layout**: Splits vertically for each attached node, displaying: - - Node Identity and Real-time Metrics (CPU, Memory, Active Workers) in the sub-header. - - Streaming event log (Task assignments, execution progress, errors, browser orchestration events, sync status). - - Auto-scrolling terminal window. - ---- - -## 2. Expanded Feature Modals - -### A. Mesh Node Selection Modal -- **Purpose**: Map raw compute nodes to the current chat session. -- **Initialization Strategy**: - - **Empty**: Boot up the execution context with a clean slate. - - **Sync from Server**: Clone the master "Ghost Mirror" codebase state down to the execution nodes. - - **Initialize from Node**: Use an existing local absolute path on the node (useful for massive pre-existing local repositories). -- **Attachment Controls**: Checkbox list of nodes to select or deselect. -- **Save & Sync Workflow**: Changes made in the modal are held in a draft state and only applied when the user explicitly clicks the **Save & Sync** button. Clicking **Cancel** instantly reverts the selections to the actual truth from the server. - -### B. Session Engine Overlay (LLM Config) -- **Purpose**: Switch the logical "Brain" powering the assistant on the fly. -- **Controls**: Dropdown selector parsing through registered providers and models available to the user based on their platform configuration. - -### C. Error Handling Modal -- **Purpose**: High-visibility attention grabber for critical context or backend errors that halt execution. - ---- - -## 3. 
Core Capabilities & User Journey - -### Journey: AI-Driven Multi-Node Orchestration -1. **Initialize**: User navigates to the Coding Assistant and starts a `/new` session. -2. **Attach Compute**: User clicks the Mesh Node Selector icon, selects a strategy (e.g., `Sync from Server`), and attaches an available (green) node like `test-node-1`. -3. **Monitor Execution**: The user instructs the AI: *"Run the backend unit tests."* -4. **Visibility**: The `MultiNodeConsole` opens automatically. The user sees the AI dispatching commands over the Execution Live Bus. -5. **Review Execution**: The user verifies the command outputs or traces directly inside the `ChatWindow` as the AI responds. -6. **Commit**: Any generated files or changes are automatically synced back upward via the node's local sync daemon. - ---- - -## 4. Source Code Mapping - -| Component | UI Purpose | Source Path | -| :--- | :--- | :--- | -| `CodingAssistantPage.js` | Main layout, state coordination, node linking | `ui/client-app/src/pages/CodingAssistantPage.js` | -| `ChatArea.js` | Layout wrapper for scroll behavior and text input | `ui/client-app/src/components/ChatArea.js` | -| `ChatWindow.js` | Complex message rendering, reasoning toggles | `ui/client-app/src/components/ChatWindow.js` | -| `MultiNodeConsole.js` | Bottom-anchored live WebSocket log stream | `ui/client-app/src/components/MultiNodeConsole.js` | -| `apiService.js` | Backend hooks for `attachNodesToSession`, LLM fetching | `ui/client-app/src/services/apiService.js` | - ---- - -## 5. Architectural Principles - -- **Separation of Concerns**: The AI's conversational context/history is completely decoupled from the actual Node execution state. The UI simply acts as a bridge displaying both simultaneously. -- **Stateless Client Polling**: The React layer assumes the backend is the authoritative source of truth for node attachments. It polls `getSessionNodeStatus` every 5 seconds to ensure the top-bar badges reflect reality. 
Modal interactions temporarily suppress this polling to avoid overwriting user edits until the "Save & Sync" submission. -- **Incremental & Deletion Reconciliation**: File synchronization between the Hub's Ghost Mirror and attached Agent Nodes strictly tracks and mirrors file creations, modifications, and deletions for true bidirectional consistency. -- **Responsive Degradation**: If no nodes are available, the interface degrades gracefully back into a standard LLM chat interface, abstracting away orchestration complexities from standard users. diff --git a/docs/features/swarm_control.md b/docs/features/swarm_control.md new file mode 100644 index 0000000..b554d05 --- /dev/null +++ b/docs/features/swarm_control.md @@ -0,0 +1,99 @@ +# Feature Reference: Swarm Control Interface + +This document serves as the comprehensive reference for the **Swarm Control** interface in the Cortex platform (`SwarmControlPage.js`). It describes the UI structure, terminal-centric design, and the "AI Observation" workflow. + +--- + +## 1. UI Overview & Layout Structure + +The Swarm Control page is the centralized workspace for AI-assisted software development. It features a responsive flex layout combining session management, conversational AI, and distributed node orchestration within a single unified view. + +### A. Global Layout Array +- **Sidebar (`SessionSidebar.js`)**: Located on the left, handles session history, switching, and creating new conversations. +- **Main Workspace (`SwarmControlPage.js`)**: Occupies the center/right area, framing the main conversation and control elements. + +### B. Workspace Header +- **Title Block**: Displays the "Swarm Control" title and an at-a-glance mesh summary (e.g., `Mesh: 2 Online / 5 Total`). +- **Node Attachment Bar (M3/M6)**: + - **Compact Display**: Instead of listing all accessible nodes, the UI displays a compact visualization of currently *attached* nodes (e.g., overlapping indicators or a single summary pill).
+ - **File Sync Indicator**: If nodes are attached and a sync strategy is active (i.e., not 'empty'), an icon indicates that file sync is enabled. Hovering over this indicator reveals a tooltip with the exact mounting source (e.g., 'Server Sync' or 'Local Node Path: /workspace'). + - **Quick Action Icons**: + - **Manage Mesh Nodes**: Opens the detailed Node Selector modal to attach/detach compute resources. + - **Toggle Execution Console**: Reveals/hides the bottom `MultiNodeConsole` (only visible when nodes are attached). + - **Session Engine (Gear)**: Opens LLM provider configurations. +- **Resource Monitors**: + - **Token Usage Bar**: Gradient progress bar (Indigo to Red) tracking context window saturation. + - **New Chat Button**: Instantly provisions a fresh session. +- **Configuration Warnings**: Highlights missing keys or LLM configuration requirements via tooltips. + +### C. Conversational Arena (`ChatArea.js` & `ChatWindow.js`) +- **Scrollable Chat Window**: Renders the conversation history between the user and the AI. +- **Rich Message Rendering (`ChatMessage`)**: + - **Markdown Support**: Full GitHub-flavored markdown rendering. + - **Collapsible Reasoning**: AI "thought processes" can be toggled via "Show Reasoning ▼" to reduce visual clutter. + - **Utilities**: One-click copy, timestamp display, and playback tools (primarily leveraged if Voice Chat features overlap). +- **Sticky Input Bar**: Standard auto-resizing text area for sending instructions to the active LLM. + +### D. Swarm Execution Terminal (`MultiNodeConsole.js`) +- **Position**: Anchored at the bottom of the workspace (when toggled active). +- **Purpose**: A real-time, terminal-centric viewport for monitoring distributed agent execution. +- **Xterm.js Integration**: Replaces legacy log lists with a high-performance terminal that renders ANSI colors and raw PTY output. 
+- **Read-Only Observation Mode**: The terminal is strictly read-only for users to preserve the integrity of the AI's execution context. +- **AI Context Banner**: Displays a prominent notice: *"Terminal Notice: This viewport is surfaced to AI. Leaving it empty preserves context integrity."* +- **Layout**: Splits vertically for each attached node, displaying: + - **Live Hardware Header**: Real-time stats for CPU, Memory, and Active Workers. + - **Command Transparency**: Displays the actual shell commands executed by the AI (e.g., `$ python test.py`). + - **Auto-Scrolling**: High-velocity output handling with automatic scroll-to-bottom. + +--- + +## 2. Expanded Feature Modals + +### A. Mesh Node Selection Modal +- **Purpose**: Map raw compute nodes to the current chat session. +- **Initialization Strategy**: + - **Empty**: Boot up the execution context with a clean slate. + - **Sync from Server**: Clone the master "Ghost Mirror" codebase state down to the execution nodes. + - **Initialize from Node**: Use an existing local absolute path on the node (useful for massive pre-existing local repositories). +- **Attachment Controls**: Checkbox list of nodes to select or deselect. +- **Save & Sync Workflow**: Changes made in the modal are held in a draft state and only applied when the user explicitly clicks the **Save & Sync** button. Clicking **Cancel** instantly reverts the selections to the actual truth from the server. + +### B. Session Engine Overlay (LLM Config) +- **Purpose**: Switch the logical "Brain" powering the assistant on the fly. +- **Controls**: Dropdown selector parsing through registered providers and models available to the user based on their platform configuration. + +### C. Error Handling Modal +- **Purpose**: High-visibility attention grabber for critical context or backend errors that halt execution. + +--- + +## 3. Core Capabilities & User Journey + +### Journey: AI-Driven Multi-Node Orchestration +1. 
**Initialize**: User navigates to the Swarm Control page and starts a `/new` session. +2. **Attach Compute**: User attaches available nodes (e.g., `test-node-1`) with a chosen sync strategy. +3. **Observation Pulse**: The user instructs the AI: *"Analyze the logs in /var/log/syslog on all nodes."* +4. **Visibility**: The `Swarm Execution Terminal` slides up automatically. The user sees raw terminal output streaming in real-time. +5. **AI "Sight"**: The AI automatically reads the terminal history (via `GET /api/v1/nodes/{id}/terminal`) to observe results, even if they return non-zero exit codes. +6. **Stateful Interaction**: The AI uses the terminal's PTY session to maintain state (e.g., `cd` and environment variables) across multiple chat turns. + +--- + +## 4. Source Code Mapping + +| Component | UI Purpose | Source Path | +| :--- | :--- | :--- | +| `SwarmControlPage.js` | Main layout, state coordination, node linking | `ui/client-app/src/pages/SwarmControlPage.js` | +| `ChatArea.js` | Layout wrapper for scroll behavior and text input | `ui/client-app/src/components/ChatArea.js` | +| `ChatWindow.js` | Complex message rendering, reasoning toggles | `ui/client-app/src/components/ChatWindow.js` | +| `MultiNodeConsole.js` | Bottom-anchored live WebSocket log stream | `ui/client-app/src/components/MultiNodeConsole.js` | +| `apiService.js` | Backend hooks for `attachNodesToSession`, LLM fetching | `ui/client-app/src/services/apiService.js` | + +--- + +## 5. Architectural Principles + +- **Terminal-First Observability**: The node execution interface is designed to mirror a real developer's console. This provides the AI with "eyes" on the raw system state and gives users confidence by showing exactly what commands are hitting their infrastructure. +- **Context Integrity**: By enforcing read-only terminals in the UI, we ensure that the terminal state seen by the AI (via the backend history buffer) always matches what the user sees in the viewport. 
+- **Stateless Client Polling**: The React layer assumes the backend is the authoritative source of truth for node attachments. It polls `getSessionNodeStatus` every 5 seconds to ensure the top-bar badges reflect reality. +- **Multiplexed Resilience**: The WebSocket connection is multiplexed and self-healing, using a `wasIntentional` closing flag and content-based dependency tracking to prevent reconnection loops during rapid UI updates. diff --git a/local_rebuild.sh b/local_rebuild.sh index a394718..f965134 100755 --- a/local_rebuild.sh +++ b/local_rebuild.sh @@ -51,9 +51,9 @@ fi # We use --remove-orphans only if we are SURE we want to clean up everything not in these files. -sudo $DOCKER_CMD $COMPOSE_FILES up -d --build --remove-orphans - +sudo $DOCKER_CMD $COMPOSE_FILES up -d --build --remove-orphans > /tmp/deploy_log.txt 2>&1 echo "✅ Containers started! Checking status..." +cat /tmp/deploy_log.txt sudo docker ps --filter "name=ai_" sudo docker ps --filter "name=cortex-" diff --git a/ui/client-app/src/App.js b/ui/client-app/src/App.js index aac4d04..6fc1202 100644 --- a/ui/client-app/src/App.js +++ b/ui/client-app/src/App.js @@ -3,7 +3,7 @@ import Navbar from "./components/Navbar"; import HomePage from "./pages/HomePage"; import VoiceChatPage from "./pages/VoiceChatPage"; -import CodingAssistantPage from "./pages/CodingAssistantPage"; +import SwarmControlPage from "./pages/SwarmControlPage"; import LoginPage from "./pages/LoginPage"; import SettingsPage from "./pages/SettingsPage"; import ProfilePage from "./pages/ProfilePage"; @@ -34,7 +34,38 @@ const [userId, setUserId] = useState(null); const [userProfile, setUserProfile] = useState(null); - const authenticatedPages = ["voice-chat", "coding-assistant", "settings", "profile", "nodes", "skills"]; + const authenticatedPages = ["voice-chat", "swarm-control", "settings", "profile", "nodes", "skills"]; + const pageToPath = { + "home": "/", + "voice-chat": "/voice", + "swarm-control": "/swarm", + "settings": 
"/settings", + "profile": "/profile", + "nodes": "/nodes", + "skills": "/skills", + "login": "/login" + }; + const pathToPage = Object.fromEntries(Object.entries(pageToPath).map(([pk, pv]) => [pv, pk])); + + // Sync state with URL on mount and handle popstate + useEffect(() => { + const handlePopState = () => { + const path = window.location.pathname; + const page = pathToPage[path] || "home"; + setCurrentPage(page); + }; + + window.addEventListener("popstate", handlePopState); + + // Initial sync + const initialPath = window.location.pathname; + const initialPage = pathToPage[initialPath] || "home"; + if (initialPage !== currentPage) { + setCurrentPage(initialPage); + } + + return () => window.removeEventListener("popstate", handlePopState); + }, []); useEffect(() => { @@ -113,8 +144,10 @@ const handleNavigate = (page) => { if (authenticatedPages.includes(page) && !isLoggedIn) { setCurrentPage("login"); + window.history.pushState({}, "", pageToPath["login"]); } else { setCurrentPage(page); + window.history.pushState({}, "", pageToPath[page] || "/"); } }; @@ -129,8 +162,8 @@ return ; case "voice-chat": return ; - case "coding-assistant": - return ; + case "swarm-control": + return ; case "settings": // Only admins can see global settings if (userProfile?.role !== "admin") { diff --git a/ui/client-app/src/components/MultiNodeConsole.js b/ui/client-app/src/components/MultiNodeConsole.js index d870b20..03b037d 100644 --- a/ui/client-app/src/components/MultiNodeConsole.js +++ b/ui/client-app/src/components/MultiNodeConsole.js @@ -1,115 +1,248 @@ import React, { useEffect, useState, useRef } from 'react'; import { getNodeStreamUrl } from '../services/apiService'; +import { Terminal } from '@xterm/xterm'; +import { FitAddon } from '@xterm/addon-fit'; +import '@xterm/xterm/css/xterm.css'; -const MultiNodeConsole = ({ attachedNodeIds }) => { - const [logs, setLogs] = useState({}); // node_id -> array of log strings +// Sub-component for each terminal window to encapsulate 
Xterm.js logic +const TerminalNodeItem = ({ nodeId, stats, onMount, onUnmount, nodeConfig, isAIProcessing }) => { + const terminalRef = useRef(null); + const xtermRef = useRef(null); + const fitAddonRef = useRef(null); + + const sandbox = nodeConfig?.skill_config?.shell?.sandbox || {}; + const mode = sandbox.mode || 'PASSIVE'; + const isStrict = mode === 'STRICT'; + + // UI Feedback: boundary color & pulse logic + // AI Activity pulsing gives life to the interface when things are happening. + const borderClass = isStrict + ? (isAIProcessing ? 'border-red-500/80 ring-1 ring-red-500/40 animate-[pulse-red_2s_infinite]' : 'border-red-900/50') + : (isAIProcessing ? 'border-blue-500/80 ring-1 ring-blue-500/40 animate-[pulse-blue_2s_infinite]' : 'border-blue-900/50'); + + const statusDotClass = isStrict ? 'bg-rose-500' : 'bg-blue-500'; + + useEffect(() => { + // Initialize Xterm + const xterm = new Terminal({ + theme: { + background: '#030712', // Slightly deeper than 0d1117 for contrast + foreground: '#e6edf3', + cursor: isAIProcessing ? '#388bfd' : '#22c55e', + selectionBackground: '#388bfd', + }, + fontSize: 12, // Slightly tighter for multi-view + fontFamily: 'Menlo, Monaco, "Courier New", monospace', + cursorBlink: isAIProcessing, + cursorStyle: 'block', + convertEol: true, + scrollback: 1000, + disableStdin: true, + scrollOnOutput: true, + }); + + const fitAddon = new FitAddon(); + xterm.loadAddon(fitAddon); + xterm.open(terminalRef.current); + + setTimeout(() => fitAddon.fit(), 10); + + xtermRef.current = xterm; + fitAddonRef.current = fitAddon; + + onMount(nodeId, xterm); + + const observer = new ResizeObserver(() => fitAddon.fit()); + if (terminalRef.current) observer.observe(terminalRef.current); + + return () => { + observer.disconnect(); + onUnmount(nodeId); + xterm.dispose(); + }; + }, [nodeId]); + + return ( +
+ {/* compact Node Header */} +
+ {/* Mode Indicator Overlay Gradient */} +
+ +
+ + {nodeId} + + {mode} + +
+
+ C: {stats?.cpu_usage_percent?.toFixed(1) || '0.0'}% + M: {stats?.memory_usage_percent?.toFixed(1) || '0.0'}% +
+
+ {/* Terminal Host */} +
+
+ {isAIProcessing && ( +
+ + + + +
+ )} +
+ +