diff --git a/agent-node/VERSION b/agent-node/VERSION
index e9acec7..e7468c7 100644
--- a/agent-node/VERSION
+++ b/agent-node/VERSION
@@ -1 +1 @@
-1.0.75
+1.0.76
diff --git a/agent-node/src/agent_node/node.py b/agent-node/src/agent_node/node.py
index d56b806..b2f6825 100644
--- a/agent-node/src/agent_node/node.py
+++ b/agent-node/src/agent_node/node.py
@@ -539,7 +539,18 @@
# Optimized for Explorer: immediate children only, no hashing
with os.scandir(watch_path) as it:
for entry in it:
- is_dir = entry.is_dir()
+ if entry.name in [".cortex_sync"] and rel_path in [".", "", "/"]:
+ continue
+
+ # Native Orphan Syslink Cleanup
+ if entry.is_symlink() and not os.path.exists(entry.path):
+ try:
+ os.unlink(entry.path)
+ print(f" [๐๐งน] Cleaned up broken ghost symlink during refresh: {entry.name}")
+ except: pass
+ continue
+
+ is_dir = entry.is_dir() if not entry.is_symlink() else os.path.isdir(entry.path)
# Use metadata only
try:
stats = entry.stat()
diff --git a/ai-hub/app/api/routes/skills.py b/ai-hub/app/api/routes/skills.py
index 810bf09..b82b9b3 100644
--- a/ai-hub/app/api/routes/skills.py
+++ b/ai-hub/app/api/routes/skills.py
@@ -48,7 +48,11 @@
if feature and feature not in s.get("features", []):
continue
- filtered.append(s)
+ # Sanitize LazyFileContent from files array for Pydantic v2 JSON serialization
+ s_clean = s.copy()
+ if "files" in s_clean:
+ s_clean["files"] = [{"file_path": f["file_path"], "absolute_path": f.get("absolute_path", "")} for f in s["files"]]
+ filtered.append(s_clean)
return filtered
diff --git a/ai-hub/app/core/grpc/core/mirror.py b/ai-hub/app/core/grpc/core/mirror.py
index 630fb9c..4104d10 100644
--- a/ai-hub/app/core/grpc/core/mirror.py
+++ b/ai-hub/app/core/grpc/core/mirror.py
@@ -38,7 +38,7 @@
workspace = self.get_workspace_path(session_id)
# Phase 3 ignore filter
- ignore_filter = self.get_ignore_filter(session_id)
+ ignore_filter = CortexIgnore(workspace, is_upstream=True)
if ignore_filter.is_ignored(file_payload.path):
print(f" [๐๐ท] Ignoring write to {file_payload.path}")
return
@@ -51,7 +51,26 @@
safe_path = os.path.normpath(os.path.join(workspace, path_safe))
if not safe_path.startswith(workspace):
raise ValueError(f"Malicious path detected: {file_payload.path}")
-
+
+ # --- SYSTEM SKILL IMMUTABILITY LOCK ---
+ # Prevent any AI agent from modifying strict system skills across the mesh
+ if path_safe.startswith(".skills/"):
+ parts = path_safe.split("/")
+ if len(parts) >= 2:
+ skill_dir = os.path.join(workspace, ".skills", parts[1])
+ if os.path.islink(skill_dir):
+ try:
+ meta_path = os.path.join(os.path.realpath(skill_dir), ".metadata.json")
+ if os.path.exists(meta_path):
+ with open(meta_path, "r") as mf:
+ meta_json = json.load(mf)
+ if meta_json.get("is_system", False):
+ print(f" [๐๐ซ] SYSTEM IMMUTABILITY LOCK: Node blocked from modifying immutable system skill '{parts[1]}'.")
+ return
+ except Exception:
+ pass
+ # ----------------------------------------
+
os.makedirs(os.path.dirname(safe_path), exist_ok=True)
# 0. Fast-Path for single-chunk files (Zero UX latency, no locking)
@@ -79,6 +98,7 @@
os.chown(safe_path, parent_stat.st_uid, parent_stat.st_gid)
except: pass
print(f" [๐โก] Fast Sync Complete: {file_payload.path}")
+ self._check_skill_promotion(session_id, file_payload.path, safe_path)
return
tmp_path = safe_path + ".cortex_tmp"
@@ -140,6 +160,7 @@
os.replace(tmp_path, safe_path)
+            print(f" [๐โ] Sync Complete: {file_payload.path} (Swapped and verified)")
+ self._check_skill_promotion(session_id, file_payload.path, safe_path)
except Exception as e:
print(f" [๐โ] Server atomic swap failed for {file_payload.path}: {e}")
success = False
@@ -151,6 +172,67 @@
try: os.remove(tmp_path)
except: pass
+ def _check_skill_promotion(self, session_id: str, rel_path: str, safe_path: str):
+ import re
+ import shutil
+ match = re.match(r"^\.skills/([^/]+)/[sS][kK][iI][lL][lL]\.md$", rel_path)
+ if match:
+ skill_name = match.group(1)
+
+ # Normalize filename explicitly to uppercase SKILL.md for the UI backend loader
+ if not safe_path.endswith("SKILL.md"):
+ new_safe_path = os.path.join(os.path.dirname(safe_path), "SKILL.md")
+ try: os.rename(safe_path, new_safe_path)
+ except: pass
+ safe_path = new_safe_path
+ skill_workspace_dir = os.path.dirname(safe_path)
+
+ # If it's a real folder and not a symlink, the AI created it locally
+ if os.path.isdir(skill_workspace_dir) and not os.path.islink(skill_workspace_dir):
+ from app.config import settings
+ feature_name = "swarm_control"
+ owner_id = "admin"
+
+ # Attempt to determine feature and owner from session DB
+ try:
+ from app.db.session import get_db_session
+ from app.db.models import Session
+ with get_db_session() as db:
+ sess = db.query(Session).filter(Session.sync_workspace_id == session_id).first()
+ if sess:
+ owner_id = sess.user_id
+ if sess.feature_name:
+ feature_name = sess.feature_name
+ except Exception:
+ pass
+
+ global_skills_dir = os.path.join(settings.DATA_DIR, "skills", feature_name)
+ os.makedirs(global_skills_dir, exist_ok=True)
+ target_physical_dir = os.path.join(global_skills_dir, skill_name)
+
+ if not os.path.exists(target_physical_dir):
+ try:
+ print(f" [๐โญ] Symlink Inversion Promotor Triggered for: {skill_name}")
+ # Move physical files securely into backend VFS
+ shutil.move(skill_workspace_dir, target_physical_dir)
+
+ # Apply Metadata
+ meta_path = os.path.join(target_physical_dir, ".metadata.json")
+ if not os.path.exists(meta_path):
+ meta = {
+ "owner_id": owner_id,
+ "is_system": False,
+ "extra_metadata": {"emoji": "๐ค"}
+ }
+ with open(meta_path, "w") as f:
+ json.dump(meta, f, indent=4)
+
+ # Plop a symlink back into the session sandbox
+ os.symlink(target_physical_dir, skill_workspace_dir, target_is_directory=True)
+ print(f" [๐โญ] Successfully promoted {skill_name} to global VFS!")
+ except Exception as e:
+ print(f" [๐โ] Failed to promote local skill {skill_name}: {e}")
+
def delete_file(self, session_id: str, rel_path: str):
"""Deletes a file or directory from the local mirror."""
workspace = self.get_workspace_path(session_id)
@@ -159,9 +241,29 @@
if not safe_path.startswith(workspace):
raise ValueError(f"Malicious path detected: {rel_path}")
+
+ # --- SYSTEM SKILL IMMUTABILITY LOCK ---
+ if path_safe.startswith(".skills/"):
+ parts = path_safe.split("/")
+ if len(parts) >= 2:
+ skill_dir = os.path.join(workspace, ".skills", parts[1])
+ if os.path.islink(skill_dir):
+ try:
+ meta_path = os.path.join(os.path.realpath(skill_dir), ".metadata.json")
+ if os.path.exists(meta_path):
+ with open(meta_path, "r") as mf:
+ meta_json = json.load(mf)
+ if meta_json.get("is_system", False):
+ print(f" [๐๐ซ] SYSTEM IMMUTABILITY LOCK: Node blocked from deleting immutable system skill '{parts[1]}'.")
+ return
+ except Exception:
+ pass
+ # ----------------------------------------
- if os.path.exists(safe_path):
- if os.path.isdir(safe_path):
+ if os.path.exists(safe_path) or os.path.islink(safe_path):
+ if os.path.islink(safe_path):
+ os.unlink(safe_path)
+ elif os.path.isdir(safe_path):
shutil.rmtree(safe_path)
else:
os.remove(safe_path)
@@ -174,7 +276,7 @@
ignore_filter = self.get_ignore_filter(session_id)
raw_files = []
- for root, dirs, filenames in os.walk(workspace):
+ for root, dirs, filenames in os.walk(workspace, followlinks=True):
dirs[:] = [d for d in dirs if not ignore_filter.is_ignored(os.path.relpath(os.path.join(root, d), workspace))]
for filename in filenames:
abs_path = os.path.join(root, filename)
@@ -242,7 +344,7 @@
"""Compares remote manifest with local mirror using parallel verification."""
from concurrent.futures import ThreadPoolExecutor
workspace = self.get_workspace_path(session_id)
- ignore_filter = self.get_ignore_filter(session_id)
+ ignore_filter = CortexIgnore(workspace, is_upstream=True)
expected_paths = {f.path for f in remote_manifest.files}
diff --git a/ai-hub/app/core/grpc/services/assistant.py b/ai-hub/app/core/grpc/services/assistant.py
index c0f45f9..6353403 100644
--- a/ai-hub/app/core/grpc/services/assistant.py
+++ b/ai-hub/app/core/grpc/services/assistant.py
@@ -303,12 +303,27 @@
files = []
try:
for entry in os.scandir(abs_path):
+ if entry.name in [".cortex_sync"] and path in [".", "", "/"]:
+ continue
+
+ # Native Orphan Syslink Cleanup
+ if entry.is_symlink() and not os.path.exists(entry.path):
+ try:
+ os.unlink(entry.path)
+ logger.info(f"[๐๐งน] Cleaned up broken ghost symlink during refresh: {entry.name}")
+ except: pass
+ continue
+
rel = os.path.relpath(entry.path, workspace)
+ try:
+ f_size = entry.stat().st_size if entry.is_file() else 0
+ except: f_size = 0
+
files.append({
"path": rel,
"name": entry.name,
- "is_dir": entry.is_dir(),
- "size": entry.stat().st_size if entry.is_file() else 0,
+ "is_dir": entry.is_dir() if not entry.is_symlink() else os.path.isdir(entry.path),
+ "size": f_size,
"is_synced": True
})
return {"files": files, "path": path}
@@ -397,7 +412,7 @@
def write(self, node_id: str, path: str, content: bytes = b"", is_dir: bool = False, timeout=10, session_id="__fs_explorer__"):
"""Creates or updates a file/directory on a node (waits for status)."""
node = self.registry.get_node(node_id)
- if not node: return {"error": "Offline"}
+ if not node and node_id not in ["hub", "server", "local"]: return {"error": f"Node {node_id} Offline"}
# Phase 1: Sync local mirror ON HUB instantly (Zero Latency)
if self.mirror and session_id != "__fs_explorer__":
@@ -443,6 +458,8 @@
logger.error(f"[๐โ๏ธ] Local mirror write error: {e}")
return {"error": str(e)}
+ if not node: return {"success": True, "message": "Written to Hub local mirror and dispatched"}
+
# Legacy/Explorer path: await node confirmation
tid = f"fs-write-{int(time.time()*1000)}"
event = self.journal.register(tid, node_id)
@@ -510,7 +527,7 @@
def rm(self, node_id: str, path: str, timeout=10, session_id="__fs_explorer__"):
"""Deletes a file or directory on a node (waits for status)."""
node = self.registry.get_node(node_id)
- if not node: return {"error": "Offline"}
+ if not node and node_id not in ["hub", "server", "local"]: return {"error": f"Node {node_id} Offline"}
# Phase 1: Sync local mirror ON HUB instantly
if self.mirror and session_id != "__fs_explorer__":
@@ -548,6 +565,8 @@
logger.error(f"[๐๐๏ธ] Local mirror rm error: {e}")
return {"error": str(e)}
+ if not node: return {"success": True, "message": "Removed from Hub local mirror and dispatched"}
+
# Legacy/Explorer path: await node confirmation
tid = f"fs-rm-{int(time.time()*1000)}"
event = self.journal.register(tid, node_id)
diff --git a/frontend/src/shared/components/FileSystemNavigator.js b/frontend/src/shared/components/FileSystemNavigator.js
index eb19b55..4d293db 100644
--- a/frontend/src/shared/components/FileSystemNavigator.js
+++ b/frontend/src/shared/components/FileSystemNavigator.js
@@ -54,18 +54,38 @@
return [...newFiles, ...preserved];
};
- const loadRoot = useCallback(async () => {
+ const loadRoot = useCallback(async (isManualRefresh = false) => {
setLoading(true);
setError(null);
try {
- const files = await fetchLevel(initialPath);
- setTree(prev => mergeFiles(prev, files, initialPath));
+ const rootFiles = await fetchLevel(initialPath);
+
+ if (isManualRefresh === true) {
+ let combined = [...rootFiles];
+ const expandedPaths = Object.keys(expanded).filter(p => expanded[p]);
+ if (expandedPaths.length > 0) {
+ const results = await Promise.allSettled(
+ expandedPaths.map(async (path) => {
+ const fetchPath = (path === "/" || path === ".") ? "." : (path.startsWith("/") ? path.slice(1) : path);
+ return await fetchLevel(fetchPath);
+ })
+ );
+ results.forEach((res) => {
+ if (res.status === 'fulfilled') {
+ combined = [...combined, ...res.value];
+ }
+ });
+ }
+ setTree(combined);
+ } else {
+ setTree(prev => mergeFiles(prev, rootFiles, initialPath));
+ }
} catch (err) {
setError(err.message || "Failed to connect to node filesystem.");
} finally {
setLoading(false);
}
- }, [initialPath, fetchLevel]);
+ }, [initialPath, fetchLevel, expanded]);
useEffect(() => {
if (nodeId) {
@@ -167,7 +187,7 @@
try {
await nodeFsTouch(nodeId, fullPath, "", isDir, sessionId);
setNewItemModal(null);
- setTimeout(loadRoot, 500);
+ setTimeout(() => loadRoot(true), 500);
} catch (err) {
setError(`Failed to create: ${err.message}`);
} finally {
@@ -185,7 +205,7 @@
// Optimistically remove from tree to force UI update
setTree(prev => prev.filter(f => !f.path.startsWith(path)));
setDeleteModal(null);
- setTimeout(loadRoot, 500);
+ setTimeout(() => loadRoot(true), 500);
} catch (err) {
setError(`Failed to delete: ${err.message}`);
} finally {
@@ -277,7 +297,7 @@
try {
// If uploadTargetPath is ".", upload to root. If a dir path, use as-is.
await nodeFsUpload(nodeId, uploadTargetPath, file, sessionId);
- setTimeout(loadRoot, 600);
+ setTimeout(() => loadRoot(true), 600);
} catch (err) {
setError(`Upload failed: ${err.message}`);
} finally {
@@ -409,7 +429,7 @@
-