diff --git a/ai-hub/integration_tests/test_browser_llm.py b/ai-hub/integration_tests/test_browser_llm.py
index 56c920b..173f788 100644
--- a/ai-hub/integration_tests/test_browser_llm.py
+++ b/ai-hub/integration_tests/test_browser_llm.py
@@ -77,12 +77,22 @@
 
     # Step 3: Verify the browser agent physically saved the screenshot and metadata to the file sync system.
    # Since the session_id is either used directly or via sync_workspace_id wrapper, we search the mirrors directory.
     import subprocess
-    cmd = [
-        "docker", "exec", "ai_hub_service", "bash", "-c",
-        "find /app/data/mirrors/ -name '.browser_data' -type d"
-    ]
-    res = subprocess.run(cmd, capture_output=True, text=True)
-    assert res.returncode == 0, f"Failed to search for .browser_data inside Docker: {res.stderr}"
+    import time
+
+    # Give the mesh file-sync engine up to 15 seconds to sync the .browser_data from the Node back into the Hub's mirrors.
+    print("\n[test] Waiting up to 15s for file-sync mesh propagation of browser artifacts...")
+    res = None
+    for _ in range(15):
+        cmd = [
+            "docker", "exec", "ai_hub_service", "bash", "-c",
+            "find /app/data/mirrors/ -name '.browser_data' -type d"
+        ]
+        res = subprocess.run(cmd, capture_output=True, text=True)
+        if res.stdout.strip():
+            break
+        time.sleep(1)
+
+    assert res and res.returncode == 0, f"Failed to search for .browser_data inside Docker: {res.stderr}"
     # We expect at least one .browser_data directory corresponding to our session
     found_dirs = res.stdout.strip().split('\n')