Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 38 additions & 5 deletions openspace/dashboard_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -424,11 +424,18 @@ def _discover_workflow_dirs() -> List[Path]:
for root in WORKFLOW_ROOTS:
if not root.exists():
continue
_scan_workflow_tree(root, discovered)
_scan_workflow_tree(root, discovered, root=root)
return sorted(discovered.values(), key=lambda item: item.stat().st_mtime, reverse=True)


def _scan_workflow_tree(directory: Path, discovered: Dict[str, Path], *, _depth: int = 0, _max_depth: int = 6) -> None:
def _scan_workflow_tree(
directory: Path,
discovered: Dict[str, Path],
*,
root: Path,
_depth: int = 0,
_max_depth: int = 6,
) -> None:
if _depth > _max_depth:
return
try:
Expand All @@ -439,9 +446,35 @@ def _scan_workflow_tree(directory: Path, discovered: Dict[str, Path], *, _depth:
if not child.is_dir():
continue
if (child / "metadata.json").exists() or (child / "traj.jsonl").exists():
discovered.setdefault(child.name, child)
discovered.setdefault(_workflow_id(child, root=root), child)
else:
_scan_workflow_tree(child, discovered, _depth=_depth + 1, _max_depth=_max_depth)
_scan_workflow_tree(
child,
discovered,
root=root,
_depth=_depth + 1,
_max_depth=_max_depth,
)


def _workflow_root(path: Path) -> Optional[Path]:
    """Return the first entry of WORKFLOW_ROOTS that contains *path*.

    Both *path* and each candidate root are resolved before comparison so
    symlinked or relative spellings of the same directory still match.
    Returns None when *path* lies outside every configured root.
    """
    resolved_path = path.resolve()
    for candidate in WORKFLOW_ROOTS:
        try:
            resolved_path.relative_to(candidate.resolve())
        except ValueError:
            # Not under this root; keep scanning the remaining roots.
            continue
        return candidate
    return None


def _workflow_id(path: Path, *, root: Optional[Path] = None) -> str:
workflow_root = root or _workflow_root(path)
if workflow_root is None:
return path.name

relative = path.resolve().relative_to(workflow_root.resolve())
return f"{workflow_root.name}__{'__'.join(relative.parts)}"


def _get_workflow_dir(workflow_id: str) -> Optional[Path]:
Expand Down Expand Up @@ -514,7 +547,7 @@ def _build_workflow_summary(workflow_dir: Path) -> Dict[str, Any]:
iterations = len(trajectory)

return {
"id": workflow_dir.name,
"id": _workflow_id(workflow_dir),
"path": str(workflow_dir),
"task_id": metadata.get("task_id") or metadata.get("task_name") or workflow_dir.name,
"task_name": metadata.get("task_name") or metadata.get("task_id") or workflow_dir.name,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,24 @@
through different transport mechanisms.
"""

from importlib.util import find_spec

from .base import MCPBaseConnector # noqa: F401
from .http import HttpConnector # noqa: F401
from .sandbox import SandboxConnector # noqa: F401
from .stdio import StdioConnector # noqa: F401
from .websocket import WebSocketConnector # noqa: F401

# Import the real WebSocket transport only when the optional third-party
# "websockets" package is installed; otherwise expose a stub class that
# fails loudly — with install instructions — the moment it is instantiated.
# This keeps `from ...connectors import WebSocketConnector` importable in
# environments without the optional dependency.
if find_spec("websockets") is not None:
    from .websocket import WebSocketConnector  # noqa: F401
else:
    class WebSocketConnector:
        """Fallback connector when optional websocket dependency is unavailable."""

        def __init__(self, *args, **kwargs):
            # Raise at construction time (not import time) so merely
            # importing the package never requires 'websockets'.
            raise ImportError(
                "WebSocket MCP transport requires optional dependency 'websockets'. "
                "Install it with: pip install websockets"
            )

__all__ = [
"MCPBaseConnector",
Expand Down
10 changes: 8 additions & 2 deletions openspace/mcp_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,6 +111,11 @@ def seekable(self):
_UPLOAD_META_FILENAME = ".upload_meta.json"


def _normalize_skill_dir(path_str: str) -> str:
"""Return a canonical string form for a skill directory path."""
return str(Path(path_str).expanduser().resolve())


async def _get_openspace():
"""Lazy-initialise the OpenSpace engine."""
global _openspace_instance
Expand Down Expand Up @@ -281,8 +286,9 @@ async def _auto_register_skill_dirs(skill_dirs: List[str]) -> int:
store = _get_store()
db_created = await store.sync_from_registry(added)

is_first = any(d not in _registered_skill_dirs for d in skill_dirs)
for d in skill_dirs:
normalized_valid_dirs = [_normalize_skill_dir(str(d)) for d in valid_dirs]
is_first = any(d not in _registered_skill_dirs for d in normalized_valid_dirs)
for d in normalized_valid_dirs:
_registered_skill_dirs.add(d)

if added:
Expand Down
21 changes: 13 additions & 8 deletions openspace/tool_layer.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,7 @@ def __init__(self, config: Optional[OpenSpaceConfig] = None):
self._skill_store: Optional[SkillStore] = None
self._execution_analyzer: Optional[ExecutionAnalyzer] = None
self._skill_evolver: Optional[SkillEvolver] = None
self._skill_selection_llm: Optional[LLMClient] = None
self._execution_count: int = 0 # For periodic metric-based evolution
self._last_evolved_skills: List[Dict[str, Any]] = [] # Tracks skills evolved during last execute()

Expand Down Expand Up @@ -507,7 +508,9 @@ async def execute(
f"Executing with GroundingAgent "
f"(max {max_iterations} iterations, no skills)..."
)
result = await self._grounding_agent.process(execution_context)
execution_context_p0 = {**execution_context}
execution_context_p0["max_iterations"] = max_iterations
result = await self._grounding_agent.process(execution_context_p0)

execution_time = asyncio.get_event_loop().time() - start_time

Expand Down Expand Up @@ -730,12 +733,14 @@ def _get_skill_selection_llm(self) -> Optional[LLMClient]:
"""
# 1. Dedicated skill selection model (OpenSpaceConfig.skill_registry_model)
if self.config.skill_registry_model:
return LLMClient(
model=self.config.skill_registry_model,
timeout=30.0, # skill selection should be fast
max_retries=2,
**self.config.llm_kwargs,
)
if self._skill_selection_llm is None:
self._skill_selection_llm = LLMClient(
model=self.config.skill_registry_model,
timeout=30.0, # skill selection should be fast
max_retries=2,
**self.config.llm_kwargs,
)
return self._skill_selection_llm

# 2. Tool retrieval model
if hasattr(self._grounding_agent, '_tool_retrieval_llm') and self._grounding_agent._tool_retrieval_llm:
Expand Down Expand Up @@ -933,4 +938,4 @@ def __repr__(self) -> str:
if self._running:
status = "running"
backends = ", ".join(self.config.backend_scope) if self.config.backend_scope else "all"
return f"<OpenSpace(status={status}, backends={backends}, model={self.config.llm_model})>"
return f"<OpenSpace(status={status}, backends={backends}, model={self.config.llm_model})>"
5 changes: 3 additions & 2 deletions openspace/utils/ui.py
Original file line number Diff line number Diff line change
Expand Up @@ -424,7 +424,8 @@ def print_summary(self, result: Dict[str, Any]):
status_text = status_display.get(status, status)

print(box.text_line(f" Status: {status_text}", indent=4, text_color=''))
print(box.text_line(f" Execution Time: {colorize(f'{result.get('execution_time', 0):.2f}s', 'c')}", indent=4, text_color=''))
exec_time = result.get("execution_time", 0)
print(box.text_line(f" Execution Time: {colorize(f'{exec_time:.2f}s', 'c')}", indent=4, text_color=''))
print(box.text_line(f" Iterations: {colorize(str(result.get('iterations', 0)), 'y')}", indent=4, text_color=''))
print(box.text_line(f" Completed Tasks: {colorize(str(result.get('completed_tasks', 0)), 'g')}", indent=4, text_color=''))

Expand All @@ -443,4 +444,4 @@ def create_ui(enable_live: bool = True, compact: bool = False) -> OpenSpaceUI:
enable_live: Whether to enable live display updates
compact: Use compact layout for smaller terminals
"""
return OpenSpaceUI(enable_live=enable_live, compact=compact)
return OpenSpaceUI(enable_live=enable_live, compact=compact)
100 changes: 100 additions & 0 deletions tests/test_dashboard_server.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
import importlib
import sys
import tempfile
import types
import unittest
from pathlib import Path


def _install_dashboard_stubs():
flask_mod = types.ModuleType("flask")

class Flask:
def __init__(self, *args, **kwargs):
pass

def route(self, *args, **kwargs):
def decorator(fn):
return fn

return decorator

flask_mod.Flask = Flask
flask_mod.abort = lambda *args, **kwargs: None
flask_mod.jsonify = lambda payload=None, **kwargs: payload if payload is not None else kwargs
flask_mod.send_from_directory = lambda *args, **kwargs: None
flask_mod.url_for = lambda *args, **kwargs: "/artifact"

action_recorder_mod = types.ModuleType("openspace.recording.action_recorder")
action_recorder_mod.analyze_agent_actions = lambda actions: {}
action_recorder_mod.load_agent_actions = lambda path: []

recording_utils_mod = types.ModuleType("openspace.recording.utils")
recording_utils_mod.load_recording_session = lambda path: {
"metadata": {},
"trajectory": [],
"plans": [],
"decisions": [],
"statistics": {},
}

skill_engine_mod = types.ModuleType("openspace.skill_engine")
skill_engine_mod.SkillStore = type("SkillStore", (), {})

skill_types_mod = types.ModuleType("openspace.skill_engine.types")
skill_types_mod.SkillRecord = type("SkillRecord", (), {})

stubs = {
"flask": flask_mod,
"openspace.recording.action_recorder": action_recorder_mod,
"openspace.recording.utils": recording_utils_mod,
"openspace.skill_engine": skill_engine_mod,
"openspace.skill_engine.types": skill_types_mod,
}
originals = {name: sys.modules.get(name) for name in stubs}
sys.modules.update(stubs)
return originals


class DashboardServerTests(unittest.TestCase):
    """Tests for openspace.dashboard_server with heavy dependencies stubbed."""

    @classmethod
    def setUpClass(cls):
        # Install the stubs first, then import the server fresh so it binds
        # against the stubs rather than any previously cached module.
        cls._originals = _install_dashboard_stubs()
        sys.modules.pop("openspace.dashboard_server", None)
        cls.dashboard = importlib.import_module("openspace.dashboard_server")

    @classmethod
    def tearDownClass(cls):
        # Drop our import and restore every pre-existing module entry.
        sys.modules.pop("openspace.dashboard_server", None)
        for name, module in cls._originals.items():
            if module is None:
                sys.modules.pop(name, None)
            else:
                sys.modules[name] = module

    def test_discover_workflow_dirs_keeps_duplicate_leaf_names_from_different_roots(self):
        with tempfile.TemporaryDirectory() as tmp:
            base = Path(tmp)
            # Two roots, each containing a workflow dir with the same leaf name.
            for root_name in ("recordings", "results"):
                leaf = base / root_name / "shared-name"
                leaf.mkdir(parents=True)
                (leaf / "metadata.json").write_text("{}", encoding="utf-8")

            saved_roots = self.dashboard.WORKFLOW_ROOTS
            self.dashboard.WORKFLOW_ROOTS = [base / "recordings", base / "results"]
            try:
                workflows = self.dashboard._discover_workflow_dirs()
                workflow_ids = {self.dashboard._workflow_id(path) for path in workflows}
            finally:
                self.dashboard.WORKFLOW_ROOTS = saved_roots

            # Both directories survive discovery, with distinct qualified ids.
            self.assertEqual(len(workflows), 2)
            self.assertEqual(len(workflow_ids), 2)


if __name__ == "__main__":
unittest.main()
36 changes: 36 additions & 0 deletions tests/test_mcp_connectors_optional.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
import importlib
import importlib.util
import unittest
from unittest.mock import patch


# Dotted path of the connectors package under test.
MODULE_NAME = "openspace.grounding.backends.mcp.transport.connectors"


class MCPConnectorsOptionalDepsTests(unittest.TestCase):
    """Verify the MCP connector package degrades gracefully without 'websockets'."""

    def test_websocket_connector_available_when_dependency_installed(self):
        # With the real dependency present, the genuine connector class is
        # exported from the .websocket submodule.
        connectors = importlib.import_module(MODULE_NAME)
        self.assertEqual(
            connectors.WebSocketConnector.__module__,
            f"{MODULE_NAME}.websocket",
        )

    def test_websocket_connector_fallback_when_dependency_missing(self):
        connectors = importlib.import_module(MODULE_NAME)
        original_find_spec = importlib.util.find_spec

        def fake_find_spec(name: str, *args, **kwargs):
            # Pretend only the optional 'websockets' package is missing.
            if name == "websockets":
                return None
            return original_find_spec(name, *args, **kwargs)

        try:
            with patch("importlib.util.find_spec", side_effect=fake_find_spec):
                connectors = importlib.reload(connectors)
                with self.assertRaisesRegex(ImportError, "pip install websockets"):
                    connectors.WebSocketConnector(url="ws://localhost:1234")
        finally:
            # Always reload with the real find_spec; without this `finally`,
            # a failing assertion left the fallback class cached in
            # sys.modules and could poison unrelated tests.
            importlib.reload(connectors)


if __name__ == "__main__":
unittest.main()
Loading