Skip to content
Closed
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
87 changes: 78 additions & 9 deletions dashboard/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@
DIST = BASE / 'dist' # React 构建产物 (npm run build)
DATA = BASE.parent / "data"
SCRIPTS = BASE.parent / 'scripts'
_ACTIVE_TASK_DATA_DIR = None

# 静态资源 MIME 类型
_MIME_TYPES = {
Expand Down Expand Up @@ -82,21 +83,84 @@ def now_iso():
return datetime.datetime.now(datetime.timezone.utc).isoformat().replace('+00:00', 'Z')


def load_tasks():
return atomic_json_read(DATA / 'tasks_source.json', [])
def _iter_task_data_dirs():
"""返回可用的任务数据目录候选(优先 workspace,其次本地 data)。"""
dirs = [DATA]
oclaw_home = pathlib.Path.home() / '.openclaw'
for p in sorted(oclaw_home.glob('workspace-*/data')):
if p.is_dir():
dirs.append(p)
Comment on lines +88 to +92
Copy link

Copilot AI Mar 11, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

_iter_task_data_dirs() 的注释写的是“优先 workspace,其次本地 data”,但当前实现是先把 DATA 放进列表再 append workspace。由于 get_task_data_dir() 在评分相同的情况下不会覆盖 best_dir(使用的是 > 而不是 >=),这会导致同分时始终偏向本地 DATA,与注释/预期不一致。建议调整候选目录顺序(workspace 在前)或在评分相同时显式优先 workspace。

Suggested change
dirs = [DATA]
oclaw_home = pathlib.Path.home() / '.openclaw'
for p in sorted(oclaw_home.glob('workspace-*/data')):
if p.is_dir():
dirs.append(p)
dirs = []
oclaw_home = pathlib.Path.home() / '.openclaw'
for p in sorted(oclaw_home.glob('workspace-*/data')):
if p.is_dir():
dirs.append(p)
dirs.append(DATA)

Copilot uses AI. Check for mistakes.
return dirs


Comment on lines +89 to 95
Copy link

Copilot AI Mar 11, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

这里重新定义了 oclaw_home = pathlib.Path.home() / '.openclaw',而文件顶部已定义常量 OCLAW_HOME。建议直接复用 OCLAW_HOME,避免路径来源分叉(也方便测试/patch)。

Suggested change
oclaw_home = pathlib.Path.home() / '.openclaw'
for p in sorted(oclaw_home.glob('workspace-*/data')):
if p.is_dir():
dirs.append(p)
return dirs
for p in sorted(OCLAW_HOME.glob('workspace-*/data')):
if p.is_dir():
dirs.append(p)
return dirs

Copilot uses AI. Check for mistakes.
def save_tasks(tasks):
atomic_json_write(DATA / 'tasks_source.json', tasks)
# Trigger refresh (异步,不阻塞,避免僵尸进程)
def _task_source_score(task_file: pathlib.Path):
"""给任务源打分:优先非 demo 任务,其次任务数,再按文件更新时间。"""
try:
tasks = atomic_json_read(task_file, [])
except Exception:
tasks = []
if not isinstance(tasks, list):
tasks = []
non_demo = 0
for t in tasks:
tid = str((t or {}).get('id', ''))
Copy link

Copilot AI Mar 11, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

_task_source_score() 遍历 tasks 时假设每个元素都是 dict:(t or {}).get(...) 在 t 为 str/int 等非 dict 时会抛 AttributeError,进而导致 get_task_data_dir()/healthz/live-status 等接口异常。建议在循环内先判断 t 是否为 dict(或使用 try/except / getattr 兜底),保证异常/脏数据不会把服务打挂。

Suggested change
tid = str((t or {}).get('id', ''))
if not isinstance(t, dict):
continue
tid = str(t.get('id', ''))

Copilot uses AI. Check for mistakes.
if tid and not tid.startswith('JJC-DEMO'):
non_demo += 1
try:
mtime = task_file.stat().st_mtime
except Exception:
mtime = 0
return (1 if non_demo > 0 else 0, non_demo, len(tasks), mtime)


def get_task_data_dir():
    """Pick the active task data directory and cache it for stability.

    Scans every candidate from ``_iter_task_data_dirs()`` that actually
    contains a ``tasks_source.json`` and keeps the best-scoring one
    (scored by ``_task_source_score``). The result is cached in
    ``_ACTIVE_TASK_DATA_DIR`` so the choice stays stable for the lifetime
    of the server process.

    Returns:
        pathlib.Path: the chosen data directory. Falls back to ``DATA``
        (uncached) when no candidate currently has a ``tasks_source.json``,
        so a later call can still discover a workspace directory once its
        task file appears.
    """
    global _ACTIVE_TASK_DATA_DIR
    # Revalidate the cache: if the cached dir vanished or its task file was
    # removed, fall through and re-probe instead of serving a stale choice.
    if (
        _ACTIVE_TASK_DATA_DIR
        and _ACTIVE_TASK_DATA_DIR.is_dir()
        and (_ACTIVE_TASK_DATA_DIR / 'tasks_source.json').exists()
    ):
        return _ACTIVE_TASK_DATA_DIR

    best_dir = None
    best_score = (-1, -1, -1, -1)
    for d in _iter_task_data_dirs():
        tf = d / 'tasks_source.json'
        if not tf.exists():
            continue
        score = _task_source_score(tf)
        if score > best_score:
            best_score = score
            best_dir = d

    if best_dir is None:
        # No candidate has a tasks_source.json yet. Do NOT cache the
        # fallback: caching DATA here would pin the choice forever and a
        # workspace-*/data dir created later would never be detected.
        return DATA

    _ACTIVE_TASK_DATA_DIR = best_dir
    log.info(f'任务数据源: {_ACTIVE_TASK_DATA_DIR}')
    return _ACTIVE_TASK_DATA_DIR
Comment on lines +116 to +135
Copy link

Copilot AI Mar 11, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

get_task_data_dir() 会在“当前没有任何候选目录包含 tasks_source.json”时依然把 _ACTIVE_TASK_DATA_DIR 缓存为 DATA(best_dir 初始值),之后即使 workspace/data 下生成了 tasks_source.json 也不会重新探测(因为缓存目录 is_dir() 仍为 True)。建议只在找到有效 tasks_source.json 时才缓存,或增加重探测条件/TTL(例如当选中的 tasks_source.json 不存在时重新选择)。

Copilot uses AI. Check for mistakes.


def _refresh_live_data_async(task_data_dir: pathlib.Path):
"""触发对应数据目录的 live_status 刷新脚本。"""
script = task_data_dir.parent / 'scripts' / 'refresh_live_data.py'
if not script.exists():
script = SCRIPTS / 'refresh_live_data.py'

def _refresh():
try:
subprocess.run(['python3', str(SCRIPTS / 'refresh_live_data.py')], timeout=30)
subprocess.run(['python3', str(script)], timeout=30)
Comment on lines +140 to +146
Copy link

Copilot AI Mar 11, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

_refresh_live_data_async() 在 workspace 数据目录未包含 scripts/refresh_live_data.py 时会 fallback 到项目级 SCRIPTS/refresh_live_data.py;但该脚本内部把 DATA 固定为“脚本所在仓库的 data/”,会把 live_status.json 写到项目 data 而不是已选中的 task_data_dir,导致看板读取的 live_status.json 仍不可见/不同步。建议:不要对 workspace 场景 fallback 到项目脚本;或给脚本增加 data_dir 参数/环境变量并在此处传入,确保生成文件落在 task_data_dir。

Copilot uses AI. Check for mistakes.
except Exception as e:
log.warning(f'refresh_live_data.py 触发失败: {e}')

threading.Thread(target=_refresh, daemon=True).start()


def load_tasks():
    """Read the task list from the currently active data directory.

    Returns an empty list when the file is missing or unreadable
    (the default supplied to ``atomic_json_read``).
    """
    return atomic_json_read(get_task_data_dir() / 'tasks_source.json', [])


def save_tasks(tasks):
    """Persist the task list to the active data directory.

    Writes atomically, then kicks off a non-blocking live-status refresh
    for the same directory so the dashboard data stays in sync.
    """
    target = get_task_data_dir()
    atomic_json_write(target / 'tasks_source.json', tasks)
    _refresh_live_data_async(target)


def handle_task_action(task_id, action, reason):
"""Stop/cancel/resume a task from the dashboard."""
tasks = load_tasks()
Expand Down Expand Up @@ -2124,12 +2188,17 @@ def do_GET(self):
if p in ('', '/dashboard', '/dashboard.html'):
self.send_file(DIST / 'index.html')
elif p == '/healthz':
checks = {'dataDir': DATA.is_dir(), 'tasksReadable': (DATA / 'tasks_source.json').exists()}
checks['dataWritable'] = os.access(str(DATA), os.W_OK)
task_data_dir = get_task_data_dir()
checks = {
'dataDir': task_data_dir.is_dir(),
'tasksReadable': (task_data_dir / 'tasks_source.json').exists(),
}
checks['dataWritable'] = os.access(str(task_data_dir), os.W_OK)
all_ok = all(checks.values())
self.send_json({'status': 'ok' if all_ok else 'degraded', 'ts': now_iso(), 'checks': checks})
elif p == '/api/live-status':
self.send_json(read_json(DATA / 'live_status.json'))
task_data_dir = get_task_data_dir()
self.send_json(read_json(task_data_dir / 'live_status.json'))
elif p == '/api/agent-config':
self.send_json(read_json(DATA / 'agent_config.json'))
Comment on lines 2199 to 2203
Copy link

Copilot AI Mar 11, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

PR 描述中提到“统一 /healthz /api/live-status /api/agent-config 使用同一数据源”,但当前 diff 只把 /healthz 与 /api/live-status 切到 get_task_data_dir();/api/agent-config(以及后续 model-change-log/last-result 等)仍固定读取 DATA。若这些文件也可能落在 workspace data 下,会继续出现数据源不一致。建议确认需求后要么同步改为使用 task_data_dir,要么在 PR 描述中澄清哪些文件仍然固定在 DATA。

Copilot uses AI. Check for mistakes.
elif p == '/api/model-change-log':
Expand Down
Loading