Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion agent/context_manager/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -213,7 +213,7 @@ def _load_system_prompt(
"""Load and render the system prompt from YAML file with Jinja2"""
prompt_file = Path(__file__).parent.parent / "prompts" / f"{prompt_file_suffix}"

with open(prompt_file, "r") as f:
with open(prompt_file, "r", encoding="utf-8") as f:
prompt_data = yaml.safe_load(f)
template_str = prompt_data.get("system_prompt", "")

Expand Down
6 changes: 4 additions & 2 deletions agent/core/agent_loop.py
Original file line number Diff line number Diff line change
Expand Up @@ -683,8 +683,10 @@ def _extract_thinking_state(


def _should_replay_thinking_state(model_name: str | None) -> bool:
"""Only Anthropic's native adapter accepts replayed thinking metadata."""
return bool(model_name and model_name.startswith("anthropic/"))
"""Only Anthropic and DeepSeek accept/require replayed thinking metadata."""
if not model_name:
return False
return model_name.startswith("anthropic/") or "deepseek" in model_name.lower()


def _is_invalid_thinking_signature_error(exc: Exception) -> bool:
Expand Down
2 changes: 2 additions & 0 deletions agent/core/model_switcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,8 @@
# ":cheapest" / ":preferred" / ":<provider>" to override the default
# routing policy (auto = fastest with failover).
SUGGESTED_MODELS = [
{"id": "openai/deepseek-v4-flash", "label": "deepseek-v4-flash (Custom API)"},
{"id": "openai/deepseek-v4-pro", "label": "deepseek-v4-pro (Custom API)"},
{"id": "openai/gpt-5.5", "label": "GPT-5.5"},
{"id": "openai/gpt-5.4", "label": "GPT-5.4"},
{"id": "anthropic/claude-opus-4-7", "label": "Claude Opus 4.7"},
Expand Down
6 changes: 3 additions & 3 deletions agent/core/session.py
Original file line number Diff line number Diff line change
Expand Up @@ -473,7 +473,7 @@ def save_trajectory_local(
# Atomic-ish write: stage to .tmp then rename so a crash mid-write
# doesn't leave a truncated JSON that breaks the retry scanner.
tmp_path = filepath.with_suffix(filepath.suffix + ".tmp")
with open(tmp_path, "w") as f:
with open(tmp_path, "w", encoding="utf-8") as f:
json.dump(trajectory, f, indent=2)
tmp_path.replace(filepath)

Expand All @@ -487,14 +487,14 @@ def update_local_save_status(
) -> bool:
"""Update the upload status of an existing local save file"""
try:
with open(filepath, "r") as f:
with open(filepath, "r", encoding="utf-8") as f:
data = json.load(f)

data["upload_status"] = upload_status
data["upload_url"] = dataset_url
data["last_save_time"] = datetime.now().isoformat()

with open(filepath, "w") as f:
with open(filepath, "w", encoding="utf-8") as f:
json.dump(data, f, indent=2)

return True
Expand Down
32 changes: 22 additions & 10 deletions agent/core/session_uploader.py
Original file line number Diff line number Diff line change
Expand Up @@ -276,7 +276,7 @@ def _write_row_payload(data: dict, tmp_path: str) -> None:
"tools": json.dumps(scrubbed["tools"]),
}

with open(tmp_path, "w") as tmp:
with open(tmp_path, "w", encoding="utf-8") as tmp:
json.dump(session_row, tmp)


def _write_claude_code_payload(data: dict, tmp_path: str) -> None:
    """Convert a session to Claude-Code JSONL and stage it at tmp_path.

    Args:
        data: Raw session dict (may contain secrets).
        tmp_path: Staging file path; overwritten with one JSON object per line.
    """
    # Scrub before conversion so secrets never reach the upload temp file.
    scrubbed = _scrub_session_for_upload(data)
    events = to_claude_code_jsonl(scrubbed)
    # UTF-8 explicitly: platform-default encodings can corrupt non-ASCII
    # transcript content on Windows.
    with open(tmp_path, "w", encoding="utf-8") as tmp:
        for event in events:
            tmp.write(json.dumps(event))
            tmp.write("\n")
Expand All @@ -302,14 +302,20 @@ def _url_field(format: str) -> str:

def _read_session_file(session_file: str) -> dict:
"""Read a local session file while respecting uploader file locks."""
import fcntl
has_fcntl = True
try:
import fcntl
except ImportError:
has_fcntl = False

with open(session_file, "r") as f:
fcntl.flock(f, fcntl.LOCK_SH)
with open(session_file, "r", encoding="utf-8") as f:
if has_fcntl:
fcntl.flock(f, fcntl.LOCK_SH)
try:
return json.load(f)
finally:
fcntl.flock(f, fcntl.LOCK_UN)
if has_fcntl:
fcntl.flock(f, fcntl.LOCK_UN)


def _update_upload_status(
Expand All @@ -325,10 +331,15 @@ def _update_upload_status(
local session JSON file. Re-read under an exclusive lock so one uploader
cannot clobber fields written by the other.
"""
import fcntl
has_fcntl = True
try:
import fcntl
except ImportError:
has_fcntl = False

with open(session_file, "r+") as f:
fcntl.flock(f, fcntl.LOCK_EX)
with open(session_file, "r+", encoding="utf-8") as f:
if has_fcntl:
fcntl.flock(f, fcntl.LOCK_EX)
try:
data = json.load(f)
data[status_key] = status
Expand All @@ -341,7 +352,8 @@ def _update_upload_status(
f.flush()
os.fsync(f.fileno())
finally:
fcntl.flock(f, fcntl.LOCK_UN)
if has_fcntl:
fcntl.flock(f, fcntl.LOCK_UN)


def dataset_card_readme(repo_id: str) -> str:
Expand Down
Binary file added run_test.py
Binary file not shown.
1 change: 1 addition & 0 deletions test_litellm.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
"""One-shot smoke test: send "hello" through LiteLLM and print the raw reply."""
import os  # kept from original; environment is populated by load_dotenv below

import litellm  # noqa: F401 -- kept from original (import-time configuration)
from dotenv import load_dotenv
from litellm import completion

# Pull API credentials (e.g. OPENAI_API_KEY / custom base URL) from .env
# into os.environ before making the call.
load_dotenv()

response = completion(
    model='openai/deepseek-chat',
    messages=[{'role': 'user', 'content': 'hello'}],
)
print(response)
Loading