From e270982c3fce4329f545d024b52d45e0f32f722b Mon Sep 17 00:00:00 2001
From: Ali Madad
Date: Sun, 22 Mar 2026 20:13:58 +0000
Subject: [PATCH 1/5] docs(readme): sync upstream discord invite

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index 2662574..794b2ce 100644
--- a/README.md
+++ b/README.md
@@ -3,6 +3,7 @@
 A swarm intelligence prediction engine. Upload documents describing any scenario, and MiroFish simulates thousands of AI agents reacting on social media to predict how events will unfold.
 
 **Live:** [synth.scty.org](https://synth.scty.org)
+**Discord:** [discord.gg/ePf5aPaHnA](https://discord.gg/ePf5aPaHnA)
 
 > Fork of [666ghj/MiroFish](https://github.com/666ghj/MiroFish) — fully translated to English, local graph storage with embedded KuzuDB by default, Claude/Codex CLI support added.
 

From 1445e657da0b44b4284828e29aa169ec85265fd6 Mon Sep 17 00:00:00 2001
From: Ali Madad
Date: Sun, 22 Mar 2026 20:16:36 +0000
Subject: [PATCH 2/5] fix: lint

---
 package.json     |  3 ++-
 scripts/lint.mjs | 37 +++++++++++++++++++++++++++++++++++++
 2 files changed, 39 insertions(+), 1 deletion(-)
 create mode 100644 scripts/lint.mjs

diff --git a/package.json b/package.json
index 63ace21..d27bf82 100644
--- a/package.json
+++ b/package.json
@@ -9,7 +9,8 @@
     "dev": "concurrently --kill-others -n \"backend,frontend\" -c \"green,cyan\" \"npm run backend\" \"npm run frontend\"",
     "backend": "cd backend && uv run python run.py",
     "frontend": "cd frontend && npm run dev",
-    "build": "cd frontend && npm run build"
+    "build": "cd frontend && npm run build",
+    "lint": "node scripts/lint.mjs"
   },
   "devDependencies": {
     "concurrently": "^9.1.2"
diff --git a/scripts/lint.mjs b/scripts/lint.mjs
new file mode 100644
index 0000000..6760ef1
--- /dev/null
+++ b/scripts/lint.mjs
@@ -0,0 +1,37 @@
+import { spawnSync } from 'node:child_process';
+
+const shouldFix = process.argv.includes('--fix');
+
+if (shouldFix) {
+  console.log('No auto-fixers are configured; running verification checks only.');
+}
+
+const commands = [
+  {
+    cmd: 'npm',
+    args: ['run', 'build', '--prefix', 'frontend'],
+  },
+  {
+    cmd: 'uv',
+    args: ['run', 'pytest', 'tests'],
+    cwd: 'backend',
+    env: {
+      UV_CACHE_DIR: '/tmp/uv-cache',
+    },
+  },
+];
+
+for (const command of commands) {
+  const result = spawnSync(command.cmd, command.args, {
+    cwd: command.cwd,
+    env: {
+      ...process.env,
+      ...command.env,
+    },
+    stdio: 'inherit',
+  });
+
+  if (result.status !== 0) {
+    process.exit(result.status ?? 1);
+  }
+}

From db83a3bb03048b490f623ae9acf0e20bbac7eee1 Mon Sep 17 00:00:00 2001
From: 666ghj <670939375@qq.com>
Date: Fri, 20 Mar 2026 10:50:39 +0800
Subject: [PATCH 3/5] fix(readme): update Discord link to valid invite URL

(cherry picked from commit 1536a7933450abc4dbecec90bf4bb7990ef27a4f)

# Conflicts:
#	README-EN.md
#	README.md

From 3d28aa6fec6c8c8754029bdc3d079eb7e602ba53 Mon Sep 17 00:00:00 2001
From: Ali Madad
Date: Sun, 22 Mar 2026 20:22:58 +0000
Subject: [PATCH 4/5] fix(lint): support npm on windows

---
 scripts/lint.mjs | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/scripts/lint.mjs b/scripts/lint.mjs
index 6760ef1..be982ec 100644
--- a/scripts/lint.mjs
+++ b/scripts/lint.mjs
@@ -1,6 +1,7 @@
 import { spawnSync } from 'node:child_process';
 
 const shouldFix = process.argv.includes('--fix');
+const npmCommand = process.platform === 'win32' ? 'npm.cmd' : 'npm';
 
 if (shouldFix) {
   console.log('No auto-fixers are configured; running verification checks only.');
@@ -8,7 +9,7 @@ if (shouldFix) {
 
 const commands = [
   {
-    cmd: 'npm',
+    cmd: npmCommand,
     args: ['run', 'build', '--prefix', 'frontend'],
   },
   {

From 98375a2cbc07b0c5728b5aa0d371f927fb899c4a Mon Sep 17 00:00:00 2001
From: Ali Madad
Date: Sun, 22 Mar 2026 20:25:35 +0000
Subject: [PATCH 5/5] test(llm): cover upstream response sanitization

---
 backend/tests/test_llm_client.py | 26 ++++++++++++++++++++++++++
 1 file changed, 26 insertions(+)
 create mode 100644 backend/tests/test_llm_client.py

diff --git a/backend/tests/test_llm_client.py b/backend/tests/test_llm_client.py
new file mode 100644
index 0000000..0ec1f86
--- /dev/null
+++ b/backend/tests/test_llm_client.py
@@ -0,0 +1,26 @@
+import sys
+from pathlib import Path
+
+sys.path.insert(0, str(Path(__file__).resolve().parents[1]))
+
+from app.utils.llm_client import LLMClient
+
+
+def test_clean_content_strips_think_tags():
+    client = object.__new__(LLMClient)
+
+    cleaned = LLMClient._clean_content(
+        client,
+        '<think>hidden reasoning</think>\n{"status": "ok"}',
+    )
+
+    assert cleaned == '{"status": "ok"}'
+
+
+def test_chat_json_accepts_markdown_fenced_json():
+    client = object.__new__(LLMClient)
+    client.chat = lambda **kwargs: '```json\n{"status": "ok"}\n```'
+
+    payload = LLMClient.chat_json(client, messages=[])
+
+    assert payload == {"status": "ok"}