-
Notifications
You must be signed in to change notification settings - Fork 2.1k
fix: queue telegram bridge messages per chat to prevent session lock contention #860
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Open
Junior00619
wants to merge
1
commit into
NVIDIA:main
Choose a base branch
from
Junior00619:fix/telegram-bridge-session-lock
base: main
Could not load branches
Branch not found: {{ refName }}
Loading
Could not load tags
Nothing to show
Loading
Are you sure you want to change the base?
Some commits from the old base branch may be removed from the timeline,
and old review comments may become outdated.
+241
−51
Open
Changes from all commits
Commits
File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,173 @@ | ||
// SPDX-FileCopyrightText: Copyright (c) 2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
//
// Tests for #860: per-chat message queuing prevents concurrent agent calls
// and caps queue depth to provide backpressure.

import { describe, it, expect } from "vitest";
import { createRequire } from "node:module";

const require = createRequire(import.meta.url);
const { chatQueues, chatQueueDepths, chatEpochs, MAX_QUEUE_DEPTH } = require("../scripts/telegram-bridge");

/**
 * Append `job` to the per-chat promise chain, mirroring the bridge's
 * enqueue pattern: the job runs whether the previous link resolved or
 * rejected, and the map always points at the tail of the chain.
 * Returns the new tail so callers can await the job's completion.
 */
const enqueue = (chatId, job) => {
  const tail = chatQueues.get(chatId) || Promise.resolve();
  const next = tail.then(job, job);
  chatQueues.set(chatId, next);
  return next;
};

/** Wait a short wall-clock interval so already-queued jobs can start. */
const tick = (ms = 10) => new Promise((resolve) => setTimeout(resolve, ms));

describe("telegram bridge queue serialization", () => {
  it("exports MAX_QUEUE_DEPTH as 5", () => {
    expect(MAX_QUEUE_DEPTH).toBe(5);
  });

  it("two concurrent jobs on the same chatId execute sequentially", async () => {
    const events = [];
    let releaseFirst;
    const gate = new Promise((resolve) => { releaseFirst = resolve; });

    const chatId = "test-serial";
    enqueue(chatId, async () => {
      events.push("job1-start");
      await gate;
      events.push("job1-end");
    });
    const second = enqueue(chatId, async () => {
      events.push("job2-start");
      events.push("job2-end");
    });

    // job1 should have started but job2 should be waiting
    await tick();
    expect(events).toEqual(["job1-start"]);

    // Unblock job1 — job2 should run after
    releaseFirst();
    await second;

    expect(events).toEqual(["job1-start", "job1-end", "job2-start", "job2-end"]);

    // Cleanup
    chatQueues.delete(chatId);
  });

  it("different chatIds run independently (in parallel)", async () => {
    const events = [];
    let releaseA;
    const gateA = new Promise((resolve) => { releaseA = resolve; });

    const chainA = enqueue("chatA", async () => {
      events.push("A-start");
      await gateA;
      events.push("A-end");
    });
    const chainB = enqueue("chatB", async () => {
      events.push("B-start");
      events.push("B-end");
    });

    // B should complete even though A is blocked
    await chainB;
    expect(events).toContain("B-start");
    expect(events).toContain("B-end");
    expect(events).not.toContain("A-end");

    releaseA();
    await chainA;
    expect(events).toEqual(["A-start", "B-start", "B-end", "A-end"]);

    chatQueues.delete("chatA");
    chatQueues.delete("chatB");
  });

  it("chatQueueDepths tracks pending jobs and decrements on completion", async () => {
    const chatId = "test-depth";
    chatQueueDepths.set(chatId, 3);
    expect(chatQueueDepths.get(chatId)).toBe(3);

    // A completing job decrements the counter by one.
    const remaining = chatQueueDepths.get(chatId) - 1;
    chatQueueDepths.set(chatId, remaining);
    expect(chatQueueDepths.get(chatId)).toBe(2);

    chatQueueDepths.delete(chatId);
  });

  it("MAX_QUEUE_DEPTH caps at 5 pending jobs", () => {
    const chatId = "test-cap";
    // Simulate 5 queued jobs
    chatQueueDepths.set(chatId, 5);

    const depth = chatQueueDepths.get(chatId) || 0;
    expect(depth >= MAX_QUEUE_DEPTH).toBe(true);

    chatQueueDepths.delete(chatId);
  });

  it("/reset during in-flight job does not cause overlapping runs", async () => {
    const chatId = "test-reset-race";
    let releaseOld;
    const gateOld = new Promise((resolve) => { releaseOld = resolve; });
    const executed = [];

    // Epoch starts at 0
    chatEpochs.delete(chatId);
    const epochBefore = chatEpochs.get(chatId) || 0;

    // Enqueue a blocking "old" job that captures epoch 0
    const oldChain = enqueue(chatId, async () => {
      executed.push("old-start");
      await gateOld;
      executed.push("old-end");
    });
    chatQueueDepths.set(chatId, 1);

    // old job starts
    await tick();
    expect(executed).toEqual(["old-start"]);

    // Simulate /reset: bump epoch, clear queue state
    chatQueues.delete(chatId);
    chatQueueDepths.delete(chatId);
    chatEpochs.set(chatId, epochBefore + 1);
    const epochAfter = chatEpochs.get(chatId) || 0;
    expect(epochAfter).toBe(1);

    // Enqueue a "new" job that captures epoch 1 — stale-check should skip
    // it if it was queued under the old epoch, but here it's under the new one.
    const newChain = enqueue(chatId, async () => {
      // Check: new job's epoch matches current, so it should run
      const currentEpoch = chatEpochs.get(chatId) || 0;
      expect(currentEpoch).toBe(epochAfter);
      executed.push("new-run");
    });

    // new job runs immediately (fresh chain, not blocked by old)
    await newChain;
    expect(executed).toContain("new-run");
    expect(executed).not.toContain("old-end");

    // Resolve old job — it should complete without throwing
    releaseOld();
    await oldChain;
    expect(executed).toContain("old-end");

    // Cleanup
    chatQueues.delete(chatId);
    chatQueueDepths.delete(chatId);
    chatEpochs.delete(chatId);
  });
});
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
Uh oh!
There was an error while loading. Please reload this page.