Skip to content

Commit 9c3e0d1

Browse files
Teagan42 and Copilot authored
refactor: track imdb retry queue persistence (#61)
* refactor(loader): track imdb retry queue for persistence * Update mcp_plex/loader.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Update mcp_plex/loader.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Apply suggestions from code review Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Apply suggestions from code review Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
1 parent 111983b commit 9c3e0d1

File tree

4 files changed

+54
-13
lines changed

4 files changed

+54
-13
lines changed

mcp_plex/loader.py

Lines changed: 47 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,9 @@
55
import json
66
import logging
77
import sys
8+
from collections import deque
89
from pathlib import Path
9-
from typing import Awaitable, List, Optional, Sequence, TypeVar
10+
from typing import Awaitable, Iterable, List, Optional, Sequence, TypeVar
1011

1112
import click
1213
import httpx
@@ -43,10 +44,42 @@
4344
_imdb_cache: IMDbCache | None = None
4445
_imdb_max_retries: int = 3
4546
_imdb_backoff: float = 1.0
46-
_imdb_retry_queue: asyncio.Queue[str] | None = None
47+
_imdb_retry_queue: "_IMDbRetryQueue" | None = None
4748
_imdb_batch_limit: int = 5
4849
_qdrant_batch_size: int = 1000
4950

51+
52+
class _IMDbRetryQueue(asyncio.Queue[str]):
53+
"""Queue that tracks items in a deque for safe serialization."""
54+
55+
def __init__(self, initial: Iterable[str] | None = None):
56+
super().__init__()
57+
self._items: deque[str] = deque()
58+
if initial:
59+
for imdb_id in initial:
60+
imdb_id_str = str(imdb_id)
61+
super().put_nowait(imdb_id_str)
62+
self._items.append(imdb_id_str)
63+
64+
def put_nowait(self, item: str) -> None: # type: ignore[override]
65+
super().put_nowait(item)
66+
self._items.append(item)
67+
68+
def get_nowait(self) -> str: # type: ignore[override]
69+
if not self._items:
70+
raise RuntimeError("Desynchronization: Queue is not empty but self._items is empty.")
71+
try:
72+
item = super().get_nowait()
73+
except asyncio.QueueEmpty:
74+
raise RuntimeError("Desynchronization: self._items is not empty but asyncio.Queue is empty.")
75+
self._items.popleft()
76+
return item
77+
78+
def snapshot(self) -> list[str]:
79+
"""Return a list of the current queue contents."""
80+
81+
return list(self._items)
82+
5083
# Known Qdrant-managed dense embedding models with their dimensionality and
5184
# similarity metric. To support a new server-side embedding model, add an entry
5285
# here with the appropriate vector size and `models.Distance` value.
@@ -182,14 +215,20 @@ def _load_imdb_retry_queue(path: Path) -> None:
182215
"""Populate the retry queue from a JSON file if it exists."""
183216

184217
global _imdb_retry_queue
185-
_imdb_retry_queue = asyncio.Queue()
218+
ids: list[str] = []
186219
if path.exists():
187220
try:
188-
ids = json.loads(path.read_text())
189-
for imdb_id in ids:
190-
_imdb_retry_queue.put_nowait(str(imdb_id))
221+
data = json.loads(path.read_text())
222+
if isinstance(data, list):
223+
ids = [str(imdb_id) for imdb_id in data]
224+
else:
225+
logger.warning(
226+
"IMDb retry queue file %s did not contain a list; ignoring its contents",
227+
path,
228+
)
191229
except Exception:
192230
logger.exception("Failed to load IMDb retry queue from %s", path)
231+
_imdb_retry_queue = _IMDbRetryQueue(ids)
193232

194233

195234
async def _process_imdb_retry_queue(client: httpx.AsyncClient) -> None:
@@ -210,8 +249,7 @@ def _persist_imdb_retry_queue(path: Path) -> None:
210249

211250
if _imdb_retry_queue is None:
212251
return
213-
ids = list(_imdb_retry_queue._queue) # type: ignore[attr-defined]
214-
path.write_text(json.dumps(ids))
252+
path.write_text(json.dumps(_imdb_retry_queue.snapshot()))
215253

216254

217255
async def _upsert_in_batches(
@@ -598,7 +636,7 @@ async def run(
598636
async with httpx.AsyncClient(timeout=30) as client:
599637
await _process_imdb_retry_queue(client)
600638
else:
601-
_imdb_retry_queue = asyncio.Queue()
639+
_imdb_retry_queue = _IMDbRetryQueue()
602640

603641
items: List[AggregatedItem]
604642
if sample_dir is not None:

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
44

55
[project]
66
name = "mcp-plex"
7-
version = "0.26.28"
7+
version = "0.26.29"
88

99
description = "Plex-Oriented Model Context Protocol Server"
1010
requires-python = ">=3.11,<3.13"

tests/test_loader_unit.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -370,6 +370,9 @@ async def first_run():
370370

371371
async def second_run():
372372
_load_imdb_retry_queue(queue_path)
373+
assert loader._imdb_retry_queue is not None
374+
assert loader._imdb_retry_queue.qsize() == 1
375+
assert loader._imdb_retry_queue.snapshot() == ["tt0111161"]
373376
async with httpx.AsyncClient(transport=httpx.MockTransport(second_transport)) as client:
374377
await _process_imdb_retry_queue(client)
375378
_persist_imdb_retry_queue(queue_path)
@@ -388,8 +391,7 @@ def test_load_imdb_retry_queue_invalid_json(tmp_path):
388391

389392

390393
def test_process_imdb_retry_queue_requeues(monkeypatch):
391-
queue: asyncio.Queue[str] = asyncio.Queue()
392-
queue.put_nowait("tt0111161")
394+
queue = loader._IMDbRetryQueue(["tt0111161"])
393395
monkeypatch.setattr(loader, "_imdb_retry_queue", queue)
394396

395397
async def fake_fetch(client, imdb_id):
@@ -403,6 +405,7 @@ async def run_test():
403405

404406
asyncio.run(run_test())
405407
assert queue.qsize() == 1
408+
assert queue.snapshot() == ["tt0111161"]
406409

407410

408411
def test_resolve_tmdb_season_number_matches_name():

uv.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments (0)