From 04441c23f8b9a9a083c50224ee69822bbb37c809 Mon Sep 17 00:00:00 2001
From: Ratish1
Date: Fri, 28 Nov 2025 22:25:45 +0400
Subject: [PATCH] fix(streaming): ensure process_stream returns non-empty message content

---
 src/strands/event_loop/streaming.py        | 3 +++
 tests/strands/event_loop/test_streaming.py | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/src/strands/event_loop/streaming.py b/src/strands/event_loop/streaming.py
index 43836fe34..33d5687da 100644
--- a/src/strands/event_loop/streaming.py
+++ b/src/strands/event_loop/streaming.py
@@ -413,6 +413,9 @@ async def process_stream(
         elif "redactContent" in chunk:
             handle_redact_content(chunk["redactContent"], state)
 
+    if not state["message"]["content"]:
+        state["message"]["content"].append({"text": ""})
+
     yield ModelStopReason(stop_reason=stop_reason, message=state["message"], usage=usage, metrics=metrics)
 
 
diff --git a/tests/strands/event_loop/test_streaming.py b/tests/strands/event_loop/test_streaming.py
index 3f5a6c998..b0ddd7513 100644
--- a/tests/strands/event_loop/test_streaming.py
+++ b/tests/strands/event_loop/test_streaming.py
@@ -578,7 +578,7 @@ def test_extract_usage_metrics_empty_metadata():
         "end_turn",
         {
             "role": "assistant",
-            "content": [],
+            "content": [{"text": ""}],
         },
         {"inputTokens": 0, "outputTokens": 0, "totalTokens": 0},
         {"latencyMs": 0, "timeToFirstByteMs": 0},
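
Reviewer note (not part of the patch): a minimal sketch of the case the new guard covers. It assumes the message dict shape ({"role": ..., "content": [...]}) that streaming.py builds up in state; the variable values here are illustrative only.

    # Sketch of the empty-stream case handled by the guard added above.
    # Assumes the message shape used in streaming.py's state; values are illustrative.
    state = {"message": {"role": "assistant", "content": []}}

    # Before the fix, a stream that emitted no content blocks yielded a message
    # with an empty content list; the guard appends an empty text block instead,
    # so downstream consumers always receive at least one content entry.
    if not state["message"]["content"]:
        state["message"]["content"].append({"text": ""})

    assert state["message"]["content"] == [{"text": ""}]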