4 changes: 4 additions & 0 deletions docs/agents.md
@@ -322,6 +322,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
@@ -386,6 +387,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
@@ -1050,6 +1052,7 @@ with capture_run_messages() as messages: # (2)!
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
@@ -1074,6 +1077,7 @@ with capture_run_messages() as messages: # (2)!
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
1 change: 1 addition & 0 deletions docs/api/models/function.md
@@ -30,6 +30,7 @@ async def model_function(
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
]
6 changes: 6 additions & 0 deletions docs/deferred-tools.md
@@ -118,6 +118,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
@@ -152,6 +153,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelRequest(
@@ -173,6 +175,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
@@ -197,6 +200,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
@@ -324,6 +328,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
@@ -350,6 +355,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
7 changes: 7 additions & 0 deletions docs/message-history.md
@@ -51,6 +51,7 @@ print(result.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
@@ -95,6 +96,7 @@ async def main():
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
)
]
@@ -122,6 +124,7 @@ async def main():
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
@@ -178,6 +181,7 @@ print(result2.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
@@ -198,6 +202,7 @@ print(result2.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
@@ -303,6 +308,7 @@ print(result2.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
@@ -323,6 +329,7 @@ print(result2.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
2 changes: 2 additions & 0 deletions docs/testing.md
@@ -128,6 +128,7 @@ async def test_forecast():
timestamp=IsNow(tz=timezone.utc), # (7)!
),
],
timestamp=IsNow(tz=timezone.utc),
run_id=IsStr(),
),
ModelResponse(
@@ -158,6 +159,7 @@ async def test_forecast():
timestamp=IsNow(tz=timezone.utc),
),
],
timestamp=IsNow(tz=timezone.utc),
run_id=IsStr(),
),
ModelResponse(
3 changes: 3 additions & 0 deletions docs/tools.md
@@ -88,6 +88,7 @@ print(dice_result.all_messages())
timestamp=datetime.datetime(...),
),
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
@@ -110,6 +111,7 @@ print(dice_result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
@@ -132,6 +134,7 @@ print(dice_result.all_messages())
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
),
ModelResponse(
11 changes: 9 additions & 2 deletions pydantic_ai_slim/pydantic_ai/_agent_graph.py
@@ -19,7 +19,7 @@
from pydantic_ai._function_schema import _takes_ctx as is_takes_ctx # type: ignore
from pydantic_ai._instrumentation import DEFAULT_INSTRUMENTATION_VERSION
from pydantic_ai._tool_manager import ToolManager
from pydantic_ai._utils import dataclasses_no_defaults_repr, get_union_args, is_async_callable, run_in_executor
from pydantic_ai._utils import dataclasses_no_defaults_repr, get_union_args, is_async_callable, now_utc, run_in_executor
from pydantic_ai.builtin_tools import AbstractBuiltinTool
from pydantic_graph import BaseNode, GraphRunContext
from pydantic_graph.beta import Graph, GraphBuilder
@@ -487,6 +487,7 @@ async def _make_request(
async def _prepare_request(
self, ctx: GraphRunContext[GraphAgentState, GraphAgentDeps[DepsT, NodeRunEndT]]
) -> tuple[ModelSettings | None, models.ModelRequestParameters, list[_messages.ModelMessage], RunContext[DepsT]]:
self.request.timestamp = now_utc()
self.request.run_id = self.request.run_id or ctx.state.run_id
ctx.state.message_history.append(self.request)

@@ -504,6 +505,11 @@ async def _prepare_request(
# Update the new message index to ensure `result.new_messages()` returns the correct messages
ctx.deps.new_message_index -= len(original_history) - len(message_history)

# Ensure the last request has a timestamp (history processors may create new ModelRequest objects without one)
last_request = message_history[-1]
if isinstance(last_request, _messages.ModelRequest) and last_request.timestamp is None:
last_request.timestamp = self.request.timestamp

# Merge possible consecutive trailing `ModelRequest`s into one, with tool call parts before user parts,
# but don't store it in the message history on state. This is just for the benefit of model classes that want clear user/assistant boundaries.
# See `tests/test_tools.py::test_parallel_tool_return_with_deferred` for an example where this is necessary
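For context on the backfill above, here is a minimal sketch of the kind of history processor it guards against. The name keep_last_request and its truncation logic are hypothetical, not part of this PR; the point is that any processor that constructs a fresh ModelRequest leaves timestamp at its None default, which the backfill then populates from the current request.

from pydantic_ai.messages import ModelMessage, ModelRequest

def keep_last_request(messages: list[ModelMessage]) -> list[ModelMessage]:
    # Hypothetical history processor: keep only the most recent request.
    last = messages[-1]
    if isinstance(last, ModelRequest):
        # A freshly constructed ModelRequest defaults to timestamp=None,
        # which is what the backfill in _prepare_request compensates for.
        return [ModelRequest(parts=last.parts, instructions=last.instructions)]
    return messages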
@@ -780,7 +786,7 @@ def _handle_final_result(

# For backwards compatibility, append a new ModelRequest using the tool returns and retries
if tool_responses:
messages.append(_messages.ModelRequest(parts=tool_responses, run_id=ctx.state.run_id))
messages.append(_messages.ModelRequest(parts=tool_responses, run_id=ctx.state.run_id, timestamp=now_utc()))

return End(final_result)

@@ -1359,6 +1365,7 @@ def _clean_message_history(messages: list[_messages.ModelMessage]) -> list[_mess
merged_message = _messages.ModelRequest(
parts=parts,
instructions=last_message.instructions or message.instructions,
timestamp=message.timestamp or last_message.timestamp,
)
clean_messages[-1] = merged_message
else:
1 change: 1 addition & 0 deletions pydantic_ai_slim/pydantic_ai/agent/__init__.py
@@ -524,6 +524,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
7 changes: 6 additions & 1 deletion pydantic_ai_slim/pydantic_ai/agent/abstract.py
@@ -585,7 +585,11 @@ async def on_complete() -> None:

# For backwards compatibility, append a new ModelRequest using the tool returns and retries
if parts:
messages.append(_messages.ModelRequest(parts, run_id=graph_ctx.state.run_id))
messages.append(
_messages.ModelRequest(
parts, run_id=graph_ctx.state.run_id, timestamp=_utils.now_utc()
)
)

await agent_run.next(_agent_graph.SetFinalResult(final_result))

@@ -1030,6 +1034,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
1 change: 1 addition & 0 deletions pydantic_ai_slim/pydantic_ai/agent/wrapper.py
@@ -169,6 +169,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
1 change: 1 addition & 0 deletions pydantic_ai_slim/pydantic_ai/durable_exec/dbos/_agent.py
@@ -824,6 +824,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
(file header not captured)
@@ -769,6 +769,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
(file header not captured)
@@ -843,6 +843,7 @@ async def main():
timestamp=datetime.datetime(...),
)
],
timestamp=datetime.datetime(...),
run_id='...',
)
),
12 changes: 9 additions & 3 deletions pydantic_ai_slim/pydantic_ai/messages.py
@@ -1001,6 +1001,11 @@ class ModelRequest:

_: KW_ONLY

# Default is None for backwards compatibility with old serialized messages that don't have this field.
# Using a default_factory would incorrectly fill in the current time for deserialized historical messages.
timestamp: datetime | None = None
"""The timestamp when the request was sent to the model."""

Comment on lines +1006 to +1008

Collaborator (Author): ModelRequest.timestamp needs to be None by default for backwards compat

Collaborator: That warrants a code comment!

instructions: str | None = None
"""The instructions for the model."""

@@ -1016,7 +1021,7 @@ class ModelRequest:
@classmethod
def user_text_prompt(cls, user_prompt: str, *, instructions: str | None = None) -> ModelRequest:
"""Create a `ModelRequest` with a single user prompt as text."""
return cls(parts=[UserPromptPart(user_prompt)], instructions=instructions)
return cls(parts=[UserPromptPart(user_prompt)], instructions=instructions, timestamp=_now_utc())

__repr__ = _utils.dataclasses_no_defaults_repr
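
To make the backwards-compatibility point from the review thread concrete, here is a hedged sketch. It assumes ModelMessagesTypeAdapter round-trips these message dataclasses as shown in the message-history docs; stripping the key from the dumped payload only simulates a history that was serialized before this field existed.

from pydantic_ai.messages import ModelMessagesTypeAdapter, ModelRequest

# Build a request the new way: it is stamped at creation per this PR.
fresh = ModelRequest.user_text_prompt('Hello')

# Simulate a pre-upgrade payload by dropping the new field before re-validating.
payload = ModelMessagesTypeAdapter.dump_python([fresh], mode='json')
payload[0].pop('timestamp', None)  # old payloads have no such key

restored = ModelMessagesTypeAdapter.validate_python(payload)
assert restored[0].timestamp is None   # deserialized history is not backfilled with "now"
assert fresh.timestamp is not None     # newly built requests carry a real timestamp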

@@ -1242,9 +1247,10 @@ class ModelResponse:
"""The name of the model that generated the response."""

timestamp: datetime = field(default_factory=_now_utc)
"""The timestamp of the response.
"""The timestamp when the response was received locally.

If the model provides a timestamp in the response (as OpenAI does) that will be used.
This is always a high-precision local datetime. Provider-specific timestamps
(if available) are stored in `provider_details['timestamp']`.
"""

kind: Literal['response'] = 'response'
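
Given the revised docstring, code that previously relied on the provider-reported time would now look in provider_details rather than timestamp. A minimal sketch follows; it assumes the key is 'timestamp' as the docstring states and that the stored value may be either an ISO string or a datetime, which is not verified against every provider, and the helper name provider_timestamp is hypothetical.

from datetime import datetime
from pydantic_ai.messages import ModelResponse

def provider_timestamp(response: ModelResponse) -> datetime:
    # Prefer the provider-reported timestamp when present; otherwise fall back
    # to the locally recorded receive time on the response.
    details = response.provider_details or {}
    raw = details.get('timestamp')
    if isinstance(raw, str):
        return datetime.fromisoformat(raw)
    if isinstance(raw, datetime):
        return raw
    return response.timestamp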
3 changes: 1 addition & 2 deletions pydantic_ai_slim/pydantic_ai/models/anthropic.py
@@ -580,7 +580,6 @@ async def _process_streamed_response(
model_request_parameters=model_request_parameters,
_model_name=first_chunk.message.model,
_response=peekable_response,
_timestamp=_utils.now_utc(),
_provider_name=self._provider.name,
_provider_url=self._provider.base_url,
)
@@ -1142,9 +1141,9 @@ class AnthropicStreamedResponse(StreamedResponse):

_model_name: AnthropicModelName
_response: AsyncIterable[BetaRawMessageStreamEvent]
_timestamp: datetime
_provider_name: str
_provider_url: str
_timestamp: datetime = field(default_factory=_utils.now_utc)

async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]: # noqa: C901
current_block: BetaContentBlock | None = None