pydantic_ai_slim/pydantic_ai/messages.py (4 additions, 1 deletion)
@@ -882,7 +882,10 @@ def model_response(self) -> str:
             description = self.content
         else:
             json_errors = error_details_ta.dump_json(self.content, exclude={'__all__': {'ctx'}}, indent=2)
-            description = f'{len(self.content)} validation errors: {json_errors.decode()}'
+            plural = isinstance(self.content, list) and len(self.content) != 1
+            description = (
+                f'{len(self.content)} validation error{"s" if plural else ""}:\n```json\n{json_errors.decode()}\n```'
[Collaborator review comment on the line above: "I like using format_as_xml as Samuel suggested" (a hedged sketch of that alternative appears after this hunk).]
+            )
         return f'{description}\n\nFix the errors and try again.'

     def otel_event(self, settings: InstrumentationSettings) -> Event:
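The effect of the change above is easiest to see in isolation. The following is a self-contained sketch that mirrors the new formatting, with json.dumps standing in for the error_details_ta type adapter used in the real code:

```python
# Standalone sketch of the formatting introduced above (not the library code itself):
# pluralize "validation error(s)" correctly and wrap the JSON payload in a fenced block.
import json


def describe_errors(content: list[dict] | str) -> str:
    if isinstance(content, str):
        description = content
    else:
        json_errors = json.dumps(content, indent=2)
        plural = len(content) != 1
        description = f'{len(content)} validation error{"s" if plural else ""}:\n```json\n{json_errors}\n```'
    return f'{description}\n\nFix the errors and try again.'


print(describe_errors([{'type': 'value_error', 'loc': ['b'], 'msg': 'Value error, must be "bar"', 'input': 'foo'}]))
# 1 validation error:
# ```json
# [
#   {
#     "type": "value_error",
#     ...
#   }
# ]
# ```
#
# Fix the errors and try again.
```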
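As for the format_as_xml suggestion in the review comment, a variant might look like the sketch below. The import path and the bare format_as_xml(errors) call are assumptions about pydantic_ai's helper; the PR itself does not make this change:

```python
# Hypothetical alternative to the ```json fence: render the errors as XML.
# The import path and signature of format_as_xml are assumed, not taken from this PR.
from pydantic_ai import format_as_xml

errors = [{'type': 'value_error', 'loc': ['b'], 'msg': 'Value error, must be "bar"', 'input': 'foo'}]
plural = len(errors) != 1
description = f'{len(errors)} validation error{"s" if plural else ""}:\n{format_as_xml(errors)}'
```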
@@ -259,7 +259,7 @@ interactions:
     name: get_chinese_zodiac
     response:
       call_error: |-
-        1 validation errors: [
+        1 validation error: [
           {
             "type": "greater_than",
             "loc": [
tests/test_agent.py (6 additions, 3 deletions)
@@ -186,7 +186,7 @@ def return_model(messages: list[ModelMessage], info: AgentInfo) -> ModelResponse
             ),
             ModelResponse(
                 parts=[ToolCallPart(tool_name='final_result', args='{"a": 42, "b": "foo"}', tool_call_id=IsStr())],
-                usage=RequestUsage(input_tokens=87, output_tokens=14),
+                usage=RequestUsage(input_tokens=89, output_tokens=14),
                 model_name='function:return_model:',
                 timestamp=IsNow(tz=timezone.utc),
             ),
@@ -245,7 +245,9 @@ def check_b(cls, v: str) -> str:
     retry_prompt = user_retry.parts[0]
     assert isinstance(retry_prompt, RetryPromptPart)
     assert retry_prompt.model_response() == snapshot("""\
-1 validation errors: [
+1 validation error:
+```json
+[
   {
     "type": "value_error",
     "loc": [
@@ -255,6 +257,7 @@
     "input": "foo"
   }
 ]
+```
 
 Fix the errors and try again.""")

@@ -1763,7 +1766,7 @@ class CityLocation(BaseModel):
             ),
             ModelResponse(
                 parts=[TextPart(content='{"city": "Mexico City", "country": "Mexico"}')],
-                usage=RequestUsage(input_tokens=85, output_tokens=12),
+                usage=RequestUsage(input_tokens=87, output_tokens=12),
                 model_name='function:return_city_location:',
                 timestamp=IsDatetime(),
             ),
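The input_tokens bumps in the two hunks above (87 to 89 and 85 to 87) are consistent with the retry prompt growing by two whitespace-separated tokens, the opening ```json fence and the closing ``` fence. Whether the test models really estimate usage by word count is an assumption here, but the arithmetic lines up:

```python
# Rough accounting sketch; the word-count usage estimate is an assumption,
# not necessarily how the function-model tests compute RequestUsage.
old_retry = '1 validation errors: [...]\n\nFix the errors and try again.'
new_retry = '1 validation error:\n```json\n[...]\n```\n\nFix the errors and try again.'
assert len(new_retry.split()) - len(old_retry.split()) == 2  # matches the +2 input_tokens
```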