Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 14 additions & 2 deletions cp-agent/cp_agent/agents/coder/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from uuid import UUID
from zoneinfo import ZoneInfo

import litellm
from litellm import CustomStreamWrapper, acompletion
from loguru import logger

Expand Down Expand Up @@ -468,9 +469,20 @@ async def _recursively_process_messages(
yield TextEvent(text=error_msg)
self.state_manager.task.timeout()

except litellm.exceptions.BadRequestError as e:
logger.exception(e)
error_msg = f"Bad request error: {e.message}"
Copy link

Copilot AI Apr 12, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Consider using str(e) instead of e.message to safely obtain the error message, as not all exceptions may have a 'message' attribute.

Suggested change
error_msg = f"Bad request error: {e.message}"
error_msg = f"Bad request error: {str(e)}"

Copilot uses AI. Check for mistakes.
logger.error(error_msg)
await self.message_manager.add_assistant_message(error_msg)
yield TextEvent(text=error_msg)
self.state_manager.task.fail()

except Exception as e:
logger.error(f"Stream processing error: {e}", exc_info=True)
error_msg = f"Error processing assistant response: {str(e)}"
logger.error(
f"Task {self.state_manager.task.id} failed: {e}", exc_info=True
)
error_msg = f"Task failed: {str(e)}"
await self.message_manager.add_assistant_message(error_msg)
yield TextEvent(text=error_msg)
self.state_manager.task.fail()

Expand Down
10 changes: 9 additions & 1 deletion cp-agent/cp_agent/agents/coder/message_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,8 @@ def __init__(

self.chat_history: list[dict[str, Any]] = []
self.enable_prompt_cache = enable_prompt_cache
self.checkpoint_count = 0
self.max_checkpoints = 4

async def compact_memory(self) -> None:
"""Manually trigger memory compaction."""
Expand Down Expand Up @@ -75,7 +77,8 @@ async def add_user_message(
async def add_assistant_message(self, content: str) -> None:
"""Add assistant message to both API memory and chat history."""

if self.enable_prompt_cache:
if self.enable_prompt_cache and self.checkpoint_count < self.max_checkpoints:
self.checkpoint_count += 1
message_content: list[MessagePart] = [create_text_block(content)]
if not IS_BEDROCK:
message_content = [create_text_block(content, "ephemeral")]
Expand All @@ -87,6 +90,11 @@ async def add_assistant_message(self, content: str) -> None:

self.memory.rpush("messages", dict(message))

async def reset_checkpoints(self) -> None:
    """Zero out the prompt-cache checkpoint counter.

    Call when a fresh conversation begins so that subsequent assistant
    messages may again create cache checkpoints (bounded by
    ``max_checkpoints``).
    """
    # NOTE(review): no awaits occur here; presumably declared async for
    # interface consistency with the other message-manager methods — confirm.
    self.checkpoint_count = 0
    logger.debug("Reset checkpoint counter")

async def add_memory_item(
self, content: MessageContent, role: str = "user"
) -> None:
Expand Down
Loading