Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
64 changes: 34 additions & 30 deletions packages/ema/src/agent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { EventEmitter } from "node:events";
import { type LLMClient } from "./llm";
import { AgentConfig } from "./config";
import { Logger } from "./logger";
import { RetryExhaustedError, isAbortError } from "./llm/retry";
import { MessageHistory, RetryExhaustedError, isAbortError } from "./llm/base";
import type { LLMResponse, Message, Content, FunctionResponse } from "./schema";
import type { Tool, ToolResult, ToolContext } from "./tools/base";
import type { EmaReply } from "./tools/ema_reply_tool";
Expand Down Expand Up @@ -94,7 +94,7 @@ export class ContextManager {
events: AgentEventsEmitter;
logger: Logger;

state: AgentState = {
private _state: AgentState = {
systemPrompt: "",
messages: [],
tools: [],
Expand All @@ -104,50 +104,53 @@ export class ContextManager {
llmClient: LLMClient,
events: AgentEventsEmitter,
logger: Logger,
tokenLimit: number = 80000,
public history: MessageHistory = llmClient.createHistory(),
) {
this.llmClient = llmClient;
this.events = events;
this.logger = logger;
}

/** Current agent state (system prompt, messages, tools, tool context). */
get state(): AgentState {
return this._state;
}

/**
 * Replaces the whole state. Routes v.messages through the `messages`
 * setter so the provider-specific history is rebuilt in sync.
 */
set state(v: AgentState) {
this._state = v;
// trigger the messages setter
this.messages = v.messages;
}

/** System prompt sent with every LLM request. */
get systemPrompt(): string {
  return this._state.systemPrompt;
}

set systemPrompt(v: string) {
  this._state.systemPrompt = v;
}

/** Provider-agnostic message list backing the conversation. */
get messages(): Message[] {
  return this._state.messages;
}

/**
 * Replaces the message list and rebuilds the provider-specific history by
 * replaying every message through the history's append API.
 *
 * NOTE(review): later appends made directly via `history.addModelMessage` /
 * `history.addToolMessage` do not update `_state.messages`, so the two
 * stores can diverge once the agent loop starts — keep a single canonical
 * store, or route all appends through one API. TODO confirm intent.
 */
set messages(v: Message[]) {
  this._state.messages = v;
  this.history = v.reduce(
    (acc, msg) => acc.appendMessage(msg),
    this.llmClient.createHistory(),
  );
}
Comment on lines 132 to 142
Copy link

Copilot AI Mar 6, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

ContextManager.messages rebuilds history from the passed array, but later mutations happen via history.addModelMessage/addToolMessage without updating _state.messages. This splits state between two histories and makes getHistory()/state.messages inaccurate after the loop starts. Recommend keeping only one canonical store (preferably history.messages) and deriving messages from it, or ensuring all append operations go through a single API that updates both.

Copilot uses AI. Check for mistakes.

/** Tools available to the agent for this run. */
get tools(): Tool[] {
  return this._state.tools;
}

set tools(v: Tool[]) {
  this._state.tools = v;
}

/** Add a user message to context. */
addUserMessage(contents: Content[]): void {
this.messages.push({ role: "user", contents: contents });
}

/** Add an model message to context. */
addModelMessage(response: LLMResponse): void {
this.messages.push(response.message);
}

/** Add a tool result message to context. */
addToolMessage(contents: FunctionResponse[]): void {
this.messages.push({ role: "user", contents: contents });
get toolContext(): ToolContext | undefined {
return this._state.toolContext;
}

/** Get message history (shallow copy). */
Expand Down Expand Up @@ -189,7 +192,6 @@ export class Agent {
this.llm,
this.events,
this.logger,
this.config.tokenLimit,
);
}

Expand Down Expand Up @@ -243,6 +245,10 @@ export class Agent {
this.contextManager.messages,
);

const handler = this.llm.buildHandler(
this.contextManager.tools,
this.contextManager.systemPrompt,
);
while (step < maxSteps) {
if (this.abortRequested) {
this.finishAborted();
Expand All @@ -253,10 +259,8 @@ export class Agent {
// Call LLM with context from context manager
let response: LLMResponse;
try {
response = await this.llm.generate(
this.contextManager.messages,
this.contextManager.tools,
this.contextManager.systemPrompt,
response = await handler.generate(
this.contextManager.history,
this.abortController?.signal,
);
this.logger.debug(`LLM response received.`, response);
Expand Down Expand Up @@ -291,7 +295,7 @@ export class Agent {
}

// Add model message to context
this.contextManager.addModelMessage(response);
this.contextManager.history.addModelMessage(response);

// Check if task is complete (no tool calls)
if (checkCompleteMessages(this.contextManager.messages)) {
Expand Down Expand Up @@ -341,7 +345,7 @@ export class Agent {
try {
result = await tool.execute(
callArgs,
this.contextManager.state.toolContext,
this.contextManager.toolContext,
);
} catch (err) {
const errorDetail = `${(err as Error).name}: ${(err as Error).message}`;
Expand Down Expand Up @@ -376,7 +380,7 @@ export class Agent {
}

// Add all function responses to context
this.contextManager.addToolMessage(functionResponses);
this.contextManager.history.addToolMessage(functionResponses);

step += 1;
}
Expand Down
126 changes: 115 additions & 11 deletions packages/ema/src/llm/base.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,33 +4,137 @@
*/

import type { Tool } from "../tools/base";
import type { Message, LLMResponse } from "../schema";
import type {
Message,
LLMResponse,
FunctionResponse,
Content,
} from "../schema";

/**
 * Raised when the retry loop gives up: carries the final underlying error
 * and the number of attempts made before the retry budget was exhausted.
 */
export class RetryExhaustedError extends Error {
  constructor(
    public lastException: Error,
    public attempts: number,
  ) {
    super(
      `Retry failed after ${attempts} attempts. Last error: ${lastException.message}`,
    );
    this.name = "RetryExhaustedError";
  }
}

/**
 * Configuration for retrying failed async LLM calls.
 *
 * Retries use exponential backoff: the delay starts at `initial_delay`
 * seconds and grows by `exponential_base` after each failure, capped at
 * `max_delay`. Setting `enabled` to false disables retries entirely.
 */
export class RetryConfig {
  /** Whether the retry mechanism is active. */
  public readonly enabled: boolean;
  /** Maximum number of retry attempts. */
  public readonly max_retries: number;
  /** Initial delay between attempts, in seconds. */
  public readonly initial_delay: number;
  /** Upper bound on the backoff delay, in seconds. */
  public readonly max_delay: number;
  /** Multiplier applied to the delay after each failed attempt. */
  public readonly exponential_base: number;

  constructor(
    enabled: boolean = true,
    max_retries: number = 3,
    initial_delay: number = 1.0,
    max_delay: number = 60.0,
    exponential_base: number = 2.0,
  ) {
    this.enabled = enabled;
    this.max_retries = max_retries;
    this.initial_delay = initial_delay;
    this.max_delay = max_delay;
    this.exponential_base = exponential_base;
  }
}

/**
 * Heuristically determines whether an error represents an aborted request:
 * matches the standard `AbortError` name, or any Error whose message
 * contains "abort" (case-insensitive). Non-Error values never match.
 */
export function isAbortError(error: unknown): boolean {
  return (
    error instanceof Error &&
    (error.name === "AbortError" ||
      error.message.toLowerCase().includes("abort"))
  );
}
Comment on lines +28 to +77
Copy link

Copilot AI Mar 6, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

RetryConfig/RetryExhaustedError/isAbortError were moved into llm/base.ts and llm/retry.ts is deleted, but packages/ema/src/config.ts still imports/exports RetryConfig from ./llm/retry (now missing). Please update the public import/export path (and any downstream imports) to point at the new location to avoid a module-not-found build failure.

Copilot uses AI. Check for mistakes.

/**
* Abstract base class for LLM clients.
*
* This class defines the interface that all LLM clients must implement,
* regardless of the underlying API protocol (Anthropic, OpenAI, etc.).
*/
export abstract class LLMClientBase {
export abstract class LLMClientBase<M = any> {
retryCallback: ((exception: Error, attempt: number) => void) | undefined =
undefined;

abstract adaptTools(tools: Tool[]): any[];

abstract adaptMessages(messages: Message[]): any[];
abstract appendMessage(history: M[], message: Message): M[];

abstract makeApiRequest(
apiMessages: any[],
history: MessageHistory<M>,
apiTools?: any[],
systemPrompt?: string,
signal?: AbortSignal,
): Promise<any>;

abstract generate(
messages: Message[],
tools?: Tool[],
systemPrompt?: string,
signal?: AbortSignal,
): Promise<LLMResponse>;
}

/**
* Holds the messages for the specific LLM provider.
*/
export class MessageHistory<M = any> {
messages: Message[] = [];
private apiMessages: M[] = [];

Comment on lines +104 to +107
Copy link

Copilot AI Mar 6, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

MessageHistory.messages is publicly mutable. If callers push/splice this array directly, apiMessages will become out of sync (since only appendMessage() updates both). Consider making messages private/readonly and exposing a read-only view (or a getMessages() copy) to preserve the invariant that messages and apiMessages represent the same history.

Copilot uses AI. Check for mistakes.
constructor(private readonly client: LLMClientBase<M>) {}

getApiMessagesForClient(client: LLMClientBase<M>): M[] {
if (client !== this.client) {
// this ensures that we always give correct message format to the client
throw new Error(
`Client mismatch: converted to ${this.client.constructor.name} while expected ${client.constructor.name}`,
);
}
return this.apiMessages;
}

/** Adds a user message to context. */
addUserMessage(contents: Content[]): void {
this.appendMessage({ role: "user", contents: contents });
}

/** Adds an model message to context. */
addModelMessage(response: LLMResponse): void {
this.appendMessage(response.message);
}

/** Adds a tool result message to context. */
addToolMessage(contents: FunctionResponse[]): void {
this.appendMessage({ role: "user", contents: contents });
}

appendMessage(message: Message): this {
this.messages.push(message);
this.apiMessages = this.client.appendMessage(this.apiMessages, message);
return this;
}
}
Comment on lines +101 to +140
Copy link

Copilot AI Mar 6, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

New core behavior (MessageHistory incremental conversion + LLMClient.buildHandler/withRetry) isn’t covered by tests (no MessageHistory references under packages/ema/src/tests). Adding vitest coverage for history append/merging and retry enabled/disabled paths would help prevent regressions, especially since this PR changes the primary LLM call flow.

Copilot generated this review using guidance from repository custom instructions.
Loading
Loading