Skip to content

Commit 3c8c04b

Browse files
Add prompt-caching examples for Effect AI (#44)
Add prompt-caching examples for Effect AI

- Add typescript/effect-ai/src/prompt-caching/user-message-cache.ts
- Demonstrates cache_control using options.openrouter.cacheControl in Prompt
- Shows Effect.gen pattern with Layer-based dependency injection
- Critical configuration: stream_options.include_usage in model config layer
- Evidence-based verification via response.usage.cachedInputTokens
- Run biome format
- Simplify Effect AI prompt-caching README to link to main docs
- Rename prompt caching examples with anthropic prefix
1 parent 081b7ee commit 3c8c04b

File tree

10 files changed

+517
-1
lines changed

10 files changed

+517
-1
lines changed

docs/prompt-caching.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,3 +12,4 @@ See ecosystem-specific examples in this repository for runnable reference implem
1212

1313
- **TypeScript + fetch**: [typescript/fetch/src/prompt-caching/](../typescript/fetch/src/prompt-caching/)
1414
- **AI SDK v5** (Vercel): [typescript/ai-sdk-v5/src/prompt-caching/](../typescript/ai-sdk-v5/src/prompt-caching/)
15+
- **Effect AI** (@effect/ai): [typescript/effect-ai/src/prompt-caching/](../typescript/effect-ai/src/prompt-caching/)

typescript/effect-ai/README.md

Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
# Effect-TS AI Examples
2+
3+
Examples using Effect-TS with @effect/ai and @effect/ai-openrouter for type-safe, composable AI operations.
4+
5+
## Prerequisites
6+
7+
- Bun runtime: `curl -fsSL https://bun.sh/install | bash`
8+
- `OPENROUTER_API_KEY` environment variable
9+
10+
## Running Examples
11+
12+
```bash
13+
# From monorepo root (typescript/)
14+
bun examples
15+
16+
# Or from this workspace
17+
cd effect-ai
18+
bun examples
19+
```
20+
21+
## Features
22+
23+
- [prompt-caching](./src/prompt-caching/) - Anthropic caching examples with Effect patterns
24+
25+
### Key Configuration
26+
27+
**CRITICAL**: The Effect AI example requires:
28+
```typescript
29+
config: {
30+
stream_options: { include_usage: true }
31+
}
32+
```
33+
34+
Without this, `usage.cachedInputTokens` will be undefined in the response.
35+
36+
### Effect Patterns Demonstrated
37+
38+
- `Effect.gen` for generator-based composition
39+
- Layer-based dependency injection
40+
- Type-safe error handling
41+
- Evidence-based validation
42+
43+
## Dependencies
44+
45+
- `@openrouter-examples/shared` - Shared constants (LARGE_SYSTEM_PROMPT) and types
46+
- `@effect/ai` - Effect AI abstractions
47+
- `@effect/ai-openrouter` - OpenRouter provider for Effect AI
48+
- `effect` - Effect-TS core library

typescript/effect-ai/package.json

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
{
2+
"name": "@openrouter-examples/effect-ai",
3+
"version": "1.0.0",
4+
"private": true,
5+
"type": "module",
6+
"scripts": {
7+
"examples": "bun run run-examples.ts",
8+
"typecheck": "tsc --noEmit"
9+
},
10+
"dependencies": {
11+
"@openrouter-examples/shared": "workspace:*",
12+
"@effect/ai": "^0.32.1",
13+
"@effect/ai-openrouter": "^0.6.0",
14+
"@effect/platform": "^0.93.0",
15+
"@effect/platform-bun": "^0.83.0",
16+
"effect": "^3.19.3"
17+
},
18+
"devDependencies": {
19+
"@types/bun": "latest"
20+
}
21+
}
Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
1+
#!/usr/bin/env bun
2+
/**
3+
* Run all example files in the src/ directory
4+
* Each example is run in a separate process to handle process.exit() calls
5+
*/
6+
7+
import { readdirSync, statSync } from 'fs';
8+
import { join } from 'path';
9+
import { $ } from 'bun';
10+
11+
const srcDir = join(import.meta.dir, 'src');
12+
13+
// Recursively find all .ts files in src/
14+
function findExamples(dir: string): string[] {
15+
const entries = readdirSync(dir);
16+
const files: string[] = [];
17+
18+
for (const entry of entries) {
19+
const fullPath = join(dir, entry);
20+
const stat = statSync(fullPath);
21+
22+
if (stat.isDirectory()) {
23+
files.push(...findExamples(fullPath));
24+
} else if (entry.endsWith('.ts')) {
25+
files.push(fullPath);
26+
}
27+
}
28+
29+
return files.sort();
30+
}
31+
32+
const examples = findExamples(srcDir);
33+
console.log(`Found ${examples.length} example(s)\n`);
34+
35+
let failed = 0;
36+
for (const example of examples) {
37+
const relativePath = example.replace(import.meta.dir + '/', '');
38+
console.log(`\n${'='.repeat(80)}`);
39+
console.log(`Running: ${relativePath}`);
40+
console.log('='.repeat(80));
41+
42+
try {
43+
await $`bun run ${example}`.quiet();
44+
console.log(`✅ ${relativePath} completed successfully`);
45+
} catch (error) {
46+
console.error(`❌ ${relativePath} failed`);
47+
failed++;
48+
}
49+
}
50+
51+
console.log(`\n${'='.repeat(80)}`);
52+
console.log(`Results: ${examples.length - failed}/${examples.length} passed`);
53+
console.log('='.repeat(80));
54+
55+
if (failed > 0) {
56+
process.exit(1);
57+
}
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
# Prompt Caching Examples (Effect AI)
2+
3+
Examples demonstrating prompt caching with @effect/ai and @effect/ai-openrouter.
4+
5+
## Documentation
6+
7+
For full prompt caching documentation including all providers, pricing, and configuration details, see:
8+
- **[OpenRouter Prompt Caching Guide](https://openrouter.ai/docs/features/prompt-caching)**
9+
10+
## Examples in This Directory
11+
12+
See the TypeScript files in this directory for specific examples.
Lines changed: 119 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,119 @@
1+
/**
2+
* Example: Anthropic Prompt Caching - Multi-Message Conversation (Effect AI)
3+
*
4+
* This example demonstrates Anthropic prompt caching in a multi-message conversation
5+
* via OpenRouter using Effect AI.
6+
*
7+
* Pattern: User message cache in multi-turn conversation using Effect patterns
8+
*/
9+
10+
import * as OpenRouterClient from '@effect/ai-openrouter/OpenRouterClient';
11+
import * as OpenRouterLanguageModel from '@effect/ai-openrouter/OpenRouterLanguageModel';
12+
import * as LanguageModel from '@effect/ai/LanguageModel';
13+
import * as Prompt from '@effect/ai/Prompt';
14+
import { FetchHttpClient } from '@effect/platform';
15+
import * as BunContext from '@effect/platform-bun/BunContext';
16+
import { LARGE_SYSTEM_PROMPT } from '@openrouter-examples/shared/constants';
17+
import { Console, Effect, Layer, Redacted } from 'effect';
18+
19+
const program = Effect.gen(function* () {
20+
const testId = Date.now();
21+
const largeContext = `Test ${testId}: Context:\n\n${LARGE_SYSTEM_PROMPT}`;
22+
23+
yield* Console.log(
24+
'╔════════════════════════════════════════════════════════════════════════════╗',
25+
);
26+
yield* Console.log(
27+
'║ Anthropic Prompt Caching - Multi-Message (Effect AI) ║',
28+
);
29+
yield* Console.log(
30+
'╚════════════════════════════════════════════════════════════════════════════╝',
31+
);
32+
yield* Console.log('');
33+
yield* Console.log('Testing cache_control in multi-turn conversation');
34+
yield* Console.log('');
35+
36+
const makePrompt = () =>
37+
Prompt.make([
38+
{
39+
role: 'user' as const,
40+
content: [
41+
{
42+
type: 'text' as const,
43+
text: largeContext,
44+
options: {
45+
openrouter: {
46+
cacheControl: { type: 'ephemeral' as const },
47+
},
48+
},
49+
},
50+
{
51+
type: 'text' as const,
52+
text: "Hello, what's your purpose?",
53+
},
54+
],
55+
},
56+
{
57+
role: 'assistant' as const,
58+
content: "I'm an AI assistant designed to help with various tasks.",
59+
},
60+
{
61+
role: 'user' as const,
62+
content: 'What programming languages do you know?',
63+
},
64+
]);
65+
66+
yield* Console.log('First Call (Cache Miss Expected)');
67+
const response1 = yield* LanguageModel.generateText({
68+
prompt: makePrompt(),
69+
});
70+
const cached1 = response1.usage.cachedInputTokens ?? 0;
71+
yield* Console.log(` Response: ${response1.text.substring(0, 80)}...`);
72+
yield* Console.log(` cached_tokens=${cached1}`);
73+
74+
yield* Effect.sleep('1 second');
75+
76+
yield* Console.log('\nSecond Call (Cache Hit Expected)');
77+
const response2 = yield* LanguageModel.generateText({
78+
prompt: makePrompt(),
79+
});
80+
const cached2 = response2.usage.cachedInputTokens ?? 0;
81+
yield* Console.log(` Response: ${response2.text.substring(0, 80)}...`);
82+
yield* Console.log(` cached_tokens=${cached2}`);
83+
84+
// Analysis
85+
yield* Console.log('\n' + '='.repeat(80));
86+
yield* Console.log('ANALYSIS');
87+
yield* Console.log('='.repeat(80));
88+
yield* Console.log(`First call: cached_tokens=${cached1} (expected: 0)`);
89+
yield* Console.log(`Second call: cached_tokens=${cached2} (expected: >0)`);
90+
91+
const success = cached1 === 0 && cached2 > 0;
92+
93+
if (success) {
94+
yield* Console.log('\n✓ SUCCESS - Multi-message caching is working correctly');
95+
} else {
96+
yield* Console.log('\n✗ FAILURE - Multi-message caching is not working as expected');
97+
}
98+
99+
yield* Console.log('='.repeat(80));
100+
});
101+
102+
const OpenRouterClientLayer = OpenRouterClient.layer({
103+
apiKey: Redacted.make(process.env.OPENROUTER_API_KEY!),
104+
}).pipe(Layer.provide(FetchHttpClient.layer));
105+
106+
const OpenRouterModelLayer = OpenRouterLanguageModel.layer({
107+
model: 'anthropic/claude-3.5-sonnet',
108+
config: {
109+
stream_options: { include_usage: true },
110+
},
111+
}).pipe(Layer.provide(OpenRouterClientLayer));
112+
113+
await program.pipe(
114+
Effect.provide(OpenRouterModelLayer),
115+
Effect.provide(BunContext.layer),
116+
Effect.runPromise,
117+
);
118+
119+
console.log('\n✓ Program completed successfully');
Lines changed: 111 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,111 @@
1+
/**
2+
* Example: Anthropic Prompt Caching - Control (No cache_control) (Effect AI)
3+
*
4+
* This is a CONTROL scenario demonstrating that without cache_control,
5+
* no caching occurs.
6+
*
7+
* Purpose: Validates that cache behavior is due to cache_control, not coincidence
8+
*/
9+
10+
import * as OpenRouterClient from '@effect/ai-openrouter/OpenRouterClient';
11+
import * as OpenRouterLanguageModel from '@effect/ai-openrouter/OpenRouterLanguageModel';
12+
import * as LanguageModel from '@effect/ai/LanguageModel';
13+
import * as Prompt from '@effect/ai/Prompt';
14+
import { FetchHttpClient } from '@effect/platform';
15+
import * as BunContext from '@effect/platform-bun/BunContext';
16+
import { LARGE_SYSTEM_PROMPT } from '@openrouter-examples/shared/constants';
17+
import { Console, Effect, Layer, Redacted } from 'effect';
18+
19+
const program = Effect.gen(function* () {
20+
const testId = Date.now();
21+
const largeContext = `Test ${testId}: Context:\n\n${LARGE_SYSTEM_PROMPT}`;
22+
23+
yield* Console.log(
24+
'╔════════════════════════════════════════════════════════════════════════════╗',
25+
);
26+
yield* Console.log(
27+
'║ Anthropic Prompt Caching - Control (No cache_control) (Effect AI) ║',
28+
);
29+
yield* Console.log(
30+
'╚════════════════════════════════════════════════════════════════════════════╝',
31+
);
32+
yield* Console.log('');
33+
yield* Console.log('Testing WITHOUT cache_control (control scenario)');
34+
yield* Console.log('');
35+
36+
const makePrompt = () =>
37+
Prompt.make([
38+
{
39+
role: 'user' as const,
40+
content: [
41+
{
42+
type: 'text' as const,
43+
text: largeContext,
44+
// NO cache_control - this is the control
45+
},
46+
{
47+
type: 'text' as const,
48+
text: 'What are the key principles?',
49+
},
50+
],
51+
},
52+
]);
53+
54+
yield* Console.log('First Call (No Cache Expected)');
55+
const response1 = yield* LanguageModel.generateText({
56+
prompt: makePrompt(),
57+
});
58+
const cached1 = response1.usage.cachedInputTokens ?? 0;
59+
yield* Console.log(` cached_tokens=${cached1}`);
60+
61+
yield* Effect.sleep('1 second');
62+
63+
yield* Console.log('\nSecond Call (No Cache Expected)');
64+
const response2 = yield* LanguageModel.generateText({
65+
prompt: makePrompt(),
66+
});
67+
const cached2 = response2.usage.cachedInputTokens ?? 0;
68+
yield* Console.log(` cached_tokens=${cached2}`);
69+
70+
// Analysis
71+
yield* Console.log('\n' + '='.repeat(80));
72+
yield* Console.log('ANALYSIS (CONTROL)');
73+
yield* Console.log('='.repeat(80));
74+
yield* Console.log(`First call: cached_tokens=${cached1} (expected: 0)`);
75+
yield* Console.log(`Second call: cached_tokens=${cached2} (expected: 0)`);
76+
77+
if (cached1 === 0 && cached2 === 0) {
78+
yield* Console.log('✓ No cache metrics present (expected for control)');
79+
} else {
80+
yield* Console.log('✗ Unexpected cache metrics in control scenario');
81+
}
82+
83+
const success = cached1 === 0 && cached2 === 0;
84+
85+
if (success) {
86+
yield* Console.log('\n✓ SUCCESS - Control scenario confirms no false positives');
87+
} else {
88+
yield* Console.log('\n✗ FAILURE - Control scenario shows unexpected cache behavior');
89+
}
90+
91+
yield* Console.log('='.repeat(80));
92+
});
93+
94+
const OpenRouterClientLayer = OpenRouterClient.layer({
95+
apiKey: Redacted.make(process.env.OPENROUTER_API_KEY!),
96+
}).pipe(Layer.provide(FetchHttpClient.layer));
97+
98+
const OpenRouterModelLayer = OpenRouterLanguageModel.layer({
99+
model: 'anthropic/claude-3.5-sonnet',
100+
config: {
101+
stream_options: { include_usage: true },
102+
},
103+
}).pipe(Layer.provide(OpenRouterClientLayer));
104+
105+
await program.pipe(
106+
Effect.provide(OpenRouterModelLayer),
107+
Effect.provide(BunContext.layer),
108+
Effect.runPromise,
109+
);
110+
111+
console.log('\n✓ Program completed successfully');

0 commit comments

Comments
 (0)