README.md: 12 changes (9 additions, 3 deletions)
@@ -11,7 +11,9 @@ A blazing-fast AI search engine powered by Firecrawl's web scraping API. Get int
## Features

- **Real-time Web Search** - Powered by Firecrawl's search API
- **AI Responses** - Streaming answers with GPT-4o-mini
- **Flexible AI Providers** - Choose between OpenAI and OpenRouter
- **Configurable Models** - Select from available models when using OpenRouter
- **AI Responses** - Streaming answers from your chosen LLM
- **Source Citations** - Every claim backed by references
- **Live Stock Data** - Automatic TradingView charts
- **Smart Follow-ups** - AI-generated questions
@@ -33,9 +35,12 @@ cp .env.example .env.local
Add to `.env.local`:
```
FIRECRAWL_API_KEY=fc-your-api-key
OPENAI_API_KEY=sk-your-api-key
OPENAI_API_KEY=sk-your-api-key # Required if using the OpenAI provider
OPENROUTER_API_KEY=your-openrouter-key # Required if using the OpenRouter provider
```

**Note:** If environment variables for API keys are not set, the application will prompt you to enter them in the UI. These will be stored in your browser's `localStorage`.

### Run
```bash
npm run dev
@@ -47,9 +52,10 @@ Visit http://localhost:3000

- **Firecrawl** - Web scraping API
- **Next.js 15** - React framework
- **OpenAI** - GPT-4o-mini
- **OpenAI / OpenRouter** - LLM providers
- **Vercel AI SDK** - Streaming
- **TradingView** - Stock charts
- **Shadcn UI & Tailwind CSS** - Frontend components

## Deploy

app/api/fireplexity/check-env/route.ts: 4 changes (3 additions, 1 deletion)
@@ -2,6 +2,8 @@ import { NextResponse } from 'next/server'

export async function GET() {
return NextResponse.json({
hasFirecrawlKey: !!process.env.FIRECRAWL_API_KEY
hasFirecrawlKey: !!process.env.FIRECRAWL_API_KEY,
hasOpenAIKey: !!process.env.OPENAI_API_KEY,
hasOpenRouterKey: !!process.env.OPENROUTER_API_KEY
})
}
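
For reference, a client could pair this endpoint with the `localStorage` fallback described in the README note along the lines below. This is a hypothetical sketch rather than code from this PR: the helper name and the storage key names are assumptions.

```typescript
// Hypothetical client-side helper (not part of this PR).
// Asks the server which keys it already has, then checks localStorage for any
// keys the user previously entered in the UI. The storage key names are illustrative.
interface EnvCheck {
  hasFirecrawlKey: boolean
  hasOpenAIKey: boolean
  hasOpenRouterKey: boolean
}

export async function getMissingKeys(): Promise<string[]> {
  const res = await fetch('/api/fireplexity/check-env')
  const env: EnvCheck = await res.json()

  const missing: string[] = []
  // Only the Firecrawl and OpenRouter keys can be supplied from the browser;
  // the search route reads OPENAI_API_KEY from the server environment only.
  if (!env.hasFirecrawlKey && !localStorage.getItem('firecrawlApiKey')) {
    missing.push('firecrawlApiKey')
  }
  if (!env.hasOpenRouterKey && !localStorage.getItem('openRouterApiKey')) {
    missing.push('openRouterApiKey')
  }
  return missing
}
```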
app/api/fireplexity/search/route.ts: 55 changes (39 additions, 16 deletions)
@@ -1,5 +1,5 @@
import { NextResponse } from 'next/server'
import { createOpenAI } from '@ai-sdk/openai'
import { createOpenAI, OpenAIProvider } from '@ai-sdk/openai'
import { streamText, generateText, createDataStreamResponse } from 'ai'
import { detectCompanyTicker } from '@/lib/company-ticker-map'
import { selectRelevantContent } from '@/lib/content-selection'
@@ -12,28 +12,51 @@ export async function POST(request: Request) {
const body = await request.json()
const messages = body.messages || []
const query = messages[messages.length - 1]?.content || body.query
const provider = body.provider || 'openai' // Default to openai
const openRouterModel = body.openRouterModel // Model for OpenRouter
const openRouterApiKey = body.openRouterApiKey || process.env.OPENROUTER_API_KEY

console.log(`[${requestId}] Query received:`, query)
console.log(`[${requestId}] Provider:`, provider)
if (provider === 'openrouter') {
console.log(`[${requestId}] OpenRouter Model:`, openRouterModel)
}

if (!query) {
return NextResponse.json({ error: 'Query is required' }, { status: 400 })
}

// Use API key from request body if provided, otherwise fall back to environment variable
// API key validation
const firecrawlApiKey = body.firecrawlApiKey || process.env.FIRECRAWL_API_KEY
const openaiApiKey = process.env.OPENAI_API_KEY

if (!firecrawlApiKey) {
return NextResponse.json({ error: 'Firecrawl API key not configured' }, { status: 500 })
}

if (!openaiApiKey) {
return NextResponse.json({ error: 'OpenAI API key not configured' }, { status: 500 })
}

// Configure OpenAI with API key
const openai = createOpenAI({
apiKey: openaiApiKey
})
let llmProvider: OpenAIProvider
let modelName: string = 'gpt-4o-mini' // Default OpenAI model

if (provider === 'openai') {
const openaiApiKey = process.env.OPENAI_API_KEY
if (!openaiApiKey) {
return NextResponse.json({ error: 'OpenAI API key not configured' }, { status: 500 })
}
llmProvider = createOpenAI({ apiKey: openaiApiKey })
} else if (provider === 'openrouter') {
if (!openRouterApiKey) {
return NextResponse.json({ error: 'OpenRouter API key not configured' }, { status: 500 })
}
if (!openRouterModel) {
return NextResponse.json({ error: 'OpenRouter model not selected' }, { status: 400 })
}
llmProvider = createOpenAI({
apiKey: openRouterApiKey,
baseURL: 'https://openrouter.ai/api/v1',
})
modelName = openRouterModel
console.log(`[${requestId}] Using OpenRouter model: ${modelName}`)
} else {
return NextResponse.json({ error: 'Invalid provider specified' }, { status: 400 })
}

// Initialize Firecrawl
const firecrawl = new FirecrawlApp({ apiKey: firecrawlApiKey })
@@ -176,7 +199,7 @@ export async function POST(request: Request) {
: `user: ${query}`

const followUpPromise = generateText({
model: openai('gpt-4o-mini'),
model: llmProvider(modelName), // Use dynamic model and provider
messages: [
{
role: 'system',
@@ -195,7 +218,7 @@

// Stream the text generation
const result = streamText({
model: openai('gpt-4o-mini'),
model: llmProvider(modelName), // Use dynamic model and provider
messages: aiMessages,
temperature: 0.7,
maxTokens: 2000
@@ -239,11 +262,11 @@ export async function POST(request: Request) {
const errorResponses: Record<number, { error: string; suggestion?: string }> = {
401: {
error: 'Invalid API key',
suggestion: 'Please check your Firecrawl API key is correct.'
suggestion: 'Please check your API key for the selected provider.'
},
402: {
error: 'Insufficient credits',
suggestion: 'You\'ve run out of Firecrawl credits. Please upgrade your plan.'
suggestion: 'You\'ve run out of credits for the selected provider. Please upgrade your plan.'
},
429: {
error: 'Rate limit exceeded',
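
Taken together, the route now accepts the provider selection and any browser-stored keys in the POST body. A minimal sketch of a matching client call follows; the surrounding UI wiring and the example model id are assumptions, but the field names (`provider`, `openRouterModel`, `openRouterApiKey`, `firecrawlApiKey`) are the ones the route reads above.

```typescript
// Minimal sketch of a request to the updated search route (the client wiring is hypothetical).
async function runSearch(query: string): Promise<Response> {
  // The route answers with a streamed data stream built via createDataStreamResponse.
  return fetch('/api/fireplexity/search', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      messages: [{ role: 'user', content: query }],
      provider: 'openrouter',                                     // or 'openai' (the default)
      openRouterModel: 'anthropic/claude-3.5-sonnet',             // example id; required when provider is 'openrouter'
      openRouterApiKey: localStorage.getItem('openRouterApiKey'), // server falls back to OPENROUTER_API_KEY
      firecrawlApiKey: localStorage.getItem('firecrawlApiKey'),   // server falls back to FIRECRAWL_API_KEY
    }),
  })
}
```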
app/api/openrouter/models/route.ts: 25 changes (25 additions, 0 deletions)
@@ -0,0 +1,25 @@
import { NextResponse } from 'next/server'

export async function GET() {
try {
const response = await fetch('https://openrouter.ai/api/v1/models', {
method: 'GET',
headers: {
// Authorization: `Bearer ${process.env.OPENROUTER_API_KEY}`, // Add API key if required by OpenRouter for this endpoint
},
})

if (!response.ok) {
const errorData = await response.json()
console.error('OpenRouter API error:', errorData)
return NextResponse.json({ error: 'Failed to fetch models from OpenRouter', details: errorData }, { status: response.status })
}

const data = await response.json()
return NextResponse.json(data)
} catch (error) {
console.error('Error fetching OpenRouter models:', error)
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
return NextResponse.json({ error: 'Internal server error', details: errorMessage }, { status: 500 })
}
}
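
A model picker on the client could consume this endpoint roughly as follows. The sketch assumes OpenRouter's list response has the shape `{ data: [{ id, name, ... }] }`; that shape and the helper name are assumptions, not something this PR verifies.

```typescript
// Hypothetical consumer of the new models endpoint (not part of this PR).
// Assumes the OpenRouter list response looks like { data: [{ id, name, ... }] }.
interface OpenRouterModel {
  id: string
  name?: string
}

export async function loadModelOptions(): Promise<{ value: string; label: string }[]> {
  const res = await fetch('/api/openrouter/models')
  if (!res.ok) throw new Error('Failed to load OpenRouter models')

  const { data } = (await res.json()) as { data: OpenRouterModel[] }
  return data.map((model) => ({
    value: model.id,           // e.g. 'anthropic/claude-3.5-sonnet', passed to the search route as openRouterModel
    label: model.name ?? model.id,
  }))
}
```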