Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
76 changes: 76 additions & 0 deletions src/app/api/ai/chat/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
import { NextResponse } from "next/server";

// Shape of a canvas block forwarded by the client as conversational context.
// All fields except `id` are optional; summarizeContext supplies fallbacks.
type ContextBlock = {
  id: string;
  title?: string; // display title; "Untitled" is substituted when absent
  blockType?: string; // e.g. "text", "kanban"; "block" is substituted when absent
  content?: string; // free-form body; truncated to 500 chars when summarized
};

// Builds a plain-text digest of the context blocks for the model prompt:
// one numbered section per block ("#N [type] Title" header followed by the
// first 500 characters of its content), with blank lines between sections.
const summarizeContext = (context: ContextBlock[]) => {
  const sections: string[] = [];
  context.forEach((block, index) => {
    const kind = block.blockType || "block";
    const heading = block.title || "Untitled";
    const excerpt = (block.content || "").slice(0, 500);
    sections.push(`#${index + 1} [${kind}] ${heading}\n${excerpt}`);
  });
  return sections.join("\n\n");
};

// POST /api/ai/chat
// Body: { prompt: string; context?: ContextBlock[] }.
// Returns { response } on success or { error } with an appropriate status.
// When OPENAI_API_KEY is unset, responds with a development echo instead of
// calling the upstream API.
export async function POST(req: Request) {
  // Parse the request body in its own try so that ONLY malformed client
  // input maps to 400. Previously a single catch-all also converted
  // upstream network failures into 400 "Invalid request", misreporting a
  // server-side problem as a client error.
  let body: { prompt?: string; context?: ContextBlock[] };
  try {
    body = (await req.json()) as {
      prompt?: string;
      context?: ContextBlock[];
    };
  } catch {
    return NextResponse.json({ error: "Invalid request" }, { status: 400 });
  }

  const prompt = (body.prompt || "").trim();
  const context = Array.isArray(body.context) ? body.context : [];

  if (!prompt) {
    return NextResponse.json({ error: "Prompt is required" }, { status: 400 });
  }

  const apiKey = process.env.OPENAI_API_KEY;
  if (!apiKey) {
    // Development fallback: echo the prompt rather than failing hard.
    return NextResponse.json({
      response: `I received your prompt: "${prompt}". Set OPENAI_API_KEY to enable live AI responses.`,
    });
  }

  // Conversation payload for the OpenAI Responses API: a fixed system
  // instruction plus the user's prompt and a digest of the context blocks.
  const input = [
    {
      role: "system",
      content:
        "You are an assistant helping with project blocks. Use provided context blocks if relevant.",
    },
    {
      role: "user",
      content: `Prompt:\n${prompt}\n\nContext blocks:\n${summarizeContext(context)}`,
    },
  ];

  try {
    const response = await fetch("https://api.openai.com/v1/responses", {
      method: "POST",
      headers: {
        Authorization: `Bearer ${apiKey}`,
        "Content-Type": "application/json",
      },
      body: JSON.stringify({
        model: process.env.AI_CHAT_MODEL || "gpt-4.1-mini",
        input,
      }),
    });

    if (!response.ok) {
      const text = await response.text();
      return NextResponse.json({ error: text || "AI request failed" }, { status: 502 });
    }

    const data = (await response.json()) as {
      output_text?: string;
    };

    return NextResponse.json({ response: data.output_text || "No response." });
  } catch {
    // fetch rejects on network-level failures (DNS, refused connection,
    // aborted socket); that is an upstream fault, so surface it as 502.
    return NextResponse.json({ error: "AI request failed" }, { status: 502 });
  }
}
235 changes: 235 additions & 0 deletions src/app/components/project/AIChatBlock.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,235 @@
"use client";

import { memo, useMemo, useState } from "react";
import {
Handle,
Position,
type NodeProps,
type Node,
useEdges,
} from "@xyflow/react";
import { Loader2, MessageSquare } from "lucide-react";
import { useI18n } from "@providers/I18nProvider";
import { BlockData } from "./CanvasBlock";
import { BlockFooter } from "./BlockFooter";
import { parseJsonRecord } from "@lib/metadata-parsers";
import "./ai-chat-block.css";

// React Flow node props specialized to this project's block payload.
type AIChatBlockProps = NodeProps<Node<BlockData>>;

// One entry in the chat transcript persisted inside the block's metadata.
type ChatMessage = {
  role: "user" | "assistant";
  content: string;
  createdAt: string; // ISO-8601 timestamp (new Date().toISOString())
  contextBlockIds?: string[]; // block ids sent as context with a user message
};

// Canvas node that chats with an AI model via /api/ai/chat. The prompt
// draft, transcript, and option flags are persisted into the block's JSON
// metadata through data.onContentChange, so state survives reloads and is
// shared with collaborators. Context for the AI is gathered from the
// currently selected canvas blocks, optionally expanded one hop along links.
const AIChatBlock = memo(({ id, data, selected }: AIChatBlockProps) => {
  const { dict, lang } = useI18n();
  const edges = useEdges();
  // Metadata arrives as a JSON string or record; re-parse only when it changes.
  const metadata = useMemo(() => parseJsonRecord(data.metadata), [data.metadata]);
  // Persisted transcript; tolerate missing/corrupt metadata by defaulting to [].
  const messages = (Array.isArray(metadata.messages)
    ? metadata.messages
    : []) as ChatMessage[];

  // Draft prompt is seeded from the block's saved content.
  const [prompt, setPrompt] = useState(data.content || "");
  const [loading, setLoading] = useState(false);
  const [error, setError] = useState<string | null>(null);

  // Option flags from metadata: context inclusion defaults to ON when the
  // flag has never been set; link expansion defaults to OFF.
  const includeSelectedContext =
    typeof metadata.includeSelectedContext === "boolean"
      ? metadata.includeSelectedContext
      : true;
  const expandLinkedNodes = metadata.expandLinkedNodes === true;

  // Ids of the currently selected blocks (excluding this chat block itself),
  // optionally widened by one hop along canvas links in either direction.
  const selectedContextBlockIds = useMemo(() => {
    const allBlocks = data.allBlocks || [];
    const selectedIds = allBlocks
      .filter((block) => block.selected && block.id !== id)
      .map((block) => block.id);

    if (!expandLinkedNodes) return selectedIds;

    // Depth-1 expansion: add every neighbor of each selected block, never
    // this block itself. Set membership deduplicates overlapping neighbors.
    const linked = new Set(selectedIds);
    for (const selectedId of selectedIds) {
      for (const edge of data.allLinks || []) {
        if (edge.source === selectedId && edge.target !== id) linked.add(edge.target);
        if (edge.target === selectedId && edge.source !== id) linked.add(edge.source);
      }
    }

    return Array.from(linked);
  }, [data.allBlocks, data.allLinks, expandLinkedNodes, id]);

  // Materialized context payload for the API request; empty when the
  // "include selected" option is off.
  const selectedContext = useMemo(() => {
    if (!includeSelectedContext) return [];
    const allBlocks = data.allBlocks || [];
    const idSet = new Set(selectedContextBlockIds);
    return allBlocks
      .filter((block) => idSet.has(block.id))
      .map((block) => ({
        id: block.id,
        title: block.data.title || "",
        blockType: block.data.blockType,
        content: block.data.content || "",
      }));
  }, [data.allBlocks, includeSelectedContext, selectedContextBlockIds]);

  // Writes prompt/transcript/options back into block content + metadata via
  // the canvas-level change handler. Fields omitted from `next` keep their
  // current values; existing unrelated metadata keys are preserved by spread.
  const persist = async (next: {
    content?: string;
    messages?: ChatMessage[];
    includeSelectedContext?: boolean;
    expandLinkedNodes?: boolean;
  }) => {
    const now = new Date().toISOString();
    // Attribute the edit to the current user, falling back to a generic name.
    const editor =
      data.currentUser?.displayName ||
      data.currentUser?.username ||
      dict.project.anonymous;

    const nextMeta = {
      ...metadata,
      messages: next.messages ?? messages,
      includeSelectedContext:
        next.includeSelectedContext ?? includeSelectedContext,
      expandLinkedNodes: next.expandLinkedNodes ?? expandLinkedNodes,
    };

    await data.onContentChange?.(
      id,
      next.content ?? prompt,
      now,
      editor,
      JSON.stringify(nextMeta),
      data.title,
      data.reactions,
    );
  };

  // Sends the trimmed prompt plus gathered context to the chat API, then
  // appends the user/assistant message pair to the transcript and clears the
  // draft. Re-entrancy is prevented by the `loading` guard.
  const submit = async () => {
    if (!prompt.trim() || loading) return;
    setLoading(true);
    setError(null);

    try {
      const res = await fetch(`/api/ai/chat`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({
          prompt: prompt.trim(),
          context: selectedContext,
        }),
      });

      if (!res.ok) throw new Error("chat_request_failed");
      const body = (await res.json()) as { response?: string };
      const assistantContent = body.response || "No response generated.";
      const now = new Date().toISOString();
      // Record which blocks were used as context alongside the user message.
      const nextMessages: ChatMessage[] = [
        ...messages,
        {
          role: "user",
          content: prompt.trim(),
          createdAt: now,
          contextBlockIds: selectedContextBlockIds,
        },
        { role: "assistant", content: assistantContent, createdAt: now },
      ];

      // Persist first so a failed save doesn't silently drop the exchange,
      // then clear the local draft.
      await persist({ content: "", messages: nextMessages });
      setPrompt("");
    } catch {
      setError(dict.common.error || "Request failed");
    } finally {
      setLoading(false);
    }
  };

  // True when any edge attaches to the given handle of this node, in either
  // direction; used purely for the "connected" styling class.
  const isHandleConnected = (handleId: string) =>
    edges.some(
      (e) =>
        (e.source === id && e.sourceHandle === handleId) ||
        (e.target === id && e.targetHandle === handleId),
    );

  return (
    <div className={`block-card ai-chat-block ${selected ? "selected" : ""}`}>
      <Handle
        type="target"
        position={Position.Left}
        id="left"
        className={`custom-handle left ${isHandleConnected("left") ? "connected" : ""}`}
      />
      <Handle
        type="source"
        position={Position.Right}
        id="right"
        className={`custom-handle right ${isHandleConnected("right") ? "connected" : ""}`}
      />

      <div className="ai-chat-header">
        <MessageSquare size={14} />
        <span>{dict.blocks.blockTypeChat || "AI Chat"}</span>
      </div>

      <div className="ai-chat-options">
        <label>
          <input
            type="checkbox"
            checked={includeSelectedContext}
            onChange={(e) => {
              void persist({ includeSelectedContext: e.target.checked });
            }}
          />
          {dict.blocks.chatIncludeSelected || "Include selected blocks"}
        </label>
        <label>
          <input
            type="checkbox"
            checked={expandLinkedNodes}
            onChange={(e) => {
              void persist({ expandLinkedNodes: e.target.checked });
            }}
          />
          {dict.blocks.chatExpandLinked || "Expand linked nodes (depth 1)"}
        </label>
      </div>

      <textarea
        className="ai-chat-prompt"
        value={prompt}
        placeholder={dict.blocks.chatPromptPlaceholder || "Ask AI..."}
        onChange={(e) => setPrompt(e.target.value)}
      />

      <button className="ai-chat-submit" onClick={submit} disabled={loading}>
        {loading ? <Loader2 className="ai-chat-spin" size={14} /> : null}
        {dict.blocks.chatSend || "Send"}
      </button>

      {error ? <p className="ai-chat-error">{error}</p> : null}

      <div className="ai-chat-messages">
        {messages
          .filter((message) => message.role === "assistant")
          .slice(-4)
          .map((message, index) => (
            <p key={`${message.createdAt}-${index}`}>{message.content}</p>
          ))}
      </div>

      <BlockFooter
        dict={dict}
        lang={lang}
        updatedAt={data.updatedAt}
        authorName={data.authorName}
        isContentLocked={data.isContentLocked}
        isPositionLocked={data.isPositionLocked}
      />
    </div>
  );
});

AIChatBlock.displayName = "AIChatBlock";

export default AIChatBlock;
5 changes: 4 additions & 1 deletion src/app/components/project/AddBlockModal.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ import {
Terminal,
Kanban,
Folder,
MessageSquare,
} from "lucide-react";
import { FaGithub } from "react-icons/fa";
import { VercelIcon } from "../icons/VercelIcon";
Expand All @@ -34,7 +35,8 @@ type AddableBlockType =
| "sketch"
| "shell"
| "folder"
| "vercel";
| "vercel"
| "chat";

interface AddBlockModalProps {
isOpen: boolean;
Expand All @@ -59,6 +61,7 @@ const BLOCK_TYPES = [
{ type: "kanban", icon: Kanban, labelKey: "blockTypeKanban" },
{ type: "sketch", icon: PenTool, labelKey: "blockTypeSketch" },
{ type: "shell", icon: Terminal, labelKey: "blockTypeShell" },
{ type: "chat", icon: MessageSquare, labelKey: "blockTypeChat" },
{ type: "vercel", icon: VercelIcon, labelKey: "blockTypeVercel" },
] as const;

Expand Down
7 changes: 6 additions & 1 deletion src/app/components/project/CanvasBlock.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ import {
Position,
type NodeProps,
type Node,
type Edge,
useReactFlow,
} from "@xyflow/react";
import {
Expand Down Expand Up @@ -62,7 +63,8 @@ export type BlockData = {
| "kanban"
| "sketch"
| "shell"
| "folder";
| "folder"
| "chat";
label?: string;
metadata?: string | Record<string, unknown>;
isLocked?: boolean;
Expand Down Expand Up @@ -132,6 +134,8 @@ export type BlockData = {
};
userRole?: "creator" | "owner" | "editor" | "viewer";
directChildrenCount?: number;
allBlocks?: Node<BlockData>[];
allLinks?: Edge[];
};

export interface BlockMetadata {
Expand Down Expand Up @@ -164,6 +168,7 @@ export type CanvasBlockProps = NodeProps<
| "sketch"
| "shell"
| "folder"
| "chat"
>
>;

Expand Down
Loading
Loading