10 changes: 10 additions & 0 deletions apps/feature-request/.env.example
@@ -0,0 +1,10 @@
# Mesh connection
MESH_URL=http://localhost:3000
MESH_API_KEY=mk_xxxxxxx
MESH_ORG_SLUG=my-org

# Chat config
AGENT_ID=vmcp_xxxxxxx
MODEL_CONNECTION_ID=conn_xxxxxxx
MODEL_ID=google/gemini-2.5-flash
TOOL_MODE=smart_tool_selection
31 changes: 31 additions & 0 deletions apps/feature-request/.gitignore
@@ -0,0 +1,31 @@
# dependencies
node_modules
/.pnp
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/versions

# next.js
/.next/
/out/

# production
/build

# env files
.env
.env.local
.env.*.local

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*

# typescript
*.tsbuildinfo
next-env.d.ts
78 changes: 78 additions & 0 deletions apps/feature-request/app/api/chat/route.ts
@@ -0,0 +1,78 @@
import { SYSTEM_PROMPT } from "@/lib/system-prompt";

export async function POST(req: Request) {
const { messages } = await req.json();

const meshUrl = process.env.MESH_URL;
const apiKey = process.env.MESH_API_KEY;
const orgSlug = process.env.MESH_ORG_SLUG;
const agentId = process.env.AGENT_ID;
const modelConnectionId = process.env.MODEL_CONNECTION_ID;
const modelId = process.env.MODEL_ID;
const toolMode = process.env.TOOL_MODE || "smart_tool_selection";

if (
!meshUrl ||
!apiKey ||
!orgSlug ||
!agentId ||
!modelConnectionId ||
!modelId
) {
return new Response(
JSON.stringify({
error: "Server misconfigured — missing environment variables",
}),
{ status: 500, headers: { "Content-Type": "application/json" } },
);
}

const response = await fetch(`${meshUrl}/api/${orgSlug}/decopilot/stream`, {
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${apiKey}`,
},
body: JSON.stringify({
messages: [
{
id: crypto.randomUUID(),
role: "system",
parts: [{ type: "text", text: SYSTEM_PROMPT }],
},
...messages,
Review comment from @cubic-dev-ai (bot), Feb 6, 2026, on apps/feature-request/app/api/chat/route.ts line 43:

P2: Validate that `messages` is an array before spreading it. As written, a malformed or missing `messages` field will throw and return a 500 instead of a client error.

],
model: {
id: modelId,
connectionId: modelConnectionId,
},
agent: {
id: agentId,
mode: toolMode,
},
}),
});

if (!response.ok) {
const errorText = await response.text().catch(() => "Unknown error");
return new Response(
JSON.stringify({
error: `Mesh API error: ${response.status}`,
details: errorText,
}),
{
status: response.status,
headers: { "Content-Type": "application/json" },
},
);
}

return new Response(response.body, {
status: response.status,
headers: {
"Content-Type": "text/event-stream",
"Cache-Control": "no-cache",
Connection: "keep-alive",
},
});
}
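
A minimal sketch of the guard suggested in the review comment above, assuming the check is added at the top of the POST handler in route.ts. The error wording and body typing here are illustrative, not part of this PR:

// Hypothetical sketch (not in the PR): validate the request body before
// spreading `messages`, so malformed input returns a 400 instead of a 500.
export async function POST(req: Request) {
  let body: unknown;
  try {
    body = await req.json();
  } catch {
    return Response.json({ error: "Invalid JSON body" }, { status: 400 });
  }

  const messages = (body as { messages?: unknown } | null)?.messages;
  if (!Array.isArray(messages)) {
    return Response.json(
      { error: "`messages` must be an array of chat messages" },
      { status: 400 },
    );
  }

  // ...continue as in the diff above: prepend the system message, call the
  // Mesh /decopilot/stream endpoint, and proxy the event stream back.
}

Returning 400 for malformed client input keeps the 500 path reserved for genuine server misconfiguration, consistent with the existing environment-variable check in the handler.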
75 changes: 75 additions & 0 deletions apps/feature-request/app/globals.css
@@ -0,0 +1,75 @@
@import "tailwindcss";

@custom-variant dark (&:is(.dark *));

:root {
--background: oklch(1 0 0);
--foreground: oklch(0.145 0 0);
--card: oklch(1 0 0);
--card-foreground: oklch(0.145 0 0);
--primary: oklch(0.205 0 0);
--primary-foreground: oklch(0.985 0 0);
--secondary: oklch(0.961 0 0);
--secondary-foreground: oklch(0.205 0 0);
--muted: oklch(0.961 0 0);
--muted-foreground: oklch(0.556 0 0);
--accent: oklch(0.961 0 0);
--accent-foreground: oklch(0.205 0 0);
--destructive: oklch(0.577 0.245 27.325);
--border: oklch(0.922 0 0);
--input: oklch(0.922 0 0);
--ring: oklch(0.708 0 0);
--radius: 0.625rem;
}

.dark {
--background: oklch(0.145 0 0);
--foreground: oklch(0.985 0 0);
--card: oklch(0.205 0 0);
--card-foreground: oklch(0.985 0 0);
--primary: oklch(0.922 0 0);
--primary-foreground: oklch(0.205 0 0);
--secondary: oklch(0.269 0 0);
--secondary-foreground: oklch(0.985 0 0);
--muted: oklch(0.269 0 0);
--muted-foreground: oklch(0.708 0 0);
--accent: oklch(0.269 0 0);
--accent-foreground: oklch(0.985 0 0);
--destructive: oklch(0.396 0.141 25.723);
--border: oklch(0.275 0 0);
--input: oklch(0.275 0 0);
--ring: oklch(0.556 0 0);
}

@theme inline {
--color-background: var(--background);
--color-foreground: var(--foreground);
--color-card: var(--card);
--color-card-foreground: var(--card-foreground);
--color-primary: var(--primary);
--color-primary-foreground: var(--primary-foreground);
--color-secondary: var(--secondary);
--color-secondary-foreground: var(--secondary-foreground);
--color-muted: var(--muted);
--color-muted-foreground: var(--muted-foreground);
--color-accent: var(--accent);
--color-accent-foreground: var(--accent-foreground);
--color-destructive: var(--destructive);
--color-border: var(--border);
--color-input: var(--input);
--color-ring: var(--ring);
--radius-sm: calc(var(--radius) - 4px);
--radius-md: calc(var(--radius) - 2px);
--radius-lg: var(--radius);
--radius-xl: calc(var(--radius) + 4px);
}

@layer base {
* {
@apply border-border;
}
body {
@apply bg-background text-foreground;
font-family: "Inter", system-ui, -apple-system, sans-serif;
}
}
20 changes: 20 additions & 0 deletions apps/feature-request/app/layout.tsx
@@ -0,0 +1,20 @@
import type { Metadata } from "next";
import "./globals.css";

export const metadata: Metadata = {
title: "Feature Request - MCP Mesh",
description:
"Propose a feature for MCP Mesh. Chat with our AI tech lead to shape your idea into a clear plan.",
};

export default function RootLayout({
children,
}: {
children: React.ReactNode;
}) {
return (
<html lang="en">
<body className="antialiased">{children}</body>
</html>
);
}
157 changes: 157 additions & 0 deletions apps/feature-request/app/page.tsx
@@ -0,0 +1,157 @@
"use client";

import { useChat } from "@ai-sdk/react";
import { DefaultChatTransport } from "ai";
import { useRef, useEffect, useState } from "react";
import { ChatMessages } from "@/components/chat-messages";
import { ChatInput } from "@/components/chat-input";

const transport = new DefaultChatTransport({
api: "/api/chat",
});

const ICE_BREAKERS = [
"I'd like a way to see which MCP tools are most used",
"Can we add keyboard shortcuts to the chat?",
"I want to export my conversation history",
];

function EmptyState({ onSelect }: { onSelect: (text: string) => void }) {
return (
<div className="flex flex-col items-center justify-center gap-8 text-center px-4 py-12">
<div className="flex flex-col items-center gap-3">
<div className="size-14 rounded-2xl bg-primary/10 flex items-center justify-center">
<svg
width="28"
height="28"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
strokeWidth="1.5"
strokeLinecap="round"
strokeLinejoin="round"
className="text-primary"
>
<path d="M21 15a2 2 0 0 1-2 2H7l-4 4V5a2 2 0 0 1 2-2h14a2 2 0 0 1 2 2z" />
</svg>
</div>
<h1 className="text-2xl font-semibold text-foreground">
Request a Feature
</h1>
<p className="text-muted-foreground text-sm max-w-md leading-relaxed">
Describe a feature you&apos;d like to see in MCP Mesh. I&apos;ll help
you shape it into a clear plan and create a GitHub issue.
</p>
</div>

<div className="flex flex-col gap-2 w-full max-w-md">
<p className="text-xs text-muted-foreground font-medium uppercase tracking-wide">
Try one of these
</p>
{ICE_BREAKERS.map((text) => (
<button
key={text}
type="button"
onClick={() => onSelect(text)}
className="text-left text-sm px-4 py-3 rounded-xl border border-border bg-card hover:bg-accent hover:border-accent-foreground/10 transition-colors cursor-pointer"
>
{text}
</button>
))}
</div>
</div>
);
}

export default function FeatureRequestPage() {
const scrollRef = useRef<HTMLDivElement>(null);
const [input, setInput] = useState("");

const { messages, sendMessage, status, stop } = useChat({
transport,
});

const isStreaming = status === "streaming" || status === "submitted";
const isEmpty = messages.length === 0;

// Auto-scroll to bottom on new messages
useEffect(() => {
if (scrollRef.current) {
scrollRef.current.scrollTop = scrollRef.current.scrollHeight;
}
}, [messages]);

const handleSend = () => {
const text = input.trim();
if (!text) return;
setInput("");
sendMessage({ text });
};

const handleIceBreaker = (text: string) => {
setInput("");
sendMessage({ text });
};

return (
<div className="flex flex-col h-dvh bg-background">
{/* Header */}
<header className="flex-none border-b border-border px-4 py-3">
<div className="max-w-2xl mx-auto flex items-center gap-3">
<div className="size-8 rounded-lg bg-primary/10 flex items-center justify-center shrink-0">
<svg
width="16"
height="16"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
strokeWidth="2"
strokeLinecap="round"
strokeLinejoin="round"
className="text-primary"
>
<path d="M21 15a2 2 0 0 1-2 2H7l-4 4V5a2 2 0 0 1 2-2h14a2 2 0 0 1 2 2z" />
</svg>
</div>
<div>
<h1 className="text-sm font-medium text-foreground">
MCP Mesh — Feature Request
</h1>
<p className="text-xs text-muted-foreground">
Describe your idea and we&apos;ll shape it into a plan
</p>
</div>
</div>
</header>

{/* Messages area */}
<div ref={scrollRef} className="flex-1 overflow-y-auto min-h-0">
<div className="max-w-2xl mx-auto px-4">
{isEmpty ? (
<EmptyState onSelect={handleIceBreaker} />
) : (
<div className="py-6">
<ChatMessages messages={messages} isStreaming={isStreaming} />
</div>
)}
</div>
</div>

{/* Input area */}
<div className="flex-none border-t border-border bg-background">
<div className="max-w-2xl mx-auto px-4 py-3">
<ChatInput
value={input}
onChange={setInput}
onSubmit={handleSend}
onStop={stop}
isStreaming={isStreaming}
/>
<p className="text-[10px] text-muted-foreground/60 text-center mt-2">
AI-powered feature planning. Responses may not always be accurate.
</p>
</div>
</div>
</div>
);
}