From 2053550c259888354a80e6564e5236ed8659ba66 Mon Sep 17 00:00:00 2001 From: Cho Young-Hwi Date: Wed, 18 Mar 2026 17:02:21 +0000 Subject: [PATCH 1/2] [#329] Index MCV2_Bond mint/burn trades for storyline tokens - Migration: trade_history table with (tx_hash, log_index) uniqueness - Database types: added trade_history to Database interface - ABI: added Minted/Burned event definitions from MCV2_Bond.sol - Cron route: /api/cron/trade-history scans MCV2_Bond events for known storyline tokens only, with own cursor (id=2) and CRON_SECRET fail-closed auth - Direct indexer: /api/index/trade accepts txHash+tokenAddress, decodes mint/burn events and upserts to trade_history - TradingWidget: fire-and-forget POST to trade indexer after successful mint/burn for instant chart updates - Price per token computed as reserveAmount/tokenAmount - Total supply read at trade block number Fixes #329 Co-Authored-By: Claude Opus 4.6 (1M context) --- lib/contracts/abi.ts | 30 +++ lib/supabase.ts | 45 ++++ src/app/api/cron/trade-history/route.ts | 225 ++++++++++++++++++++ src/app/api/index/trade/route.ts | 115 ++++++++++ src/components/TradingWidget.tsx | 12 ++ supabase/migrations/00016_trade_history.sql | 18 ++ 6 files changed, 445 insertions(+) create mode 100644 src/app/api/cron/trade-history/route.ts create mode 100644 src/app/api/index/trade/route.ts create mode 100644 supabase/migrations/00016_trade_history.sql diff --git a/lib/contracts/abi.ts b/lib/contracts/abi.ts index 923f8bde..ac387367 100644 --- a/lib/contracts/abi.ts +++ b/lib/contracts/abi.ts @@ -87,6 +87,36 @@ export const donateFunction = { outputs: [], } as const; +// --------------------------------------------------------------------------- +// MCV2_Bond events (from MCV2_Bond.sol upstream) +// --------------------------------------------------------------------------- + +export const mcv2MintedEvent = { + type: "event", + name: "Minted", + inputs: [ + { name: "token", type: "address", indexed: true }, + { name: 
"account", type: "address", indexed: true }, + { name: "tokenAmount", type: "uint256", indexed: false }, + { name: "reserveAmount", type: "uint256", indexed: false }, + { name: "beneficiary", type: "address", indexed: false }, + ], +} as const; + +export const mcv2BurnedEvent = { + type: "event", + name: "Burned", + inputs: [ + { name: "token", type: "address", indexed: true }, + { name: "account", type: "address", indexed: true }, + { name: "tokenAmount", type: "uint256", indexed: false }, + { name: "refundAmount", type: "uint256", indexed: false }, + { name: "beneficiary", type: "address", indexed: false }, + ], +} as const; + +export const mcv2BondEventAbi = [mcv2MintedEvent, mcv2BurnedEvent] as const; + // --------------------------------------------------------------------------- // MCV2_Bond view functions // --------------------------------------------------------------------------- diff --git a/lib/supabase.ts b/lib/supabase.ts index e2e8081d..ed1a4811 100644 --- a/lib/supabase.ts +++ b/lib/supabase.ts @@ -343,6 +343,51 @@ export interface Database { }; Relationships: []; }; + trade_history: { + Row: { + id: number; + token_address: string; + storyline_id: number; + event_type: string; + price_per_token: number; + total_supply: number; + reserve_amount: number; + block_number: number; + block_timestamp: string; + tx_hash: string; + log_index: number; + contract_address: string; + }; + Insert: { + id?: never; + token_address: string; + storyline_id: number; + event_type: string; + price_per_token: number; + total_supply: number; + reserve_amount: number; + block_number: number; + block_timestamp: string; + tx_hash: string; + log_index: number; + contract_address: string; + }; + Update: { + id?: never; + token_address?: string; + storyline_id?: number; + event_type?: string; + price_per_token?: number; + total_supply?: number; + reserve_amount?: number; + block_number?: number; + block_timestamp?: string; + tx_hash?: string; + log_index?: number; + 
contract_address?: string; + }; + Relationships: []; + }; }; Views: { [_ in never]: never; diff --git a/src/app/api/cron/trade-history/route.ts b/src/app/api/cron/trade-history/route.ts new file mode 100644 index 00000000..932cc860 --- /dev/null +++ b/src/app/api/cron/trade-history/route.ts @@ -0,0 +1,225 @@ +import { NextResponse } from "next/server"; +import { decodeEventLog, formatUnits, type Log } from "viem"; +import { publicClient } from "../../../../../lib/rpc"; +import { createServerClient } from "../../../../../lib/supabase"; +import { mcv2BondEventAbi } from "../../../../../lib/contracts/abi"; +import { MCV2_BOND } from "../../../../../lib/contracts/constants"; +import { erc20Abi } from "../../../../../lib/price"; +import type { Database } from "../../../../../lib/supabase"; + +const SCAN_BLOCKS = BigInt(200); +const CURSOR_ID = 2; // separate cursor row from backfill (id=1) + +type TradeInsert = Database["public"]["Tables"]["trade_history"]["Insert"]; +type SupabaseClient = NonNullable<ReturnType<typeof createServerClient>>; + +/** Fail closed in production when CRON_SECRET is unset */ +function verifyCron(req: Request): boolean { + const secret = process.env.CRON_SECRET; + if (!secret) { + return process.env.NODE_ENV !== "production"; + } + const authHeader = req.headers.get("authorization"); + return authHeader === `Bearer ${secret}`; +} + +export async function GET(req: Request) { + if (!verifyCron(req)) { + return NextResponse.json({ error: "Unauthorized" }, { status: 401 }); + } + + const supabase = createServerClient(); + if (!supabase) { + return NextResponse.json({ error: "Supabase not configured" }, { status: 500 }); + } + + const currentBlock = await publicClient.getBlockNumber(); + + // Read cursor + const { data: cursor } = await supabase + .from("backfill_cursor") + .select("last_block") + .eq("id", CURSOR_ID) + .single(); + + // If no cursor row exists, create one + if (!cursor) { + await supabase.from("backfill_cursor").insert({ id: CURSOR_ID, last_block: 0 }); + } + + const 
lastBlock = cursor?.last_block ? BigInt(cursor.last_block) : BigInt(0); + const fromBlock = lastBlock > BigInt(0) ? lastBlock + BigInt(1) : BigInt(0); + + if (fromBlock > currentBlock) { + return NextResponse.json({ skipped: true, reason: "Already up to date" }); + } + + const toBlock = + fromBlock + SCAN_BLOCKS < currentBlock ? fromBlock + SCAN_BLOCKS : currentBlock; + + // Load known storyline token addresses + const { data: storylines } = await supabase + .from("storylines") + .select("storyline_id, token_address") + .not("token_address", "is", null); + + const tokenToStoryline = new Map<string, number>(); + for (const s of storylines ?? []) { + if (s.token_address) { + tokenToStoryline.set(s.token_address.toLowerCase(), s.storyline_id); + } + } + + if (tokenToStoryline.size === 0) { + // Advance cursor even if no tokens to track + await supabase + .from("backfill_cursor") + .update({ last_block: Number(toBlock), updated_at: new Date().toISOString() }) + .eq("id", CURSOR_ID); + return NextResponse.json({ skipped: true, reason: "No storyline tokens to track" }); + } + + // Fetch all MCV2_Bond logs in range + const logs = await publicClient.getLogs({ + address: MCV2_BOND, + fromBlock, + toBlock, + }); + + let inserted = 0; + let skipped = 0; + let errors = 0; + + const blockTimestampCache = new Map<bigint, string>(); + async function getTimestamp(blockNumber: bigint): Promise<string> { + const cached = blockTimestampCache.get(blockNumber); + if (cached) return cached; + const block = await publicClient.getBlock({ blockNumber }); + const ts = new Date(Number(block.timestamp) * 1000).toISOString(); + blockTimestampCache.set(blockNumber, ts); + return ts; + } + + for (const log of logs) { + try { + const decoded = decodeEventLog({ + abi: mcv2BondEventAbi, + data: log.data, + topics: log.topics, + }); + + if (decoded.eventName !== "Minted" && decoded.eventName !== "Burned") { + skipped++; + continue; + } + + const tokenAddress = ( + decoded.args as { token: `0x${string}` } + ).token.toLowerCase(); + const 
storylineId = tokenToStoryline.get(tokenAddress); + if (storylineId === undefined) { + skipped++; + continue; + } + + await processTradeEvent( + decoded, + log, + tokenAddress, + storylineId, + supabase, + getTimestamp, + ); + inserted++; + } catch (err) { + // Skip events that don't decode as Minted/Burned + if (err instanceof Error && err.message.includes("could not find")) { + skipped++; + continue; + } + console.error( + `Trade indexer error at tx=${log.transactionHash} logIndex=${log.logIndex}:`, + err instanceof Error ? err.message : err, + ); + errors++; + } + } + + // Advance cursor + await supabase + .from("backfill_cursor") + .update({ last_block: Number(toBlock), updated_at: new Date().toISOString() }) + .eq("id", CURSOR_ID); + + return NextResponse.json({ + scanned: { fromBlock: Number(fromBlock), toBlock: Number(toBlock) }, + trades: inserted, + skipped, + errors, + }); +} + +type DecodedEvent = ReturnType<typeof decodeEventLog>; + +async function processTradeEvent( + decoded: DecodedEvent, + log: Log, + tokenAddress: string, + storylineId: number, + supabase: SupabaseClient, + getTimestamp: (blockNumber: bigint) => Promise<string>, +) { + const args = decoded.args as { + token: `0x${string}`; + account: `0x${string}`; + tokenAmount: bigint; + reserveAmount?: bigint; + refundAmount?: bigint; + }; + + const isMint = decoded.eventName === "Minted"; + const reserveAmount = isMint ? args.reserveAmount! : args.refundAmount!; + const tokenAmount = args.tokenAmount; + + // Compute price per token (reserve per token, 18 decimals) + const pricePerToken = + tokenAmount > BigInt(0) + ? 
Number(formatUnits(reserveAmount, 18)) / Number(formatUnits(tokenAmount, 18)) + : 0; + + // Read total supply at this block + let totalSupply = BigInt(0); + try { + totalSupply = await publicClient.readContract({ + address: args.token, + abi: erc20Abi, + functionName: "totalSupply", + blockNumber: log.blockNumber!, + }); + } catch { + // Fall back to 0 if historical read fails + } + + const timestampISO = await getTimestamp(log.blockNumber!); + + const row: TradeInsert = { + token_address: tokenAddress, + storyline_id: storylineId, + event_type: isMint ? "mint" : "burn", + price_per_token: pricePerToken, + total_supply: Number(formatUnits(totalSupply, 18)), + reserve_amount: Number(formatUnits(reserveAmount, 18)), + block_number: Number(log.blockNumber!), + block_timestamp: timestampISO, + tx_hash: log.transactionHash!.toLowerCase(), + log_index: log.logIndex!, + contract_address: MCV2_BOND.toLowerCase(), + }; + + const { error } = await supabase + .from("trade_history") + .upsert(row, { onConflict: "tx_hash,log_index" }); + if (error) { + throw new Error(`Database error (trade): ${error.message}`); + } +} diff --git a/src/app/api/index/trade/route.ts b/src/app/api/index/trade/route.ts new file mode 100644 index 00000000..a71e9cda --- /dev/null +++ b/src/app/api/index/trade/route.ts @@ -0,0 +1,115 @@ +import { NextResponse } from "next/server"; +import { type Hex, decodeEventLog, formatUnits } from "viem"; +import { publicClient, getReceiptWithRetry } from "../../../../../lib/rpc"; +import { createServerClient } from "../../../../../lib/supabase"; +import { mcv2BondEventAbi } from "../../../../../lib/contracts/abi"; +import { MCV2_BOND } from "../../../../../lib/contracts/constants"; +import { erc20Abi } from "../../../../../lib/price"; +import type { Database } from "../../../../../lib/supabase"; + +type TradeInsert = Database["public"]["Tables"]["trade_history"]["Insert"]; + +function error(message: string, status = 400) { + return NextResponse.json({ error: 
message }, { status }); +} + +export async function POST(req: Request) { + const body = await req.json(); + const txHash = body.txHash as Hex | undefined; + const tokenAddress = (body.tokenAddress as string | undefined)?.toLowerCase(); + + if (!txHash) return error("txHash required"); + if (!tokenAddress) return error("tokenAddress required"); + + const supabase = createServerClient(); + if (!supabase) return error("Supabase not configured", 500); + + // Look up storyline for this token + const { data: storyline } = await supabase + .from("storylines") + .select("storyline_id") + .eq("token_address", tokenAddress) + .single(); + + if (!storyline) return error("Unknown token address", 404); + + const receipt = await getReceiptWithRetry(txHash); + if (!receipt) return error("Receipt not found", 404); + + const block = await publicClient.getBlock({ blockNumber: receipt.blockNumber }); + const timestampISO = new Date(Number(block.timestamp) * 1000).toISOString(); + + let indexed = 0; + + for (const log of receipt.logs) { + if (log.address.toLowerCase() !== MCV2_BOND.toLowerCase()) continue; + + try { + const decoded = decodeEventLog({ + abi: mcv2BondEventAbi, + data: log.data, + topics: log.topics, + }); + + if (decoded.eventName !== "Minted" && decoded.eventName !== "Burned") continue; + + const args = decoded.args as { + token: `0x${string}`; + tokenAmount: bigint; + reserveAmount?: bigint; + refundAmount?: bigint; + }; + + if (args.token.toLowerCase() !== tokenAddress) continue; + + const isMint = decoded.eventName === "Minted"; + const reserveAmount = isMint ? args.reserveAmount! : args.refundAmount!; + const tokenAmount = args.tokenAmount; + + const pricePerToken = + tokenAmount > BigInt(0) + ? 
Number(formatUnits(reserveAmount, 18)) / + Number(formatUnits(tokenAmount, 18)) + : 0; + + let totalSupply = BigInt(0); + try { + totalSupply = await publicClient.readContract({ + address: args.token, + abi: erc20Abi, + functionName: "totalSupply", + blockNumber: receipt.blockNumber, + }); + } catch { + // Fall back to 0 + } + + const row: TradeInsert = { + token_address: tokenAddress, + storyline_id: storyline.storyline_id, + event_type: isMint ? "mint" : "burn", + price_per_token: pricePerToken, + total_supply: Number(formatUnits(totalSupply, 18)), + reserve_amount: Number(formatUnits(reserveAmount, 18)), + block_number: Number(receipt.blockNumber), + block_timestamp: timestampISO, + tx_hash: txHash.toLowerCase(), + log_index: log.logIndex!, + contract_address: MCV2_BOND.toLowerCase(), + }; + + const { error: dbError } = await supabase + .from("trade_history") + .upsert(row, { onConflict: "tx_hash,log_index" }); + if (dbError) { + console.error(`Trade index DB error: ${dbError.message}`); + } else { + indexed++; + } + } catch { + // Skip non-matching events + } + } + + return NextResponse.json({ indexed }); +} diff --git a/src/components/TradingWidget.tsx b/src/components/TradingWidget.tsx index bc8625cd..917608db 100644 --- a/src/components/TradingWidget.tsx +++ b/src/components/TradingWidget.tsx @@ -78,6 +78,7 @@ export function TradingWidget({ tokenAddress }: { tokenAddress: Address }) { try { setError(null); setTxHash(null); + let tradeHash: string | null = null; if (tab === "buy") { // Buy: approve PLOT_TOKEN → mint @@ -112,6 +113,7 @@ export function TradingWidget({ tokenAddress }: { tokenAddress: Address }) { gas: BigInt(2_000_000), }); setTxHash(hash); + tradeHash = hash; setTxState("pending"); await publicClient.waitForTransactionReceipt({ hash }); } else { @@ -146,6 +148,7 @@ export function TradingWidget({ tokenAddress }: { tokenAddress: Address }) { gas: BigInt(2_000_000), }); setTxHash(hash); + tradeHash = hash; setTxState("pending"); await 
publicClient.waitForTransactionReceipt({ hash }); } @@ -153,6 +156,15 @@ export function TradingWidget({ tokenAddress }: { tokenAddress: Address }) { setTxState("done"); setAmount(""); refetchBalance(); + + // Index the trade for price history (fire-and-forget) + if (tradeHash) { + fetch("/api/index/trade", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ txHash: tradeHash, tokenAddress }), + }).catch(() => {}); + } } catch (err) { setError(err instanceof Error ? err.message : "Transaction failed"); setTxState("error"); diff --git a/supabase/migrations/00016_trade_history.sql b/supabase/migrations/00016_trade_history.sql new file mode 100644 index 00000000..dd77cdde --- /dev/null +++ b/supabase/migrations/00016_trade_history.sql @@ -0,0 +1,18 @@ +create table if not exists public.trade_history ( + id bigint generated always as identity primary key, + token_address text not null, + storyline_id bigint not null, + event_type text not null, + price_per_token numeric not null, + total_supply numeric not null, + reserve_amount numeric not null, + block_number bigint not null, + block_timestamp timestamptz not null, + tx_hash text not null, + log_index integer not null, + contract_address text not null, + unique (tx_hash, log_index) +); + +create index idx_trade_history_token_ts on public.trade_history (token_address, block_timestamp); +create index idx_trade_history_storyline on public.trade_history (storyline_id); From 8f492f5cfbfa8159f80bd995e3a60fa3f8720eb7 Mon Sep 17 00:00:00 2001 From: Cho Young-Hwi Date: Wed, 18 Mar 2026 17:04:19 +0000 Subject: [PATCH 2/2] [#329] Add migration to allow multiple backfill cursor rows Drop check(id=1) constraint on backfill_cursor so the trade-history cron can use its own cursor row (id=2). Seeds the new row. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- supabase/migrations/00017_backfill_cursor_multi.sql | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 supabase/migrations/00017_backfill_cursor_multi.sql diff --git a/supabase/migrations/00017_backfill_cursor_multi.sql b/supabase/migrations/00017_backfill_cursor_multi.sql new file mode 100644 index 00000000..c4c67e7a --- /dev/null +++ b/supabase/migrations/00017_backfill_cursor_multi.sql @@ -0,0 +1,6 @@ +-- Allow multiple cursor rows (one per cron job) by dropping the id=1 check. +alter table backfill_cursor drop constraint if exists backfill_cursor_id_check; + +-- Seed trade-history cursor (id=2) +insert into backfill_cursor (id, last_block) values (2, 0) + on conflict (id) do nothing;