(null);
+ const [healthBannerDismissed, setHealthBannerDismissed] = useState(false);
+ const [isResettingContacts, setIsResettingContacts] = useState(false);
- // Identity context (domain and chain binding)
const chainId = Number(import.meta.env.VITE_CHAIN_ID);
- const rpId = globalThis.location?.host ?? "";
- const identityContext = useMemo(() => ({ chainId, rpId }), [chainId, rpId]);
- const addLog = useCallback((message: string) => {
- const timestamp = new Date().toLocaleTimeString();
- const logEntry = `[${timestamp}] ${message}\n`;
- setActivityLogs(prev => {
- const newLogs = prev + logEntry;
+ const {
+ identityKeyPair,
+ identityProof,
+ executor,
+ identitySigner,
+ safeAddr,
+ needsIdentityCreation,
+ identityContext,
+ // Session state
+ sessionSignerAddr,
+ needsSessionSetup,
+ isSafeDeployed,
+ isModuleEnabled,
+ setIsSafeDeployed,
+ setIsModuleEnabled,
+ setNeedsSessionSetup,
+ signingStep,
+ // Actions
+ needsModeSelection,
+ fastModeAvailable,
+ executionMode,
+ emitterAddress,
+ createIdentity,
+ } = useInitIdentity({
+ walletClient,
+ address,
+ chainId,
+ readProvider,
+ ready,
+ onIdentityCreated: () => setShowToast(true),
+ onReset: () => {
+ setSelectedContact(null);
+ setVerbethClient(null);
+ },
+ });
- setTimeout(() => {
- if (logRef.current && isActivityLogOpen) {
- logRef.current.scrollTop = logRef.current.scrollHeight;
- }
- }, 0);
-
- return newLogs;
- });
- }, [isActivityLogOpen]);
-
-
- const createIdentity = useCallback(async () => {
- // Wagmi
- if (signer && address) {
- setLoading(true);
- try {
- addLog("Deriving new identity key (2 signatures)...");
-
- const result = await deriveIdentityKeyPairWithProof(signer, address, identityContext);
-
- setIdentityKeyPair(result.keyPair);
- setIdentityProof(result.identityProof);
-
- const identityToStore: StoredIdentity = {
- address: address,
- keyPair: result.keyPair,
- derivedAt: Date.now(),
- proof: result.identityProof
- };
-
- await dbService.saveIdentity(identityToStore);
- addLog(`New identity key derived and saved for EOA`);
- setNeedsIdentityCreation(false);
- setShowToast(true);
-
- } catch (signError: any) {
- if (signError.code === 4001) {
- addLog("User rejected signing request.");
- } else {
- addLog(`✗ Failed to derive identity: ${signError.message}`);
- }
- } finally {
- setLoading(false);
- }
- return;
- }
+ // useSessionSetup receives state from useInitIdentity
+ const {
+ sessionSignerBalance,
+ refreshSessionBalance,
+ setupSession,
+ } = useSessionSetup({
+ walletClient,
+ address,
+ safeAddr,
+ sessionSignerAddr,
+ chainId,
+ readProvider,
+ isSafeDeployed,
+ isModuleEnabled,
+ setIsSafeDeployed,
+ setIsModuleEnabled,
+ setNeedsSessionSetup,
+ executionMode,
+ });
+
+ // ===========================================================================
+ // Create VerbethClient with storage adapters using factory function
+ // ===========================================================================
+ useEffect(() => {
+ const currentAddress = address;
- addLog("✗ Missing signer/provider or address for identity creation");
- }, [signer, address, identityContext, addLog]);
+ if (executor && identityKeyPair && identityProof && identitySigner && currentAddress) {
+ const client = createVerbethClient({
+ address: currentAddress,
+ signer: identitySigner,
+ identityKeyPair,
+ identityProof,
+ executor,
+ sessionStore,
+ pendingStore,
+ });
+
+ setVerbethClient(client);
+ } else {
+ setVerbethClient(null);
+ }
+ }, [executor, identityKeyPair, identityProof, identitySigner, address]);
const {
messages,
pendingHandshakes,
contacts,
addMessage,
+ updateMessageStatus,
+ removeMessage,
removePendingHandshake,
updateContact,
- processEvents
+ processEvents,
+ markMessagesLost
} = useMessageProcessor({
readProvider,
identityContext,
address: address ?? undefined,
+ emitterAddress: emitterAddress ?? undefined,
identityKeyPair,
- onLog: addLog
+ verbethClient,
});
+ const { hasPendingReset, pendingHandshake: pendingResetHandshake, limboAfterTimestamp } =
+ usePendingSessionReset(selectedContact, pendingHandshakes);
const {
isInitialLoading,
isLoadingMore,
canLoadMore,
syncProgress,
+ syncStatus,
loadMoreHistory,
+ health,
} = useMessageListener({
readProvider,
address: address ?? undefined,
- onLog: addLog,
- onEventsProcessed: processEvents
+ emitterAddress: emitterAddress ?? undefined,
+ onEventsProcessed: processEvents,
+ viemClient,
+ verbethClient,
});
const {
sendHandshake,
acceptHandshake,
- sendMessageToContact
+ sendMessageToContact,
+ retryFailedMessage,
+ cancelQueuedMessage,
+ getContactQueueStatus,
} = useChatActions({
verbethClient,
- addLog,
updateContact: async (contact: Contact) => { await updateContact(contact); },
addMessage: async (message: any) => { await addMessage(message); },
+ updateMessageStatus,
+ removeMessage,
removePendingHandshake: async (id: string) => { await removePendingHandshake(id); },
setSelectedContact,
setLoading,
setMessage,
setRecipientAddress,
+ markMessagesLost,
});
- useEffect(() => {
- const currentAddress = address;
-
- if (executor && identityKeyPair && identityProof && signer && currentAddress) {
- const client = new VerbethClient({
- executor,
- identityKeyPair,
- identityProof,
- signer,
- address: currentAddress,
- });
- setVerbethClient(client);
- addLog(`VerbethClient initialized for ${currentAddress.slice(0, 8)}...`);
- } else {
- setVerbethClient(null);
- }
- }, [executor, identityKeyPair, identityProof, signer, address, addLog]);
-
// sync handshakeToasts
useEffect(() => {
const currentlyConnected = isConnected;
@@ -207,6 +202,7 @@ export default function App() {
sender: h.sender,
message: h.message,
verified: h.verified,
+ isExistingContact: h.isExistingContact,
onAccept: (msg: string) => acceptHandshake(h, msg),
onReject: () => removePendingHandshake(h.id),
}))
@@ -217,13 +213,44 @@ export default function App() {
setHandshakeToasts((prev) => prev.filter((n) => n.id !== id));
};
+ // Auto-reset health banner when health recovers to "ok"
useEffect(() => {
- setReady(readProvider !== null && isConnected && walletClient !== undefined);
- }, [readProvider, isConnected, walletClient]);
+ if (health.level === "ok" && healthBannerDismissed) {
+ setHealthBannerDismissed(false);
+ }
+ }, [health.level, healthBannerDismissed]);
+
+ const providerLabel = (() => {
+ switch (transportStatus) {
+ case "ws":
+ return "Alchemy WS + HTTP";
+ case "http-alchemy":
+ return "Alchemy HTTP";
+ case "http-public":
+ return "Public HTTP";
+ case "disconnected":
+ return "Disconnected";
+ }
+ })();
+
+ const syncStatusLabel = (() => {
+ switch (syncStatus.mode) {
+ case "catching_up":
+ return `Catching up (${syncStatus.pendingRanges} ranges queued)`;
+ case "retrying":
+ return `Retrying (${syncStatus.pendingRanges} ranges pending)`;
+ case "degraded":
+ return "Degraded";
+ case "synced":
+ return "Synced";
+ default:
+ return "Idle";
+ }
+ })();
useEffect(() => {
- handleInitialization();
- }, [ready, readProvider, walletClient, address]);
+ setReady(readProvider !== null && isConnected && walletClient !== undefined);
+ }, [readProvider, isConnected, walletClient]);
// hide handshake form when we have contacts AND user is connected
useEffect(() => {
@@ -231,76 +258,100 @@ export default function App() {
setShowHandshakeForm(!ready || !currentlyConnected || contacts.length === 0 || needsIdentityCreation);
}, [ready, isConnected, contacts.length, needsIdentityCreation]);
- const handleInitialization = useCallback(async () => {
- try {
- if (ready && readProvider && walletClient && address) {
- await initializeWagmiAccount();
- return;
- }
-
- if (!address) {
- resetState();
- }
- } catch (error) {
- console.error("Failed to initialize:", error);
- addLog(`✗ Failed to initialize: ${error instanceof Error ? error.message : 'Unknown error'}`);
- }
- }, [ready, readProvider, walletClient, address]);
-
- const initializeWagmiAccount = async () => {
- const ethersProvider = new BrowserProvider(walletClient!.transport);
- const ethersSigner = await ethersProvider.getSigner();
-
- const net = await ethersProvider.getNetwork();
- if (Number(net.chainId) !== chainId) {
- addLog(`Wrong network: connected to chain ${Number(net.chainId)}, expected ${chainId}. Please switch network in your wallet.`);
- return;
- }
-
- const contractInstance = LogChainV1__factory.connect(LOGCHAIN_SINGLETON_ADDR, ethersSigner as any);
- const executorInstance = ExecutorFactory.createEOA(contractInstance);
-
- setSigner(ethersSigner);
- setExecutor(executorInstance);
- setContract(contractInstance);
-
- if (address !== currentAccount) {
- console.log(`EOA connected: ${address!.slice(0, 8)}...`);
- await switchToAccount(address!);
- }
- };
-
- const switchToAccount = async (newAddress: string) => {
- setIdentityKeyPair(null);
- setIdentityProof(null);
- setSelectedContact(null);
-
- await dbService.switchAccount(newAddress);
- setCurrentAccount(newAddress);
+ const renderMessage = (msg: Message) => {
+ const isOutgoing = msg.direction === 'outgoing';
+ const isFailed = msg.status === 'failed';
+ const isPending = msg.status === 'pending';
+ const isLost = msg.isLost === true;
+ const isInLimbo = !isLost && hasPendingReset && isOutgoing && msg.type !== 'system' && limboAfterTimestamp && msg.timestamp > limboAfterTimestamp;
+
+ return (
+
+
+
+ {msg.type === "system" && msg.verified !== undefined && (
+ msg.verified ? (
+
+
+
+ Identity proof verified
+
+
+ ) : (
+
+
+
+ Identity proof not verified
+
+
+ )
+ )}
- const storedIdentity = await dbService.getIdentity(newAddress);
- if (storedIdentity) {
- setIdentityKeyPair(storedIdentity.keyPair);
- setIdentityProof(storedIdentity.proof ?? null);
- setNeedsIdentityCreation(false);
- addLog(`Identity keys restored from database`);
- } else {
- setNeedsIdentityCreation(true);
- }
- };
+ {msg.type === "system" && msg.decrypted ? (
+ <>
+ {msg.decrypted.split(":")[0]}:
+ {msg.decrypted.split(":").slice(1).join(":")}
+ >
+ ) : (
+ msg.decrypted || msg.ciphertext
+ )}
+
+
+
+
+ {new Date(msg.timestamp).toLocaleTimeString()}
+
+ {isOutgoing && (
+
+ {isLost ? '✗' :
+ isInLimbo ? '✓' :
+ msg.status === 'confirmed' ? '✓✓' :
+ msg.status === 'failed' ? '✗' :
+ msg.status === 'pending' ? '✓' : '?'}
+
+ )}
+
+
- const resetState = () => {
- setCurrentAccount(null);
- setIdentityKeyPair(null);
- setIdentityProof(null);
- setSelectedContact(null);
- setSigner(null);
- setContract(null);
- setExecutor(null);
- setNeedsIdentityCreation(false);
- setVerbethClient(null);
+ {/* Failed message actions */}
+ {isFailed && isOutgoing && (
+
+ Failed to send
+ retryFailedMessage(msg.id)}
+ className="flex items-center gap-1 text-blue-400 hover:text-blue-300 transition-colors"
+ title="Send again"
+ >
+
+ Retry
+
+ cancelQueuedMessage(msg.id)}
+ className="flex items-center gap-1 text-gray-400 hover:text-gray-300 transition-colors"
+ title="Delete message"
+ >
+
+ Delete
+
+
+ )}
+
+ );
};
+ // Get queue status for selected contact
+ const queueStatus = selectedContact ? getContactQueueStatus(selectedContact) : null;
return (
@@ -330,10 +381,10 @@ export default function App() {
{/* LEFT: title */}
- Unstoppable Chat
+ Verbeth
- powered by Verbeth
+ powered by the world computer
{/* RIGHT: auth buttons - EOA only */}
@@ -366,11 +417,29 @@ export default function App() {
setShowToast(false)} />
- {needsIdentityCreation ? (
+ {/* Session Setup Prompt - show when wallet connected but session not ready */}
+ {isConnected && sessionSignerAddr && !needsIdentityCreation && (needsSessionSetup || (sessionSignerBalance !== null && sessionSignerBalance < BigInt(0.0001 * 1e18))) && (
+
+ )}
+
+ {(needsIdentityCreation || needsModeSelection) ? (
) : showHandshakeForm ? (
a.timestamp - b.timestamp)
- .map((msg) => (
-
-
- {msg.type === "system" && msg.verified !== undefined && (
- msg.verified ? (
-
-
-
- Identity proof verified
-
-
- ) : (
-
-
-
- Identity proof not verified
-
-
- )
- )}
-
- {msg.type === "system" && msg.decrypted ? (
- <>
- {msg.decrypted.split(":")[0]}:
- {msg.decrypted.split(":").slice(1).join(":")}
- >
- ) : (
- msg.decrypted || msg.ciphertext
- )}
-
-
-
-
- {new Date(msg.timestamp).toLocaleTimeString()}
-
- {msg.direction === 'outgoing' && (
-
- {msg.status === 'confirmed' ? '✓✓' :
- msg.status === 'failed' ? '✗' :
- msg.status === 'pending' ? '✓' : '?'}
-
- )}
-
-
- ))}
+ .map(renderMessage)}
+
{messages.filter(m => {
const currentAddress = address;
if (!currentAddress || !selectedContact?.address) return false;
@@ -533,13 +553,37 @@ export default function App() {
(selectedContact.topicOutbound && m.topic === selectedContact.topicOutbound) ||
(selectedContact.topicInbound && m.topic === selectedContact.topicInbound)
);
- }).length === 0 && (
+ }).length === 0 && !hasPendingReset && (
No messages yet. {selectedContact.status === 'established' ? 'Start the conversation!' : 'Waiting for handshake completion.'}
)}
+
+ {hasPendingReset && pendingResetHandshake && (
+
+ )}
+ {/* Queue Status Indicator */}
+ {queueStatus && queueStatus.queueLength > 0 && (
+
+ {queueStatus.isProcessing ? (
+ <>
+
+ Sending {queueStatus.queueLength} message{queueStatus.queueLength > 1 ? 's' : ''}...
+ >
+ ) : (
+ <>
+ 📨
+ {queueStatus.queueLength} message{queueStatus.queueLength > 1 ? 's' : ''} queued
+ >
+ )}
+
+ )}
+
{/* Message Input */}
{selectedContact.status === 'established' && selectedContact.identityPubKey && (
@@ -584,85 +628,39 @@ export default function App() {
- {/* Activity Log + Debug Info */}
- {ready && (
-
-
-
setIsActivityLogOpen(!isActivityLogOpen)}
- >
-
-
-
Activity Log
-
- {isActivityLogOpen ? '▼' : '▶'}
-
-
- {canLoadMore && ready && isActivityLogOpen && (
-
{
- e.stopPropagation();
- loadMoreHistory();
- }}
- disabled={isLoadingMore}
- className="px-3 py-1 text-sm bg-gray-700 hover:bg-gray-600 disabled:bg-gray-800 disabled:cursor-not-allowed rounded flex items-center gap-2"
- >
- {isLoadingMore ? (
- <>
-
- Loading blocks...
- >
- ) : (
- <>
- 📂
- Load More History
- >
- )}
-
- )}
-
- {(isInitialLoading || isLoadingMore) && isActivityLogOpen && (
-
-
-
{isInitialLoading ? 'Initial sync...' : 'Loading more...'}
- {syncProgress && (
-
({syncProgress.current}/{syncProgress.total})
- )}
-
- )}
-
-
-
-
-
- {!isActivityLogOpen && (
-
-
Contract: {LOGCHAIN_SINGLETON_ADDR}
-
Network: Base (Chain ID: {chainId})
-
Contract creation block: {CONTRACT_CREATION_BLOCK}
-
Status: {ready ? '🟢 Ready' : '🔴 Not Ready'} {(isInitialLoading || isLoadingMore) ? '⏳ Loading' : ''}
-
- )}
-
- )}
)}
+
+ {ready && health.level === "warning" && !healthBannerDismissed && (
+
+
+ {health.message}
+ setHealthBannerDismissed(true)}
+ className="text-amber-400 hover:text-amber-200 font-bold px-2"
+ aria-label="Dismiss"
+ >
+ ×
+
+
+
+ )}
+
+ {ready && (
+
+
Contract: {VERBETH_SINGLETON_ADDR}
+
Network: Base ({chainId}) · {providerLabel}
+
Status: {ready ? '🟢 Ready' : '🔴 Not Ready'} {(isInitialLoading || isLoadingMore) ? '⏳ Loading' : ''}
+
+ Sync: {syncStatusLabel}
+ {syncStatus.lastError ? ` · Last error: ${syncStatus.lastError}` : ""}
+ {" · Health: "}{health.level === "ok" ? "OK" : "Warning"}
+
+
+ )}
);
-}
\ No newline at end of file
+}
diff --git a/apps/demo/src/components/CelebrationToast.tsx b/apps/demo/src/components/CelebrationToast.tsx
index 84dc9c3..b8b4538 100644
--- a/apps/demo/src/components/CelebrationToast.tsx
+++ b/apps/demo/src/components/CelebrationToast.tsx
@@ -11,7 +11,7 @@ export function CelebrationToast({ show, onClose }: CelebrationToastProps) {
if (!show) return;
const t = setTimeout(onClose, 3000);
return () => clearTimeout(t);
- }, [show, onClose]);
+ }, [show]); // eslint-disable-line react-hooks/exhaustive-deps
const width = 320;
const margin = 16;
diff --git a/apps/demo/src/components/IdentityCreation.tsx b/apps/demo/src/components/IdentityCreation.tsx
index 4b2fb70..bf1286e 100644
--- a/apps/demo/src/components/IdentityCreation.tsx
+++ b/apps/demo/src/components/IdentityCreation.tsx
@@ -1,8 +1,159 @@
+import { motion, AnimatePresence } from 'framer-motion';
+import { ExecutionMode } from '../types.js';
+import { useState } from 'react';
+
+
interface IdentityCreationProps {
loading: boolean;
- onCreateIdentity: () => void;
+ onCreateIdentity: (mode: ExecutionMode) => void;
onImportIdentity?: () => void;
address: string;
+ signingStep?: 1 | 2 | null;
+ needsModeSelection: boolean;
+ fastModeAvailable: boolean;
+ chainId: number;
+}
+
+function Spinner() {
+ return (
+
+
+
+
+ );
+}
+
+function StepIndicator({ step }: { step: 1 | 2 }) {
+ return (
+
+
+
+
+
+
+
+
+ {step > 1 ? (
+
+
+
+ ) : (
+ '1'
+ )}
+
+
+ Derive keys
+
+
+
+
+ {step >= 1 && (
+ 1 ? '100%' : '50%' }}
+ transition={{ duration: 0.3 }}
+ className="absolute inset-y-0 left-0 bg-blue-600"
+ />
+ )}
+
+
+
+
+ 2
+
+
+ Create proof
+
+
+
+
+ );
+}
+
+function ModeCard({
+ mode,
+ title,
+ description,
+ details,
+ recommended,
+ disabled,
+ disabledReason,
+ comingSoon,
+ onClick,
+}: {
+ mode: ExecutionMode;
+ title: string;
+ description: string;
+ details: string[];
+ recommended?: boolean;
+ disabled?: boolean;
+ disabledReason?: string;
+ comingSoon?: boolean;
+ onClick: () => void;
+}) {
+ return (
+
+
+ {title}
+ {recommended && (
+
+ Recommended
+
+ )}
+ {comingSoon && (
+
+ Coming Soon
+
+ )}
+
+ {description}
+
+ {details.map((detail, i) => (
+ • {detail}
+ ))}
+
+ {disabledReason && (
+ {disabledReason}
+ )}
+
+ );
}
export function IdentityCreation({
@@ -10,52 +161,162 @@ export function IdentityCreation({
onCreateIdentity,
onImportIdentity,
address,
+ signingStep,
+ needsModeSelection,
+ fastModeAvailable,
+ chainId,
}: IdentityCreationProps) {
+ //Track selected mode locally before identity creation starts
+ const [selectedMode, setSelectedMode] = useState(null);
+
+ const handleModeSelect = (mode: ExecutionMode) => {
+ setSelectedMode(mode);
+ };
+
+ const handleCreateIdentity = () => {
+ if (selectedMode) {
+ onCreateIdentity(selectedMode);
+ }
+ };
+
return (
+ {/* Step indicator - shows during signing */}
+
+ {signingStep && }
+
+
- {/* Connected as...*/}
{address
? <>Hello, {address.slice(0, 6)}...{address.slice(-4)} >
: "Not connected"}
- Create Your Identity
+ {needsModeSelection && !selectedMode
+ ? "Choose Your Experience"
+ : "Create Your Identity"}
-
- Choose how to set up your encrypted messaging identity:
-
-
-
- {loading ? "Creating..." : "Create New Identity (2 signatures)"}
-
+ {/* Mode Selection (before identity creation) */}
+ {needsModeSelection && !selectedMode && !signingStep && (
+
+ handleModeSelect('fast')}
+ />
-
- Import Previous Identity (Coming Soon)
-
-
+
handleModeSelect('classic')}
+ />
-
-
- You will be asked to sign two messages.
- The first signature deterministically derives your identity keys.
- The second signature creates an Identity Proof that binds the generated public keys
- to your wallet address, the current chain, and this dapp origin.
- Keys are stored locally and never leave your device.
-
-
+ {}}
+ />
+
+ )}
+
+ {/* Mode selected badge */}
+ {selectedMode && !signingStep && (
+
+
+ {selectedMode === 'fast' ? '⚡ Fast Mode' : '🔐 Classic Mode'}
+
+ setSelectedMode(null)}
+ className="text-xs text-gray-500 hover:text-gray-300"
+ >
+ Change
+
+
+ )}
+
+ {/* Create Identity button (after mode selection) */}
+ {selectedMode && (
+
+
+ {loading ? "Creating..." : "Create Identity"}
+
+
+ {/* Mode-specific info */}
+
+ {selectedMode === 'classic' ? (
+
+ You'll sign two messages to derive your identity keys.
+ Each future message will require wallet confirmation.
+
+ ) : (
+
+ You'll sign two messages to derive your identity keys.
+ After a one-time setup, messages are sent without popups.
+
+ )}
+
+
+ )}
+
+ {/* Back to mode selection if not started */}
+ {!needsModeSelection && !selectedMode && !signingStep && (
+
+
onCreateIdentity('fast')}
+ disabled={loading || !fastModeAvailable}
+ className="w-full px-4 py-3 bg-blue-600 hover:bg-blue-700 disabled:bg-gray-600 disabled:cursor-not-allowed rounded font-medium transition-colors"
+ >
+ {loading ? "Creating..." : "Create New"}
+
+
+
+ Import Previous Identity (Coming Soon)
+
+
+
+
+ You will be asked to sign two messages.
+ The first signature deterministically derives your identity keys.
+ The second signature creates an Identity Proof.
+
+
+
+ )}
);
diff --git a/apps/demo/src/components/InitialForm.tsx b/apps/demo/src/components/InitialForm.tsx
index 91051e0..74b088a 100644
--- a/apps/demo/src/components/InitialForm.tsx
+++ b/apps/demo/src/components/InitialForm.tsx
@@ -72,17 +72,17 @@ export function InitialForm({
onClick={onBackToChats}
className="text-sm text-gray-400 hover:text-white flex items-center gap-1 transition-colors"
>
- ← Back to chats
+ Go to chats
>
) : (
- {isAnyConnected ? "Hi, start your first chat" : "Have Your Say"}
+ {isAnyConnected ? "Hi, start your first chat" : "Send a message in a block"}
- Uncensorable. Private by design.
+ Private, unstoppable digital correspondence.
diff --git a/apps/demo/src/components/PinnedResetRequest.tsx b/apps/demo/src/components/PinnedResetRequest.tsx
new file mode 100644
index 0000000..e1ca5bf
--- /dev/null
+++ b/apps/demo/src/components/PinnedResetRequest.tsx
@@ -0,0 +1,79 @@
+// src/components/PinnedResetRequest.tsx
+
+import { useState } from 'react';
+import type { PendingHandshake } from '../types.js';
+
+interface PinnedResetRequestProps {
+ handshake: PendingHandshake;
+ onAccept: (handshake: PendingHandshake, message: string) => void;
+}
+
+export function PinnedResetRequest({ handshake, onAccept }: PinnedResetRequestProps) {
+ const [response, setResponse] = useState('');
+ const [isExpanded, setIsExpanded] = useState(false);
+
+ const handleAccept = () => {
+ if (response.trim()) {
+ onAccept(handshake, response.trim());
+ }
+ };
+
+ const shortAddress = `${handshake.sender.slice(0, 6)}...${handshake.sender.slice(-4)}`;
+
+ return (
+
+
+
setIsExpanded(!isExpanded)}
+ >
+
+
⚠️
+
+
+ {shortAddress} requests a new handshake
+
+
+ Your session was reset. Please accept to re-establish secure communication.
+
+
+
+ {isExpanded ? '▼' : '▶'}
+
+
+
+
+ {/* Expanded content */}
+ {isExpanded && (
+
+
+ "{handshake.message}"
+
+
+
+ setResponse(e.target.value)}
+ placeholder="Your reply..."
+ className="flex-1 px-3 py-1.5 bg-gray-900/80 border border-amber-800/40 rounded text-sm text-white placeholder-gray-500 focus:outline-none focus:border-amber-600"
+ onKeyDown={(e) => {
+ if (e.key === 'Enter' && response.trim()) {
+ handleAccept();
+ }
+ }}
+ />
+
+ Accept
+
+
+
+ )}
+
+
+ );
+}
\ No newline at end of file
diff --git a/apps/demo/src/components/SessionSetupPrompt.tsx b/apps/demo/src/components/SessionSetupPrompt.tsx
new file mode 100644
index 0000000..2ba5784
--- /dev/null
+++ b/apps/demo/src/components/SessionSetupPrompt.tsx
@@ -0,0 +1,159 @@
+interface SessionSetupPromptProps {
+ sessionSignerAddr: string | null;
+ sessionSignerBalance: bigint | null;
+ needsSessionSetup: boolean;
+ isSafeDeployed?: boolean;
+ isModuleEnabled?: boolean;
+ onSetupSession: () => void;
+ onRefreshBalance?: () => void;
+ loading: boolean;
+}
+
+export function SessionSetupPrompt({
+ sessionSignerAddr,
+ sessionSignerBalance,
+ needsSessionSetup,
+ isSafeDeployed = false,
+ isModuleEnabled = false,
+ onSetupSession,
+ onRefreshBalance,
+ loading,
+}: SessionSetupPromptProps) {
+ if (!sessionSignerAddr) return null;
+
+ const balanceEth = sessionSignerBalance !== null ? Number(sessionSignerBalance) / 1e18 : 0;
+ const needsFunding = sessionSignerBalance === null || sessionSignerBalance < BigInt(0.0001 * 1e18);
+
+ if (!loading) {
+ console.log(`[SessionSetupPrompt] balance: ${balanceEth} ETH, needsFunding: ${needsFunding}, needsSessionSetup: ${needsSessionSetup}, isSafeDeployed: ${isSafeDeployed}, isModuleEnabled: ${isModuleEnabled}`);
+ }
+
+ if (!needsFunding && !needsSessionSetup) {
+ return (
+
+
+ ✅ Ready for gasless messaging!
+
+
+ Session balance: {balanceEth.toFixed(6)} ETH
+
+
+ );
+ }
+
+ // Determine what setup is needed
+ const getSetupInfo = () => {
+ if (!isSafeDeployed) {
+ return {
+ txCount: "1 tx",
+ title: "Deploy & Authorize",
+ description: "One-time setup: deploy your Safe wallet, enable the session module, and authorize gasless messaging — all in a single transaction.",
+ steps: [
+ "Deploy your Safe smart wallet",
+ "Enable session module",
+ "Register session signer + allow Verbeth target",
+ ],
+ };
+ } else if (!isModuleEnabled) {
+ // Safe exists but module not enabled
+ return {
+ txCount: "2 txs",
+ title: "Enable Module & Authorize",
+ description: "Enable the session module on your existing Safe and authorize gasless messaging.",
+ steps: [
+ "Enable session module on Safe",
+ "Register session signer + allow Verbeth target",
+ ],
+ };
+ } else {
+ // Safe + module exist, just need session setup
+ return {
+ txCount: "1 tx",
+ title: "Authorize Session",
+ description: "One-time setup: authorize your session wallet to send messages without popups.",
+ steps: [
+ "Register session signer + allow Verbeth target",
+ ],
+ };
+ }
+ };
+
+ const setupInfo = getSetupInfo();
+
+ return (
+
+ {/* Funding prompt */}
+ {needsFunding && (
+
+
+ ⛽ Step 1: Fund Session Wallet
+
+
+ Send a small amount of ETH to this address to pay for gas:
+
+
+ {sessionSignerAddr}
+
+
+
+ Balance: {balanceEth.toFixed(6)} ETH
+
+ navigator.clipboard.writeText(sessionSignerAddr)}
+ className="text-sm px-3 py-1 bg-gray-700 hover:bg-gray-600 rounded transition-colors"
+ >
+ 📋 Copy
+
+ {onRefreshBalance && (
+
+ 🔄 Refresh
+
+ )}
+
+
+ Recommended: 0.001 - 0.005 ETH (~20-100 messages on Base)
+
+
+ )}
+
+ {/* Session setup prompt - only show when funded */}
+ {!needsFunding && needsSessionSetup && (
+
+
+ 🔑 Step 2: {setupInfo.title}
+
+
+ {setupInfo.description}
+
+
+ {/* Show what will happen */}
+
+
What happens:
+
+ {setupInfo.steps.map((step, i) => (
+ {step}
+ ))}
+
+
+
+
+ {loading ? (
+
+ ⏳ Setting up...
+
+ ) : (
+ `${setupInfo.title} (${setupInfo.txCount})`
+ )}
+
+
+ )}
+
+ );
+}
\ No newline at end of file
diff --git a/apps/demo/src/components/SideToastNotification.tsx b/apps/demo/src/components/SideToastNotification.tsx
index eb21a80..ba077f6 100644
--- a/apps/demo/src/components/SideToastNotification.tsx
+++ b/apps/demo/src/components/SideToastNotification.tsx
@@ -27,6 +27,7 @@ interface SideToastNotificationsProps {
sender: string;
message: string;
verified: boolean;
+ isExistingContact?: boolean;
onAccept: (msg: string) => void;
onReject: () => void;
}[];
@@ -122,11 +123,10 @@ export function SideToastNotifications({
{/* Notifications container */}
= 640) ? 'opacity-100' : 'opacity-0 sm:opacity-100'
- }`}
- style={{
- top: "5px",
+ className={`fixed left-1/2 z-[9999] w-full flex flex-col items-center pointer-events-none transition-opacity duration-300 ${shouldShowNotifications && (mobileVisible || window.innerWidth >= 640) ? 'opacity-100' : 'opacity-0 sm:opacity-100'
+ }`}
+ style={{
+ top: "5px",
transform: "translateX(-50%)",
visibility: shouldShowNotifications && (mobileVisible || window.innerWidth >= 640) ? 'visible' : 'hidden'
}}
@@ -207,6 +207,11 @@ export function SideToastNotifications({
⚠️ Unverified
)}
+ {notif.isExistingContact && (
+
+ 🔄 Session reset
+
+ )}
"{notif.message}"
@@ -239,7 +244,7 @@ export function SideToastNotifications({
const input = document.getElementById(
`side-toast-response-${notif.id}`
) as HTMLInputElement;
- const note = input?.value ?? "";
+ const note = input?.value ?? "";
notif.onAccept(note.trim());
if (input) input.value = "";
removeNotification(notif.id);
diff --git a/apps/demo/src/hooks/README.md b/apps/demo/src/hooks/README.md
deleted file mode 100644
index eca15fe..0000000
--- a/apps/demo/src/hooks/README.md
+++ /dev/null
@@ -1,56 +0,0 @@
-## Event listening logic
-
-```plaintext
-[ INITIAL SCAN (last X blocks) ] → [ REAL-TIME SCAN ONLY ]
- | |
- (loadMore) (new blocks)
- |
- [ SCAN OTHER OLDER BLOCKS ]
-```
-
-The listener maintains a rolling view of on-chain message events for the connected account. It performs three types of scans depending on context.
-
-### 1. Initial scan
-
-* Runs **only once per account** (no prior scan found).
-* Reads the last `INITIAL_SCAN_BLOCKS` (default: 1000).
-* Persists in DB:
-
- ```ts
- lastKnownBlock = currentBlock
- oldestScannedBlock = startBlock
- initialScanComplete[address] = true
- ```
-* Then switches to real-time mode
-
-### 2. Real-time scan
-
-* Starts automatically on every subsequent login.
-* Restores persisted state and catches up from where it left off:
-
- ```text
- lastKnownBlock + 1 → currentBlock - REAL_TIME_BUFFER
- ```
-
- (buffer ≈ 3 blocks behind the tip to avoid public RPC inconsistencies)
-* Polls every few seconds
-* With enough time, fetches any messages received while the user was offline
-* After that, it continues polling forward in real time with the same buffer
-
-### 3. “Load more history”
-
-* It is an optional action triggered by the user
-* Extends history further back in time:
-
- ```text
- oldestScannedBlock - 1 → older blocks
- ```
-* Updates `oldestScannedBlock` in the DB
-* Useful only for exploring very old messages
-
----
-
-
-> [!NOTE]
-> Opening a contact does not cause new chain reads. The listener always scans globally
-
diff --git a/apps/demo/src/hooks/index.ts b/apps/demo/src/hooks/index.ts
new file mode 100644
index 0000000..d131140
--- /dev/null
+++ b/apps/demo/src/hooks/index.ts
@@ -0,0 +1,12 @@
+// src/hooks/index.ts
+
+/**
+ * Hooks for VerbEth messaging app.
+ */
+
+export { useMessageProcessor } from './useMessageProcessor.js';
+export { useMessageQueue } from './useMessageQueue.js';
+export { useChatActions } from './useChatActions.js';
+export { usePendingSessionReset } from './usePendingSessionReset.js';
+
+export type { QueuedMessage, QueuedMessageStatus } from './useMessageQueue.js';
\ No newline at end of file
diff --git a/apps/demo/src/hooks/listener/README.md b/apps/demo/src/hooks/listener/README.md
new file mode 100644
index 0000000..e00e9f1
--- /dev/null
+++ b/apps/demo/src/hooks/listener/README.md
@@ -0,0 +1,90 @@
+## Event Listener Architecture
+
+The listener recovers every on-chain event for the connected account and keeps up with new blocks in real time. It is split into four independent paths that never block each other.
+
+```plaintext
+ Alchemy WS
+ (block notify)
+ |
+ v
+ +------------------+
+ | handleNewBlock |
+ | |
+ | clamp to the |
+ polling fallback | read provider |
+ (if no WS) ------->| tip so we never |
+ | scan past what |
+ | getLogs can see |
+ +--------+---------+
+ |
+ scan [lastKnown+1 .. maxSafe]
+ |
+ v
++----------------+ +----------------+ +--------------------+
+| Bootstrap | | Real-time | | Backfill worker |
+| | | | | |
+| first login: | | moves cursor | | retries failed |
+| scan recent | | forward each | | ranges from |
+| window | | block | | syncState with |
+| | | | | exp. backoff |
++-------+--------+ +-------+--------+ +---------+----------+
+ | | |
+ | catch-up on | |
+ | reconnect: | |
+ | only marks | |
+ | "synced" if no | |
+ | failed ranges | |
+ | remain | |
+ v v v
+ +---------------------------------------------------+
+ | processRange / getLogs |
+ | |
+ | reads from public HTTP node |
+ +---------------------------------------------------+
+ |
+ v
+ onEventsProcessed callback
+ |
+ v
+ dedup by receipt key
+ event:::-
+
+ |
+ v
+ +------------------------+
+ | loadMoreHistory |
+ | (user scrolls back) |
+ | scans older ranges |
+ | on demand |
+ +------------------------+
+```
+
+### How the pieces fit together
+
+**Bootstrap** runs once per account. On first login it scans a recent window of blocks and persists the cursor boundaries. On subsequent logins it catches up from the saved cursor to the current tip. If any chunk fails during catch-up, the failed range stays in `syncState` so the backfill worker can retry it later. The sync state is only marked "synced" when no pending ranges remain.
+
+**Real-time scanning** is driven by block notifications. When Alchemy WS is available, `watchBlockNumber` triggers the scan. Otherwise a polling interval checks the read provider every 5 seconds. Either way, the scan ceiling is clamped to what the read provider has actually indexed. This prevents the cursor from advancing past blocks the HTTP node has not served yet, which could cause silent message loss when the WS tip runs ahead.
+
+**Backfill worker** wakes up every 4 seconds and pulls one retryable range at a time from the persisted queue. Retries use exponential backoff with jitter (1.5s base, 2 min cap). Once the queue drains, status flips back to "synced". Single-flight execution keeps RPC usage friendly.
+
+**Load more** lets the user scroll backward into older history. It extends the scanned window toward the contract creation block in `INITIAL_SCAN_BLOCKS`-sized chunks.
+
+### RPC split
+
+Two providers serve different roles:
+
+- **Alchemy WS** -- block-head notifications only. Fast, low latency, no CORS issues.
+- **Public HTTP** -- all data reads (`getLogs`, `getBlockNumber`, balances). The real-time scan asks this provider for its tip before scanning, so the cursor never outruns what this node can actually return.
+
+### Idempotency
+
+Every processed event is tracked by a persistent receipt key (`event:<chainId>:<address>:<blockNumber>-<logIndex>`). Retries and overlapping scans are safe because the dedup layer silently drops anything already seen.
+
+### Modules
+
+- `useMessageListener.ts` -- orchestration (bootstrap, real-time, worker, UI state)
+- `listener/scanPlanner.ts` -- range planning and cursor clamping
+- `listener/logFetcher.ts` -- `getLogs` with retries and automatic range splitting
+- `listener/eventQuerySpecs.ts` -- event filters and log-to-ProcessedEvent mapping
+- `listener/syncStateStore.ts` -- persisted sync state helpers (load, save, enqueue, dequeue)
+- `listener/healthScore.ts` -- pure health scoring functions
\ No newline at end of file
diff --git a/apps/demo/src/hooks/listener/eventQuerySpecs.ts b/apps/demo/src/hooks/listener/eventQuerySpecs.ts
new file mode 100644
index 0000000..0e03786
--- /dev/null
+++ b/apps/demo/src/hooks/listener/eventQuerySpecs.ts
@@ -0,0 +1,184 @@
+import { AbiCoder, getBytes, keccak256, toUtf8Bytes } from "ethers";
+import { matchHsrToContact, type PendingContactEntry } from "@verbeth/sdk";
+import {
+ EVENT_SIGNATURES,
+ VERBETH_SINGLETON_ADDR,
+ type Contact,
+ type EventType,
+ type ProcessedEvent,
+} from "../../types.js";
+
+type RpcFilter = Record<string, unknown>;
+
+export interface ScanQueryContext {
+ address: string;
+ emitterAddress?: string;
+ activeTopics: string[];
+ pendingContacts: Contact[];
+}
+
+interface QuerySpec {
+ id: string;
+ eventType: EventType;
+ buildFilter: (ctx: ScanQueryContext) => RpcFilter | null;
+ mapLog?: (
+ log: any,
+ ctx: ScanQueryContext
+ ) => { matchedContactAddress?: string } | null;
+}
+
+function toLogIndex(log: any): number {
+ const value = typeof log.logIndex !== "undefined" ? log.logIndex : log.index;
+ return Number(value ?? 0);
+}
+
+function userRecipientHash(address: string): string {
+ return keccak256(toUtf8Bytes(`contact:${address.toLowerCase()}`));
+}
+
+function findMatchingContact(log: any, pendingContacts: Contact[]): Contact | null {
+ const inResponseTo = log.topics[1] as `0x${string}`;
+ const abiCoder = new AbiCoder();
+ const [responderEphemeralRBytes, ciphertextBytes] = abiCoder.decode(
+ ["bytes32", "bytes"],
+ log.data
+ );
+ const responderEphemeralR = getBytes(responderEphemeralRBytes);
+ const encryptedPayload = new TextDecoder().decode(getBytes(ciphertextBytes));
+
+ const entries: PendingContactEntry[] = pendingContacts
+ .filter(
+ (contact): contact is Contact & {
+ handshakeEphemeralSecret: string;
+ handshakeKemSecret: string;
+ } => !!contact.handshakeEphemeralSecret && !!contact.handshakeKemSecret
+ )
+ .map((contact) => ({
+ address: contact.address,
+ handshakeEphemeralSecret: getBytes(contact.handshakeEphemeralSecret),
+ kemSecretKey: getBytes(contact.handshakeKemSecret),
+ }));
+
+ const matchedAddress = matchHsrToContact(
+ entries,
+ inResponseTo,
+ responderEphemeralR,
+ encryptedPayload
+ );
+ if (!matchedAddress) return null;
+
+ return (
+ pendingContacts.find(
+ (contact) => contact.address.toLowerCase() === matchedAddress.toLowerCase()
+ ) ?? null
+ );
+}
+
+function getQuerySpecs(): QuerySpec[] {
+ return [
+ {
+ id: "handshake",
+ eventType: "handshake",
+ buildFilter: (ctx) => ({
+ address: VERBETH_SINGLETON_ADDR,
+ topics: [EVENT_SIGNATURES.Handshake, userRecipientHash(ctx.address)],
+ }),
+ },
+ {
+ id: "handshake_response",
+ eventType: "handshake_response",
+ buildFilter: (ctx) => {
+ if (ctx.pendingContacts.length === 0) return null;
+ return {
+ address: VERBETH_SINGLETON_ADDR,
+ topics: [EVENT_SIGNATURES.HandshakeResponse],
+ };
+ },
+ mapLog: (log, ctx) => {
+ const match = findMatchingContact(log, ctx.pendingContacts);
+ if (!match) return null;
+ return { matchedContactAddress: match.address };
+ },
+ },
+ {
+ id: "message_inbound",
+ eventType: "message",
+ buildFilter: (ctx) => {
+ if (ctx.activeTopics.length === 0) return null;
+ return {
+ address: VERBETH_SINGLETON_ADDR,
+ topics: [EVENT_SIGNATURES.MessageSent, null, ctx.activeTopics],
+ };
+ },
+ },
+ {
+ id: "message_outbound_confirmation",
+ eventType: "message",
+ buildFilter: (ctx) => {
+ const emitter = ctx.emitterAddress ?? ctx.address;
+ if (!emitter) return null;
+ const senderTopic =
+ "0x000000000000000000000000" + emitter.slice(2).toLowerCase();
+ return {
+ address: VERBETH_SINGLETON_ADDR,
+ topics: [EVENT_SIGNATURES.MessageSent, senderTopic],
+ };
+ },
+ },
+ ];
+}
+
+function toProcessedEvent(
+ log: any,
+ eventType: EventType,
+ extra: { matchedContactAddress?: string } | null
+): ProcessedEvent | null {
+ if (extra === null) return null;
+ const txHash = log.transactionHash as string;
+ const logIndex = toLogIndex(log);
+ const logKey = `${txHash}-${logIndex}`;
+
+ return {
+ logKey,
+ eventType,
+ rawLog: log,
+ txHash,
+ logIndex,
+ blockNumber: Number(log.blockNumber ?? 0),
+ timestamp: Date.now(),
+ matchedContactAddress: extra?.matchedContactAddress,
+ };
+}
+
+export async function collectEventsForRange(params: {
+ fromBlock: number;
+ toBlock: number;
+ context: ScanQueryContext;
+  getLogs: (filter: RpcFilter, fromBlock: number, toBlock: number) => Promise<any[]>;
+}): Promise<ProcessedEvent[]> {
+ const { fromBlock, toBlock, context, getLogs } = params;
+ const specs = getQuerySpecs();
+  const eventsByKey = new Map<string, ProcessedEvent>();
+
+ for (const spec of specs) {
+ const filter = spec.buildFilter(context);
+ if (!filter) continue;
+
+ const logs = await getLogs(filter, fromBlock, toBlock);
+ for (const log of logs) {
+ const extra = spec.mapLog ? spec.mapLog(log, context) : {};
+ const event = toProcessedEvent(log, spec.eventType, extra);
+ if (!event) continue;
+
+ const key = `${event.eventType}:${event.logKey}`;
+ if (!eventsByKey.has(key)) {
+ eventsByKey.set(key, event);
+ }
+ }
+ }
+
+ return Array.from(eventsByKey.values()).sort((a, b) => {
+ if (a.blockNumber !== b.blockNumber) return a.blockNumber - b.blockNumber;
+ return a.logIndex - b.logIndex;
+ });
+}
diff --git a/apps/demo/src/hooks/listener/healthScore.ts b/apps/demo/src/hooks/listener/healthScore.ts
new file mode 100644
index 0000000..97e45b0
--- /dev/null
+++ b/apps/demo/src/hooks/listener/healthScore.ts
@@ -0,0 +1,97 @@
+import type { ListenerHealthLevel, ListenerHealthReason, ListenerHealthStatus, ListenerSyncMode } from "../../types.js";
+
+export interface HealthMetrics {
+ rateLimitEvents: number[];
+ wsErrors: number[];
+ pendingRanges: number;
+ pendingRangesSince: number | null;
+ tipLagBlocks: number;
+ tipLagSince: number | null;
+ syncMode: ListenerSyncMode;
+}
+
+const WINDOW_MS = 60_000;
+const RATE_LIMIT_THRESHOLD = 5;
+const WS_ERROR_THRESHOLD = 3;
+const BACKLOG_THRESHOLD = 10;
+const BACKLOG_SUSTAINED_MS = 30_000;
+const TIP_LAG_THRESHOLD = 20;
+const TIP_LAG_SUSTAINED_MS = 30_000;
+
+export function pruneWindow(events: number[], windowMs: number): number[] {
+ const cutoff = Date.now() - windowMs;
+ return events.filter((t) => t > cutoff);
+}
+
+export function evaluateHealth(metrics: HealthMetrics): ListenerHealthStatus {
+ const now = Date.now();
+ const reasons: ListenerHealthReason[] = [];
+
+ if (metrics.syncMode === "degraded") {
+ reasons.push("backlog");
+ }
+
+ const recentRateLimits = metrics.rateLimitEvents.filter((t) => t > now - WINDOW_MS);
+ if (recentRateLimits.length >= RATE_LIMIT_THRESHOLD) {
+ reasons.push("rate_limit");
+ }
+
+ const recentWsErrors = metrics.wsErrors.filter((t) => t > now - WINDOW_MS);
+ if (recentWsErrors.length >= WS_ERROR_THRESHOLD) {
+ reasons.push("ws_error");
+ }
+
+ if (
+ metrics.pendingRanges >= BACKLOG_THRESHOLD &&
+ metrics.pendingRangesSince !== null &&
+ now - metrics.pendingRangesSince >= BACKLOG_SUSTAINED_MS
+ ) {
+ if (!reasons.includes("backlog")) {
+ reasons.push("backlog");
+ }
+ }
+
+ if (
+ metrics.tipLagBlocks >= TIP_LAG_THRESHOLD &&
+ metrics.tipLagSince !== null &&
+ now - metrics.tipLagSince >= TIP_LAG_SUSTAINED_MS
+ ) {
+ reasons.push("tip_lag");
+ }
+
+ const level: ListenerHealthLevel = reasons.length > 0 ? "warning" : "ok";
+ return {
+ level,
+ reasons,
+ message: level === "ok" ? "" : formatHealthMessage(reasons),
+ updatedAt: now,
+ };
+}
+
+export function formatHealthMessage(reasons: ListenerHealthReason[]): string {
+ const parts: string[] = [];
+ for (const r of reasons) {
+ switch (r) {
+ case "rate_limit":
+ parts.push("RPC rate limits detected");
+ break;
+ case "backlog":
+ parts.push("block scan backlog building up");
+ break;
+ case "tip_lag":
+ parts.push("read node falling behind chain tip");
+ break;
+ case "ws_error":
+ parts.push("WebSocket connection errors");
+ break;
+ }
+ }
+ return parts.join("; ") + ". Messaging remains fully functional.";
+}
+
+export const OK_HEALTH: ListenerHealthStatus = {
+ level: "ok",
+ reasons: [],
+ message: "",
+ updatedAt: 0,
+};
diff --git a/apps/demo/src/hooks/listener/logFetcher.ts b/apps/demo/src/hooks/listener/logFetcher.ts
new file mode 100644
index 0000000..28b4ca8
--- /dev/null
+++ b/apps/demo/src/hooks/listener/logFetcher.ts
@@ -0,0 +1,124 @@
+import type { BlockRange } from "../../types.js";
+import { splitRangeOnProviderLimit } from "./scanPlanner.js";
+
+type RpcFilter = Record<string, unknown>;
+
+export type FetcherTelemetryEvent =
+ | { type: "rate_limit"; attempt: number }
+ | { type: "retry"; attempt: number; error: string }
+ | { type: "range_split"; from: number; to: number }
+ | { type: "success"; from: number; to: number; logCount: number };
+
+interface LogFetcherConfig {
+ provider: any;
+ maxRetries: number;
+ maxRangeProvider: number;
+ baseDelayMs?: number;
+ onTelemetry?: (event: FetcherTelemetryEvent) => void;
+}
+
+function toMessage(error: unknown): string {
+ if (error instanceof Error) return error.message.toLowerCase();
+ return String(error ?? "").toLowerCase();
+}
+
+export function isRateLimitError(error: unknown): boolean {
+ const message = toMessage(error);
+ const code = (error as any)?.code;
+ return (
+ code === 429 ||
+ message.includes("429") ||
+ message.includes("rate") ||
+ message.includes("too many request") ||
+ message.includes("compute units exceeded") ||
+ message.includes("limit")
+ );
+}
+
+export function isRangeError(error: unknown): boolean {
+ const message = toMessage(error);
+ return (
+ message.includes("block range") ||
+ message.includes("query returned more than") ||
+ message.includes("response size") ||
+ message.includes("result exceeds")
+ );
+}
+
+function sleep(ms: number): Promise<void> {
+ return new Promise((resolve) => setTimeout(resolve, ms));
+}
+
+function backoffMs(baseDelayMs: number, attempt: number): number {
+ const jitter = Math.floor(Math.random() * 200);
+ return Math.floor(baseDelayMs * 2 ** attempt) + jitter;
+}
+
+export function createLogFetcher(config: LogFetcherConfig) {
+ const {
+ provider,
+ maxRetries,
+ maxRangeProvider,
+ baseDelayMs = 900,
+ onTelemetry,
+ } = config;
+
+ async function getLogsForRange(
+ filter: RpcFilter,
+ range: BlockRange,
+ attempt = 0
+  ): Promise<any[]> {
+ const splitRanges = splitRangeOnProviderLimit(
+ range.fromBlock,
+ range.toBlock,
+ maxRangeProvider
+ );
+
+ if (splitRanges.length > 1) {
+ const merged: any[] = [];
+ for (const splitRange of splitRanges) {
+ const logs = await getLogsForRange(filter, splitRange, 0);
+ merged.push(...logs);
+ }
+ return merged;
+ }
+
+ try {
+ const logs = await provider.getLogs({
+ ...filter,
+ fromBlock: range.fromBlock,
+ toBlock: range.toBlock,
+ });
+ onTelemetry?.({ type: "success", from: range.fromBlock, to: range.toBlock, logCount: logs.length });
+ return logs;
+ } catch (error) {
+ if (isRangeError(error) && range.fromBlock < range.toBlock) {
+ const [left, right] = splitRangeOnProviderLimit(
+ range.fromBlock,
+ range.toBlock,
+ Math.floor((range.toBlock - range.fromBlock) / 2)
+ );
+ if (left && right) {
+ onTelemetry?.({ type: "range_split", from: range.fromBlock, to: range.toBlock });
+ const leftLogs = await getLogsForRange(filter, left, 0);
+ const rightLogs = await getLogsForRange(filter, right, 0);
+ return [...leftLogs, ...rightLogs];
+ }
+ }
+
+ if (isRateLimitError(error) && attempt < maxRetries) {
+ onTelemetry?.({ type: "rate_limit", attempt });
+ const wait = backoffMs(baseDelayMs, attempt);
+ await sleep(wait);
+ return getLogsForRange(filter, range, attempt + 1);
+ }
+
+ onTelemetry?.({ type: "retry", attempt, error: toMessage(error) });
+ throw error;
+ }
+ }
+
+ return {
+ getLogsForRange,
+ };
+}
diff --git a/apps/demo/src/hooks/listener/scanPlanner.ts b/apps/demo/src/hooks/listener/scanPlanner.ts
new file mode 100644
index 0000000..49be48f
--- /dev/null
+++ b/apps/demo/src/hooks/listener/scanPlanner.ts
@@ -0,0 +1,55 @@
+import type { BlockRange } from "../../types.js";
+
+export function normalizeBlockRange(
+ fromBlock: number,
+ toBlock: number
+): BlockRange | null {
+ if (!Number.isFinite(fromBlock) || !Number.isFinite(toBlock)) return null;
+ if (fromBlock > toBlock) return null;
+ return { fromBlock, toBlock };
+}
+
+export function clampCursorToTip(cursor: number, tip: number): number {
+ if (!Number.isFinite(cursor)) return tip;
+ return Math.max(0, Math.min(cursor, tip));
+}
+
+export function planRanges(
+ fromBlock: number,
+ toBlock: number,
+ chunkSize: number
+): BlockRange[] {
+ const normalized = normalizeBlockRange(fromBlock, toBlock);
+ if (!normalized) return [];
+ const size = Math.max(1, Math.floor(chunkSize));
+ const ranges: BlockRange[] = [];
+ let start = normalized.fromBlock;
+
+ while (start <= normalized.toBlock) {
+ const end = Math.min(start + size - 1, normalized.toBlock);
+ ranges.push({ fromBlock: start, toBlock: end });
+ start = end + 1;
+ }
+
+ return ranges;
+}
+
+export function splitRangeOnProviderLimit(
+ fromBlock: number,
+ toBlock: number,
+ maxRangeProvider: number
+): BlockRange[] {
+ const normalized = normalizeBlockRange(fromBlock, toBlock);
+ if (!normalized) return [];
+ const limit = Math.max(1, Math.floor(maxRangeProvider));
+
+ if (normalized.toBlock - normalized.fromBlock <= limit) {
+ return [normalized];
+ }
+
+ const mid = normalized.fromBlock + Math.floor((normalized.toBlock - normalized.fromBlock) / 2);
+ return [
+ ...splitRangeOnProviderLimit(normalized.fromBlock, mid, limit),
+ ...splitRangeOnProviderLimit(mid + 1, normalized.toBlock, limit),
+ ];
+}
diff --git a/apps/demo/src/hooks/listener/syncStateStore.ts b/apps/demo/src/hooks/listener/syncStateStore.ts
new file mode 100644
index 0000000..eef45cb
--- /dev/null
+++ b/apps/demo/src/hooks/listener/syncStateStore.ts
@@ -0,0 +1,150 @@
+import { dbService } from "../../services/DbService.js";
+import type {
+ ListenerSyncStatus,
+ PendingRange,
+ PersistedSyncState,
+} from "../../types.js";
+
+export function createDefaultSyncState(): PersistedSyncState {
+ return {
+ pendingRanges: [],
+ status: "idle",
+ updatedAt: Date.now(),
+ };
+}
+
+export function toSyncStatus(state: PersistedSyncState): ListenerSyncStatus {
+ const pending = state.pendingRanges.length;
+ if (pending > 0 && state.status === "catching_up") {
+ return {
+ mode: "catching_up",
+ pendingRanges: pending,
+ lastError: state.lastError,
+ isComplete: false,
+ };
+ }
+ if (pending > 0 && state.status === "degraded") {
+ return {
+ mode: "retrying",
+ pendingRanges: pending,
+ lastError: state.lastError,
+ isComplete: false,
+ };
+ }
+ if (state.status === "degraded") {
+ return {
+ mode: "degraded",
+ pendingRanges: pending,
+ lastError: state.lastError,
+ isComplete: false,
+ };
+ }
+ if (state.status === "synced" && pending === 0) {
+ return {
+ mode: "synced",
+ pendingRanges: 0,
+ lastError: state.lastError,
+ isComplete: true,
+ };
+ }
+ return {
+ mode: "idle",
+ pendingRanges: pending,
+ lastError: state.lastError,
+ isComplete: pending === 0,
+ };
+}
+
+export async function loadSyncState(addr: string): Promise<PersistedSyncState> {
+ return (await dbService.getSyncState(addr)) ?? createDefaultSyncState();
+}
+
+export async function saveSyncState(
+ addr: string,
+  patch: Partial<PersistedSyncState>
+): Promise<PersistedSyncState> {
+ const prev = await loadSyncState(addr);
+ const next: PersistedSyncState = {
+ ...prev,
+ ...patch,
+ updatedAt: Date.now(),
+ };
+ await dbService.setSyncState(addr, next);
+ return next;
+}
+
+export async function enqueueRanges(
+ addr: string,
+ ranges: PendingRange[],
+ status: PersistedSyncState["status"],
+ targetTip?: number
+): Promise<PersistedSyncState> {
+ const state = await loadSyncState(addr);
+  const byRange = new Map<string, PendingRange>();
+
+ for (const existing of state.pendingRanges) {
+ byRange.set(`${existing.fromBlock}-${existing.toBlock}`, existing);
+ }
+ for (const incoming of ranges) {
+ const key = `${incoming.fromBlock}-${incoming.toBlock}`;
+ const prev = byRange.get(key);
+ if (!prev) {
+ byRange.set(key, incoming);
+ continue;
+ }
+ byRange.set(key, {
+ ...prev,
+ ...incoming,
+ attempts: Math.max(prev.attempts, incoming.attempts),
+ });
+ }
+
+ const pendingRanges = Array.from(byRange.values()).sort((a, b) => {
+ if (a.nextRetryAt !== b.nextRetryAt) return a.nextRetryAt - b.nextRetryAt;
+ return a.fromBlock - b.fromBlock;
+ });
+
+ return saveSyncState(addr, {
+ status,
+ pendingRanges,
+ targetTip: targetTip ?? state.targetTip,
+ });
+}
+
+export async function dequeueRetryableRanges(
+ addr: string,
+ now: number,
+ limit = 1
+): Promise<PendingRange[]> {
+ const prevState = await loadSyncState(addr);
+ if (prevState.pendingRanges.length === 0) return [];
+
+ const retryable = prevState.pendingRanges
+ .filter((r) => r.nextRetryAt <= now)
+ .sort((a, b) => {
+ if (a.nextRetryAt !== b.nextRetryAt) return a.nextRetryAt - b.nextRetryAt;
+ return a.attempts - b.attempts;
+ })
+ .slice(0, Math.max(1, limit));
+
+ if (retryable.length === 0) return [];
+
+ const retryableKeys = new Set(
+ retryable.map((r) => `${r.fromBlock}-${r.toBlock}-${r.attempts}`)
+ );
+ const remaining = prevState.pendingRanges.filter(
+ (r) => !retryableKeys.has(`${r.fromBlock}-${r.toBlock}-${r.attempts}`)
+ );
+
+ await saveSyncState(addr, {
+ pendingRanges: remaining,
+ status: remaining.length > 0 ? prevState.status : "synced",
+ lastRetryAt: now,
+ });
+
+ return retryable;
+}
+
+export async function clearSyncState(addr: string): Promise<void> {
+ await dbService.clearSyncState(addr);
+}
diff --git a/apps/demo/src/hooks/useChatActions.ts b/apps/demo/src/hooks/useChatActions.ts
index 32072dd..47409d0 100644
--- a/apps/demo/src/hooks/useChatActions.ts
+++ b/apps/demo/src/hooks/useChatActions.ts
@@ -1,49 +1,90 @@
-import { useCallback } from "react";
+// src/hooks/useChatActions.ts
+// CLEANED VERSION - uses VerbethClient for all session creation
+
+/**
+ * Chat Actions Hook.
+ *
+ * Provides high-level chat actions:
+ * - sendHandshake / acceptHandshake
+ * - sendMessageToContact
+ * - Retry/cancel failed messages
+ * - Queue status management
+ *
+ * Uses VerbethClient for session creation.
+ */
+
+import { useCallback, useRef } from "react";
+import { hexlify } from "ethers";
+import type { VerbethClient } from "@verbeth/sdk";
+import { dbService } from "../services/DbService.js";
import {
- pickOutboundTopic,
- VerbethClient
-} from "@verbeth/sdk";
-import { Contact, generateTempMessageId } from "../types.js";
+ Contact,
+ generateTempMessageId,
+} from "../types.js";
+import { useMessageQueue } from "./useMessageQueue.js";
interface UseChatActionsProps {
- verbethClient: VerbethClient | null;
- addLog: (message: string) => void;
+ verbethClient: VerbethClient | null;
   updateContact: (contact: Contact) => Promise<void>;
   addMessage: (message: any) => Promise<void>;
+  updateMessageStatus: (id: string, status: "pending" | "confirmed" | "failed", error?: string) => Promise<void>;
+  removeMessage: (id: string) => Promise<void>;
   removePendingHandshake: (id: string) => Promise<void>;
setSelectedContact: (contact: Contact | null) => void;
setLoading: (loading: boolean) => void;
setMessage: (message: string) => void;
setRecipientAddress: (address: string) => void;
+  markMessagesLost: (contactAddress: string, afterTimestamp: number) => Promise<number>;
}
export const useChatActions = ({
verbethClient,
- addLog,
updateContact,
addMessage,
+ updateMessageStatus,
+ removeMessage,
removePendingHandshake,
setSelectedContact,
setLoading,
setMessage,
setRecipientAddress,
+ markMessagesLost,
}: UseChatActionsProps) => {
+ const {
+ queueMessage,
+ retryMessage,
+ cancelMessage,
+ getQueueStatus,
+ invalidateSessionCache,
+ clearAllQueues,
+ } = useMessageQueue({
+ verbethClient,
+ addMessage,
+ updateMessageStatus,
+ removeMessage,
+ updateContact,
+ });
+
+ // Guard against duplicate acceptHandshake calls (e.g., rapid double-click)
+  const acceptingHandshakesRef = useRef<Set<string>>(new Set());
+
+ // ===========================================================================
+ // Handshake Operations
+ // ===========================================================================
+
+ /**
+ * Send a handshake to initiate contact.
+ * Stores ephemeral secret and KEM secret for ratchet session init when response arrives.
+ */
const sendHandshake = useCallback(
async (recipientAddress: string, message: string) => {
- if (!verbethClient) {
- addLog("✗ Client not initialized");
- return;
- }
-
- if (!recipientAddress || !message) {
- addLog("✗ Missing recipient address or message");
- return;
- }
+ if (!verbethClient) return;
+ if (!recipientAddress || !message) return;
setLoading(true);
try {
- const { tx, ephemeralKeyPair } = await verbethClient.sendHandshake(
+ const { tx, ephemeralKeyPair, kemKeyPair } = await verbethClient.sendHandshake(
recipientAddress,
message
);
@@ -52,7 +93,8 @@ export const useChatActions = ({
address: recipientAddress,
ownerAddress: verbethClient.userAddress,
status: "handshake_sent",
- ephemeralKey: ephemeralKeyPair.secretKey,
+ handshakeEphemeralSecret: hexlify(ephemeralKeyPair.secretKey),
+ handshakeKemSecret: hexlify(kemKeyPair.secretKey),
lastMessage: message,
lastTimestamp: Date.now(),
};
@@ -81,25 +123,16 @@ export const useChatActions = ({
await addMessage(handshakeMessage);
- addLog(
- `Handshake sent to ${recipientAddress.slice(0, 8)}...: "${message}" (tx: ${tx.hash})`
- );
setMessage("");
setRecipientAddress("");
} catch (error) {
- console.error("Failed to send handshake:", error);
- addLog(
- `✗ Failed to send handshake: ${
- error instanceof Error ? error.message : "Unknown error"
- }`
- );
+ console.error("[verbeth] handshake failed:", error);
} finally {
setLoading(false);
}
},
[
- verbethClient,
- addLog,
+ verbethClient,
updateContact,
addMessage,
setSelectedContact,
@@ -109,40 +142,80 @@ export const useChatActions = ({
]
);
+ /**
+ * Accept a handshake from another user.
+ * Creates ratchet session using VerbethClient and establishes contact.
+ * Supports PQ-hybrid: if initiator includes KEM, kemSharedSecret is derived.
+ */
const acceptHandshake = useCallback(
async (handshake: any, responseMessage: string) => {
+ // Prevent duplicate calls for same handshake
+ if (acceptingHandshakesRef.current.has(handshake.id)) {
+ return;
+ }
+ acceptingHandshakesRef.current.add(handshake.id);
+
if (!verbethClient) {
- addLog("✗ Client not initialized");
+ acceptingHandshakesRef.current.delete(handshake.id);
return;
}
try {
- const { tx, duplexTopics } = await verbethClient.acceptHandshake(
- handshake.ephemeralPubKey,
- handshake.identityPubKey,
+ // Use full ephemeral key (may include KEM public key)
+ const ephemeralKey = handshake.ephemeralPubKeyFull || handshake.ephemeralPubKey;
+
+ const {
+ salt,
+ responderEphemeralSecret,
+ responderEphemeralPublic,
+ kemSharedSecret,
+ } = await verbethClient.acceptHandshake(
+ ephemeralKey,
responseMessage
);
- // Client auto-derived topics, just use them!
+ // Create session using VerbethClient (handles topic derivation and hybrid KDF)
+ const ratchetSession = verbethClient.createResponderSession({
+ contactAddress: handshake.sender,
+ responderEphemeralSecret,
+ responderEphemeralPublic,
+ initiatorEphemeralPubKey: ephemeralKey,
+ salt,
+ kemSharedSecret,
+ });
+
+ // Save session - SDK will pick it up via SessionStore adapter
+ await dbService.ratchet.saveRatchetSession(ratchetSession);
+
const newContact: Contact = {
address: handshake.sender,
ownerAddress: verbethClient.userAddress,
status: "established",
identityPubKey: handshake.identityPubKey,
signingPubKey: handshake.signingPubKey,
- topicOutbound: pickOutboundTopic(false, duplexTopics), // Responder
- topicInbound: pickOutboundTopic(true, duplexTopics), // Responder
+ topicOutbound: ratchetSession.currentTopicOutbound,
+ topicInbound: ratchetSession.currentTopicInbound,
+ conversationId: ratchetSession.conversationId,
lastMessage: responseMessage,
lastTimestamp: Date.now(),
};
await updateContact(newContact);
+
+ // Mark messages as lost if this is a session reset
+ if (handshake.isExistingContact && handshake.timestamp) {
+ const lostCount = await markMessagesLost(handshake.sender, handshake.timestamp);
+ if (lostCount > 0) {
+ console.log(`${lostCount} messages marked as lost`);
+ }
+ }
+
await removePendingHandshake(handshake.id);
setSelectedContact(newContact);
const acceptanceMessage = {
id: generateTempMessageId(),
- topic: duplexTopics.topicOut,
+ topic: ratchetSession.currentTopicOutbound,
sender: verbethClient.userAddress,
recipient: handshake.sender,
ciphertext: "",
@@ -160,112 +233,99 @@ export const useChatActions = ({
};
await addMessage(acceptanceMessage);
-
- addLog(
- `✅ Handshake accepted from ${handshake.sender.slice(
- 0,
- 8
- )}...: "${responseMessage}"`
- );
} catch (error) {
- console.error("Failed to accept handshake:", error);
- addLog(
- `✗ Failed to accept handshake: ${
- error instanceof Error ? error.message : "Unknown error"
- }`
- );
+ console.error("[verbeth] accept handshake failed:", error);
+ } finally {
+ acceptingHandshakesRef.current.delete(handshake.id);
}
},
[
- verbethClient,
- addLog,
+ verbethClient,
updateContact,
removePendingHandshake,
addMessage,
setSelectedContact,
+ markMessagesLost,
]
);
+ // ===========================================================================
+ // Message Operations
+ // ===========================================================================
+
+ /**
+ * Send a message to a contact.
+ * Uses the message queue for sequential processing.
+ */
const sendMessageToContact = useCallback(
async (contact: Contact, messageText: string) => {
- if (!verbethClient) {
- addLog("✗ Client not initialized");
- return;
- }
+ if (!verbethClient || !contact.conversationId) return;
- if (!contact.identityPubKey) {
- addLog("✗ Contact not established or missing identity key");
- return;
+ const messageId = await queueMessage(contact, messageText);
+
+ if (messageId) {
+ setMessage("");
}
+ },
+ [verbethClient, queueMessage, setMessage]
+ );
- setLoading(true);
- try {
- if (!contact.topicOutbound) {
- addLog("✗ Contact doesn't have outbound topic established");
- return;
- }
-
- await verbethClient.sendMessage(
- contact.topicOutbound,
- contact.identityPubKey,
- messageText
- );
-
- // Create pending message for UI
- const pendingMessage = {
- id: generateTempMessageId(),
- topic: contact.topicOutbound,
- sender: verbethClient.userAddress,
- recipient: contact.address,
- ciphertext: "",
- timestamp: Date.now(),
- blockTimestamp: Date.now(),
- blockNumber: 0,
- direction: "outgoing" as const,
- decrypted: messageText,
- read: true,
- nonce: 0,
- dedupKey: `pending-${generateTempMessageId()}`,
- type: "text" as const,
- ownerAddress: verbethClient.userAddress,
- status: "pending" as const,
- };
+ /**
+ * Retry a failed message.
+ * Note: The message number will be different from the original attempt.
+ */
+ const retryFailedMessage = useCallback(
+ async (messageId: string) => {
+ await retryMessage(messageId);
+ },
+ [retryMessage]
+ );
- await addMessage(pendingMessage);
+ /**
+ * Cancel a queued message.
+ */
+ const cancelQueuedMessage = useCallback(
+ async (messageId: string) => {
+ await cancelMessage(messageId);
+ },
+ [cancelMessage]
+ );
- const updatedContact: Contact = {
- ...contact,
- lastMessage: messageText,
- lastTimestamp: Date.now(),
- };
- await updateContact(updatedContact);
+ /**
+ * Get the queue status for a contact.
+ */
+ const getContactQueueStatus = useCallback(
+ (contact: Contact) => {
+ if (!contact.conversationId) {
+ return { queueLength: 0, isProcessing: false, pendingMessages: [] };
+ }
+ return getQueueStatus(contact.conversationId);
+ },
+ [getQueueStatus]
+ );
- addLog(
- `Message sent to ${contact.address.slice(0, 8)}...: "${messageText}"`
- );
- } catch (error) {
- console.error("Failed to send message:", error);
- addLog(
- `✗ Failed to send message: ${
- error instanceof Error ? error.message : "Unknown error"
- }`
- );
- } finally {
- setLoading(false);
+ /**
+ * Invalidate session cache for a contact.
+ * Call this when a session is reset or updated externally.
+ */
+ const invalidateContactSessionCache = useCallback(
+ (contact: Contact) => {
+ if (contact.conversationId) {
+ invalidateSessionCache(contact.conversationId);
}
},
- [
- verbethClient,
- addLog,
- addMessage,
- updateContact,
- setLoading,
- ]
+ [invalidateSessionCache]
);
return {
sendHandshake,
acceptHandshake,
sendMessageToContact,
+ // Queue-related actions
+ retryFailedMessage,
+ cancelQueuedMessage,
+ getContactQueueStatus,
+ invalidateContactSessionCache,
+ clearAllQueues,
};
-};
+};
\ No newline at end of file
diff --git a/apps/demo/src/hooks/useInitIdentity.ts b/apps/demo/src/hooks/useInitIdentity.ts
new file mode 100644
index 0000000..17a8327
--- /dev/null
+++ b/apps/demo/src/hooks/useInitIdentity.ts
@@ -0,0 +1,384 @@
+// src/hooks/useInitIdentity.ts
+import { useState, useEffect, useCallback, useMemo } from 'react';
+import { BrowserProvider, Wallet } from 'ethers';
+import {
+ VerbethV1__factory,
+ type VerbethV1,
+} from '@verbeth/contracts/typechain-types/index.js';
+import {
+ IExecutor,
+ ExecutorFactory,
+ deriveIdentityKeys,
+ createBindingProof,
+ DerivedIdentityKeys,
+ IdentityKeyPair,
+ IdentityProof,
+ SafeSessionSigner,
+} from '@verbeth/sdk';
+import { dbService } from '../services/DbService.js';
+import {
+ getOrCreateSafeForOwner,
+ predictVerbEthSafeAddress,
+ isHelperAvailable,
+} from '../services/safeAccount.js';
+import {
+ VERBETH_SINGLETON_ADDR,
+ SAFE_MODULE_ADDRESS,
+ StoredIdentity,
+ ExecutionMode,
+} from '../types.js';
+
+interface UseInitIdentityParams {
+ walletClient: any;
+ address: string | undefined;
+ chainId: number;
+ readProvider: any;
+ ready: boolean;
+ onIdentityCreated?: () => void;
+ onReset?: () => void;
+}
+
+export function useInitIdentity({
+ walletClient,
+ address,
+ chainId,
+ readProvider,
+ ready,
+ onIdentityCreated,
+ onReset,
+}: UseInitIdentityParams) {
+ const [identityKeyPair, setIdentityKeyPair] = useState<IdentityKeyPair | null>(null);
+ const [identityProof, setIdentityProof] = useState<IdentityProof | null>(null);
+ const [executor, setExecutor] = useState<IExecutor | null>(null);
+ const [contract, setContract] = useState<VerbethV1 | null>(null);
+ const [identitySigner, setIdentitySigner] = useState(null);
+ const [txSigner, setTxSigner] = useState(null);
+ const [safeAddr, setSafeAddr] = useState<string | null>(null);
+ const [currentAccount, setCurrentAccount] = useState<string | null>(null);
+ const [needsIdentityCreation, setNeedsIdentityCreation] = useState(false);
+ const [loading, setLoading] = useState(false);
+ const [reinitTrigger, setReinitTrigger] = useState(0);
+
+ const [sessionSignerAddr, setSessionSignerAddr] = useState<string | null>(null);
+ const [needsSessionSetup, setNeedsSessionSetup] = useState(false);
+ const [isSafeDeployed, setIsSafeDeployed] = useState(false);
+ const [isModuleEnabled, setIsModuleEnabled] = useState(false);
+ const [signingStep, setSigningStep] = useState<1 | 2 | null>(null);
+
+ // Execution mode state
+ const [executionMode, setExecutionMode] = useState<ExecutionMode | null>(null);
+ const [needsModeSelection, setNeedsModeSelection] = useState(false);
+ const [emitterAddress, setEmitterAddress] = useState<string | null>(null);
+
+ const rpId = globalThis.location?.host ?? '';
+ const identityContext = useMemo(() => ({ chainId, rpId }), [chainId, rpId]);
+
+ // Check if fast mode is available on this chain
+ const fastModeAvailable = useMemo(() => isHelperAvailable(chainId), [chainId]);
+
+ const resetState = useCallback(() => {
+ setCurrentAccount(null);
+ setIdentityKeyPair(null);
+ setIdentityProof(null);
+ setIdentitySigner(null);
+ setTxSigner(null);
+ setContract(null);
+ setExecutor(null);
+ setSafeAddr(null);
+ setSessionSignerAddr(null);
+ setNeedsIdentityCreation(false);
+ setNeedsSessionSetup(false);
+ setIsSafeDeployed(false);
+ setIsModuleEnabled(false);
+ setExecutionMode(null);
+ setNeedsModeSelection(false);
+ setEmitterAddress(null);
+ onReset?.();
+ }, [onReset]);
+
+ const switchToAccount = useCallback(async (newAddress: string) => {
+ setIdentityKeyPair(null);
+ setIdentityProof(null);
+
+ await dbService.switchAccount(newAddress);
+ setCurrentAccount(newAddress);
+
+ const storedIdentity = await dbService.getIdentity(newAddress);
+ if (storedIdentity && storedIdentity.sessionPrivateKey) {
+ setIdentityKeyPair(storedIdentity.keyPair);
+ setIdentityProof(storedIdentity.proof ?? null);
+ // Restore mode from storage
+ setExecutionMode(storedIdentity.executionMode ?? 'fast'); // default to fast for legacy
+ setEmitterAddress(storedIdentity.emitterAddress ?? null);
+ setNeedsIdentityCreation(false);
+ setNeedsModeSelection(false);
+ } else if (storedIdentity && !storedIdentity.sessionPrivateKey) {
+ setNeedsModeSelection(true);
+ } else {
+ // Need mode selection before identity creation
+ setNeedsModeSelection(true);
+ }
+ }, []);
+
+ const initializeWagmiAccount = useCallback(async () => {
+ if (!walletClient || !address || !readProvider) return;
+
+ const ethersProvider = new BrowserProvider(walletClient.transport);
+ const ethersSigner = await ethersProvider.getSigner();
+ setIdentitySigner(ethersSigner);
+
+ if (address !== currentAccount) {
+ await switchToAccount(address);
+ }
+
+ const net = await ethersProvider.getNetwork();
+ if (Number(net.chainId) !== chainId) {
+ console.error(`[verbeth] wrong network: connected to chain ${Number(net.chainId)}, expected ${chainId}`);
+ return;
+ }
+
+ const storedIdentity = await dbService.getIdentity(address);
+
+ if (!storedIdentity || !storedIdentity.sessionPrivateKey) {
+ return;
+ }
+
+ const currentMode = storedIdentity.executionMode ?? 'fast';
+ setExecutionMode(currentMode);
+ setEmitterAddress(storedIdentity.emitterAddress ?? address);
+
+ // =========================================================================
+ // CLASSIC MODE: EOA executor, no Safe setup needed
+ // =========================================================================
+ if (currentMode === 'classic') {
+
+ const contractInstance = VerbethV1__factory.connect(VERBETH_SINGLETON_ADDR, ethersSigner as any);
+ const executorInstance = ExecutorFactory.createEOA(contractInstance);
+
+ setExecutor(executorInstance);
+ setContract(contractInstance);
+ setTxSigner(ethersSigner);
+ setSafeAddr(null);
+ setNeedsSessionSetup(false);
+ return;
+ }
+
+ // =========================================================================
+ // FAST MODE: VerbEth Safe + session signer
+ // =========================================================================
+ const sessionPrivKey = storedIdentity.sessionPrivateKey;
+ const sessionWallet = new Wallet(sessionPrivKey, readProvider);
+ const sessionAddr = storedIdentity.sessionAddress ?? await sessionWallet.getAddress();
+ setSessionSignerAddr(sessionAddr);
+
+ const { safeAddress, isDeployed, moduleEnabled } = await getOrCreateSafeForOwner({
+ chainId,
+ ownerAddress: address as `0x${string}`,
+ providerEip1193: walletClient.transport,
+ ethersSigner,
+ deployIfMissing: false,
+ sessionConfig: {
+ sessionSigner: sessionAddr,
+ target: VERBETH_SINGLETON_ADDR,
+ },
+ // Never use API for fast mode
+ useApiLookup: false,
+ });
+
+ setSafeAddr(safeAddress);
+ setIsSafeDeployed(isDeployed);
+ setIsModuleEnabled(moduleEnabled ?? false);
+ if (!isDeployed || !(moduleEnabled ?? false)) {
+ setNeedsSessionSetup(true);
+ }
+
+ console.log(`\n========== SAFE & SESSION INFO (Fast Mode) ==========`);
+ console.log(`Connected EOA wallet: ${address}`);
+ console.log(`VerbEth Safe address: ${safeAddress}`);
+ console.log(` Safe deployed: ${isDeployed}`);
+ console.log(` Module enabled: ${moduleEnabled}`);
+ console.log(` Chain ID: ${chainId}`);
+ console.log(` Session signer: ${sessionAddr}`);
+
+ const balance = await readProvider.getBalance(sessionAddr);
+ console.log(` Session signer balance: ${Number(balance) / 1e18} ETH`);
+
+ const safeSessionSigner = new SafeSessionSigner({
+ provider: readProvider,
+ safeAddress,
+ moduleAddress: SAFE_MODULE_ADDRESS,
+ verbEthAddress: VERBETH_SINGLETON_ADDR,
+ sessionSigner: sessionWallet,
+ });
+ setTxSigner(safeSessionSigner);
+
+ if (isDeployed) {
+ const isValid = await safeSessionSigner.isSessionValid();
+ const isTargetAllowed = await safeSessionSigner.isTargetAllowed();
+ console.log(` Session valid: ${isValid}`);
+ console.log(` Target allowed: ${isTargetAllowed}`);
+ setNeedsSessionSetup(!isValid || !isTargetAllowed);
+ }
+
+ console.log(`=====================================================\n`);
+
+ const contractInstance = VerbethV1__factory.connect(VERBETH_SINGLETON_ADDR, safeSessionSigner as any);
+ const executorInstance = ExecutorFactory.createEOA(contractInstance);
+
+ setExecutor(executorInstance);
+ setContract(contractInstance);
+ }, [walletClient, address, currentAccount, chainId, readProvider, switchToAccount]);
+
+ const createIdentity = useCallback(async (selectedMode: ExecutionMode) => {
+ if (!identitySigner || !address || !walletClient) {
+ return;
+ }
+
+ if (selectedMode === 'custom') {
+ return;
+ }
+
+ setSigningStep(1);
+ setLoading(true);
+
+ try {
+ // ================================================================
+ // Step 1: Derive keys (same for all modes)
+ // ================================================================
+
+ const derivedKeys: DerivedIdentityKeys = await deriveIdentityKeys(
+ identitySigner,
+ address
+ );
+
+ console.log(`✓ Keys derived, session signer: ${derivedKeys.sessionAddress}`);
+
+ // ================================================================
+ // Step 2: Determine emitter address based on mode
+ // ================================================================
+ let emitter: string;
+
+ if (selectedMode === 'classic') {
+ // Classic mode: EOA is the emitter
+ emitter = address;
+ console.log(`✓ Classic mode: emitter = EOA (${address})`);
+ } else {
+ // Fast mode: Predict VerbEth Safe address (deterministic, no API)
+ emitter = await predictVerbEthSafeAddress({
+ chainId,
+ ownerAddress: address as `0x${string}`,
+ sessionSignerAddr: derivedKeys.sessionAddress,
+ providerEip1193: walletClient.transport,
+ });
+ console.log(`✓ Fast mode: emitter = VerbEth Safe (${emitter})`);
+ }
+
+ // ================================================================
+ // Step 3: Create binding proof with correct emitter
+ // ================================================================
+ setSigningStep(2);
+
+ const proof = await createBindingProof(
+ identitySigner,
+ address,
+ derivedKeys,
+ emitter,
+ identityContext
+ );
+
+ // ================================================================
+ // Step 4: Store identity with mode info
+ // ================================================================
+ const identityToStore: StoredIdentity = {
+ address: address,
+ keyPair: derivedKeys.keyPair,
+ derivedAt: Date.now(),
+ proof: proof,
+ sessionPrivateKey: derivedKeys.sessionPrivateKey,
+ sessionAddress: derivedKeys.sessionAddress,
+ // Store mode and emitter
+ executionMode: selectedMode,
+ emitterAddress: emitter,
+ };
+
+ await dbService.saveIdentity(identityToStore);
+
+ setIdentityKeyPair(derivedKeys.keyPair);
+ setIdentityProof(proof);
+ setExecutionMode(selectedMode);
+ setEmitterAddress(emitter);
+ setNeedsIdentityCreation(false);
+ setNeedsModeSelection(false);
+ setSigningStep(null);
+
+ console.log(`[verbeth] identity created in ${selectedMode} mode for ${address.slice(0, 10)}...`);
+
+ onIdentityCreated?.();
+ setReinitTrigger((t) => t + 1);
+
+ } catch (signError: any) {
+ if (signError.code !== 4001) {
+ console.error('[verbeth] identity creation failed:', signError);
+ }
+ } finally {
+ setLoading(false);
+ setSigningStep(null);
+ }
+ }, [identitySigner, address, walletClient, chainId, identityContext, onIdentityCreated]);
+
+ // Handle initialization
+ useEffect(() => {
+ const handleInit = async () => {
+ try {
+ if (ready && readProvider && walletClient && address) {
+ await initializeWagmiAccount();
+ return;
+ }
+ if (!address) {
+ resetState();
+ }
+ } catch (error) {
+ console.error('[verbeth] initialization failed:', error);
+ }
+ };
+ handleInit();
+ }, [ready, readProvider, walletClient, address, reinitTrigger]);
+
+ return {
+ // Identity state
+ identityKeyPair,
+ identityProof,
+ executor,
+ contract,
+ identitySigner,
+ txSigner,
+ safeAddr,
+ currentAccount,
+ needsIdentityCreation,
+ identityLoading: loading,
+ identityContext,
+ signingStep,
+
+ // Mode state
+ executionMode,
+ needsModeSelection,
+ emitterAddress,
+ fastModeAvailable,
+ setExecutionMode,
+
+ // Session state
+ sessionSignerAddr,
+ needsSessionSetup,
+ isSafeDeployed,
+ isModuleEnabled,
+ setSessionSignerAddr,
+ setNeedsSessionSetup,
+ setIsSafeDeployed,
+ setIsModuleEnabled,
+
+ // Actions
+ createIdentity,
+ resetState,
+ triggerReinit: () => setReinitTrigger((t) => t + 1),
+ };
+}
\ No newline at end of file
diff --git a/apps/demo/src/hooks/useMessageListener.ts b/apps/demo/src/hooks/useMessageListener.ts
index 0239059..cfc3c9e 100644
--- a/apps/demo/src/hooks/useMessageListener.ts
+++ b/apps/demo/src/hooks/useMessageListener.ts
@@ -1,407 +1,341 @@
-// apps/demo/src/hooks/useMessageListener.ts
-
-import { useState, useEffect, useRef, useCallback } from "react";
-import { keccak256, toUtf8Bytes } from "ethers";
+import { useState, useEffect, useRef, useCallback, useMemo } from "react";
import { dbService } from "../services/DbService.js";
import {
- LOGCHAIN_SINGLETON_ADDR,
CONTRACT_CREATION_BLOCK,
INITIAL_SCAN_BLOCKS,
MAX_RETRIES,
MAX_RANGE_PROVIDER,
CHUNK_SIZE,
REAL_TIME_BUFFER,
- EVENT_SIGNATURES,
Contact,
- ScanProgress,
- ScanChunk,
ProcessedEvent,
MessageListenerResult,
+ ListenerSyncStatus,
+ ListenerHealthStatus,
+ PendingRange,
} from "../types.js";
+import { collectEventsForRange } from "./listener/eventQuerySpecs.js";
+import { createLogFetcher } from "./listener/logFetcher.js";
+import type { FetcherTelemetryEvent } from "./listener/logFetcher.js";
+import { clampCursorToTip, planRanges } from "./listener/scanPlanner.js";
+import {
+ dequeueRetryableRanges,
+ enqueueRanges,
+ loadSyncState,
+ saveSyncState,
+ toSyncStatus,
+} from "./listener/syncStateStore.js";
+import { evaluateHealth, pruneWindow, OK_HEALTH } from "./listener/healthScore.js";
+import type { HealthMetrics } from "./listener/healthScore.js";
interface UseMessageListenerProps {
readProvider: any;
address: string | undefined;
- onLog: (message: string) => void;
- onEventsProcessed: (events: ProcessedEvent[]) => void;
+ /** Safe address in fast mode, EOA in classic mode. Used for outbound confirmations. */
+ emitterAddress: string | undefined;
+ onEventsProcessed: (events: ProcessedEvent[]) => Promise<void>;
+ /** When provided, uses watchBlockNumber (WS subscription) instead of setInterval polling. */
+ viemClient?: any;
+ verbethClient?: any;
+}
+
+const IDLE_SYNC_STATUS: ListenerSyncStatus = {
+ mode: "idle",
+ pendingRanges: 0,
+ isComplete: false,
+};
+
+function toErrorMessage(error: unknown): string {
+ if (error instanceof Error) return error.message;
+ return String(error ?? "unknown error");
}
export const useMessageListener = ({
readProvider,
address,
- onLog,
+ emitterAddress,
onEventsProcessed,
+ viemClient,
+ verbethClient,
}: UseMessageListenerProps): MessageListenerResult => {
const [isInitialLoading, setIsInitialLoading] = useState(false);
const [isLoadingMore, setIsLoadingMore] = useState(false);
const [canLoadMore, setCanLoadMore] = useState(true);
- const [syncProgress, setSyncProgress] = useState(null);
+ const [syncProgress, setSyncProgress] = useState<{ current: number; total: number } | null>(null);
+ const [syncStatus, setSyncStatus] = useState(IDLE_SYNC_STATUS);
const [lastKnownBlock, setLastKnownBlock] = useState<number | null>(null);
- const [oldestScannedBlock, setOldestScannedBlock] = useState(
- null
- );
-
- const processedLogs = useRef(new Set());
- const scanChunks = useRef([]);
-
- const calculateRecipientHash = (recipientAddr: string) => {
- return keccak256(toUtf8Bytes(`contact:${recipientAddr.toLowerCase()}`));
- };
+ const [oldestScannedBlock, setOldestScannedBlock] = useState<number | null>(null);
+ const [health, setHealth] = useState(OK_HEALTH);
+
+ const lastKnownBlockRef = useRef<number | null>(null);
+ const onEventsProcessedRef = useRef(onEventsProcessed);
+ const isRealtimeScanRunningRef = useRef(false);
+ const isBackfillWorkerRunningRef = useRef(false);
+ const hasBootstrappedRef = useRef(false);
+
+ // Health metric refs
+ const rateLimitTimestampsRef = useRef<number[]>([]);
+ const wsErrorTimestampsRef = useRef<number[]>([]);
+ const tipLagStateRef = useRef<{ blocks: number; since: number | null }>({ blocks: 0, since: null });
+ const pendingRangeStateRef = useRef<{ count: number; since: number | null }>({ count: 0, since: null });
+
+ onEventsProcessedRef.current = onEventsProcessed;
+
+ const recomputeHealth = useCallback(() => {
+ rateLimitTimestampsRef.current = pruneWindow(rateLimitTimestampsRef.current, 60_000);
+ wsErrorTimestampsRef.current = pruneWindow(wsErrorTimestampsRef.current, 60_000);
+
+ const metrics: HealthMetrics = {
+ rateLimitEvents: rateLimitTimestampsRef.current,
+ wsErrors: wsErrorTimestampsRef.current,
+ pendingRanges: pendingRangeStateRef.current.count,
+ pendingRangesSince: pendingRangeStateRef.current.since,
+ tipLagBlocks: tipLagStateRef.current.blocks,
+ tipLagSince: tipLagStateRef.current.since,
+ syncMode: syncStatus.mode,
+ };
+ setHealth(evaluateHealth(metrics));
+ }, [syncStatus.mode]);
+
+ const handleTelemetry = useCallback((event: FetcherTelemetryEvent) => {
+ if (event.type === "rate_limit") {
+ rateLimitTimestampsRef.current.push(Date.now());
+ recomputeHealth();
+ }
+ }, [recomputeHealth]);
+
+ const logFetcher = useMemo(() => {
+ if (!readProvider) return null;
+ return createLogFetcher({
+ provider: readProvider,
+ maxRetries: MAX_RETRIES,
+ maxRangeProvider: MAX_RANGE_PROVIDER,
+ onTelemetry: handleTelemetry,
+ });
+ }, [readProvider, handleTelemetry]);
+
+ const refreshSyncStatus = useCallback(async () => {
+ if (!address) {
+ setSyncStatus(IDLE_SYNC_STATUS);
+ return;
+ }
+ const state = await loadSyncState(address);
+ const status = toSyncStatus(state);
+ setSyncStatus(status);
+
+ const prevCount = pendingRangeStateRef.current.count;
+ const newCount = status.pendingRanges;
+ if (newCount > 0 && prevCount === 0) {
+ pendingRangeStateRef.current = { count: newCount, since: Date.now() };
+ } else if (newCount === 0) {
+ pendingRangeStateRef.current = { count: 0, since: null };
+ } else {
+ pendingRangeStateRef.current = { ...pendingRangeStateRef.current, count: newCount };
+ }
+ }, [address]);
- // Load contacts directly from database when needed
const getCurrentContacts = useCallback(async (): Promise<Contact[]> => {
if (!address) return [];
try {
return await dbService.getAllContacts(address);
} catch (error) {
- onLog(`✗ Failed to load contacts: ${error}`);
+ console.error("[verbeth] failed to load contacts:", error);
return [];
}
- }, [address, onLog]);
-
- // RPC helper with retry logic
- const safeGetLogs = async (
- filter: any,
- fromBlock: number,
- toBlock: number,
- retries = MAX_RETRIES
- ): Promise => {
- let attempt = 0;
- let delay = 1000;
-
- while (attempt < retries) {
- try {
- if (fromBlock > toBlock) {
- onLog(`⚠️ Invalid block range: ${fromBlock} > ${toBlock}`);
- return [];
- }
+ }, [address]);
- if (toBlock - fromBlock > MAX_RANGE_PROVIDER) {
- const mid = fromBlock + Math.floor((toBlock - fromBlock) / 2);
- const firstHalf = await safeGetLogs(filter, fromBlock, mid, retries);
- const secondHalf = await safeGetLogs(
- filter,
- mid + 1,
- toBlock,
- retries
- );
- return [...firstHalf, ...secondHalf];
- }
+ const scanBlockRange = useCallback(
+ async (fromBlock: number, toBlock: number): Promise<ProcessedEvent[]> => {
+ if (!address || !logFetcher || fromBlock > toBlock) return [];
- return await readProvider.getLogs({
- ...filter,
- fromBlock,
- toBlock,
- });
- } catch (error: any) {
- attempt++;
-
- if (
- error.code === 429 ||
- error.message?.includes("rate") ||
- error.message?.includes("limit") ||
- error.message?.includes("invalid block range")
- ) {
- if (attempt < retries) {
- onLog(
- `! RPC error, retrying in ${delay}ms... (attempt ${attempt}/${retries})`
- );
- await new Promise((resolve) => setTimeout(resolve, delay));
- delay *= 1.5;
- continue;
- }
- }
+ const contacts = await getCurrentContacts();
+ const pendingContacts = contacts.filter((contact) => contact.status === "handshake_sent");
+ const activeTopics = await dbService.ratchet.getAllActiveInboundTopics(address);
- if (
- error.message?.includes("exceed") ||
- error.message?.includes("range")
- ) {
- onLog(`✗ Block range error, skipping range ${fromBlock}-${toBlock}`);
- return [];
- }
+ return collectEventsForRange({
+ fromBlock,
+ toBlock,
+ context: {
+ address,
+ emitterAddress,
+ activeTopics,
+ pendingContacts,
+ },
+ getLogs: async (filter, rangeStart, rangeEnd) => {
+ return logFetcher.getLogsForRange(filter, {
+ fromBlock: rangeStart,
+ toBlock: rangeEnd,
+ });
+ },
+ });
+ },
+ [address, emitterAddress, getCurrentContacts, logFetcher]
+ );
- onLog(`✗ RPC error on range ${fromBlock}-${toBlock}: ${error.message}`);
- return [];
+ const processRange = useCallback(
+ async (fromBlock: number, toBlock: number): Promise<ProcessedEvent[]> => {
+ const events = await scanBlockRange(fromBlock, toBlock);
+ if (events.length > 0) {
+ await onEventsProcessedRef.current(events);
}
- }
-
- onLog(
- `✗ Failed after ${retries} retries for range ${fromBlock}-${toBlock}`
- );
- return [];
- };
-
- // Smart chunking
- const findEventRanges = async (
- fromBlock: number,
- toBlock: number
- ): Promise<[number, number][]> => {
- const ranges: [number, number][] = [];
- let currentBlock = toBlock;
-
- while (currentBlock >= fromBlock) {
- const rangeStart = Math.max(currentBlock - CHUNK_SIZE, fromBlock);
- const rangeEnd = currentBlock;
-
- ranges.unshift([rangeStart, rangeEnd]);
- currentBlock = rangeStart - 1;
-
- if (ranges.length >= 5) break;
- }
-
- return ranges;
- };
-
- const batchScanRanges = async (
- ranges: [number, number][]
- ): Promise => {
- if (ranges.length > 1) {
- setSyncProgress({ current: 0, total: ranges.length });
- }
-
- let results: ProcessedEvent[] = [];
- let completedRanges = 0;
-
- for (const range of ranges) {
- const [start, end] = range;
- try {
- const chunkResults = await scanBlockRange(start, end);
- results = results.concat(chunkResults);
- completedRanges++;
-
- setSyncProgress({ current: completedRanges, total: ranges.length });
+ return events;
+ },
+ [scanBlockRange]
+ );
- if (completedRanges < ranges.length) {
- await new Promise((resolve) => setTimeout(resolve, 200));
- }
- } catch (error) {
- onLog(`✗ Failed to scan range ${start}-${end}: ${error}`);
+ const batchProcessRanges = useCallback(
+ async (
+ ranges: PendingRange[] | { fromBlock: number; toBlock: number }[],
+ options?: { showProgress?: boolean; stopOnError?: boolean }
+ ): Promise<ProcessedEvent[]> => {
+ const { showProgress = false, stopOnError = false } = options ?? {};
+ const allEvents: ProcessedEvent[] = [];
+ const failedRanges: PendingRange[] = [];
+
+ if (showProgress && ranges.length > 1) {
+ setSyncProgress({ current: 0, total: ranges.length });
}
- }
-
- setSyncProgress(null);
- return results;
- };
-
- // scan specific block range - load contacts from db when needed
- const scanBlockRange = async (
- fromBlock: number,
- toBlock: number
- ): Promise => {
- if (!address) return [];
- const contacts = await getCurrentContacts();
-
- const userRecipientHash = calculateRecipientHash(address);
- const allEvents: ProcessedEvent[] = [];
-
- try {
- const handshakeFilter = {
- address: LOGCHAIN_SINGLETON_ADDR,
- topics: [EVENT_SIGNATURES.Handshake, userRecipientHash],
- };
- const handshakeLogs = await safeGetLogs(
- handshakeFilter,
- fromBlock,
- toBlock
- );
-
- for (const log of handshakeLogs) {
- const logKey = `${log.transactionHash}-${log.logIndex}`;
- if (!processedLogs.current.has(logKey)) {
- processedLogs.current.add(logKey);
- allEvents.push({
- logKey,
- eventType: "handshake",
- rawLog: log,
- blockNumber: log.blockNumber,
- timestamp: Date.now(),
+ let completed = 0;
+ for (const range of ranges) {
+ try {
+ const events = await processRange(range.fromBlock, range.toBlock);
+ allEvents.push(...events);
+ } catch (error) {
+ console.error(
+ `[verbeth] scan failed for range ${range.fromBlock}-${range.toBlock}:`,
+ error
+ );
+ if (stopOnError) throw error;
+ failedRanges.push({
+ fromBlock: range.fromBlock,
+ toBlock: range.toBlock,
+ attempts: 1,
+ nextRetryAt: Date.now() + 1_500,
+ lastError: toErrorMessage(error),
});
}
- }
- const pendingContacts = contacts.filter(
- (c) => c.status === "handshake_sent"
- );
-
- if (pendingContacts.length > 0) {
- const responseFilter = {
- address: LOGCHAIN_SINGLETON_ADDR,
- topics: [EVENT_SIGNATURES.HandshakeResponse],
- };
- const responseLogs = await safeGetLogs(
- responseFilter,
- fromBlock,
- toBlock
- );
-
- onLog(
- `🔍 Found ${responseLogs.length} total handshake responses in blocks ${fromBlock}-${toBlock}`
- );
-
- // Match by responder address
- for (const log of responseLogs) {
- const responderAddress = "0x" + log.topics[2].slice(-40);
-
- const matchingContact = pendingContacts.find(
- (c) => c.address.toLowerCase() === responderAddress.toLowerCase()
- );
-
- if (matchingContact) {
- const logKey = `${log.transactionHash}-${log.logIndex}`;
- if (!processedLogs.current.has(logKey)) {
- processedLogs.current.add(logKey);
- allEvents.push({
- logKey,
- eventType: "handshake_response",
- rawLog: log,
- blockNumber: log.blockNumber,
- timestamp: Date.now(),
- });
- }
- }
+ completed += 1;
+ if (showProgress && ranges.length > 1) {
+ setSyncProgress({ current: completed, total: ranges.length });
}
}
- const establishedContacts = contacts.filter(
- (c) => c.status === "established"
- );
- if (establishedContacts.length > 0) {
- // 1) INBOUND ONLY: listen exclusively to topics where we receive messages
- const inboundTopics = establishedContacts
- .map((c) => c.topicInbound)
- .filter(Boolean);
-
- if (inboundTopics.length > 0) {
- const messageFilterIn = {
- address: LOGCHAIN_SINGLETON_ADDR,
- topics: [EVENT_SIGNATURES.MessageSent, null, inboundTopics],
- };
- const inboundLogs = await safeGetLogs(
- messageFilterIn,
- fromBlock,
- toBlock
- );
-
- for (const log of inboundLogs) {
- const logKey = `${log.transactionHash}-${log.logIndex}`;
- if (!processedLogs.current.has(logKey)) {
- processedLogs.current.add(logKey);
- allEvents.push({
- logKey,
- eventType: "message",
- rawLog: log,
- blockNumber: log.blockNumber,
- timestamp: Date.now(),
- });
- }
- }
- }
-
- // 2) OUTBOUND CONFIRMATION: we do not need topic filter, we match logs where sender = our address
- if (address) {
- const senderTopic =
- "0x000000000000000000000000" + address.slice(2).toLowerCase();
- const messageFilterOutConfirm = {
- address: LOGCHAIN_SINGLETON_ADDR,
- topics: [EVENT_SIGNATURES.MessageSent, senderTopic],
- };
- const outLogs = await safeGetLogs(
- messageFilterOutConfirm,
- fromBlock,
- toBlock
- );
+ setSyncProgress(null);
- for (const log of outLogs) {
- const logKey = `${log.transactionHash}-${log.logIndex}`;
- if (!processedLogs.current.has(logKey)) {
- processedLogs.current.add(logKey);
- allEvents.push({
- logKey,
- eventType: "message",
- rawLog: log,
- blockNumber: log.blockNumber,
- timestamp: Date.now(),
- });
- }
- }
- }
+ if (failedRanges.length > 0 && address) {
+ await enqueueRanges(address, failedRanges, "degraded");
+ await refreshSyncStatus();
}
- } catch (error) {
- onLog(`Error scanning block range ${fromBlock}-${toBlock}: ${error}`);
- }
- return allEvents;
- };
+ return allEvents;
+ },
+ [address, processRange, refreshSyncStatus]
+ );
const performInitialScan = useCallback(async () => {
- if (!readProvider || !address || isInitialLoading) return;
+ if (!readProvider || !address || !logFetcher) return;
- // check if initial scan already completed for this address
- const initialScanComplete = await dbService.getInitialScanComplete(address);
- if (initialScanComplete) {
- onLog(`Initial scan already completed for ${address.slice(0, 8)}...`);
+ setIsInitialLoading(true);
- const savedLastBlock = await dbService.getLastKnownBlock(address);
- const savedOldestBlock = await dbService.getOldestScannedBlock(address);
+ try {
+ const currentBlock = Number(await readProvider.getBlockNumber());
+ const initialScanComplete = !!(await dbService.getInitialScanComplete(address));
+ const savedLastBlockRaw = await dbService.getLastKnownBlock(address);
+ const savedOldestBlockRaw = await dbService.getOldestScannedBlock(address);
+ const savedLastBlock =
+ typeof savedLastBlockRaw === "number" ? savedLastBlockRaw : null;
+ const savedOldestBlock =
+ typeof savedOldestBlockRaw === "number" ? savedOldestBlockRaw : null;
+
+ const fallbackOldest = Math.max(
+ currentBlock - INITIAL_SCAN_BLOCKS,
+ CONTRACT_CREATION_BLOCK
+ );
+ const effectiveOldest = savedOldestBlock ?? fallbackOldest;
+ setOldestScannedBlock(effectiveOldest);
+ setCanLoadMore(effectiveOldest > CONTRACT_CREATION_BLOCK);
+
+ if (initialScanComplete) {
+ let cursor = savedLastBlock;
+ if (cursor === null) {
+ cursor = fallbackOldest;
+ console.warn("[verbeth] sync cursor missing, using fallback window cursor");
+ }
- if (savedLastBlock) setLastKnownBlock(savedLastBlock);
- if (savedOldestBlock) setOldestScannedBlock(savedOldestBlock);
+ cursor = clampCursorToTip(cursor, currentBlock);
- setCanLoadMore(
- savedOldestBlock ? savedOldestBlock > CONTRACT_CREATION_BLOCK : true
- );
- return;
- }
+ if (cursor < currentBlock) {
+ const catchUpRanges = planRanges(cursor + 1, currentBlock, CHUNK_SIZE);
+ await batchProcessRanges(catchUpRanges, {
+ showProgress: catchUpRanges.length > 1,
+ stopOnError: false,
+ });
+ }
- setIsInitialLoading(true);
- onLog(`...Starting initial scan of last ${INITIAL_SCAN_BLOCKS} blocks...`);
+ setLastKnownBlock(currentBlock);
+ lastKnownBlockRef.current = currentBlock;
+ await dbService.setLastKnownBlock(address, currentBlock);
+ const postState = await loadSyncState(address);
+ if (postState.pendingRanges.length === 0) {
+ await saveSyncState(address, {
+ pendingRanges: [],
+ status: "synced",
+ targetTip: currentBlock,
+ lastError: undefined,
+ });
+ } else {
+ await saveSyncState(address, {
+ targetTip: currentBlock,
+ });
+ }
+ await refreshSyncStatus();
+ return;
+ }
- try {
- const currentBlock = await readProvider.getBlockNumber();
const startBlock = Math.max(
currentBlock - INITIAL_SCAN_BLOCKS,
CONTRACT_CREATION_BLOCK
);
+ const initialRanges = planRanges(startBlock, currentBlock, CHUNK_SIZE);
- const events = await scanBlockRange(startBlock, currentBlock);
-
- onEventsProcessed(events);
-
- // store chunk info
- scanChunks.current = [
- {
- fromBlock: startBlock,
- toBlock: currentBlock,
- loaded: true,
- events: events.map((e) => e.rawLog),
- },
- ];
+ await batchProcessRanges(initialRanges, {
+ showProgress: initialRanges.length > 1,
+ stopOnError: true,
+ });
- // Update state and database
setLastKnownBlock(currentBlock);
+ lastKnownBlockRef.current = currentBlock;
setOldestScannedBlock(startBlock);
setCanLoadMore(startBlock > CONTRACT_CREATION_BLOCK);
await dbService.setLastKnownBlock(address, currentBlock);
await dbService.setOldestScannedBlock(address, startBlock);
await dbService.setInitialScanComplete(address, true);
-
- onLog(
- `Initial scan complete: ${events.length} events found in blocks ${startBlock}-${currentBlock}`
- );
+ await saveSyncState(address, {
+ pendingRanges: [],
+ status: "synced",
+ targetTip: currentBlock,
+ lastError: undefined,
+ });
+ await refreshSyncStatus();
} catch (error) {
- onLog(`✗ Initial scan failed: ${error}`);
+ console.error("[verbeth] scan failed during initial sync:", error);
+ await saveSyncState(address, {
+ status: "degraded",
+ lastError: toErrorMessage(error),
+ });
+ await refreshSyncStatus();
} finally {
setIsInitialLoading(false);
}
}, [
- readProvider,
address,
- isInitialLoading,
- onLog,
- onEventsProcessed,
- getCurrentContacts,
+ batchProcessRanges,
+ logFetcher,
+ readProvider,
+ refreshSyncStatus,
]);
const loadMoreHistory = useCallback(async () => {
@@ -410,147 +344,225 @@ export const useMessageListener = ({
!address ||
isLoadingMore ||
!canLoadMore ||
- !oldestScannedBlock
+ oldestScannedBlock === null
) {
return;
}
setIsLoadingMore(true);
- onLog(`...Loading more history...`);
try {
const endBlock = oldestScannedBlock - 1;
- const startBlock = Math.max(
- endBlock - INITIAL_SCAN_BLOCKS,
- CONTRACT_CREATION_BLOCK
- );
-
- let maxIndexedBlock = endBlock;
- for (let b = endBlock; b >= startBlock; b--) {
- const blk = await readProvider.getBlock(b);
- if (blk) {
- maxIndexedBlock = b;
- break;
- }
- }
-
- if (maxIndexedBlock < startBlock) {
- onLog(
- `⚠️ No indexed blocks found between ${startBlock} and ${endBlock}. Retrying later.`
- );
- setIsLoadingMore(false);
- return;
- }
-
- const safeStartBlock = Math.max(startBlock, CONTRACT_CREATION_BLOCK);
- const safeEndBlock = maxIndexedBlock;
-
- const ranges = await findEventRanges(safeStartBlock, safeEndBlock);
-
- if (ranges.length === 0) {
- onLog(`No more events found before block ${safeEndBlock}`);
+ if (endBlock < CONTRACT_CREATION_BLOCK) {
setCanLoadMore(false);
- setIsLoadingMore(false);
return;
}
- const events = await batchScanRanges(ranges);
-
- onEventsProcessed(events);
+ const startBlock = Math.max(
+ endBlock - INITIAL_SCAN_BLOCKS,
+ CONTRACT_CREATION_BLOCK
+ );
+ const ranges = planRanges(startBlock, endBlock, CHUNK_SIZE);
- scanChunks.current.push({
- fromBlock: safeStartBlock,
- toBlock: safeEndBlock,
- loaded: true,
- events: events.map((e) => e.rawLog),
+ await batchProcessRanges(ranges, {
+ showProgress: ranges.length > 1,
+ stopOnError: false,
});
- setOldestScannedBlock(safeStartBlock);
- setCanLoadMore(safeStartBlock > CONTRACT_CREATION_BLOCK);
- await dbService.setOldestScannedBlock(address, safeStartBlock);
-
- onLog(
- `Loaded ${events.length} more events from blocks ${safeStartBlock}-${safeEndBlock}`
- );
+ setOldestScannedBlock(startBlock);
+ setCanLoadMore(startBlock > CONTRACT_CREATION_BLOCK);
+ await dbService.setOldestScannedBlock(address, startBlock);
} catch (error) {
- onLog(`✗ Failed to load more history: ${error}`);
+ console.error("[verbeth] failed to load more history:", error);
} finally {
setIsLoadingMore(false);
+ setSyncProgress(null);
}
}, [
- readProvider,
address,
- isLoadingMore,
+ batchProcessRanges,
canLoadMore,
+ isLoadingMore,
oldestScannedBlock,
- onLog,
- onEventsProcessed,
+ readProvider,
]);
- // real time scanning for new blocks
useEffect(() => {
- if (!readProvider || !address || !lastKnownBlock) return;
+ if (!address || !readProvider || !logFetcher) return;
- const interval = setInterval(async () => {
- try {
- const currentBlock = await readProvider.getBlockNumber();
- const maxSafeBlock = currentBlock - REAL_TIME_BUFFER;
+ let disposed = false;
+
+ const runBackfillWorker = async () => {
+ if (disposed || isBackfillWorkerRunningRef.current) return;
+
+ isBackfillWorkerRunningRef.current = true;
- if (maxSafeBlock > lastKnownBlock) {
- const startScanBlock = lastKnownBlock + 1;
- const events = await scanBlockRange(startScanBlock, maxSafeBlock);
+ try {
+ const retryableRanges = await dequeueRetryableRanges(address, Date.now(), 1);
+ if (retryableRanges.length === 0) {
+ await refreshSyncStatus();
+ return;
+ }
- if (events.length > 0) {
- onEventsProcessed(events);
- onLog(
- `Found ${events.length} new events in blocks ${startScanBlock}-${maxSafeBlock}`
+ for (const range of retryableRanges) {
+ try {
+ await processRange(range.fromBlock, range.toBlock);
+ } catch (error) {
+ const attempts = range.attempts + 1;
+ const retryDelay =
+ Math.min(120_000, 1_500 * 2 ** Math.min(attempts, 6)) +
+ Math.floor(Math.random() * 600);
+
+ await enqueueRanges(
+ address,
+ [
+ {
+ ...range,
+ attempts,
+ nextRetryAt: Date.now() + retryDelay,
+ lastError: toErrorMessage(error),
+ },
+ ],
+ "degraded"
);
}
+ }
- setLastKnownBlock(maxSafeBlock);
- await dbService.setLastKnownBlock(address, maxSafeBlock);
+ const state = await loadSyncState(address);
+ if (state.pendingRanges.length === 0 && state.status !== "synced") {
+ await saveSyncState(address, {
+ status: "synced",
+ lastError: undefined,
+ });
}
+
+ await refreshSyncStatus();
+ } finally {
+ isBackfillWorkerRunningRef.current = false;
+ }
+ };
+
+ void runBackfillWorker();
+ const interval = setInterval(() => {
+ void runBackfillWorker();
+ }, 4_000);
+
+ return () => {
+ disposed = true;
+ clearInterval(interval);
+ };
+ }, [address, logFetcher, processRange, readProvider, refreshSyncStatus]);
+
+ useEffect(() => {
+ if (!readProvider || !address) return;
+
+ const handleNewBlock = async (blockNumber: number) => {
+ const currentLastKnown = lastKnownBlockRef.current;
+ if (currentLastKnown === null) return;
+ if (isRealtimeScanRunningRef.current) return;
+
+ isRealtimeScanRunningRef.current = true;
+
+ try {
+ const readTip = Number(await readProvider.getBlockNumber());
+ const maxSafeBlock = Math.min(blockNumber, readTip) - REAL_TIME_BUFFER;
+
+ // Track tip lag for health scoring
+ const lag = blockNumber - readTip;
+ if (lag >= 1) {
+ if (tipLagStateRef.current.since === null) {
+ tipLagStateRef.current = { blocks: lag, since: Date.now() };
+ } else {
+ tipLagStateRef.current = { ...tipLagStateRef.current, blocks: lag };
+ }
+ } else {
+ tipLagStateRef.current = { blocks: 0, since: null };
+ }
+
+ if (maxSafeBlock <= currentLastKnown) return;
+
+ await processRange(currentLastKnown + 1, maxSafeBlock);
+
+ setLastKnownBlock(maxSafeBlock);
+ lastKnownBlockRef.current = maxSafeBlock;
+ await dbService.setLastKnownBlock(address, maxSafeBlock);
+ } catch (error) {
+ console.error("[verbeth] real-time scan error:", error);
+ } finally {
+ isRealtimeScanRunningRef.current = false;
+ recomputeHealth();
+ }
+ };
+
+ if (viemClient) {
+ const unwatch = viemClient.watchBlockNumber({
+ onBlockNumber: (blockNumber: bigint) => {
+ void handleNewBlock(Number(blockNumber));
+ },
+ onError: () => {
+ wsErrorTimestampsRef.current.push(Date.now());
+ recomputeHealth();
+ },
+ emitOnBegin: false,
+ pollingInterval: 4_000,
+ });
+ return unwatch;
+ }
+
+ const interval = setInterval(async () => {
+ try {
+ const currentBlock = Number(await readProvider.getBlockNumber());
+ await handleNewBlock(currentBlock);
} catch (error) {
- onLog(`⚠️ Real-time scan error: ${error}`);
+ console.error("[verbeth] real-time scan error:", error);
}
- }, 5000);
+ }, 5_000);
return () => clearInterval(interval);
- }, [readProvider, address, lastKnownBlock, onLog, onEventsProcessed]);
+ }, [address, processRange, readProvider, recomputeHealth, viemClient]);
- // clear state when address changes
useEffect(() => {
- if (address) {
- setIsInitialLoading(false);
- setIsLoadingMore(false);
- setCanLoadMore(true);
- setSyncProgress(null);
- setLastKnownBlock(null);
- setOldestScannedBlock(null);
- processedLogs.current.clear();
- scanChunks.current = [];
- }
+ hasBootstrappedRef.current = false;
+ setIsInitialLoading(false);
+ setIsLoadingMore(false);
+ setCanLoadMore(true);
+ setSyncProgress(null);
+ setSyncStatus(IDLE_SYNC_STATUS);
+ setLastKnownBlock(null);
+ lastKnownBlockRef.current = null;
+ setOldestScannedBlock(null);
+ setHealth(OK_HEALTH);
+ rateLimitTimestampsRef.current = [];
+ wsErrorTimestampsRef.current = [];
+ tipLagStateRef.current = { blocks: 0, since: null };
+ pendingRangeStateRef.current = { count: 0, since: null };
+
+ if (!address) return;
+
+ void (async () => {
+ const persisted = await loadSyncState(address);
+ setSyncStatus(toSyncStatus(persisted));
+ })();
}, [address]);
-
useEffect(() => {
- if (
- readProvider &&
- address &&
- !isInitialLoading &&
- scanChunks.current.length === 0
- ) {
- performInitialScan();
+ if (!readProvider || !address || !logFetcher || !verbethClient || hasBootstrappedRef.current) {
+ return;
}
- }, [readProvider, address, performInitialScan]);
+
+ hasBootstrappedRef.current = true;
+ void performInitialScan();
+ }, [address, logFetcher, performInitialScan, readProvider, verbethClient]);
return {
isInitialLoading,
isLoadingMore,
canLoadMore,
syncProgress,
+ syncStatus,
loadMoreHistory,
lastKnownBlock,
oldestScannedBlock,
+ health,
};
};
diff --git a/apps/demo/src/hooks/useMessageProcessor.ts b/apps/demo/src/hooks/useMessageProcessor.ts
index bb2c2ad..8e9c591 100644
--- a/apps/demo/src/hooks/useMessageProcessor.ts
+++ b/apps/demo/src/hooks/useMessageProcessor.ts
@@ -1,93 +1,56 @@
-// apps/demo/src/hooks/useMessageProcessor.ts
+// src/hooks/useMessageProcessor.ts
-import { useState, useEffect, useCallback, useRef } from "react";
-import { AbiCoder } from "ethers";
-import {
- type IdentityContext,
- decryptMessage,
- parseHandshakePayload,
- verifyHandshakeIdentity,
- IdentityKeyPair,
- decodeUnifiedPubKeys,
- verifyAndExtractHandshakeResponseKeys,
- deriveDuplexTopics,
- verifyDerivedDuplexTopics,
- computeTagFromInitiator,
- pickOutboundTopic,
-} from "@verbeth/sdk";
+/**
+ * Message Processor Hook.
+ *
+ * Manages messaging state (messages, contacts, pendingHandshakes) and
+ * orchestrates event processing via EventProcessorService.
+ *
+ * Uses VerbethClient for session management and decryption.
+ */
+
+import { useState, useEffect, useCallback } from "react";
+import type { IdentityContext, IdentityKeyPair, VerbethClient } from "@verbeth/sdk";
import { dbService } from "../services/DbService.js";
+import {
+ processHandshakeEvent,
+ processHandshakeResponseEvent,
+ processMessageEvent,
+} from "../services/EventProcessorService.js";
import {
Contact,
Message,
PendingHandshake,
ProcessedEvent,
MessageProcessorResult,
- MessageDirection,
- MessageType,
- ContactStatus,
- generateTempMessageId,
} from "../types.js";
interface UseMessageProcessorProps {
readProvider: any;
address: string | undefined;
+ emitterAddress: string | undefined;
identityKeyPair: IdentityKeyPair | null;
identityContext: IdentityContext;
- onLog: (message: string) => void;
+ verbethClient: VerbethClient | null;
}
-/**
- * useMessageProcessor
- *
- * - Maintains a FIFO of pending outgoing messages per topic in-memory.
- * - Enqueue on send (and on DB restore), dequeue on on-chain confirmation.
- * - This guarantees correct matching when multiple messages are sent before the first confirms.
- *
- * - On confirmation:
- * 1) Prefer the in-memory FIFO (topic -> queue.shift()).
- * 2) If missing (refresh / multi-tab / race), fallback to DB.findPendingMessage(...).
- * 3) If still missing, synthesize a confirmed outgoing from the log (id = txHash-logIndex).
- *
- * - Dedup logic lives in DbService (skip dedup for pending; dedup confirmed by sender:topic:nonce).
- * - NB: listener should query only inbound topics; confirmations are filtered by sender=me.
- */
-
export const useMessageProcessor = ({
readProvider,
address,
+ emitterAddress,
identityKeyPair,
identityContext,
- onLog,
+ verbethClient,
}: UseMessageProcessorProps): MessageProcessorResult => {
+
const [messages, setMessages] = useState([]);
- const [pendingHandshakes, setPendingHandshakes] = useState<
- PendingHandshake[]
- >([]);
+ const [pendingHandshakes, setPendingHandshakes] = useState<PendingHandshake[]>([]);
const [contacts, setContacts] = useState([]);
- // Track pending outgoing per topic in-memory (FIFO queues)
- const pendingMessagesRef = useRef>(new Map());
-
- const hexToUint8Array = (hex: string): Uint8Array => {
- const cleanHex = hex.replace("0x", "");
- return new Uint8Array(
- cleanHex.match(/.{1,2}/g)?.map((byte) => parseInt(byte, 16)) || []
- );
- };
-
- const generateMessageId = (
- txHash: string,
- log: { logIndex?: number; index?: number }
- ): string => {
- const idx =
- typeof log.logIndex !== "undefined"
- ? log.logIndex
- : typeof log.index !== "undefined"
- ? log.index
- : 0;
- return `${txHash}-${idx}`;
- };
+ // ===========================================================================
+ // Load Data from Database
+ // ===========================================================================
const loadFromDatabase = useCallback(async () => {
if (!address) return;
@@ -102,632 +65,192 @@ export const useMessageProcessor = ({
setContacts(dbContacts);
setMessages(dbMessages);
setPendingHandshakes(dbPendingHandshakes);
-
- // Restore pending outgoing messages into the in-memory Map
- pendingMessagesRef.current.clear();
- dbMessages
- .filter(
- (msg) =>
- msg.status === "pending" &&
- msg.direction === "outgoing" &&
- msg.type === "text" &&
- msg.topic
- )
- .forEach((msg) => {
- const q = pendingMessagesRef.current.get(msg.topic) ?? [];
- q.push(msg);
- pendingMessagesRef.current.set(msg.topic, q);
- onLog(
- `Restored pending message for topic ${msg.topic.slice(
- 0,
- 10
- )}...: "${msg.decrypted?.slice(0, 30)}..."`
- );
- });
-
- onLog(
- `Loaded from DB for ${address.slice(0, 8)}...: ${
- dbContacts.length
- } contacts, ${dbMessages.length} messages (${
- pendingMessagesRef.current.size
- } pending), ${dbPendingHandshakes.length} pending handshakes`
- );
} catch (error) {
- onLog(`✗ Failed to load from database: ${error}`);
+ console.error(`[verbeth] database load failed:`, error);
}
- }, [address, onLog]);
-
- const processHandshakeLog = useCallback(
- async (event: ProcessedEvent): Promise => {
- if (!address || !readProvider) return;
-
- try {
- const log = event.rawLog;
- const abiCoder = new AbiCoder();
- const decoded = abiCoder.decode(["bytes", "bytes", "bytes"], log.data);
- const [
- identityPubKeyBytes,
- ephemeralPubKeyBytes,
- plaintextPayloadBytes,
- ] = decoded;
-
- const unifiedPubKeys = hexToUint8Array(identityPubKeyBytes);
- const decodedKeys = decodeUnifiedPubKeys(unifiedPubKeys);
-
- if (!decodedKeys) {
- onLog("✗ Failed to decode unified public keys");
- return;
- }
-
- const identityPubKey = decodedKeys.identityPubKey;
- const signingPubKey = decodedKeys.signingPubKey;
- const ephemeralPubKey = hexToUint8Array(ephemeralPubKeyBytes);
- const plaintextPayload = new TextDecoder().decode(
- hexToUint8Array(plaintextPayloadBytes)
- );
-
- const cleanSenderAddress = "0x" + log.topics[2].slice(-40);
- const recipientHash = log.topics[1];
-
- let handshakeContent;
- let hasValidIdentityProof = false;
-
- try {
- handshakeContent = parseHandshakePayload(plaintextPayload);
- hasValidIdentityProof = true;
- } catch (error) {
- handshakeContent = {
- plaintextPayload: plaintextPayload,
- identityProof: null,
- };
- hasValidIdentityProof = false;
- }
-
- let isVerified = false;
- if (hasValidIdentityProof) {
- try {
- const handshakeEvent = {
- recipientHash,
- sender: cleanSenderAddress,
- pubKeys: identityPubKeyBytes,
- ephemeralPubKey: ephemeralPubKeyBytes,
- plaintextPayload: plaintextPayload,
- };
-
- // Pass identityContext for domain & chain bound verification
- isVerified = await verifyHandshakeIdentity(
- handshakeEvent,
- readProvider,
- identityContext
- );
- } catch (error) {
- onLog(`Failed to verify handshake identity: ${error}`);
- }
- }
-
- const pendingHandshake: PendingHandshake = {
- id: log.transactionHash,
- ownerAddress: address,
- sender: cleanSenderAddress,
- identityPubKey,
- signingPubKey,
- ephemeralPubKey,
- message: handshakeContent.plaintextPayload,
- timestamp: Date.now(),
- blockNumber: log.blockNumber,
- verified: isVerified,
- };
-
- await dbService.savePendingHandshake(pendingHandshake);
-
- setPendingHandshakes((prev) => {
- const existing = prev.find((h) => h.id === pendingHandshake.id);
- if (existing) return prev;
- return [...prev, pendingHandshake];
- });
-
- const handshakeMessage: Message = {
- id: generateTempMessageId(),
- topic: "",
- sender: cleanSenderAddress,
- recipient: address,
- ciphertext: "",
- timestamp: Date.now(),
- blockTimestamp: Date.now(),
- blockNumber: log.blockNumber,
- direction: "incoming" as const,
- decrypted: `Request received: "${handshakeContent.plaintextPayload}"`,
- read: true,
- nonce: 0,
- dedupKey: `handshake-received-${log.transactionHash}`,
- type: "system" as const,
- ownerAddress: address,
- status: "confirmed" as const,
- verified: isVerified,
- };
-
- await dbService.saveMessage(handshakeMessage);
- setMessages((prev) => [...prev, handshakeMessage]);
-
- onLog(
- `📨 Handshake received from ${cleanSenderAddress.slice(0, 8)}... ${
- isVerified ? "✅" : "⚠️"
- }: "${handshakeContent.plaintextPayload}"`
- );
- } catch (error) {
- onLog(`✗ Failed to process handshake log: ${error}`);
- }
- },
- [address, readProvider, identityContext, onLog]
- );
-
- const processHandshakeResponseLog = useCallback(
- async (event: ProcessedEvent): Promise => {
- if (!address || !readProvider) return;
-
- try {
- const log = event.rawLog;
- const abiCoder = new AbiCoder();
- const [responderEphemeralRBytes, ciphertextBytes] = abiCoder.decode(
- ["bytes32", "bytes"],
- log.data
- );
-
- const ciphertextJson = new TextDecoder().decode(
- hexToUint8Array(ciphertextBytes)
- );
+ }, [address]);
- const responder = "0x" + log.topics[2].slice(-40);
- const inResponseTo = log.topics[1];
-
- const currentContacts = await dbService.getAllContacts(address);
-
- onLog(
- `🔍 Debug: Loaded ${currentContacts.length} contacts from DB for handshake response`
- );
-
- const contact = currentContacts.find(
- (c) =>
- c.address.toLowerCase() === responder.toLowerCase() &&
- c.status === "handshake_sent"
- );
-
- if (!contact || !contact.ephemeralKey) {
- onLog(
- `❓ Received handshake response from unknown contact: ${responder.slice(
- 0,
- 8
- )}...`
- );
- return;
- }
-
- const responseEvent = {
- inResponseTo,
- responder,
- responderEphemeralR: responderEphemeralRBytes,
- ciphertext: ciphertextJson,
- };
-
- // Pass identityContext for domain & chain bound verification
- const result = await verifyAndExtractHandshakeResponseKeys(
- responseEvent,
- contact.ephemeralKey, // initiator's ephemeral secret key
- readProvider,
- identityContext
- );
-
- if (!result.isValid || !result.keys) {
- onLog(
- `❌ Failed to verify handshake response from ${responder.slice(
- 0,
- 8
- )}... - invalid signature or tag mismatch`
- );
- return;
- }
-
- const { identityPubKey, signingPubKey, ephemeralPubKey, note } =
- result.keys;
-
- if (!identityKeyPair) {
- onLog(`❌ Cannot verify duplex topics: identityKeyPair is null`);
- return;
- }
-
- const saltHex = computeTagFromInitiator(
- contact.ephemeralKey, // Alice's ephemeral secret (stored when she sent handshake)
- hexToUint8Array(responderEphemeralRBytes) // Bob's public R from the response event
- );
- const salt = Uint8Array.from(Buffer.from(saltHex.slice(2), "hex"));
-
- const duplexTopics = deriveDuplexTopics(
- identityKeyPair.secretKey, // Alice's identity secret key
- identityPubKey, // Bob's identity public key (from response)
- salt
- );
- const isValidTopics = verifyDerivedDuplexTopics({
- myIdentitySecretKey: identityKeyPair.secretKey,
- theirIdentityPubKey: identityPubKey,
- topicInfo: {
- out: duplexTopics.topicOut,
- in: duplexTopics.topicIn,
- chk: duplexTopics.checksum,
- },
- salt,
- });
- if (!isValidTopics) {
- onLog(
- `❌ Invalid duplex topics checksum for ${responder.slice(0, 8)}...`
- );
- return;
- }
-
- onLog(
- `✅ Handshake response verified from ${responder.slice(0, 8)}...`
- );
-
- const updatedContact: Contact = {
- ...contact,
- status: "established" as ContactStatus,
- identityPubKey,
- signingPubKey,
- ephemeralKey: undefined,
- topicOutbound: pickOutboundTopic(true, duplexTopics), // Alice is initiator
- topicInbound: pickOutboundTopic(false, duplexTopics), // Bob is responder
- lastMessage: note || "Handshake accepted",
- lastTimestamp: Date.now(),
- };
-
- await dbService.saveContact(updatedContact);
-
- setContacts((prev) =>
- prev.map((c) =>
- c.address.toLowerCase() === responder.toLowerCase()
- ? updatedContact
- : c
- )
- );
-
- onLog(
- `🤝 Handshake completed with ${responder.slice(0, 8)}... : "${
- note || "No message"
- }"`
- );
-
- const responseMessage: Message = {
- id: generateTempMessageId(),
- topic: updatedContact.topicInbound || "",
- sender: responder,
- recipient: address,
- ciphertext: "",
- timestamp: Date.now(),
- blockTimestamp: Date.now(),
- blockNumber: 0,
- direction: "incoming" as const,
- decrypted: `Request accepted: "${note || "No message"}"`,
- read: true,
- nonce: 0,
- dedupKey: `handshake-response-${inResponseTo}`,
- type: "system" as const,
- ownerAddress: address,
- status: "confirmed" as const,
- verified: true,
- };
-
- await dbService.saveMessage(responseMessage);
- setMessages((prev) => [...prev, responseMessage]);
- } catch (error) {
- onLog(`✗ Failed to process handshake response log: ${error}`);
- }
- },
- [address, readProvider, identityKeyPair, identityContext, onLog]
- );
-
- const processMessageLog = useCallback(
- async (event: ProcessedEvent): Promise => {
- if (!address || !identityKeyPair) return;
-
- try {
- const log = event.rawLog;
- const abiCoder = new AbiCoder();
- const decoded = abiCoder.decode(
- ["bytes", "uint256", "uint256"],
- log.data
- );
- const [ciphertextBytes, timestamp, nonce] = decoded;
- const topic = log.topics[2];
- const sender = "0x" + log.topics[1].slice(-40);
- const key = `${address.toLowerCase()}:${generateMessageId(log.transactionHash, log)}`;
-
- const ciphertextJson = new TextDecoder().decode(
- hexToUint8Array(ciphertextBytes)
- );
- const isOurMessage = sender.toLowerCase() === address.toLowerCase();
+ // ===========================================================================
+ // Event Processing
+ // ===========================================================================
- if (!isOurMessage) {
- const already = await dbService.getByDedupKey(key);
- if (already) return;
- }
+ const processEvents = useCallback(
+ async (events: ProcessedEvent[]) => {
+ if (!address) return;
- onLog(
- `🔍 Processing message log: sender=${sender.slice(
- 0,
- 8
- )}..., isOurMessage=${isOurMessage}, topic=${topic.slice(
- 0,
- 10
- )}..., nonce=${Number(nonce)}`
- );
+ for (const event of events) {
+ switch (event.eventType) {
+ // -----------------------------------------------------------------
+ // HANDSHAKE
+ // -----------------------------------------------------------------
+ case "handshake": {
+ if (!verbethClient) {
+ throw new Error("message processor is not ready: missing verbethClient");
+ }
- // OUTGOING MESSAGE CONFIRMATION
- if (isOurMessage) {
- onLog(
- `🔄 Confirming our outgoing message: topic=${topic.slice(
- 0,
- 10
- )}..., nonce=${Number(nonce)}`
- );
-
- const q = pendingMessagesRef.current.get(topic) ?? [];
- const pendingMessage = q.shift(); // confirm the oldest
- pendingMessagesRef.current.set(topic, q);
-
- if (pendingMessage) {
- onLog(
- `Matched pending by topic. Content preview:: "${pendingMessage.decrypted?.slice(
- 0,
- 100
- )}..."`
+ const result = await processHandshakeEvent(
+ event,
+ address,
+ readProvider,
+ identityContext,
+ verbethClient,
);
- const newId = generateMessageId(log.transactionHash, log);
- const confirmedMessage: Message = {
- ...pendingMessage,
- id: newId,
- blockNumber: log.blockNumber,
- blockTimestamp: Date.now(),
- ciphertext: ciphertextJson,
- nonce: Number(nonce),
- dedupKey: key,
- status: "confirmed",
- };
-
- if (q.length === 0) {
- pendingMessagesRef.current.delete(topic);
+ if (result) {
+ setPendingHandshakes((prev) => {
+ const existing = prev.find((h) => h.id === result.pendingHandshake.id);
+ if (existing) return prev;
+ return [...prev, result.pendingHandshake];
+ });
+ setMessages((prev) => [...prev, result.systemMessage]);
}
+ break;
+ }
- await dbService.updateMessage(pendingMessage.id, confirmedMessage);
- await dbService.upsertDedup({
- key,
- messageId: newId,
- txHash: log.transactionHash,
- blockNumber: log.blockNumber,
- });
-
- setMessages((prev) =>
- prev.map((m) =>
- m.id === pendingMessage.id ? confirmedMessage : m
- )
- );
+ // -----------------------------------------------------------------
+ // HANDSHAKE RESPONSE - requires verbethClient for session creation
+ // -----------------------------------------------------------------
+ case "handshake_response": {
+ if (!identityKeyPair || !verbethClient) {
+ throw new Error(
+ "message processor is not ready: missing identity or verbethClient"
+ );
+ }
- onLog(
- `✅ Outgoing message confirmed: "${pendingMessage.decrypted?.slice(
- 0,
- 30
- )}..." (${pendingMessage.id} → ${newId})`
- );
- } else {
- const dbFallback = await dbService.findPendingMessage(
+ const result = await processHandshakeResponseEvent(
+ event,
address,
- topic,
- Number(nonce),
- address
+ readProvider,
+ identityContext,
+ verbethClient,
);
- // for "lastMessage" updates
- const allContacts = await dbService.getAllContacts(address);
- const byTopic = allContacts.find((c) => c.topicOutbound === topic);
-
- const newId = generateMessageId(log.transactionHash, log);
- const confirmed: Message = {
- id: newId,
- topic,
- sender: address,
- recipient: byTopic?.address,
- ciphertext: ciphertextJson,
- timestamp: Number(timestamp) * 1000,
- blockTimestamp: Date.now(),
- blockNumber: log.blockNumber,
- direction: "outgoing",
- read: true,
- decrypted: dbFallback?.decrypted,
- type: "text",
- nonce: Number(nonce),
- dedupKey: key,
- ownerAddress: address,
- status: "confirmed",
- };
-
- if (dbFallback) {
- // Replace the pending row in-place (preserves the bubble)
- await dbService.updateMessage(dbFallback.id, confirmed);
- await dbService.upsertDedup({
- key,
- messageId: newId,
- txHash: log.transactionHash,
- blockNumber: log.blockNumber,
- });
- setMessages((prev) =>
- prev.map((m) => (m.id === dbFallback.id ? confirmed : m))
- );
- onLog(
- `✅ Outgoing message confirmed (fallback): "${
- confirmed.decrypted?.slice(0, 30) ?? ""
- }" (${dbFallback.id} → ${newId})`
+ if (result) {
+ setContacts((prev) =>
+ prev.map((c) =>
+ c.address.toLowerCase() === result.updatedContact.address.toLowerCase()
+ ? result.updatedContact
+ : c
+ )
);
- } else {
- await dbService.saveMessage(confirmed);
- await dbService.upsertDedup({
- key,
- messageId: newId,
- txHash: log.transactionHash,
- blockNumber: log.blockNumber,
- });
- setMessages((prev) => [...prev, confirmed]);
- onLog(`✅ Outgoing message confirmed (synthesized): ${newId}`);
+ setMessages((prev) => [...prev, result.systemMessage]);
}
+ break;
}
- return;
- }
+ // -----------------------------------------------------------------
+ // MESSAGE - Uses VerbethClient for decryption
+ // -----------------------------------------------------------------
+ case "message": {
+ if (!verbethClient) {
+ throw new Error("message processor is not ready: missing verbethClient");
+ }
- // INCOMING MESSAGE
- const currentContacts = await dbService.getAllContacts(address);
- const contact = currentContacts.find(
- (c) =>
- c.address.toLowerCase() === sender.toLowerCase() &&
- c.status === "established"
- );
+ const result = await processMessageEvent(
+ event,
+ address,
+ emitterAddress,
+ verbethClient,
+ );
- if (!contact || !contact.identityPubKey || !contact.signingPubKey) {
- onLog(
- `❓ Received message from unknown contact: ${sender.slice(0, 8)}...`
- );
- return;
+ if (result) {
+ if (result.newMessage) {
+ setMessages((prev) => {
+ const existing = prev.find((m) => m.id === result.newMessage!.id);
+ if (existing) return prev;
+ return [...prev, result.newMessage!];
+ });
+ }
+
+ if (result.messageUpdate) {
+ const [originalId, updates] = result.messageUpdate;
+ setMessages((prev) =>
+ prev.map((m) => (m.id === originalId ? { ...m, ...updates } : m))
+ );
+ }
+
+ if (result.contactUpdate) {
+ setContacts((prev) =>
+ prev.map((c) =>
+ c.address.toLowerCase() === result.contactUpdate!.address.toLowerCase()
+ ? result.contactUpdate!
+ : c
+ )
+ );
+ }
+ }
+ break;
+ }
}
+ }
+ },
+ [address, readProvider, identityKeyPair, identityContext, emitterAddress, verbethClient]
+ );
- if (contact.topicInbound && topic !== contact.topicInbound) {
- onLog(
- `❌ Message topic mismatch from ${sender.slice(
- 0,
- 8
- )}... - expected ${contact.topicInbound.slice(
- 0,
- 10
- )}..., got ${topic.slice(0, 10)}...`
- );
- return;
- }
+ // ===========================================================================
+ // CRUD Operations
+ // ===========================================================================
- const decryptedMessage = decryptMessage(
- ciphertextJson,
- identityKeyPair.secretKey,
- contact.signingPubKey
- );
-
- if (!decryptedMessage) {
- onLog(`✗ Failed to decrypt message from ${sender.slice(0, 8)}...`);
- return;
- }
+ const addMessage = useCallback(
+ async (message: Message) => {
+ if (!address) return;
- const message: Message = {
- id: generateMessageId(log.transactionHash, log),
- topic: topic,
- sender: sender,
- recipient: address,
- ciphertext: ciphertextJson,
- timestamp: Number(timestamp) * 1000,
- blockTimestamp: Date.now(),
- blockNumber: log.blockNumber,
- direction: "incoming" as MessageDirection,
- decrypted: decryptedMessage,
- read: false,
- nonce: Number(nonce),
- dedupKey: key,
- type: "text" as MessageType,
- ownerAddress: address,
- status: "confirmed",
- };
-
- const saved = await dbService.saveMessage(message);
-
- if (saved) {
- await dbService.upsertDedup({
- key,
- messageId: message.id,
- txHash: log.transactionHash,
- blockNumber: log.blockNumber,
- });
- setMessages((prev) => {
- const existing = prev.find((m) => m.id === message.id);
- if (existing) return prev;
- return [...prev, message];
- });
-
- const updatedContact: Contact = {
- ...contact,
- lastMessage: decryptedMessage,
- lastTimestamp: Date.now(),
- };
-
- await dbService.saveContact(updatedContact);
-
- setContacts((prev) =>
- prev.map((c) =>
- c.address.toLowerCase() === sender.toLowerCase()
- ? updatedContact
- : c
- )
- );
-
- onLog(`Message from ${sender.slice(0, 8)}...: "${decryptedMessage}"`);
- }
- } catch (error) {
- onLog(`✗ Failed to process message log: ${error}`);
+ const messageWithOwner = { ...message, ownerAddress: address };
+ const saved = await dbService.saveMessage(messageWithOwner);
+ if (saved) {
+ setMessages((prev) => [...prev, messageWithOwner]);
}
},
- [address, identityKeyPair, onLog]
+ [address]
);
- const processEvents = useCallback(
- async (events: ProcessedEvent[]) => {
- for (const event of events) {
- switch (event.eventType) {
- case "handshake":
- await processHandshakeLog(event);
- break;
- case "handshake_response":
- await processHandshakeResponseLog(event);
- break;
- case "message":
- await processMessageLog(event);
- break;
- }
+ const updateMessageStatus = useCallback(
+ async (messageId: string, status: Message["status"], error?: string) => {
+ const updates: Partial<Message> = { status };
+ await dbService.updateMessage(messageId, updates);
+
+ setMessages((prev) =>
+ prev.map((m) => (m.id === messageId ? { ...m, status } : m))
+ );
+
+ if (status === "failed" && error) {
+ console.error(`[verbeth] message ${messageId.slice(0, 8)}... failed: ${error}`);
}
},
- [processHandshakeLog, processHandshakeResponseLog, processMessageLog]
+ []
);
- const addMessage = useCallback(
- async (message: Message) => {
- if (!address) return;
+ const removeMessage = useCallback(async (messageId: string) => {
+ await dbService.deleteMessage(messageId);
+ setMessages((prev) => prev.filter((m) => m.id !== messageId));
+ }, []);
- const messageWithOwner = { ...message, ownerAddress: address };
- // Track pending outgoing in the in-memory Map
- if (
- messageWithOwner.status === "pending" &&
- messageWithOwner.direction === "outgoing" &&
- messageWithOwner.type === "text" &&
- messageWithOwner.topic
- ) {
- const q = pendingMessagesRef.current.get(messageWithOwner.topic) ?? [];
- q.push(messageWithOwner);
- pendingMessagesRef.current.set(messageWithOwner.topic, q);
- onLog(
- `Registered pending message for topic ${messageWithOwner.topic.slice(
- 0,
- 10
- )}...`
+ const markMessagesLost = useCallback(
+ async (contactAddress: string, afterTimestamp: number): Promise<number> => {
+ if (!address) return 0;
+
+ const count = await dbService.markMessagesAsLost(address, contactAddress, afterTimestamp);
+
+ if (count > 0) {
+ const normalizedContact = contactAddress.toLowerCase();
+ setMessages((prev) =>
+ prev.map((m) => {
+ if (
+ m.direction === 'outgoing' &&
+ m.recipient?.toLowerCase() === normalizedContact &&
+ m.timestamp > afterTimestamp &&
+ m.type !== 'system'
+ ) {
+ return { ...m, isLost: true };
+ }
+ return m;
+ })
);
}
- const saved = await dbService.saveMessage(messageWithOwner);
- if (saved) {
- setMessages((prev) => [...prev, messageWithOwner]);
- }
+ return count;
},
- [address, onLog]
+ [address]
);
const removePendingHandshake = useCallback(async (id: string) => {
@@ -747,10 +270,12 @@ export const useMessageProcessor = ({
[address]
);
- // cleanup and reload when address changes
+ // ===========================================================================
+ // Effects
+ // ===========================================================================
+
useEffect(() => {
if (address) {
- pendingMessagesRef.current.clear();
setMessages([]);
setContacts([]);
setPendingHandshakes([]);
@@ -763,8 +288,11 @@ export const useMessageProcessor = ({
pendingHandshakes,
contacts,
addMessage,
+ updateMessageStatus,
+ removeMessage,
removePendingHandshake,
updateContact,
processEvents,
+ markMessagesLost,
};
};
diff --git a/apps/demo/src/hooks/useMessageQueue.ts b/apps/demo/src/hooks/useMessageQueue.ts
new file mode 100644
index 0000000..5ab791e
--- /dev/null
+++ b/apps/demo/src/hooks/useMessageQueue.ts
@@ -0,0 +1,369 @@
+// src/hooks/useMessageQueue.ts
+
+/**
+ * Message Queue Hook for Sequential Processing with Optimistic UI.
+ *
+ * Uses VerbethClient's two-phase commit pattern:
+ * 1. prepareMessage() - get ID and encrypted payload
+ * 2. Submit tx manually
+ * 3. confirmTx() on chain confirmation
+ *
+ * The key insight: we use prepareMessage()'s ID for BOTH the optimistic
+ * message AND the pending record, so confirmTx() can find the right message.
+ */
+
+import { useCallback, useRef, useEffect } from "react";
+import { hexlify } from "ethers";
+import type { VerbethClient } from "@verbeth/sdk";
+import { Contact, Message } from "../types.js";
+import { dbService } from "../services/DbService.js";
+
+
+export type QueuedMessageStatus =
+ | "queued" // In queue, waiting to be sent
+ | "sending" // Currently being encrypted/submitted
+ | "pending" // Submitted, awaiting confirmation
+ | "confirmed" // Confirmed on-chain
+ | "failed"; // Failed to send
+
+export interface QueuedMessage {
+ id: string;
+ conversationId: string;
+ contact: Contact;
+ plaintext: string;
+ status: QueuedMessageStatus;
+ error?: string;
+ txHash?: string;
+ createdAt: number;
+}
+
+interface ConversationQueue {
+ messages: QueuedMessage[];
+ isProcessing: boolean;
+}
+
+interface UseMessageQueueProps {
+ verbethClient: VerbethClient | null;
+ addMessage: (message: Message) => Promise;
+ updateMessageStatus: (id: string, status: Message["status"], error?: string) => Promise;
+ removeMessage: (id: string) => Promise;
+ updateContact: (contact: Contact) => Promise;
+}
+
+
+export const useMessageQueue = ({
+ verbethClient,
+ addMessage,
+ updateMessageStatus,
+ removeMessage,
+ updateContact,
+}: UseMessageQueueProps) => {
+
+ const queuesRef = useRef>(new Map());
+ const failedMessagesRef = useRef>(new Map());
+ const mountedRef = useRef(true);
+
+ useEffect(() => {
+ mountedRef.current = true;
+ return () => {
+ mountedRef.current = false;
+ };
+ }, []);
+
+ // ===========================================================================
+ // Queue Processor - Uses prepareMessage() for ID-first approach
+ // ===========================================================================
+
+ const processQueue = useCallback(async (conversationId: string) => {
+ if (!verbethClient || !mountedRef.current) return;
+
+ const queue = queuesRef.current.get(conversationId);
+ if (!queue || queue.isProcessing || queue.messages.length === 0) return;
+
+ queue.isProcessing = true;
+
+ while (queue.messages.length > 0 && mountedRef.current) {
+ const queuedMsg = queue.messages[0];
+
+ // Skip already processed messages
+ if (queuedMsg.status === "confirmed" || queuedMsg.status === "pending") {
+ queue.messages.shift();
+ continue;
+ }
+
+ if (queuedMsg.status === "failed") {
+ queue.messages.shift();
+ continue;
+ }
+
+ // Track prepared ID for error handling (survives into catch block)
+ let preparedId: string | null = null;
+
+ try {
+ queuedMsg.status = "sending";
+
+ // =====================================================================
+ // Step 1: Prepare message - this gives us the ID and encrypted payload
+ // Session is committed immediately for forward secrecy
+ // =====================================================================
+ const prepared = await verbethClient.prepareMessage(
+ conversationId,
+ queuedMsg.plaintext
+ );
+
+ // Update queuedMsg.id IMMEDIATELY so catch block has correct ID
+ preparedId = prepared.id;
+ queuedMsg.id = prepared.id;
+
+ // =====================================================================
+ // Step 2: Create optimistic message with the SAME ID as prepared
+ // This is the key fix - both share prepared.id
+ // =====================================================================
+ const optimisticMessage: Message = {
+ id: prepared.id,
+ topic: prepared.topic,
+ sender: verbethClient.userAddress,
+ recipient: queuedMsg.contact.address,
+ ciphertext: "",
+ timestamp: prepared.createdAt,
+ blockTimestamp: prepared.createdAt,
+ blockNumber: 0,
+ direction: "outgoing",
+ decrypted: queuedMsg.plaintext,
+ read: true,
+ nonce: prepared.messageNumber,
+ dedupKey: `pending-${prepared.id}`,
+ type: "text",
+ ownerAddress: verbethClient.userAddress,
+ status: "pending",
+ };
+
+ await addMessage(optimisticMessage);
+
+ // =====================================================================
+ // Step 3: Create pending record (SDK's PendingStore via StorageAdapter)
+ // =====================================================================
+ await dbService.ratchet.savePendingOutbound({
+ id: prepared.id,
+ conversationId,
+ topic: prepared.topic,
+ payloadHex: hexlify(prepared.payload),
+ plaintext: queuedMsg.plaintext,
+ sessionStateBefore: JSON.stringify({ epoch: prepared.sessionBefore.topicEpoch }),
+ sessionStateAfter: JSON.stringify({ epoch: prepared.sessionAfter.topicEpoch }),
+ createdAt: prepared.createdAt,
+ txHash: null,
+ status: 'preparing',
+ });
+
+ // =====================================================================
+ // Step 4: Submit transaction
+ // =====================================================================
+ const timestamp = Math.floor(Date.now() / 1000);
+ const tx = await verbethClient.executorInstance.sendMessage(
+ prepared.payload,
+ prepared.topic,
+ timestamp,
+ BigInt(prepared.messageNumber)
+ );
+
+ // =====================================================================
+ // Step 5: Update pending with txHash
+ // =====================================================================
+ await dbService.ratchet.updatePendingOutboundStatus(prepared.id, 'submitted', tx.hash);
+
+ queuedMsg.txHash = tx.hash;
+ queuedMsg.status = "pending";
+
+ // Update contact with current topic (may have ratcheted)
+ const session = await verbethClient.getSession(conversationId);
+ if (session) {
+ const updatedContact: Contact = {
+ ...queuedMsg.contact,
+ topicOutbound: session.currentTopicOutbound,
+ topicInbound: session.currentTopicInbound,
+ lastMessage: queuedMsg.plaintext,
+ lastTimestamp: Date.now(),
+ };
+ await updateContact(updatedContact);
+ }
+
+ queue.messages.shift();
+
+ } catch (error) {
+ const errorMessage = error instanceof Error ? error.message : "Unknown error";
+ queuedMsg.status = "failed";
+ queuedMsg.error = errorMessage;
+
+ // Use preparedId if message was already created in DB, otherwise use original queue id
+ const messageId = preparedId ?? queuedMsg.id;
+
+ // Note: Ratchet slot may already be burned (session was committed in prepareMessage)
+ await updateMessageStatus(messageId, "failed", errorMessage);
+
+ console.error(`[verbeth] send failed: ${errorMessage}`);
+
+ // Store failed message for retry/cancel (use correct ID)
+ queuedMsg.id = messageId;
+ failedMessagesRef.current.set(messageId, { ...queuedMsg });
+
+ // Remove from active queue
+ queue.messages.shift();
+ }
+ }
+
+ queue.isProcessing = false;
+ }, [verbethClient, addMessage, updateContact, updateMessageStatus]);
+
+
+ /**
+ * Queue a message for sending.
+ */
+ const queueMessage = useCallback(async (
+ contact: Contact,
+ messageText: string
+ ): Promise => {
+ if (!verbethClient || !contact.conversationId) return null;
+
+ const conversationId = contact.conversationId;
+
+ // Use a temporary ID for queue tracking only (will be replaced with prepared.id)
+ const tempId = `queue-${Date.now()}-${Math.random().toString(36).slice(2, 9)}`;
+
+ // Create queued message (optimistic UI created later with correct ID)
+ const queuedMessage: QueuedMessage = {
+ id: tempId,
+ conversationId,
+ contact,
+ plaintext: messageText,
+ status: "queued",
+ createdAt: Date.now(),
+ };
+
+ // Get or create queue for this conversation
+ let queue = queuesRef.current.get(conversationId);
+ if (!queue) {
+ queue = { messages: [], isProcessing: false };
+ queuesRef.current.set(conversationId, queue);
+ }
+
+ queue.messages.push(queuedMessage);
+ console.log(`[verbeth] message queued (temp ID: ${tempId}) for conversation ${conversationId}`);
+
+ // Trigger queue processing (non-blocking)
+ setTimeout(() => processQueue(conversationId), 0);
+
+ return tempId;
+ }, [verbethClient, processQueue]);
+
+ const retryMessage = useCallback(async (messageId: string): Promise => {
+ const failedMessage = failedMessagesRef.current.get(messageId);
+
+ if (failedMessage) {
+ const conversationId = failedMessage.conversationId;
+
+ failedMessagesRef.current.delete(messageId);
+ await removeMessage(messageId);
+
+ // Reset status for retry (will get new ID in processQueue)
+ failedMessage.id = `queue-${Date.now()}-${Math.random().toString(36).slice(2, 9)}`;
+ failedMessage.status = "queued";
+ failedMessage.error = undefined;
+ failedMessage.createdAt = Date.now();
+
+ let queue = queuesRef.current.get(conversationId);
+ if (!queue) {
+ queue = { messages: [], isProcessing: false };
+ queuesRef.current.set(conversationId, queue);
+ }
+
+ // Add to end of queue
+ queue.messages.push(failedMessage);
+
+ console.log(`[verbeth] retrying message (temp ID: ${failedMessage.id}) for conversation ${conversationId}`);
+
+ // Trigger processing
+ setTimeout(() => processQueue(conversationId), 0);
+
+ return true;
+ }
+
+ return false;
+ }, [removeMessage, processQueue]);
+
+ /**
+ * Cancel/delete a failed or queued message.
+ */
+ const cancelMessage = useCallback(async (messageId: string): Promise => {
+ // Check failed messages map first
+ const failedMessage = failedMessagesRef.current.get(messageId);
+
+ if (failedMessage) {
+ failedMessagesRef.current.delete(messageId);
+
+ await removeMessage(messageId);
+ return true;
+ }
+
+ // Fallback: check active queues
+ for (const [, queue] of queuesRef.current.entries()) {
+ const messageIndex = queue.messages.findIndex(
+ m => m.id === messageId && (m.status === "queued" || m.status === "failed")
+ );
+
+ if (messageIndex !== -1) {
+ const message = queue.messages[messageIndex];
+ queue.messages.splice(messageIndex, 1);
+
+ await removeMessage(messageId);
+ return true;
+ }
+ }
+
+ return false;
+ }, [removeMessage]);
+
+ /**
+ * Get queue status for a conversation.
+ */
+ const getQueueStatus = useCallback((conversationId: string): {
+ queueLength: number;
+ isProcessing: boolean;
+ pendingMessages: QueuedMessage[];
+ } => {
+ const queue = queuesRef.current.get(conversationId);
+ if (!queue) {
+ return { queueLength: 0, isProcessing: false, pendingMessages: [] };
+ }
+ return {
+ queueLength: queue.messages.length,
+ isProcessing: queue.isProcessing,
+ pendingMessages: [...queue.messages],
+ };
+ }, []);
+
+ /**
+ * Invalidate cached session for a conversation.
+ */
+ const invalidateSessionCache = useCallback((conversationId: string) => {
+ verbethClient?.invalidateSessionCache(conversationId);
+ }, [verbethClient]);
+
+ /**
+ * Clear all queues (e.g., on logout).
+ */
+ const clearAllQueues = useCallback(() => {
+ queuesRef.current.clear();
+ failedMessagesRef.current.clear();
+ verbethClient?.clearSessionCache();
+ }, [verbethClient]);
+
+ return {
+ queueMessage,
+ retryMessage,
+ cancelMessage,
+ getQueueStatus,
+ invalidateSessionCache,
+ clearAllQueues,
+ };
+};
\ No newline at end of file
diff --git a/apps/demo/src/hooks/usePendingSessionReset.ts b/apps/demo/src/hooks/usePendingSessionReset.ts
new file mode 100644
index 0000000..fef3e82
--- /dev/null
+++ b/apps/demo/src/hooks/usePendingSessionReset.ts
@@ -0,0 +1,39 @@
+// src/hooks/usePendingSessionReset.ts
+
+import { useMemo } from 'react';
+import type { Contact, PendingHandshake } from '../types.js';
+
+interface PendingSessionReset {
+ hasPendingReset: boolean;
+ pendingHandshake: PendingHandshake | null;
+ limboAfterTimestamp: number | null;
+}
+
+/**
+ * Hook to detect if there's a pending session reset from the selected contact.
+ */
+export function usePendingSessionReset(
+ selectedContact: Contact | null,
+ pendingHandshakes: PendingHandshake[]
+): PendingSessionReset {
+ return useMemo(() => {
+ if (!selectedContact) {
+ return { hasPendingReset: false, pendingHandshake: null, limboAfterTimestamp: null };
+ }
+
+ const resetHandshake = pendingHandshakes.find(
+ h => h.sender.toLowerCase() === selectedContact.address.toLowerCase()
+ && h.isExistingContact === true
+ );
+
+ if (!resetHandshake) {
+ return { hasPendingReset: false, pendingHandshake: null, limboAfterTimestamp: null };
+ }
+
+ return {
+ hasPendingReset: true,
+ pendingHandshake: resetHandshake,
+ limboAfterTimestamp: resetHandshake.timestamp,
+ };
+ }, [selectedContact, pendingHandshakes]);
+}
\ No newline at end of file
diff --git a/apps/demo/src/hooks/useSessionSetup.ts b/apps/demo/src/hooks/useSessionSetup.ts
new file mode 100644
index 0000000..6e70cbf
--- /dev/null
+++ b/apps/demo/src/hooks/useSessionSetup.ts
@@ -0,0 +1,202 @@
+// src/hooks/useSessionSetup.ts
+import { useState, useEffect, useCallback } from "react";
+import { BrowserProvider, Contract } from "ethers";
+import {
+ getOrCreateSafeForOwner,
+ ensureModuleEnabled,
+} from "../services/safeAccount.js";
+import { VERBETH_SINGLETON_ADDR, SAFE_MODULE_ADDRESS, ExecutionMode } from "../types.js";
+
// Inputs to useSessionSetup. walletClient/readProvider are typed `any`
// because they come from wagmi/viem — presumably WalletClient and a
// provider with getBalance(); TODO confirm and tighten these types.
interface UseSessionSetupParams {
  walletClient: any;
  address: string | undefined;          // connected EOA (undefined before connect)
  safeAddr: string | null;              // VerbEth Safe address, null until resolved
  sessionSignerAddr: string | null;     // local session signer, null until derived
  chainId: number;
  readProvider: any;
  // State from useInitIdentity
  isSafeDeployed: boolean;
  isModuleEnabled: boolean;
  setIsSafeDeployed: (deployed: boolean) => void;
  setIsModuleEnabled: (enabled: boolean) => void;
  setNeedsSessionSetup: (needs: boolean) => void;
  executionMode: ExecutionMode | null;  // 'classic' disables fast-mode session logic
}
+
+export function useSessionSetup({
+ walletClient,
+ address,
+ safeAddr,
+ sessionSignerAddr,
+ chainId,
+ readProvider,
+ isSafeDeployed,
+ isModuleEnabled,
+ setIsSafeDeployed,
+ setIsModuleEnabled,
+ setNeedsSessionSetup,
+ executionMode,
+}: UseSessionSetupParams) {
+ const [sessionSignerBalance, setSessionSignerBalance] = useState(null);
+ const [loading, setLoading] = useState(false);
+
+ const isClassicMode = executionMode === 'classic';
+
+ // Refresh session signer balance (only for fast mode)
+ useEffect(() => {
+ if (isClassicMode || !sessionSignerAddr || !readProvider) return;
+
+ const refreshBalance = async () => {
+ try {
+ const balance = await readProvider.getBalance(sessionSignerAddr);
+ setSessionSignerBalance(balance);
+ } catch (err) {
+ console.error("Failed to refresh balance:", err);
+ }
+ };
+
+ refreshBalance();
+ const interval = setInterval(refreshBalance, 10000);
+ return () => clearInterval(interval);
+ }, [sessionSignerAddr, readProvider, isClassicMode]);
+
+ const refreshSessionBalance = useCallback(async () => {
+ if (isClassicMode || !sessionSignerAddr || !readProvider) return;
+ try {
+ const balance = await readProvider.getBalance(sessionSignerAddr);
+ console.log(`🔄 Balance: ${Number(balance) / 1e18} ETH`);
+ setSessionSignerBalance(balance);
+ } catch (err) {
+ console.error("Failed to refresh balance:", err);
+ }
+ }, [sessionSignerAddr, readProvider, isClassicMode]);
+
+ const setupSession = useCallback(async () => {
+ //Guard for classic mode
+ if (isClassicMode) return;
+
+ if (!walletClient || !address || !safeAddr || !sessionSignerAddr) return;
+
+ setLoading(true);
+ try {
+ const ethersProvider = new BrowserProvider(walletClient.transport);
+ const ethersSigner = await ethersProvider.getSigner();
+
+ console.log(`\n========== SETTING UP SESSION (Fast Mode) ==========`);
+ console.log(`VerbEth Safe: ${safeAddr}`);
+ console.log(`Deployed: ${isSafeDeployed}`);
+ console.log(`Module enabled: ${isModuleEnabled}`);
+ console.log(`Session signer: ${sessionSignerAddr}`);
+
+ // Case 1: Safe not deployed → Deploy + enable + configure (1 tx via helper)
+ if (!isSafeDeployed) {
+
+ const { isDeployed, moduleEnabled, sessionConfigured } = await getOrCreateSafeForOwner({
+ chainId,
+ ownerAddress: address as `0x${string}`,
+ providerEip1193: walletClient.transport,
+ ethersSigner,
+ deployIfMissing: true,
+ sessionConfig: {
+ sessionSigner: sessionSignerAddr,
+ target: VERBETH_SINGLETON_ADDR,
+ },
+ useApiLookup: false,
+ });
+
+ if (!isDeployed) {
+ throw new Error("Safe deployment failed");
+ }
+
+ setIsSafeDeployed(true);
+ setIsModuleEnabled(moduleEnabled);
+
+ if (sessionConfigured) {
+ console.log(`[verbeth] session setup complete in 1 tx`);
+ setIsModuleEnabled(true);
+ setNeedsSessionSetup(false);
+ // Don't call onSessionSetupComplete - state already updated via setters.
+ // Calling reinit would read stale RPC data and overwrite correct state.
+ return;
+ }
+
+ console.warn("Session not configured during deploy, falling back...");
+ }
+
+ // Case 2: Safe exists but module not enabled
+ if (!isModuleEnabled) {
+
+ const { protocolKit } = await getOrCreateSafeForOwner({
+ chainId,
+ ownerAddress: address as `0x${string}`,
+ providerEip1193: walletClient.transport,
+ ethersSigner,
+ deployIfMissing: false,
+ sessionConfig: {
+ sessionSigner: sessionSignerAddr,
+ target: VERBETH_SINGLETON_ADDR,
+ },
+ useApiLookup: false,
+ });
+
+ await ensureModuleEnabled(protocolKit);
+ setIsModuleEnabled(true);
+ console.log(`✓ Module enabled`);
+ }
+
+ // Case 3: Safe + module exist → Just setup session
+ const moduleContract = new Contract(
+ SAFE_MODULE_ADDRESS,
+ ["function setupSession(address safe, address signer, uint256 expiry, address target)"],
+ ethersSigner
+ );
+
+ const tx = await moduleContract.setupSession(
+ safeAddr,
+ sessionSignerAddr,
+ BigInt("0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"),
+ VERBETH_SINGLETON_ADDR
+ );
+ console.log(`TX: ${tx.hash}`);
+ await tx.wait();
+ console.log(`✓ Session configured`);
+
+ console.log(`=====================================================\n`);
+ setNeedsSessionSetup(false);
+
+ } catch (err: any) {
+ console.error(`[verbeth] session setup failed:`, err);
+ } finally {
+ setLoading(false);
+ }
+ }, [
+ walletClient,
+ address,
+ safeAddr,
+ sessionSignerAddr,
+ isSafeDeployed,
+ isModuleEnabled,
+ chainId,
+ isClassicMode,
+ setIsSafeDeployed,
+ setIsModuleEnabled,
+ setNeedsSessionSetup,
+ ]);
+
+ //Return null values for classic mode
+ if (isClassicMode) {
+ return {
+ sessionSignerBalance: null,
+ sessionLoading: false,
+ refreshSessionBalance: async () => {},
+ setupSession: async () => {},
+ };
+ }
+
+ return {
+ sessionSignerBalance,
+ sessionLoading: loading,
+ refreshSessionBalance,
+ setupSession,
+ };
+}
\ No newline at end of file
diff --git a/apps/demo/src/providers.tsx b/apps/demo/src/providers.tsx
index f7dffc1..2c9969a 100644
--- a/apps/demo/src/providers.tsx
+++ b/apps/demo/src/providers.tsx
@@ -22,7 +22,7 @@ export function Providers({ children }: { children: React.ReactNode }) {
{children}
diff --git a/apps/demo/src/rpc.tsx b/apps/demo/src/rpc.tsx
index 9161966..295af17 100644
--- a/apps/demo/src/rpc.tsx
+++ b/apps/demo/src/rpc.tsx
@@ -1,51 +1,109 @@
import { createContext, useContext, useEffect, useState, useMemo } from "react";
import { JsonRpcProvider } from "ethers";
-import { createPublicClient, http, fallback } from "viem";
-import { base } from "viem/chains";
+import { createPublicClient, http, webSocket, fallback } from "viem";
+import { baseSepolia } from "viem/chains";
+const WS_URL = import.meta.env.VITE_RPC_WS_URL as string | undefined;
+const ALCHEMY_HTTP_URL = import.meta.env.VITE_RPC_HTTP_URL as string | undefined;
+
+const PUBLIC_HTTP_1 = "https://sepolia.base.org";
+const PUBLIC_HTTP_2 = "https://base-sepolia-rpc.publicnode.com";
+
+const HTTP_URLS: readonly string[] = [
+ ...(ALCHEMY_HTTP_URL ? [ALCHEMY_HTTP_URL] : []),
+ PUBLIC_HTTP_1,
+ PUBLIC_HTTP_2,
+];
+
+export const BASESEPOLIA_HTTP_URLS = HTTP_URLS;
+
+/** Browser-safe read RPC URL for Base Sepolia. */
+export const BASESEPOLIA_HTTP_URL = HTTP_URLS[0];
+
+export type TransportStatus =
+ | "ws"
+ | "http-alchemy"
+ | "http-public"
+ | "disconnected";
+
type RpcState = {
ethers: JsonRpcProvider | null;
- viem: ReturnType | null;
+ viem: ReturnType | null;
+ transportStatus: TransportStatus;
};
-
const RpcCtx = createContext(null);
+function isAlchemyUrl(url: string): boolean {
+ return url.includes("alchemy.com");
+}
+
export function RpcProvider({ children }: { children: React.ReactNode }) {
const [ethersProvider, setEthersProvider] = useState(null);
+ const [transportStatus, setTransportStatus] = useState(
+ WS_URL ? "ws" : ALCHEMY_HTTP_URL ? "http-alchemy" : "http-public"
+ );
useEffect(() => {
let mounted = true;
(async () => {
- try {
- const p = new JsonRpcProvider("https://mainnet.base.org", undefined, {
- polling: true,
- pollingInterval: 3000,
- });
- await p.getBlockNumber();
- if (mounted) setEthersProvider(p);
- } catch (e) {
- console.error("Ethers RPC failed:", e);
+ for (const url of HTTP_URLS) {
+ try {
+ const p = new JsonRpcProvider(url, undefined, {
+ polling: true,
+ pollingInterval: 3000,
+ });
+ await p.getBlockNumber();
+ if (mounted) {
+ setEthersProvider(p);
+ if (!WS_URL) {
+ setTransportStatus(isAlchemyUrl(url) ? "http-alchemy" : "http-public");
+ }
+ }
+ return;
+ } catch (e) {
+ console.warn(`Ethers RPC failed for ${url}:`, e);
+ }
+ }
+ if (mounted) {
+ console.error("All ethers RPC endpoints failed");
+ setTransportStatus("disconnected");
}
})();
return () => { mounted = false; };
}, []);
- const viemClient = useMemo(
- () =>
- createPublicClient({
- chain: base,
- transport: fallback([
- http("https://mainnet.base.org"), // rate-limited
- http("https://base-rpc.publicnode.com"),
- ]),
- }),
- []
- );
+ const viemClient = useMemo(() => {
+ const transports = [];
+
+ if (WS_URL) {
+ transports.push(
+ webSocket(WS_URL, {
+ reconnect: { attempts: 5, delay: 2_000 },
+ })
+ );
+ }
+ if (ALCHEMY_HTTP_URL) {
+ transports.push(http(ALCHEMY_HTTP_URL));
+ }
+ transports.push(http(PUBLIC_HTTP_1));
+ transports.push(http(PUBLIC_HTTP_2));
+
+ return createPublicClient({
+ chain: baseSepolia,
+ transport: fallback(transports),
+ });
+ }, []);
return (
-
+
{children}
);
@@ -58,9 +116,10 @@ export function useRpcClients() {
}
export function useRpcStatus() {
- const provider = useContext(RpcCtx);
+ const ctx = useContext(RpcCtx);
return {
- isConnected: provider !== null,
- provider
+ isConnected: ctx !== null && ctx.ethers !== null,
+ transportStatus: ctx?.transportStatus ?? "disconnected",
+ provider: ctx,
};
-}
\ No newline at end of file
+}
diff --git a/apps/demo/src/services/DbService.ts b/apps/demo/src/services/DbService.ts
index 46e7149..c3480ac 100644
--- a/apps/demo/src/services/DbService.ts
+++ b/apps/demo/src/services/DbService.ts
@@ -1,16 +1,24 @@
+// src/services/DbService.ts
+
import { VerbEthDatabase } from "./schema.js";
import type {
StoredIdentity,
Contact,
+ ContactStatus,
Message,
+ EventType,
PendingHandshake,
+ PersistedSyncState,
} from "../types.js";
+import { RatchetDbService } from "./RatchetDbService.js";
export class DbService {
private readonly db: VerbEthDatabase;
+ public readonly ratchet: RatchetDbService;
constructor() {
this.db = new VerbEthDatabase();
+ this.ratchet = new RatchetDbService(this.db);
}
/* ----------------------------- ADDRESS HELPERS --------------------------- */
@@ -21,84 +29,23 @@ export class DbService {
/* ----------------------------- IDENTITIES -------------------------------- */
async saveIdentity(identity: StoredIdentity) {
const normalizedAddress = this.normalizeAddress(identity.address);
- console.log(
- `Saving identity for ${normalizedAddress.slice(
- 0,
- 8
- )}... (original: ${identity.address.slice(0, 8)})`
- );
-
- try {
- // Normalize the address before saving
- const normalizedIdentity = {
- ...identity,
- address: normalizedAddress,
- };
-
- const result = await this.db.identity.put(normalizedIdentity);
- console.log(
- `✅ Identity saved successfully for ${normalizedAddress.slice(0, 8)}...`
- );
-
- // Verify it was saved
- const verification = await this.db.identity.get(normalizedAddress);
- if (verification) {
- console.log(
- `Identity verified in DB for ${normalizedAddress.slice(0, 8)}...`
- );
- } else {
- console.error(
- `✗ Identity NOT found after save for ${normalizedAddress.slice(
- 0,
- 8
- )}...`
- );
- }
-
- return result;
- } catch (error) {
- console.error(
- `✗ Failed to save identity for ${normalizedAddress.slice(0, 8)}...:`,
- error
- );
- throw error;
- }
+ const normalizedIdentity = { ...identity, address: normalizedAddress };
+ console.debug(`[db] saveIdentity ${normalizedAddress.slice(0, 8)}...`);
+ return this.db.identity.put(normalizedIdentity);
}
async getIdentity(address: string) {
const normalizedAddress = this.normalizeAddress(address);
- console.log(
- `Looking for identity: ${normalizedAddress.slice(
- 0,
- 8
- )}... (original: ${address.slice(0, 8)})`
- );
-
try {
- const result = await this.db.identity.get(normalizedAddress);
- if (result) {
- } else {
- // Debug: show all identities in DB
- const allIdentities = await this.db.identity.toArray();
- console.log(`Available identities in DB: ${allIdentities.length}`);
- allIdentities.forEach((id) => {
- console.log(` - ${id.address} (${id.address.slice(0, 8)}...)`);
- });
- }
- return result;
+ return await this.db.identity.get(normalizedAddress);
} catch (error) {
- console.error(
- `✗ Error getting identity for ${normalizedAddress.slice(0, 8)}...:`,
- error
- );
+ console.error(`[db] getIdentity failed for ${normalizedAddress.slice(0, 8)}...:`, error);
return null;
}
}
deleteIdentity(address: string) {
- const normalizedAddress = this.normalizeAddress(address);
- console.log(`Deleting identity for ${normalizedAddress.slice(0, 8)}...`);
- return this.db.identity.delete(normalizedAddress);
+ return this.db.identity.delete(this.normalizeAddress(address));
}
/* ------------------------------ CONTACTS --------------------------------- */
@@ -108,12 +55,6 @@ export class DbService {
address: this.normalizeAddress(contact.address),
ownerAddress: this.normalizeAddress(contact.ownerAddress),
};
- console.log(
- `👤 Saving contact ${normalizedContact.address.slice(
- 0,
- 8
- )}... for owner ${normalizedContact.ownerAddress.slice(0, 8)}...`
- );
return this.db.contacts.put(normalizedContact);
}
@@ -152,7 +93,7 @@ export class DbService {
async upsertDedup(entry: {
key: string; // `${ownerAddress}:${txHash}-${logIndex}`
- messageId: string;
+ messageId: string;
txHash: string;
blockNumber: number;
}) {
@@ -166,9 +107,55 @@ export class DbService {
return this.db.dedup.get(key);
}
+ private buildEventReceiptKey(
+ ownerAddress: string,
+ eventType: EventType,
+ txHash: string,
+ logIndex: number
+ ): string {
+ const normalizedOwner = this.normalizeAddress(ownerAddress);
+ return `event:${normalizedOwner}:${eventType}:${txHash.toLowerCase()}-${logIndex}`;
+ }
+
+ async hasProcessedEvent(
+ ownerAddress: string,
+ eventType: EventType,
+ txHash: string,
+ logIndex: number
+ ): Promise {
+ const key = this.buildEventReceiptKey(
+ ownerAddress,
+ eventType,
+ txHash,
+ logIndex
+ );
+ const existing = await this.db.dedup.get(key);
+ return !!existing;
+ }
+
+ async markEventProcessed(
+ ownerAddress: string,
+ eventType: EventType,
+ txHash: string,
+ logIndex: number,
+ blockNumber: number
+ ): Promise {
+ const key = this.buildEventReceiptKey(
+ ownerAddress,
+ eventType,
+ txHash,
+ logIndex
+ );
+ await this.upsertDedup({
+ key,
+ messageId: key,
+ txHash: txHash.toLowerCase(),
+ blockNumber,
+ });
+ }
+
/* ------------------------------ MESSAGES --------------------------------- */
async saveMessage(message: Message): Promise {
-
if (await this.db.messages.get(message.id)) {
console.debug(`Message ${message.id} already in DB`);
return false;
@@ -202,12 +189,6 @@ export class DbService {
);
}
- console.log(
- `Saving new message from ${normalizedMessage.sender.slice(
- 0,
- 8
- )}... for owner ${normalizedMessage.ownerAddress.slice(0, 8)}...`
- );
await this.db.messages.put(normalizedMessage);
return true;
}
@@ -224,25 +205,15 @@ export class DbService {
const newMessage = { ...oldMessage, ...updates };
await this.deleteMessage(messageId);
await this.saveMessage(newMessage);
- console.log(
- `Replaced message ${messageId.slice(
- 0,
- 8
- )}... with new ID ${updates.id?.slice(0, 8)}...`
- );
return true;
}
return false;
}
const result = await this.db.messages.update(messageId, updates);
- console.log(`Updated message ${messageId.slice(0, 8)}... with:`, updates);
return result > 0;
} catch (error) {
- console.error(
- `✗ Failed to update message ${messageId.slice(0, 8)}...:`,
- error
- );
+ console.error(`[db] updateMessage failed for ${messageId.slice(0, 8)}...:`, error);
return false;
}
}
@@ -262,14 +233,7 @@ export class DbService {
.equals([normalizedOwner, normalizedSender, topic, nonce, "pending"])
.first();
- if (exactMatch) {
- console.log(`Found exact match!`, {
- messageId: exactMatch.id,
- messageTopic: exactMatch.topic.slice(0, 20) + "...",
- messageNonce: exactMatch.nonce,
- });
- return exactMatch;
- }
+ if (exactMatch) return exactMatch;
// FALLBACK: Find by content and recent timestamp
const recentPendingMessages = await this.db.messages
@@ -279,14 +243,7 @@ export class DbService {
.limit(3)
.toArray();
- if (recentPendingMessages.length > 0) {
- console.log(
- `Using fallback matching: found ${recentPendingMessages.length} recent pending messages`
- );
- return recentPendingMessages[0]; // Most recent
- }
-
- return undefined;
+ return recentPendingMessages[0];
}
async findMessageByDedupKey(dedupKey: string): Promise {
@@ -338,7 +295,6 @@ export class DbService {
async getAllMessages(ownerAddress: string, limit = 100) {
const normalizedOwner = this.normalizeAddress(ownerAddress);
- console.log(`Loading messages for owner ${normalizedOwner.slice(0, 8)}...`);
const messages = await this.db.messages
.where("ownerAddress")
.equals(normalizedOwner)
@@ -354,6 +310,33 @@ export class DbService {
return this.db.messages.update(id, { read: true });
}
+ async markMessagesAsLost(
+ ownerAddress: string,
+ contactAddress: string,
+ afterTimestamp: number
+ ): Promise {
+ const normalizedOwner = this.normalizeAddress(ownerAddress);
+ const normalizedContact = this.normalizeAddress(contactAddress);
+
+ const messages = await this.db.messages
+ .where("ownerAddress")
+ .equals(normalizedOwner)
+ .filter(
+ (m) =>
+ m.direction === "outgoing" &&
+ m.recipient?.toLowerCase() === normalizedContact &&
+ m.timestamp > afterTimestamp &&
+ m.type !== "system"
+ )
+ .toArray();
+
+ for (const msg of messages) {
+ await this.db.messages.update(msg.id, { isLost: true });
+ }
+
+ return messages.length;
+ }
+
getUnreadMessagesCount() {
return this.db.messages.filter((m) => !m.read).count();
}
@@ -369,12 +352,6 @@ export class DbService {
sender: this.normalizeAddress(h.sender),
ownerAddress: this.normalizeAddress(h.ownerAddress),
};
- console.log(
- `...Saving pending handshake from ${normalizedHandshake.sender.slice(
- 0,
- 8
- )}... for owner ${normalizedHandshake.ownerAddress.slice(0, 8)}...`
- );
return this.db.pendingHandshakes.put(normalizedHandshake);
}
@@ -394,7 +371,6 @@ export class DbService {
}
deletePendingHandshake(id: string) {
- console.log(`Deleting pending handshake ${id.slice(0, 8)}...`);
return this.db.pendingHandshakes.delete(id);
}
@@ -411,21 +387,21 @@ export class DbService {
/* --------------------------------- SYNC --------------------------------- */
getLastKnownBlock(addr: string) {
- const normalizedAddr = this.normalizeAddress(addr);
- return this.getSetting(`lastKnownBlock_${normalizedAddr}`);
-}
+ const normalizedAddr = this.normalizeAddress(addr);
+ return this.getSetting(`lastKnownBlock_${normalizedAddr}`);
+ }
setLastKnownBlock(addr: string, n: number) {
- const normalizedAddr = this.normalizeAddress(addr);
- return this.setSetting(`lastKnownBlock_${normalizedAddr}`, n);
-}
+ const normalizedAddr = this.normalizeAddress(addr);
+ return this.setSetting(`lastKnownBlock_${normalizedAddr}`, n);
+ }
getOldestScannedBlock(addr: string) {
- const normalizedAddr = this.normalizeAddress(addr);
- return this.getSetting(`oldestScannedBlock_${normalizedAddr}`);
-}
+ const normalizedAddr = this.normalizeAddress(addr);
+ return this.getSetting(`oldestScannedBlock_${normalizedAddr}`);
+ }
setOldestScannedBlock(addr: string, n: number) {
- const normalizedAddr = this.normalizeAddress(addr);
- return this.setSetting(`oldestScannedBlock_${normalizedAddr}`, n);
-}
+ const normalizedAddr = this.normalizeAddress(addr);
+ return this.setSetting(`oldestScannedBlock_${normalizedAddr}`, n);
+ }
getInitialScanComplete(addr: string) {
const normalizedAddr = this.normalizeAddress(addr);
return this.getSetting(`initialScanComplete_${normalizedAddr}`);
@@ -435,6 +411,27 @@ export class DbService {
return this.setSetting(`initialScanComplete_${normalizedAddr}`, ok);
}
+ private getSyncStateKey(addr: string): string {
+ const normalizedAddr = this.normalizeAddress(addr);
+ return `syncState_${normalizedAddr}`;
+ }
+
+ async getSyncState(addr: string): Promise<PersistedSyncState | null> {
+ const key = this.getSyncStateKey(addr);
+ const value = await this.getSetting(key);
+ return value ?? null;
+ }
+
+ async setSyncState(addr: string, state: PersistedSyncState): Promise<void> {
+ const key = this.getSyncStateKey(addr);
+ await this.setSetting(key, state);
+ }
+
+ async clearSyncState(addr: string): Promise<void> {
+ const key = this.getSyncStateKey(addr);
+ await this.deleteSetting(key);
+ }
+
/* ------------------------------ UTILITIES ------------------------------- */
async clearAllData() {
console.log("🧹 Clearing all database data...");
@@ -446,6 +443,8 @@ export class DbService {
this.db.messages,
this.db.pendingHandshakes,
this.db.settings,
+ this.db.ratchetSessions,
+ this.db.pendingOutbound,
],
async () => {
await this.db.identity.clear();
@@ -453,6 +452,8 @@ export class DbService {
await this.db.messages.clear();
await this.db.pendingHandshakes.clear();
await this.db.settings.clear();
+ await this.db.ratchetSessions.clear();
+ await this.db.pendingOutbound.clear();
}
);
console.log("All database data cleared");
@@ -469,6 +470,8 @@ export class DbService {
this.db.messages,
this.db.pendingHandshakes,
this.db.settings,
+ this.db.ratchetSessions,
+ this.db.pendingOutbound,
],
async () => {
await this.db.identity.delete(normalizedAddr);
@@ -487,6 +490,19 @@ export class DbService {
.equals(normalizedAddr)
.delete();
+ // Delete ratchet data
+ const sessions = await this.db.ratchetSessions
+ .where("myAddress")
+ .equals(normalizedAddr)
+ .toArray();
+ for (const s of sessions) {
+ await this.db.ratchetSessions.delete(s.conversationId);
+ await this.db.pendingOutbound
+ .where("conversationId")
+ .equals(s.conversationId)
+ .delete();
+ }
+
const staleSettings = await this.db.settings
.where("name")
.startsWith(`initialScanComplete_${normalizedAddr}`)
@@ -496,11 +512,54 @@ export class DbService {
}
await this.db.settings.delete(`lastKnownBlock_${normalizedAddr}`);
await this.db.settings.delete(`oldestScannedBlock_${normalizedAddr}`);
+ await this.db.settings.delete(`syncState_${normalizedAddr}`);
+
+ const dedupForOwner = await this.db.dedup
+ .where("key")
+ .startsWith(`${normalizedAddr}:`)
+ .toArray();
+ for (const row of dedupForOwner) {
+ await this.db.dedup.delete(row.key);
+ }
+
+ const eventReceiptsForOwner = await this.db.dedup
+ .where("key")
+ .startsWith(`event:${normalizedAddr}:`)
+ .toArray();
+ for (const row of eventReceiptsForOwner) {
+ await this.db.dedup.delete(row.key);
+ }
}
);
//this.deduplicator.clear();
console.log(`User data cleared for ${normalizedAddr.slice(0, 8)}...`);
}
+ /* ----------------------------- SESSION ----------------------------- */
+
+ /**
+ * Check if a sender is an existing contact.
+ * Used during handshake processing for receiver hints.
+ */
+ async isExistingContact(
+ senderAddress: string,
+ ownerAddress: string
+ ): Promise<{
+ exists: boolean;
+ previousStatus?: ContactStatus;
+ previousConversationId?: string;
+ }> {
+ const contact = await this.getContact(senderAddress, ownerAddress);
+
+ if (!contact) {
+ return { exists: false };
+ }
+
+ return {
+ exists: true,
+ previousStatus: contact.status,
+ previousConversationId: contact.conversationId,
+ };
+ }
/* ---------------------------- BACKUP / IMPORT --------------------------- */
async exportData() {
@@ -511,11 +570,13 @@ export class DbService {
messages: await this.db.messages.toArray(),
pendingHandshakes: await this.db.pendingHandshakes.toArray(),
settings: await this.db.settings.toArray(),
+ ratchetSessions: await this.db.ratchetSessions.toArray(),
+ pendingOutbound: await this.db.pendingOutbound.toArray(),
exportedAt: Date.now(),
} as const;
console.log(
- `Exported ${payload.identity.length} identities, ${payload.contacts.length} contacts, ${payload.messages.length} messages`
+ `Exported ${payload.identity.length} identities, ${payload.contacts.length} contacts, ${payload.messages.length} messages, ${payload.ratchetSessions.length} ratchet sessions`
);
return JSON.stringify(payload);
}
@@ -532,6 +593,8 @@ export class DbService {
this.db.messages,
this.db.pendingHandshakes,
this.db.settings,
+ this.db.ratchetSessions,
+ this.db.pendingOutbound,
],
async () => {
if (data.identity) await this.db.identity.bulkPut(data.identity);
@@ -540,6 +603,10 @@ export class DbService {
if (data.pendingHandshakes)
await this.db.pendingHandshakes.bulkPut(data.pendingHandshakes);
if (data.settings) await this.db.settings.bulkPut(data.settings);
+ if (data.ratchetSessions)
+ await this.db.ratchetSessions.bulkPut(data.ratchetSessions);
+ if (data.pendingOutbound)
+ await this.db.pendingOutbound.bulkPut(data.pendingOutbound);
}
);
console.log("✅ Database import completed");
@@ -548,37 +615,17 @@ export class DbService {
/* -------------------------------- ACCOUNT SWITCH -------------------------------- */
async switchAccount(newAddress: string) {
const normalizedAddress = this.normalizeAddress(newAddress);
+ console.debug(`[db] switchAccount ${normalizedAddress.slice(0, 8)}...`);
- //this.deduplicator.clear();
-
- const allIdentities = await this.db.identity.toArray();
- console.log(`Database state: ${allIdentities.length} total identities`);
- allIdentities.forEach((id) => {
- console.log(
- ` - ${id.address} (derived: ${new Date(
- id.derivedAt
- ).toLocaleString()})`
- );
- });
-
- const [contacts, messages, handshakes] = await Promise.all([
+ await Promise.all([
this.getAllContacts(normalizedAddress),
this.getAllMessages(normalizedAddress, 1000),
this.getAllPendingHandshakes(normalizedAddress),
]);
-
- console.log(
- `Data for ${normalizedAddress.slice(0, 8)}...: ${
- contacts.length
- } contacts, ${messages.length} messages, ${
- handshakes.length
- } pending handshakes`
- );
}
/* ------------------------------ CLEANUP --------------------------------- */
close() {
- console.log("Closing database connection...");
this.db.close();
}
}
diff --git a/apps/demo/src/services/EventProcessorService.ts b/apps/demo/src/services/EventProcessorService.ts
new file mode 100644
index 0000000..2b06ba1
--- /dev/null
+++ b/apps/demo/src/services/EventProcessorService.ts
@@ -0,0 +1,571 @@
+// src/services/EventProcessorService.ts
+// CLEANED VERSION - uses VerbethClient for session creation
+
+/**
+ * Event Processing Service.
+ *
+ * Handles decoding, verification, decryption, and persistence of blockchain events.
+ * Uses VerbethClient SDK methods for session management and topic derivation.
+ */
+
+import { AbiCoder, getBytes } from "ethers";
+import {
+ type IdentityContext,
+ type VerbethClient,
+} from "@verbeth/sdk";
+
+import { dbService } from "./DbService.js";
+import {
+ Contact,
+ Message,
+ PendingHandshake,
+ ProcessedEvent,
+} from "../types.js";
+
+
+export function generateMessageId(
+ txHash: string,
+ log: { logIndex?: number; index?: number }
+): string {
+ const idx =
+ typeof log.logIndex !== "undefined"
+ ? log.logIndex
+ : typeof log.index !== "undefined"
+ ? log.index
+ : 0;
+ return `${txHash}-${idx}`;
+}
+
+function systemEventMessageId(
+ kind: "handshake" | "handshake-response",
+ txHash: string,
+ logIndex: number
+): string {
+ return `sys-${kind}-${txHash}-${logIndex}`;
+}
+
+// =============================================================================
+// Result Types
+// =============================================================================
+
+export interface HandshakeResult {
+ pendingHandshake: PendingHandshake;
+ systemMessage: Message;
+}
+
+export interface HandshakeResponseResult {
+ updatedContact: Contact;
+ systemMessage: Message;
+}
+
+export interface MessageResult {
+ newMessage?: Message;
+ messageUpdate?: [string, Partial<Message>];
+ contactUpdate?: Contact;
+}
+
+// =============================================================================
+// Handshake Processing (unchanged - doesn't use ratchet)
+// =============================================================================
+
+export async function processHandshakeEvent(
+ event: ProcessedEvent,
+ address: string,
+ readProvider: any,
+ identityContext: IdentityContext,
+ verbethClient: VerbethClient,
+): Promise<HandshakeResult | null> {
+ try {
+ const alreadyProcessed = await dbService.hasProcessedEvent(
+ address,
+ "handshake",
+ event.txHash,
+ event.logIndex
+ );
+ if (alreadyProcessed) return null;
+
+ const log = event.rawLog;
+ const abiCoder = new AbiCoder();
+ const decoded = abiCoder.decode(["bytes", "bytes", "bytes"], log.data);
+ const [identityPubKeyBytes, ephemeralPubKeyBytes, plaintextPayloadBytes] = decoded;
+
+ const unifiedPubKeys = getBytes(identityPubKeyBytes);
+ const decodedKeys = verbethClient.payload.decodeUnifiedPubKeys(unifiedPubKeys);
+
+ if (!decodedKeys) {
+ console.error("[verbeth] failed to decode public keys");
+ return null;
+ }
+
+ const identityPubKey = decodedKeys.identityPubKey;
+ const signingPubKey = decodedKeys.signingPubKey;
+ const ephemeralPubKeyFull = getBytes(ephemeralPubKeyBytes);
+ // Extract X25519 part (first 32 bytes) for backward compatibility
+ const ephemeralPubKey = ephemeralPubKeyFull.length > 32
+ ? ephemeralPubKeyFull.slice(0, 32)
+ : ephemeralPubKeyFull;
+ const plaintextPayload = new TextDecoder().decode(
+ getBytes(plaintextPayloadBytes)
+ );
+
+ const cleanSenderAddress = "0x" + log.topics[2].slice(-40);
+ const recipientHash = log.topics[1];
+
+ let handshakeContent;
+ let hasValidIdentityProof = false;
+
+ try {
+ handshakeContent = verbethClient.payload.parseHandshakePayload(plaintextPayload);
+ hasValidIdentityProof = true;
+ } catch (error) {
+ handshakeContent = {
+ plaintextPayload: plaintextPayload,
+ identityProof: null,
+ };
+ hasValidIdentityProof = false;
+ }
+
+ let isVerified = false;
+ if (hasValidIdentityProof) {
+ try {
+ const handshakeEvent = {
+ recipientHash,
+ sender: cleanSenderAddress,
+ pubKeys: identityPubKeyBytes,
+ ephemeralPubKey: ephemeralPubKeyBytes,
+ plaintextPayload: plaintextPayload,
+ };
+
+ isVerified = await verbethClient.verify.verifyHandshakeIdentity(
+ handshakeEvent,
+ readProvider,
+ identityContext
+ );
+ } catch (error) {
+ // verification failure is non-critical, handshake still shown with unverified badge
+ }
+ }
+
+ let identityAddress = cleanSenderAddress;
+ if (hasValidIdentityProof && handshakeContent.identityProof?.message) {
+ try {
+ const parsed = verbethClient.utils.parseBindingMessage(handshakeContent.identityProof.message);
+ if (parsed.address) {
+ identityAddress = parsed.address;
+ }
+ } catch (e) {}
+ }
+
+ const existingContact = await dbService.getContact(identityAddress, address);
+ const isExistingEstablished = existingContact?.status === "established";
+
+ const pendingHandshake: PendingHandshake = {
+ id: log.transactionHash,
+ ownerAddress: address,
+ sender: identityAddress,
+ emitterAddress: cleanSenderAddress,
+ identityPubKey,
+ signingPubKey,
+ ephemeralPubKey, // X25519 only (32 bytes)
+ ephemeralPubKeyFull, // Full key (may include KEM - 1216 bytes)
+ message: handshakeContent.plaintextPayload,
+ timestamp: Date.now(),
+ blockNumber: log.blockNumber,
+ verified: isVerified,
+ isExistingContact: isExistingEstablished,
+ };
+
+ const messagePrefix = pendingHandshake.isExistingContact
+ ? "Session reset request received"
+ : "Request received";
+
+ const systemMessage: Message = {
+ id: systemEventMessageId("handshake", event.txHash, event.logIndex),
+ topic: "",
+ sender: identityAddress,
+ recipient: address,
+ ciphertext: "",
+ timestamp: Date.now(),
+ blockTimestamp: Date.now(),
+ blockNumber: log.blockNumber,
+ direction: "incoming",
+ decrypted: `${messagePrefix}: "${handshakeContent.plaintextPayload}"`,
+ read: true,
+ nonce: 0,
+ dedupKey: `handshake-received-${log.transactionHash}`,
+ type: "system",
+ ownerAddress: address,
+ status: "confirmed",
+ verified: isVerified,
+ };
+
+ await dbService.savePendingHandshake(pendingHandshake);
+ await dbService.saveMessage(systemMessage);
+ await dbService.markEventProcessed(
+ address,
+ "handshake",
+ event.txHash,
+ event.logIndex,
+ log.blockNumber
+ );
+
+ return { pendingHandshake, systemMessage };
+ } catch (error) {
+ console.error("[verbeth] handshake processing failed:", error);
+ return null;
+ }
+}
+
+// =============================================================================
+// Handshake Response Processing
+// Uses VerbethClient.createInitiatorSession for topic derivation
+// =============================================================================
+
+export async function processHandshakeResponseEvent(
+ event: ProcessedEvent,
+ address: string,
+ readProvider: any,
+ identityContext: IdentityContext,
+ verbethClient: VerbethClient,
+): Promise<HandshakeResponseResult | null> {
+ try {
+ const alreadyProcessed = await dbService.hasProcessedEvent(
+ address,
+ "handshake_response",
+ event.txHash,
+ event.logIndex
+ );
+ if (alreadyProcessed) return null;
+
+ const log = event.rawLog;
+ const abiCoder = new AbiCoder();
+ const [responderEphemeralRBytes, ciphertextBytes] = abiCoder.decode(
+ ["bytes32", "bytes"],
+ log.data
+ );
+
+ const ciphertextJson = new TextDecoder().decode(getBytes(ciphertextBytes));
+ const responder = "0x" + log.topics[2].slice(-40);
+ const inResponseTo = log.topics[1];
+
+ const currentContacts = await dbService.getAllContacts(address);
+
+ const contact = currentContacts.find(
+ (c) =>
+ c.address.toLowerCase() === event.matchedContactAddress?.toLowerCase() &&
+ c.status === "handshake_sent"
+ );
+
+ if (!contact || !contact.handshakeEphemeralSecret) {
+ return null;
+ }
+
+ const responseEvent = {
+ inResponseTo,
+ responder,
+ responderEphemeralR: responderEphemeralRBytes,
+ ciphertext: ciphertextJson,
+ };
+
+ const initiatorEphemeralSecret = getBytes(contact.handshakeEphemeralSecret);
+
+ // Get stored KEM secret for PQ-hybrid decapsulation
+ const initiatorKemSecret = contact.handshakeKemSecret
+ ? getBytes(contact.handshakeKemSecret)
+ : undefined;
+
+ if (!initiatorKemSecret) {
+ return null;
+ }
+
+ const result = await verbethClient.verify.verifyAndExtractHandshakeResponseKeys(
+ responseEvent,
+ initiatorEphemeralSecret,
+ initiatorKemSecret,
+ readProvider,
+ identityContext
+ );
+
+ if (!result.isValid || !result.keys) {
+ return null;
+ }
+
+ // =========================================================================
+ // Create session using VerbethClient convenience method
+ // =========================================================================
+ const ratchetSession = verbethClient.createInitiatorSessionFromHsr({
+ contactAddress: contact.address,
+ myEphemeralSecret: initiatorEphemeralSecret,
+ myKemSecret: initiatorKemSecret,
+ hsrEvent: {
+ inResponseToTag: inResponseTo as `0x${string}`,
+ responderEphemeralPubKey: result.keys.ephemeralPubKey,
+ kemCiphertext: result.keys.kemCiphertext,
+ },
+ });
+
+ // Save session to DB (SDK will pick it up via SessionStore)
+ await dbService.ratchet.saveRatchetSession(ratchetSession);
+
+ const updatedContact: Contact = {
+ ...contact,
+ status: "established",
+ identityPubKey: result.keys.identityPubKey,
+ signingPubKey: result.keys.signingPubKey,
+ topicOutbound: ratchetSession.currentTopicOutbound,
+ topicInbound: ratchetSession.currentTopicInbound,
+ conversationId: ratchetSession.conversationId,
+ handshakeEphemeralSecret: undefined, // Clear after use
+ handshakeKemSecret: undefined, // Clear after use
+ lastMessage: result.keys.note || "Connection established",
+ lastTimestamp: Date.now(),
+ };
+
+ await dbService.saveContact(updatedContact);
+
+ const systemMessage: Message = {
+ id: systemEventMessageId(
+ "handshake-response",
+ event.txHash,
+ event.logIndex
+ ),
+ topic: ratchetSession.currentTopicOutbound,
+ sender: contact.address,
+ recipient: address,
+ ciphertext: "",
+ timestamp: Date.now(),
+ blockTimestamp: Date.now(),
+ blockNumber: log.blockNumber,
+ direction: "incoming",
+ decrypted: `Connection established: "${result.keys.note || "Hello!"}"`,
+ read: true,
+ nonce: 0,
+ dedupKey: `handshake-response-${log.transactionHash}`,
+ type: "system",
+ ownerAddress: address,
+ status: "confirmed",
+ };
+
+ await dbService.saveMessage(systemMessage);
+ await dbService.markEventProcessed(
+ address,
+ "handshake_response",
+ event.txHash,
+ event.logIndex,
+ log.blockNumber
+ );
+
+ console.log(`[verbeth] session established with ${contact.address.slice(0, 10)}...`);
+
+ return { updatedContact, systemMessage };
+ } catch (error) {
+ console.error("[verbeth] handshake response failed:", error);
+ return null;
+ }
+}
+
+// =============================================================================
+// Message Processing - Uses VerbethClient for decryption
+// =============================================================================
+
+/**
+ * Process a message event using VerbethClient's decryptMessage.
+ *
+ * For outgoing messages:
+ * - Look up pending record by txHash
+ * - Finalize the pending record
+ * - Use pending.id to update the message (which IS the optimistic message ID)
+ */
+export async function processMessageEvent(
+ event: ProcessedEvent,
+ address: string,
+ emitterAddress: string | undefined,
+ verbethClient: VerbethClient,
+): Promise<MessageResult | null> {
+ try {
+ const alreadyProcessed = await dbService.hasProcessedEvent(
+ address,
+ "message",
+ event.txHash,
+ event.logIndex
+ );
+ if (alreadyProcessed) return null;
+
+ const log = event.rawLog;
+ const abiCoder = new AbiCoder();
+ const decoded = abiCoder.decode(["bytes", "uint256", "uint256"], log.data);
+ const [ciphertextBytes, timestamp, nonce] = decoded;
+ const topic = log.topics[2];
+
+ const sender = "0x" + log.topics[1].slice(-40);
+ const ciphertextHex = ciphertextBytes as string;
+ const ciphertextRaw = getBytes(ciphertextHex);
+ const dedupKey = `${address.toLowerCase()}:${generateMessageId(
+ event.txHash,
+ log
+ )}`;
+
+ const isOurMessage =
+ sender.toLowerCase() === address.toLowerCase() ||
+ (emitterAddress && sender.toLowerCase() === emitterAddress.toLowerCase());
+
+ // Check dedup for incoming messages
+ if (!isOurMessage) {
+ const already = await dbService.getByDedupKey(dedupKey);
+ if (already) {
+ await dbService.markEventProcessed(
+ address,
+ "message",
+ event.txHash,
+ event.logIndex,
+ log.blockNumber
+ );
+ return null;
+ }
+ }
+
+ // =========================================================================
+ // OUTGOING MESSAGE CONFIRMATION - Use txHash lookup
+ // =========================================================================
+ if (isOurMessage) {
+
+ // Look up pending by txHash
+ const pending = await dbService.ratchet.getPendingOutboundByTxHash(log.transactionHash);
+
+ if (pending && pending.status === "submitted") {
+ // Finalize the pending record (clean up)
+ const finalized = await dbService.ratchet.finalizePendingOutbound(pending.id);
+
+ if (!finalized) {
+ return null;
+ }
+
+ // Update the message using pending.id (which IS the optimistic message ID)
+ const newId = generateMessageId(log.transactionHash, log);
+ const updates: Partial<Message> = {
+ id: newId,
+ status: "confirmed",
+ blockNumber: log.blockNumber,
+ blockTimestamp: Date.now(),
+ ciphertext: ciphertextHex,
+ nonce: Number(nonce),
+ dedupKey,
+ };
+
+ await dbService.updateMessage(pending.id, updates);
+ await dbService.upsertDedup({
+ key: dedupKey,
+ messageId: newId,
+ txHash: log.transactionHash,
+ blockNumber: log.blockNumber,
+ });
+ await dbService.markEventProcessed(
+ address,
+ "message",
+ event.txHash,
+ event.logIndex,
+ log.blockNumber
+ );
+
+ return {
+ messageUpdate: [pending.id, updates],
+ };
+ }
+
+ const existing = await dbService.getByDedupKey(dedupKey);
+ if (existing) {
+ await dbService.markEventProcessed(
+ address,
+ "message",
+ event.txHash,
+ event.logIndex,
+ log.blockNumber
+ );
+ }
+
+ return null;
+ }
+
+ // =========================================================================
+ // INCOMING MESSAGE - Use SDK's decryptMessage
+ // =========================================================================
+
+ // Get contact for signing key
+ const session = await dbService.ratchet.getRatchetSessionByAnyInboundTopic(topic);
+ if (!session) {
+ return null;
+ }
+
+ const contact = await dbService.getContact(session.contactAddress, address);
+ if (!contact?.signingPubKey) {
+ return null;
+ }
+
+ // Decrypt using SDK (handles session lookup, signature verification, topic promotion)
+ const decrypted = await verbethClient.decryptMessage(
+ topic,
+ ciphertextRaw,
+ contact.signingPubKey,
+ false // not our message
+ );
+
+ if (!decrypted) {
+ console.error(`[verbeth] decryption failed for message from ${contact.address.slice(0, 10)}...`);
+ return null;
+ }
+
+ // Create message record
+ const message: Message = {
+ id: generateMessageId(log.transactionHash, log),
+ topic: topic,
+ sender: contact.address,
+ recipient: address,
+ ciphertext: ciphertextHex,
+ timestamp: Number(timestamp) * 1000,
+ blockTimestamp: Date.now(),
+ blockNumber: log.blockNumber,
+ direction: "incoming",
+ decrypted: decrypted.plaintext,
+ read: false,
+ nonce: Number(nonce),
+ dedupKey,
+ type: "text",
+ ownerAddress: address,
+ status: "confirmed",
+ };
+
+ // Update contact with current topics (may have ratcheted)
+ const updatedContact: Contact = {
+ ...contact,
+ topicInbound: decrypted.session.currentTopicInbound,
+ topicOutbound: decrypted.session.currentTopicOutbound,
+ lastMessage: decrypted.plaintext,
+ lastTimestamp: Date.now(),
+ };
+
+ // Persist
+ const saved = await dbService.saveMessage(message);
+ if (saved) {
+ await dbService.upsertDedup({
+ key: dedupKey,
+ messageId: message.id,
+ txHash: log.transactionHash,
+ blockNumber: log.blockNumber,
+ });
+ await dbService.saveContact(updatedContact);
+ }
+
+ await dbService.markEventProcessed(
+ address,
+ "message",
+ event.txHash,
+ event.logIndex,
+ log.blockNumber
+ );
+
+ return saved ? { newMessage: message, contactUpdate: updatedContact } : null;
+ } catch (error) {
+ console.error("[verbeth] decryption failed:", error);
+ return null;
+ }
+}
diff --git a/apps/demo/src/services/RatchetDbService.ts b/apps/demo/src/services/RatchetDbService.ts
new file mode 100644
index 0000000..be7f3d8
--- /dev/null
+++ b/apps/demo/src/services/RatchetDbService.ts
@@ -0,0 +1,195 @@
+import { VerbEthDatabase } from "./schema.js";
+import type { PendingOutbound } from "../types.js";
+import { serializeRatchetSession, deserializeRatchetSession } from "../types.js";
+import type { RatchetSession } from "@verbeth/sdk";
+import { pruneExpiredSkippedKeys } from "@verbeth/sdk";
+
+export class RatchetDbService {
+ constructor(private readonly db: VerbEthDatabase) {}
+
+ /* ========================= RATCHET SESSIONS ========================= */
+
+ async saveRatchetSession(session: RatchetSession): Promise<void> {
+ const stored = serializeRatchetSession(session);
+ console.log(`💾 Saving ratchet session: ${stored.conversationId.slice(0, 10)}... (sendingMsgNumber=${stored.sendingMsgNumber}, topicEpoch=${stored.topicEpoch})`);
+ await this.db.ratchetSessions.put(stored);
+ }
+
+ async getRatchetSessionByConversation(conversationId: string): Promise<RatchetSession | null> {
+ const stored = await this.db.ratchetSessions.get(conversationId);
+ if (!stored) return null;
+
+ const session = deserializeRatchetSession(stored);
+ const pruned = pruneExpiredSkippedKeys(session);
+
+ if (pruned.skippedKeys.length !== session.skippedKeys.length) {
+ await this.saveRatchetSession(pruned);
+ }
+
+ return pruned;
+ }
+
+ /**
+ * Find session by any active inbound topic (current, next, or previous).
+ * Handles topic ratcheting grace period and pre-computed next topics.
+ */
+ async getRatchetSessionByAnyInboundTopic(topic: string): Promise<RatchetSession | null> {
+ const topicLower = topic.toLowerCase();
+
+ // try current topic first
+ let stored = await this.db.ratchetSessions
+ .where("currentTopicInbound")
+ .equals(topicLower)
+ .first();
+
+ if (stored) {
+ const session = deserializeRatchetSession(stored);
+ const pruned = pruneExpiredSkippedKeys(session);
+ if (pruned.skippedKeys.length !== session.skippedKeys.length) {
+ await this.saveRatchetSession(pruned);
+ }
+ return pruned;
+ }
+
+ // try next topic (pre-computed for incoming DH ratchet)
+ stored = await this.db.ratchetSessions
+ .where("nextTopicInbound")
+ .equals(topicLower)
+ .first();
+
+ if (stored) {
+ console.log(`🔄 Found session via nextTopicInbound for topic ${topicLower.slice(0, 10)}...`);
+ const session = deserializeRatchetSession(stored);
+ const pruned = pruneExpiredSkippedKeys(session);
+ if (pruned.skippedKeys.length !== session.skippedKeys.length) {
+ await this.saveRatchetSession(pruned);
+ }
+ return pruned;
+ }
+
+ // Try previous topic (check expiry)
+ stored = await this.db.ratchetSessions
+ .where("previousTopicInbound")
+ .equals(topicLower)
+ .first();
+
+ if (stored) {
+ const session = deserializeRatchetSession(stored);
+ const pruned = pruneExpiredSkippedKeys(session);
+ if (pruned.skippedKeys.length !== session.skippedKeys.length) {
+ await this.saveRatchetSession(pruned);
+ }
+ return pruned;
+ }
+
+ return null;
+ }
+
+ /**
+ * Get all active inbound topics for a user (for event filtering).
+ * Returns current, next, and non-expired previous topics.
+ */
+ async getAllActiveInboundTopics(myAddress: string): Promise<string[]> {
+ const sessions = await this.db.ratchetSessions
+ .where("myAddress")
+ .equals(myAddress.toLowerCase())
+ .toArray();
+
+ const topics: string[] = [];
+
+ for (const s of sessions) {
+ // Current topic
+ if (s.currentTopicInbound) {
+ topics.push(s.currentTopicInbound);
+ }
+ // Next topic (pre-computed for incoming DH ratchet)
+ if (s.nextTopicInbound) {
+ topics.push(s.nextTopicInbound);
+ }
+ // Previous topic (always include — crypto safety is enforced by ratchetDecrypt, not expiry)
+ if (s.previousTopicInbound) {
+ topics.push(s.previousTopicInbound);
+ }
+ }
+
+ return [...new Set(topics)];
+ }
+
+ async deleteRatchetSession(conversationId: string): Promise<void> {
+ await this.db.ratchetSessions.delete(conversationId);
+ console.log(`🗑️ Deleted ratchet session: ${conversationId.slice(0, 10)}...`);
+ }
+
+ /* ========================= PENDING OUTBOUND ========================= */
+
+ async savePendingOutbound(pending: PendingOutbound): Promise<void> {
+ console.log(`📤 Saving pending outbound: ${pending.id.slice(0, 10)}...`);
+ await this.db.pendingOutbound.put(pending);
+ }
+
+ async getPendingOutbound(id: string): Promise<PendingOutbound | null> {
+ return await this.db.pendingOutbound.get(id) ?? null;
+ }
+
+ async getPendingOutboundByTxHash(txHash: string): Promise<PendingOutbound | null> {
+ return await this.db.pendingOutbound
+ .where("txHash")
+ .equals(txHash.toLowerCase())
+ .first() ?? null;
+ }
+
+ async getPendingOutboundByConversation(conversationId: string): Promise<PendingOutbound[]> {
+ return await this.db.pendingOutbound
+ .where("conversationId")
+ .equals(conversationId)
+ .filter((p) => p.status === "preparing" || p.status === "submitted")
+ .toArray();
+ }
+
+ async updatePendingOutboundStatus(
+ id: string,
+ status: PendingOutbound["status"],
+ txHash?: string
+ ): Promise<void> {
+ const updates: Partial = { status };
+ if (txHash) {
+ updates.txHash = txHash.toLowerCase();
+ }
+ await this.db.pendingOutbound.update(id, updates);
+ console.log(`📝 Updated pending outbound ${id.slice(0, 10)}... status to: ${status}`);
+ }
+
+ async finalizePendingOutbound(id: string): Promise<{ plaintext: string } | null> {
+ const pending = await this.db.pendingOutbound.get(id);
+ if (!pending) {
+ console.warn(`⚠️ Pending outbound ${id} not found for finalization`);
+ return null;
+ }
+ await this.db.pendingOutbound.delete(id);
+ console.log(`✅ Finalized pending outbound ${id.slice(0, 10)}...`);
+ return { plaintext: pending.plaintext };
+ }
+
+ async deletePendingOutbound(id: string): Promise<void> {
+ await this.db.pendingOutbound.delete(id);
+ console.log(`🗑️ Deleted pending outbound: ${id.slice(0, 10)}...`);
+ }
+
+ async cleanupStalePendingOutbound(maxAgeMs: number = 24 * 60 * 60 * 1000): Promise<number> {
+ const cutoff = Date.now() - maxAgeMs;
+ const stale = await this.db.pendingOutbound
+ .where("createdAt")
+ .below(cutoff)
+ .toArray();
+
+ for (const p of stale) {
+ await this.db.pendingOutbound.delete(p.id);
+ }
+
+ if (stale.length > 0) {
+ console.log(`🧹 Cleaned up ${stale.length} stale pending outbound records`);
+ }
+
+ return stale.length;
+ }
+}
\ No newline at end of file
diff --git a/apps/demo/src/services/StorageAdapters.ts b/apps/demo/src/services/StorageAdapters.ts
new file mode 100644
index 0000000..e0d3a72
--- /dev/null
+++ b/apps/demo/src/services/StorageAdapters.ts
@@ -0,0 +1,150 @@
+// src/services/StorageAdapters.ts
+
+/**
+ * Storage Adapters for VerbethClient SDK.
+ *
+ * These adapters implement the SDK's SessionStore and PendingStore interfaces,
+ * connecting the VerbethClient to the app's IndexedDB persistence layer.
+ *
+ * Usage:
+ * ```typescript
+ * import { sessionStore, pendingStore } from './StorageAdapters';
+ *
+ * // Pass these adapters into the createVerbethClient factory:
+ * const client = createVerbethClient({ ...config, sessionStore, pendingStore });
+ * ```
+ */
+
+import type {
+ SessionStore,
+ PendingStore,
+ PendingMessage,
+ PendingStatus,
+ RatchetSession,
+} from '@verbeth/sdk';
+import { dbService } from './DbService.js';
+import type { PendingOutbound } from '../types.js';
+
+// =============================================================================
+// Session Store Adapter
+// =============================================================================
+
+/**
+ * Adapts DbService to the SDK's SessionStore interface.
+ * Handles session persistence with automatic topic lookup.
+ */
+class SessionStoreAdapter implements SessionStore {
+ /**
+ * Get session by conversation ID (primary key).
+ */
+ async get(conversationId: string): Promise<RatchetSession | null> {
+ return dbService.ratchet.getRatchetSessionByConversation(conversationId);
+ }
+
+ /**
+ * Find session by any active inbound topic.
+ * Checks current, next, and previous (within grace period) topics.
+ */
+ async getByInboundTopic(topic: string): Promise<RatchetSession | null> {
+ return dbService.ratchet.getRatchetSessionByAnyInboundTopic(topic);
+ }
+
+ /**
+ * Persist session state.
+ */
+ async save(session: RatchetSession): Promise<void> {
+ return dbService.ratchet.saveRatchetSession(session);
+ }
+}
+
+// =============================================================================
+// Pending Store Adapter
+// =============================================================================
+
+/**
+ * Adapts DbService to the SDK's PendingStore interface.
+ * Handles pending outbound message lifecycle.
+ */
+class PendingStoreAdapter implements PendingStore {
+ /**
+ * Save a new pending message.
+ */
+ async save(pending: PendingMessage): Promise<void> {
+ const dbPending: PendingOutbound = {
+ id: pending.id,
+ conversationId: pending.conversationId,
+ topic: pending.topic,
+ payloadHex: pending.payloadHex,
+ plaintext: pending.plaintext,
+ sessionStateBefore: pending.sessionStateBefore,
+ sessionStateAfter: pending.sessionStateAfter,
+ createdAt: pending.createdAt,
+ txHash: pending.txHash,
+ status: pending.status,
+ };
+ return dbService.ratchet.savePendingOutbound(dbPending);
+ }
+
+ /**
+ * Get pending message by ID.
+ */
+ async get(id: string): Promise<PendingMessage | null> {
+ const pending = await dbService.ratchet.getPendingOutbound(id);
+ return pending ? this.toPendingMessage(pending) : null;
+ }
+
+ /**
+ * Get pending message by transaction hash.
+ */
+ async getByTxHash(txHash: string): Promise<PendingMessage | null> {
+ const pending = await dbService.ratchet.getPendingOutboundByTxHash(txHash);
+ return pending ? this.toPendingMessage(pending) : null;
+ }
+
+ /**
+ * Update pending message status.
+ */
+ async updateStatus(id: string, status: PendingStatus, txHash?: string): Promise<void> {
+ return dbService.ratchet.updatePendingOutboundStatus(id, status, txHash);
+ }
+
+ /**
+ * Delete pending message.
+ */
+ async delete(id: string): Promise<void> {
+ return dbService.ratchet.deletePendingOutbound(id);
+ }
+
+ /**
+ * Get all pending messages for a conversation.
+ */
+ async getByConversation(conversationId: string): Promise<PendingMessage[]> {
+ const pending = await dbService.ratchet.getPendingOutboundByConversation(conversationId);
+ return pending.map(p => this.toPendingMessage(p));
+ }
+
+ /**
+ * Convert DB format to SDK format.
+ */
+ private toPendingMessage(pending: PendingOutbound): PendingMessage {
+ return {
+ id: pending.id,
+ conversationId: pending.conversationId,
+ topic: pending.topic,
+ payloadHex: pending.payloadHex,
+ plaintext: pending.plaintext,
+ sessionStateBefore: pending.sessionStateBefore,
+ sessionStateAfter: pending.sessionStateAfter,
+ createdAt: pending.createdAt,
+ txHash: pending.txHash,
+ status: pending.status as PendingStatus,
+ };
+ }
+}
+
+// =============================================================================
+// Singleton Instances (exported for createVerbethClient factory)
+// =============================================================================
+
+export const sessionStore = new SessionStoreAdapter();
+export const pendingStore = new PendingStoreAdapter();
\ No newline at end of file
diff --git a/apps/demo/src/services/index.ts b/apps/demo/src/services/index.ts
new file mode 100644
index 0000000..ed6a951
--- /dev/null
+++ b/apps/demo/src/services/index.ts
@@ -0,0 +1,14 @@
+// src/services/index.ts
+
+/**
+ * Services for VerbEth messaging app.
+ */
+
+export { dbService } from './DbService.js';
+export { configureClientStorage } from './StorageAdapters.js';
+export {
+ processHandshakeEvent,
+ processHandshakeResponseEvent,
+ processMessageEvent,
+ generateMessageId,
+} from './EventProcessorService.js';
\ No newline at end of file
diff --git a/apps/demo/src/services/safeAccount.ts b/apps/demo/src/services/safeAccount.ts
new file mode 100644
index 0000000..26b0add
--- /dev/null
+++ b/apps/demo/src/services/safeAccount.ts
@@ -0,0 +1,304 @@
+// src/services/safeAccount.ts
+import SafeDefault from '@safe-global/protocol-kit'
+import SafeApiKitDefault from '@safe-global/api-kit'
+import { getAddress, Interface } from 'ethers'
+import { getModuleSetupHelper, getVerbethAddress } from '@verbeth/sdk'
+import { SAFE_MODULE_ADDRESS } from '../types.js'
+
+// Handle ESM/CJS interop - safe libs export default differently
+const Safe = (SafeDefault as any).default ?? SafeDefault
+const SafeApiKit = (SafeApiKitDefault as any).default ?? SafeApiKitDefault
+
+const SAFE_API_KEY = import.meta.env.VITE_SAFE_API_KEY as string
+const VERBETH_SINGLETON_ADDR = getVerbethAddress()
+
+export interface SessionConfig {
+ sessionSigner: string
+ target: string
+}
+
+export async function predictVerbEthSafeAddress(params: {
+ chainId: number
+ ownerAddress: `0x${string}`
+ sessionSignerAddr: string
+ providerEip1193: any
+ contractNetworks?: any
+}): Promise<`0x${string}`> {
+ const { chainId, ownerAddress, sessionSignerAddr, providerEip1193, contractNetworks } = params
+
+ const sessionConfig: SessionConfig = {
+ sessionSigner: sessionSignerAddr,
+ target: VERBETH_SINGLETON_ADDR,
+ }
+
+ const safeAccountConfig = buildSafeAccountConfig(
+ getAddress(ownerAddress),
+ chainId,
+ sessionConfig
+ )
+
+ const predictedSafe = {
+ safeAccountConfig,
+ safeDeploymentConfig: { saltNonce: '0' },
+ }
+
+ const maybeNetworks = contractNetworks ? { contractNetworks } : {}
+
+ const tempKit = await Safe.init({
+ provider: providerEip1193,
+ signer: ownerAddress,
+ predictedSafe,
+ ...maybeNetworks,
+ })
+
+ return (await tempKit.getAddress()) as `0x${string}`
+}
+
+export async function checkSafeOnChainStatus(params: {
+ safeAddress: `0x${string}`
+ providerEip1193: any
+ ownerAddress: `0x${string}`
+ contractNetworks?: any
+}): Promise<{
+ isDeployed: boolean
+ moduleEnabled: boolean
+}> {
+ const { safeAddress, providerEip1193, ownerAddress, contractNetworks } = params
+ const maybeNetworks = contractNetworks ? { contractNetworks } : {}
+
+ try {
+ const protocolKit = await Safe.init({
+ provider: providerEip1193,
+ signer: ownerAddress,
+ safeAddress,
+ ...maybeNetworks,
+ })
+
+ const isDeployed = await protocolKit.isSafeDeployed()
+ if (!isDeployed) {
+ return { isDeployed: false, moduleEnabled: false }
+ }
+
+ const moduleEnabled = await protocolKit.isModuleEnabled(SAFE_MODULE_ADDRESS)
+ return { isDeployed: true, moduleEnabled }
+ } catch {
+ return { isDeployed: false, moduleEnabled: false }
+ }
+}
+
+export async function getOrCreateSafeForOwner(params: {
+ chainId: number
+ ownerAddress: `0x${string}`
+ providerEip1193: any
+ ethersSigner: any
+ deployIfMissing?: boolean
+ sessionConfig: SessionConfig
+ contractNetworks?: any
+ /**
+ * Only set to true for "custom" mode (import existing Safe)
+ * Default: false (deterministic VerbEth Safe only)
+ */
+ useApiLookup?: boolean
+}) {
+ const {
+ chainId,
+ ownerAddress: rawOwnerAddress,
+ providerEip1193,
+ ethersSigner,
+ deployIfMissing = false,
+ sessionConfig,
+ contractNetworks,
+ useApiLookup = false,
+ } = params
+
+ const ownerAddress = getAddress(rawOwnerAddress) as `0x${string}`
+ const maybeNetworks = contractNetworks ? { contractNetworks } : {}
+
+ // 1) ALWAYS build deterministic config first
+ const safeAccountConfig = buildSafeAccountConfig(ownerAddress, chainId, sessionConfig)
+ const predictedSafe = {
+ safeAccountConfig,
+ safeDeploymentConfig: { saltNonce: '0' },
+ }
+
+ // 2) Compute deterministic VerbEth Safe address
+ const tempKit = await Safe.init({
+ provider: providerEip1193,
+ signer: ownerAddress,
+ predictedSafe,
+ ...maybeNetworks,
+ })
+ const verbEthSafeAddress = (await tempKit.getAddress()) as `0x${string}`
+
+ // 3) Check if OUR deterministic Safe exists on-chain
+ const isDeployedOnChain = await tempKit.isSafeDeployed()
+
+ if (isDeployedOnChain) {
+ // Our VerbEth Safe exists - use it
+ const protocolKit = await Safe.init({
+ provider: providerEip1193,
+ signer: ownerAddress,
+ safeAddress: verbEthSafeAddress,
+ ...maybeNetworks,
+ })
+
+ const moduleEnabled = await protocolKit.isModuleEnabled(SAFE_MODULE_ADDRESS)
+
+ console.log(`Found VerbEth Safe on-chain at ${verbEthSafeAddress}`)
+ return {
+ safeAddress: verbEthSafeAddress,
+ protocolKit,
+ isDeployed: true,
+ moduleEnabled,
+ sessionConfigured: moduleEnabled, // If module enabled via helper, session is configured
+ }
+ }
+
+ // 4) OPTIONAL: API lookup for custom mode only
+ if (useApiLookup) {
+ const apiKit = new SafeApiKit({
+ chainId: BigInt(chainId),
+ ...(SAFE_API_KEY ? { apiKey: SAFE_API_KEY } : {}),
+ })
+
+ try {
+ const { safes } = await apiKit.getSafesByOwner(ownerAddress)
+ if (safes?.length) {
+ console.log(`API found ${safes.length} Safe(s) for owner (custom mode)`)
+ // For custom mode, caller would handle Safe selection UI
+ // This is placeholder for "coming soon" feature
+ }
+ } catch (e: any) {
+ console.warn(`Safe API lookup failed: ${e?.message}`)
+ }
+ }
+
+ // 5) VerbEth Safe not deployed yet - return predicted address
+ if (!deployIfMissing) {
+ return {
+ safeAddress: verbEthSafeAddress,
+ protocolKit: tempKit,
+ isDeployed: false,
+ moduleEnabled: false,
+ sessionConfigured: false,
+ }
+ }
+
+ // 6) Deploy the VerbEth Safe
+ console.log(`🚀 Deploying VerbEth Safe with module + session configured...`)
+
+ const deploymentTx = await tempKit.createSafeDeploymentTransaction()
+
+ const txResp = await ethersSigner.sendTransaction({
+ to: deploymentTx.to,
+ data: deploymentTx.data,
+ value: BigInt(deploymentTx.value),
+ })
+ const receipt = await txResp.wait()
+
+ if (receipt?.status !== 1 && receipt?.status !== 1n) {
+ throw new Error('Safe deployment reverted')
+ }
+
+ console.log(`✅ VerbEth Safe deployed at ${verbEthSafeAddress}`)
+
+ const helperAddress = getModuleSetupHelper(chainId)
+ if (helperAddress && sessionConfig) {
+ console.log(` Module enabled: true (via helper)`)
+ console.log(` Session configured: true (via helper)`)
+
+ return {
+ safeAddress: verbEthSafeAddress,
+ protocolKit: null,
+ isDeployed: true,
+ moduleEnabled: true,
+ sessionConfigured: true,
+ }
+ }
+
+ // Fallback for chains without helper
+ await new Promise((resolve) => setTimeout(resolve, 2000))
+
+ const protocolKit = await Safe.init({
+ provider: providerEip1193,
+ signer: ownerAddress,
+ safeAddress: verbEthSafeAddress,
+ ...maybeNetworks,
+ })
+
+ const moduleEnabled = await protocolKit.isModuleEnabled(SAFE_MODULE_ADDRESS)
+
+ return {
+ safeAddress: verbEthSafeAddress,
+ protocolKit,
+ isDeployed: true,
+ moduleEnabled,
+ sessionConfigured: false,
+ }
+}
+
+/**
+ * Build SafeAccountConfig with module + session setup callback.
+ * This MUST be used consistently for both prediction and deployment
+ * to ensure the same Safe address.
+ */
+function buildSafeAccountConfig(
+ ownerAddress: string,
+ chainId: number,
+ sessionConfig: SessionConfig
+): any {
+ const baseConfig = {
+ owners: [ownerAddress],
+ threshold: 1,
+ }
+
+ const helperAddress = getModuleSetupHelper(chainId)
+ if (!helperAddress) {
+ console.warn(`⚠️ ModuleSetupHelper not deployed on chain ${chainId}, using base config`)
+ return baseConfig
+ }
+
+ // Encode enableModuleWithSession call for the helper contract
+ const helperInterface = new Interface([
+ 'function enableModuleWithSession(address module, address sessionSigner, uint256 expiry, address target)',
+ ])
+
+ const NO_EXPIRY = '0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
+
+ const setupData = helperInterface.encodeFunctionData('enableModuleWithSession', [
+ SAFE_MODULE_ADDRESS,
+ sessionConfig.sessionSigner,
+ NO_EXPIRY,
+ sessionConfig.target,
+ ])
+
+ return {
+ ...baseConfig,
+ to: helperAddress,
+ data: setupData,
+ }
+}
+
+export function isHelperAvailable(chainId: number): boolean {
+ return !!getModuleSetupHelper(chainId)
+}
+
+/**
+ * Enable module on an already-deployed Safe (separate tx, for legacy Safes)
+ */
+export async function ensureModuleEnabled(protocolKit: any): Promise<boolean> {
+ const moduleAddress = getAddress(SAFE_MODULE_ADDRESS)
+ const enabled = await protocolKit.isModuleEnabled(moduleAddress)
+ if (enabled) {
+ console.log(`Module already enabled`)
+ return true
+ }
+
+ console.log(`Enabling module ${moduleAddress}...`)
+ const enableTx = await protocolKit.createEnableModuleTx(moduleAddress)
+ const signed = await protocolKit.signTransaction(enableTx)
+ const exec = await protocolKit.executeTransaction(signed)
+ await exec.transactionResponse?.wait()
+ console.log(`✅ Module enabled`)
+ return true
+}
\ No newline at end of file
diff --git a/apps/demo/src/services/schema.ts b/apps/demo/src/services/schema.ts
index 1172fdd..e41f27f 100644
--- a/apps/demo/src/services/schema.ts
+++ b/apps/demo/src/services/schema.ts
@@ -1,3 +1,5 @@
+// src/services/schema.ts
+
import { Dexie, Table } from "dexie";
import type {
StoredIdentity,
@@ -5,6 +7,8 @@ import type {
Message,
PendingHandshake,
AppSettings,
+ StoredRatchetSession,
+ PendingOutbound,
} from "../types.js";
export class VerbEthDatabase extends Dexie {
@@ -13,8 +17,11 @@ export class VerbEthDatabase extends Dexie {
 messages!: Table<Message, string>;
 pendingHandshakes!: Table<PendingHandshake, string>;
 settings!: Table<AppSettings, string>;
- dedup!: Dexie.Table<{ key: string; messageId: string; txHash: string; blockNumber: number }, string>;
-
+ dedup!: Table<{ key: string; messageId: string; txHash: string; blockNumber: number }, string>;
+
+ // ratchet tables
+ ratchetSessions!: Table<StoredRatchetSession, string>;
+ pendingOutbound!: Table<PendingOutbound, string>;
constructor() {
super("VerbEthDB");
@@ -22,13 +29,14 @@ export class VerbEthDatabase extends Dexie {
this.version(1).stores({
identity: "address",
contacts:
- "[address+ownerAddress], ownerAddress, lastTimestamp, status, topicOutbound, topicInbound",
+ "[address+ownerAddress], ownerAddress, lastTimestamp, status, topicOutbound, topicInbound, emitterAddress, conversationId",
messages:
"id, ownerAddress, sender, recipient, topic, nonce, timestamp, blockTimestamp, read, status, [ownerAddress+sender+status], [ownerAddress+sender+topic+nonce+status]",
- dedup: "key, messageId, txHash, blockNumber", // key is PRIMARY KEY
- pendingHandshakes: "id, ownerAddress, sender, timestamp, verified",
+ dedup: "key, messageId, txHash, blockNumber",
+ pendingHandshakes: "id, ownerAddress, sender, timestamp, verified, emitterAddress",
settings: "name",
+ ratchetSessions: "conversationId, topicInbound, topicOutbound, currentTopicInbound, nextTopicInbound, previousTopicInbound, myAddress, contactAddress",
+ pendingOutbound: "id, conversationId, txHash, status, createdAt",
});
-
}
-}
+}
\ No newline at end of file
diff --git a/apps/demo/src/types.ts b/apps/demo/src/types.ts
index a99528e..dc1221a 100644
--- a/apps/demo/src/types.ts
+++ b/apps/demo/src/types.ts
@@ -1,17 +1,13 @@
-import type { IdentityKeyPair, IdentityProof } from '@verbeth/sdk';
-import { keccak256, toUtf8Bytes } from 'ethers';
+import type { IdentityKeyPair, IdentityProof, RatchetSession as SDKRatchetSession, } from '@verbeth/sdk';
+import { getVerbethAddress, getCreationBlock, SCAN_DEFAULTS } from '@verbeth/sdk';
+import { keccak256, toUtf8Bytes, hexlify, getBytes } from 'ethers';
/* ------------------------------- CONSTANTS -------------------------------- */
-export const LOGCHAIN_SINGLETON_ADDR =
- '0x41a3eaC0d858028E9228d1E2092e6178fc81c4f0';
-export const CONTRACT_CREATION_BLOCK = 36_053_269;
-export const INITIAL_SCAN_BLOCKS = 1_000;
-export const MAX_RETRIES = 3;
-export const MAX_RANGE_PROVIDER = 2_000;
-export const CHUNK_SIZE = 2_000;
-export const REAL_TIME_BUFFER = 3;
-
-export const SAFE_TX_SERVICE_URL = import.meta.env.VITE_SAFE_TX_SERVICE_URL as string
+// Re-export from SDK for backward compatibility
+export const VERBETH_SINGLETON_ADDR = getVerbethAddress();
+export const CONTRACT_CREATION_BLOCK = getCreationBlock();
+export const { INITIAL_SCAN_BLOCKS, MAX_RETRIES, MAX_RANGE_PROVIDER, CHUNK_SIZE, REAL_TIME_BUFFER } = SCAN_DEFAULTS;
+
export const SAFE_MODULE_ADDRESS = import.meta.env.VITE_SAFE_SESSION_MODULE as `0x${string}`
/* --------------------------- EVENT SIGNATURES ---------------------------- */
@@ -29,19 +25,28 @@ export const EVENT_SIGNATURES = {
/* ------------------------------- ENTITIES -------------------------------- */
export interface Contact {
- address: string;
- ownerAddress: string;
- identityPubKey?: Uint8Array;
- signingPubKey?: Uint8Array;
- ephemeralKey?: Uint8Array;
- topicOutbound?: string;
+ address: string; // Contact's address (EOA or Safe)
+ emitterAddress?: string;
+ ownerAddress: string;
+ name?: string;
+ identityPubKey?: Uint8Array;
+ signingPubKey?: Uint8Array;
+ topicOutbound?: string;
topicInbound?: string;
- status: 'none' | 'handshake_sent' | 'established';
+ status: "none" | "handshake_sent" | "established";
lastMessage?: string;
lastTimestamp?: number;
+ unreadCount?: number;
note?: string;
+
+ conversationId?: string;
+ previousConversationId?: string;
+ handshakeEphemeralSecret?: string;
+ handshakeKemSecret?: string; // ML-KEM secret for PQ-hybrid (hex)
+ sessionResetAt?: number;
}
+
export interface Message {
id: string; // Unique ID (txHash-logIndex or dedupKey)
ownerAddress: string; // which account own this contact
@@ -60,28 +65,91 @@ export interface Message {
type: 'text' | 'system';
status: 'pending' | 'confirmed' | 'failed';
verified?: boolean;
+ isLost?: boolean;
}
export interface PendingHandshake {
id: string;
- ownerAddress: string;
+ ownerAddress: string;
+ emitterAddress?: string;
sender: string;
identityPubKey: Uint8Array;
signingPubKey: Uint8Array;
- ephemeralPubKey: Uint8Array;
+ ephemeralPubKey: Uint8Array; // X25519 part only (32 bytes) - for backward compat
+ ephemeralPubKeyFull: Uint8Array; // Full key (may be 1216 bytes with KEM)
message: string;
timestamp: number;
blockNumber: number;
verified: boolean;
+ isExistingContact?: boolean;
+ previousConversationId?: string;
}
+export type ExecutionMode = 'classic' | 'fast' | 'custom';
+
export interface StoredIdentity {
- address: string; // primary key
- keyPair: IdentityKeyPair; // X25519 + Ed25519 keys
- derivedAt: number;
- proof: IdentityProof;
+ address: string;
+ keyPair: IdentityKeyPair;
+ derivedAt: number;
+ proof?: IdentityProof;
+ sessionPrivateKey?: string; // Derived deterministically from the same seed signature as identity keys.
+ sessionAddress?: string;
+ executionMode?: ExecutionMode;
+ emitterAddress?: string; // EOA for classic, Safe for fast/custom
}
+// Extends SDK RatchetSession with serialization-friendly format.
+export interface StoredRatchetSession {
+ conversationId: string;
+ topicOutbound: string;
+ topicInbound: string;
+ myAddress: string;
+ contactAddress: string;
+ rootKey: string;
+ dhMySecretKey: string;
+ dhMyPublicKey: string;
+ dhTheirPublicKey: string;
+ sendingChainKey: string | null;
+ sendingMsgNumber: number;
+ receivingChainKey: string | null;
+ receivingMsgNumber: number;
+ previousChainLength: number;
+ skippedKeys: StoredSkippedKey[];
+ createdAt: number;
+ updatedAt: number;
+ epoch: number;
+
+ // === Topic Ratcheting ===
+ currentTopicOutbound: string;
+ currentTopicInbound: string;
+ nextTopicOutbound?: string;
+ nextTopicInbound?: string;
+ previousTopicInbound?: string;
+ previousTopicExpiry?: number;
+ topicEpoch: number;
+}
+
+export interface StoredSkippedKey {
+ dhPubKeyHex: string;
+ msgNumber: number;
+ messageKey: string;
+ createdAt: number;
+}
+
+export interface PendingOutbound {
+ id: string;
+ conversationId: string;
+ topic: string;
+ payloadHex: string;
+ plaintext: string;
+ sessionStateBefore: string;
+ sessionStateAfter: string;
+ createdAt: number;
+ txHash: string | null;
+ status: 'preparing' | 'submitted' | 'confirmed' | 'failed';
+}
+
+
export interface AppSettings {
name: string;
value: any;
@@ -114,12 +182,70 @@ export interface ScanChunk {
events: any[];
}
+export interface BlockRange {
+ fromBlock: number;
+ toBlock: number;
+}
+
+export interface PendingRange extends BlockRange {
+ attempts: number;
+ nextRetryAt: number;
+ lastError?: string;
+}
+
+export type PersistedSyncStateStatus =
+ | "idle"
+ | "catching_up"
+ | "degraded"
+ | "synced";
+
+export interface PersistedSyncState {
+ pendingRanges: PendingRange[];
+ status: PersistedSyncStateStatus;
+ lastError?: string;
+ lastRetryAt?: number;
+ targetTip?: number;
+ updatedAt: number;
+}
+
+export type ListenerSyncMode =
+ | "idle"
+ | "catching_up"
+ | "retrying"
+ | "degraded"
+ | "synced";
+
+export interface ListenerSyncStatus {
+ mode: ListenerSyncMode;
+ pendingRanges: number;
+ lastError?: string;
+ isComplete: boolean;
+}
+
export interface ProcessedEvent {
logKey: string;
eventType: EventType;
rawLog: any;
+ txHash: string;
+ logIndex: number;
blockNumber: number;
timestamp: number;
+ matchedContactAddress?: string;
+}
+
+export type ListenerHealthLevel = "ok" | "warning";
+
+export type ListenerHealthReason =
+ | "rate_limit"
+ | "backlog"
+ | "tip_lag"
+ | "ws_error";
+
+export interface ListenerHealthStatus {
+ level: ListenerHealthLevel;
+ reasons: ListenerHealthReason[];
+ message: string;
+ updatedAt: number;
}
export interface MessageListenerResult {
@@ -127,9 +253,11 @@ export interface MessageListenerResult {
isLoadingMore: boolean;
canLoadMore: boolean;
syncProgress: ScanProgress | null;
+ syncStatus: ListenerSyncStatus;
 loadMoreHistory: () => Promise<void>;
lastKnownBlock: number | null;
oldestScannedBlock: number | null;
+ health: ListenerHealthStatus;
}
export interface MessageProcessorResult {
@@ -137,9 +265,105 @@ export interface MessageProcessorResult {
pendingHandshakes: PendingHandshake[];
contacts: Contact[];
addMessage: (message: Message) => void;
+ updateMessageStatus: (id: string, status: Message["status"], error?: string) => Promise<void>;
+ removeMessage: (id: string) => Promise<void>;
removePendingHandshake: (id: string) => void;
updateContact: (contact: Contact) => void;
 processEvents: (events: ProcessedEvent[]) => Promise<void>;
+ markMessagesLost: (contactAddress: string, afterTimestamp: number) => Promise<void>;
+}
+
+
+export const generateTempMessageId = () => `temp-${Date.now()}-${Math.random()}`;
+
+// =============================================================================
+// SERIALIZATION HELPERS
+// =============================================================================
+
+/**
+ * Convert SDK RatchetSession to storable format.
+ */
+export function serializeRatchetSession(session: SDKRatchetSession): StoredRatchetSession {
+ return {
+ conversationId: session.conversationId,
+ topicOutbound: session.topicOutbound.toLowerCase() as `0x${string}`,
+ topicInbound: session.topicInbound.toLowerCase() as `0x${string}`,
+ myAddress: session.myAddress.toLowerCase(),
+ contactAddress: session.contactAddress.toLowerCase(),
+
+ rootKey: hexlify(session.rootKey),
+ dhMySecretKey: hexlify(session.dhMySecretKey),
+ dhMyPublicKey: hexlify(session.dhMyPublicKey),
+ dhTheirPublicKey: hexlify(session.dhTheirPublicKey),
+
+ sendingChainKey: session.sendingChainKey ? hexlify(session.sendingChainKey) : null,
+ sendingMsgNumber: session.sendingMsgNumber,
+ receivingChainKey: session.receivingChainKey ? hexlify(session.receivingChainKey) : null,
+ receivingMsgNumber: session.receivingMsgNumber,
+
+ previousChainLength: session.previousChainLength,
+ skippedKeys: session.skippedKeys.map((sk: any) => ({
+ dhPubKeyHex: sk.dhPubKeyHex,
+ msgNumber: sk.msgNumber,
+ messageKey: hexlify(sk.messageKey),
+ createdAt: sk.createdAt,
+ })),
+
+ createdAt: session.createdAt,
+ updatedAt: session.updatedAt,
+ epoch: session.epoch,
+
+ // Topic Ratcheting
+ currentTopicOutbound: session.currentTopicOutbound.toLowerCase(),
+ currentTopicInbound: session.currentTopicInbound.toLowerCase(),
+ nextTopicOutbound: session.nextTopicOutbound?.toLowerCase(),
+ nextTopicInbound: session.nextTopicInbound?.toLowerCase(),
+ previousTopicInbound: session.previousTopicInbound?.toLowerCase(),
+ previousTopicExpiry: session.previousTopicExpiry,
+ topicEpoch: session.topicEpoch,
+ };
}
-export const generateTempMessageId = () => `temp-${Date.now()}-${Math.random()}`;
\ No newline at end of file
+/**
+ * Convert stored format back to SDK RatchetSession.
+ */
+export function deserializeRatchetSession(stored: StoredRatchetSession): SDKRatchetSession {
+ return {
+ conversationId: stored.conversationId,
+ topicOutbound: stored.topicOutbound as `0x${string}`,
+ topicInbound: stored.topicInbound as `0x${string}`,
+ myAddress: stored.myAddress,
+ contactAddress: stored.contactAddress,
+
+ rootKey: getBytes(stored.rootKey),
+ dhMySecretKey: getBytes(stored.dhMySecretKey),
+ dhMyPublicKey: getBytes(stored.dhMyPublicKey),
+ dhTheirPublicKey: getBytes(stored.dhTheirPublicKey),
+
+ sendingChainKey: stored.sendingChainKey ? getBytes(stored.sendingChainKey) : null,
+ sendingMsgNumber: stored.sendingMsgNumber,
+ receivingChainKey: stored.receivingChainKey ? getBytes(stored.receivingChainKey) : null,
+ receivingMsgNumber: stored.receivingMsgNumber,
+
+ previousChainLength: stored.previousChainLength,
+ skippedKeys: stored.skippedKeys.map((sk) => ({
+ dhPubKeyHex: sk.dhPubKeyHex,
+ msgNumber: sk.msgNumber,
+ messageKey: getBytes(sk.messageKey),
+ createdAt: sk.createdAt,
+ })),
+
+ createdAt: stored.createdAt,
+ updatedAt: stored.updatedAt,
+ epoch: stored.epoch,
+
+ // Topic Ratcheting
+ currentTopicOutbound: stored.currentTopicOutbound as `0x${string}`,
+ currentTopicInbound: stored.currentTopicInbound as `0x${string}`,
+ nextTopicOutbound: stored.nextTopicOutbound as `0x${string}` | undefined,
+ nextTopicInbound: stored.nextTopicInbound as `0x${string}` | undefined,
+ previousTopicInbound: stored.previousTopicInbound as `0x${string}` | undefined,
+ previousTopicExpiry: stored.previousTopicExpiry,
+ topicEpoch: stored.topicEpoch,
+ };
+}
diff --git a/apps/demo/vite.config.ts b/apps/demo/vite.config.ts
index 5eee74c..072ce19 100644
--- a/apps/demo/vite.config.ts
+++ b/apps/demo/vite.config.ts
@@ -3,7 +3,11 @@ import react from "@vitejs/plugin-react";
export default defineConfig({
plugins: [react()],
-
+
+ define: {
+ global: "globalThis",
+ },
+
server: {
port: 3000,
},
diff --git a/apps/docs/.gitignore b/apps/docs/.gitignore
new file mode 100644
index 0000000..b2d6de3
--- /dev/null
+++ b/apps/docs/.gitignore
@@ -0,0 +1,20 @@
+# Dependencies
+/node_modules
+
+# Production
+/build
+
+# Generated files
+.docusaurus
+.cache-loader
+
+# Misc
+.DS_Store
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
diff --git a/apps/docs/README.md b/apps/docs/README.md
new file mode 100644
index 0000000..b28211a
--- /dev/null
+++ b/apps/docs/README.md
@@ -0,0 +1,41 @@
+# Website
+
+This website is built using [Docusaurus](https://docusaurus.io/), a modern static website generator.
+
+## Installation
+
+```bash
+yarn
+```
+
+## Local Development
+
+```bash
+yarn start
+```
+
+This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.
+
+## Build
+
+```bash
+yarn build
+```
+
+This command generates static content into the `build` directory and can be served using any static content hosting service.
+
+## Deployment
+
+Using SSH:
+
+```bash
+USE_SSH=true yarn deploy
+```
+
+Not using SSH:
+
+```bash
+GIT_USER=<Your GitHub username> yarn deploy
+```
+
+If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch.
diff --git a/apps/docs/docs/concepts/_category_.json b/apps/docs/docs/concepts/_category_.json
new file mode 100644
index 0000000..74b831d
--- /dev/null
+++ b/apps/docs/docs/concepts/_category_.json
@@ -0,0 +1,5 @@
+{
+ "label": "Concepts",
+ "position": 2,
+ "collapsed": false
+}
diff --git a/apps/docs/docs/concepts/handshake.md b/apps/docs/docs/concepts/handshake.md
new file mode 100644
index 0000000..2eafdf7
--- /dev/null
+++ b/apps/docs/docs/concepts/handshake.md
@@ -0,0 +1,171 @@
+---
+sidebar_position: 3
+title: Handshake
+---
+
+# Handshake
+
+Verbeth uses a hybrid key exchange combining X25519 (classical) and ML-KEM-768 (post-quantum) to establish encrypted channels.
+
+## Overview
+
+Unlike Signal's X3DH which uses prekeys stored on a server, Verbeth uses ephemeral-only key exchange:
+
+| X3DH (Signal) | Verbeth |
+|---------------|---------|
+| Prekey server required | No server infrastructure |
+| Offline initiation | Initiator must wait for response |
+| Multiple DH operations | Ephemeral + KEM hybrid |
+
+The tradeoff: Verbeth requires the responder to come online before the channel is established, but eliminates server trust entirely.
+
+## Hybrid Key Exchange
+
+Verbeth combines two key exchange mechanisms:
+
+### X25519 (Classical)
+
+- Well-understood elliptic curve Diffie-Hellman
+- 128-bit security against classical computers
+- Vulnerable to quantum computers running Shor's algorithm
+
+### ML-KEM-768 (Post-Quantum)
+
+- NIST-standardized lattice-based KEM (formerly Kyber)
+- 192-bit security against quantum computers
+- Larger keys (1184 bytes public, 1088 bytes ciphertext)
+
+### Why Hybrid?
+
+Defense-in-depth:
+
+- If X25519 is broken (quantum), ML-KEM protects
+- If ML-KEM is broken (cryptanalysis), X25519 protects
+- Security holds if *either* primitive remains secure
+
+This protects against "Harvest Now, Decrypt Later" (HNDL) attacks where adversaries record encrypted traffic today hoping to decrypt with future quantum computers.
+
+## Protocol Flow
+
+```
+Alice (Initiator) Bob (Responder)
+───────────────── ───────────────
+
+1. Generate ephemeral X25519 keypair (a, A)
+2. Generate ML-KEM-768 keypair (kemPk, kemSk)
+3. Create identity proof
+
+ ─────── Handshake Event ───────►
+ │ recipientHash: H(bob_addr) │
+ │ ephemeralPubKey: A │
+ │ kemPublicKey: kemPk │
+ │ identityProof: {...} │
+ └───────────────────────────────┘
+
+ 4. Generate ephemeral keypair (r, R)
+ 5. Compute X25519: x_ss = ECDH(r, A)
+ 6. Encapsulate KEM: (ct, kem_ss) = Encap(kemPk)
+ 7. Compute hybrid tag:
+ tag = HKDF(x_ss || kem_ss, "verbeth:hsr-hybrid:v1")
+ 8. Encrypt response to A
+
+ ◄───── HandshakeResponse ──────
+ │ inResponseTo: tag │
+ │ responderEphemeralR: R │
+ │ ciphertext: Enc(A, response) │
+ └───────────────────────────────┘
+
+9. Decrypt response, extract R, ct
+10. Compute X25519: x_ss = ECDH(a, R)
+11. Decapsulate KEM: kem_ss = Decap(ct, kemSk)
+12. Verify tag matches
+13. Derive root key from hybrid secret
+
+ ═══════ Channel Established ═══════
+```
+
+## Hybrid Tag Computation
+
+The `inResponseTo` tag links response to handshake using the hybrid secret:
+
+```typescript
+function computeHybridTag(
+ ecdhSecret: Uint8Array, // X25519 shared secret
+ kemSecret: Uint8Array // ML-KEM shared secret
+): `0x${string}` {
+ const okm = hkdf(sha256, kemSecret, ecdhSecret, "verbeth:hsr-hybrid:v1", 32);
+ return keccak256(okm);
+}
+```
+
+Observers cannot link `HandshakeResponse` to its `Handshake` without the shared secrets. See [Security Model](./security.md#handshake-response-unlinkability) for detailed analysis against classical and quantum adversaries.
+
+## Root Key Derivation
+
+The initial root key for the Double Ratchet combines both secrets:
+
+```typescript
+function hybridInitialSecret(
+ x25519Secret: Uint8Array,
+ kemSecret: Uint8Array
+): Uint8Array {
+ const combined = concat([x25519Secret, kemSecret]);
+ return hkdf(sha256, combined, zeros(32), "VerbethHybrid", 32);
+}
+```
+
+This root key is post-quantum secure. All subsequent ratchet keys derive from it, propagating PQ security through the entire conversation.
+
+## On-Chain Events
+
+### Handshake Event
+
+```solidity
+event Handshake(
+ bytes32 indexed recipientHash,
+ address indexed sender,
+ bytes ephemeralPubKey, // 32 bytes X25519
+ bytes kemPublicKey, // 1184 bytes ML-KEM-768
+ bytes plaintextPayload // Identity proof + note
+);
+```
+
+### HandshakeResponse Event
+
+```solidity
+event HandshakeResponse(
+ bytes32 indexed inResponseTo, // Hybrid tag
+ address indexed responder,
+ bytes responderEphemeralR, // 32 bytes X25519
+ bytes ciphertext // Encrypted response (includes KEM ciphertext)
+);
+```
+
+## Gas Considerations
+
+| Component | Size | Notes |
+|-----------|------|-------|
+| X25519 ephemeral | 32 bytes | Minimal |
+| ML-KEM public key | 1184 bytes | Dominates handshake cost |
+| ML-KEM ciphertext | 1088 bytes | In encrypted response |
+| Identity proof | ~500 bytes | Signature + message |
+
+Handshake initiation costs more due to the KEM public key. Response is encrypted, so KEM ciphertext is hidden in the blob.
+
+## Executor Abstraction
+
+Handshake transactions can be sent via:
+
+- **EOA**: Direct wallet transaction
+- **Safe Module**: Session key authorized by Safe
+
+The identity proof's `ExecutorAddress` field specifies which address will send the transaction, enabling verification regardless of executor type.
+
+## Security Properties
+
+| Property | Guarantee |
+|----------|-----------|
+| **Forward secrecy** | Ephemeral keys provide FS from message 0 |
+| **HNDL resistance** | ML-KEM protects root key against quantum |
+| **Identity binding** | Proof ties keys to Ethereum address |
+| **Quantum unlinkability** | Tag derivation hides handshake-response link |
diff --git a/apps/docs/docs/concepts/how-it-works.md b/apps/docs/docs/concepts/how-it-works.md
new file mode 100644
index 0000000..aa6dca7
--- /dev/null
+++ b/apps/docs/docs/concepts/how-it-works.md
@@ -0,0 +1,101 @@
+---
+sidebar_position: 1
+title: How It Works
+---
+
+# How It Works
+
+Verbeth is an end-to-end encrypted messaging protocol that uses Ethereum as its sole transport layer.
+
+## Blockchain as Transport
+
+Instead of running servers or relay infrastructure, Verbeth stores encrypted messages directly in Ethereum event logs. The blockchain provides:
+
+- **Immutability**: Messages cannot be altered or deleted
+- **Availability**: No server downtime, no message loss
+- **Censorship resistance**: Anyone can read/write to the contract
+- **Global ordering**: Block timestamps provide message ordering
+
+Messages are emitted as events from the Verbeth contract. Clients query these events using standard RPC calls.
+
+## No Servers, No Relays
+
+Traditional encrypted messaging requires:
+
+1. A server to store messages until recipients come online
+2. Push infrastructure for notifications
+3. Trust that the server doesn't log metadata
+
+Verbeth eliminates all of this. The Ethereum network stores messages indefinitely. Recipients query the blockchain when they come online. The trust model shifts from "trust our servers" to "verify the chain."
+
+## Identity Model
+
+Your Ethereum address is your identity. No usernames, no phone numbers, no email verification.
+
+Verbeth derives cryptographic keys from a single wallet signature:
+
+```
+Wallet Signature
+ ↓
+ HKDF Chain
+ ↓
+┌─────────────────────────────────────┐
+│ X25519 (encryption) │
+│ Ed25519 (signing) │
+│ secp256k1 session key (optional) │
+└─────────────────────────────────────┘
+```
+
+A binding proof cryptographically ties these derived keys to your Ethereum address. This proof is verified on-chain or via ERC-1271/ERC-6492 for smart accounts.
+
+## Why Safe Accounts?
+
+Verbeth works with EOAs but is optimized for Safe accounts:
+
+- **Session keys**: Derived secp256k1 key can be authorized via Safe module, enabling messaging without repeated wallet signatures
+- **Gasless messaging**: Paymasters can sponsor message transactions
+- **Multi-sig recovery**: Safe's recovery mechanisms protect your messaging identity
+- **ERC-1271 verification**: Smart contract signature verification built into the protocol
+
+## Protocol Stack
+
+```
+┌─────────────────────────────────────┐
+│ Application │
+├─────────────────────────────────────┤
+│ VerbethClient │
+│ (sessions, encryption, keys) │
+├─────────────────────────────────────┤
+│ @verbeth/sdk │
+│ (ratchet, handshake, identity) │
+├─────────────────────────────────────┤
+│ VerbethV1 Contract │
+│ (Handshake, HandshakeResponse, │
+│ MessageSent events) │
+├─────────────────────────────────────┤
+│ Ethereum │
+│ (event logs, finality) │
+└─────────────────────────────────────┘
+```
+
+## On-Chain Data Model
+
+The Verbeth contract emits three event types:
+
+| Event | Purpose | Indexed Fields |
+|-------|---------|----------------|
+| `Handshake` | Initiate key exchange | `recipientHash`, `sender` |
+| `HandshakeResponse` | Accept key exchange | `inResponseTo` |
+| `MessageSent` | Encrypted message | `sender`, `topic` |
+
+Topics are derived from shared secrets. Only participants know which topics belong to their conversation.
+
+## Message Flow
+
+1. **Alice initiates**: Emits `Handshake` with her ephemeral keys and identity proof
+2. **Bob responds**: Emits `HandshakeResponse` with his keys, encrypted to Alice
+3. **Both derive topics**: Shared secrets produce topic hashes for the conversation
+4. **Messages flow**: Each party emits `MessageSent` to their outbound topic
+5. **Topics evolve**: Double Ratchet advances topics for forward secrecy
+
+See [Handshake](./handshake.md) and [Ratchet](./ratchet.md) for protocol details.
diff --git a/apps/docs/docs/concepts/identity.md b/apps/docs/docs/concepts/identity.md
new file mode 100644
index 0000000..c971443
--- /dev/null
+++ b/apps/docs/docs/concepts/identity.md
@@ -0,0 +1,169 @@
+---
+sidebar_position: 2
+title: Identity
+---
+
+# Identity
+
+Verbeth binds cryptographic messaging keys to Ethereum addresses through deterministic derivation and signed proofs.
+
+## Key Derivation
+
+A single wallet signature produces all identity keys:
+
+```
+┌──────────────────────────────────────────────┐
+│ Seed Message: │
+│ "VerbEth Identity Seed v1" │
+│ "Address: 0x..." │
+│ "Context: verbeth" │
+└──────────────────────────────────────────────┘
+ ↓
+ Wallet Signature
+ ↓
+┌──────────────────────────────────────────────┐
+│ IKM = HKDF( │
+│ canonicalize(sig) || H(message) || │
+│ "verbeth/addr:" || address │
+│ ) │
+└──────────────────────────────────────────────┘
+ ↓
+            ┌──────────────┼──────────────┐
+            ↓              ↓              ↓
+       X25519 key     Ed25519 key     secp256k1
+      (encryption)     (signing)     session key
+```
+
+The derivation is:
+
+1. **Deterministic**: Same signature always produces same keys
+2. **Reproducible**: User can regenerate keys by re-signing
+3. **Isolated**: Different addresses produce unrelated keys
+
+### HKDF Chain
+
+```typescript
+// IKM = input keying material (HKDF terminology, RFC 5869)
+const ikm = hkdf(sha256,
+ concat([canonicalSig, sha256(seedMessage), "verbeth/addr:" + address]),
+ "verbeth/seed-sig-v1",
+ "verbeth/ikm",
+ 32
+);
+
+// Derive individual keys
+const x25519_sk = hkdf(sha256, ikm, "", "verbeth-x25519-v1", 32);
+const ed25519_seed = hkdf(sha256, ikm, "", "verbeth-ed25519-v1", 32);
+const session_sk = hkdf(sha256, ikm, "", "verbeth-session-secp256k1-v1", 32);
+```
+
+## Binding Proofs
+
+A binding proof cryptographically ties derived keys to an Ethereum address:
+
+```
+VerbEth Key Binding v1
+Address: 0xabc...
+PkEd25519: 0x123...
+PkX25519: 0x456...
+ExecutorAddres: 0xdef...
+ChainId: 8453
+```
+
+This message is signed by the wallet, creating proof that:
+
+1. The signer controls the Ethereum address
+2. The signer authorizes these specific public keys
+3. The proof is bound to a specific chain and executor
+
+### Message Structure
+
+| Field | Purpose |
+|-------|---------|
+| `Address` | Signer's Ethereum address |
+| `PkEd25519` | Ed25519 signing public key |
+| `PkX25519` | X25519 encryption public key |
+| `ExecutorAddres` | Safe address that will send transactions |
+| `ChainId` | Chain ID for replay protection |
+
+## Verification Standards
+
+Verbeth supports three verification methods:
+
+### EOA (Externally Owned Account)
+
+Standard `ecrecover` verifies the signature against the address.
+
+### ERC-1271 (Deployed Smart Accounts)
+
+For deployed Safe accounts or other smart wallets:
+
+```solidity
+function isValidSignature(bytes32 hash, bytes signature)
+ external view returns (bytes4);
+```
+
+The contract returns `0x1626ba7e` if the signature is valid.
+
+### ERC-6492 (Counterfactual Accounts)
+
+For Safe accounts that haven't been deployed yet:
+
+```
+signature = abi.encodePacked(
+ factory,
+ factoryCalldata,
+ originalSignature
+)
+```
+
+Verification simulates deployment, then calls ERC-1271.
+
+## Safe Account Integration
+
+When using a Safe account, the binding proof includes `ExecutorAddres`:
+
+```typescript
+const { keyPair, sessionPrivateKey, sessionAddress } =
+ await deriveIdentityKeys(signer, address);
+
+const identityProof = await createBindingProof(
+ signer,
+ address,
+ derivedKeys,
+ safeAddress // ExecutorAddres field
+);
+```
+
+The derived `sessionPrivateKey` creates an Ethereum wallet that can be authorized by the Safe's session module. This enables:
+
+- Sending messages without repeated wallet signatures
+- Gasless transactions via paymaster
+- Programmatic messaging from backend services
+
+## Verification Flow
+
+When receiving a handshake or message:
+
+```
+1. Parse binding message
+2. Extract claimed address and public keys
+3. Verify signature:
+ - EOA: ecrecover
+ - Smart Account: ERC-1271
+ - Counterfactual: ERC-6492
+4. Compare extracted keys against message/handshake keys
+5. Validate ExecutorAddres matches msg.sender
+6. Check ChainId matches current chain
+```
+
+If any step fails, the message is rejected.
+
+## Security Properties
+
+| Property | Guarantee |
+|----------|-----------|
+| **Key binding** | Keys are provably controlled by address owner |
+| **Replay protection** | ChainId prevents cross-chain replay |
+| **Executor binding** | ExecutorAddres prevents unauthorized senders |
+| **Determinism** | Same inputs produce same keys (recovery) |
diff --git a/apps/docs/docs/concepts/ratchet.md b/apps/docs/docs/concepts/ratchet.md
new file mode 100644
index 0000000..17ae068
--- /dev/null
+++ b/apps/docs/docs/concepts/ratchet.md
@@ -0,0 +1,237 @@
+---
+sidebar_position: 4
+title: Double Ratchet
+---
+
+# Double Ratchet
+
+Verbeth uses the Double Ratchet algorithm for post-handshake encryption, providing forward secrecy and post-compromise security.
+
+## Overview
+
+The Double Ratchet combines two ratchets:
+
+1. **DH Ratchet**: New Diffie-Hellman exchange on each round-trip
+2. **Symmetric Ratchet**: Key derivation for each message
+
+```
+ Root Key
+ │
+ ┌──────────────┼──────────────┐
+ │ │ │
+ ▼ ▼ ▼
+ DH Step 1 DH Step 2 DH Step 3
+ │ │ │
+ ▼ ▼ ▼
+┌───────┐ ┌───────┐ ┌───────┐
+│Chain 1│ │Chain 2│ │Chain 3│
+│ Key 0 │ │ Key 0 │ │ Key 0 │
+│ Key 1 │ │ Key 1 │ │ Key 1 │
+│ Key 2 │ │ ... │ │ ... │
+└───────┘ └───────┘ └───────┘
+```
+
+## Key Derivation Functions
+
+### Root Key Derivation
+
+When a DH ratchet step occurs:
+
+```typescript
+function kdfRootKey(rootKey: Uint8Array, dhOutput: Uint8Array) {
+ const output = hkdf(sha256, dhOutput, rootKey, "VerbethRatchet", 64);
+ return {
+ rootKey: output.slice(0, 32), // New root key
+ chainKey: output.slice(32, 64) // New chain key
+ };
+}
+```
+
+### Chain Key Derivation
+
+For each message in a chain:
+
+```typescript
+function kdfChainKey(chainKey: Uint8Array) {
+ return {
+ messageKey: hmac(sha256, chainKey, [0x01]), // Encrypt this message
+ chainKey: hmac(sha256, chainKey, [0x02]) // Next chain key
+ };
+}
+```
+
+## Session State
+
+The `RatchetSession` contains:
+
+```typescript
+interface RatchetSession {
+ // Identity
+ conversationId: string; // keccak256(sort([topicOut, topicIn]))
+ myAddress: string;
+ contactAddress: string;
+
+ // Root Ratchet
+ rootKey: Uint8Array; // 32 bytes, PQ-secure from handshake
+
+ // DH Ratchet Keys
+ dhMySecretKey: Uint8Array; // My current DH secret
+ dhMyPublicKey: Uint8Array; // My current DH public (in message headers)
+ dhTheirPublicKey: Uint8Array; // Their last DH public
+
+ // Sending Chain
+ sendingChainKey: Uint8Array | null;
+ sendingMsgNumber: number; // Ns
+
+ // Receiving Chain
+ receivingChainKey: Uint8Array | null;
+ receivingMsgNumber: number; // Nr
+
+ // Skip Handling
+ previousChainLength: number; // PN header field
+ skippedKeys: SkippedKey[]; // For out-of-order messages
+
+ // Topic Ratcheting
+ currentTopicOutbound: `0x${string}`;
+ currentTopicInbound: `0x${string}`;
+ topicEpoch: number;
+}
+```
+
+**Critical**: Session state must be persisted after every encrypt/decrypt operation. Failing to persist leads to message-key reuse (a confidentiality failure) and to message loss.
+
+## Topic Ratcheting
+
+Topics evolve with the DH ratchet to maintain forward secrecy of conversation metadata:
+
+```typescript
+function deriveTopic(
+ rootKey: Uint8Array, // PQ-secure salt
+ dhOutput: Uint8Array, // DH shared secret
+ direction: 'outbound' | 'inbound'
+): `0x${string}` {
+ const info = `verbeth:topic-${direction}:v3`;
+ const okm = hkdf(sha256, dhOutput, rootKey, info, 32);
+ return keccak256(okm);
+}
+```
+
+Using the root key as the HKDF salt provides quantum-resistant topic unlinkability. Even if X25519 is broken, topics from different epochs cannot be linked without the root key.
+
+### Topic Evolution
+
+```
+Epoch 0 (Handshake) Epoch 1 (Alice ratchets) Epoch 2 (Bob ratchets)
+───────────────────── ──────────────────────── ──────────────────────
+topicOutA = H(salt₀) topicOutA = H(salt₁) topicOutA = H(salt₂)
+topicInA = H(salt₀) topicInA = H(salt₁) topicInA = H(salt₂)
+```
+
+### Grace Period
+
+When topics change, the previous inbound topic remains valid for 5 minutes (`TOPIC_TRANSITION_WINDOW_MS`). This handles:
+
+- Messages in flight during ratchet
+- Blockchain reorgs
+- Out-of-order delivery
+
+```typescript
+interface RatchetSession {
+ previousTopicInbound?: `0x${string}`;
+ previousTopicExpiry?: number; // Date.now() + 5 minutes
+}
+```
+
+## Out-of-Order Messages
+
+Blockchain delivery doesn't guarantee order. The ratchet handles this via skip keys:
+
+### Skip Key Storage
+
+When message N arrives but we expected message M (where M < N):
+
+```typescript
+// Store keys for messages M through N-1
+for (let i = M; i < N; i++) {
+ const { chainKey, messageKey } = kdfChainKey(currentChainKey);
+ skippedKeys.push({
+ dhPubKeyHex: hexlify(theirDHPub),
+ msgNumber: i,
+ messageKey: messageKey,
+ createdAt: Date.now()
+ });
+ currentChainKey = chainKey;
+}
+```
+
+### Bounds and Pruning
+
+| Constant | Value | Purpose |
+|----------|-------|---------|
+| `MAX_SKIP_PER_MESSAGE` | 100,000 | Reject messages requiring excessive skips |
+| `MAX_STORED_SKIPPED_KEYS` | 1,000 | Prune oldest when exceeded |
+| `MAX_SKIPPED_KEYS_AGE_MS` | 24 hours | TTL for skip keys |
+
+## Burned Slots
+
+**Rollback is forbidden**. If you decrypt a message, the session state advances. Re-using old state creates:
+
+- Duplicate message keys (breaks confidentiality)
+- Orphan skip keys for the receiver
+
+Example of what NOT to do:
+
+```typescript
+// WRONG: Not persisting state
+const result = ratchetDecrypt(session, header, ciphertext);
+// forgot to save result.session
+// next decrypt uses old session = security failure
+```
+
+## DoS Protection
+
+Before ratchet operations, signatures are verified:
+
+```typescript
+// O(1) rejection of invalid messages
+const headerBytes = encodeHeader(header);
+const dataToVerify = concat([headerBytes, ciphertext]);
+const valid = nacl.sign.detached.verify(dataToVerify, signature, signingPubKey);
+
+if (!valid) {
+ return null; // Reject without touching ratchet state
+}
+```
+
+Ed25519 verification is cheap. This prevents attackers from forcing expensive ratchet computations with invalid messages.
+
+## Message Format
+
+Binary payload structure:
+
+```
+┌─────────┬───────────┬────────────┬────────────┐
+│ Version │ Signature │ Header │ Ciphertext │
+│ 1 byte │ 64 bytes │ 40 bytes │ variable │
+└─────────┴───────────┴────────────┴────────────┘
+
+Header (40 bytes):
+┌──────────────────┬──────────┬──────────┐
+│ DH PubKey │ PN │ N │
+│ 32 bytes │ 4 bytes │ 4 bytes │
+└──────────────────┴──────────┴──────────┘
+```
+
+- **DH PubKey**: Sender's current ratchet public key
+- **PN**: Previous chain length (for skip key calculation)
+- **N**: Message number in current chain
+
+## Security Properties
+
+| Property | Mechanism |
+|----------|-----------|
+| **Forward secrecy** | DH ratchet step deletes old keys |
+| **Post-compromise security** | New DH exchange after compromise heals |
+| **Out-of-order tolerance** | Skip keys with bounded storage |
+| **DoS resistance** | Ed25519 verification before ratchet ops |
+| **Topic unlinkability** | Root key salt for topic derivation |
diff --git a/apps/docs/docs/concepts/security.md b/apps/docs/docs/concepts/security.md
new file mode 100644
index 0000000..c42a797
--- /dev/null
+++ b/apps/docs/docs/concepts/security.md
@@ -0,0 +1,341 @@
+---
+sidebar_position: 5
+title: Security Model
+---
+
+# Security Model
+
+This document describes Verbeth's threat model, security guarantees, and tradeoffs compared to traditional encrypted messaging.
+
+## Threat Classes
+
+Verbeth considers three distinct adversary types:
+
+### 1. Passive Network Observer
+
+**Who**: Block explorers, chain indexers, MEV searchers
+
+**Capabilities**:
+- Read all on-chain data (events, calldata)
+- Correlate transactions by address
+- Build transaction graphs
+
+**Cannot**:
+- Read message contents (encrypted)
+- Link topics to recipients (hash-based)
+- Determine conversation participants from topics alone
+
+### 2. Active RPC Adversary
+
+**Who**: Your RPC provider (Infura, Alchemy, self-hosted)
+
+**Capabilities**:
+- See all queries you make
+- Observe which topics you subscribe to
+- Correlate query timing with message receipt
+
+**Cannot**:
+- Decrypt message contents
+- Forge messages (no private keys)
+
+**Critical**: RPC providers can perform correlation attacks linking senders to receivers by observing query patterns. See [Metadata Privacy](#metadata-privacy).
+
+### 3. State Compromise Adversary
+
+**Who**: Malware, device theft, insider threat
+
+**Capabilities**:
+- Read session state (root keys, chain keys)
+- Decrypt future messages until ratchet step
+- Potentially impersonate user
+
+**Mitigated by**:
+- Forward secrecy (past messages protected)
+- Post-compromise security (future messages protected after DH ratchet)
+
+## Authentication & Non-Repudiation
+
+### msg.sender as Authentication
+
+Ethereum's transaction model provides protocol-level sender authentication:
+
+- `msg.sender` is the transaction signer
+- Cannot be forged without private key
+- Verified by every node in the network
+
+### Binding Proofs
+
+Identity proofs add application-level authentication:
+
+- Signed message ties keys to Ethereum address
+- Verified by recipient before decryption
+- Supports EOA, ERC-1271, and ERC-6492
+
+### Non-Repudiation
+
+**Verbeth provides non-repudiation by design.** This is a fundamental difference from Signal:
+
+| Aspect | Verbeth | Signal |
+|---|---------|--------|
+| **Message attribution** | Permanent, on-chain | Deniable |
+| **Third-party verification** | Anyone can verify sender | Cannot prove authorship |
+| **Legal admissibility** | Strong (blockchain evidence) | Weak (cryptographic deniability) |
+
+**On-chain transactions are permanent, attributable records.** A message sender cannot later deny sending a message. The blockchain provides:
+
+- Immutable record of who sent what
+- Timestamp from block inclusion
+- Cryptographic proof via transaction signature
+
+**Signal's deniability** works because messages are authenticated with ephemeral keys that both parties know. Either party could have forged the message. Verbeth explicitly trades deniability for trustless, verifiable communication.
+
+**Use cases where non-repudiation matters**:
+- Business communications requiring audit trails
+- Legal agreements and contracts
+- Compliance-regulated messaging
+
+## Forward Secrecy
+
+**Definition**: Compromise of current keys does not reveal past messages.
+
+### Mechanism
+
+1. Each DH ratchet step derives new keys
+2. Old keys are deleted after derivation
+3. Even with current state, past messages cannot be decrypted
+
+### Unconditional from Message 0
+
+Unlike some protocols that require multiple messages before FS kicks in, Verbeth provides forward secrecy immediately:
+
+- Handshake uses ephemeral keys only
+- No long-term keys in key exchange
+- First message is already protected
+
+### Key Deletion Importance
+
+Forward secrecy only works if old keys are actually deleted:
+
+```typescript
+// SDK wipes keys after use
+try {
+ messageKey.fill(0);
+ chainKey.fill(0);
+} catch {}
+```
+
+Application developers must ensure session state isn't backed up in ways that preserve old keys.
+
+## Post-Compromise Security
+
+**Definition**: Security recovery after state compromise.
+
+### Against Classical Adversary
+
+**Full PCS**: After a DH ratchet step, security is restored.
+
+```
+Compromise here
+ ↓
+[msg 1] [msg 2] [msg 3] [DH ratchet] [msg 4] [msg 5]
+ ✗ ✗ ✗ │ ✓ ✓
+ └── Security restored
+```
+
+The attacker can read messages until the next DH exchange. After that, they're locked out.
+
+### Against Quantum Adversary
+
+Two scenarios to consider:
+
+**Passive quantum (HNDL)**:
+- Adversary records ciphertext now, decrypts later with quantum computer
+- **Fully protected**: Root key is PQ-secure from ML-KEM
+- All messages inherit this protection
+
+**Active quantum + state compromise**:
+- Adversary has quantum computer AND compromises device state
+- **PCS degraded**: Subsequent DH ratchets use X25519 (quantum-vulnerable)
+- Attacker can compute future DH shared secrets
+
+This is an honest limitation. Full PQ PCS would require hybrid KEM ratcheting (future work).
+
+## Post-Quantum Security
+
+### HNDL Resistance
+
+"Harvest Now, Decrypt Later" attacks are mitigated:
+
+1. **Handshake**: ML-KEM-768 protects initial key exchange
+2. **Root key derivation**: Hybrid secret (X25519 + ML-KEM)
+3. **Key propagation**: All chain keys derive from PQ-secure root
+4. **Result**: All messages protected, not just handshake
+
+### Why Hybrid (X25519 + ML-KEM)
+
+Defense-in-depth principle:
+
+| Threat | X25519 | ML-KEM | Hybrid |
+|--------|--------|--------|--------|
+| Classical attack | Secure | Secure | Secure |
+| Quantum attack (Shor) | Broken | Secure | Secure |
+| Cryptanalytic breakthrough | Secure | ? | Secure |
+
+ML-KEM is NIST-standardized but newer. X25519 is battle-tested. Combining them ensures security if either remains secure.
+
+### Limitations
+
+Honest assessment of what's NOT quantum-secure:
+
+- **DH ratchet steps**: Use X25519 only (practical tradeoff)
+- **Active quantum + state compromise**: No full PCS
+- **Topic derivation**: Uses DH output (though salted with PQ-secure root)
+
+## Metadata Privacy
+
+### On-Chain Visibility
+
+Observers see:
+
+| Visible | Hidden |
+|---------|--------|
+| Sender addresses | Recipient identity |
+| Topic hashes | Topic-to-recipient mapping |
+| Ciphertext blobs | Message content |
+| Transaction timing | Conversation relationships |
+| Gas costs | |
+
+### Handshake-Response Unlinkability
+
+The `Handshake` and `HandshakeResponse` events have no plaintext link. The only connection is the `inResponseTo` tag, which requires shared secrets to compute:
+
+```
+tag = keccak256(HKDF(kemSecret, ecdhSecret, "verbeth:hsr-hybrid:v1"))
+```
+
+**Against passive classical adversary**:
+- Observer sees Alice's ephemeral public key in `Handshake`
+- Observer sees `inResponseTo` tag in `HandshakeResponse`
+- Cannot compute tag without Bob's ephemeral secret (never published)
+
+**Against passive quantum adversary**:
+- Quantum computer could solve ECDH from public keys (Shor's algorithm)
+- But tag derivation also requires `kemSecret` from ML-KEM
+- ML-KEM ciphertext is inside the encrypted response payload
+- Cannot decrypt payload without Alice's ephemeral secret
+- Result: still cannot link handshake to response
+
+### Handshake-to-Message Unlinkability
+
+After handshake, messages use topics derived from the hybrid root key:
+
+```
+rootKey = HKDF(x25519Secret || kemSecret, "VerbethHybrid")
+topic = keccak256(HKDF(dhOutput, rootKey, "verbeth:topic"))
+```
+
+**Against passive classical adversary**:
+- Topics are hashes with no reversible link to handshake public keys
+- Cannot determine which `Handshake` led to which `MessageSent` topic
+
+**Against passive quantum adversary**:
+- Even with ECDH broken, `rootKey` includes `kemSecret`
+- Topics inherit PQ security from root key salt
+- Cannot link handshake ephemeral keys to message topics
+
+### The RPC Problem
+
+**Critical trust boundary**: Your RPC provider sees your queries.
+
+When you query for messages:
+```typescript
+// RPC sees this query
+contract.queryFilter("MessageSent", { topic: myTopic })
+```
+
+The RPC can:
+1. Note which topics you query
+2. Correlate with message emission timing
+3. Link sender address to querying address
+
+**Mitigations**:
+- Self-hosted node (eliminates RPC trust)
+- Tor/VPN (hides query origin)
+- Decoy queries (noise injection)
+- Query aggregation services (future)
+
+## Replay & DoS Protection
+
+### Ethereum's Gas Model
+
+Spam resistance through economics:
+- Every message costs gas
+- Attack cost scales linearly
+- No free amplification attacks
+
+### Ed25519 Signature Verification
+
+Before any ratchet state changes:
+
+```typescript
+// O(1) verification, O(n) ratchet ops
+if (!nacl.sign.detached.verify(data, sig, pubKey)) {
+ return null; // Reject immediately
+}
+```
+
+Invalid signatures are rejected before expensive key derivation.
+
+### Nonce Tracking (Optional)
+
+For applications requiring strict deduplication:
+- Track message hashes or (topic, n) pairs
+- Reject duplicates at application layer
+- SDK provides hooks for custom logic
+
+## Comparison with Signal Protocol
+
+| Property | Verbeth | Signal |
+|----------|---------|--------|
+| **Transport** | Blockchain (public, immutable) | Server relay (private, ephemeral) |
+| **Authentication** | `msg.sender` + binding proof | X3DH with identity keys |
+| **Non-repudiation** | Yes (on-chain attribution) | No (deniable) |
+| **Prekey infrastructure** | None | Required (server-hosted) |
+| **Forward secrecy** | From message 0 | From message 0 |
+| **PCS (classical)** | Full | Full |
+| **PCS (quantum)** | Degraded (active + compromise) | Degraded (same) |
+| **HNDL resistance** | Yes (ML-KEM-768 hybrid) | Yes (PQXDH) |
+| **Offline delivery** | Yes (blockchain stores forever) | Yes (server queues) |
+| **Metadata privacy** | RPC trust required | Server trust required |
+
+### Verbeth Advantages
+
+- **No server infrastructure**: Eliminates server trust entirely
+- **Trustless delivery**: Blockchain finality guarantees delivery
+- **Smart account integration**: Session keys, gasless messaging
+- **Audit trail**: Compliance-friendly immutable records
+- **Censorship resistance**: Permissionless access
+
+### Verbeth Tradeoffs
+
+- **No deniability**: On-chain = permanent attribution
+- **Higher latency**: Block times (2s L2, 12s L1)
+- **Gas costs**: Each message costs money
+- **RPC metadata**: Query patterns visible to provider
+- **Larger handshakes**: ML-KEM public keys are 1184 bytes
+
+## Summary
+
+Verbeth provides strong security guarantees with explicit tradeoffs:
+
+| Guarantee | Status |
+|-----------|--------|
+| End-to-end encryption | Yes |
+| Forward secrecy | Yes, from message 0 |
+| Post-compromise security (classical) | Yes |
+| HNDL resistance | Yes |
+| Sender authentication | Yes, protocol-level |
+| Recipient privacy | Yes, from chain observers |
+| Deniability | No (explicit design choice) |
+| Metadata privacy from RPC | No (requires self-hosting) |
+| Full PQ PCS | No (active quantum + compromise) |
diff --git a/apps/docs/docs/quick-start.md b/apps/docs/docs/quick-start.md
new file mode 100644
index 0000000..614a2d2
--- /dev/null
+++ b/apps/docs/docs/quick-start.md
@@ -0,0 +1,208 @@
+---
+sidebar_position: 1
+slug: /quick-start
+title: Quick Start
+---
+
+# Quick Start
+
+Get end-to-end encrypted messaging working in your dApp.
+
+## Install
+
+```bash
+npm install @verbeth/sdk ethers
+```
+
+## Setup Client
+
+```typescript
+import {
+ createVerbethClient,
+ deriveIdentityKeyPairWithProof,
+ ExecutorFactory,
+ getVerbethAddress
+} from '@verbeth/sdk';
+import { ethers } from 'ethers';
+
+// 1. Connect wallet
+const provider = new ethers.BrowserProvider(window.ethereum);
+const signer = await provider.getSigner();
+const address = await signer.getAddress();
+
+// 2. Derive identity keys (requires 2 wallet signatures)
+const { identityKeyPair, identityProof } = await deriveIdentityKeyPairWithProof(
+ signer,
+ address
+);
+
+// 3. Create executor for contract interactions
+const contract = new ethers.Contract(getVerbethAddress(), VERBETH_ABI, signer);
+const executor = ExecutorFactory.createEOA(contract);
+
+// 4. Create client
+const client = createVerbethClient({
+ address,
+ signer,
+ identityKeyPair,
+ identityProof,
+ executor,
+});
+```
+
+## Send a Handshake
+
+Start a conversation by sending a handshake to another address.
+The returned secrets must be stored until the recipient responds.
+
+```typescript
+const recipientAddress = '0x...';
+
+const { tx, ephemeralKeyPair, kemKeyPair } = await client.sendHandshake(
+ recipientAddress,
+ 'Hello from Verbeth!'
+);
+await tx.wait();
+```
+
+## Accept a Handshake
+
+When you receive a handshake, accept it to establish the encrypted channel. You can implement your own storage to persist the session.
+
+
+```typescript
+// Parse incoming handshake event from blockchain logs
+const initiatorEphemeralPubKey = handshakeEvent.ephemeralPubKey;
+
+const {
+ tx,
+ topicOutbound,
+ topicInbound,
+ responderEphemeralSecret,
+ responderEphemeralPublic,
+ salt,
+ kemSharedSecret,
+} = await client.acceptHandshake(initiatorEphemeralPubKey, 'Hey!');
+
+await tx.wait();
+
+const session = client.createResponderSession({
+ contactAddress: handshakeEvent.sender,
+ responderEphemeralSecret,
+ responderEphemeralPublic,
+ initiatorEphemeralPubKey,
+ salt,
+ kemSharedSecret,
+});
+
+await sessionStore.save(session);
+```
+
+## Create Session from Response
+
+When the recipient responds to your handshake, create your session using the previously stored secrets.
+
+```typescript
+// hsrEvent is the HandshakeResponse event from the blockchain
+const session = client.createInitiatorSessionFromHsr({
+ contactAddress: recipientAddress,
+ myEphemeralSecret: storedEphemeralSecret,
+ myKemSecret: storedKemSecret,
+ hsrEvent: {
+ responderEphemeralPubKey: hsrEvent.responderEphemeralPubKey,
+ inResponseToTag: hsrEvent.inResponseTo,
+ kemCiphertext: hsrEvent.kemCiphertext,
+ },
+});
+
+await sessionStore.save(session);
+```
+
+## Send Messages
+
+Once you have a session, configure the storage and send encrypted messages.
+
+```typescript
+client.setSessionStore(sessionStore);
+client.setPendingStore(pendingStore);
+
+const result = await client.sendMessage(
+ session.conversationId,
+ 'This message is end-to-end encrypted!'
+);
+
+console.log('Sent:', result.txHash);
+```
+
+## Decrypt Messages
+
+Decrypt incoming messages from the blockchain.
+
+```typescript
+const decrypted = await client.decryptMessage(
+ messageEvent.topic,
+ messageEvent.payload,
+ senderSigningKey,
+ false // isOwnMessage
+);
+
+if (decrypted) {
+ console.log('Received:', decrypted.plaintext);
+}
+```
+
+## Full Example
+
+```typescript
+import {
+ createVerbethClient,
+ deriveIdentityKeyPairWithProof,
+ ExecutorFactory,
+ getVerbethAddress
+} from '@verbeth/sdk';
+import { ethers } from 'ethers';
+
+async function initVerbeth() {
+ const provider = new ethers.BrowserProvider(window.ethereum);
+ const signer = await provider.getSigner();
+ const address = await signer.getAddress();
+
+ const { identityKeyPair, identityProof } = await deriveIdentityKeyPairWithProof(
+ signer,
+ address
+ );
+
+ const contract = new ethers.Contract(getVerbethAddress(), VERBETH_ABI, signer);
+ const executor = ExecutorFactory.createEOA(contract);
+
+ const client = createVerbethClient({
+ address,
+ signer,
+ identityKeyPair,
+ identityProof,
+ executor,
+ });
+
+ return { client, identityKeyPair };
+}
+
+async function startConversation(client, recipientAddress: string) {
+ const { tx, ephemeralKeyPair, kemKeyPair } = await client.sendHandshake(
+ recipientAddress,
+ 'Starting secure conversation'
+ );
+
+ await tx.wait();
+
+ return {
+ ephemeralSecret: ephemeralKeyPair.secretKey,
+ kemSecret: kemKeyPair.secretKey,
+ };
+}
+```
+
+## Next Steps
+
+- **Identity binding**: Keys are cryptographically bound to your Ethereum address via signed messages
+- **Handshake flow**: X3DH-like protocol with ML-KEM-768 for post-quantum security
+- **Double Ratchet**: Forward secrecy with automatic topic evolution
diff --git a/apps/docs/docusaurus.config.ts b/apps/docs/docusaurus.config.ts
new file mode 100644
index 0000000..79366a4
--- /dev/null
+++ b/apps/docs/docusaurus.config.ts
@@ -0,0 +1,87 @@
+import {themes as prismThemes} from 'prism-react-renderer';
+import type {Config} from '@docusaurus/types';
+import type * as Preset from '@docusaurus/preset-classic';
+
+const config: Config = {
+ title: 'Verbeth SDK',
+ tagline: 'End-to-end encrypted messaging over Ethereum logs',
+ favicon: 'img/favicon.ico',
+
+ future: {
+ v4: true,
+ },
+
+ url: 'https://docs.verbeth.xyz',
+ baseUrl: '/',
+
+ organizationName: 'okrame',
+ projectName: 'verbeth',
+
+ onBrokenLinks: 'throw',
+
+ markdown: {
+ hooks: {
+ onBrokenMarkdownLinks: 'warn',
+ },
+ },
+
+ i18n: {
+ defaultLocale: 'en',
+ locales: ['en'],
+ },
+
+ presets: [
+ [
+ 'classic',
+ {
+ docs: {
+ sidebarPath: './sidebars.ts',
+ editUrl: 'https://github.com/okrame/verbeth/tree/main/apps/docs/',
+ },
+ blog: false,
+ theme: {
+ customCss: './src/css/custom.css',
+ },
+ } satisfies Preset.Options,
+ ],
+ ],
+
+ themeConfig: {
+ colorMode: {
+ defaultMode: 'dark',
+ respectPrefersColorScheme: true,
+ },
+ navbar: {
+ title: 'Verbeth',
+ items: [
+ {
+ type: 'docSidebar',
+ sidebarId: 'docsSidebar',
+ position: 'left',
+ label: 'Docs',
+ },
+ {
+ href: 'https://github.com/okrame/verbeth',
+ label: 'GitHub',
+ position: 'right',
+ },
+ {
+ href: 'https://www.npmjs.com/package/@verbeth/sdk',
+ label: 'npm',
+ position: 'right',
+ },
+ ],
+ },
+ footer: {
+ style: 'dark',
+ copyright: `Verbeth SDK. Built with Docusaurus.`,
+ },
+ prism: {
+ theme: prismThemes.github,
+ darkTheme: prismThemes.dracula,
+ additionalLanguages: ['bash', 'typescript', 'solidity'],
+ },
+ } satisfies Preset.ThemeConfig,
+};
+
+export default config;
diff --git a/apps/docs/package.json b/apps/docs/package.json
new file mode 100644
index 0000000..e61ba2b
--- /dev/null
+++ b/apps/docs/package.json
@@ -0,0 +1,47 @@
+{
+ "name": "docs",
+ "version": "0.0.0",
+ "private": true,
+ "scripts": {
+ "docusaurus": "docusaurus",
+ "start": "docusaurus start",
+ "build": "docusaurus build",
+ "swizzle": "docusaurus swizzle",
+ "deploy": "docusaurus deploy",
+ "clear": "docusaurus clear",
+ "serve": "docusaurus serve",
+ "write-translations": "docusaurus write-translations",
+ "write-heading-ids": "docusaurus write-heading-ids",
+ "typecheck": "tsc"
+ },
+ "dependencies": {
+ "@docusaurus/core": "3.9.2",
+ "@docusaurus/preset-classic": "3.9.2",
+ "@mdx-js/react": "^3.0.0",
+ "clsx": "^2.0.0",
+ "prism-react-renderer": "^2.3.0",
+ "react": "^19.0.0",
+ "react-dom": "^19.0.0"
+ },
+ "devDependencies": {
+ "@docusaurus/module-type-aliases": "3.9.2",
+ "@docusaurus/tsconfig": "3.9.2",
+ "@docusaurus/types": "3.9.2",
+ "typescript": "~5.6.2"
+ },
+ "browserslist": {
+ "production": [
+ ">0.5%",
+ "not dead",
+ "not op_mini all"
+ ],
+ "development": [
+ "last 3 chrome version",
+ "last 3 firefox version",
+ "last 5 safari version"
+ ]
+ },
+ "engines": {
+ "node": ">=20.0"
+ }
+}
diff --git a/apps/docs/replay-protection.md b/apps/docs/replay-protection.md
deleted file mode 100644
index 9574935..0000000
--- a/apps/docs/replay-protection.md
+++ /dev/null
@@ -1,64 +0,0 @@
-# Replay Protection in VerbEth
-
-VerbEth uses Ethereum event logs as the only transport layer for encrypted messages.
-Replay protection is not enforced on-chain and it doesn’t need to be (hence saving on gas).
-
-## Why?
-
-Ethereum already gives us:
-
-- **Sender authentication** via `msg.sender`
-- **Spam resistance** via gas costs
-- **Immutable message delivery** via event logs
-- **Timestamped, ordered history** per sender
-
-This means every message is:
-
-- Authenticated by the sender’s Ethereum key
-- Costly to publish
-- Cryptographically anchored to the chain
-
-> We rely on AEAD provided by nacl.box (XSalsa20 + Poly1305),
-> but avoid additional detached signatures or layered MACs.
-
----
-
-## So What Does `nonce` Do?
-
-We include a `uint256 nonce` in each message log event to support:
-
-- Client-side **message ordering**
-- Optional **deduplication** (e.g. prevent duplicate rendering)
-- Future support for **group/thread consistency**
-
-```solidity
-event MessageSent(
- address indexed sender,
- bytes ciphertext,
- uint256 timestamp,
- bytes32 indexed topic,
- uint256 nonce
- );
-```
-
-But:
-🔸 There is no on-chain enforcement of nonce values
-🔸 Recipients may ignore them entirely or filter replays locally
-
----
-
-## Should You Verify a Message Wasn't Replayed?
-
-Only if you want to. The SDK may optionally track `(sender, topic, nonce)` triplets to filter duplicates:
-
-```ts
-const seen = new Set();
-function isReplay(log) {
- const key = `${log.sender}:${log.topic}:${log.nonce}`;
- if (seen.has(key)) return true;
- seen.add(key);
- return false;
-}
-```
-
----
\ No newline at end of file
diff --git a/apps/docs/sidebars.ts b/apps/docs/sidebars.ts
new file mode 100644
index 0000000..4a93de9
--- /dev/null
+++ b/apps/docs/sidebars.ts
@@ -0,0 +1,20 @@
+import type {SidebarsConfig} from '@docusaurus/plugin-content-docs';
+
+const sidebars: SidebarsConfig = {
+ docsSidebar: [
+ 'quick-start',
+ {
+ type: 'category',
+ label: 'Concepts',
+ items: [
+ 'concepts/how-it-works',
+ 'concepts/identity',
+ 'concepts/handshake',
+ 'concepts/ratchet',
+ 'concepts/security',
+ ],
+ },
+ ],
+};
+
+export default sidebars;
diff --git a/apps/docs/src/css/custom.css b/apps/docs/src/css/custom.css
new file mode 100644
index 0000000..22d451d
--- /dev/null
+++ b/apps/docs/src/css/custom.css
@@ -0,0 +1,238 @@
+/**
+ * Verbeth SDK Documentation
+ * Brutalist minimal theme
+ */
+
+:root {
+ /* Electric blue accent */
+ --ifm-color-primary: #3B82F6;
+ --ifm-color-primary-dark: #2563EB;
+ --ifm-color-primary-darker: #1D4ED8;
+ --ifm-color-primary-darkest: #1E40AF;
+ --ifm-color-primary-light: #60A5FA;
+ --ifm-color-primary-lighter: #93C5FD;
+ --ifm-color-primary-lightest: #BFDBFE;
+
+ /* Light mode */
+ --ifm-background-color: #FAFAFA;
+ --ifm-background-surface-color: #FFFFFF;
+ --ifm-font-color-base: #171717;
+
+ /* Typography */
+ --ifm-font-family-base: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif;
+ --ifm-font-family-monospace: 'JetBrains Mono', 'Fira Code', 'SF Mono', Consolas, monospace;
+ --ifm-code-font-size: 0.875rem;
+ --ifm-heading-font-weight: 600;
+
+ /* Layout */
+ --ifm-navbar-background-color: transparent;
+ --ifm-footer-background-color: transparent;
+ --ifm-global-radius: 0;
+
+ /* Code highlighting */
+ --docusaurus-highlighted-code-line-bg: rgba(59, 130, 246, 0.1);
+}
+
+[data-theme='dark'] {
+ /* Dark mode - crypto-native default */
+ --ifm-background-color: #0A0A0A;
+ --ifm-background-surface-color: #171717;
+ --ifm-font-color-base: #E5E5E5;
+ --ifm-color-primary: #60A5FA;
+ --ifm-color-primary-dark: #3B82F6;
+ --ifm-color-primary-darker: #2563EB;
+ --ifm-color-primary-darkest: #1D4ED8;
+ --ifm-color-primary-light: #93C5FD;
+ --ifm-color-primary-lighter: #BFDBFE;
+ --ifm-color-primary-lightest: #DBEAFE;
+ --ifm-color-emphasis-200: #262626;
+ --ifm-color-emphasis-300: #404040;
+
+ --docusaurus-highlighted-code-line-bg: rgba(96, 165, 250, 0.15);
+}
+
+/* Brutalist touches */
+.navbar {
+ border-bottom: 1px solid var(--ifm-color-emphasis-200);
+ backdrop-filter: blur(8px);
+}
+
+[data-theme='dark'] .navbar {
+ background: rgba(10, 10, 10, 0.8);
+}
+
+[data-theme='light'] .navbar {
+ background: rgba(250, 250, 250, 0.8);
+}
+
+/* No rounded corners - brutalist */
+code {
+ border-radius: 0;
+}
+
+pre {
+ border-radius: 0;
+ border: 1px solid var(--ifm-color-emphasis-200);
+}
+
+/* Buttons */
+.button {
+ border-radius: 0;
+}
+
+/* Cards and containers */
+.card {
+ border-radius: 0;
+}
+
+/* Table of contents */
+.table-of-contents {
+ border-left: 1px solid var(--ifm-color-emphasis-200);
+}
+
+/* Sidebar */
+.menu__link {
+ border-radius: 0;
+}
+
+/* Hero section for landing page */
+.hero-verbeth {
+ min-height: calc(100vh - 60px);
+ display: flex;
+ flex-direction: column;
+ justify-content: center;
+ align-items: center;
+ text-align: center;
+ padding: 4rem 2rem;
+}
+
+.hero-verbeth h1 {
+ font-size: 4rem;
+ font-weight: 700;
+ margin-bottom: 1rem;
+ letter-spacing: -0.02em;
+}
+
+.hero-verbeth p {
+ font-size: 1.25rem;
+ color: var(--ifm-color-emphasis-700);
+ margin-bottom: 2rem;
+ max-width: 500px;
+}
+
+[data-theme='dark'] .hero-verbeth p {
+ color: var(--ifm-color-emphasis-600);
+}
+
+.hero-buttons {
+ display: flex;
+ gap: 1rem;
+ flex-wrap: wrap;
+ justify-content: center;
+}
+
+.hero-buttons a {
+ padding: 0.75rem 1.5rem;
+ font-weight: 500;
+ text-decoration: none;
+ border: 1px solid var(--ifm-color-emphasis-300);
+ transition: all 0.15s ease;
+}
+
+.hero-buttons a:first-child {
+ background: var(--ifm-color-primary);
+ color: white;
+ border-color: var(--ifm-color-primary);
+}
+
+.hero-buttons a:first-child:hover {
+ background: var(--ifm-color-primary-dark);
+ border-color: var(--ifm-color-primary-dark);
+}
+
+.hero-buttons a:not(:first-child) {
+ background: transparent;
+ color: var(--ifm-font-color-base);
+}
+
+.hero-buttons a:not(:first-child):hover {
+ background: var(--ifm-color-emphasis-100);
+}
+
+[data-theme='dark'] .hero-buttons a:not(:first-child):hover {
+ background: var(--ifm-color-emphasis-200);
+}
+
+/* Responsive hero */
+@media screen and (max-width: 768px) {
+ .hero-verbeth h1 {
+ font-size: 2.5rem;
+ }
+
+ .hero-verbeth p {
+ font-size: 1rem;
+ }
+}
+
+/* Footer minimal */
+.footer {
+ border-top: 1px solid var(--ifm-color-emphasis-200);
+ padding: 1.5rem;
+}
+
+.footer__copyright {
+ font-size: 0.875rem;
+ color: var(--ifm-color-emphasis-600);
+}
+
+/* Code blocks - more prominent */
+pre code {
+ font-size: 0.875rem;
+ line-height: 1.6;
+}
+
+/* Inline code */
+:not(pre) > code {
+ background: var(--ifm-color-emphasis-100);
+ padding: 0.125rem 0.375rem;
+ font-size: 0.875em;
+}
+
+[data-theme='dark'] :not(pre) > code {
+ background: var(--ifm-color-emphasis-200);
+}
+
+/* Markdown content */
+.markdown h1 {
+ font-size: 2.25rem;
+}
+
+.markdown h2 {
+ font-size: 1.75rem;
+ margin-top: 2.5rem;
+}
+
+.markdown h3 {
+ font-size: 1.25rem;
+ margin-top: 2rem;
+}
+
+/* Admonitions - brutalist style */
+.admonition {
+ border-radius: 0;
+ border-left-width: 3px;
+}
+
+/* Links */
+a {
+ text-decoration: none;
+}
+
+a:hover {
+ text-decoration: underline;
+}
+
+/* Doc page layout */
+.docMainContainer {
+ max-width: 800px;
+}
diff --git a/apps/docs/src/pages/index.tsx b/apps/docs/src/pages/index.tsx
new file mode 100644
index 0000000..851b94d
--- /dev/null
+++ b/apps/docs/src/pages/index.tsx
@@ -0,0 +1,20 @@
+import type {ReactNode} from 'react';
+import Link from '@docusaurus/Link';
+import Layout from '@theme/Layout';
+
+export default function Home(): ReactNode {
+  return (
+    <Layout
+      title="Verbeth SDK"
+      description="End-to-end encrypted messaging over Ethereum">
+      <div className="hero-verbeth">
+        <h1>Verbeth SDK</h1>
+        <p>End-to-end encrypted messaging over Ethereum</p>
+        <div className="hero-buttons">
+          <Link to="/docs/quick-start">Get Started</Link>
+          <Link to="https://github.com/okrame/verbeth">GitHub</Link>
+        </div>
+      </div>
+    </Layout>
+ );
+}
diff --git a/apps/docs/static/.nojekyll b/apps/docs/static/.nojekyll
new file mode 100644
index 0000000..e69de29
diff --git a/apps/docs/static/img/favicon.ico b/apps/docs/static/img/favicon.ico
new file mode 100644
index 0000000..c01d54b
Binary files /dev/null and b/apps/docs/static/img/favicon.ico differ
diff --git a/apps/docs/static/img/logo.svg b/apps/docs/static/img/logo.svg
new file mode 100644
index 0000000..9db6d0d
--- /dev/null
+++ b/apps/docs/static/img/logo.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/apps/docs/tsconfig.json b/apps/docs/tsconfig.json
new file mode 100644
index 0000000..920d7a6
--- /dev/null
+++ b/apps/docs/tsconfig.json
@@ -0,0 +1,8 @@
+{
+ // This file is not used in compilation. It is here just for a nice editor experience.
+ "extends": "@docusaurus/tsconfig",
+ "compilerOptions": {
+ "baseUrl": "."
+ },
+ "exclude": [".docusaurus", "build"]
+}
diff --git a/apps/indexer/.DS_Store b/apps/indexer/.DS_Store
new file mode 100644
index 0000000..60fd190
Binary files /dev/null and b/apps/indexer/.DS_Store differ
diff --git a/apps/indexer/.env.example b/apps/indexer/.env.example
new file mode 100644
index 0000000..898f337
--- /dev/null
+++ b/apps/indexer/.env.example
@@ -0,0 +1,10 @@
+RPC_WS_URL=wss://base-sepolia.g.alchemy.com/v2/YOUR_KEY
+
+RPC_HTTP_URL=https://base-sepolia.g.alchemy.com/v2/YOUR_KEY
+CONTRACT_ADDRESS=0x82C9c5475D63e4C9e959280e9066aBb24973a663
+CREATION_BLOCK=37097547
+DATABASE_PATH=./data/indexer.db
+SERVER_PORT=3002
+BACKFILL_DAYS=7
+RETENTION_DAYS=7
+RUST_LOG=info
diff --git a/apps/indexer/.gitignore b/apps/indexer/.gitignore
new file mode 100644
index 0000000..e46d592
--- /dev/null
+++ b/apps/indexer/.gitignore
@@ -0,0 +1,4 @@
+/target/
+/data/
+.env
+indexer.log
diff --git a/apps/indexer/Cargo.lock b/apps/indexer/Cargo.lock
new file mode 100644
index 0000000..2bbb1c6
--- /dev/null
+++ b/apps/indexer/Cargo.lock
@@ -0,0 +1,4337 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 4
+
+[[package]]
+name = "ahash"
+version = "0.8.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75"
+dependencies = [
+ "cfg-if",
+ "getrandom 0.3.4",
+ "once_cell",
+ "version_check",
+ "zerocopy",
+]
+
+[[package]]
+name = "aho-corasick"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "allocator-api2"
+version = "0.2.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
+
+[[package]]
+name = "alloy"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "02b0561294ccedc6181e5528b850b4579e3fbde696507baa00109bfd9054c5bb"
+dependencies = [
+ "alloy-consensus",
+ "alloy-contract",
+ "alloy-core",
+ "alloy-eips",
+ "alloy-genesis",
+ "alloy-network",
+ "alloy-provider",
+ "alloy-pubsub",
+ "alloy-rpc-client",
+ "alloy-rpc-types",
+ "alloy-serde",
+ "alloy-transport",
+ "alloy-transport-http",
+ "alloy-transport-ws",
+]
+
+[[package]]
+name = "alloy-chains"
+version = "0.1.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "28e2652684758b0d9b389d248b209ed9fd9989ef489a550265fe4bb8454fe7eb"
+dependencies = [
+ "alloy-primitives",
+ "num_enum",
+ "strum",
+]
+
+[[package]]
+name = "alloy-consensus"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a101d4d016f47f13890a74290fdd17b05dd175191d9337bc600791fb96e4dea8"
+dependencies = [
+ "alloy-eips",
+ "alloy-primitives",
+ "alloy-rlp",
+ "alloy-serde",
+ "alloy-trie",
+ "auto_impl",
+ "c-kzg",
+ "derive_more 1.0.0",
+ "serde",
+]
+
+[[package]]
+name = "alloy-consensus-any"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fa60357dda9a3d0f738f18844bd6d0f4a5924cc5cf00bfad2ff1369897966123"
+dependencies = [
+ "alloy-consensus",
+ "alloy-eips",
+ "alloy-primitives",
+ "alloy-rlp",
+ "alloy-serde",
+ "serde",
+]
+
+[[package]]
+name = "alloy-contract"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2869e4fb31331d3b8c58c7db567d1e4e4e94ef64640beda3b6dd9b7045690941"
+dependencies = [
+ "alloy-dyn-abi",
+ "alloy-json-abi",
+ "alloy-network",
+ "alloy-network-primitives",
+ "alloy-primitives",
+ "alloy-provider",
+ "alloy-pubsub",
+ "alloy-rpc-types-eth",
+ "alloy-sol-types",
+ "alloy-transport",
+ "futures",
+ "futures-util",
+ "thiserror 2.0.18",
+]
+
+[[package]]
+name = "alloy-core"
+version = "0.8.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05f1ab91967646311bb7dd32db4fee380c69fe624319dcd176b89fb2a420c6b5"
+dependencies = [
+ "alloy-dyn-abi",
+ "alloy-json-abi",
+ "alloy-primitives",
+ "alloy-rlp",
+ "alloy-sol-types",
+]
+
+[[package]]
+name = "alloy-dyn-abi"
+version = "0.8.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf69d3061e2e908a4370bda5d8d6529d5080232776975489eec0b49ce971027e"
+dependencies = [
+ "alloy-json-abi",
+ "alloy-primitives",
+ "alloy-sol-type-parser",
+ "alloy-sol-types",
+ "const-hex",
+ "itoa",
+ "serde",
+ "serde_json",
+ "winnow",
+]
+
+[[package]]
+name = "alloy-eip2930"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0069cf0642457f87a01a014f6dc29d5d893cd4fd8fddf0c3cdfad1bb3ebafc41"
+dependencies = [
+ "alloy-primitives",
+ "alloy-rlp",
+ "serde",
+]
+
+[[package]]
+name = "alloy-eip7702"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c986539255fb839d1533c128e190e557e52ff652c9ef62939e233a81dd93f7e"
+dependencies = [
+ "alloy-primitives",
+ "alloy-rlp",
+ "derive_more 1.0.0",
+ "serde",
+]
+
+[[package]]
+name = "alloy-eips"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b6755b093afef5925f25079dd5a7c8d096398b804ba60cb5275397b06b31689"
+dependencies = [
+ "alloy-eip2930",
+ "alloy-eip7702",
+ "alloy-primitives",
+ "alloy-rlp",
+ "alloy-serde",
+ "c-kzg",
+ "derive_more 1.0.0",
+ "once_cell",
+ "serde",
+ "sha2",
+]
+
+[[package]]
+name = "alloy-genesis"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aeec8e6eab6e52b7c9f918748c9b811e87dbef7312a2e3a2ca1729a92966a6af"
+dependencies = [
+ "alloy-primitives",
+ "alloy-serde",
+ "alloy-trie",
+ "serde",
+]
+
+[[package]]
+name = "alloy-json-abi"
+version = "0.8.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4584e3641181ff073e9d5bec5b3b8f78f9749d9fb108a1cfbc4399a4a139c72a"
+dependencies = [
+ "alloy-primitives",
+ "alloy-sol-type-parser",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "alloy-json-rpc"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4fa077efe0b834bcd89ff4ba547f48fb081e4fdc3673dd7da1b295a2cf2bb7b7"
+dependencies = [
+ "alloy-primitives",
+ "alloy-sol-types",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.18",
+ "tracing",
+]
+
+[[package]]
+name = "alloy-network"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "209a1882a08e21aca4aac6e2a674dc6fcf614058ef8cb02947d63782b1899552"
+dependencies = [
+ "alloy-consensus",
+ "alloy-consensus-any",
+ "alloy-eips",
+ "alloy-json-rpc",
+ "alloy-network-primitives",
+ "alloy-primitives",
+ "alloy-rpc-types-any",
+ "alloy-rpc-types-eth",
+ "alloy-serde",
+ "alloy-signer",
+ "alloy-sol-types",
+ "async-trait",
+ "auto_impl",
+ "futures-utils-wasm",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.18",
+]
+
+[[package]]
+name = "alloy-network-primitives"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c20219d1ad261da7a6331c16367214ee7ded41d001fabbbd656fbf71898b2773"
+dependencies = [
+ "alloy-consensus",
+ "alloy-eips",
+ "alloy-primitives",
+ "alloy-serde",
+ "serde",
+]
+
+[[package]]
+name = "alloy-primitives"
+version = "0.8.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "777d58b30eb9a4db0e5f59bc30e8c2caef877fee7dc8734cf242a51a60f22e05"
+dependencies = [
+ "alloy-rlp",
+ "bytes",
+ "cfg-if",
+ "const-hex",
+ "derive_more 2.1.1",
+ "foldhash",
+ "hashbrown 0.15.5",
+ "indexmap",
+ "itoa",
+ "k256",
+ "keccak-asm",
+ "paste",
+ "proptest",
+ "rand 0.8.5",
+ "ruint",
+ "rustc-hash",
+ "serde",
+ "sha3",
+ "tiny-keccak",
+]
+
+[[package]]
+name = "alloy-provider"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9eefa6f4c798ad01f9b4202d02cea75f5ec11fa180502f4701e2b47965a8c0bb"
+dependencies = [
+ "alloy-chains",
+ "alloy-consensus",
+ "alloy-eips",
+ "alloy-json-rpc",
+ "alloy-network",
+ "alloy-network-primitives",
+ "alloy-primitives",
+ "alloy-pubsub",
+ "alloy-rpc-client",
+ "alloy-rpc-types-eth",
+ "alloy-transport",
+ "alloy-transport-http",
+ "alloy-transport-ws",
+ "async-stream",
+ "async-trait",
+ "auto_impl",
+ "dashmap",
+ "futures",
+ "futures-utils-wasm",
+ "lru",
+ "parking_lot",
+ "pin-project",
+ "reqwest",
+ "schnellru",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.18",
+ "tokio",
+ "tracing",
+ "url",
+ "wasmtimer",
+]
+
+[[package]]
+name = "alloy-pubsub"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aac9a7210e0812b1d814118f426f57eb7fc260a419224dd1c76d169879c06907"
+dependencies = [
+ "alloy-json-rpc",
+ "alloy-primitives",
+ "alloy-transport",
+ "bimap",
+ "futures",
+ "serde",
+ "serde_json",
+ "tokio",
+ "tokio-stream",
+ "tower",
+ "tracing",
+]
+
+[[package]]
+name = "alloy-rlp"
+version = "0.3.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e93e50f64a77ad9c5470bf2ad0ca02f228da70c792a8f06634801e202579f35e"
+dependencies = [
+ "alloy-rlp-derive",
+ "arrayvec",
+ "bytes",
+]
+
+[[package]]
+name = "alloy-rlp-derive"
+version = "0.3.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce8849c74c9ca0f5a03da1c865e3eb6f768df816e67dd3721a398a8a7e398011"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "alloy-rpc-client"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed30bf1041e84cabc5900f52978ca345dd9969f2194a945e6fdec25b0620705c"
+dependencies = [
+ "alloy-json-rpc",
+ "alloy-primitives",
+ "alloy-pubsub",
+ "alloy-transport",
+ "alloy-transport-http",
+ "alloy-transport-ws",
+ "futures",
+ "pin-project",
+ "reqwest",
+ "serde",
+ "serde_json",
+ "tokio",
+ "tokio-stream",
+ "tower",
+ "tracing",
+ "url",
+ "wasmtimer",
+]
+
+[[package]]
+name = "alloy-rpc-types"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5ab686b0fa475d2a4f5916c5f07797734a691ec58e44f0f55d4746ea39cbcefb"
+dependencies = [
+ "alloy-primitives",
+ "alloy-rpc-types-eth",
+ "alloy-serde",
+ "serde",
+]
+
+[[package]]
+name = "alloy-rpc-types-any"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "200661999b6e235d9840be5d60a6e8ae2f0af9eb2a256dd378786744660e36ec"
+dependencies = [
+ "alloy-consensus-any",
+ "alloy-rpc-types-eth",
+ "alloy-serde",
+]
+
+[[package]]
+name = "alloy-rpc-types-eth"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a0600b8b5e2dc0cab12cbf91b5a885c35871789fb7b3a57b434bd4fced5b7a8b"
+dependencies = [
+ "alloy-consensus",
+ "alloy-consensus-any",
+ "alloy-eips",
+ "alloy-network-primitives",
+ "alloy-primitives",
+ "alloy-rlp",
+ "alloy-serde",
+ "alloy-sol-types",
+ "derive_more 1.0.0",
+ "itertools 0.13.0",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "alloy-serde"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9afa753a97002a33b2ccb707d9f15f31c81b8c1b786c95b73cc62bb1d1fd0c3f"
+dependencies = [
+ "alloy-primitives",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "alloy-signer"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b2cbff01a673936c2efd7e00d4c0e9a4dbbd6d600e2ce298078d33efbb19cd7"
+dependencies = [
+ "alloy-primitives",
+ "async-trait",
+ "auto_impl",
+ "elliptic-curve",
+ "k256",
+ "thiserror 2.0.18",
+]
+
+[[package]]
+name = "alloy-sol-macro"
+version = "0.8.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e68b32b6fa0d09bb74b4cefe35ccc8269d711c26629bc7cd98a47eeb12fe353f"
+dependencies = [
+ "alloy-sol-macro-expander",
+ "alloy-sol-macro-input",
+ "proc-macro-error2",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "alloy-sol-macro-expander"
+version = "0.8.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2afe6879ac373e58fd53581636f2cce843998ae0b058ebe1e4f649195e2bd23c"
+dependencies = [
+ "alloy-sol-macro-input",
+ "const-hex",
+ "heck",
+ "indexmap",
+ "proc-macro-error2",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+ "syn-solidity",
+ "tiny-keccak",
+]
+
+[[package]]
+name = "alloy-sol-macro-input"
+version = "0.8.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3ba01aee235a8c699d07e5be97ba215607564e71be72f433665329bec307d28"
+dependencies = [
+ "const-hex",
+ "dunce",
+ "heck",
+ "macro-string",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+ "syn-solidity",
+]
+
+[[package]]
+name = "alloy-sol-type-parser"
+version = "0.8.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c13fc168b97411e04465f03e632f31ef94cad1c7c8951bf799237fd7870d535"
+dependencies = [
+ "serde",
+ "winnow",
+]
+
+[[package]]
+name = "alloy-sol-types"
+version = "0.8.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e960c4b52508ef2ae1e37cae5058e905e9ae099b107900067a503f8c454036f"
+dependencies = [
+ "alloy-json-abi",
+ "alloy-primitives",
+ "alloy-sol-macro",
+ "const-hex",
+ "serde",
+]
+
+[[package]]
+name = "alloy-transport"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d69d36982b9e46075ae6b792b0f84208c6c2c15ad49f6c500304616ef67b70e0"
+dependencies = [
+ "alloy-json-rpc",
+ "base64",
+ "futures-util",
+ "futures-utils-wasm",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.18",
+ "tokio",
+ "tower",
+ "tracing",
+ "url",
+ "wasmtimer",
+]
+
+[[package]]
+name = "alloy-transport-http"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e02ffd5d93ffc51d72786e607c97de3b60736ca3e636ead0ec1f7dce68ea3fd"
+dependencies = [
+ "alloy-json-rpc",
+ "alloy-transport",
+ "reqwest",
+ "serde_json",
+ "tower",
+ "tracing",
+ "url",
+]
+
+[[package]]
+name = "alloy-transport-ws"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c085c4e1e7680b723ffc558f61a22c061ed3f70eb3436f93f3936779c59cec1"
+dependencies = [
+ "alloy-pubsub",
+ "alloy-transport",
+ "futures",
+ "http",
+ "rustls",
+ "serde_json",
+ "tokio",
+ "tokio-tungstenite",
+ "tracing",
+ "ws_stream_wasm",
+]
+
+[[package]]
+name = "alloy-trie"
+version = "0.7.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d95a94854e420f07e962f7807485856cde359ab99ab6413883e15235ad996e8b"
+dependencies = [
+ "alloy-primitives",
+ "alloy-rlp",
+ "arrayvec",
+ "derive_more 1.0.0",
+ "nybbles",
+ "serde",
+ "smallvec",
+ "tracing",
+]
+
+[[package]]
+name = "ark-ff"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6b3235cc41ee7a12aaaf2c575a2ad7b46713a8a50bda2fc3b003a04845c05dd6"
+dependencies = [
+ "ark-ff-asm 0.3.0",
+ "ark-ff-macros 0.3.0",
+ "ark-serialize 0.3.0",
+ "ark-std 0.3.0",
+ "derivative",
+ "num-bigint",
+ "num-traits",
+ "paste",
+ "rustc_version 0.3.3",
+ "zeroize",
+]
+
+[[package]]
+name = "ark-ff"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba"
+dependencies = [
+ "ark-ff-asm 0.4.2",
+ "ark-ff-macros 0.4.2",
+ "ark-serialize 0.4.2",
+ "ark-std 0.4.0",
+ "derivative",
+ "digest 0.10.7",
+ "itertools 0.10.5",
+ "num-bigint",
+ "num-traits",
+ "paste",
+ "rustc_version 0.4.1",
+ "zeroize",
+]
+
+[[package]]
+name = "ark-ff"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a177aba0ed1e0fbb62aa9f6d0502e9b46dad8c2eab04c14258a1212d2557ea70"
+dependencies = [
+ "ark-ff-asm 0.5.0",
+ "ark-ff-macros 0.5.0",
+ "ark-serialize 0.5.0",
+ "ark-std 0.5.0",
+ "arrayvec",
+ "digest 0.10.7",
+ "educe",
+ "itertools 0.13.0",
+ "num-bigint",
+ "num-traits",
+ "paste",
+ "zeroize",
+]
+
+[[package]]
+name = "ark-ff-asm"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db02d390bf6643fb404d3d22d31aee1c4bc4459600aef9113833d17e786c6e44"
+dependencies = [
+ "quote",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "ark-ff-asm"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348"
+dependencies = [
+ "quote",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "ark-ff-asm"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60"
+dependencies = [
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "ark-ff-macros"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20"
+dependencies = [
+ "num-bigint",
+ "num-traits",
+ "quote",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "ark-ff-macros"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565"
+dependencies = [
+ "num-bigint",
+ "num-traits",
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "ark-ff-macros"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09be120733ee33f7693ceaa202ca41accd5653b779563608f1234f78ae07c4b3"
+dependencies = [
+ "num-bigint",
+ "num-traits",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "ark-serialize"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d6c2b318ee6e10f8c2853e73a83adc0ccb88995aa978d8a3408d492ab2ee671"
+dependencies = [
+ "ark-std 0.3.0",
+ "digest 0.9.0",
+]
+
+[[package]]
+name = "ark-serialize"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5"
+dependencies = [
+ "ark-std 0.4.0",
+ "digest 0.10.7",
+ "num-bigint",
+]
+
+[[package]]
+name = "ark-serialize"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f4d068aaf107ebcd7dfb52bc748f8030e0fc930ac8e360146ca54c1203088f7"
+dependencies = [
+ "ark-std 0.5.0",
+ "arrayvec",
+ "digest 0.10.7",
+ "num-bigint",
+]
+
+[[package]]
+name = "ark-std"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c"
+dependencies = [
+ "num-traits",
+ "rand 0.8.5",
+]
+
+[[package]]
+name = "ark-std"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185"
+dependencies = [
+ "num-traits",
+ "rand 0.8.5",
+]
+
+[[package]]
+name = "ark-std"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "246a225cc6131e9ee4f24619af0f19d67761fff15d7ccc22e42b80846e69449a"
+dependencies = [
+ "num-traits",
+ "rand 0.8.5",
+]
+
+[[package]]
+name = "arrayvec"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "async-stream"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
+dependencies = [
+ "async-stream-impl",
+ "futures-core",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "async-stream-impl"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "async-trait"
+version = "0.1.89"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "async_io_stream"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c"
+dependencies = [
+ "futures",
+ "pharos",
+ "rustc_version 0.4.1",
+]
+
+[[package]]
+name = "atomic-waker"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
+
+[[package]]
+name = "auto_impl"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ffdcb70bdbc4d478427380519163274ac86e52916e10f0a8889adf0f96d3fee7"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
+
+[[package]]
+name = "axum"
+version = "0.8.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8"
+dependencies = [
+ "axum-core",
+ "axum-macros",
+ "bytes",
+ "form_urlencoded",
+ "futures-util",
+ "http",
+ "http-body",
+ "http-body-util",
+ "hyper",
+ "hyper-util",
+ "itoa",
+ "matchit",
+ "memchr",
+ "mime",
+ "percent-encoding",
+ "pin-project-lite",
+ "serde_core",
+ "serde_json",
+ "serde_path_to_error",
+ "serde_urlencoded",
+ "sync_wrapper",
+ "tokio",
+ "tower",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "axum-core"
+version = "0.5.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "http",
+ "http-body",
+ "http-body-util",
+ "mime",
+ "pin-project-lite",
+ "sync_wrapper",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "axum-macros"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "604fde5e028fea851ce1d8570bbdc034bec850d157f7569d10f347d06808c05c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "base16ct"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf"
+
+[[package]]
+name = "base64"
+version = "0.22.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
+
+[[package]]
+name = "base64ct"
+version = "1.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06"
+
+[[package]]
+name = "bimap"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "230c5f1ca6a325a32553f8640d31ac9b49f2411e901e427570154868b46da4f7"
+
+[[package]]
+name = "bit-set"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3"
+dependencies = [
+ "bit-vec",
+]
+
+[[package]]
+name = "bit-vec"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
+
+[[package]]
+name = "bitflags"
+version = "2.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
+
+[[package]]
+name = "bitvec"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c"
+dependencies = [
+ "funty",
+ "radium",
+ "tap",
+ "wyz",
+]
+
+[[package]]
+name = "block-buffer"
+version = "0.10.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "blst"
+version = "0.3.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcdb4c7013139a150f9fc55d123186dbfaba0d912817466282c73ac49e71fb45"
+dependencies = [
+ "cc",
+ "glob",
+ "threadpool",
+ "zeroize",
+]
+
+[[package]]
+name = "bumpalo"
+version = "3.19.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510"
+
+[[package]]
+name = "byte-slice-cast"
+version = "1.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7575182f7272186991736b70173b0ea045398f984bf5ebbb3804736ce1330c9d"
+
+[[package]]
+name = "byteorder"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
+
+[[package]]
+name = "bytes"
+version = "1.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "c-kzg"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f0307f72feab3300336fb803a57134159f6e20139af1357f36c54cb90d8e8928"
+dependencies = [
+ "blst",
+ "cc",
+ "glob",
+ "hex",
+ "libc",
+ "once_cell",
+ "serde",
+]
+
+[[package]]
+name = "cc"
+version = "1.2.55"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47b26a0954ae34af09b50f0de26458fa95369a0d478d8236d3f93082b219bd29"
+dependencies = [
+ "find-msvc-tools",
+ "shlex",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
+
+[[package]]
+name = "const-hex"
+version = "1.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3bb320cac8a0750d7f25280aa97b09c26edfe161164238ecbbb31092b079e735"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "proptest",
+ "serde_core",
+]
+
+[[package]]
+name = "const-oid"
+version = "0.9.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
+
+[[package]]
+name = "const_format"
+version = "0.2.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7faa7469a93a566e9ccc1c73fe783b4a65c274c5ace346038dca9c39fe0030ad"
+dependencies = [
+ "const_format_proc_macros",
+]
+
+[[package]]
+name = "const_format_proc_macros"
+version = "0.2.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d57c2eccfb16dbac1f4e61e206105db5820c9d26c3c472bc17c774259ef7744"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-xid",
+]
+
+[[package]]
+name = "convert_case"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9"
+dependencies = [
+ "unicode-segmentation",
+]
+
+[[package]]
+name = "core-foundation"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "core-foundation-sys"
+version = "0.8.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
+
+[[package]]
+name = "cpufeatures"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
+
+[[package]]
+name = "crunchy"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
+
+[[package]]
+name = "crypto-bigint"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76"
+dependencies = [
+ "generic-array",
+ "rand_core 0.6.4",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "crypto-common"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
+dependencies = [
+ "generic-array",
+ "typenum",
+]
+
+[[package]]
+name = "dashmap"
+version = "6.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+ "hashbrown 0.14.5",
+ "lock_api",
+ "once_cell",
+ "parking_lot_core",
+]
+
+[[package]]
+name = "data-encoding"
+version = "2.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea"
+
+[[package]]
+name = "der"
+version = "0.7.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb"
+dependencies = [
+ "const-oid",
+ "zeroize",
+]
+
+[[package]]
+name = "derivative"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "derive_more"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05"
+dependencies = [
+ "derive_more-impl 1.0.0",
+]
+
+[[package]]
+name = "derive_more"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134"
+dependencies = [
+ "derive_more-impl 2.1.1",
+]
+
+[[package]]
+name = "derive_more-impl"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+ "unicode-xid",
+]
+
+[[package]]
+name = "derive_more-impl"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb"
+dependencies = [
+ "convert_case",
+ "proc-macro2",
+ "quote",
+ "rustc_version 0.4.1",
+ "syn 2.0.114",
+ "unicode-xid",
+]
+
+[[package]]
+name = "digest"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "digest"
+version = "0.10.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
+dependencies = [
+ "block-buffer",
+ "const-oid",
+ "crypto-common",
+ "subtle",
+]
+
+[[package]]
+name = "displaydoc"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "dotenvy"
+version = "0.15.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
+
+[[package]]
+name = "dunce"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813"
+
+[[package]]
+name = "ecdsa"
+version = "0.16.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca"
+dependencies = [
+ "der",
+ "digest 0.10.7",
+ "elliptic-curve",
+ "rfc6979",
+ "signature",
+ "spki",
+]
+
+[[package]]
+name = "educe"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d7bc049e1bd8cdeb31b68bbd586a9464ecf9f3944af3958a7a9d0f8b9799417"
+dependencies = [
+ "enum-ordinalize",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "either"
+version = "1.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
+
+[[package]]
+name = "elliptic-curve"
+version = "0.13.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47"
+dependencies = [
+ "base16ct",
+ "crypto-bigint",
+ "digest 0.10.7",
+ "ff",
+ "generic-array",
+ "group",
+ "pkcs8",
+ "rand_core 0.6.4",
+ "sec1",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "enum-ordinalize"
+version = "4.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4a1091a7bb1f8f2c4b28f1fe2cef4980ca2d410a3d727d67ecc3178c9b0800f0"
+dependencies = [
+ "enum-ordinalize-derive",
+]
+
+[[package]]
+name = "enum-ordinalize-derive"
+version = "4.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ca9601fb2d62598ee17836250842873a413586e5d7ed88b356e38ddbb0ec631"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "equivalent"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
+
+[[package]]
+name = "errno"
+version = "0.3.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
+dependencies = [
+ "libc",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "fallible-iterator"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649"
+
+[[package]]
+name = "fallible-streaming-iterator"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
+
+[[package]]
+name = "fastrand"
+version = "2.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
+
+[[package]]
+name = "fastrlp"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418"
+dependencies = [
+ "arrayvec",
+ "auto_impl",
+ "bytes",
+]
+
+[[package]]
+name = "fastrlp"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce8dba4714ef14b8274c371879b175aa55b16b30f269663f19d576f380018dc4"
+dependencies = [
+ "arrayvec",
+ "auto_impl",
+ "bytes",
+]
+
+[[package]]
+name = "ff"
+version = "0.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393"
+dependencies = [
+ "rand_core 0.6.4",
+ "subtle",
+]
+
+[[package]]
+name = "find-msvc-tools"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582"
+
+[[package]]
+name = "fixed-hash"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534"
+dependencies = [
+ "byteorder",
+ "rand 0.8.5",
+ "rustc-hex",
+ "static_assertions",
+]
+
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
+[[package]]
+name = "foldhash"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
+
+[[package]]
+name = "foreign-types"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
+dependencies = [
+ "foreign-types-shared",
+]
+
+[[package]]
+name = "foreign-types-shared"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
+
+[[package]]
+name = "form_urlencoded"
+version = "1.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf"
+dependencies = [
+ "percent-encoding",
+]
+
+[[package]]
+name = "funty"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
+
+[[package]]
+name = "futures"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-channel"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+]
+
+[[package]]
+name = "futures-core"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
+
+[[package]]
+name = "futures-executor"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f"
+dependencies = [
+ "futures-core",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-io"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
+
+[[package]]
+name = "futures-lite"
+version = "2.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad"
+dependencies = [
+ "fastrand",
+ "futures-core",
+ "futures-io",
+ "parking",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "futures-macro"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "futures-sink"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7"
+
+[[package]]
+name = "futures-task"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988"
+
+[[package]]
+name = "futures-timer"
+version = "3.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24"
+
+[[package]]
+name = "futures-util"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-macro",
+ "futures-sink",
+ "futures-task",
+ "memchr",
+ "pin-project-lite",
+ "pin-utils",
+ "slab",
+]
+
+[[package]]
+name = "futures-utils-wasm"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9"
+
+[[package]]
+name = "generic-array"
+version = "0.14.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4bb6743198531e02858aeaea5398fcc883e71851fcbcb5a2f773e2fb6cb1edf2"
+dependencies = [
+ "typenum",
+ "version_check",
+ "zeroize",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd"
+dependencies = [
+ "cfg-if",
+ "js-sys",
+ "libc",
+ "r-efi",
+ "wasip2",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "glob"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
+
+[[package]]
+name = "governor"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be93b4ec2e4710b04d9264c0c7350cdd62a8c20e5e4ac732552ebb8f0debe8eb"
+dependencies = [
+ "cfg-if",
+ "dashmap",
+ "futures-sink",
+ "futures-timer",
+ "futures-util",
+ "getrandom 0.3.4",
+ "no-std-compat",
+ "nonzero_ext",
+ "parking_lot",
+ "portable-atomic",
+ "quanta",
+ "rand 0.9.2",
+ "smallvec",
+ "spinning_top",
+ "web-time",
+]
+
+[[package]]
+name = "group"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63"
+dependencies = [
+ "ff",
+ "rand_core 0.6.4",
+ "subtle",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.13.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e"
+
+[[package]]
+name = "hashbrown"
+version = "0.14.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
+dependencies = [
+ "ahash",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.15.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
+dependencies = [
+ "allocator-api2",
+ "equivalent",
+ "foldhash",
+ "serde",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.16.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
+
+[[package]]
+name = "hashlink"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af"
+dependencies = [
+ "hashbrown 0.14.5",
+]
+
+[[package]]
+name = "heck"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
+
+[[package]]
+name = "hermit-abi"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c"
+
+[[package]]
+name = "hex"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+
+[[package]]
+name = "hmac"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
+dependencies = [
+ "digest 0.10.7",
+]
+
+[[package]]
+name = "http"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a"
+dependencies = [
+ "bytes",
+ "itoa",
+]
+
+[[package]]
+name = "http-body"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
+dependencies = [
+ "bytes",
+ "http",
+]
+
+[[package]]
+name = "http-body-util"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "http",
+ "http-body",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "httparse"
+version = "1.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
+
+[[package]]
+name = "httpdate"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
+
+[[package]]
+name = "hyper"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11"
+dependencies = [
+ "atomic-waker",
+ "bytes",
+ "futures-channel",
+ "futures-core",
+ "http",
+ "http-body",
+ "httparse",
+ "httpdate",
+ "itoa",
+ "pin-project-lite",
+ "pin-utils",
+ "smallvec",
+ "tokio",
+ "want",
+]
+
+[[package]]
+name = "hyper-tls"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0"
+dependencies = [
+ "bytes",
+ "http-body-util",
+ "hyper",
+ "hyper-util",
+ "native-tls",
+ "tokio",
+ "tokio-native-tls",
+ "tower-service",
+]
+
+[[package]]
+name = "hyper-util"
+version = "0.1.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0"
+dependencies = [
+ "base64",
+ "bytes",
+ "futures-channel",
+ "futures-util",
+ "http",
+ "http-body",
+ "hyper",
+ "ipnet",
+ "libc",
+ "percent-encoding",
+ "pin-project-lite",
+ "socket2",
+ "tokio",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "icu_collections"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43"
+dependencies = [
+ "displaydoc",
+ "potential_utf",
+ "yoke",
+ "zerofrom",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_locale_core"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6"
+dependencies = [
+ "displaydoc",
+ "litemap",
+ "tinystr",
+ "writeable",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_normalizer"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599"
+dependencies = [
+ "icu_collections",
+ "icu_normalizer_data",
+ "icu_properties",
+ "icu_provider",
+ "smallvec",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_normalizer_data"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a"
+
+[[package]]
+name = "icu_properties"
+version = "2.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec"
+dependencies = [
+ "icu_collections",
+ "icu_locale_core",
+ "icu_properties_data",
+ "icu_provider",
+ "zerotrie",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_properties_data"
+version = "2.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af"
+
+[[package]]
+name = "icu_provider"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614"
+dependencies = [
+ "displaydoc",
+ "icu_locale_core",
+ "writeable",
+ "yoke",
+ "zerofrom",
+ "zerotrie",
+ "zerovec",
+]
+
+[[package]]
+name = "idna"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de"
+dependencies = [
+ "idna_adapter",
+ "smallvec",
+ "utf8_iter",
+]
+
+[[package]]
+name = "idna_adapter"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344"
+dependencies = [
+ "icu_normalizer",
+ "icu_properties",
+]
+
+[[package]]
+name = "impl-codec"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f"
+dependencies = [
+ "parity-scale-codec",
+]
+
+[[package]]
+name = "impl-trait-for-tuples"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "indexer"
+version = "0.1.0"
+dependencies = [
+ "alloy",
+ "axum",
+ "dotenvy",
+ "futures-lite",
+ "governor",
+ "hex",
+ "nonzero_ext",
+ "r2d2",
+ "r2d2_sqlite",
+ "rusqlite",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.18",
+ "tokio",
+ "tower-http",
+ "tracing",
+ "tracing-subscriber",
+]
+
+[[package]]
+name = "indexmap"
+version = "2.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017"
+dependencies = [
+ "equivalent",
+ "hashbrown 0.16.1",
+ "serde",
+ "serde_core",
+]
+
+[[package]]
+name = "ipnet"
+version = "2.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130"
+
+[[package]]
+name = "iri-string"
+version = "0.7.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a"
+dependencies = [
+ "memchr",
+ "serde",
+]
+
+[[package]]
+name = "itertools"
+version = "0.10.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itertools"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2"
+
+[[package]]
+name = "js-sys"
+version = "0.3.85"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3"
+dependencies = [
+ "once_cell",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "k256"
+version = "0.13.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6e3919bbaa2945715f0bb6d3934a173d1e9a59ac23767fbaaef277265a7411b"
+dependencies = [
+ "cfg-if",
+ "ecdsa",
+ "elliptic-curve",
+ "once_cell",
+ "sha2",
+]
+
+[[package]]
+name = "keccak"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654"
+dependencies = [
+ "cpufeatures",
+]
+
+[[package]]
+name = "keccak-asm"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b646a74e746cd25045aa0fd42f4f7f78aa6d119380182c7e63a5593c4ab8df6f"
+dependencies = [
+ "digest 0.10.7",
+ "sha3-asm",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
+
+[[package]]
+name = "libc"
+version = "0.2.180"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc"
+
+[[package]]
+name = "libm"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981"
+
+[[package]]
+name = "libsqlite3-sys"
+version = "0.30.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149"
+dependencies = [
+ "cc",
+ "pkg-config",
+ "vcpkg",
+]
+
+[[package]]
+name = "linux-raw-sys"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039"
+
+[[package]]
+name = "litemap"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77"
+
+[[package]]
+name = "lock_api"
+version = "0.4.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965"
+dependencies = [
+ "scopeguard",
+]
+
+[[package]]
+name = "log"
+version = "0.4.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
+
+[[package]]
+name = "lru"
+version = "0.12.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38"
+dependencies = [
+ "hashbrown 0.15.5",
+]
+
+[[package]]
+name = "macro-string"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b27834086c65ec3f9387b096d66e99f221cf081c2b738042aa252bcd41204e3"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "matchers"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9"
+dependencies = [
+ "regex-automata",
+]
+
+[[package]]
+name = "matchit"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3"
+
+[[package]]
+name = "memchr"
+version = "2.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273"
+
+[[package]]
+name = "mime"
+version = "0.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
+
+[[package]]
+name = "mio"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc"
+dependencies = [
+ "libc",
+ "wasi",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "native-tls"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e"
+dependencies = [
+ "libc",
+ "log",
+ "openssl",
+ "openssl-probe",
+ "openssl-sys",
+ "schannel",
+ "security-framework",
+ "security-framework-sys",
+ "tempfile",
+]
+
+[[package]]
+name = "no-std-compat"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b93853da6d84c2e3c7d730d6473e8817692dd89be387eb01b94d7f108ecb5b8c"
+
+[[package]]
+name = "nonzero_ext"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21"
+
+[[package]]
+name = "nu-ansi-term"
+version = "0.50.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
+dependencies = [
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "num-bigint"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
+dependencies = [
+ "num-integer",
+ "num-traits",
+]
+
+[[package]]
+name = "num-integer"
+version = "0.1.46"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
+dependencies = [
+ "num-traits",
+]
+
+[[package]]
+name = "num-traits"
+version = "0.2.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
+dependencies = [
+ "autocfg",
+ "libm",
+]
+
+[[package]]
+name = "num_cpus"
+version = "1.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b"
+dependencies = [
+ "hermit-abi",
+ "libc",
+]
+
+[[package]]
+name = "num_enum"
+version = "0.7.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1207a7e20ad57b847bbddc6776b968420d38292bbfe2089accff5e19e82454c"
+dependencies = [
+ "num_enum_derive",
+ "rustversion",
+]
+
+[[package]]
+name = "num_enum_derive"
+version = "0.7.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "nybbles"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8983bb634df7248924ee0c4c3a749609b5abcb082c28fffe3254b3eb3602b307"
+dependencies = [
+ "alloy-rlp",
+ "const-hex",
+ "proptest",
+ "serde",
+ "smallvec",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.21.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
+
+[[package]]
+name = "openssl"
+version = "0.10.75"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328"
+dependencies = [
+ "bitflags",
+ "cfg-if",
+ "foreign-types",
+ "libc",
+ "once_cell",
+ "openssl-macros",
+ "openssl-sys",
+]
+
+[[package]]
+name = "openssl-macros"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "openssl-probe"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e"
+
+[[package]]
+name = "openssl-sys"
+version = "0.9.111"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321"
+dependencies = [
+ "cc",
+ "libc",
+ "pkg-config",
+ "vcpkg",
+]
+
+[[package]]
+name = "parity-scale-codec"
+version = "3.7.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "799781ae679d79a948e13d4824a40970bfa500058d245760dd857301059810fa"
+dependencies = [
+ "arrayvec",
+ "bitvec",
+ "byte-slice-cast",
+ "const_format",
+ "impl-trait-for-tuples",
+ "parity-scale-codec-derive",
+ "rustversion",
+ "serde",
+]
+
+[[package]]
+name = "parity-scale-codec-derive"
+version = "3.7.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34b4653168b563151153c9e4c08ebed57fb8262bebfa79711552fa983c623e7a"
+dependencies = [
+ "proc-macro-crate",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "parking"
+version = "2.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba"
+
+[[package]]
+name = "parking_lot"
+version = "0.12.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a"
+dependencies = [
+ "lock_api",
+ "parking_lot_core",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.9.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "smallvec",
+ "windows-link",
+]
+
+[[package]]
+name = "paste"
+version = "1.0.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
+
+[[package]]
+name = "percent-encoding"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
+
+[[package]]
+name = "pest"
+version = "2.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c9eb05c21a464ea704b53158d358a31e6425db2f63a1a7312268b05fe2b75f7"
+dependencies = [
+ "memchr",
+ "ucd-trie",
+]
+
+[[package]]
+name = "pharos"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414"
+dependencies = [
+ "futures",
+ "rustc_version 0.4.1",
+]
+
+[[package]]
+name = "pin-project"
+version = "1.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a"
+dependencies = [
+ "pin-project-internal",
+]
+
+[[package]]
+name = "pin-project-internal"
+version = "1.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
+
+[[package]]
+name = "pin-utils"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
+
+[[package]]
+name = "pkcs8"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7"
+dependencies = [
+ "der",
+ "spki",
+]
+
+[[package]]
+name = "pkg-config"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
+
+[[package]]
+name = "portable-atomic"
+version = "1.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49"
+
+[[package]]
+name = "potential_utf"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77"
+dependencies = [
+ "zerovec",
+]
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9"
+dependencies = [
+ "zerocopy",
+]
+
+[[package]]
+name = "primitive-types"
+version = "0.12.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2"
+dependencies = [
+ "fixed-hash",
+ "impl-codec",
+ "uint",
+]
+
+[[package]]
+name = "proc-macro-crate"
+version = "3.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983"
+dependencies = [
+ "toml_edit",
+]
+
+[[package]]
+name = "proc-macro-error-attr2"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5"
+dependencies = [
+ "proc-macro2",
+ "quote",
+]
+
+[[package]]
+name = "proc-macro-error2"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802"
+dependencies = [
+ "proc-macro-error-attr2",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.106"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "proptest"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bee689443a2bd0a16ab0348b52ee43e3b2d1b1f931c8aa5c9f8de4c86fbe8c40"
+dependencies = [
+ "bit-set",
+ "bit-vec",
+ "bitflags",
+ "num-traits",
+ "rand 0.9.2",
+ "rand_chacha 0.9.0",
+ "rand_xorshift",
+ "regex-syntax",
+ "rusty-fork",
+ "tempfile",
+ "unarray",
+]
+
+[[package]]
+name = "quanta"
+version = "0.12.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f3ab5a9d756f0d97bdc89019bd2e4ea098cf9cde50ee7564dde6b81ccc8f06c7"
+dependencies = [
+ "crossbeam-utils",
+ "libc",
+ "once_cell",
+ "raw-cpuid",
+ "wasi",
+ "web-sys",
+ "winapi",
+]
+
+[[package]]
+name = "quick-error"
+version = "1.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
+
+[[package]]
+name = "quote"
+version = "1.0.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "r-efi"
+version = "5.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
+
+[[package]]
+name = "r2d2"
+version = "0.8.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93"
+dependencies = [
+ "log",
+ "parking_lot",
+ "scheduled-thread-pool",
+]
+
+[[package]]
+name = "r2d2_sqlite"
+version = "0.25.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eb14dba8247a6a15b7fdbc7d389e2e6f03ee9f184f87117706d509c092dfe846"
+dependencies = [
+ "r2d2",
+ "rusqlite",
+ "uuid",
+]
+
+[[package]]
+name = "radium"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
+
+[[package]]
+name = "rand"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+dependencies = [
+ "libc",
+ "rand_chacha 0.3.1",
+ "rand_core 0.6.4",
+ "serde",
+]
+
+[[package]]
+name = "rand"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
+dependencies = [
+ "rand_chacha 0.9.0",
+ "rand_core 0.9.5",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+dependencies = [
+ "ppv-lite86",
+ "rand_core 0.6.4",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
+dependencies = [
+ "ppv-lite86",
+ "rand_core 0.9.5",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+dependencies = [
+ "getrandom 0.2.17",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.9.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c"
+dependencies = [
+ "getrandom 0.3.4",
+]
+
+[[package]]
+name = "rand_xorshift"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a"
+dependencies = [
+ "rand_core 0.9.5",
+]
+
+[[package]]
+name = "raw-cpuid"
+version = "11.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "498cd0dc59d73224351ee52a95fee0f1a617a2eae0e7d9d720cc622c73a54186"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.5.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.4.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.8.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a96887878f22d7bad8a3b6dc5b7440e0ada9a245242924394987b21cf2210a4c"
+
+[[package]]
+name = "reqwest"
+version = "0.12.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147"
+dependencies = [
+ "base64",
+ "bytes",
+ "futures-core",
+ "http",
+ "http-body",
+ "http-body-util",
+ "hyper",
+ "hyper-tls",
+ "hyper-util",
+ "js-sys",
+ "log",
+ "native-tls",
+ "percent-encoding",
+ "pin-project-lite",
+ "rustls-pki-types",
+ "serde",
+ "serde_json",
+ "serde_urlencoded",
+ "sync_wrapper",
+ "tokio",
+ "tokio-native-tls",
+ "tower",
+ "tower-http",
+ "tower-service",
+ "url",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "web-sys",
+]
+
+[[package]]
+name = "rfc6979"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2"
+dependencies = [
+ "hmac",
+ "subtle",
+]
+
+[[package]]
+name = "ring"
+version = "0.17.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7"
+dependencies = [
+ "cc",
+ "cfg-if",
+ "getrandom 0.2.17",
+ "libc",
+ "untrusted",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
+name = "rlp"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec"
+dependencies = [
+ "bytes",
+ "rustc-hex",
+]
+
+[[package]]
+name = "ruint"
+version = "1.17.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c141e807189ad38a07276942c6623032d3753c8859c146104ac2e4d68865945a"
+dependencies = [
+ "alloy-rlp",
+ "ark-ff 0.3.0",
+ "ark-ff 0.4.2",
+ "ark-ff 0.5.0",
+ "bytes",
+ "fastrlp 0.3.1",
+ "fastrlp 0.4.0",
+ "num-bigint",
+ "num-integer",
+ "num-traits",
+ "parity-scale-codec",
+ "primitive-types",
+ "proptest",
+ "rand 0.8.5",
+ "rand 0.9.2",
+ "rlp",
+ "ruint-macro",
+ "serde_core",
+ "valuable",
+ "zeroize",
+]
+
+[[package]]
+name = "ruint-macro"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "48fd7bd8a6377e15ad9d42a8ec25371b94ddc67abe7c8b9127bec79bebaaae18"
+
+[[package]]
+name = "rusqlite"
+version = "0.32.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7753b721174eb8ff87a9a0e799e2d7bc3749323e773db92e0984debb00019d6e"
+dependencies = [
+ "bitflags",
+ "fallible-iterator",
+ "fallible-streaming-iterator",
+ "hashlink",
+ "libsqlite3-sys",
+ "smallvec",
+]
+
+[[package]]
+name = "rustc-hash"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
+
+[[package]]
+name = "rustc-hex"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6"
+
+[[package]]
+name = "rustc_version"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee"
+dependencies = [
+ "semver 0.11.0",
+]
+
+[[package]]
+name = "rustc_version"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92"
+dependencies = [
+ "semver 1.0.27",
+]
+
+[[package]]
+name = "rustix"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34"
+dependencies = [
+ "bitflags",
+ "errno",
+ "libc",
+ "linux-raw-sys",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "rustls"
+version = "0.23.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b"
+dependencies = [
+ "once_cell",
+ "ring",
+ "rustls-pki-types",
+ "rustls-webpki",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "rustls-pki-types"
+version = "1.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd"
+dependencies = [
+ "zeroize",
+]
+
+[[package]]
+name = "rustls-webpki"
+version = "0.103.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53"
+dependencies = [
+ "ring",
+ "rustls-pki-types",
+ "untrusted",
+]
+
+[[package]]
+name = "rustversion"
+version = "1.0.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
+
+[[package]]
+name = "rusty-fork"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc6bf79ff24e648f6da1f8d1f011e9cac26491b619e6b9280f2b47f1774e6ee2"
+dependencies = [
+ "fnv",
+ "quick-error",
+ "tempfile",
+ "wait-timeout",
+]
+
+[[package]]
+name = "ryu"
+version = "1.0.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984"
+
+[[package]]
+name = "schannel"
+version = "0.1.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1"
+dependencies = [
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "scheduled-thread-pool"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3cbc66816425a074528352f5789333ecff06ca41b36b0b0efdfbb29edc391a19"
+dependencies = [
+ "parking_lot",
+]
+
+[[package]]
+name = "schnellru"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "356285bbf17bea63d9e52e96bd18f039672ac92b55b8cb997d6162a2a37d1649"
+dependencies = [
+ "ahash",
+ "cfg-if",
+ "hashbrown 0.13.2",
+]
+
+[[package]]
+name = "scopeguard"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+
+[[package]]
+name = "sec1"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc"
+dependencies = [
+ "base16ct",
+ "der",
+ "generic-array",
+ "pkcs8",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "security-framework"
+version = "2.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02"
+dependencies = [
+ "bitflags",
+ "core-foundation",
+ "core-foundation-sys",
+ "libc",
+ "security-framework-sys",
+]
+
+[[package]]
+name = "security-framework-sys"
+version = "2.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "semver"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6"
+dependencies = [
+ "semver-parser",
+]
+
+[[package]]
+name = "semver"
+version = "1.0.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
+
+[[package]]
+name = "semver-parser"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9900206b54a3527fdc7b8a938bffd94a568bac4f4aa8113b209df75a09c0dec2"
+dependencies = [
+ "pest",
+]
+
+[[package]]
+name = "send_wrapper"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73"
+
+[[package]]
+name = "serde"
+version = "1.0.228"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
+dependencies = [
+ "serde_core",
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_core"
+version = "1.0.228"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.228"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.149"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86"
+dependencies = [
+ "itoa",
+ "memchr",
+ "serde",
+ "serde_core",
+ "zmij",
+]
+
+[[package]]
+name = "serde_path_to_error"
+version = "0.1.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457"
+dependencies = [
+ "itoa",
+ "serde",
+ "serde_core",
+]
+
+[[package]]
+name = "serde_urlencoded"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
+dependencies = [
+ "form_urlencoded",
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "sha1"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest 0.10.7",
+]
+
+[[package]]
+name = "sha2"
+version = "0.10.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest 0.10.7",
+]
+
+[[package]]
+name = "sha3"
+version = "0.10.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60"
+dependencies = [
+ "digest 0.10.7",
+ "keccak",
+]
+
+[[package]]
+name = "sha3-asm"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b31139435f327c93c6038ed350ae4588e2c70a13d50599509fee6349967ba35a"
+dependencies = [
+ "cc",
+ "cfg-if",
+]
+
+[[package]]
+name = "sharded-slab"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
+dependencies = [
+ "lazy_static",
+]
+
+[[package]]
+name = "shlex"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
+
+[[package]]
+name = "signal-hook-registry"
+version = "1.4.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b"
+dependencies = [
+ "errno",
+ "libc",
+]
+
+[[package]]
+name = "signature"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
+dependencies = [
+ "digest 0.10.7",
+ "rand_core 0.6.4",
+]
+
+[[package]]
+name = "slab"
+version = "0.4.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5"
+
+[[package]]
+name = "smallvec"
+version = "1.15.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "socket2"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "86f4aa3ad99f2088c990dfa82d367e19cb29268ed67c574d10d0a4bfe71f07e0"
+dependencies = [
+ "libc",
+ "windows-sys 0.60.2",
+]
+
+[[package]]
+name = "spinning_top"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d96d2d1d716fb500937168cc09353ffdc7a012be8475ac7308e1bdf0e3923300"
+dependencies = [
+ "lock_api",
+]
+
+[[package]]
+name = "spki"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
+dependencies = [
+ "base64ct",
+ "der",
+]
+
+[[package]]
+name = "stable_deref_trait"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596"
+
+[[package]]
+name = "static_assertions"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
+
+[[package]]
+name = "strum"
+version = "0.27.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf"
+dependencies = [
+ "strum_macros",
+]
+
+[[package]]
+name = "strum_macros"
+version = "0.27.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7"
+dependencies = [
+ "heck",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "subtle"
+version = "2.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
+
+[[package]]
+name = "syn"
+version = "1.0.109"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "syn"
+version = "2.0.114"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "syn-solidity"
+version = "0.8.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ab4e6eed052a117409a1a744c8bda9c3ea6934597cf7419f791cb7d590871c4c"
+dependencies = [
+ "paste",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "sync_wrapper"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263"
+dependencies = [
+ "futures-core",
+]
+
+[[package]]
+name = "synstructure"
+version = "0.13.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "tap"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
+
+[[package]]
+name = "tempfile"
+version = "3.24.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c"
+dependencies = [
+ "fastrand",
+ "getrandom 0.3.4",
+ "once_cell",
+ "rustix",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "thiserror"
+version = "1.0.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
+dependencies = [
+ "thiserror-impl 1.0.69",
+]
+
+[[package]]
+name = "thiserror"
+version = "2.0.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4"
+dependencies = [
+ "thiserror-impl 2.0.18",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "2.0.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "thread_local"
+version = "1.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "threadpool"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
+dependencies = [
+ "num_cpus",
+]
+
+[[package]]
+name = "tiny-keccak"
+version = "2.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237"
+dependencies = [
+ "crunchy",
+]
+
+[[package]]
+name = "tinystr"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869"
+dependencies = [
+ "displaydoc",
+ "zerovec",
+]
+
+[[package]]
+name = "tokio"
+version = "1.49.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86"
+dependencies = [
+ "bytes",
+ "libc",
+ "mio",
+ "parking_lot",
+ "pin-project-lite",
+ "signal-hook-registry",
+ "socket2",
+ "tokio-macros",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "tokio-macros"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "tokio-native-tls"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2"
+dependencies = [
+ "native-tls",
+ "tokio",
+]
+
+[[package]]
+name = "tokio-rustls"
+version = "0.26.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61"
+dependencies = [
+ "rustls",
+ "tokio",
+]
+
+[[package]]
+name = "tokio-stream"
+version = "0.1.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70"
+dependencies = [
+ "futures-core",
+ "pin-project-lite",
+ "tokio",
+ "tokio-util",
+]
+
+[[package]]
+name = "tokio-tungstenite"
+version = "0.24.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "edc5f74e248dc973e0dbb7b74c7e0d6fcc301c694ff50049504004ef4d0cdcd9"
+dependencies = [
+ "futures-util",
+ "log",
+ "rustls",
+ "rustls-pki-types",
+ "tokio",
+ "tokio-rustls",
+ "tungstenite",
+ "webpki-roots 0.26.11",
+]
+
+[[package]]
+name = "tokio-util"
+version = "0.7.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "futures-sink",
+ "pin-project-lite",
+ "tokio",
+]
+
+[[package]]
+name = "toml_datetime"
+version = "0.7.5+spec-1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347"
+dependencies = [
+ "serde_core",
+]
+
+[[package]]
+name = "toml_edit"
+version = "0.23.10+spec-1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269"
+dependencies = [
+ "indexmap",
+ "toml_datetime",
+ "toml_parser",
+ "winnow",
+]
+
+[[package]]
+name = "toml_parser"
+version = "1.0.6+spec-1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44"
+dependencies = [
+ "winnow",
+]
+
+[[package]]
+name = "tower"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4"
+dependencies = [
+ "futures-core",
+ "futures-util",
+ "pin-project-lite",
+ "sync_wrapper",
+ "tokio",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "tower-http"
+version = "0.6.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8"
+dependencies = [
+ "bitflags",
+ "bytes",
+ "futures-util",
+ "http",
+ "http-body",
+ "iri-string",
+ "pin-project-lite",
+ "tower",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "tower-layer"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
+
+[[package]]
+name = "tower-service"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
+
+[[package]]
+name = "tracing"
+version = "0.1.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100"
+dependencies = [
+ "log",
+ "pin-project-lite",
+ "tracing-attributes",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-attributes"
+version = "0.1.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a"
+dependencies = [
+ "once_cell",
+ "valuable",
+]
+
+[[package]]
+name = "tracing-log"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
+dependencies = [
+ "log",
+ "once_cell",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-subscriber"
+version = "0.3.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e"
+dependencies = [
+ "matchers",
+ "nu-ansi-term",
+ "once_cell",
+ "regex-automata",
+ "sharded-slab",
+ "smallvec",
+ "thread_local",
+ "tracing",
+ "tracing-core",
+ "tracing-log",
+]
+
+[[package]]
+name = "try-lock"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
+
+[[package]]
+name = "tungstenite"
+version = "0.24.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "18e5b8366ee7a95b16d32197d0b2604b43a0be89dc5fac9f8e96ccafbaedda8a"
+dependencies = [
+ "byteorder",
+ "bytes",
+ "data-encoding",
+ "http",
+ "httparse",
+ "log",
+ "rand 0.8.5",
+ "rustls",
+ "rustls-pki-types",
+ "sha1",
+ "thiserror 1.0.69",
+ "utf-8",
+]
+
+[[package]]
+name = "typenum"
+version = "1.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb"
+
+[[package]]
+name = "ucd-trie"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971"
+
+[[package]]
+name = "uint"
+version = "0.9.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52"
+dependencies = [
+ "byteorder",
+ "crunchy",
+ "hex",
+ "static_assertions",
+]
+
+[[package]]
+name = "unarray"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5"
+
+[[package]]
+name = "unicode-segmentation"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
+
+[[package]]
+name = "untrusted"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
+
+[[package]]
+name = "url"
+version = "2.5.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed"
+dependencies = [
+ "form_urlencoded",
+ "idna",
+ "percent-encoding",
+ "serde",
+]
+
+[[package]]
+name = "utf-8"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
+
+[[package]]
+name = "utf8_iter"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
+
+[[package]]
+name = "uuid"
+version = "1.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee48d38b119b0cd71fe4141b30f5ba9c7c5d9f4e7a3a8b4a674e4b6ef789976f"
+dependencies = [
+ "getrandom 0.3.4",
+ "js-sys",
+ "rand 0.9.2",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "valuable"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
+
+[[package]]
+name = "vcpkg"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
+
+[[package]]
+name = "version_check"
+version = "0.9.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
+
+[[package]]
+name = "wait-timeout"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "want"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e"
+dependencies = [
+ "try-lock",
+]
+
+[[package]]
+name = "wasi"
+version = "0.11.1+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
+
+[[package]]
+name = "wasip2"
+version = "1.0.2+wasi-0.2.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5"
+dependencies = [
+ "wit-bindgen",
+]
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.108"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+ "rustversion",
+ "wasm-bindgen-macro",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-futures"
+version = "0.4.58"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70a6e77fd0ae8029c9ea0063f87c46fde723e7d887703d74ad2616d792e51e6f"
+dependencies = [
+ "cfg-if",
+ "futures-util",
+ "js-sys",
+ "once_cell",
+ "wasm-bindgen",
+ "web-sys",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.108"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.108"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55"
+dependencies = [
+ "bumpalo",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.108"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "wasmtimer"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1c598d6b99ea013e35844697fc4670d08339d5cda15588f193c6beedd12f644b"
+dependencies = [
+ "futures",
+ "js-sys",
+ "parking_lot",
+ "pin-utils",
+ "slab",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "web-sys"
+version = "0.3.85"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "312e32e551d92129218ea9a2452120f4aabc03529ef03e4d0d82fb2780608598"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "web-time"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "webpki-roots"
+version = "0.26.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9"
+dependencies = [
+ "webpki-roots 1.0.6",
+]
+
+[[package]]
+name = "webpki-roots"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22cfaf3c063993ff62e73cb4311efde4db1efb31ab78a3e5c457939ad5cc0bed"
+dependencies = [
+ "rustls-pki-types",
+]
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "windows-link"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
+
+[[package]]
+name = "windows-sys"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
+dependencies = [
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.60.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
+dependencies = [
+ "windows-targets 0.53.5",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.61.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
+dependencies = [
+ "windows-link",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
+dependencies = [
+ "windows_aarch64_gnullvm 0.52.6",
+ "windows_aarch64_msvc 0.52.6",
+ "windows_i686_gnu 0.52.6",
+ "windows_i686_gnullvm 0.52.6",
+ "windows_i686_msvc 0.52.6",
+ "windows_x86_64_gnu 0.52.6",
+ "windows_x86_64_gnullvm 0.52.6",
+ "windows_x86_64_msvc 0.52.6",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.53.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3"
+dependencies = [
+ "windows-link",
+ "windows_aarch64_gnullvm 0.53.1",
+ "windows_aarch64_msvc 0.53.1",
+ "windows_i686_gnu 0.53.1",
+ "windows_i686_gnullvm 0.53.1",
+ "windows_i686_msvc 0.53.1",
+ "windows_x86_64_gnu 0.53.1",
+ "windows_x86_64_gnullvm 0.53.1",
+ "windows_x86_64_msvc 0.53.1",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650"
+
+[[package]]
+name = "winnow"
+version = "0.7.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "wit-bindgen"
+version = "0.51.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5"
+
+[[package]]
+name = "writeable"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9"
+
+[[package]]
+name = "ws_stream_wasm"
+version = "0.7.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c173014acad22e83f16403ee360115b38846fe754e735c5d9d3803fe70c6abc"
+dependencies = [
+ "async_io_stream",
+ "futures",
+ "js-sys",
+ "log",
+ "pharos",
+ "rustc_version 0.4.1",
+ "send_wrapper",
+ "thiserror 2.0.18",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "web-sys",
+]
+
+[[package]]
+name = "wyz"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
+dependencies = [
+ "tap",
+]
+
+[[package]]
+name = "yoke"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954"
+dependencies = [
+ "stable_deref_trait",
+ "yoke-derive",
+ "zerofrom",
+]
+
+[[package]]
+name = "yoke-derive"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+ "synstructure",
+]
+
+[[package]]
+name = "zerocopy"
+version = "0.8.38"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57cf3aa6855b23711ee9852dfc97dfaa51c45feaba5b645d0c777414d494a961"
+dependencies = [
+ "zerocopy-derive",
+]
+
+[[package]]
+name = "zerocopy-derive"
+version = "0.8.38"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a616990af1a287837c4fe6596ad77ef57948f787e46ce28e166facc0cc1cb75"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "zerofrom"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5"
+dependencies = [
+ "zerofrom-derive",
+]
+
+[[package]]
+name = "zerofrom-derive"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+ "synstructure",
+]
+
+[[package]]
+name = "zeroize"
+version = "1.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
+dependencies = [
+ "zeroize_derive",
+]
+
+[[package]]
+name = "zeroize_derive"
+version = "1.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85a5b4158499876c763cb03bc4e49185d3cccbabb15b33c627f7884f43db852e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "zerotrie"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851"
+dependencies = [
+ "displaydoc",
+ "yoke",
+ "zerofrom",
+]
+
+[[package]]
+name = "zerovec"
+version = "0.11.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002"
+dependencies = [
+ "yoke",
+ "zerofrom",
+ "zerovec-derive",
+]
+
+[[package]]
+name = "zerovec-derive"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.114",
+]
+
+[[package]]
+name = "zmij"
+version = "1.0.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ff05f8caa9038894637571ae6b9e29466c1f4f829d26c9b28f869a29cbe3445"
diff --git a/apps/indexer/Cargo.toml b/apps/indexer/Cargo.toml
new file mode 100644
index 0000000..501f008
--- /dev/null
+++ b/apps/indexer/Cargo.toml
@@ -0,0 +1,37 @@
+[package]
+name = "indexer"
+version = "0.1.0"
+edition = "2021"
+rust-version = "1.84"
+default-run = "indexer"
+
+[dependencies]
+tokio = { version = "1.43", features = ["full", "signal"] }
+axum = { version = "0.8", features = ["macros"] }
+tower-http = { version = "0.6", features = ["cors", "trace"] }
+
+alloy = { version = "0.7", features = [
+ "provider-ws",
+ "provider-http",
+ "rpc-types-eth",
+ "sol-types",
+]}
+
+rusqlite = { version = "0.32", features = ["bundled"] }
+r2d2 = "0.8"
+r2d2_sqlite = "0.25"
+
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+dotenvy = "0.15"
+tracing = "0.1"
+tracing-subscriber = { version = "0.3", features = ["env-filter"] }
+thiserror = "2.0"
+hex = "0.4"
+governor = "0.8"
+nonzero_ext = "0.3"
+futures-lite = "2.5"
+
+[profile.release]
+lto = true
+codegen-units = 1
diff --git a/apps/indexer/Dockerfile b/apps/indexer/Dockerfile
new file mode 100644
index 0000000..7b991f8
--- /dev/null
+++ b/apps/indexer/Dockerfile
@@ -0,0 +1,12 @@
+FROM rust:1.84-bookworm AS builder
+WORKDIR /app
+COPY . .
+RUN cargo build --release
+
+FROM debian:bookworm-slim
+RUN apt-get update && apt-get install -y ca-certificates && rm -rf /var/lib/apt/lists/*
+COPY --from=builder /app/target/release/indexer /usr/local/bin/
+ENV DATABASE_PATH=/data/indexer.db
+VOLUME ["/data"]
+EXPOSE 3000
+CMD ["indexer"]
diff --git a/apps/indexer/README.md b/apps/indexer/README.md
new file mode 100644
index 0000000..2dc316c
--- /dev/null
+++ b/apps/indexer/README.md
@@ -0,0 +1,82 @@
+# Indexer
+
+Rust-based event indexer. Listens to on-chain events via WebSocket and persists them in SQLite with stable sequence counters for APSI integration.
+
+## Features
+
+- WebSocket subscription to Verbeth contract events
+- Historical backfill on startup
+- SQLite persistence with WAL mode
+- Atomic sequence counters per topic/recipient
+- Health endpoint for monitoring
+- Graceful shutdown
+
+## Quick Start
+
+```bash
+cp .env.example .env
+
+vim .env
+
+cargo run
+```
+
+## Environment Variables
+
+| Variable | Required | Default | Description |
+|----------|----------|---------|-------------|
+| `RPC_WS_URL` | Yes | - | WebSocket RPC endpoint |
+| `RPC_HTTP_URL` | No | derived from WS | HTTP RPC for backfill |
+| `CONTRACT_ADDRESS` | No | Verbeth proxy | Contract to index |
+| `CREATION_BLOCK` | No | 37097547 | Block to start backfill from |
+| `DATABASE_PATH` | No | ./data/indexer.db | SQLite file location |
+| `SERVER_PORT` | No | 3002 | HTTP server port |
+| `BACKFILL_DAYS` | No | 7 | Days to backfill on empty DB |
+| `RUST_LOG` | No | info | Log level |
+
+## API Endpoints
+
+### GET /health
+
+Returns indexer status:
+
+```json
+{
+ "status": "ok",
+ "last_block": 12345678,
+ "uptime_seconds": 3600,
+ "counts": {
+ "messages": 150,
+ "handshakes": 42,
+ "handshake_responses": 38
+ }
+}
+```
+
+## Deployment
+
+### Docker
+
+```bash
+docker build -t indexer .
+docker run -v indexer-data:/data -e RPC_WS_URL=wss://... indexer
+```
+
+### Fly.io
+
+```bash
+fly launch --no-deploy
+fly secrets set RPC_WS_URL=wss://...
+fly volume create indexer_data --size 1
+fly deploy
+```
+
+## Database Schema
+
+Events are stored with stable `seq` counters:
+
+- `messages(topic, seq)` - MessageSent events
+- `handshakes(recipient_hash, seq)` - Handshake events
+- `handshake_responses(global_seq)` - HandshakeResponse events
+
+These counters enable deterministic item identifiers for APSI queries.
diff --git a/apps/indexer/fly.toml b/apps/indexer/fly.toml
new file mode 100644
index 0000000..faec29f
--- /dev/null
+++ b/apps/indexer/fly.toml
@@ -0,0 +1,27 @@
+app = "indexer"
+primary_region = "iad"
+
+[env]
+ RUST_LOG = "info"
+ SERVER_PORT = "3002"
+ CONTRACT_ADDRESS = "0x82C9c5475D63e4C9e959280e9066aBb24973a663"
+ CREATION_BLOCK = "37097547"
+
+[http_service]
+ internal_port = 3002
+ force_https = true
+ min_machines_running = 1
+
+[[http_service.checks]]
+ path = "/health"
+ interval = "30s"
+ timeout = "10s"
+
+[[mounts]]
+ source = "indexer_data"
+ destination = "/data"
+
+[[vm]]
+ memory = "512mb"
+ cpu_kind = "shared"
+ cpus = 1
diff --git a/apps/indexer/src/.DS_Store b/apps/indexer/src/.DS_Store
new file mode 100644
index 0000000..9bd67d3
Binary files /dev/null and b/apps/indexer/src/.DS_Store differ
diff --git a/apps/indexer/src/api/health.rs b/apps/indexer/src/api/health.rs
new file mode 100644
index 0000000..b28c86f
--- /dev/null
+++ b/apps/indexer/src/api/health.rs
@@ -0,0 +1,48 @@
+use axum::{extract::State, http::StatusCode, Json};
+use serde::Serialize;
+
+use crate::db::models::EventCounts;
+use crate::db::queries::{get_event_counts, get_last_processed_block};
+
+use super::state::AppState;
+
+#[derive(Serialize)]
+pub struct HealthResponse {
+    pub status: &'static str,
+    pub last_block: Option<i64>,
+    pub uptime_seconds: u64,
+    pub counts: EventCountsResponse,
+}
+
+#[derive(Serialize)]
+pub struct EventCountsResponse {
+ pub messages: i64,
+ pub handshakes: i64,
+ pub handshake_responses: i64,
+}
+
+impl From<EventCounts> for EventCountsResponse {
+    fn from(c: EventCounts) -> Self {
+        Self {
+            messages: c.messages,
+            handshakes: c.handshakes,
+            handshake_responses: c.handshake_responses,
+        }
+    }
+}
+
+pub async fn health(State(state): State<AppState>) -> Result<Json<HealthResponse>, StatusCode> {
+    let conn = state.pool.get().map_err(|_| StatusCode::SERVICE_UNAVAILABLE)?;
+
+    let last_block = get_last_processed_block(&conn).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
+    let counts = get_event_counts(&conn).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
+
+    let status = if last_block.is_some() { "ok" } else { "syncing" };
+
+    Ok(Json(HealthResponse {
+        status,
+        last_block,
+        uptime_seconds: state.uptime_seconds(),
+        counts: counts.into(),
+    }))
+}
diff --git a/apps/indexer/src/api/mod.rs b/apps/indexer/src/api/mod.rs
new file mode 100644
index 0000000..26acef3
--- /dev/null
+++ b/apps/indexer/src/api/mod.rs
@@ -0,0 +1,21 @@
+use axum::{routing::get, Router};
+use tower_http::cors::{Any, CorsLayer};
+use tower_http::trace::TraceLayer;
+
+pub mod health;
+pub mod state;
+
+pub use state::AppState;
+
+pub fn create_router(state: AppState) -> Router {
+    let cors_layer = CorsLayer::new()
+        .allow_headers(Any)
+        .allow_methods(Any)
+        .allow_origin(Any);
+
+    Router::new()
+        .route("/health", get(health::health))
+        .layer(cors_layer)
+        .layer(TraceLayer::new_for_http())
+        .with_state(state)
+}
diff --git a/apps/indexer/src/api/state.rs b/apps/indexer/src/api/state.rs
new file mode 100644
index 0000000..05a2f41
--- /dev/null
+++ b/apps/indexer/src/api/state.rs
@@ -0,0 +1,26 @@
+use std::sync::Arc;
+use std::time::Instant;
+
+use crate::config::Config;
+use crate::db::DbPool;
+
+#[derive(Clone)]
+pub struct AppState {
+    pub pool: DbPool,
+    pub config: Arc<Config>,
+    pub start_time: Instant,
+}
+
+impl AppState {
+ pub fn new(pool: DbPool, config: Config) -> Self {
+ Self {
+ pool,
+ config: Arc::new(config),
+ start_time: Instant::now(),
+ }
+ }
+
+ pub fn uptime_seconds(&self) -> u64 {
+ self.start_time.elapsed().as_secs()
+ }
+}
diff --git a/apps/indexer/src/config.rs b/apps/indexer/src/config.rs
new file mode 100644
index 0000000..21ead80
--- /dev/null
+++ b/apps/indexer/src/config.rs
@@ -0,0 +1,98 @@
+use alloy::primitives::Address;
+use std::str::FromStr;
+
+use crate::error::{IndexerError, Result};
+
+/// Sanitize RPC URL by masking API keys.
+/// Common patterns: .../v2/abc123 (Alchemy), .../v3/abc123 (Infura)
+pub fn sanitize_rpc_url(url: &str) -> String {
+    if let Some(slash) = url.rfind('/') {
+        let (prefix, tail) = url.split_at(slash + 1);
+        // API keys are typically long alphanumeric strings without dots or slashes
+        if tail.len() > 8 && !tail.contains('.') && !tail.contains('/') {
+            return format!("{}***", prefix);
+        }
+    }
+    url.to_string()
+}
+
+#[allow(dead_code)]
+pub struct Config {
+    pub rpc_ws_url: String,
+    pub rpc_http_url: Option<String>,
+    pub contract_address: Address,
+    pub creation_block: u64,
+    pub database_path: String,
+    pub server_port: u16,
+    pub backfill_days: u32,
+    pub retention_days: u32,
+    pub rpc_chunk_size: u64,
+    /// SQLite synchronous mode: "OFF", "NORMAL", or "FULL" (default: NORMAL)
+    pub sqlite_sync_mode: String,
+}
+
+impl Config {
+    pub fn from_env() -> Result<Self> {
+        dotenvy::dotenv().ok();
+
+        let rpc_ws_url = std::env::var("RPC_WS_URL")
+            .map_err(|_| IndexerError::Config("RPC_WS_URL is required".into()))?;
+
+        let rpc_http_url = std::env::var("RPC_HTTP_URL").ok();
+
+        let contract_address = std::env::var("CONTRACT_ADDRESS")
+            .unwrap_or_else(|_| "0x82C9c5475D63e4C9e959280e9066aBb24973a663".into());
+        let contract_address = Address::from_str(&contract_address)
+            .map_err(|e| IndexerError::Config(format!("Invalid CONTRACT_ADDRESS: {e}")))?;
+
+        let creation_block = std::env::var("CREATION_BLOCK")
+            .unwrap_or_else(|_| "37097547".into())
+            .parse::<u64>()
+            .map_err(|e| IndexerError::Config(format!("Invalid CREATION_BLOCK: {e}")))?;
+
+        let database_path = std::env::var("DATABASE_PATH")
+            .unwrap_or_else(|_| "./data/indexer.db".into());
+
+        let server_port = std::env::var("SERVER_PORT")
+            .unwrap_or_else(|_| "3002".into())
+            .parse::<u16>()
+            .map_err(|e| IndexerError::Config(format!("Invalid SERVER_PORT: {e}")))?;
+
+        let backfill_days = std::env::var("BACKFILL_DAYS")
+            .unwrap_or_else(|_| "7".into())
+            .parse::<u32>()
+            .map_err(|e| IndexerError::Config(format!("Invalid BACKFILL_DAYS: {e}")))?;
+
+        let retention_days = std::env::var("RETENTION_DAYS")
+            .unwrap_or_else(|_| "7".into())
+            .parse::<u32>()
+            .map_err(|e| IndexerError::Config(format!("Invalid RETENTION_DAYS: {e}")))?;
+
+        // Default to 10 for Alchemy free tier compatibility
+        let rpc_chunk_size = std::env::var("RPC_CHUNK_SIZE")
+            .unwrap_or_else(|_| "10".into())
+            .parse::<u64>()
+            .map_err(|e| IndexerError::Config(format!("Invalid RPC_CHUNK_SIZE: {e}")))?;
+
+        // SQLite sync mode: NORMAL (default), FULL for extra local durability
+        let sqlite_sync_mode = std::env::var("SQLITE_SYNC_MODE").unwrap_or_else(|_| "NORMAL".into());
+        if !["OFF", "NORMAL", "FULL"].contains(&sqlite_sync_mode.as_str()) {
+            return Err(IndexerError::Config(format!(
+                "Invalid SQLITE_SYNC_MODE: {sqlite_sync_mode} (expected OFF, NORMAL, or FULL)"
+            )));
+        }
+
+        Ok(Self {
+            rpc_ws_url,
+            rpc_http_url,
+            contract_address,
+            creation_block,
+            database_path,
+            server_port,
+            backfill_days,
+            retention_days,
+            rpc_chunk_size,
+            sqlite_sync_mode,
+        })
+    }
+}
diff --git a/apps/indexer/src/db/mod.rs b/apps/indexer/src/db/mod.rs
new file mode 100644
index 0000000..b9ca717
--- /dev/null
+++ b/apps/indexer/src/db/mod.rs
@@ -0,0 +1,33 @@
+use r2d2::Pool;
+use r2d2_sqlite::SqliteConnectionManager;
+use std::path::Path;
+
+use crate::error::Result;
+
+pub mod models;
+pub mod queries;
+pub mod schema;
+
+pub type DbPool = Pool<SqliteConnectionManager>;
+
+pub fn create_pool(database_path: &str, sync_mode: &str) -> Result<DbPool> {
+    if let Some(parent) = Path::new(database_path).parent() {
+        std::fs::create_dir_all(parent).ok();
+    }
+
+    let manager = SqliteConnectionManager::file(database_path);
+    let pool = Pool::builder().max_size(4).build(manager)?;
+
+    let conn = pool.get()?;
+    let pragmas = format!(
+        "PRAGMA journal_mode=WAL; PRAGMA synchronous={}; PRAGMA foreign_keys=ON;",
+        sync_mode
+    );
+    conn.execute_batch(&pragmas)?;
+
+    tracing::info!("SQLite initialized with synchronous={}", sync_mode);
+
+    schema::run_migrations(&conn)?;
+
+    Ok(pool)
+}
diff --git a/apps/indexer/src/db/models.rs b/apps/indexer/src/db/models.rs
new file mode 100644
index 0000000..b4dc940
--- /dev/null
+++ b/apps/indexer/src/db/models.rs
@@ -0,0 +1,40 @@
+pub struct MessageRow {
+    pub topic: [u8; 32],
+    pub seq: i64,
+    pub sender: [u8; 20],
+    pub ciphertext: Vec<u8>,
+    pub timestamp: i64,
+    pub nonce: i64,
+    pub block_number: i64,
+    pub log_index: i64,
+    pub block_timestamp: i64,
+}
+
+pub struct HandshakeRow {
+    pub recipient_hash: [u8; 32],
+    pub seq: i64,
+    pub sender: [u8; 20],
+    pub pub_keys: Vec<u8>,
+    pub ephemeral_pub_key: Vec<u8>,
+    pub plaintext_payload: Vec<u8>,
+    pub block_number: i64,
+    pub log_index: i64,
+    pub block_timestamp: i64,
+}
+
+pub struct HsrRow {
+    pub global_seq: i64,
+    pub in_response_to: [u8; 32],
+    pub responder: [u8; 20],
+    pub responder_ephemeral_r: [u8; 32],
+    pub ciphertext: Vec<u8>,
+    pub block_number: i64,
+    pub log_index: i64,
+    pub block_timestamp: i64,
+}
+
+pub struct EventCounts {
+    pub messages: i64,
+    pub handshakes: i64,
+    pub handshake_responses: i64,
+}
diff --git a/apps/indexer/src/db/queries.rs b/apps/indexer/src/db/queries.rs
new file mode 100644
index 0000000..e347c15
--- /dev/null
+++ b/apps/indexer/src/db/queries.rs
@@ -0,0 +1,144 @@
+use rusqlite::{params, Connection, OptionalExtension};
+
+use crate::error::Result;
+
+use super::models::{EventCounts, HandshakeRow, HsrRow, MessageRow};
+
+pub fn get_and_increment_seq(
+    conn: &Connection,
+    key_type: &str,
+    key_hash: Option<&[u8; 32]>,
+) -> Result<i64> {
+    // BEGIN IMMEDIATE takes the write lock up front; key_hash=None is stored as an empty-blob sentinel because NULLs never match UNIQUE/ON CONFLICT targets in SQLite (a NULL key would grow duplicate counter rows)
+    conn.execute("BEGIN IMMEDIATE", [])?;
+
+    let result = (|| -> Result<i64> {
+        let seq: i64 = conn
+            .query_row(
+                "SELECT next_seq FROM seq_counters WHERE key_type = ?1 AND key_hash = ?2",
+                params![key_type, key_hash.map_or(&[] as &[u8], |h| h.as_slice())],
+                |row| row.get(0),
+            )
+            .optional()?
+            .unwrap_or(0);
+
+        conn.execute(
+            "INSERT INTO seq_counters (key_type, key_hash, next_seq) VALUES (?1, ?2, ?3)
+             ON CONFLICT(key_type, key_hash) DO UPDATE SET next_seq = ?3",
+            params![key_type, key_hash.map_or(&[] as &[u8], |h| h.as_slice()), seq + 1],
+        )?;
+
+        Ok(seq)
+    })();
+
+    match &result {
+        Ok(_) => {
+            conn.execute("COMMIT", [])?;
+        }
+        Err(_) => {
+            let _ = conn.execute("ROLLBACK", []);
+        }
+    }
+
+    result
+}
+
+pub fn insert_message(conn: &Connection, row: &MessageRow) -> Result<bool> {
+    let inserted = conn.execute(
+        "INSERT OR IGNORE INTO messages
+         (topic, seq, sender, ciphertext, timestamp, nonce, block_number, log_index, block_timestamp)
+         VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9)",
+        params![
+            row.topic.as_slice(),
+            row.seq,
+            row.sender.as_slice(),
+            &row.ciphertext,
+            row.timestamp,
+            row.nonce,
+            row.block_number,
+            row.log_index,
+            row.block_timestamp,
+        ],
+    )?;
+    Ok(inserted > 0)
+}
+
+pub fn insert_handshake(conn: &Connection, row: &HandshakeRow) -> Result<bool> {
+    let inserted = conn.execute(
+        "INSERT OR IGNORE INTO handshakes
+         (recipient_hash, seq, sender, pub_keys, ephemeral_pub_key, plaintext_payload, block_number, log_index, block_timestamp)
+         VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9)",
+        params![
+            row.recipient_hash.as_slice(),
+            row.seq,
+            row.sender.as_slice(),
+            &row.pub_keys,
+            &row.ephemeral_pub_key,
+            &row.plaintext_payload,
+            row.block_number,
+            row.log_index,
+            row.block_timestamp,
+        ],
+    )?;
+    Ok(inserted > 0)
+}
+
+pub fn insert_hsr(conn: &Connection, row: &HsrRow) -> Result<bool> {
+    let inserted = conn.execute(
+        "INSERT OR IGNORE INTO handshake_responses
+         (global_seq, in_response_to, responder, responder_ephemeral_r, ciphertext, block_number, log_index, block_timestamp)
+         VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8)",
+        params![
+            row.global_seq,
+            row.in_response_to.as_slice(),
+            row.responder.as_slice(),
+            row.responder_ephemeral_r.as_slice(),
+            &row.ciphertext,
+            row.block_number,
+            row.log_index,
+            row.block_timestamp,
+        ],
+    )?;
+    Ok(inserted > 0)
+}
+
+pub fn get_last_processed_block(conn: &Connection) -> Result<Option<i64>> {
+    let value = conn
+        .query_row(
+            "SELECT value FROM indexer_state WHERE key = 'last_block'",
+            [],
+            |row| row.get::<_, String>(0),
+        )
+        .optional()?
+        .and_then(|v| v.parse::<i64>().ok());
+
+    Ok(value)
+}
+
+pub fn set_last_processed_block(conn: &Connection, block: i64) -> Result<()> {
+ conn.execute(
+ "INSERT OR REPLACE INTO indexer_state (key, value) VALUES ('last_block', ?1)",
+ params![block.to_string()],
+ )?;
+ Ok(())
+}
+
+pub fn get_event_counts(conn: &Connection) -> Result<EventCounts> {
+    let messages: i64 =
+        conn.query_row("SELECT COUNT(*) FROM messages", [], |row| row.get(0))?;
+    let handshakes: i64 =
+        conn.query_row("SELECT COUNT(*) FROM handshakes", [], |row| row.get(0))?;
+    let handshake_responses: i64 =
+        conn.query_row("SELECT COUNT(*) FROM handshake_responses", [], |row| row.get(0))?;
+
+    Ok(EventCounts {
+        messages,
+        handshakes,
+        handshake_responses,
+    })
+}
+
+pub fn is_db_empty(conn: &Connection) -> Result<bool> {
+    let counts = get_event_counts(conn)?;
+    Ok(counts.messages == 0 && counts.handshakes == 0 && counts.handshake_responses == 0)
+}
diff --git a/apps/indexer/src/db/schema.rs b/apps/indexer/src/db/schema.rs
new file mode 100644
index 0000000..e904001
--- /dev/null
+++ b/apps/indexer/src/db/schema.rs
@@ -0,0 +1,81 @@
+use rusqlite::Connection;
+
+use crate::error::Result;
+
+const SCHEMA_VERSION: i64 = 1;
+
+pub fn run_migrations(conn: &Connection) -> Result<()> {
+ conn.execute_batch(
+ r#"
+ CREATE TABLE IF NOT EXISTS schema_version (version INTEGER PRIMARY KEY);
+ INSERT OR IGNORE INTO schema_version VALUES (1);
+
+ CREATE TABLE IF NOT EXISTS messages (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ topic BLOB NOT NULL,
+ seq INTEGER NOT NULL,
+ sender BLOB NOT NULL,
+ ciphertext BLOB NOT NULL,
+ timestamp INTEGER NOT NULL,
+ nonce INTEGER NOT NULL,
+ block_number INTEGER NOT NULL,
+ log_index INTEGER NOT NULL,
+ block_timestamp INTEGER NOT NULL,
+ UNIQUE(topic, seq)
+ );
+ CREATE INDEX IF NOT EXISTS idx_msg_topic_seq ON messages(topic, seq);
+ CREATE INDEX IF NOT EXISTS idx_msg_block ON messages(block_number, log_index);
+
+ CREATE TABLE IF NOT EXISTS handshakes (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ recipient_hash BLOB NOT NULL,
+ seq INTEGER NOT NULL,
+ sender BLOB NOT NULL,
+ pub_keys BLOB NOT NULL,
+ ephemeral_pub_key BLOB NOT NULL,
+ plaintext_payload BLOB NOT NULL,
+ block_number INTEGER NOT NULL,
+ log_index INTEGER NOT NULL,
+ block_timestamp INTEGER NOT NULL,
+ UNIQUE(recipient_hash, seq)
+ );
+ CREATE INDEX IF NOT EXISTS idx_hs_recipient_seq ON handshakes(recipient_hash, seq);
+
+ CREATE TABLE IF NOT EXISTS handshake_responses (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ global_seq INTEGER NOT NULL UNIQUE,
+ in_response_to BLOB NOT NULL,
+ responder BLOB NOT NULL,
+ responder_ephemeral_r BLOB NOT NULL,
+ ciphertext BLOB NOT NULL,
+ block_number INTEGER NOT NULL,
+ log_index INTEGER NOT NULL,
+ block_timestamp INTEGER NOT NULL
+ );
+ CREATE INDEX IF NOT EXISTS idx_hsr_seq ON handshake_responses(global_seq);
+
+ CREATE TABLE IF NOT EXISTS seq_counters (
+ key_type TEXT NOT NULL,
+ key_hash BLOB,
+ next_seq INTEGER NOT NULL DEFAULT 0,
+ PRIMARY KEY(key_type, key_hash)
+ );
+
+ CREATE TABLE IF NOT EXISTS indexer_state (
+ key TEXT PRIMARY KEY,
+ value TEXT NOT NULL
+ );
+ "#,
+ )?;
+
+ let version: i64 = conn.query_row(
+ "SELECT version FROM schema_version ORDER BY version DESC LIMIT 1",
+ [],
+ |row| row.get(0),
+ )?;
+
+ tracing::info!(schema_version = version, "Database initialized");
+ assert_eq!(version, SCHEMA_VERSION, "Schema version mismatch");
+
+ Ok(())
+}
diff --git a/apps/indexer/src/error.rs b/apps/indexer/src/error.rs
new file mode 100644
index 0000000..dcefc7e
--- /dev/null
+++ b/apps/indexer/src/error.rs
@@ -0,0 +1,31 @@
+use thiserror::Error;
+
+#[derive(Error, Debug)]
+#[allow(dead_code)]
+pub enum IndexerError {
+ #[error("database error: {0}")]
+ Database(#[from] rusqlite::Error),
+
+ #[error("pool error: {0}")]
+ Pool(#[from] r2d2::Error),
+
+ #[error("RPC error: {0}")]
+ Rpc(#[from] alloy::transports::TransportError),
+
+ #[error("config error: {0}")]
+ Config(String),
+
+ #[error("event decode error: {0}")]
+ Decode(String),
+
+ #[error("block not found: {0}")]
+ BlockNotFound(u64),
+
+ #[error("task join error: {0}")]
+ Join(#[from] tokio::task::JoinError),
+
+ #[error("payload too large: {field} is {size} bytes (max: {max})")]
+ PayloadTooLarge { field: &'static str, size: usize, max: usize },
+}
+
+pub type Result<T> = std::result::Result<T, IndexerError>;
diff --git a/apps/indexer/src/indexer/backfill.rs b/apps/indexer/src/indexer/backfill.rs
new file mode 100644
index 0000000..859f288
--- /dev/null
+++ b/apps/indexer/src/indexer/backfill.rs
@@ -0,0 +1,218 @@
+use std::collections::HashMap;
+use std::num::NonZeroU32;
+use std::time::Duration;
+
+use alloy::eips::BlockNumberOrTag;
+use alloy::primitives::Address;
+use alloy::providers::{Provider, ProviderBuilder, RootProvider};
+use alloy::rpc::types::{BlockTransactionsKind, Filter, Log};
+use alloy::transports::http::{Client, Http};
+use governor::{Jitter, Quota, RateLimiter};
+
+use crate::db::queries::set_last_processed_block;
+use crate::db::DbPool;
+use crate::error::{IndexerError, Result};
+
+use super::processor::{decode_log, EventProcessor, LogWithMeta};
+
+// Alchemy Free tier: 500 CU/s, eth_getLogs = 75 CU → max ~6 req/s
+const REQUESTS_PER_SECOND: u32 = 5;
+const MAX_RETRIES: u32 = 5;
+const INITIAL_BACKOFF_MS: u64 = 1000;
+
+pub struct BackfillStats {
+ pub blocks_processed: u64,
+ pub events_processed: u64,
+ pub events_skipped: u64,
+}
+
+pub async fn run_backfill(
+    rpc_url: &str,
+    contract_address: Address,
+    from_block: u64,
+    to_block: u64,
+    chunk_size: u64,
+    pool: DbPool,
+) -> Result<BackfillStats> {
+    tracing::info!(
+        "Starting backfill from block {} to {}",
+        from_block,
+        to_block
+    );
+
+    let provider = ProviderBuilder::new().on_http(rpc_url.parse().map_err(|e| {
+        IndexerError::Config(format!("Invalid RPC URL: {e}"))
+    })?);
+
+    let processor = EventProcessor::new(pool.clone());
+
+    let limiter = RateLimiter::direct(Quota::per_second(
+        NonZeroU32::new(REQUESTS_PER_SECOND).unwrap(),
+    ));
+
+    let mut block_timestamps: HashMap<u64, u64> = HashMap::new();
+    let mut stats = BackfillStats {
+        blocks_processed: 0,
+        events_processed: 0,
+        events_skipped: 0,
+    };
+
+    for chunk_start in (from_block..=to_block).step_by(chunk_size as usize) {
+        let chunk_end = (chunk_start + chunk_size - 1).min(to_block);
+
+        limiter
+            .until_ready_with_jitter(Jitter::up_to(Duration::from_millis(100)))
+            .await;
+
+        // Note: Don't use .events() for multiple signatures - it doesn't work as OR filter
+        // Filter in code via decode_log() instead
+        let filter = Filter::new()
+            .address(contract_address)
+            .from_block(chunk_start)
+            .to_block(chunk_end);
+
+        let logs = get_logs_with_retry(&provider, &filter).await?;
+
+        let mut logs: Vec<_> = logs.into_iter().collect();
+        logs.sort_by_key(|l| (l.block_number, l.log_index));
+
+        let unique_blocks: Vec<u64> = logs
+            .iter()
+            .filter_map(|l| l.block_number)
+            .filter(|b| !block_timestamps.contains_key(b))
+            .collect::<std::collections::HashSet<_>>()
+            .into_iter()
+            .collect();
+
+        for block_num in unique_blocks {
+            limiter.until_ready().await;
+            let timestamp = fetch_block_timestamp_with_retry(&provider, block_num).await?;
+            block_timestamps.insert(block_num, timestamp);
+        }
+
+        for log in logs {
+            tracing::debug!("Got log with topic: {:?}", log.topics().first());
+            let Some(event) = decode_log(&log) else {
+                // Non-Verbeth events (Upgraded, OwnershipTransferred, etc.) - skip silently
+                continue;
+            };
+
+            let block_number = log.block_number.unwrap_or(0);
+            let log_index = log.log_index.unwrap_or(0);
+            let block_timestamp = *block_timestamps.get(&block_number).unwrap_or(&0);
+
+            let log_with_meta = LogWithMeta {
+                event,
+                block_number,
+                log_index,
+                block_timestamp,
+            };
+
+            match processor.process(log_with_meta) {
+                Ok(true) => stats.events_processed += 1,
+                Ok(false) => stats.events_skipped += 1,
+                Err(e) => {
+                    tracing::error!("Failed to process event: {e}");
+                }
+            }
+        }
+
+        let conn = pool.get()?;
+        set_last_processed_block(&conn, chunk_end as i64)?;
+
+        stats.blocks_processed = chunk_end - from_block + 1;
+
+        let progress = (chunk_end - from_block) as f64 / (to_block - from_block).max(1) as f64 * 100.0;
+        tracing::info!(
+            "Backfill progress: {}/{} blocks ({:.1}%), {} events",
+            stats.blocks_processed,
+            to_block - from_block + 1,
+            progress,
+            stats.events_processed
+        );
+    }
+
+    tracing::info!(
+        "Backfill complete: {} blocks, {} events processed, {} skipped",
+        stats.blocks_processed,
+        stats.events_processed,
+        stats.events_skipped
+    );
+
+    Ok(stats)
+}
+
+async fn get_logs_with_retry(
+    provider: &RootProvider<Http<Client>>,
+    filter: &Filter,
+) -> Result<Vec<Log>> {
+    let mut attempt = 0;
+    loop {
+        match provider.get_logs(filter).await {
+            Ok(logs) => return Ok(logs),
+            Err(e) => {
+                let is_rate_limit = e.to_string().contains("429")
+                    || e.to_string().contains("exceeded")
+                    || e.to_string().contains("rate");
+
+                if is_rate_limit && attempt < MAX_RETRIES {
+                    attempt += 1;
+                    let backoff = Duration::from_millis(INITIAL_BACKOFF_MS * 2u64.pow(attempt - 1));
+                    tracing::warn!(
+                        "Rate limited, retrying in {:?} (attempt {}/{})",
+                        backoff,
+                        attempt,
+                        MAX_RETRIES
+                    );
+                    tokio::time::sleep(backoff).await;
+                } else {
+                    return Err(e.into());
+                }
+            }
+        }
+    }
+}
+
+async fn fetch_block_timestamp_with_retry(
+    provider: &RootProvider<Http<Client>>,
+    block_num: u64,
+) -> Result<u64> {
+    let mut attempt = 0;
+    loop {
+        match provider
+            .get_block_by_number(
+                BlockNumberOrTag::Number(block_num),
+                BlockTransactionsKind::Hashes,
+            )
+            .await
+        {
+            Ok(Some(block)) => return Ok(block.header.timestamp),
+            Ok(None) => return Err(IndexerError::BlockNotFound(block_num)),
+            Err(e) => {
+                let is_rate_limit = e.to_string().contains("429")
+                    || e.to_string().contains("exceeded")
+                    || e.to_string().contains("rate");
+
+                if is_rate_limit && attempt < MAX_RETRIES {
+                    attempt += 1;
+                    let backoff = Duration::from_millis(INITIAL_BACKOFF_MS * 2u64.pow(attempt - 1));
+                    tracing::warn!(
+                        "Rate limited fetching block {}, retrying in {:?} (attempt {}/{})",
+                        block_num,
+                        backoff,
+                        attempt,
+                        MAX_RETRIES
+                    );
+                    tokio::time::sleep(backoff).await;
+                } else {
+                    return Err(e.into());
+                }
+            }
+        }
+    }
+}
+
+#[allow(dead_code)]
+pub async fn get_chain_head(provider: &RootProvider<Http<Client>>) -> Result<u64> {
+    Ok(provider.get_block_number().await?)
+}
diff --git a/apps/indexer/src/indexer/events.rs b/apps/indexer/src/indexer/events.rs
new file mode 100644
index 0000000..1c57180
--- /dev/null
+++ b/apps/indexer/src/indexer/events.rs
@@ -0,0 +1,26 @@
+use alloy::sol;
+
+sol! {
+ event MessageSent(
+ address indexed sender,
+ bytes ciphertext,
+ uint256 timestamp,
+ bytes32 indexed topic,
+ uint256 nonce
+ );
+
+ event Handshake(
+ bytes32 indexed recipientHash,
+ address indexed sender,
+ bytes pubKeys,
+ bytes ephemeralPubKey,
+ bytes plaintextPayload
+ );
+
+ event HandshakeResponse(
+ bytes32 indexed inResponseTo,
+ address indexed responder,
+ bytes32 responderEphemeralR,
+ bytes ciphertext
+ );
+}
diff --git a/apps/indexer/src/indexer/mod.rs b/apps/indexer/src/indexer/mod.rs
new file mode 100644
index 0000000..18a2438
--- /dev/null
+++ b/apps/indexer/src/indexer/mod.rs
@@ -0,0 +1,5 @@
+pub mod backfill;
+pub mod events;
+pub mod processor;
+pub mod retry_queue;
+pub mod subscriber;
diff --git a/apps/indexer/src/indexer/processor.rs b/apps/indexer/src/indexer/processor.rs
new file mode 100644
index 0000000..47c9189
--- /dev/null
+++ b/apps/indexer/src/indexer/processor.rs
@@ -0,0 +1,235 @@
+use alloy::primitives::{Address, B256};
+use alloy::rpc::types::Log;
+
+use crate::db::models::{HandshakeRow, HsrRow, MessageRow};
+use crate::db::queries::{
+ get_and_increment_seq, insert_handshake, insert_hsr, insert_message,
+};
+use crate::db::DbPool;
+use crate::error::{IndexerError, Result};
+
+use super::events::{Handshake, HandshakeResponse, MessageSent};
+
+// Payload size limits (reasonable for Verbeth protocol)
+const MAX_CIPHERTEXT_SIZE: usize = 64 * 1024; // 64 KB per message
+const MAX_PUB_KEYS_SIZE: usize = 65; // version + X25519 + Ed25519
+const MAX_EPHEMERAL_KEY_SIZE: usize = 1216; // X25519 32 + ML-KEM 1184
+const MAX_PLAINTEXT_PAYLOAD_SIZE: usize = 1024; // 1 KB metadata
+const MAX_HSR_CIPHERTEXT_SIZE: usize = 4 * 1024; // 4 KB handshake response
+
+#[derive(Clone)]
+pub enum VerbethEvent {
+    MessageSent {
+        sender: Address,
+        ciphertext: Vec<u8>,
+        timestamp: u64,
+        topic: B256,
+        nonce: u64,
+    },
+    Handshake {
+        recipient_hash: B256,
+        sender: Address,
+        pub_keys: Vec<u8>,
+        ephemeral_pub_key: Vec<u8>,
+        plaintext_payload: Vec<u8>,
+    },
+    HandshakeResponse {
+        in_response_to: B256,
+        responder: Address,
+        responder_ephemeral_r: B256,
+        ciphertext: Vec<u8>,
+    },
+}
+
+#[derive(Clone)]
+pub struct LogWithMeta {
+ pub event: VerbethEvent,
+ pub block_number: u64,
+ pub log_index: u64,
+ pub block_timestamp: u64,
+}
+
+fn validate_payload_sizes(event: &VerbethEvent) -> Result<()> {
+ match event {
+ VerbethEvent::MessageSent { ciphertext, .. } => {
+ if ciphertext.len() > MAX_CIPHERTEXT_SIZE {
+ return Err(IndexerError::PayloadTooLarge {
+ field: "ciphertext",
+ size: ciphertext.len(),
+ max: MAX_CIPHERTEXT_SIZE,
+ });
+ }
+ }
+ VerbethEvent::Handshake { pub_keys, ephemeral_pub_key, plaintext_payload, .. } => {
+ if pub_keys.len() > MAX_PUB_KEYS_SIZE {
+ return Err(IndexerError::PayloadTooLarge {
+ field: "pubKeys",
+ size: pub_keys.len(),
+ max: MAX_PUB_KEYS_SIZE,
+ });
+ }
+ if ephemeral_pub_key.len() > MAX_EPHEMERAL_KEY_SIZE {
+ return Err(IndexerError::PayloadTooLarge {
+ field: "ephemeralPubKey",
+ size: ephemeral_pub_key.len(),
+ max: MAX_EPHEMERAL_KEY_SIZE,
+ });
+ }
+ if plaintext_payload.len() > MAX_PLAINTEXT_PAYLOAD_SIZE {
+ return Err(IndexerError::PayloadTooLarge {
+ field: "plaintextPayload",
+ size: plaintext_payload.len(),
+ max: MAX_PLAINTEXT_PAYLOAD_SIZE,
+ });
+ }
+ }
+ VerbethEvent::HandshakeResponse { ciphertext, .. } => {
+ if ciphertext.len() > MAX_HSR_CIPHERTEXT_SIZE {
+ return Err(IndexerError::PayloadTooLarge {
+ field: "hsrCiphertext",
+ size: ciphertext.len(),
+ max: MAX_HSR_CIPHERTEXT_SIZE,
+ });
+ }
+ }
+ }
+ Ok(())
+}
+
+pub struct EventProcessor {
+ pool: DbPool,
+}
+
+impl EventProcessor {
+    pub fn new(pool: DbPool) -> Self {
+        Self { pool }
+    }
+
+    pub fn process(&self, log: LogWithMeta) -> Result<bool> {
+        // Validate payload sizes before processing
+        validate_payload_sizes(&log.event)?;
+
+        let conn = self.pool.get()?;
+
+        match log.event {
+            VerbethEvent::MessageSent {
+                sender,
+                ciphertext,
+                timestamp,
+                topic,
+                nonce,
+            } => {
+                let topic_bytes: [u8; 32] = topic.0;
+                let seq = get_and_increment_seq(&conn, "message", Some(&topic_bytes))?;
+
+                insert_message(
+                    &conn,
+                    &MessageRow {
+                        topic: topic_bytes,
+                        seq,
+                        sender: sender.0 .0,
+                        ciphertext,
+                        timestamp: timestamp as i64,
+                        nonce: nonce as i64,
+                        block_number: log.block_number as i64,
+                        log_index: log.log_index as i64,
+                        block_timestamp: log.block_timestamp as i64,
+                    },
+                )
+            }
+            VerbethEvent::Handshake {
+                recipient_hash,
+                sender,
+                pub_keys,
+                ephemeral_pub_key,
+                plaintext_payload,
+            } => {
+                let recipient_hash_bytes: [u8; 32] = recipient_hash.0;
+                let seq =
+                    get_and_increment_seq(&conn, "handshake", Some(&recipient_hash_bytes))?;
+
+                insert_handshake(
+                    &conn,
+                    &HandshakeRow {
+                        recipient_hash: recipient_hash_bytes,
+                        seq,
+                        sender: sender.0 .0,
+                        pub_keys,
+                        ephemeral_pub_key,
+                        plaintext_payload,
+                        block_number: log.block_number as i64,
+                        log_index: log.log_index as i64,
+                        block_timestamp: log.block_timestamp as i64,
+                    },
+                )
+            }
+            VerbethEvent::HandshakeResponse {
+                in_response_to,
+                responder,
+                responder_ephemeral_r,
+                ciphertext,
+            } => {
+                let global_seq = get_and_increment_seq(&conn, "hsr", None)?;
+
+                insert_hsr(
+                    &conn,
+                    &HsrRow {
+                        global_seq,
+                        in_response_to: in_response_to.0,
+                        responder: responder.0 .0,
+                        responder_ephemeral_r: responder_ephemeral_r.0,
+                        ciphertext,
+                        block_number: log.block_number as i64,
+                        log_index: log.log_index as i64,
+                        block_timestamp: log.block_timestamp as i64,
+                    },
+                )
+            }
+        }
+    }
+}
+
+pub fn decode_log(log: &Log) -> Option<VerbethEvent> {
+    use alloy::sol_types::SolEvent;
+
+    let topics = log.topics();
+    if topics.is_empty() {
+        return None;
+    }
+
+    let sig = topics[0];
+
+    if sig == MessageSent::SIGNATURE_HASH {
+        let decoded = MessageSent::decode_log(log.inner.as_ref(), true).ok()?;
+        return Some(VerbethEvent::MessageSent {
+            sender: decoded.sender,
+            ciphertext: decoded.ciphertext.to_vec(),
+            timestamp: decoded.timestamp.try_into().ok()?,
+            topic: decoded.topic,
+            nonce: decoded.nonce.try_into().ok()?,
+        });
+    }
+
+    if sig == Handshake::SIGNATURE_HASH {
+        let decoded = Handshake::decode_log(log.inner.as_ref(), true).ok()?;
+        return Some(VerbethEvent::Handshake {
+            recipient_hash: decoded.recipientHash,
+            sender: decoded.sender,
+            pub_keys: decoded.pubKeys.to_vec(),
+            ephemeral_pub_key: decoded.ephemeralPubKey.to_vec(),
+            plaintext_payload: decoded.plaintextPayload.to_vec(),
+        });
+    }
+
+    if sig == HandshakeResponse::SIGNATURE_HASH {
+        let decoded = HandshakeResponse::decode_log(log.inner.as_ref(), true).ok()?;
+        return Some(VerbethEvent::HandshakeResponse {
+            in_response_to: decoded.inResponseTo,
+            responder: decoded.responder,
+            responder_ephemeral_r: decoded.responderEphemeralR,
+            ciphertext: decoded.ciphertext.to_vec(),
+        });
+    }
+
+    None
+}
diff --git a/apps/indexer/src/indexer/retry_queue.rs b/apps/indexer/src/indexer/retry_queue.rs
new file mode 100644
index 0000000..d7ca6f2
--- /dev/null
+++ b/apps/indexer/src/indexer/retry_queue.rs
@@ -0,0 +1,83 @@
+use std::collections::VecDeque;
+use tokio::sync::Mutex;
+
+use super::processor::LogWithMeta;
+
+const MAX_RETRIES: u32 = 3;
+const MAX_QUEUE_SIZE: usize = 1000;
+
+pub struct FailedEvent {
+ pub log: LogWithMeta,
+ pub retry_count: u32,
+ pub last_error: String,
+}
+
+pub struct RetryQueue {
+    queue: Mutex<VecDeque<FailedEvent>>,
+}
+
+impl RetryQueue {
+    pub fn new() -> Self {
+        Self {
+            queue: Mutex::new(VecDeque::new()),
+        }
+    }
+
+    pub async fn push(&self, log: LogWithMeta, error: String) {
+        let mut q = self.queue.lock().await;
+
+        // Check if we're at capacity - dead-letter oldest event
+        if q.len() >= MAX_QUEUE_SIZE {
+            if let Some(old) = q.pop_front() {
+                tracing::error!(
+                    block = old.log.block_number,
+                    log_index = old.log.log_index,
+                    retries = old.retry_count,
+                    error = %old.last_error,
+                    "Event permanently failed (dead-letter, queue full)"
+                );
+            }
+        }
+
+        q.push_back(FailedEvent {
+            log,
+            retry_count: 0,
+            last_error: error,
+        });
+    }
+
+    pub async fn push_retry(&self, mut event: FailedEvent, error: String) {
+        event.retry_count += 1;
+        event.last_error = error;
+
+        if event.retry_count >= MAX_RETRIES {
+            // Dead-letter: log and discard
+            tracing::error!(
+                block = event.log.block_number,
+                log_index = event.log.log_index,
+                retries = event.retry_count,
+                error = %event.last_error,
+                "Event permanently failed (dead-letter, max retries)"
+            );
+            return;
+        }
+
+        let mut q = self.queue.lock().await;
+        q.push_back(event);
+    }
+
+    pub async fn pop(&self) -> Option<FailedEvent> {
+        self.queue.lock().await.pop_front()
+    }
+
+    #[allow(dead_code)]
+    pub async fn len(&self) -> usize {
+        self.queue.lock().await.len()
+    }
+}
+
+impl Default for RetryQueue {
+ fn default() -> Self {
+ Self::new()
+ }
+}
diff --git a/apps/indexer/src/indexer/subscriber.rs b/apps/indexer/src/indexer/subscriber.rs
new file mode 100644
index 0000000..5780f28
--- /dev/null
+++ b/apps/indexer/src/indexer/subscriber.rs
@@ -0,0 +1,293 @@
+use std::sync::Arc;
+use std::time::Duration;
+
+use alloy::eips::BlockNumberOrTag;
+use alloy::primitives::Address;
+use alloy::providers::{Provider, ProviderBuilder, RootProvider, WsConnect};
+use alloy::pubsub::PubSubFrontend;
+use alloy::rpc::types::{BlockTransactionsKind, Filter};
+use alloy::sol_types::SolEvent;
+use tokio::sync::watch;
+
+use crate::config::{sanitize_rpc_url, Config};
+use crate::db::queries::{get_last_processed_block, set_last_processed_block};
+use crate::db::DbPool;
+use crate::error::Result;
+
+use super::backfill::run_backfill;
+use super::events::{Handshake, HandshakeResponse, MessageSent};
+use super::processor::{decode_log, EventProcessor, LogWithMeta};
+use super::retry_queue::RetryQueue;
+
+const RETRY_INTERVAL_SECS: u64 = 10;
+
+/// Maintain the WS subscription with exponential-backoff reconnect and gap recovery.
+pub async fn subscribe_with_reconnect(
+    config: Arc<Config>,
+    pool: DbPool,
+    mut shutdown: watch::Receiver<bool>,
+) {
+    let processor = Arc::new(EventProcessor::new(pool.clone()));
+    let retry_queue = Arc::new(RetryQueue::new());
+    let mut backoff = Duration::from_secs(1);
+    let mut is_first_connect = true;
+
+    // Spawn background retry task
+    let retry_processor = processor.clone();
+    let retry_q = retry_queue.clone();
+    let retry_pool = pool.clone();
+    let retry_shutdown = shutdown.clone();
+    tokio::spawn(async move {
+        run_retry_loop(retry_q, retry_processor, retry_pool, retry_shutdown).await;
+    });
+
+    loop {
+        if *shutdown.borrow() {
+            tracing::info!("Subscriber received shutdown signal");
+            break;
+        }
+
+        // Recover missed events via HTTP before (re)connecting WS
+        // Skip on first connect since main.rs already does initial backfill
+        if !is_first_connect {
+            if let Err(e) = recover_missed_events(&config, &pool).await {
+                tracing::warn!("Failed to recover missed events: {e}");
+            }
+        }
+        is_first_connect = false;
+
+        match connect_and_subscribe(
+            &config.rpc_ws_url,
+            config.contract_address,
+            &processor,
+            &retry_queue,
+            &pool,
+            &mut shutdown,
+        )
+        .await
+        {
+            Ok(()) => {
+                tracing::info!("Subscriber shut down gracefully");
+                break;
+            }
+            Err(e) => {
+                tracing::warn!("Subscriber error: {e}, reconnecting in {:?}", backoff);
+                tokio::select! {
+                    _ = tokio::time::sleep(backoff) => {}
+                    _ = shutdown.changed() => {
+                        if *shutdown.borrow() {
+                            break;
+                        }
+                    }
+                }
+                backoff = (backoff * 2).min(Duration::from_secs(60));
+            }
+        }
+    }
+}
+
+/// Backfill over HTTP any blocks missed while the WS connection was down.
+async fn recover_missed_events(config: &Config, pool: &DbPool) -> Result<()> {
+    let conn = pool.get()?;
+    let last_block = get_last_processed_block(&conn)?.unwrap_or(0) as u64;
+    drop(conn);
+
+    // Derive HTTP URL from WS URL if not explicitly set
+    let http_url = config.rpc_http_url.clone().unwrap_or_else(|| {
+        config
+            .rpc_ws_url
+            .replace("wss://", "https://")
+            .replace("ws://", "http://")
+    });
+
+    let provider = ProviderBuilder::new().on_http(http_url.parse().map_err(|e| {
+        crate::error::IndexerError::Config(format!("Invalid HTTP RPC URL: {e}"))
+    })?);
+
+    let chain_head = provider.get_block_number().await?;
+
+    if chain_head <= last_block {
+        tracing::debug!("No missed blocks to recover");
+        return Ok(());
+    }
+
+    let gap = chain_head - last_block;
+    tracing::info!(
+        "Recovering {} missed blocks ({} -> {})",
+        gap,
+        last_block + 1,
+        chain_head
+    );
+
+    run_backfill(
+        &http_url,
+        config.contract_address,
+        last_block + 1,
+        chain_head,
+        config.rpc_chunk_size,
+        pool.clone(),
+    )
+    .await?;
+
+    tracing::info!("Recovery complete");
+    Ok(())
+}
+
+/// Periodically drain the retry queue, re-processing previously failed events.
+async fn run_retry_loop(
+    queue: Arc<RetryQueue>,
+    processor: Arc<EventProcessor>,
+    pool: DbPool,
+    mut shutdown: watch::Receiver<bool>,
+) {
+    loop {
+        tokio::select! {
+            _ = shutdown.changed() => {
+                if *shutdown.borrow() {
+                    tracing::debug!("Retry loop shutting down");
+                    return;
+                }
+            }
+            _ = tokio::time::sleep(Duration::from_secs(RETRY_INTERVAL_SECS)) => {
+                while let Some(failed) = queue.pop().await {
+                    let block_number = failed.log.block_number;
+                    let log_clone = failed.log.clone();
+                    match processor.process(log_clone) {
+                        Ok(true) => {
+                            tracing::info!(
+                                block = block_number,
+                                attempt = failed.retry_count + 1,
+                                "Retry succeeded"
+                            );
+                            if let Ok(conn) = pool.get() {
+                                let _ = set_last_processed_block(&conn, block_number as i64);
+                            }
+                        }
+                        Ok(false) => {
+                            tracing::debug!(block = block_number, "Retry: duplicate event");
+                        }
+                        Err(e) => {
+                            // Re-queue for another retry attempt
+                            queue.push_retry(failed, e.to_string()).await;
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
+
+/// Connect over WS and stream contract events until an error or shutdown.
+async fn connect_and_subscribe(
+    ws_url: &str,
+    contract_address: Address,
+    processor: &Arc<EventProcessor>,
+    retry_queue: &Arc<RetryQueue>,
+    pool: &DbPool,
+    shutdown: &mut watch::Receiver<bool>,
+) -> Result<()> {
+    tracing::info!("Connecting to WebSocket: {}", sanitize_rpc_url(ws_url));
+
+    let ws = WsConnect::new(ws_url);
+    let provider = ProviderBuilder::new().on_ws(ws).await?;
+
+    tracing::info!("Connected, subscribing to events...");
+
+    let filter = Filter::new().address(contract_address).events([
+        MessageSent::SIGNATURE_HASH,
+        Handshake::SIGNATURE_HASH,
+        HandshakeResponse::SIGNATURE_HASH,
+    ]);
+
+    let sub = provider.subscribe_logs(&filter).await?;
+    let mut stream = sub.into_stream();
+
+    tracing::info!("Subscribed to Verbeth events");
+
+    loop {
+        tokio::select! {
+            _ = shutdown.changed() => {
+                if *shutdown.borrow() {
+                    return Ok(());
+                }
+            }
+            log_opt = futures_lite::StreamExt::next(&mut stream) => {
+                let log = match log_opt {
+                    Some(log) => log,
+                    None => {
+                        tracing::warn!("WebSocket stream ended");
+                        return Err(crate::error::IndexerError::Config("Stream ended".into()));
+                    }
+                };
+
+                let block_number = log.block_number.unwrap_or(0);
+                let log_index = log.log_index.unwrap_or(0);
+
+                let Some(event) = decode_log(&log) else {
+                    tracing::debug!("Unknown event at block {}", block_number);
+                    continue;
+                };
+
+                let block_timestamp = match fetch_block_timestamp(&provider, block_number).await {
+                    Ok(ts) => ts,
+                    Err(e) => {
+                        tracing::warn!("Failed to fetch block timestamp: {e}");
+                        continue;
+                    }
+                };
+
+                let log_with_meta = LogWithMeta {
+                    event,
+                    block_number,
+                    log_index,
+                    block_timestamp,
+                };
+
+                match processor.process(log_with_meta) {
+                    Ok(true) => {
+                        tracing::debug!("Processed event at block {}", block_number);
+                        let conn = pool.get()?;
+                        set_last_processed_block(&conn, block_number as i64)?;
+                    }
+                    Ok(false) => {
+                        tracing::debug!("Duplicate event at block {}", block_number);
+                    }
+                    Err(e) => {
+                        tracing::warn!(
+                            block = block_number,
+                            log_index = log_index,
+                            error = %e,
+                            "Failed to process event, queuing for retry"
+                        );
+                        // Re-create log_with_meta for retry (need to re-decode)
+                        if let Some(event) = decode_log(&log) {
+                            let retry_log = LogWithMeta {
+                                event,
+                                block_number,
+                                log_index,
+                                block_timestamp,
+                            };
+                            retry_queue.push(retry_log, e.to_string()).await;
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
+
+/// Fetch the timestamp of the given block (header only, no tx bodies).
+async fn fetch_block_timestamp(
+    provider: &RootProvider<PubSubFrontend>,
+    block_number: u64,
+) -> Result<u64> {
+    let block = provider
+        .get_block_by_number(
+            BlockNumberOrTag::Number(block_number),
+            BlockTransactionsKind::Hashes,
+        )
+        .await?
+        .ok_or(crate::error::IndexerError::BlockNotFound(block_number))?;
+
+    Ok(block.header.timestamp)
+}
diff --git a/apps/indexer/src/main.rs b/apps/indexer/src/main.rs
new file mode 100644
index 0000000..9f98063
--- /dev/null
+++ b/apps/indexer/src/main.rs
@@ -0,0 +1,150 @@
+use std::net::SocketAddr;
+
+use alloy::providers::{Provider, ProviderBuilder};
+use tokio::sync::watch;
+use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};
+
+mod api;
+mod config;
+mod db;
+mod error;
+mod indexer;
+
+use api::AppState;
+use config::Config;
+use db::{create_pool, queries};
+use error::Result;
+use indexer::{backfill, subscriber};
+
+#[tokio::main]
+async fn main() -> Result<()> {
+    tracing_subscriber::registry()
+        .with(EnvFilter::try_from_default_env().unwrap_or_else(|_| "info".into()))
+        .with(tracing_subscriber::fmt::layer())
+        .init();
+
+    let config = Config::from_env()?;
+
+    tracing::info!(
+        "Starting Verbeth Indexer v{}",
+        env!("CARGO_PKG_VERSION")
+    );
+    tracing::info!("Contract: {}", config.contract_address);
+    tracing::info!("Database: {}", config.database_path);
+    tracing::info!("RPC chunk size: {} blocks", config.rpc_chunk_size);
+
+    let pool = create_pool(&config.database_path, &config.sqlite_sync_mode)?;
+
+    let (shutdown_tx, shutdown_rx) = watch::channel(false);
+
+    let conn = pool.get()?;
+    let is_empty = queries::is_db_empty(&conn)?;
+    let last_block = queries::get_last_processed_block(&conn)?;
+    drop(conn);
+
+    let rpc_url = config.rpc_http_url.clone().unwrap_or_else(|| {
+        config.rpc_ws_url.replace("wss://", "https://").replace("ws://", "http://")
+    });
+
+    let http_provider = ProviderBuilder::new()
+        .on_http(rpc_url.parse().map_err(|e| {
+            error::IndexerError::Config(format!("Invalid RPC URL: {e}"))
+        })?);
+
+    let chain_head = http_provider.get_block_number().await?;
+    tracing::info!("Chain head: {}", chain_head);
+
+    let start_block = if is_empty {
+        let blocks_per_day = 43200u64; // ~2s blocks on Base
+        let days_back = config.backfill_days as u64;
+        chain_head.saturating_sub(blocks_per_day * days_back).max(config.creation_block)
+    } else {
+        (last_block.unwrap_or(config.creation_block as i64) as u64) + 1
+    };
+
+    if start_block < chain_head {
+        tracing::info!("Running backfill from block {} to {}", start_block, chain_head);
+        backfill::run_backfill(
+            &rpc_url,
+            config.contract_address,
+            start_block,
+            chain_head,
+            config.rpc_chunk_size,
+            pool.clone(),
+        )
+        .await?;
+    } else {
+        tracing::info!("No backfill needed, starting from chain head");
+    }
+
+    let state = AppState::new(pool.clone(), config);
+
+    let subscriber_handle = {
+        let config = state.config.clone();
+        let pool = pool.clone();
+        let shutdown_rx = shutdown_rx.clone();
+
+        tokio::spawn(async move {
+            subscriber::subscribe_with_reconnect(config, pool, shutdown_rx).await;
+        })
+    };
+
+    let addr = SocketAddr::from(([0, 0, 0, 0], state.config.server_port));
+    let listener = tokio::net::TcpListener::bind(addr).await.map_err(|e| {
+        error::IndexerError::Config(format!("Failed to bind to {}: {}", addr, e))
+    })?;
+
+    tracing::info!("API server listening on {}", addr);
+
+    let router = api::create_router(state);
+
+    let server_handle = tokio::spawn(async move {
+        axum::serve(listener, router)
+            .with_graceful_shutdown(shutdown_signal(shutdown_tx))
+            .await
+            .ok();
+    });
+
+    tokio::select! {
+        _ = subscriber_handle => {
+            tracing::info!("Subscriber task finished");
+        }
+        _ = server_handle => {
+            tracing::info!("Server task finished");
+        }
+    }
+
+    tracing::info!("Shutdown complete");
+    Ok(())
+}
+
+/// Resolve on Ctrl+C or SIGTERM, then broadcast the shutdown signal.
+async fn shutdown_signal(shutdown_tx: watch::Sender<bool>) {
+    let ctrl_c = async {
+        tokio::signal::ctrl_c()
+            .await
+            .expect("Failed to install Ctrl+C handler");
+    };
+
+    #[cfg(unix)]
+    let terminate = async {
+        tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate())
+            .expect("Failed to install SIGTERM handler")
+            .recv()
+            .await;
+    };
+
+    #[cfg(not(unix))]
+    let terminate = std::future::pending::<()>();
+
+    tokio::select! {
+        _ = ctrl_c => {
+            tracing::info!("Received Ctrl+C, shutting down...");
+        }
+        _ = terminate => {
+            tracing::info!("Received SIGTERM, shutting down...");
+        }
+    }
+
+    let _ = shutdown_tx.send(true);
+}
diff --git a/package.json b/package.json
index b835aad..9dbf0ff 100644
--- a/package.json
+++ b/package.json
@@ -1,5 +1,5 @@
{
- "name": "verbeth-sdk",
+ "name": "verbeth",
"private": true,
"scripts": {
"test:integration": "./packages/sdk/node_modules/.bin/vitest run tests/*.test.ts --config vitest.integration.config.mjs --pool=threads",
diff --git a/packages/.DS_Store b/packages/.DS_Store
index b051f74..ddf991e 100644
Binary files a/packages/.DS_Store and b/packages/.DS_Store differ
diff --git a/packages/contracts/contracts/LogChainV1.sol b/packages/contracts/contracts/LogChainV1.sol
deleted file mode 100644
index 4458fb5..0000000
--- a/packages/contracts/contracts/LogChainV1.sol
+++ /dev/null
@@ -1,142 +0,0 @@
-// SPDX-License-Identifier: MIT
-pragma solidity ^0.8.24;
-
-import "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol";
-import "@openzeppelin/contracts-upgradeable/proxy/utils/UUPSUpgradeable.sol";
-import "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol";
-
-/**
- * @title LogChainV1
- * @dev Contract for end-to-end encrypted messaging using Ethereum event logs as the sole transport layer.
- * @author guefett0
- * @notice This contract enables secure, decentralized messaging without relying on off-chain infrastructure.
- */
-contract LogChainV1 is Initializable, UUPSUpgradeable, OwnableUpgradeable {
-
- /**
- * @dev Emitted when an encrypted message is sent
- * @param sender The address of the message sender (EOA or smart account)
- * @param ciphertext The encrypted message payload
- * @param timestamp Unix timestamp when the message was sent
- * @param topic Indexed topic/channel identifier for message filtering
- * @param nonce Sequential number for message ordering and deduplication
- */
- event MessageSent(
- address indexed sender,
- bytes ciphertext,
- uint256 timestamp,
- bytes32 indexed topic,
- uint256 nonce
- );
-
- /**
- * @dev Emitted when initiating a handshake with a recipient
- * @param recipientHash Keccak256 hash of "contact:" + recipient's lowercase address
- * @param sender The address initiating the handshake
- * @param pubKeys The sender's long-term singing and identity pubkeys (32 bytes each)
- * @param ephemeralPubKey Fresh public key generated for this specific handshake
- * @param plaintextPayload Human-readable message or JSON with optional identity proof
- */
- event Handshake(
- bytes32 indexed recipientHash,
- address indexed sender,
- bytes pubKeys,
- bytes ephemeralPubKey,
- bytes plaintextPayload
- );
-
- /**
- * @dev Emitted when responding to a handshake
- * @param inResponseTo Response tag derived from ECDH(viewPubA, R) and HKDF.
- * @param responder The address responding to the handshake
- * @param responderEphemeralR Ephemeral public key R used to generate the response tag.
- * @param ciphertext Encrypted response containing responder's public keys
- */
- event HandshakeResponse(
- bytes32 indexed inResponseTo,
- address indexed responder,
- bytes32 responderEphemeralR,
- bytes ciphertext
- );
-
- /// @custom:oz-upgrades-unsafe-allow constructor
- constructor() {
- _disableInitializers();
- }
-
- /**
- * @dev Initializes the contract using the proxy pattern
- * @notice Should be called immediately after deployment via proxy
- */
- function initialize() public initializer {
- __Ownable_init(msg.sender);
- __UUPSUpgradeable_init();
- }
-
- /**
- * @dev Authorizes contract upgrades (restricted to owner)
- * @param newImplementation Address of the new implementation contract
- */
- function _authorizeUpgrade(address newImplementation)
- internal
- override
- onlyOwner
- {}
-
- /**
- * @dev Sends an encrypted message to a recipient
- * @param ciphertext The encrypted message payload (JSON-encoded EncryptedPayload)
- * @param topic Channel or conversation identifier for message filtering
- * @param timestamp Unix timestamp when the message was created
- * @param nonce Sequential number for message ordering (not enforced on-chain)
- *
- * @notice Gas cost scales with message size. Consider message splitting for large payloads.
- * @notice Nonce values are not validated on-chain - clients should handle replay protection.
- */
- function sendMessage(
- bytes calldata ciphertext,
- bytes32 topic,
- uint256 timestamp,
- uint256 nonce
- ) external {
- emit MessageSent(msg.sender, ciphertext, timestamp, topic, nonce);
- }
-
- /**
- * @dev Initiates a secure handshake with a recipient
- * @param recipientHash Keccak256("contact:" + recipient.toLowerCase())
- * @param pubKeys Sender's long-term X25519 public key (32 bytes)
- * @param ephemeralPubKey Fresh X25519 public key for this handshake (32 bytes)
- * @param plaintextPayload Human-readable greeting or JSON with identity proof
- *
- * @notice Recipients monitor for events where recipientHash matches their address hash
- * @notice For smart accounts, plaintextPayload may include EIP-1271 signature proof
- */
- function initiateHandshake(
- bytes32 recipientHash,
- bytes calldata pubKeys,
- bytes calldata ephemeralPubKey,
- bytes calldata plaintextPayload
- ) external {
- emit Handshake(recipientHash, msg.sender, pubKeys, ephemeralPubKey, plaintextPayload);
- }
-
- /**
- * @dev Responds to a handshake with encrypted public keys
- * @param inResponseTo Reference tag for the handshake initiator
- * @param responderEphemeralR Ephemeral public key R used to generate the response tag.
- * @param ciphertext Encrypted payload containing responder's identity and ephemeral keys
- *
- * @notice The ciphertext should be encrypted to the initiator's ephemeral public key
- */
- function respondToHandshake(bytes32 inResponseTo, bytes32 responderEphemeralR, bytes calldata ciphertext) external {
- emit HandshakeResponse(inResponseTo, msg.sender, responderEphemeralR, ciphertext);
- }
-
- /**
- * @dev This empty reserved space allows future versions to add new variables
- * without shifting down storage in the inheritance chain.
- * @notice See https://docs.openzeppelin.com/contracts/4.x/upgradeable#storage_gaps
- */
- uint256[50] private __gap;
-}
\ No newline at end of file
diff --git a/packages/contracts/contracts/ModuleSetupHelper.sol b/packages/contracts/contracts/ModuleSetupHelper.sol
new file mode 100644
index 0000000..182d88f
--- /dev/null
+++ b/packages/contracts/contracts/ModuleSetupHelper.sol
@@ -0,0 +1,64 @@
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.20;
+
+interface ISessionModule {
+ function setupSession(
+ address safe,
+ address signer,
+ uint256 expiry,
+ address target
+ ) external;
+}
+
+contract ModuleSetupHelper {
+ /**
+ * @notice Enable a single module during Safe setup
+ * @param module The module address to enable
+ */
+ function enableModule(address module) external {
+ (bool success, bytes memory returnData) = address(this).call(
+ abi.encodeWithSignature("enableModule(address)", module)
+ );
+ require(success, string(abi.encodePacked("Enable module failed: ", returnData)));
+ }
+
+ /**
+ * @notice Enable module and setup session in one delegatecall
+ * @param module The SessionModule address
+ * @param sessionSigner The session signer address to authorize
+ * @param expiry Session expiry timestamp (type(uint256).max for no expiry)
+ * @param target The target contract to allow calls to
+ * @dev This is called via delegatecall from Safe.setup()
+ * - address(this) = Safe
+ * - First call enables module on the Safe (internal call)
+ * - Second call configures session on the module (external call, msg.sender = Safe)
+ */
+ function enableModuleWithSession(
+ address module,
+ address sessionSigner,
+ uint256 expiry,
+ address target
+ ) external {
+ (bool enableSuccess, bytes memory enableData) = address(this).call(
+ abi.encodeWithSignature("enableModule(address)", module)
+ );
+ require(enableSuccess, string(abi.encodePacked("Enable module failed: ", enableData)));
+
+
+ ISessionModule(module).setupSession(
+ address(this), // safe
+ sessionSigner,
+ expiry,
+ target
+ );
+ }
+
+ function enableModules(address[] calldata modules) external {
+ for (uint256 i = 0; i < modules.length; i++) {
+ (bool success, bytes memory returnData) = address(this).call(
+ abi.encodeWithSignature("enableModule(address)", modules[i])
+ );
+ require(success, string(abi.encodePacked("Enable module failed: ", returnData)));
+ }
+ }
+}
\ No newline at end of file
diff --git a/packages/contracts/contracts/SessionModule.sol b/packages/contracts/contracts/SessionModule.sol
new file mode 100644
index 0000000..68722e1
--- /dev/null
+++ b/packages/contracts/contracts/SessionModule.sol
@@ -0,0 +1,112 @@
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.20;
+
+interface ISafe {
+ function execTransactionFromModule(
+ address to,
+ uint256 value,
+ bytes memory data,
+ uint8 operation
+ ) external returns (bool success);
+
+ function isOwner(address owner) external view returns (bool);
+}
+
+/**
+ * @title SessionModule (Singleton)
+ * @notice Allows session signers to execute txs on behalf of any Safe that has enabled this module.
+ */
+contract SessionModule {
+ // safe => sessionSigner => expiry timestamp (0 = unset/invalid, type(uint256).max = never expires, <= block.timestamp = expired)
+ mapping(address => mapping(address => uint256)) public sessionExpiry;
+
+ // safe => target => allowed
+ mapping(address => mapping(address => bool)) public isAllowedTarget;
+
+ event SessionSignerSet(address indexed safe, address indexed signer, uint256 expiry);
+ event TargetSet(address indexed safe, address indexed target, bool allowed);
+ event Executed(address indexed safe, address indexed to, uint256 value, bool success);
+
+ error NotOwnerOrSafe();
+ error SessionExpiredOrInvalid();
+ error TargetNotAllowed();
+ error ExecutionFailed();
+
+ /// @notice Authorize if caller is Safe owner or the Safe itself
+ /// @dev Allowing Safe as caller enables setup during deployment via delegatecall helper
+ modifier onlySafeOwnerOrSafe(address safe) {
+ if (msg.sender != safe && !ISafe(safe).isOwner(msg.sender)) revert NotOwnerOrSafe();
+ _;
+ }
+
+ /// @notice Check if a session signer is currently valid
+ function isValidSession(
+ address safe,
+ address signer
+ ) public view returns (bool) {
+ uint256 expiry = sessionExpiry[safe][signer];
+ if (expiry == 0) return false; // never set
+ if (expiry == type(uint256).max) return true; // never expires
+ return block.timestamp < expiry;
+ }
+
+ function setSession(
+ address safe,
+ address signer,
+ uint256 expiry
+ ) external onlySafeOwnerOrSafe(safe) {
+ sessionExpiry[safe][signer] = expiry;
+ emit SessionSignerSet(safe, signer, expiry);
+ }
+
+ function setTarget(
+ address safe,
+ address target,
+ bool allowed
+ ) external onlySafeOwnerOrSafe(safe) {
+ isAllowedTarget[safe][target] = allowed;
+ emit TargetSet(safe, target, allowed);
+ }
+
+
+ /// @notice set session signer AND allow target in one tx
+ /// @param safe The Safe address
+ /// @param signer The session signer address
+ /// @param expiry Timestamp when session expires (type(uint256).max for no expiry)
+ /// @param target The target contract to allow
+ function setupSession(
+ address safe,
+ address signer,
+ uint256 expiry,
+ address target
+ ) external onlySafeOwnerOrSafe(safe) {
+ sessionExpiry[safe][signer] = expiry;
+ emit SessionSignerSet(safe, signer, expiry);
+
+ isAllowedTarget[safe][target] = true;
+ emit TargetSet(safe, target, true);
+ }
+
+ /// @notice Execute a transaction on behalf of the Safe (called by session signer)
+ function execute(
+ address safe,
+ address to,
+ uint256 value,
+ bytes calldata data,
+ uint8 operation
+ ) external returns (bool) {
+ if (!isValidSession(safe, msg.sender)) revert SessionExpiredOrInvalid();
+ if (!isAllowedTarget[safe][to]) revert TargetNotAllowed();
+
+ bool success = ISafe(safe).execTransactionFromModule(
+ to,
+ value,
+ data,
+ operation
+ );
+ emit Executed(safe, to, value, success);
+
+ if (!success) revert ExecutionFailed();
+ return success;
+ }
+}
diff --git a/packages/contracts/contracts/UniversalSigValidator.sol b/packages/contracts/contracts/UniversalSigValidator.sol
deleted file mode 100644
index 8b0ad27..0000000
--- a/packages/contracts/contracts/UniversalSigValidator.sol
+++ /dev/null
@@ -1,112 +0,0 @@
-// SPDX-License-Identifier: UNLICENSED
-pragma solidity ^0.8.0;
-
-// As per ERC-1271
-interface IERC1271Wallet {
- function isValidSignature(bytes32 hash, bytes calldata signature) external view returns (bytes4 magicValue);
-}
-
-error ERC1271Revert(bytes error);
-error ERC6492DeployFailed(bytes error);
-
-contract UniversalSigValidator {
- bytes32 private constant ERC6492_DETECTION_SUFFIX = 0x6492649264926492649264926492649264926492649264926492649264926492;
- bytes4 private constant ERC1271_SUCCESS = 0x1626ba7e;
-
- // ECDSA secp256k1 curve order / 2 (for malleability check)
- uint256 private constant SECP256K1N_OVER_TWO =
- 0x7fffffffffffffffffffffffffffffff5d576e7357a4501ddfe92f46681b20a0;
-
- function isValidSigImpl(
- address _signer,
- bytes32 _hash,
- bytes calldata _signature,
- bool allowSideEffects,
- bool tryPrepare
- ) public returns (bool) {
- uint contractCodeLen = address(_signer).code.length;
- bytes memory sigToValidate;
-
- // Order strictly defined in EIP-6492:
- // 1. Check for ERC-6492 envelope (counterfactual)
- // 2. If contract code exists, try ERC-1271
- // 3. Fallback to ECDSA ecrecover
- bool isCounterfactual = bytes32(_signature[_signature.length-32:_signature.length]) == ERC6492_DETECTION_SUFFIX;
- if (isCounterfactual) {
- address create2Factory;
- bytes memory factoryCalldata;
- (create2Factory, factoryCalldata, sigToValidate) = abi.decode(
- _signature[0:_signature.length-32],
- (address, bytes, bytes)
- );
-
- if (contractCodeLen == 0 || tryPrepare) {
- (bool success, bytes memory err) = create2Factory.call(factoryCalldata);
- if (!success) revert ERC6492DeployFailed(err);
- }
- } else {
- sigToValidate = _signature;
- }
-
- // ERC-1271 validation path
- if (isCounterfactual || contractCodeLen > 0) {
- try IERC1271Wallet(_signer).isValidSignature(_hash, sigToValidate) returns (bytes4 magicValue) {
- bool isValid = magicValue == ERC1271_SUCCESS;
-
- // retry assuming prefix is a prepare call
- if (!isValid && !tryPrepare && contractCodeLen > 0) {
- return isValidSigImpl(_signer, _hash, _signature, allowSideEffects, true);
- }
-
- if (contractCodeLen == 0 && isCounterfactual && !allowSideEffects) {
- // to avoid side effects, return result via revert(bool)
- assembly {
- mstore(0x00, isValid)
- revert(0x00, 0x20)
- }
- }
-
- return isValid;
- } catch (bytes memory err) {
- if (!tryPrepare && contractCodeLen > 0) {
- return isValidSigImpl(_signer, _hash, _signature, allowSideEffects, true);
- }
- revert ERC1271Revert(err);
- }
- }
-
- // ECDSA verification
- require(_signature.length == 65, "SignatureValidator#recoverSigner: invalid sig length");
- bytes32 r = bytes32(_signature[0:32]);
- bytes32 s = bytes32(_signature[32:64]);
- uint8 v = uint8(_signature[64]);
-
- // added anti malleability (reject high-s values)
- require(uint256(s) <= SECP256K1N_OVER_TWO, "SignatureValidator: invalid s value");
-
- // normalize v (reference only required 27/28)
- if (v != 27 && v != 28) revert("SignatureValidator: invalid v value");
-
- return ecrecover(_hash, v, r, s) == _signer;
- }
-
- function isValidSigWithSideEffects(address _signer, bytes32 _hash, bytes calldata _signature)
- external returns (bool)
- {
- return this.isValidSigImpl(_signer, _hash, _signature, true, false);
- }
-
- function isValidSig(address _signer, bytes32 _hash, bytes calldata _signature)
- external returns (bool)
- {
- try this.isValidSigImpl(_signer, _hash, _signature, false, false) returns (bool isValid) {
- return isValid;
- } catch (bytes memory error) {
- // no-side-effects path, result is returned via revert(bool)
- if (error.length == 32) {
- return abi.decode(error, (bool));
- }
- revert();
- }
- }
-}
diff --git a/packages/contracts/contracts/VerbethV1.sol b/packages/contracts/contracts/VerbethV1.sol
new file mode 100644
index 0000000..32d3601
--- /dev/null
+++ b/packages/contracts/contracts/VerbethV1.sol
@@ -0,0 +1,138 @@
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.24;
+
+import "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol";
+import "@openzeppelin/contracts-upgradeable/proxy/utils/UUPSUpgradeable.sol";
+import "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol";
+
+/**
+ * @title Verbeth
+ * @dev Contract for messaging using event logs as the sole transport layer.
+ * @author okrame
+ */
+contract VerbethV1 is Initializable, UUPSUpgradeable, OwnableUpgradeable {
+
+ uint256 public constant UPGRADE_DELAY = 2 days;
+ address public pendingImplementation;
+ uint256 public upgradeEligibleAt;
+
+ event UpgradeProposed(address indexed newImplementation, uint256 eligibleAt);
+ event UpgradeCancelled(address indexed newImplementation);
+
+ /**
+ * @dev Emitted when a message is sent
+ * @param sender The address of the message sender (EOA or contract)
+ * @param ciphertext The message payload
+ * @param timestamp Unix timestamp when the message was sent
+ * @param topic Indexed channel identifier for message filtering
+ * @param nonce Sequential number for message ordering and deduplication
+ */
+ event MessageSent(
+ address indexed sender,
+ bytes ciphertext,
+ uint256 timestamp,
+ bytes32 indexed topic,
+ uint256 nonce
+ );
+
+ /**
+ * @dev Emitted when initiating a handshake with a recipient
+ * @param recipientHash e.g. Keccak256 hash of "contact:" + recipient's lowercase address
+ * @param sender The address initiating the handshake
+ * @param pubKeys The sender's long-term public keys
+ * @param ephemeralPubKey Ephemeral public key(s) for this handshake
+ * @param plaintextPayload Typically contains a message and identity proof
+ */
+ event Handshake(
+ bytes32 indexed recipientHash,
+ address indexed sender,
+ bytes pubKeys,
+ bytes ephemeralPubKey,
+ bytes plaintextPayload
+ );
+
+ /**
+ * @dev Emitted when responding to a handshake
+ * @param inResponseTo Matching tag so the initiator can find this response
+ * @param responder The address responding to the handshake
+ * @param responderEphemeralR Ephemeral public key used to derive the response tag
+ * @param ciphertext Encrypted response payload with responder's keys
+ */
+ event HandshakeResponse(
+ bytes32 indexed inResponseTo,
+ address indexed responder,
+ bytes32 responderEphemeralR,
+ bytes ciphertext
+ );
+
+ /// @custom:oz-upgrades-unsafe-allow constructor
+ constructor() {
+ _disableInitializers();
+ }
+
+ function initialize() public initializer {
+ __Ownable_init(msg.sender);
+ __UUPSUpgradeable_init();
+ }
+
+
+ function proposeUpgrade(address newImplementation) external onlyOwner {
+ require(newImplementation != address(0), "Invalid implementation");
+ pendingImplementation = newImplementation;
+ upgradeEligibleAt = block.timestamp + UPGRADE_DELAY;
+ emit UpgradeProposed(newImplementation, upgradeEligibleAt);
+ }
+
+
+ function cancelUpgrade() external onlyOwner {
+ require(pendingImplementation != address(0), "No pending upgrade");
+ address cancelled = pendingImplementation;
+ pendingImplementation = address(0);
+ upgradeEligibleAt = 0;
+ emit UpgradeCancelled(cancelled);
+ }
+
+ function _authorizeUpgrade(address newImplementation)
+ internal
+ override
+ onlyOwner
+ {
+ require(newImplementation == pendingImplementation, "Not proposed implementation");
+ require(block.timestamp >= upgradeEligibleAt, "Timelock not expired");
+ pendingImplementation = address(0);
+ upgradeEligibleAt = 0;
+ }
+
+
+ function sendMessage(
+ bytes calldata ciphertext,
+ bytes32 topic,
+ uint256 timestamp,
+ uint256 nonce
+ ) external {
+ emit MessageSent(msg.sender, ciphertext, timestamp, topic, nonce);
+ }
+
+
+ function initiateHandshake(
+ bytes32 recipientHash,
+ bytes calldata pubKeys,
+ bytes calldata ephemeralPubKey,
+ bytes calldata plaintextPayload
+ ) external {
+ emit Handshake(recipientHash, msg.sender, pubKeys, ephemeralPubKey, plaintextPayload);
+ }
+
+
+ function respondToHandshake(bytes32 inResponseTo, bytes32 responderEphemeralR, bytes calldata ciphertext) external {
+ emit HandshakeResponse(inResponseTo, msg.sender, responderEphemeralR, ciphertext);
+ }
+
+ /**
+ * @dev This empty reserved space allows future versions to add new variables
+ * without shifting down storage in the inheritance chain.
+ * @notice See https://docs.openzeppelin.com/contracts/4.x/upgradeable#storage_gaps
+ * @notice Reduced from 50 to 48 to account for pendingImplementation and upgradeEligibleAt
+ */
+ uint256[48] private __gap;
+}
\ No newline at end of file
diff --git a/packages/contracts/hardhat.config.ts b/packages/contracts/hardhat.config.ts
index 354c738..6920bdd 100644
--- a/packages/contracts/hardhat.config.ts
+++ b/packages/contracts/hardhat.config.ts
@@ -49,7 +49,6 @@ const config: HardhatUserConfig = {
ignition: {
strategyConfig: {
create2: {
- // "VERBETH" in hex
salt: "0x5645524245544800000000000000000000000000000000000000000000000000",
},
},
@@ -64,7 +63,8 @@ const config: HardhatUserConfig = {
paths: [
"@openzeppelin/contracts/proxy/ERC1967/ERC1967Proxy.sol",
"@account-abstraction/contracts/core/EntryPoint.sol",
- "test/contracts/TestSmartAccount.sol"
+ "test/contracts/TestSmartAccount.sol",
+ "test/contracts/MockSafe.sol"
]
}
};
diff --git a/packages/contracts/ignition/modules/LogChain.ts b/packages/contracts/ignition/modules/LogChain.ts
deleted file mode 100644
index bcb6d5a..0000000
--- a/packages/contracts/ignition/modules/LogChain.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-import { buildModule } from "@nomicfoundation/hardhat-ignition/modules";
-
-const LogChainModule = buildModule("LogChainModule", (m) => {
- const logChainV1 = m.contract("LogChainV1");
-
- const initCall = m.encodeFunctionCall(logChainV1, "initialize", []);
-
- const proxy = m.contract("ERC1967Proxy", [
- logChainV1,
- initCall
- ]);
-
- return { logChain: proxy, logChainImplementation: logChainV1 };
-});
-
-export default LogChainModule;
\ No newline at end of file
diff --git a/packages/contracts/ignition/modules/SessionModule.ts b/packages/contracts/ignition/modules/SessionModule.ts
new file mode 100644
index 0000000..dc4969b
--- /dev/null
+++ b/packages/contracts/ignition/modules/SessionModule.ts
@@ -0,0 +1,11 @@
+import { buildModule } from "@nomicfoundation/hardhat-ignition/modules";
+
+const SessionModuleModule = buildModule("SessionModuleModule", (m) => {
+ const sessionModule = m.contract("SessionModule");
+
+ const moduleSetupHelper = m.contract("ModuleSetupHelper");
+
+ return { sessionModule, moduleSetupHelper };
+});
+
+export default SessionModuleModule;
diff --git a/packages/contracts/ignition/modules/UniversalSigValidator.ts b/packages/contracts/ignition/modules/UniversalSigValidator.ts
deleted file mode 100644
index feeb729..0000000
--- a/packages/contracts/ignition/modules/UniversalSigValidator.ts
+++ /dev/null
@@ -1,8 +0,0 @@
-import { buildModule } from "@nomicfoundation/hardhat-ignition/modules";
-
-const UniversalSigValidatorModule = buildModule("UniversalSigValidatorModule", (m) => {
- const universalSigValidator = m.contract("UniversalSigValidator");
- return { universalSigValidator };
-});
-
-export default UniversalSigValidatorModule;
diff --git a/packages/contracts/ignition/modules/Verbeth.ts b/packages/contracts/ignition/modules/Verbeth.ts
new file mode 100644
index 0000000..44d4618
--- /dev/null
+++ b/packages/contracts/ignition/modules/Verbeth.ts
@@ -0,0 +1,16 @@
+import { buildModule } from "@nomicfoundation/hardhat-ignition/modules";
+
+const VerbethModule = buildModule("VerbethModule", (m) => {
+ const verbEthV1 = m.contract("VerbethV1");
+
+ const initCall = m.encodeFunctionCall(verbEthV1, "initialize", []);
+
+ const proxy = m.contract("ERC1967Proxy", [
+ verbEthV1,
+ initCall
+ ]);
+
+ return { verbEth: proxy, verbEthImplementation: verbEthV1 };
+});
+
+export default VerbethModule;
\ No newline at end of file
diff --git a/packages/contracts/package.json b/packages/contracts/package.json
index 49152ee..628e2a5 100644
--- a/packages/contracts/package.json
+++ b/packages/contracts/package.json
@@ -2,7 +2,7 @@
"name": "@verbeth/contracts",
"version": "0.1.0",
"private": false,
- "main": "contracts/LogChainV1.sol",
+ "main": "contracts/VerbethV1.sol",
"devDependencies": {
"@account-abstraction/contracts": "^0.8.0",
"@nomicfoundation/hardhat-ignition": "^0.15.10",
@@ -26,10 +26,12 @@
"scripts": {
"build": "hardhat compile",
"test": "hardhat test",
- "deploy:local": "hardhat ignition deploy ignition/modules/LogChain.ts --network localhost --strategy create2",
- "deploy:testnet": "hardhat ignition deploy ignition/modules/LogChain.ts --network baseSepolia --strategy create2",
- "deploy:mainnet": "hardhat ignition deploy ignition/modules/LogChain.ts --network base --strategy create2",
- "deploy:mainnet:validator": "hardhat ignition deploy ignition/modules/UniversalSigValidator.ts --network base --strategy create2"
+ "deploy:local": "hardhat ignition deploy ignition/modules/Verbeth.ts --network localhost --strategy create2",
+ "deploy:testnet": "hardhat ignition deploy ignition/modules/Verbeth.ts --network baseSepolia --strategy create2",
+ "deploy:mainnet": "hardhat ignition deploy ignition/modules/Verbeth.ts --network base --strategy create2",
+ "deploy:mainnet:validator": "hardhat ignition deploy ignition/modules/UniversalSigValidator.ts --network base --strategy create2",
+ "deploy:mainnet:session": "hardhat ignition deploy ignition/modules/SessionModule.ts --network base --strategy create2",
+ "deploy:testnet:session": "hardhat ignition deploy ignition/modules/SessionModule.ts --network baseSepolia --strategy create2"
},
"exports": {
"./typechain-types/*": "./typechain-types/*",
diff --git a/packages/contracts/test/contracts/MockSafe.sol b/packages/contracts/test/contracts/MockSafe.sol
new file mode 100644
index 0000000..509306c
--- /dev/null
+++ b/packages/contracts/test/contracts/MockSafe.sol
@@ -0,0 +1,50 @@
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.20;
+
+/**
+ * @title MockSafe
+ * @notice Minimal mock of Safe for testing SessionModule
+ * @dev Implements ISafe interface + module management
+ */
+contract MockSafe {
+ mapping(address => bool) public isOwner;
+ mapping(address => bool) public isModuleEnabled;
+
+ address public lastExecTo;
+ uint256 public lastExecValue;
+ bytes public lastExecData;
+ uint8 public lastExecOperation;
+ bool public execShouldFail;
+
+ constructor(address _owner) {
+ isOwner[_owner] = true;
+ }
+
+ function addOwner(address owner) external {
+ isOwner[owner] = true;
+ }
+
+ function enableModule(address module) external {
+ isModuleEnabled[module] = true;
+ }
+
+ function setExecShouldFail(bool _fail) external {
+ execShouldFail = _fail;
+ }
+
+ function execTransactionFromModule(
+ address to,
+ uint256 value,
+ bytes memory data,
+ uint8 operation
+ ) external returns (bool success) {
+ require(isModuleEnabled[msg.sender], "Module not enabled");
+
+ lastExecTo = to;
+ lastExecValue = value;
+ lastExecData = data;
+ lastExecOperation = operation;
+
+ return !execShouldFail;
+ }
+}
diff --git a/packages/contracts/test/sessionmodule.test.ts b/packages/contracts/test/sessionmodule.test.ts
new file mode 100644
index 0000000..a77536f
--- /dev/null
+++ b/packages/contracts/test/sessionmodule.test.ts
@@ -0,0 +1,233 @@
+// @ts-ignore
+import { ethers } from "hardhat";
+import { expect } from "chai";
+import { SessionModule, MockSafe } from "../typechain-types";
+import { Signer } from "ethers";
+
+describe("SessionModule", () => {
+ let sessionModule: SessionModule;
+ let mockSafe: MockSafe;
+ let owner: Signer;
+ let sessionSigner: Signer;
+ let attacker: Signer;
+ let targetContract: Signer;
+
+ const NO_EXPIRY = ethers.MaxUint256;
+
+ beforeEach(async () => {
+ [owner, sessionSigner, attacker, targetContract] = await ethers.getSigners();
+
+ const SessionModuleFactory = await ethers.getContractFactory("SessionModule");
+ sessionModule = (await SessionModuleFactory.deploy()) as SessionModule;
+ await sessionModule.waitForDeployment();
+
+ const MockSafeFactory = await ethers.getContractFactory("MockSafe");
+ mockSafe = (await MockSafeFactory.deploy(await owner.getAddress())) as MockSafe;
+ await mockSafe.waitForDeployment();
+
+ await mockSafe.enableModule(await sessionModule.getAddress());
+ });
+
+ describe("Session Management", () => {
+ it("owner can set session signer", async () => {
+ const safeAddr = await mockSafe.getAddress();
+ const signerAddr = await sessionSigner.getAddress();
+
+ await expect(sessionModule.connect(owner).setSession(safeAddr, signerAddr, NO_EXPIRY))
+ // @ts-ignore
+ .to.emit(sessionModule, "SessionSignerSet")
+ .withArgs(safeAddr, signerAddr, NO_EXPIRY);
+
+ expect(await sessionModule.isValidSession(safeAddr, signerAddr)).to.be.true;
+ });
+
+ it("non-owner cannot set session signer", async () => {
+ const safeAddr = await mockSafe.getAddress();
+ const signerAddr = await sessionSigner.getAddress();
+
+ await expect(sessionModule.connect(attacker).setSession(safeAddr, signerAddr, NO_EXPIRY))
+ // @ts-ignore
+ .to.be.revertedWithCustomError(sessionModule, "NotOwnerOrSafe");
+ });
+
+ it("owner can set allowed target", async () => {
+ const safeAddr = await mockSafe.getAddress();
+ const targetAddr = await targetContract.getAddress();
+
+ await expect(sessionModule.connect(owner).setTarget(safeAddr, targetAddr, true))
+ // @ts-ignore
+ .to.emit(sessionModule, "TargetSet")
+ .withArgs(safeAddr, targetAddr, true);
+
+ expect(await sessionModule.isAllowedTarget(safeAddr, targetAddr)).to.be.true;
+ });
+
+ it("non-owner cannot set allowed target", async () => {
+ const safeAddr = await mockSafe.getAddress();
+ const targetAddr = await targetContract.getAddress();
+
+ await expect(sessionModule.connect(attacker).setTarget(safeAddr, targetAddr, true))
+ // @ts-ignore
+ .to.be.revertedWithCustomError(sessionModule, "NotOwnerOrSafe");
+ });
+
+ it("setupSession sets both session and target in one call", async () => {
+ const safeAddr = await mockSafe.getAddress();
+ const signerAddr = await sessionSigner.getAddress();
+ const targetAddr = await targetContract.getAddress();
+
+ await expect(sessionModule.connect(owner).setupSession(safeAddr, signerAddr, NO_EXPIRY, targetAddr))
+ // @ts-ignore
+ .to.emit(sessionModule, "SessionSignerSet")
+ .withArgs(safeAddr, signerAddr, NO_EXPIRY)
+ .and.to.emit(sessionModule, "TargetSet")
+ .withArgs(safeAddr, targetAddr, true);
+
+ expect(await sessionModule.isValidSession(safeAddr, signerAddr)).to.be.true;
+ expect(await sessionModule.isAllowedTarget(safeAddr, targetAddr)).to.be.true;
+ });
+ });
+
+ describe("Session Validity", () => {
+ it("isValidSession returns false for unset session", async () => {
+ const safeAddr = await mockSafe.getAddress();
+ const signerAddr = await sessionSigner.getAddress();
+
+ expect(await sessionModule.isValidSession(safeAddr, signerAddr)).to.be.false;
+ });
+
+ it("isValidSession returns true for non-expiring session", async () => {
+ const safeAddr = await mockSafe.getAddress();
+ const signerAddr = await sessionSigner.getAddress();
+
+ await sessionModule.connect(owner).setSession(safeAddr, signerAddr, NO_EXPIRY);
+
+ expect(await sessionModule.isValidSession(safeAddr, signerAddr)).to.be.true;
+ });
+
+ it("isValidSession returns true for future expiry", async () => {
+ const safeAddr = await mockSafe.getAddress();
+ const signerAddr = await sessionSigner.getAddress();
+ const futureExpiry = Math.floor(Date.now() / 1000) + 3600;
+
+ await sessionModule.connect(owner).setSession(safeAddr, signerAddr, futureExpiry);
+
+ expect(await sessionModule.isValidSession(safeAddr, signerAddr)).to.be.true;
+ });
+
+ it("isValidSession returns false for past expiry", async () => {
+ const safeAddr = await mockSafe.getAddress();
+ const signerAddr = await sessionSigner.getAddress();
+ const pastExpiry = Math.floor(Date.now() / 1000) - 3600;
+
+ await sessionModule.connect(owner).setSession(safeAddr, signerAddr, pastExpiry);
+
+ expect(await sessionModule.isValidSession(safeAddr, signerAddr)).to.be.false;
+ });
+ });
+
+ describe("Execution", () => {
+ beforeEach(async () => {
+ const safeAddr = await mockSafe.getAddress();
+ const signerAddr = await sessionSigner.getAddress();
+ const targetAddr = await targetContract.getAddress();
+
+ await sessionModule.connect(owner).setupSession(safeAddr, signerAddr, NO_EXPIRY, targetAddr);
+ });
+
+ it("session signer can execute on allowed target", async () => {
+ const safeAddr = await mockSafe.getAddress();
+ const targetAddr = await targetContract.getAddress();
+ const callData = "0x12345678";
+
+ await expect(sessionModule.connect(sessionSigner).execute(safeAddr, targetAddr, 0, callData, 0))
+ // @ts-ignore
+ .to.emit(sessionModule, "Executed")
+ .withArgs(safeAddr, targetAddr, 0, true);
+ });
+
+ it("non-session signer cannot execute", async () => {
+ const safeAddr = await mockSafe.getAddress();
+ const targetAddr = await targetContract.getAddress();
+
+ await expect(sessionModule.connect(attacker).execute(safeAddr, targetAddr, 0, "0x", 0))
+ // @ts-ignore
+ .to.be.revertedWithCustomError(sessionModule, "SessionExpiredOrInvalid");
+ });
+
+ it("session signer cannot execute on disallowed target", async () => {
+ const safeAddr = await mockSafe.getAddress();
+ const disallowedTarget = await attacker.getAddress();
+
+ await expect(sessionModule.connect(sessionSigner).execute(safeAddr, disallowedTarget, 0, "0x", 0))
+ // @ts-ignore
+ .to.be.revertedWithCustomError(sessionModule, "TargetNotAllowed");
+ });
+
+ it("execution fails if Safe returns false", async () => {
+ const safeAddr = await mockSafe.getAddress();
+ const targetAddr = await targetContract.getAddress();
+
+ await mockSafe.setExecShouldFail(true);
+
+ await expect(sessionModule.connect(sessionSigner).execute(safeAddr, targetAddr, 0, "0x", 0))
+ // @ts-ignore
+ .to.be.revertedWithCustomError(sessionModule, "ExecutionFailed");
+ });
+
+ it("expired session cannot execute", async () => {
+ const safeAddr = await mockSafe.getAddress();
+ const targetAddr = await targetContract.getAddress();
+ const expiredSignerAddr = await attacker.getAddress();
+
+ const pastExpiry = Math.floor(Date.now() / 1000) - 1;
+ await sessionModule.connect(owner).setSession(safeAddr, expiredSignerAddr, pastExpiry);
+ await sessionModule.connect(owner).setTarget(safeAddr, targetAddr, true);
+
+ await expect(sessionModule.connect(attacker).execute(safeAddr, targetAddr, 0, "0x", 0))
+ // @ts-ignore
+ .to.be.revertedWithCustomError(sessionModule, "SessionExpiredOrInvalid");
+ });
+ });
+
+ describe("Safe as Caller", () => {
+ it("Safe itself can setup session (for delegatecall helper pattern)", async () => {
+ // Deploy a new Safe where we can impersonate it
+ const MockSafeFactory = await ethers.getContractFactory("MockSafe");
+ const newSafe = await MockSafeFactory.deploy(await owner.getAddress());
+ await newSafe.waitForDeployment();
+
+ const safeAddr = await newSafe.getAddress();
+ const signerAddr = await sessionSigner.getAddress();
+ const targetAddr = await targetContract.getAddress();
+
+ // Impersonate the Safe address to call setupSession
+ // This simulates the delegatecall pattern from ModuleSetupHelper
+ await ethers.provider.send("hardhat_impersonateAccount", [safeAddr]);
+ await ethers.provider.send("hardhat_setBalance", [safeAddr, "0x1000000000000000000"]);
+
+ const safeSigner = await ethers.getSigner(safeAddr);
+
+ await expect(sessionModule.connect(safeSigner).setupSession(safeAddr, signerAddr, NO_EXPIRY, targetAddr))
+ // @ts-ignore
+ .to.emit(sessionModule, "SessionSignerSet");
+
+ await ethers.provider.send("hardhat_stopImpersonatingAccount", [safeAddr]);
+
+ expect(await sessionModule.isValidSession(safeAddr, signerAddr)).to.be.true;
+ expect(await sessionModule.isAllowedTarget(safeAddr, targetAddr)).to.be.true;
+ });
+ });
+});
+
+describe("ModuleSetupHelper", () => {
+ // Note: ModuleSetupHelper is designed to be called via delegatecall during Safe.setup()
+ it("contract deploys successfully", async () => {
+ const Factory = await ethers.getContractFactory("ModuleSetupHelper");
+ const helper = await Factory.deploy();
+ await helper.waitForDeployment();
+
+ const address = await helper.getAddress();
+ expect(address).to.match(/^0x[0-9a-fA-F]{40}$/);
+ });
+});
\ No newline at end of file
diff --git a/packages/contracts/test/unisigval.test.ts b/packages/contracts/test/unisigval.test.ts
deleted file mode 100644
index 1945ad3..0000000
--- a/packages/contracts/test/unisigval.test.ts
+++ /dev/null
@@ -1,173 +0,0 @@
-// packages/contracts/test/unisigval.test.ts
-// @ts-ignore
-import { ethers } from "hardhat";
-import { expect } from "chai";
-import { UniversalSigValidator } from "../typechain-types";
-
-/**
- * @dev
- * These tests ensure that UniversalSigValidator behaves correctly
- * with already deployed accounts (EOA + ERC-1271).
- * They do not yet cover ERC-6492 pre-deploy simulation.
- */
-describe("UniversalSigValidator (minimal)", () => {
- let validator: UniversalSigValidator;
-
- beforeEach(async () => {
- const Factory = await ethers.getContractFactory("UniversalSigValidator");
- validator = (await Factory.deploy()) as UniversalSigValidator;
- await validator.waitForDeployment();
- });
-
- it("validates an EOA signature via ecrecover", async () => {
- const [signer] = await ethers.getSigners();
-
- const message = ethers.randomBytes(32);
- const digest = ethers.hashMessage(message);
- const signature = await signer.signMessage(message);
-
- const ok = await validator.isValidSig.staticCall(
- await signer.getAddress(),
- digest,
- signature
- );
- expect(ok).to.equal(true);
- });
-
- it("returns false if the signer address does not match the signature", async () => {
- const [signer, other] = await ethers.getSigners();
-
- const message = ethers.randomBytes(32);
- const digest = ethers.hashMessage(message);
- const signature = await signer.signMessage(message);
-
- const ok = await validator.isValidSig.staticCall(
- await other.getAddress(),
- digest,
- signature
- );
- expect(ok).to.equal(false);
- });
-
- it("reverts on invalid signature length (must be 65 bytes)", async () => {
- const [signer] = await ethers.getSigners();
-
- const message = ethers.randomBytes(32);
- const digest = ethers.hashMessage(message);
- const badSig = ethers.hexlify(ethers.randomBytes(64)); // 64, not 65
-
- await expect(
- validator.isValidSig(await signer.getAddress(), digest, badSig)
- // @ts-ignore
- ).to.be.reverted;
- });
-
- it("reverts on invalid v value (must be 27 or 28)", async () => {
- const [signer] = await ethers.getSigners();
-
- const message = ethers.randomBytes(32);
- const digest = ethers.hashMessage(message);
- const goodSig = await signer.signMessage(message);
-
- const bytes = ethers.getBytes(goodSig);
- bytes[64] = 0; // force v = 0 (invalid)
- const badSig = ethers.hexlify(bytes);
-
- await expect(
- validator.isValidSig(await signer.getAddress(), digest, badSig)
- // @ts-ignore
- ).to.be.reverted;
- });
-
- it("isValidSigWithSideEffects behaves like isValidSig for EOA signatures", async () => {
- const [signer] = await ethers.getSigners();
-
- const message = ethers.randomBytes(32);
- const digest = ethers.hashMessage(message);
- const signature = await signer.signMessage(message);
-
- const ok = await validator.isValidSigWithSideEffects.staticCall(
- await signer.getAddress(),
- digest,
- signature
- );
- expect(ok).to.equal(true);
- });
-
- // ----------------------------------------
- // ERC-1271 path using TestSmartAccount.sol
- // ----------------------------------------
-
- it("validates via ERC-1271 when signer is a smart account", async () => {
- const [owner] = await ethers.getSigners();
-
- const Factory = await ethers.getContractFactory("TestSmartAccount");
- const wallet1271 = await Factory.deploy(
- ethers.ZeroAddress,
- await owner.getAddress()
- );
- await wallet1271.waitForDeployment();
-
- const message = ethers.randomBytes(32);
- const digest = ethers.hashMessage(message);
- const signature = await owner.signMessage(message);
-
- const ok = await validator.isValidSig.staticCall(
- await wallet1271.getAddress(),
- digest,
- signature
- );
- expect(ok).to.equal(true);
-
- const okSide = await validator.isValidSigWithSideEffects.staticCall(
- await wallet1271.getAddress(),
- digest,
- signature
- );
- expect(okSide).to.equal(true);
- });
-
- it("returns false via ERC-1271 if signature does not recover to the wallet's owner", async () => {
- const [owner, other] = await ethers.getSigners();
-
- const Factory = await ethers.getContractFactory("TestSmartAccount");
- const wallet1271 = await Factory.deploy(
- ethers.ZeroAddress,
- await owner.getAddress()
- );
- await wallet1271.waitForDeployment();
-
- const message = ethers.randomBytes(32);
- const digest = ethers.hashMessage(message);
- const wrongSig = await other.signMessage(message);
-
- const ok = await validator.isValidSig.staticCall(
- await wallet1271.getAddress(),
- digest,
- wrongSig
- );
- expect(ok).to.equal(false);
- });
-
- it("returns false if ERC-1271 wallet returns a non-magic value (invalid sig)", async () => {
- const [owner, other] = await ethers.getSigners();
-
- const Factory = await ethers.getContractFactory("TestSmartAccount");
- const wallet1271 = await Factory.deploy(
- ethers.ZeroAddress,
- await owner.getAddress()
- );
- await wallet1271.waitForDeployment();
-
- const message = ethers.randomBytes(32);
- const digest = ethers.hashMessage(message);
- const nonMagicSig = await other.signMessage(message);
-
- const ok = await validator.isValidSig.staticCall(
- await wallet1271.getAddress(),
- digest,
- nonMagicSig
- );
- expect(ok).to.equal(false);
- });
-});
diff --git a/packages/contracts/test/upgrade.test.ts b/packages/contracts/test/upgrade.test.ts
index daf28b8..e49cc25 100644
--- a/packages/contracts/test/upgrade.test.ts
+++ b/packages/contracts/test/upgrade.test.ts
@@ -1,59 +1,51 @@
import { expect } from "chai";
// @ts-ignore
-import { ethers, upgrades } from "hardhat";
-import { LogChainV1 } from "../typechain-types";
+import { ethers, upgrades, network } from "hardhat";
+import { VerbethV1 } from "../typechain-types";
-describe("LogChainV1 – Upgradeability (UUPS)", function () {
- let logChain: LogChainV1;
+const TWO_DAYS = 2 * 24 * 60 * 60;
+
+describe("VerbethV1 – Upgradeability (UUPS)", function () {
+ let verbEth: VerbethV1;
let owner: any;
let attacker: any;
beforeEach(async () => {
[owner, attacker] = await ethers.getSigners();
- const Factory = await ethers.getContractFactory("LogChainV1");
- logChain = (await upgrades.deployProxy(Factory, [], {
+ const Factory = await ethers.getContractFactory("VerbethV1");
+ verbEth = (await upgrades.deployProxy(Factory, [], {
kind: "uups",
initializer: "initialize",
- })) as unknown as LogChainV1;
+ })) as unknown as VerbethV1;
});
+ async function advanceTime(seconds: number) {
+ await network.provider.send("evm_increaseTime", [seconds]);
+ await network.provider.send("evm_mine");
+ }
+
it("is initialized correctly", async () => {
- expect(await logChain.owner()).to.equal(await owner.getAddress());
+ expect(await verbEth.owner()).to.equal(await owner.getAddress());
});
it("prevents re‑initialization", async () => {
await expect(
- logChain.initialize()
+ verbEth.initialize()
// @ts-ignore
- ).to.be.revertedWithCustomError(logChain, "InvalidInitialization");
- });
-
- it("only owner can perform upgrade", async () => {
- const NewImplFactory = await ethers.getContractFactory("LogChainV1");
- const newImpl = await NewImplFactory.deploy();
-
- await expect(
- (logChain as any)
- .connect(attacker)
- .upgradeToAndCall(await newImpl.getAddress(), "0x")
- // @ts-ignore
- ).to.be.revertedWithCustomError(logChain, "OwnableUnauthorizedAccount");
-
- await expect(
- (logChain as any)
- .connect(owner)
- .upgradeToAndCall(await newImpl.getAddress(), "0x")
- // @ts-ignore
- ).to.not.be.reverted;
+ ).to.be.revertedWithCustomError(verbEth, "InvalidInitialization");
});
it("storage gap is preserved after upgrade", async () => {
- const ImplV2 = await ethers.getContractFactory("LogChainV1");
+ const ImplV2 = await ethers.getContractFactory("VerbethV1");
const newImpl = await ImplV2.deploy();
+ // Propose and wait for timelock
+ await verbEth.proposeUpgrade(await newImpl.getAddress());
+ await advanceTime(TWO_DAYS);
+
// Perform upgrade via UUPS entry point
- await (logChain as any).upgradeToAndCall(await newImpl.getAddress(), "0x");
+ await (verbEth as any).upgradeToAndCall(await newImpl.getAddress(), "0x");
// Ensure it's still functional
const msg = ethers.encodeBytes32String("hi");
@@ -62,8 +54,192 @@ describe("LogChainV1 – Upgradeability (UUPS)", function () {
const nonce = 1;
await expect(
- logChain.sendMessage(msg, topic, timestamp, nonce)
+ verbEth.sendMessage(msg, topic, timestamp, nonce)
+ // @ts-ignore
+ ).to.emit(verbEth, "MessageSent");
+ });
+
+ describe("Upgrade Timelock", function () {
+ let newImpl: any;
+
+ beforeEach(async () => {
+ const NewImplFactory = await ethers.getContractFactory("VerbethV1");
+ newImpl = await NewImplFactory.deploy();
+ });
+
+ it("UPGRADE_DELAY is 2 days", async () => {
+ expect(await verbEth.UPGRADE_DELAY()).to.equal(TWO_DAYS);
+ });
+
+ it("proposeUpgrade sets pending implementation and eligibleAt", async () => {
+ const implAddress = await newImpl.getAddress();
+ const tx = await verbEth.proposeUpgrade(implAddress);
+ const receipt = await tx.wait();
+ const block = await ethers.provider.getBlock(receipt!.blockNumber);
+
+ expect(await verbEth.pendingImplementation()).to.equal(implAddress);
+ expect(await verbEth.upgradeEligibleAt()).to.equal(
+ block!.timestamp + TWO_DAYS
+ );
+ });
+
+ it("proposeUpgrade emits UpgradeProposed event", async () => {
+ const implAddress = await newImpl.getAddress();
+ await expect(verbEth.proposeUpgrade(implAddress))
+ // @ts-ignore
+ .to.emit(verbEth, "UpgradeProposed")
+ .withArgs(implAddress, (value: any) => value > 0);
+ });
+
+ it("proposeUpgrade reverts for zero address", async () => {
+ await expect(
+ verbEth.proposeUpgrade(ethers.ZeroAddress)
+ // @ts-ignore
+ ).to.be.revertedWith("Invalid implementation");
+ });
+
+ it("only owner can propose upgrade", async () => {
+ await expect(
+ verbEth.connect(attacker).proposeUpgrade(await newImpl.getAddress())
+ // @ts-ignore
+ ).to.be.revertedWithCustomError(verbEth, "OwnableUnauthorizedAccount");
+ });
+
+ it("cancelUpgrade clears pending implementation", async () => {
+ const implAddress = await newImpl.getAddress();
+ await verbEth.proposeUpgrade(implAddress);
+
+ await expect(verbEth.cancelUpgrade())
+ // @ts-ignore
+ .to.emit(verbEth, "UpgradeCancelled")
+ .withArgs(implAddress);
+
+ expect(await verbEth.pendingImplementation()).to.equal(ethers.ZeroAddress);
+ expect(await verbEth.upgradeEligibleAt()).to.equal(0);
+ });
+
+ it("cancelUpgrade reverts when no pending upgrade", async () => {
// @ts-ignore
- ).to.emit(logChain, "MessageSent");
+ await expect(verbEth.cancelUpgrade()).to.be.revertedWith(
+ "No pending upgrade"
+ );
+ });
+
+ it("only owner can cancel upgrade", async () => {
+ await verbEth.proposeUpgrade(await newImpl.getAddress());
+
+ await expect(
+ verbEth.connect(attacker).cancelUpgrade()
+ // @ts-ignore
+ ).to.be.revertedWithCustomError(verbEth, "OwnableUnauthorizedAccount");
+ });
+
+ it("upgrade reverts before timelock expires", async () => {
+ const implAddress = await newImpl.getAddress();
+ await verbEth.proposeUpgrade(implAddress);
+
+ await expect(
+ (verbEth as any).upgradeToAndCall(implAddress, "0x")
+ // @ts-ignore
+ ).to.be.revertedWith("Timelock not expired");
+
+ // Advance time but not enough
+ await advanceTime(TWO_DAYS - 100);
+
+ await expect(
+ (verbEth as any).upgradeToAndCall(implAddress, "0x")
+ // @ts-ignore
+ ).to.be.revertedWith("Timelock not expired");
+ });
+
+ it("upgrade reverts for non-proposed implementation", async () => {
+ const implAddress = await newImpl.getAddress();
+ await verbEth.proposeUpgrade(implAddress);
+ await advanceTime(TWO_DAYS);
+
+ const OtherImplFactory = await ethers.getContractFactory("VerbethV1");
+ const otherImpl = await OtherImplFactory.deploy();
+
+ await expect(
+ (verbEth as any).upgradeToAndCall(await otherImpl.getAddress(), "0x")
+ // @ts-ignore
+ ).to.be.revertedWith("Not proposed implementation");
+ });
+
+ it("upgrade reverts when no upgrade is proposed", async () => {
+ await expect(
+ (verbEth as any).upgradeToAndCall(await newImpl.getAddress(), "0x")
+ // @ts-ignore
+ ).to.be.revertedWith("Not proposed implementation");
+ });
+
+ it("upgrade succeeds after timelock expires", async () => {
+ const implAddress = await newImpl.getAddress();
+ await verbEth.proposeUpgrade(implAddress);
+
+ // Advance time past the timelock
+ await advanceTime(TWO_DAYS + 1);
+
+ await expect(
+ (verbEth as any).upgradeToAndCall(implAddress, "0x")
+ // @ts-ignore
+ ).to.not.be.reverted;
+
+ // Verify pending state is cleared
+ expect(await verbEth.pendingImplementation()).to.equal(ethers.ZeroAddress);
+ expect(await verbEth.upgradeEligibleAt()).to.equal(0);
+ });
+
+ it("only owner can perform upgrade", async () => {
+ const implAddress = await newImpl.getAddress();
+ await verbEth.proposeUpgrade(implAddress);
+ await advanceTime(TWO_DAYS);
+
+ await expect(
+ (verbEth as any).connect(attacker).upgradeToAndCall(implAddress, "0x")
+ // @ts-ignore
+ ).to.be.revertedWithCustomError(verbEth, "OwnableUnauthorizedAccount");
+ });
+
+ it("can propose new upgrade after cancellation", async () => {
+ const implAddress = await newImpl.getAddress();
+ await verbEth.proposeUpgrade(implAddress);
+ await verbEth.cancelUpgrade();
+
+ // Propose again
+ await expect(verbEth.proposeUpgrade(implAddress))
+ // @ts-ignore
+ .to.emit(verbEth, "UpgradeProposed");
+
+ expect(await verbEth.pendingImplementation()).to.equal(implAddress);
+ });
+
+ it("can propose new upgrade replacing previous proposal", async () => {
+ const implAddress = await newImpl.getAddress();
+ await verbEth.proposeUpgrade(implAddress);
+
+ // Deploy another implementation
+ const OtherImplFactory = await ethers.getContractFactory("VerbethV1");
+ const otherImpl = await OtherImplFactory.deploy();
+ const otherAddress = await otherImpl.getAddress();
+
+ // Propose the new one (replaces the previous)
+ await verbEth.proposeUpgrade(otherAddress);
+
+ expect(await verbEth.pendingImplementation()).to.equal(otherAddress);
+
+ // Old implementation should not work even after timelock
+ await advanceTime(TWO_DAYS);
+ await expect(
+ (verbEth as any).upgradeToAndCall(implAddress, "0x")
+ // @ts-ignore
+ ).to.be.revertedWith("Not proposed implementation");
+
+ // New implementation should work
+ await expect(
+ (verbEth as any).upgradeToAndCall(otherAddress, "0x")
+ // @ts-ignore
+ ).to.not.be.reverted;
+ });
});
});
diff --git a/packages/contracts/test/logchain.test.ts b/packages/contracts/test/verbeth.test.ts
similarity index 76%
rename from packages/contracts/test/logchain.test.ts
rename to packages/contracts/test/verbeth.test.ts
index 8f7bb3f..6f791d1 100644
--- a/packages/contracts/test/logchain.test.ts
+++ b/packages/contracts/test/verbeth.test.ts
@@ -1,15 +1,15 @@
// @ts-ignore
import { ethers } from "hardhat";
import { expect } from "chai";
-import { LogChainV1 } from "../typechain-types";
+import { VerbethV1 } from "../typechain-types";
-describe("LogChain", () => {
- let logChain: LogChainV1;
+describe("Verbeth", () => {
+ let verbEth: VerbethV1;
beforeEach(async () => {
- const factory = await ethers.getContractFactory("LogChainV1");
- logChain = (await factory.deploy()) as LogChainV1;
- await logChain.waitForDeployment();
+ const factory = await ethers.getContractFactory("VerbethV1");
+ verbEth = (await factory.deploy()) as VerbethV1;
+ await verbEth.waitForDeployment();
});
it("should emit a MessageSent event", async () => {
@@ -20,9 +20,9 @@ describe("LogChain", () => {
const timestamp = Math.floor(Date.now() / 1000);
const nonce = 1;
- await expect(logChain.sendMessage(msg, topic, timestamp, nonce))
+ await expect(verbEth.sendMessage(msg, topic, timestamp, nonce))
// @ts-ignore
- .to.emit(logChain, "MessageSent")
+ .to.emit(verbEth, "MessageSent")
.withArgs(await sender.getAddress(), msg, timestamp, topic, nonce);
});
@@ -34,8 +34,8 @@ describe("LogChain", () => {
const timestamp = Math.floor(Date.now() / 1000);
const nonce = 42;
- await logChain.sendMessage(msg, topic, timestamp, nonce);
- await logChain.sendMessage(msg, topic, timestamp + 1, nonce); // re-use same nonce, no revert
+ await verbEth.sendMessage(msg, topic, timestamp, nonce);
+ await verbEth.sendMessage(msg, topic, timestamp + 1, nonce); // re-use same nonce, no revert
});
it("should emit a Handshake event", async () => {
@@ -50,7 +50,7 @@ describe("LogChain", () => {
const plaintextPayload = ethers.toUtf8Bytes("Hi Bob, respond pls");
await expect(
- logChain.initiateHandshake(
+ verbEth.initiateHandshake(
recipientHash,
unifiedPubKeys,
ephemeralPubKey,
@@ -58,7 +58,7 @@ describe("LogChain", () => {
)
)
// @ts-ignore
- .to.emit(logChain, "Handshake")
+ .to.emit(verbEth, "Handshake")
.withArgs(
recipientHash,
recipient,
@@ -78,14 +78,14 @@ describe("LogChain", () => {
const responseCiphertext = ethers.hexlify(ethers.randomBytes(64));
await expect(
- logChain.respondToHandshake(
+ verbEth.respondToHandshake(
inResponseTo,
responderEphemeralR,
responseCiphertext
)
)
// @ts-ignore
- .to.emit(logChain, "HandshakeResponse")
+ .to.emit(verbEth, "HandshakeResponse")
.withArgs(
inResponseTo,
await bob.getAddress(),
diff --git a/packages/sdk/README.md b/packages/sdk/README.md
index 8a609fe..cb12646 100644
--- a/packages/sdk/README.md
+++ b/packages/sdk/README.md
@@ -1,191 +1,43 @@
# @verbeth/sdk
-Verbeth enables secure, E2EE messaging using Ethereum event logs as the only transport layer. No servers, no relays—just the blockchain.
-
-## Features
-
-- **End-to-end encryption** using NaCl Box (X25519 + XSalsa20-Poly1305)
-- **Forward secrecy** with ephemeral keys per message
-- **Handshake protocol** for secure key exchange
-- **Privacy-focused** with minimal metadata via `recipientHash`
-- **EOA & Smart Account support** (ERC-1271/6492 compatible)
-- **Fully on-chain** - no centralized infrastructure
-
-## Installation
+End-to-end encrypted messaging over public EVM blockchains.
+### Install
```bash
-npm install @verbeth/sdk ethers tweetnacl
+npm install @verbeth/sdk
```
-## Quick Start
-
-### 1. Initialize with VerbethClient (Recommended)
-
-```typescript
-import { VerbethClient, ExecutorFactory, deriveIdentityKeyPairWithProof } from '@verbeth/sdk';
-import { Contract, BrowserProvider } from 'ethers';
-import { LogChainV1__factory } from '@verbeth/contracts/typechain-types';
+### Quickstart
+```ts
+import {
+ createVerbethClient,
+ deriveIdentityKeyPairWithProof,
+ ExecutorFactory,
+ getVerbethAddress
+} from '@verbeth/sdk';
+import { ethers } from 'ethers';
-// Setup
-const provider = new BrowserProvider(window.ethereum);
+const provider = new ethers.BrowserProvider(window.ethereum);
const signer = await provider.getSigner();
const address = await signer.getAddress();
-// Create contract instance
-const contract = LogChainV1__factory.connect(LOGCHAIN_ADDRESS, signer);
-
-// Derive identity keys (done once, then stored)
-const { identityKeyPair, identityProof } = await deriveIdentityKeyPairWithProof(signer);
-
-// Create executor (handles transaction submission)
-const executor = ExecutorFactory.createEOA(contract);
+const { identityKeyPair, identityProof } = await deriveIdentityKeyPairWithProof(signer, address);
-// Initialize client
-const client = new VerbethClient({
- executor,
- identityKeyPair,
- identityProof,
+const contract = new ethers.Contract(getVerbethAddress(), VerbethABI, signer);
+const client = createVerbethClient({
+ address,
signer,
- address
-});
-
-// Send a handshake to start chatting
-const { tx, ephemeralKeyPair } = await client.sendHandshake(
- '0xRecipientAddress...',
- 'Hello! Want to chat?'
-);
-
-// Store ephemeralKeyPair. you'll just need it to decrypt the handshake response!
-
-// Accept a handshake
-const { tx, duplexTopics } = await client.acceptHandshake(
- handshakeEvent.ephemeralPubKey,
- handshakeEvent.identityPubKey,
- 'Sure, lets chat!'
-);
-
-// Send encrypted messages
-await client.sendMessage(
- duplexTopics.topicOut,
- recipientIdentityPubKey,
- 'This message is encrypted!'
-);
-
-// Decrypt received messages
-const decrypted = await client.decryptMessage(
- messageEvent.ciphertext,
- senderIdentityPubKey
-);
-```
-
-### 2. Low-level API
-
-For more control, use the low-level functions:
-
-```typescript
-import {
- initiateHandshake,
- respondToHandshake,
- sendEncryptedMessage,
- decryptMessage,
- deriveIdentityKeyPairWithProof
-} from '@verbeth/sdk';
-
-// Generate identity keys
-const { identityKeyPair, identityProof } = await deriveIdentityKeyPairWithProof(signer);
-
-// Initiate handshake
-const ephemeralKeyPair = nacl.box.keyPair();
-const tx = await initiateHandshake({
- executor,
- recipientAddress: '0xBob...',
- ephemeralPubKey: ephemeralKeyPair.publicKey,
identityKeyPair,
identityProof,
- plaintextPayload: 'Hi Bob!'
-});
-
-// Send encrypted message
-await sendEncryptedMessage({
- executor,
- topic: derivedTopic,
- message: 'Secret message',
- recipientPubKey: bobsIdentityKey,
- senderAddress: myAddress,
- senderSignKeyPair: identityKeyPair,
- timestamp: Date.now()
+ executor: ExecutorFactory.createEOA(contract),
});
-// Decrypt message
-const plaintext = decryptMessage(
- ciphertext,
- senderIdentityPubKey,
- myIdentityKeyPair.secretKey
-);
-```
-
-## Smart Account Support
-
-Verbeth works with ERC-4337 smart accounts:
-
-```typescript
-import { ExecutorFactory } from '@verbeth/sdk';
-
-// For UserOp-based execution
-const executor = ExecutorFactory.createUserOp(
- contract,
- bundler,
- smartAccount,
- signer
-);
-
-// For direct EntryPoint execution
-const executor = ExecutorFactory.createDirectEntryPoint(
- contract,
- entryPoint,
- smartAccountAddress,
- signer
-);
+await client.sendMessage(conversationId, 'Hello, encrypted world!');
```
-## Contract Addresses
-
-**LogChainV1 Singleton:** `0x41a3eaC0d858028E9228d1E2092e6178fc81c4f0`
-
-**ERC1967Proxy:** `0x62720f39d5Ec6501508bDe4D152c1E13Fd2F6707`
-
-## How It Works
-
-1. **Identity Keys**: Each account derives long-term X25519 (encryption) + Ed25519 (signing) keys bound to their address via signature
-2. **Handshake**: Alice sends her ephemeral key + identity proof to Bob via a `Handshake` event
-3. **Response**: Bob verifies Alice's identity and responds with his keys + duplex topics
-4. **Messaging**: Both parties derive shared topics and exchange encrypted messages via `MessageSent` events
-5. **Decryption**: Recipients monitor their inbound topic and decrypt with their identity key
-
-
-## Security Considerations
-
-- **Forward Secrecy**: Fresh ephemeral keys per message provide sender-side forward secrecy
-- **Identity Binding**: Addresses are cryptographically bound to long-term keys via signature
-- **Non-Repudiation**: Optional Ed25519 signatures prove message origin
-- **Privacy**: RecipientHash hides recipient identity; duplex topics separate communication channels
-
-⚠️ **Note**: Current design provides sender-side forward secrecy. Recipient-side FS requires ephemeral↔ephemeral or session ratcheting (e.g., Double Ratchet).
-
-## Built With
-
-- [TweetNaCl](https://tweetnacl.js.org/) - Encryption primitives
-- [Ethers v6](https://docs.ethers.org/v6/) - Ethereum interactions
-- [Viem](https://viem.sh/) - EIP-1271/6492 verification
-- [Noble Curves](https://github.com/paulmillr/noble-curves) - Elliptic curve operations
-
-## Examples
-
-Check out the [demo application](https://github.com/okrame/verbeth-sdk/tree/main/apps/demo) for a complete implementation.
-
## Documentation
-For detailed protocol documentation, security analysis, and improvement proposals, see the [main repository](https://github.com/okrame/verbeth-sdk).
+For detailed protocol documentation, see [docs.verbeth.xyz](https://docs.verbeth.xyz).
## License
@@ -193,10 +45,10 @@ MPL-2.0
## Links
-- [GitHub Repository](https://github.com/okrame/verbeth-sdk)
+- [GitHub Repository](https://github.com/okrame/verbeth)
- [Demo App](https://verbeth-demo.vercel.app/)
-- [Contract Source](https://github.com/okrame/verbeth-sdk/tree/main/packages/contracts)
+- [Contract Source](https://github.com/okrame/verbeth/tree/main/packages/contracts)
---
-**Questions or feedback?** Open an issue on [GitHub](https://github.com/okrame/verbeth-sdk/issues).
\ No newline at end of file
+**Questions or feedback?** Open an issue on [GitHub](https://github.com/okrame/verbeth/issues).
\ No newline at end of file
diff --git a/packages/sdk/package.json b/packages/sdk/package.json
index b41026d..425ee9d 100644
--- a/packages/sdk/package.json
+++ b/packages/sdk/package.json
@@ -1,6 +1,6 @@
{
"name": "@verbeth/sdk",
- "version": "0.1.4",
+ "version": "0.1.10",
"private": false,
"main": "dist/src/index.js",
"module": "dist/esm/src/index.js",
@@ -17,6 +17,7 @@
"dependencies": {
"@noble/curves": "^1.8.1",
"@noble/hashes": "^1.8.0",
+ "@noble/post-quantum": "^0.5.4",
"@noble/secp256k1": "^2.2.3",
"ethers": "^6.7.0",
"tweetnacl": "^1.0.3",
diff --git a/packages/sdk/src/addresses.ts b/packages/sdk/src/addresses.ts
new file mode 100644
index 0000000..fe3cbbd
--- /dev/null
+++ b/packages/sdk/src/addresses.ts
@@ -0,0 +1,47 @@
+// packages/sdk/src/addresses.ts
+
+export interface ChainConfig {
+ verbethProxy: `0x${string}`;
+ verbethImpl: `0x${string}`;
+ creationBlock: number;
+ moduleSetupHelper?: `0x${string}`;
+}
+
+// Deterministic deployment
+export const VERBETH_CONFIG: ChainConfig = {
+ verbethProxy: '0x82C9c5475D63e4C9e959280e9066aBb24973a663',
+ verbethImpl: '0x51670aB6eDE1d1B11C654CCA53b7D42080802326',
+ creationBlock: 37_097_547, // *** only base sepolia for now
+} as const;
+
+// helpers for Safe session module
+export const MODULE_SETUP_HELPERS: Record<number, `0x${string}`> = {
+ 8453: '0xc022F74924BDB4b62D830234d89b066359bF67c0',
+ 84532: '0xbd59Fea46D308eDF3b75C22a6f64AC68feFc731A',
+} as const;
+
+
+export function getVerbethAddress(): `0x${string}` {
+ return VERBETH_CONFIG.verbethProxy;
+}
+
+export function getCreationBlock(): number {
+ return VERBETH_CONFIG.creationBlock;
+}
+
+export function getModuleSetupHelper(chainId: number): `0x${string}` | undefined {
+ return MODULE_SETUP_HELPERS[chainId];
+}
+
+export function isModuleSetupSupported(chainId: number): boolean {
+ return chainId in MODULE_SETUP_HELPERS;
+}
+
+// Scanning defaults (chain-agnostic)
+export const SCAN_DEFAULTS = {
+ INITIAL_SCAN_BLOCKS: 1_000,
+ MAX_RETRIES: 3,
+ MAX_RANGE_PROVIDER: 2_000,
+ CHUNK_SIZE: 2_000,
+ REAL_TIME_BUFFER: 3,
+} as const;
diff --git a/packages/sdk/src/client/PendingManager.ts b/packages/sdk/src/client/PendingManager.ts
new file mode 100644
index 0000000..3c3ad3c
--- /dev/null
+++ b/packages/sdk/src/client/PendingManager.ts
@@ -0,0 +1,124 @@
+// packages/sdk/src/client/PendingManager.ts
+
+/**
+ * Internal Pending Message Coordinator.
+ *
+ * Manages the lifecycle of outbound messages:
+ * - Creating pending records before tx submission
+ * - Updating status on submission
+ * - Matching confirmations by txHash
+ * - Cleaning up after confirmation or failure
+ */
+
+import { PendingStore, PendingMessage, PendingStatus } from './types.js';
+
+export interface CreatePendingParams {
+ id: string;
+ conversationId: string;
+ topic: string;
+ payloadHex: string;
+ plaintext: string;
+ sessionStateBefore: string;
+ sessionStateAfter: string;
+ createdAt: number;
+}
+
+export class PendingManager {
+ constructor(private store: PendingStore) {}
+
+ /**
+ * Create and save a pending message record.
+ * Called right before submitting a transaction.
+ */
+ async create(params: CreatePendingParams): Promise<PendingMessage> {
+ const pending: PendingMessage = {
+ ...params,
+ txHash: null,
+ status: 'preparing',
+ };
+ await this.store.save(pending);
+ return pending;
+ }
+
+ /**
+ * Mark as submitted with transaction hash.
+ * Called immediately after tx is broadcast.
+ */
+ async markSubmitted(id: string, txHash: string): Promise<void> {
+ await this.store.updateStatus(id, 'submitted', txHash);
+ }
+
+ /**
+ * Mark as failed.
+ * Called when tx submission fails.
+ * Note: Ratchet slot is already burned (session was committed).
+ */
+ async markFailed(id: string): Promise<void> {
+ await this.store.updateStatus(id, 'failed');
+ }
+
+
+ async get(id: string): Promise<PendingMessage | null> {
+ return this.store.get(id);
+ }
+
+
+ async getByTxHash(txHash: string): Promise<PendingMessage | null> {
+ return this.store.getByTxHash(txHash);
+ }
+
+ async getByConversation(conversationId: string): Promise<PendingMessage[]> {
+ return this.store.getByConversation(conversationId);
+ }
+
+
+ /**
+ * Finalize and delete.
+ * Called when we see our MessageSent event on-chain.
+ *
+ * @returns The finalized pending message, or null if not found
+ */
+ async finalize(id: string): Promise<PendingMessage | null> {
+ const pending = await this.store.get(id);
+ if (!pending) {
+ return null;
+ }
+
+ await this.store.delete(id);
+ return pending;
+ }
+
+ /**
+ * Delete a pending message without finalizing.
+ * Used for cleanup on failure or cancellation.
+ */
+ async delete(id: string): Promise<void> {
+ await this.store.delete(id);
+ }
+
+ /**
+ * Clean up stale pending messages.
+ * Called periodically to remove old failed/stuck records.
+ *
+ * @param conversationId - Conversation to clean up
+ * @param maxAgeMs - Maximum age in milliseconds (default: 24 hours)
+ * @returns Number of records cleaned up
+ */
+ async cleanupStale(
+ conversationId: string,
+ maxAgeMs: number = 24 * 60 * 60 * 1000
+ ): Promise<number> {
+ const pending = await this.store.getByConversation(conversationId);
+ const cutoff = Date.now() - maxAgeMs;
+
+ let cleaned = 0;
+ for (const p of pending) {
+ if (p.createdAt < cutoff) {
+ await this.store.delete(p.id);
+ cleaned++;
+ }
+ }
+
+ return cleaned;
+ }
+}
\ No newline at end of file
diff --git a/packages/sdk/src/client/SessionManager.ts b/packages/sdk/src/client/SessionManager.ts
new file mode 100644
index 0000000..97e3589
--- /dev/null
+++ b/packages/sdk/src/client/SessionManager.ts
@@ -0,0 +1,173 @@
+// packages/sdk/src/client/SessionManager.ts
+
+/**
+ * Internal Session Coordinator.
+ *
+ * Handles:
+ * - Session caching for performance
+ * - Topic matching (current, next, previous)
+ * - Automatic topic promotion when next topic is used
+ * - Cache invalidation
+ */
+
+import { RatchetSession, TOPIC_TRANSITION_WINDOW_MS } from '../ratchet/types.js';
+import { SessionStore } from './types.js';
+
+export interface TopicLookupResult {
+ session: RatchetSession;
+ topicMatch: 'current' | 'next' | 'previous';
+}
+
+/**
+ * Internal session manager that wraps a SessionStore with caching
+ * and topic promotion logic.
+ */
+export class SessionManager {
+ private cache = new Map<string, RatchetSession>();
+
+ constructor(private store: SessionStore) {}
+
+ // ===========================================================================
+ // Session Retrieval
+ // ===========================================================================
+
+ /**
+ * Get session by conversation ID, checking cache first.
+ */
+ async getByConversationId(conversationId: string): Promise<RatchetSession | null> {
+ const cached = this.cache.get(conversationId);
+ if (cached) {
+ return cached;
+ }
+
+ const session = await this.store.get(conversationId);
+ if (session) {
+ this.cache.set(conversationId, session);
+ }
+ return session;
+ }
+
+ /**
+ * Find session by inbound topic with automatic topic promotion.
+ *
+ * Checks topics in order:
+ * 1. currentTopicInbound - standard case
+ * 2. nextTopicInbound - DH ratchet advanced, promotes topics
+ * 3. previousTopicInbound - grace period for late messages
+ *
+ * @param topic - The topic to look up
+ * @returns Session and match type, or null if not found
+ */
+ async getByInboundTopic(topic: string): Promise<TopicLookupResult | null> {
+ const topicLower = topic.toLowerCase();
+
+ const session = await this.store.getByInboundTopic(topic);
+ if (!session) {
+ return null;
+ }
+
+ // Check cache for more recent state (e.g. for batched operations)
+ const cached = this.cache.get(session.conversationId);
+ let workingSession = cached || session;
+
+ if (workingSession.currentTopicInbound.toLowerCase() === topicLower) {
+ if (!cached) {
+ this.cache.set(workingSession.conversationId, workingSession);
+ }
+ return { session: workingSession, topicMatch: 'current' };
+ }
+
+ if (workingSession.nextTopicInbound?.toLowerCase() === topicLower) {
+ // Promote next topics to current
+ workingSession = this.promoteTopics(workingSession);
+ this.cache.set(workingSession.conversationId, workingSession);
+ return { session: workingSession, topicMatch: 'next' };
+ }
+
+ if (workingSession.previousTopicInbound?.toLowerCase() === topicLower) {
+ if (!cached) {
+ this.cache.set(workingSession.conversationId, workingSession);
+ }
+ return { session: workingSession, topicMatch: 'previous' };
+ }
+
+ // Topic found in store but doesn't match current session state (this shouldn't happen normally, but handle gracefully)
+ return null;
+ }
+
+ // ===========================================================================
+ // Session Persistence
+ // ===========================================================================
+
+ /**
+ * Update session in cache and persist to store.
+ */
+ async save(session: RatchetSession): Promise<void> {
+ this.cache.set(session.conversationId, session);
+ await this.store.save(session);
+ }
+
+ /**
+ * Update cache without persisting (for batch operations).
+ */
+ updateCache(session: RatchetSession): void {
+ this.cache.set(session.conversationId, session);
+ }
+
+ /**
+ * Persist all cached sessions to store.
+ */
+ async flushCache(): Promise<void> {
+ const saves = Array.from(this.cache.values()).map(s => this.store.save(s));
+ await Promise.all(saves);
+ }
+
+ // ===========================================================================
+ // Cache Management
+ // ===========================================================================
+
+ /**
+ * Invalidate cache entry (e.g., on session reset).
+ */
+ invalidate(conversationId: string): void {
+ this.cache.delete(conversationId);
+ }
+
+ clearCache(): void {
+ this.cache.clear();
+ }
+
+ getCacheSize(): number {
+ return this.cache.size;
+ }
+
+ isCached(conversationId: string): boolean {
+ return this.cache.has(conversationId);
+ }
+
+ /**
+ * Promote next topics to current (internal helper).
+ * Called when a message arrives on nextTopicInbound.
+ */
+ private promoteTopics(session: RatchetSession): RatchetSession {
+ if (!session.nextTopicInbound || !session.nextTopicOutbound) {
+ return session;
+ }
+
+ return {
+ ...session,
+ // Move current to previous (for grace period)
+ previousTopicInbound: session.currentTopicInbound,
+ previousTopicExpiry: Date.now() + TOPIC_TRANSITION_WINDOW_MS,
+ // Promote next to current
+ currentTopicInbound: session.nextTopicInbound,
+ currentTopicOutbound: session.nextTopicOutbound,
+ // Clear next (will be computed on next DH ratchet)
+ nextTopicInbound: undefined,
+ nextTopicOutbound: undefined,
+ // Increment epoch
+ topicEpoch: session.topicEpoch + 1,
+ updatedAt: Date.now(),
+ };
+ }
+}
\ No newline at end of file
diff --git a/packages/sdk/src/client/VerbethClient.ts b/packages/sdk/src/client/VerbethClient.ts
index 9373833..c3648c6 100644
--- a/packages/sdk/src/client/VerbethClient.ts
+++ b/packages/sdk/src/client/VerbethClient.ts
@@ -1,9 +1,21 @@
// packages/sdk/src/client/VerbethClient.ts
-import nacl from 'tweetnacl';
-import { initiateHandshake, respondToHandshake, sendEncryptedMessage } from '../send.js';
-import { deriveDuplexTopics } from '../crypto.js';
-import type { VerbethClientConfig, HandshakeResult, HandshakeResponseResult } from './types.js';
+/**
+ * High-level client for Verbeth E2EE messaging.
+ *
+ * Provides a unified API for:
+ * - Handshake operations (sendHandshake, acceptHandshake)
+ * - Session creation for both initiator and responder
+ * - Message encryption/decryption with session management
+ * - Two-phase commit for message sending
+ * - Transaction confirmation handling
+ */
+
+import { hexlify, getBytes, keccak256 } from 'ethers';
+import { hkdf } from '@noble/hashes/hkdf';
+import { sha256 } from '@noble/hashes/sha2';
+import { initiateHandshake, respondToHandshake } from '../handshake.js';
+import { kem } from '../pq/kem.js';
import type { IExecutor } from '../executor.js';
import type { IdentityKeyPair, IdentityProof } from '../types.js';
import type { Signer } from 'ethers';
@@ -13,195 +25,632 @@ import * as payload from '../payload.js';
import * as verify from '../verify.js';
import * as utils from '../utils.js';
import * as identity from '../identity.js';
+import * as ratchet from '../ratchet/index.js';
+
+import { ratchetEncrypt } from '../ratchet/encrypt.js';
+import { ratchetDecrypt } from '../ratchet/decrypt.js';
+import { packageRatchetPayload, parseRatchetPayload, isRatchetPayload } from '../ratchet/codec.js';
+import { verifyMessageSignature } from '../ratchet/auth.js';
+import { dh, hybridInitialSecret } from '../ratchet/kdf.js';
+import { initSessionAsInitiator, initSessionAsResponder } from '../ratchet/session.js';
+import type { RatchetSession } from '../ratchet/types.js';
+
+import { SessionManager } from './SessionManager.js';
+import { PendingManager } from './PendingManager.js';
+import type {
+ VerbethClientConfig,
+ HandshakeResult,
+ HandshakeResponseResult,
+ SessionStore,
+ PendingStore,
+ PreparedMessage,
+ DecryptedMessage,
+ SendResult,
+ ConfirmResult,
+ SerializedSessionInfo,
+ VerbethClientCallbacks,
+ CreateInitiatorSessionFromHsrParams,
+} from './types.js';
+
+export interface CreateInitiatorSessionParams {
+ contactAddress: string;
+ initiatorEphemeralSecret: Uint8Array;
+ responderEphemeralPubKey: Uint8Array;
+ inResponseToTag: `0x${string}`;
+ kemCiphertext?: Uint8Array; // from handshake response (for KEM decapsulation)
+ initiatorKemSecret?: Uint8Array; // stored from sendHandshake
+}
+
+export interface CreateResponderSessionParams {
+ contactAddress: string;
+ responderEphemeralSecret: Uint8Array;
+ responderEphemeralPublic: Uint8Array;
+ initiatorEphemeralPubKey: Uint8Array;
+ salt: Uint8Array;
+ kemSharedSecret?: Uint8Array; // from acceptHandshake (for hybrid KDF)
+}
-/**
- * High-level client for Verbeth E2EE messaging
- *
- * VerbethClient provides a simplified API for common operations while
- * maintaining access to all low-level functions.
- *
- * @example
- * ```typescript
- * const client = new VerbethClient({
- * executor,
- * identityKeyPair,
- * identityProof,
- * signer,
- * address: '0x...'
- * });
- *
- * // Send a handshake
- * const { tx, ephemeralKeyPair } = await client.sendHandshake(
- * '0xBob...',
- * 'Hello Bob!'
- * );
- *
- * // Send a message
- * await client.sendMessage(
- * contact.topicOutbound,
- * contact.identityPubKey,
- * 'Hello again!'
- * );
- * ```
- */
export class VerbethClient {
private readonly executor: IExecutor;
private readonly identityKeyPair: IdentityKeyPair;
private readonly identityProof: IdentityProof;
private readonly signer: Signer;
private readonly address: string;
+ private readonly callbacks?: VerbethClientCallbacks;
+
+ // configured via setters
+ private sessionManager?: SessionManager;
+ private pendingManager?: PendingManager;
- /**
- * creates a new VerbethClient instance
- *
- * @param config - Client configuration with session-level parameters
- */
constructor(config: VerbethClientConfig) {
this.executor = config.executor;
this.identityKeyPair = config.identityKeyPair;
this.identityProof = config.identityProof;
this.signer = config.signer;
this.address = config.address;
+ this.callbacks = config.callbacks;
}
/**
- * Initiates a handshake with a recipient
- *
- * generates an ephemeral keypair for this handshake.
- * the ephemeralKeyPair must be stored to decrypt the response later.
- *
+ * to be called before using prepareMessage/decryptMessage/sendMessage.
+ */
+ setSessionStore(store: SessionStore): void {
+ this.sessionManager = new SessionManager(store);
+ }
+
+ /**
+ * to be called before using sendMessage/confirmTx/revertTx.
+ */
+ setPendingStore(store: PendingStore): void {
+ this.pendingManager = new PendingManager(store);
+ }
+
+ hasSessionStore(): boolean {
+ return !!this.sessionManager;
+ }
+
+ hasPendingStore(): boolean {
+ return !!this.pendingManager;
+ }
+
+
+ /**
+ * Initiates a handshake with a recipient.
+ *
+ * Generates an ephemeral keypair and ML-KEM keypair for this handshake.
+ * Both secretKeys must be stored for ratchet session initialization
+ * when the response arrives.
+ *
* @param recipientAddress - Blockchain address of the recipient
* @param message - Plaintext message to include in the handshake
- * @returns Transaction response and the ephemeral keypair (must be stored!)
- *
- * @example
- * ```typescript
- * const { tx, ephemeralKeyPair } = await client.sendHandshake(
- * '0xBob...',
- * 'Hi Bob!'
- * );
- *
- * // Store ephemeralKeyPair.secretKey to decrypt Bob's response
- * await storage.saveContact({
- * address: '0xBob...',
- * ephemeralKey: ephemeralKeyPair.secretKey,
- * // ...
- * });
- * ```
+ * @returns Transaction response, ephemeral keypair, and KEM keypair
*/
async sendHandshake(
recipientAddress: string,
message: string
): Promise<HandshakeResult> {
- const ephemeralKeyPair = nacl.box.keyPair();
-
- const tx = await initiateHandshake({
+ const { tx, ephemeralKeyPair, kemKeyPair } = await initiateHandshake({
executor: this.executor,
recipientAddress,
identityKeyPair: this.identityKeyPair,
- ephemeralPubKey: ephemeralKeyPair.publicKey,
plaintextPayload: message,
identityProof: this.identityProof,
signer: this.signer,
});
- return { tx, ephemeralKeyPair };
+ return { tx, ephemeralKeyPair, kemKeyPair };
}
/**
- * Accepts a handshake from an initiator
- *
- * derives duplex topics for the conversation and returns them.
- *
- * @param initiatorEphemeralPubKey - initiator's ephemeral public key from handshake event
- * @param initiatorIdentityPubKey - initiator's long-term X25519 identity key
- * @param note - response message to send back
- * @returns transaction, derived duplex topics, and response tag
- *
- * @example
- * ```typescript
- * const { tx, duplexTopics } = await client.acceptHandshake(
- * handshake.ephemeralPubKey,
- * handshake.identityPubKey,
- * 'Hello Alice!'
- * );
- *
- * // Store the topics for future messaging
- * await storage.saveContact({
- * address: handshake.sender,
- * topicOutbound: duplexTopics.topicIn, // Responder writes to topicIn
- * topicInbound: duplexTopics.topicOut, // Responder reads from topicOut
- * // ...
- * });
- * ```
+ * Accepts a handshake from an initiator.
+ *
+ * Derives topics from ephemeral DH shared secret (same approach
+ * as post-handshake topic ratcheting). Returns topicOutbound/topicInbound
+ * directly instead of duplexTopics structure.
+ *
+ * Supports PQ-hybrid: if initiator includes ML-KEM public key (1216 bytes),
+ * performs KEM encapsulation and returns kemSharedSecret.
+ *
+ * @param initiatorEphemeralPubKey - Initiator's ephemeral key (32 bytes X25519 or 1216 bytes with KEM)
+ * @param note - Response message to send back
+ * @returns Transaction, derived topics, ephemeral keys for ratchet, and KEM shared secret
*/
async acceptHandshake(
initiatorEphemeralPubKey: Uint8Array,
- initiatorIdentityPubKey: Uint8Array,
note: string
): Promise<HandshakeResponseResult> {
- const { tx, salt, tag } = await respondToHandshake({
+ const {
+ tx,
+ salt,
+ tag,
+ responderEphemeralSecret,
+ responderEphemeralPublic,
+ kemSharedSecret,
+ } = await respondToHandshake({
executor: this.executor,
- initiatorPubKey: initiatorEphemeralPubKey,
+ initiatorEphemeralPubKey,
responderIdentityKeyPair: this.identityKeyPair,
note,
identityProof: this.identityProof,
signer: this.signer,
- initiatorIdentityPubKey,
});
- const duplexTopics = deriveDuplexTopics(
- this.identityKeyPair.secretKey,
- initiatorIdentityPubKey,
- salt
+ // Extract X25519 part for topic derivation (first 32 bytes if extended)
+ const x25519Pub = initiatorEphemeralPubKey.length > 32
+ ? initiatorEphemeralPubKey.slice(0, 32)
+ : initiatorEphemeralPubKey;
+
+ if (!kemSharedSecret) {
+ throw new Error("KEM is required for PQ-secure handshake");
+ }
+
+ const { topicOutbound, topicInbound } = this.deriveTopicsFromDH(
+ responderEphemeralSecret,
+ x25519Pub,
+ salt,
+ false,
+ kemSharedSecret
+ );
+
+ return {
+ tx,
+ topicOutbound,
+ topicInbound,
+ tag,
+ salt,
+ responderEphemeralSecret,
+ responderEphemeralPublic,
+ kemSharedSecret,
+ };
+ }
+
+ // ===========================================================================
+ // Session Creation - Encapsulates DH and topic derivation
+ // ===========================================================================
+
+ /**
+ * Create a ratchet session as the handshake initiator.
+ *
+ * Call this after receiving and validating a handshake response.
+ * Handles topic derivation from ephemeral DH internally.
+ *
+ * If KEM ciphertext and secret are provided (PQ-hybrid), decapsulates
+ * to derive hybrid shared secret for post-quantum security.
+ *
+ * @param params - Session creation parameters
+ * @returns Ready-to-save RatchetSession
+ */
+ createInitiatorSession(params: CreateInitiatorSessionParams): RatchetSession {
+ const {
+ contactAddress,
+ initiatorEphemeralSecret,
+ responderEphemeralPubKey,
+ inResponseToTag,
+ kemCiphertext,
+ initiatorKemSecret,
+ } = params;
+
+ if (!kemCiphertext || !initiatorKemSecret) {
+ throw new Error("KEM is required for PQ-secure handshake");
+ }
+ const kemSecret = kem.decapsulate(kemCiphertext, initiatorKemSecret);
+
+ const salt = getBytes(inResponseToTag);
+ const { topicOutbound, topicInbound } = this.deriveTopicsFromDH(
+ initiatorEphemeralSecret,
+ responderEphemeralPubKey,
+ salt,
+ true,
+ kemSecret
);
- return { tx, duplexTopics, tag };
+ return initSessionAsInitiator({
+ myAddress: this.address,
+ contactAddress,
+ myHandshakeEphemeralSecret: initiatorEphemeralSecret,
+ theirResponderEphemeralPubKey: responderEphemeralPubKey,
+ topicOutbound,
+ topicInbound,
+ kemSecret,
+ });
}
/**
- * Sends an encrypted message to a contact
+ * Create a ratchet session as the handshake responder.
+ *
+ * Call this after sending a handshake response.
+ * Handles topic derivation from ephemeral DH internally.
+ *
+ * If kemSharedSecret is provided (PQ-hybrid), uses hybrid KDF
+ * for post-quantum security.
+ *
+ * @param params - Session creation parameters
+ * @returns Ready-to-save RatchetSession
+ */
+ createResponderSession(params: CreateResponderSessionParams): RatchetSession {
+ const {
+ contactAddress,
+ responderEphemeralSecret,
+ responderEphemeralPublic,
+ initiatorEphemeralPubKey,
+ salt,
+ kemSharedSecret,
+ } = params;
+
+ if (!kemSharedSecret) {
+ throw new Error("KEM is required for PQ-secure handshake");
+ }
+
+ // Extract X25519 part for topic derivation (first 32 bytes if extended)
+ const x25519Pub = initiatorEphemeralPubKey.length > 32
+ ? initiatorEphemeralPubKey.slice(0, 32)
+ : initiatorEphemeralPubKey;
+
+ const { topicOutbound, topicInbound } = this.deriveTopicsFromDH(
+ responderEphemeralSecret,
+ x25519Pub,
+ salt,
+ false,
+ kemSharedSecret
+ );
+
+ return initSessionAsResponder({
+ myAddress: this.address,
+ contactAddress,
+ myResponderEphemeralSecret: responderEphemeralSecret,
+ myResponderEphemeralPublic: responderEphemeralPublic,
+ theirHandshakeEphemeralPubKey: x25519Pub,
+ topicOutbound,
+ topicInbound,
+ kemSecret: kemSharedSecret,
+ });
+ }
+
+ /**
+ * Accepts a structured HSR event object instead of individual parameters scattered across variables.
+ */
+ createInitiatorSessionFromHsr(params: CreateInitiatorSessionFromHsrParams): RatchetSession {
+ return this.createInitiatorSession({
+ contactAddress: params.contactAddress,
+ initiatorEphemeralSecret: params.myEphemeralSecret,
+ responderEphemeralPubKey: params.hsrEvent.responderEphemeralPubKey,
+ inResponseToTag: params.hsrEvent.inResponseToTag,
+ kemCiphertext: params.hsrEvent.kemCiphertext,
+ initiatorKemSecret: params.myKemSecret,
+ });
+ }
+
+ private deriveTopicsFromDH(
+ mySecret: Uint8Array,
+ theirPublic: Uint8Array,
+ salt: Uint8Array,
+ isInitiator: boolean,
+ kemSecret: Uint8Array
+ ): { topicOutbound: `0x${string}`; topicInbound: `0x${string}` } {
+ const ephemeralShared = dh(mySecret, theirPublic);
+ const hybridSecret = hybridInitialSecret(ephemeralShared, kemSecret);
+
+ const deriveEpoch0Topic = (direction: 'outbound' | 'inbound'): `0x${string}` => {
+ const info = `verbeth:topic-${direction}:v2`;
+ const okm = hkdf(sha256, hybridSecret, salt, info, 32);
+ return keccak256(okm) as `0x${string}`;
+ };
+
+ if (isInitiator) {
+ return {
+ topicOutbound: deriveEpoch0Topic('outbound'),
+ topicInbound: deriveEpoch0Topic('inbound'),
+ };
+ } else {
+ return {
+ topicOutbound: deriveEpoch0Topic('inbound'),
+ topicInbound: deriveEpoch0Topic('outbound'),
+ };
+ }
+ }
+
+ // ===========================================================================
+ // Message Operations
+ // ===========================================================================
+
+ /**
+ * Prepare a message for sending (encrypt without submitting).
*
- * handles timestamp, signing keys, and sender address.
+ * Two-phase commit pattern:
+ * 1. prepareMessage() - encrypts and persists session state immediately
+ * 2. Submit transaction using prepared.payload and prepared.topic
+ * 3. On confirmation, call confirmTx() to clean up pending record
*
- * @param topicOutbound - The outbound topic for this conversation
- * @param recipientPubKey - Recipient's X25519 public key (from handshake)
- * @param message - Plaintext message to encrypt and send
- * @returns Transaction response
+ * Session state is committed immediately for forward secrecy.
+ * If tx fails, the ratchet slot is "burned" (receiver handles via skip keys).
*
- * @example
- * ```typescript
- * await client.sendMessage(
- * contact.topicOutbound,
- * contact.identityPubKey,
- * 'Hello again!'
- * );
- * ```
+ * @param conversationId - The conversation to send in
+ * @param plaintext - Message text to encrypt
+ * @returns PreparedMessage with payload ready for on-chain submission
*/
- async sendMessage(
- topicOutbound: string,
- recipientPubKey: Uint8Array,
- message: string
- ): Promise {
- const signingKeyPair = {
- publicKey: this.identityKeyPair.signingPublicKey,
- secretKey: this.identityKeyPair.signingSecretKey,
+ async prepareMessage(
+ conversationId: string,
+ plaintext: string
+ ): Promise<PreparedMessage> {
+ if (!this.sessionManager) {
+ throw new Error('SessionStore not configured. Call setSessionStore() first.');
+ }
+
+ const session = await this.sessionManager.getByConversationId(conversationId);
+ if (!session) {
+ throw new Error(`No session found for conversation: ${conversationId}`);
+ }
+
+ const plaintextBytes = new TextEncoder().encode(plaintext);
+
+ const encryptResult = ratchetEncrypt(
+ session,
+ plaintextBytes,
+ this.identityKeyPair.signingSecretKey
+ );
+
+ const packedPayload = packageRatchetPayload(
+ encryptResult.signature,
+ encryptResult.header,
+ encryptResult.ciphertext
+ );
+
+ await this.sessionManager.save(encryptResult.session);
+
+ const prepared: PreparedMessage = {
+ id: this.generatePreparedId(),
+ conversationId,
+ topic: encryptResult.topic,
+ payload: packedPayload,
+ plaintext,
+ sessionBefore: session,
+ sessionAfter: encryptResult.session,
+ messageNumber: session.sendingMsgNumber,
+ createdAt: Date.now(),
};
- const timestamp = Math.floor(Date.now() / 1000);
+ return prepared;
+ }
- return sendEncryptedMessage({
- executor: this.executor,
- topic: topicOutbound,
- message,
- recipientPubKey,
- senderAddress: this.address,
- senderSignKeyPair: signingKeyPair,
- timestamp,
+ // Session already saved in prepareMessage for forward secrecy.
+ // So this method can be used for additional bookkeeping if needed.
+ async commitMessage(_prepared: PreparedMessage): Promise<void> {
+ }
+
+ /**
+ * Decrypt an incoming message.
+ *
+ * Handles:
+ * - Topic routing (current, next, previous)
+ * - Signature verification (DoS protection)
+ * - Ratchet decryption
+ * - Session state updates
+ * - Automatic topic promotion
+ *
+ * @param topic - The topic the message arrived on
+ * @param payload - Raw message payload (Uint8Array)
+ * @param senderSigningKey - Sender's Ed25519 signing public key
+ * @param isOwnMessage - Whether this is our own outbound message (echo)
+ * @returns DecryptedMessage or null if decryption fails
+ */
+ async decryptMessage(
+ topic: string,
+ payload: Uint8Array,
+ senderSigningKey: Uint8Array,
+ isOwnMessage: boolean = false
+ ): Promise<DecryptedMessage | null> {
+ if (!this.sessionManager) {
+ throw new Error('SessionStore not configured. Call setSessionStore() first.');
+ }
+
+ if (isOwnMessage) {
+ return null;
+ }
+
+ const result = await this.sessionManager.getByInboundTopic(topic);
+ if (!result) {
+ return null;
+ }
+
+ const { session, topicMatch } = result;
+
+ if (!isRatchetPayload(payload)) {
+ return null;
+ }
+
+ const parsed = parseRatchetPayload(payload);
+ if (!parsed) {
+ return null;
+ }
+
+ const sigValid = verifyMessageSignature(
+ parsed.signature,
+ parsed.header,
+ parsed.ciphertext,
+ senderSigningKey
+ );
+
+ if (!sigValid) {
+ return null;
+ }
+
+ const decryptResult = ratchetDecrypt(session, parsed.header, parsed.ciphertext);
+ if (!decryptResult) {
+ return null;
+ }
+
+ // Check for topic ratchet before saving
+ const topicRatcheted = decryptResult.session.topicEpoch > session.topicEpoch;
+ const previousTopicInbound = topicRatcheted ? session.currentTopicInbound : null;
+
+ await this.sessionManager.save(decryptResult.session);
+
+ // Invoke callbacks if configured
+ if (this.callbacks) {
+ if (topicRatcheted && this.callbacks.onTopicRatchet) {
+ this.callbacks.onTopicRatchet({
+ conversationId: session.conversationId,
+ previousTopicInbound,
+ currentTopicInbound: decryptResult.session.currentTopicInbound,
+ topicEpoch: decryptResult.session.topicEpoch,
+ });
+ }
+
+ if (this.callbacks.onMessageDecrypted) {
+ this.callbacks.onMessageDecrypted({
+ conversationId: session.conversationId,
+ topicMatch,
+ topicEpoch: decryptResult.session.topicEpoch,
+ });
+ }
+ }
+
+ const plaintextStr = new TextDecoder().decode(decryptResult.plaintext);
+
+ return {
+ conversationId: session.conversationId,
+ plaintext: plaintextStr,
+ isOwnMessage: false,
+ session: decryptResult.session,
+ topic,
+ topicMatch,
+ };
+ }
+
+ /**
+ * Send a message with full lifecycle management.
+ *
+ * This is the high-level API that handles:
+ * 1. Encryption (with session commit)
+ * 2. Pending record creation
+ * 3. Transaction submission
+ * 4. Status tracking
+ *
+ * After calling this, wait for on-chain confirmation and call confirmTx().
+ *
+ * @param conversationId - Conversation to send in
+ * @param plaintext - Message text
+ * @returns SendResult with txHash and metadata
+ */
+ async sendMessage(
+ conversationId: string,
+ plaintext: string
+ ): Promise<SendResult> {
+ if (!this.sessionManager) {
+ throw new Error('SessionStore not configured. Call setSessionStore() first.');
+ }
+ if (!this.pendingManager) {
+ throw new Error('PendingStore not configured. Call setPendingStore() first.');
+ }
+
+ // 1. Prepare message (encrypts and persists session)
+ const prepared = await this.prepareMessage(conversationId, plaintext);
+
+ // 2. Create pending record
+ await this.pendingManager.create({
+ id: prepared.id,
+ conversationId,
+ topic: prepared.topic,
+ payloadHex: hexlify(prepared.payload),
+ plaintext,
+ sessionStateBefore: JSON.stringify(this.serializeSessionInfo(prepared.sessionBefore)),
+ sessionStateAfter: JSON.stringify(this.serializeSessionInfo(prepared.sessionAfter)),
+ createdAt: prepared.createdAt,
});
+
+ // 3. Submit transaction
+ const timestamp = Math.floor(Date.now() / 1000);
+ const nonce = prepared.messageNumber;
+
+ try {
+ const tx = await this.executor.sendMessage(
+ prepared.payload,
+ prepared.topic,
+ timestamp,
+ BigInt(nonce)
+ );
+
+ // 4. Update pending with txHash
+ await this.pendingManager.markSubmitted(prepared.id, tx.hash);
+
+ return {
+ messageId: prepared.id,
+ txHash: tx.hash,
+ topic: prepared.topic,
+ messageNumber: nonce,
+ };
+ } catch (error) {
+ // Mark as failed (ratchet slot is already burned)
+ await this.pendingManager.markFailed(prepared.id);
+ throw error;
+ }
+ }
+
+ /**
+ * Confirm a transaction after on-chain confirmation.
+ * Call this when you see your MessageSent event on-chain.
+ *
+ * @param txHash - Transaction hash to confirm
+ * @returns ConfirmResult or null if not found
+ */
+ async confirmTx(txHash: string): Promise<ConfirmResult | null> {
+ if (!this.pendingManager) {
+ throw new Error('PendingStore not configured.');
+ }
+
+ const pending = await this.pendingManager.getByTxHash(txHash);
+ if (!pending || pending.status !== 'submitted') {
+ return null;
+ }
+
+ // Finalize (delete pending record)
+ const finalized = await this.pendingManager.finalize(pending.id);
+ if (!finalized) {
+ return null;
+ }
+
+ return {
+ conversationId: finalized.conversationId,
+ plaintext: finalized.plaintext,
+ messageId: finalized.id,
+ };
}
- // ========== low-level API ==========
+ /**
+ * Handle transaction failure/revert.
+ *
+ * The ratchet slot is already burned (session was persisted in prepareMessage).
+ * This just cleans up the pending record.
+ *
+ * @param txHash - Transaction hash that failed
+ */
+ async revertTx(txHash: string): Promise<void> {
+ if (!this.pendingManager) {
+ throw new Error('PendingStore not configured.');
+ }
+
+ const pending = await this.pendingManager.getByTxHash(txHash);
+ if (pending) {
+ await this.pendingManager.delete(pending.id);
+ }
+ }
+
+
+ invalidateSessionCache(conversationId: string): void {
+ this.sessionManager?.invalidate(conversationId);
+ }
+
+ clearSessionCache(): void {
+ this.sessionManager?.clearCache();
+ }
+
+ async getSession(conversationId: string): Promise<RatchetSession | null> {
+ return this.sessionManager?.getByConversationId(conversationId) ?? null;
+ }
+
+ // ===========================================================================
+ // Low-level API Access
+ // ===========================================================================
get crypto() {
return crypto;
@@ -223,11 +672,14 @@ export class VerbethClient {
return identity;
}
+ get ratchet() {
+ return ratchet;
+ }
+
get executorInstance(): IExecutor {
return this.executor;
}
-
get identityKeyPairInstance(): IdentityKeyPair {
return this.identityKeyPair;
}
@@ -235,4 +687,23 @@ export class VerbethClient {
get userAddress(): string {
return this.address;
}
+
+ get identityProofInstance(): IdentityProof {
+ return this.identityProof;
+ }
+
+ private generatePreparedId(): string {
+ return `prep-${Date.now()}-${Math.random().toString(36).slice(2, 9)}`;
+ }
+
+ private serializeSessionInfo(session: RatchetSession): SerializedSessionInfo {
+ return {
+ conversationId: session.conversationId,
+ topicEpoch: session.topicEpoch,
+ sendingMsgNumber: session.sendingMsgNumber,
+ receivingMsgNumber: session.receivingMsgNumber,
+ currentTopicOutbound: session.currentTopicOutbound,
+ currentTopicInbound: session.currentTopicInbound,
+ };
+ }
}
\ No newline at end of file
diff --git a/packages/sdk/src/client/VerbethClientBuilder.ts b/packages/sdk/src/client/VerbethClientBuilder.ts
new file mode 100644
index 0000000..3d81572
--- /dev/null
+++ b/packages/sdk/src/client/VerbethClientBuilder.ts
@@ -0,0 +1,195 @@
+// packages/sdk/src/client/VerbethClientBuilder.ts
+
+/**
+ * Builder pattern and factory function for VerbethClient.
+ *
+ * Provides a fluent API for client setup, reducing boilerplate
+ * and making the initialization order explicit.
+ */
+
+import type { Signer } from 'ethers';
+import type { IExecutor } from '../executor.js';
+import type { IdentityKeyPair, IdentityProof } from '../types.js';
+import type { SessionStore, PendingStore, VerbethClientCallbacks } from './types.js';
+import { VerbethClient } from './VerbethClient.js';
+
+/**
+ * Options for createVerbethClient factory function.
+ */
+export interface CreateVerbethClientOptions {
+ address: string;
+ signer: Signer;
+ identityKeyPair: IdentityKeyPair;
+ identityProof: IdentityProof;
+ executor: IExecutor;
+ sessionStore?: SessionStore;
+ pendingStore?: PendingStore;
+ callbacks?: VerbethClientCallbacks;
+}
+
+/**
+ * Factory function for one-liner client creation.
+ *
+ * @example
+ * ```typescript
+ * const client = createVerbethClient({
+ * address,
+ * signer,
+ * identityKeyPair,
+ * identityProof,
+ * executor,
+ * sessionStore,
+ * pendingStore,
+ * });
+ * ```
+ */
+export function createVerbethClient(options: CreateVerbethClientOptions): VerbethClient {
+ const client = new VerbethClient({
+ address: options.address,
+ signer: options.signer,
+ identityKeyPair: options.identityKeyPair,
+ identityProof: options.identityProof,
+ executor: options.executor,
+ callbacks: options.callbacks,
+ });
+
+ if (options.sessionStore) {
+ client.setSessionStore(options.sessionStore);
+ }
+
+ if (options.pendingStore) {
+ client.setPendingStore(options.pendingStore);
+ }
+
+ return client;
+}
+
+/**
+ * Builder for fluent VerbethClient construction.
+ *
+ * @example
+ * ```typescript
+ * const client = new VerbethClientBuilder()
+ * .withAddress(address)
+ * .withSigner(signer)
+ * .withIdentity(keyPair, proof)
+ * .withExecutor(executor)
+ * .withStorage(sessionStore, pendingStore)
+ * .build();
+ * ```
+ */
+export class VerbethClientBuilder {
+ private address?: string;
+ private signer?: Signer;
+ private identityKeyPair?: IdentityKeyPair;
+ private identityProof?: IdentityProof;
+ private executor?: IExecutor;
+ private sessionStore?: SessionStore;
+ private pendingStore?: PendingStore;
+ private callbacks?: VerbethClientCallbacks;
+
+ /**
+ * Set the user's blockchain address.
+ */
+ withAddress(address: string): this {
+ this.address = address;
+ return this;
+ }
+
+ /**
+ * Set the ethers Signer for transaction signing.
+ */
+ withSigner(signer: Signer): this {
+ this.signer = signer;
+ return this;
+ }
+
+ /**
+ * Set the identity keypair and proof.
+ */
+ withIdentity(keyPair: IdentityKeyPair, proof: IdentityProof): this {
+ this.identityKeyPair = keyPair;
+ this.identityProof = proof;
+ return this;
+ }
+
+ /**
+ * Set the contract executor.
+ */
+ withExecutor(executor: IExecutor): this {
+ this.executor = executor;
+ return this;
+ }
+
+ /**
+ * Set storage adapters for sessions and pending messages.
+ * Both are optional - only set what you need.
+ */
+ withStorage(sessionStore?: SessionStore, pendingStore?: PendingStore): this {
+ if (sessionStore) {
+ this.sessionStore = sessionStore;
+ }
+ if (pendingStore) {
+ this.pendingStore = pendingStore;
+ }
+ return this;
+ }
+
+ /**
+ * Set the session store adapter.
+ */
+ withSessionStore(store: SessionStore): this {
+ this.sessionStore = store;
+ return this;
+ }
+
+ /**
+ * Set the pending message store adapter.
+ */
+ withPendingStore(store: PendingStore): this {
+ this.pendingStore = store;
+ return this;
+ }
+
+ /**
+ * Set optional callbacks for events.
+ */
+ withCallbacks(callbacks: VerbethClientCallbacks): this {
+ this.callbacks = callbacks;
+ return this;
+ }
+
+ /**
+ * Build the VerbethClient instance.
+ *
+ * @throws Error if required fields are missing
+ */
+ build(): VerbethClient {
+ if (!this.address) {
+ throw new Error('VerbethClientBuilder: address is required. Call withAddress().');
+ }
+ if (!this.signer) {
+ throw new Error('VerbethClientBuilder: signer is required. Call withSigner().');
+ }
+ if (!this.identityKeyPair) {
+ throw new Error('VerbethClientBuilder: identityKeyPair is required. Call withIdentity().');
+ }
+ if (!this.identityProof) {
+ throw new Error('VerbethClientBuilder: identityProof is required. Call withIdentity().');
+ }
+ if (!this.executor) {
+ throw new Error('VerbethClientBuilder: executor is required. Call withExecutor().');
+ }
+
+ return createVerbethClient({
+ address: this.address,
+ signer: this.signer,
+ identityKeyPair: this.identityKeyPair,
+ identityProof: this.identityProof,
+ executor: this.executor,
+ sessionStore: this.sessionStore,
+ pendingStore: this.pendingStore,
+ callbacks: this.callbacks,
+ });
+ }
+}
diff --git a/packages/sdk/src/client/hsrMatcher.ts b/packages/sdk/src/client/hsrMatcher.ts
new file mode 100644
index 0000000..89e2da2
--- /dev/null
+++ b/packages/sdk/src/client/hsrMatcher.ts
@@ -0,0 +1,45 @@
+// packages/sdk/src/client/hsrMatcher.ts
+
+import { computeHybridTagFromInitiator, decryptHandshakeResponse } from '../crypto.js';
+import { kem } from '../pq/kem.js';
+
+export interface PendingContactEntry {
+ address: string;
+ handshakeEphemeralSecret: Uint8Array;
+ kemSecretKey: Uint8Array;
+}
+
+/**
+ * Match an HSR event to a pending contact by computing hybrid tags.
+ *
+ * For each pending contact, attempts to decrypt the HSR payload and compute
+ * the expected tag. Returns the address of the first matching contact.
+ *
+ * Complexity: O(N) where N = number of pending contacts.
+ * Per contact: NaCl decrypt + ML-KEM decapsulate + HKDF tag computation.
+ *
+ * @param pendingContacts - Contacts with pending handshakes
+ * @param inResponseToTag - The tag from the HSR event (topics[1])
+ * @param R - Responder's ephemeral public key (from HSR event data)
+ * @param encryptedPayload - JSON string of the encrypted HSR payload
+ * @returns Address of matching contact, or null if no match
+ */
+export function matchHsrToContact(
+ pendingContacts: PendingContactEntry[],
+ inResponseToTag: `0x${string}`,
+ R: Uint8Array,
+ encryptedPayload: string
+): string | null {
+ for (const contact of pendingContacts) {
+ const decrypted = decryptHandshakeResponse(encryptedPayload, contact.handshakeEphemeralSecret);
+ if (!decrypted?.kemCiphertext) continue;
+
+ const kemSecret = kem.decapsulate(decrypted.kemCiphertext, contact.kemSecretKey);
+ const tag = computeHybridTagFromInitiator(contact.handshakeEphemeralSecret, R, kemSecret);
+
+ if (tag === inResponseToTag) {
+ return contact.address;
+ }
+ }
+ return null;
+}
diff --git a/packages/sdk/src/client/index.ts b/packages/sdk/src/client/index.ts
index 680e7ec..a4a2140 100644
--- a/packages/sdk/src/client/index.ts
+++ b/packages/sdk/src/client/index.ts
@@ -1,8 +1,43 @@
// packages/sdk/src/client/index.ts
export { VerbethClient } from './VerbethClient.js';
-export type {
+export { VerbethClientBuilder, createVerbethClient } from './VerbethClientBuilder.js';
+export { matchHsrToContact } from './hsrMatcher.js';
+
+export type {
+ CreateVerbethClientOptions,
+} from './VerbethClientBuilder.js';
+
+export type {
+ PendingContactEntry,
+} from './hsrMatcher.js';
+
+export type {
VerbethClientConfig,
+ VerbethClientCallbacks,
+ TopicRatchetEvent,
+ MessageDecryptedEvent,
+
HandshakeResult,
- HandshakeResponseResult
-} from './types.js';
\ No newline at end of file
+ HandshakeResponseResult,
+
+ SessionStore,
+ PendingStore,
+
+ PreparedMessage,
+ DecryptedMessage,
+ PendingMessage,
+ PendingStatus,
+
+ SendResult,
+ ConfirmResult,
+
+ SerializedSessionInfo,
+ HsrEventData,
+ CreateInitiatorSessionFromHsrParams,
+} from './types.js';
+
+export type {
+ CreateInitiatorSessionParams,
+ CreateResponderSessionParams,
+} from './VerbethClient.js';
\ No newline at end of file
diff --git a/packages/sdk/src/client/types.ts b/packages/sdk/src/client/types.ts
index 6c40758..862b0c9 100644
--- a/packages/sdk/src/client/types.ts
+++ b/packages/sdk/src/client/types.ts
@@ -1,12 +1,57 @@
// packages/sdk/src/client/types.ts
+/**
+ * Client types for VerbethClient.
+ *
+ * Includes storage interfaces that apps implement to connect
+ * VerbethClient to their persistence layer.
+ */
+
import type { Signer } from 'ethers';
import type { IExecutor } from '../executor.js';
-import type { IdentityKeyPair, IdentityProof, DuplexTopics } from '../types.js';
+import type { IdentityKeyPair, IdentityProof } from '../types.js';
+import type { RatchetSession } from '../ratchet/types.js';
import type nacl from 'tweetnacl';
+import type { KemKeyPair } from '../handshake.js';
+
+/**
+ * Event emitted when topic ratcheting occurs.
+ */
+export interface TopicRatchetEvent {
+ conversationId: string;
+ previousTopicInbound: string | null;
+ currentTopicInbound: string;
+ topicEpoch: number;
+}
+
+/**
+ * Event emitted after message decryption with topic match info.
+ */
+export interface MessageDecryptedEvent {
+ conversationId: string;
+ topicMatch: 'current' | 'next' | 'previous';
+ topicEpoch: number;
+}
+
+/**
+ * Optional callbacks for VerbethClient events.
+ */
+export interface VerbethClientCallbacks {
+ /**
+ * Called when a topic ratchet occurs (epoch advances).
+ * Useful for updating UI or triggering contact sync.
+ */
+ onTopicRatchet?: (event: TopicRatchetEvent) => void;
+
+ /**
+ * Called after successful message decryption.
+ * Provides topic match info for debugging/analytics.
+ */
+ onMessageDecrypted?: (event: MessageDecryptedEvent) => void;
+}
/**
- * Configuration for creating a VerbethClient instance
+ * Configuration for creating a VerbethClient instance.
*/
export interface VerbethClientConfig {
executor: IExecutor;
@@ -14,21 +59,137 @@ export interface VerbethClientConfig {
identityProof: IdentityProof;
signer: Signer;
address: string;
+ callbacks?: VerbethClientCallbacks;
}
-/**
- * Result from initiating a handshake
- */
export interface HandshakeResult {
tx: any;
- ephemeralKeyPair: nacl.BoxKeyPair; // to be stored
+ ephemeralKeyPair: nacl.BoxKeyPair;
+ kemKeyPair: KemKeyPair;
}
-/**
- * Result from accepting a handshake
- */
export interface HandshakeResponseResult {
tx: any;
- duplexTopics: DuplexTopics;
- tag: string;
+ topicOutbound: `0x${string}`;
+ topicInbound: `0x${string}`;
+ tag: `0x${string}`;
+ salt: Uint8Array;
+ responderEphemeralSecret: Uint8Array;
+ responderEphemeralPublic: Uint8Array;
+ kemSharedSecret?: Uint8Array;
+}
+
+/**
+ * Storage interface for ratchet sessions (to connect VerbethClient to a storage layer)
+ */
+export interface SessionStore {
+ get(conversationId: string): Promise<RatchetSession | null>;
+
+ /**
+ * Find session by any active inbound topic.
+ */
+ getByInboundTopic(topic: string): Promise<{ session: RatchetSession; topicMatch: 'current' | 'next' | 'previous' } | null>;
+ save(session: RatchetSession): Promise<void>;
+}
+
+/**
+ * Result of prepareMessage contains everything needed to send and commit.
+ *
+ * Two-phase commit pattern:
+ * 1. prepareMessage() - encrypts and returns PreparedMessage
+ * 2. Send transaction using prepared.payload and prepared.topic
+ * 3. Session state is already committed for forward secrecy
+ */
+export interface PreparedMessage {
+ id: string;
+ conversationId: string;
+ topic: `0x${string}`;
+ payload: Uint8Array;
+ plaintext: string;
+ sessionBefore: RatchetSession;
+ sessionAfter: RatchetSession;
+ messageNumber: number;
+ createdAt: number;
+}
+
+/**
+ * Result of decryptMessage.
+ */
+export interface DecryptedMessage {
+ conversationId: string;
+ plaintext: string;
+ isOwnMessage: boolean;
+ session: RatchetSession;
+ topic: string;
+ topicMatch: 'current' | 'next' | 'previous';
+}
+
+
+export type PendingStatus = 'preparing' | 'submitted' | 'confirmed' | 'failed';
+
+/**
+ * Pending outbound message record.
+ * Used for matching on-chain confirmations to sent messages.
+ */
+export interface PendingMessage {
+ id: string;
+ conversationId: string;
+ topic: string;
+ payloadHex: string;
+ plaintext: string;
+ sessionStateBefore: string;
+ sessionStateAfter: string;
+ createdAt: number;
+ txHash: string | null;
+ status: PendingStatus;
+}
+
+/**
+ * Storage interface for pending outbound messages.
+ * Implement this to enable sendMessage/confirmTx/revertTx.
+ */
+export interface PendingStore {
+ save(pending: PendingMessage): Promise<void>;
+ get(id: string): Promise<PendingMessage | null>;
+ getByTxHash(txHash: string): Promise<PendingMessage | null>;
+ updateStatus(id: string, status: PendingStatus, txHash?: string): Promise<void>;
+ delete(id: string): Promise<void>;
+ getByConversation(conversationId: string): Promise<PendingMessage[]>;
+}
+
+export interface SendResult {
+ messageId: string;
+ txHash: string;
+ topic: `0x${string}`;
+ messageNumber: number;
+}
+
+
+export interface ConfirmResult {
+ conversationId: string;
+ plaintext: string;
+ messageId: string;
+}
+
+export interface SerializedSessionInfo {
+ conversationId: string;
+ topicEpoch: number;
+ sendingMsgNumber: number;
+ receivingMsgNumber: number;
+ currentTopicOutbound: string;
+ currentTopicInbound: string;
+}
+
+
+export interface HsrEventData {
+ inResponseToTag: `0x${string}`;
+ responderEphemeralPubKey: Uint8Array;
+ kemCiphertext?: Uint8Array;
+}
+
+export interface CreateInitiatorSessionFromHsrParams {
+ contactAddress: string;
+ myEphemeralSecret: Uint8Array;
+ myKemSecret?: Uint8Array;
+ hsrEvent: HsrEventData;
}
\ No newline at end of file
diff --git a/packages/sdk/src/crypto.ts b/packages/sdk/src/crypto.ts
index 2888a54..2342b4d 100644
--- a/packages/sdk/src/crypto.ts
+++ b/packages/sdk/src/crypto.ts
@@ -1,22 +1,38 @@
// packages/sdk/src/crypto.ts
+/**
+ * This module handles:
+ * - Handshake encryption/decryption
+ * - Tag computation for handshake responses
+ *
+ * Post-handshake message encryption uses the ratchet module.
+ * See `ratchet/encrypt.ts` and `ratchet/decrypt.ts` for Double Ratchet.
+ *
+ * Topic derivation is handled entirely by the ratchet module.
+ * See `ratchet/kdf.ts` for `deriveTopicFromDH`.
+ */
+
import nacl from 'tweetnacl';
-import { keccak256, toUtf8Bytes, dataSlice } from 'ethers';
+import { keccak256, toUtf8Bytes } from 'ethers';
import { sha256 } from '@noble/hashes/sha2';
import { hkdf } from '@noble/hashes/hkdf';
-import {
- encodePayload,
- decodePayload,
+import {
+ encodePayload,
+ decodePayload,
encodeStructuredContent,
decodeStructuredContent,
- MessagePayload,
HandshakeResponseContent,
extractKeysFromHandshakeResponse
} from './payload.js';
import { IdentityProof } from './types.js';
+// =============================================================================
+// Handshake Encryption
+// =============================================================================
+
/**
- * Encrypts a structured payload (JSON-serializable objects)
+ * Encrypts a structured payload (JSON-serializable objects) using NaCl box.
+ * Used for handshake responses where ratchet is not yet established.
*/
export function encryptStructuredPayload(
payload: T,
@@ -42,7 +58,8 @@ export function encryptStructuredPayload(
}
/**
- * Decrypts a structured payload with converter function
+ * Decrypts a structured payload with converter function.
+ * Used for handshake responses where ratchet is not yet established.
*/
export function decryptStructuredPayload(
payloadJson: string,
@@ -64,67 +81,34 @@ export function decryptStructuredPayload(
return decodeStructuredContent(box, converter);
}
-// wrappers for encrypting and decrypting messages
-export function encryptMessage(
- message: string,
- recipientPublicKey: Uint8Array,
- ephemeralSecretKey: Uint8Array,
- ephemeralPublicKey: Uint8Array,
- staticSigningSecretKey?: Uint8Array,
- staticSigningPublicKey?: Uint8Array
-): string {
- const payload: MessagePayload = { content: message };
- return encryptStructuredPayload(
- payload,
- recipientPublicKey,
- ephemeralSecretKey,
- ephemeralPublicKey,
- staticSigningSecretKey,
- staticSigningPublicKey
- );
-}
+// =============================================================================
+// Handshake Response Decryption
+// =============================================================================
-export function decryptMessage(
- payloadJson: string,
- recipientSecretKey: Uint8Array,
- staticSigningPublicKey?: Uint8Array
-): string | null {
- const result = decryptStructuredPayload(
- payloadJson,
- recipientSecretKey,
- (obj) => obj as MessagePayload,
- staticSigningPublicKey
- );
- return result ? result.content : null;
-}
-/**
- * Decrypts handshake response and extracts individual keys from unified format
- */
export function decryptHandshakeResponse(
payloadJson: string,
initiatorEphemeralSecretKey: Uint8Array
): HandshakeResponseContent | null {
- return decryptStructuredPayload(
+ return decryptStructuredPayload(
payloadJson,
initiatorEphemeralSecretKey,
- (obj) => {
+ (obj: any): HandshakeResponseContent => {
if (!obj.identityProof) {
throw new Error("Invalid handshake response: missing identityProof");
}
return {
unifiedPubKeys: Uint8Array.from(Buffer.from(obj.unifiedPubKeys, 'base64')),
ephemeralPubKey: Uint8Array.from(Buffer.from(obj.ephemeralPubKey, 'base64')),
+ ...(obj.kemCiphertext && { kemCiphertext: Uint8Array.from(Buffer.from(obj.kemCiphertext, 'base64')) }),
note: obj.note,
- identityProof: obj.identityProof
+ identityProof: obj.identityProof,
};
}
);
}
-/**
- * helper to decrypt handshake response and extract individual keys
- */
+
export function decryptAndExtractHandshakeKeys(
payloadJson: string,
initiatorEphemeralSecretKey: Uint8Array
@@ -132,108 +116,49 @@ export function decryptAndExtractHandshakeKeys(
identityPubKey: Uint8Array;
signingPubKey: Uint8Array;
ephemeralPubKey: Uint8Array;
+ kemCiphertext?: Uint8Array;
note?: string;
- identityProof: IdentityProof;
+ identityProof: IdentityProof;
} | null {
const decrypted = decryptHandshakeResponse(payloadJson, initiatorEphemeralSecretKey);
if (!decrypted) return null;
-
+
const extracted = extractKeysFromHandshakeResponse(decrypted);
if (!extracted) return null;
-
+
return {
identityPubKey: extracted.identityPubKey,
signingPubKey: extracted.signingPubKey,
ephemeralPubKey: extracted.ephemeralPubKey,
+ kemCiphertext: decrypted.kemCiphertext,
note: decrypted.note,
identityProof: decrypted.identityProof
};
}
+// =============================================================================
+// Hybrid Tag Computation (PQ-Secure)
+// =============================================================================
-/**
- * HKDF(sha256) on shared secret, info="verbeth:hsr", then Keccak-256 -> bytes32 (0x...)
- */
-function finalizeHsrTag(shared: Uint8Array): `0x${string}` {
- const okm = hkdf(sha256, shared, new Uint8Array(0), toUtf8Bytes("verbeth:hsr"), 32);
+function finalizeHybridHsrTag(kemSecret: Uint8Array, ecdhShared: Uint8Array): `0x${string}` {
+ const okm = hkdf(sha256, kemSecret, ecdhShared, toUtf8Bytes("verbeth:hsr-hybrid:v1"), 32);
return keccak256(okm) as `0x${string}`;
}
-/**
- * Responder: tag = H( KDF( ECDH(r, viewPubA), "verbeth:hsr"))
- */
-export function computeTagFromResponder(
+export function computeHybridTagFromResponder(
rSecretKey: Uint8Array,
- viewPubA: Uint8Array
+ viewPubA: Uint8Array,
+ kemSecret: Uint8Array
): `0x${string}` {
- const shared = nacl.scalarMult(rSecretKey, viewPubA);
- return finalizeHsrTag(shared);
+ const ecdhShared = nacl.scalarMult(rSecretKey, viewPubA);
+ return finalizeHybridHsrTag(kemSecret, ecdhShared);
}
-/**
- * Initiator: tag = H( KDF( ECDH(viewPrivA, R), "verbeth:hsr"))
- */
-export function computeTagFromInitiator(
+export function computeHybridTagFromInitiator(
viewPrivA: Uint8Array,
- R: Uint8Array
+ R: Uint8Array,
+ kemSecret: Uint8Array
): `0x${string}` {
- const shared = nacl.scalarMult(viewPrivA, R);
- return finalizeHsrTag(shared);
-}
-
-
-/**
- * Derives a bytes32 topic from the shared secret via HKDF(SHA256) + Keccak-256.
- * - info: domain separation (e.g., "verbeth:topic-out:v1")
- * - salt: recommended to use a tag as salt (stable and shareable)
- */
-function deriveTopic(
- shared: Uint8Array,
- info: string,
- salt?: Uint8Array
-): `0x${string}` {
- const okm = hkdf(sha256, shared, salt ?? new Uint8Array(0), new TextEncoder().encode(info), 32);
- return keccak256(okm) as `0x${string}`;
-}
-
-
-export function deriveLongTermShared(
- myIdentitySecretKey: Uint8Array,
- theirIdentityPublicKey: Uint8Array
-): Uint8Array {
- return nacl.scalarMult(myIdentitySecretKey, theirIdentityPublicKey);
-}
-
-/**
- * Directional duplex topics (Initiator-Responder, Responder-Initiator).
- * Recommended salt: tag (bytes)
- */
-export function deriveDuplexTopics(
- myIdentitySecretKey: Uint8Array,
- theirIdentityPublicKey: Uint8Array,
- salt?: Uint8Array
-): { topicOut: `0x${string}`; topicIn: `0x${string}`; checksum: `0x${string}` } {
- const shared = deriveLongTermShared(myIdentitySecretKey, theirIdentityPublicKey);
- const topicOut = deriveTopic(shared, "verbeth:topic-out:v1", salt);
- const topicIn = deriveTopic(shared, "verbeth:topic-in:v1", salt);
- const chkFull = keccak256(Buffer.concat([
- toUtf8Bytes("verbeth:topic-chk:v1"),
- Buffer.from(topicOut.slice(2), 'hex'),
- Buffer.from(topicIn.slice(2), 'hex'),
- ]));
- const checksum = dataSlice(chkFull as `0x${string}`, 8) as `0x${string}`;
- return { topicOut, topicIn, checksum };
-}
-
-export function verifyDuplexTopicsChecksum(
- topicOut: `0x${string}`,
- topicIn: `0x${string}`,
- checksum: `0x${string}`
-): boolean {
- const chkFull = keccak256(Buffer.concat([
- toUtf8Bytes("verbeth:topic-chk:v1"),
- Buffer.from(topicOut.slice(2), 'hex'),
- Buffer.from(topicIn.slice(2), 'hex'),
- ]));
- return dataSlice(chkFull as `0x${string}`, 8) === checksum;
-}
+ const ecdhShared = nacl.scalarMult(viewPrivA, R);
+ return finalizeHybridHsrTag(kemSecret, ecdhShared);
+}
\ No newline at end of file
diff --git a/packages/sdk/src/executor.ts b/packages/sdk/src/executor.ts
index 8e8da1f..bba9ec0 100644
--- a/packages/sdk/src/executor.ts
+++ b/packages/sdk/src/executor.ts
@@ -14,13 +14,12 @@ import {
UserOpV07,
PackedUserOperation,
} from "./types.js";
-import type { LogChainV1 } from "@verbeth/contracts/typechain-types";
+import type { VerbethV1 } from "@verbeth/contracts/typechain-types";
function pack128x128(high: bigint, low: bigint): bigint {
return (high << 128n) | (low & ((1n << 128n) - 1n));
}
-// Unpack a packed 256-bit value into two 128-bit values
export function split128x128(word: bigint): readonly [bigint, bigint] {
const lowMask = (1n << 128n) - 1n;
return [word >> 128n, word & lowMask] as const;
@@ -74,7 +73,7 @@ export interface IExecutor {
// EOA Executor - Direct contract calls via wallet signer
export class EOAExecutor implements IExecutor {
- constructor(private contract: LogChainV1) {}
+ constructor(private contract: VerbethV1) {}
async sendMessage(
ciphertext: Uint8Array,
@@ -110,23 +109,22 @@ export class EOAExecutor implements IExecutor {
// Base Smart Account Executor - Uses wallet_sendCalls for sponsored transactions
export class BaseSmartAccountExecutor implements IExecutor {
- private logChainInterface: Interface;
+ private verbEthInterface: Interface;
private chainId: string;
constructor(
private baseAccountProvider: any,
- private logChainAddress: string,
- chainId = 8453, // Base mainnet by default
+ private verbEthAddress: string,
+ chainId = 8453,
private paymasterServiceUrl?: string,
private subAccountAddress?: string
) {
- this.logChainInterface = new Interface([
+ this.verbEthInterface = new Interface([
"function sendMessage(bytes calldata ciphertext, bytes32 topic, uint256 timestamp, uint256 nonce)",
"function initiateHandshake(bytes32 recipientHash, bytes pubKeys, bytes ephemeralPubKey, bytes plaintextPayload)",
"function respondToHandshake(bytes32 inResponseTo, bytes32 responderEphemeralR, bytes ciphertext)",
]);
- // Convert chainId to hex
this.chainId =
chainId === 8453
? "0x2105" // Base mainnet
@@ -141,7 +139,7 @@ export class BaseSmartAccountExecutor implements IExecutor {
timestamp: number,
nonce: bigint
): Promise {
- const callData = this.logChainInterface.encodeFunctionData("sendMessage", [
+ const callData = this.verbEthInterface.encodeFunctionData("sendMessage", [
ciphertext,
topic,
timestamp,
@@ -150,7 +148,7 @@ export class BaseSmartAccountExecutor implements IExecutor {
return this.executeCalls([
{
- to: this.logChainAddress,
+ to: this.verbEthAddress,
value: "0x0",
data: callData,
},
@@ -163,14 +161,14 @@ export class BaseSmartAccountExecutor implements IExecutor {
ephemeralPubKey: string,
plaintextPayload: Uint8Array
): Promise {
- const callData = this.logChainInterface.encodeFunctionData(
+ const callData = this.verbEthInterface.encodeFunctionData(
"initiateHandshake",
[recipientHash, pubKeys, ephemeralPubKey, plaintextPayload]
);
return this.executeCalls([
{
- to: this.logChainAddress,
+ to: this.verbEthAddress,
value: "0x0",
data: callData,
},
@@ -182,14 +180,14 @@ export class BaseSmartAccountExecutor implements IExecutor {
responderEphemeralR: string,
ciphertext: Uint8Array
): Promise {
- const callData = this.logChainInterface.encodeFunctionData(
+ const callData = this.verbEthInterface.encodeFunctionData(
"respondToHandshake",
[inResponseTo, responderEphemeralR, ciphertext]
);
return this.executeCalls([
{
- to: this.logChainAddress,
+ to: this.verbEthAddress,
value: "0x0",
data: callData,
},
@@ -200,7 +198,6 @@ export class BaseSmartAccountExecutor implements IExecutor {
calls: Array<{ to: string; value: string; data: string }>
) {
try {
- //console.log("DEBUG: Sub account address:", this.subAccountAddress);
const requestParams: any = {
version: "1.0",
chainId: this.chainId,
@@ -210,7 +207,6 @@ export class BaseSmartAccountExecutor implements IExecutor {
//** WORK IN PROGRESS */
if (this.subAccountAddress) {
requestParams.from = this.subAccountAddress;
- //console.log("DEBUG: Using sub account for transaction");
}
if (this.paymasterServiceUrl) {
@@ -219,24 +215,19 @@ export class BaseSmartAccountExecutor implements IExecutor {
url: this.paymasterServiceUrl,
},
};
- //console.log("DEBUG: Using paymaster for gas sponsorship");
}
- //console.log("DEBUG: Request params:", requestParams);
-
const result = await this.baseAccountProvider.request({
method: "wallet_sendCalls",
params: [requestParams],
});
- // first 32 bytes are the actual userop hash
if (
typeof result === "string" &&
result.startsWith("0x") &&
result.length > 66
) {
- const actualTxHash = "0x" + result.slice(2, 66); // Extract first 32 bytes
- //console.log("DEBUG: extracted tx hash:", actualTxHash);
+ const actualTxHash = "0x" + result.slice(2, 66);
return { hash: actualTxHash };
}
@@ -250,22 +241,21 @@ export class BaseSmartAccountExecutor implements IExecutor {
// UserOp Executor - Account Abstraction via bundler
export class UserOpExecutor implements IExecutor {
- private logChainInterface: Interface;
+ private verbEthInterface: Interface;
private smartAccountInterface: Interface;
constructor(
private smartAccountAddress: string,
- private logChainAddress: string,
+ private verbEthAddress: string,
private bundlerClient: any,
private smartAccountClient: any
) {
- this.logChainInterface = new Interface([
+ this.verbEthInterface = new Interface([
"function sendMessage(bytes calldata ciphertext, bytes32 topic, uint256 timestamp, uint256 nonce)",
"function initiateHandshake(bytes32 recipientHash, bytes pubKeys, bytes ephemeralPubKey, bytes plaintextPayload)",
"function respondToHandshake(bytes32 inResponseTo, bytes32 responderEphemeralR, bytes ciphertext)",
]);
- // Smart account interface for executing calls to other contracts
this.smartAccountInterface = new Interface([
"function execute(address target, uint256 value, bytes calldata data) returns (bytes)",
]);
@@ -277,7 +267,7 @@ export class UserOpExecutor implements IExecutor {
timestamp: number,
nonce: bigint
): Promise {
- const logChainCallData = this.logChainInterface.encodeFunctionData(
+ const verbEthCallData = this.verbEthInterface.encodeFunctionData(
"sendMessage",
[ciphertext, topic, timestamp, nonce]
);
@@ -285,9 +275,9 @@ export class UserOpExecutor implements IExecutor {
const smartAccountCallData = this.smartAccountInterface.encodeFunctionData(
"execute",
[
- this.logChainAddress,
- 0, // value
- logChainCallData,
+ this.verbEthAddress,
+ 0,
+ verbEthCallData,
]
);
@@ -300,7 +290,7 @@ export class UserOpExecutor implements IExecutor {
ephemeralPubKey: string,
plaintextPayload: Uint8Array
): Promise {
- const logChainCallData = this.logChainInterface.encodeFunctionData(
+ const verbEthCallData = this.verbEthInterface.encodeFunctionData(
"initiateHandshake",
[recipientHash, pubKeys, ephemeralPubKey, plaintextPayload]
);
@@ -308,9 +298,9 @@ export class UserOpExecutor implements IExecutor {
const smartAccountCallData = this.smartAccountInterface.encodeFunctionData(
"execute",
[
- this.logChainAddress,
- 0, // value
- logChainCallData,
+ this.verbEthAddress,
+ 0,
+ verbEthCallData,
]
);
@@ -322,7 +312,7 @@ export class UserOpExecutor implements IExecutor {
responderEphemeralR: string,
ciphertext: Uint8Array
): Promise {
- const logChainCallData = this.logChainInterface.encodeFunctionData(
+ const verbEthCallData = this.verbEthInterface.encodeFunctionData(
"respondToHandshake",
[inResponseTo, responderEphemeralR, ciphertext]
);
@@ -330,9 +320,9 @@ export class UserOpExecutor implements IExecutor {
const smartAccountCallData = this.smartAccountInterface.encodeFunctionData(
"execute",
[
- this.logChainAddress,
- 0, // value
- logChainCallData,
+ this.verbEthAddress,
+ 0,
+ verbEthCallData,
]
);
@@ -371,9 +361,9 @@ export class UserOpExecutor implements IExecutor {
}
}
-// Direct EntryPoint Executor - for local testing (bypasses bundler)
+// Direct EntryPoint Executor (bypasses bundler for local testing)
export class DirectEntryPointExecutor implements IExecutor {
- private logChainInterface: Interface;
+ private verbEthInterface: Interface;
private smartAccountInterface: Interface;
private entryPointContract: Contract;
private spec: AASpecVersion;
@@ -381,17 +371,16 @@ export class DirectEntryPointExecutor implements IExecutor {
constructor(
private smartAccountAddress: string,
entryPointContract: Contract | BaseContract,
- private logChainAddress: string,
+ private verbEthAddress: string,
private smartAccountClient: any,
private signer: Signer
) {
- this.logChainInterface = new Interface([
+ this.verbEthInterface = new Interface([
"function sendMessage(bytes calldata ciphertext, bytes32 topic, uint256 timestamp, uint256 nonce)",
"function initiateHandshake(bytes32 recipientHash, bytes pubKeys, bytes ephemeralPubKey, bytes plaintextPayload)",
"function respondToHandshake(bytes32 inResponseTo, bytes32 responderEphemeralR, bytes ciphertext)",
]);
- // Smart account interface for executing calls to other contracts
this.smartAccountInterface = new Interface([
"function execute(address target, uint256 value, bytes calldata data) returns (bytes)",
]);
@@ -406,7 +395,7 @@ export class DirectEntryPointExecutor implements IExecutor {
timestamp: number,
nonce: bigint
): Promise {
- const logChainCallData = this.logChainInterface.encodeFunctionData(
+ const verbEthCallData = this.verbEthInterface.encodeFunctionData(
"sendMessage",
[ciphertext, topic, timestamp, nonce]
);
@@ -414,9 +403,9 @@ export class DirectEntryPointExecutor implements IExecutor {
const smartAccountCallData = this.smartAccountInterface.encodeFunctionData(
"execute",
[
- this.logChainAddress,
+ this.verbEthAddress,
0, // value
- logChainCallData,
+ verbEthCallData,
]
);
@@ -429,7 +418,7 @@ export class DirectEntryPointExecutor implements IExecutor {
ephemeralPubKey: string,
plaintextPayload: Uint8Array
): Promise {
- const logChainCallData = this.logChainInterface.encodeFunctionData(
+ const verbEthCallData = this.verbEthInterface.encodeFunctionData(
"initiateHandshake",
[recipientHash, pubKeys, ephemeralPubKey, plaintextPayload]
);
@@ -437,9 +426,9 @@ export class DirectEntryPointExecutor implements IExecutor {
const smartAccountCallData = this.smartAccountInterface.encodeFunctionData(
"execute",
[
- this.logChainAddress,
- 0, // value
- logChainCallData,
+ this.verbEthAddress,
+ 0,
+ verbEthCallData,
]
);
@@ -451,7 +440,7 @@ export class DirectEntryPointExecutor implements IExecutor {
responderEphemeralR: string,
ciphertext: Uint8Array
): Promise {
- const logChainCallData = this.logChainInterface.encodeFunctionData(
+ const verbEthCallData = this.verbEthInterface.encodeFunctionData(
"respondToHandshake",
[inResponseTo, responderEphemeralR, ciphertext]
);
@@ -459,9 +448,9 @@ export class DirectEntryPointExecutor implements IExecutor {
const smartAccountCallData = this.smartAccountInterface.encodeFunctionData(
"execute",
[
- this.logChainAddress,
- 0, // value
- logChainCallData,
+ this.verbEthAddress,
+ 0,
+ verbEthCallData,
]
);
@@ -505,15 +494,12 @@ export class DirectEntryPointExecutor implements IExecutor {
} as UserOpV07;
}
- // Pad bigints, bytes32 before signing
const paddedUserOp = padBigints(userOp);
- //console.log("Padded UserOp:", paddedUserOp);
const signed = await this.smartAccountClient.signUserOperation(
paddedUserOp
);
- // Direct submit to EntryPoint
const tx = await this.entryPointContract.handleOps(
[signed],
await this.signer.getAddress()
@@ -523,20 +509,20 @@ export class DirectEntryPointExecutor implements IExecutor {
}
export class ExecutorFactory {
- static createEOA(contract: LogChainV1): IExecutor {
+ static createEOA(contract: VerbethV1): IExecutor {
return new EOAExecutor(contract);
}
static createBaseSmartAccount(
baseAccountProvider: any,
- logChainAddress: string,
+ verbEthAddress: string,
chainId = 8453,
paymasterServiceUrl?: string,
subAccountAddress?: string
): IExecutor {
return new BaseSmartAccountExecutor(
baseAccountProvider,
- logChainAddress,
+ verbEthAddress,
chainId,
paymasterServiceUrl,
subAccountAddress
@@ -546,13 +532,13 @@ export class ExecutorFactory {
static createUserOp(
smartAccountAddress: string,
_entryPointAddress: string,
- logChainAddress: string,
+ verbEthAddress: string,
bundlerClient: any,
smartAccountClient: any
): IExecutor {
return new UserOpExecutor(
smartAccountAddress,
- logChainAddress,
+ verbEthAddress,
bundlerClient,
smartAccountClient
);
@@ -561,14 +547,14 @@ export class ExecutorFactory {
static createDirectEntryPoint(
smartAccountAddress: string,
entryPointContract: Contract | BaseContract,
- logChainAddress: string,
+ verbEthAddress: string,
smartAccountClient: any,
signer: Signer
): IExecutor {
return new DirectEntryPointExecutor(
smartAccountAddress,
entryPointContract,
- logChainAddress,
+ verbEthAddress,
smartAccountClient,
signer
);
@@ -577,21 +563,21 @@ export class ExecutorFactory {
// Auto-detect executor based on environment and signer type
static async createAuto(
signerOrAccount: any,
- contract: LogChainV1,
+ contract: VerbethV1,
options?: {
entryPointAddress?: string;
entryPointContract?: Contract | BaseContract;
- logChainAddress?: string;
+ verbEthAddress?: string;
bundlerClient?: any;
baseAccountProvider?: any;
chainId?: number;
isTestEnvironment?: boolean;
}
): Promise {
- if (options?.baseAccountProvider && options?.logChainAddress) {
+ if (options?.baseAccountProvider && options?.verbEthAddress) {
return new BaseSmartAccountExecutor(
options.baseAccountProvider,
- options.logChainAddress,
+ options.verbEthAddress,
options.chainId || 8453
);
}
@@ -599,7 +585,6 @@ export class ExecutorFactory {
try {
const provider = signerOrAccount?.provider || signerOrAccount;
if (provider && typeof provider.request === "function") {
- // test if provider supports wallet_sendCalls
const capabilities = await provider
.request({
method: "wallet_getCapabilities",
@@ -607,11 +592,11 @@ export class ExecutorFactory {
})
.catch(() => null);
- if (capabilities && options?.logChainAddress) {
+ if (capabilities && options?.verbEthAddress) {
// if wallet supports capabilities, it's likely a Base Smart Account
return new BaseSmartAccountExecutor(
provider,
- options.logChainAddress,
+ options.verbEthAddress,
options.chainId || 8453
);
}
@@ -625,12 +610,12 @@ export class ExecutorFactory {
if (
options.isTestEnvironment &&
options.entryPointContract &&
- options.logChainAddress
+ options.verbEthAddress
) {
return new DirectEntryPointExecutor(
signerOrAccount.address,
options.entryPointContract,
- options.logChainAddress,
+ options.verbEthAddress,
signerOrAccount,
signerOrAccount.signer || signerOrAccount
);
@@ -639,18 +624,17 @@ export class ExecutorFactory {
if (
options.bundlerClient &&
options.entryPointAddress &&
- options.logChainAddress
+ options.verbEthAddress
) {
return new UserOpExecutor(
signerOrAccount.address,
- options.logChainAddress,
+ options.verbEthAddress,
options.bundlerClient,
signerOrAccount
);
}
}
- // default to EOA executor
return new EOAExecutor(contract);
}
}
diff --git a/packages/sdk/src/handshake.ts b/packages/sdk/src/handshake.ts
new file mode 100644
index 0000000..09e61c5
--- /dev/null
+++ b/packages/sdk/src/handshake.ts
@@ -0,0 +1,216 @@
+// packages/sdk/src/handshake.ts
+
+import {
+ keccak256,
+ toUtf8Bytes,
+ hexlify,
+ Signer,
+ getBytes
+} from "ethers";
+import nacl from 'tweetnacl';
+import { encryptStructuredPayload } from './crypto.js';
+import {
+ HandshakeContent,
+ serializeHandshakeContent,
+ encodeUnifiedPubKeys,
+ createHandshakeResponseContent,
+} from './payload.js';
+import { IdentityKeyPair, IdentityProof } from './types.js';
+import { IExecutor } from './executor.js';
+import { computeHybridTagFromResponder } from './crypto.js';
+import { kem } from './pq/kem.js';
+
+export interface KemKeyPair {
+ publicKey: Uint8Array;
+ secretKey: Uint8Array;
+}
+
+/**
+ * Initiates an on-chain handshake with unified keys and mandatory identity proof.
+ * Executor-agnostic: works with EOA, UserOp, and Direct EntryPoint
+ *
+ * @returns Transaction, ephemeral keypair, and KEM keypair (must be persisted for session init)
+ */
+export async function initiateHandshake({
+ executor,
+ recipientAddress,
+ identityKeyPair,
+ plaintextPayload,
+ identityProof,
+}: {
+ executor: IExecutor;
+ recipientAddress: string;
+ identityKeyPair: IdentityKeyPair;
+ plaintextPayload: string;
+ identityProof: IdentityProof;
+ signer?: Signer;
+}): Promise<{
+ tx: any;
+ ephemeralKeyPair: nacl.BoxKeyPair;
+ kemKeyPair: KemKeyPair;
+}> {
+ if (!executor) {
+ throw new Error("Executor must be provided");
+ }
+
+ // Generate ephemeral keypair for this handshake
+ const ephemeralKeyPair = nacl.box.keyPair();
+
+ // Generate ML-KEM-768 keypair for PQ-hybrid key exchange
+ const kemKeyPair = kem.generateKeyPair();
+
+ const recipientHash = keccak256(
+ toUtf8Bytes('contact:' + recipientAddress.toLowerCase())
+ );
+
+ const handshakeContent: HandshakeContent = {
+ plaintextPayload,
+ identityProof
+ };
+
+ const serializedPayload = serializeHandshakeContent(handshakeContent);
+
+ const unifiedPubKeys = encodeUnifiedPubKeys(
+ identityKeyPair.publicKey,
+ identityKeyPair.signingPublicKey
+ );
+
+ // Ephemeral public key now includes KEM public key (32 + 1184 = 1216 bytes)
+ const ephemeralWithKem = new Uint8Array(32 + kem.publicKeyBytes);
+ ephemeralWithKem.set(ephemeralKeyPair.publicKey, 0);
+ ephemeralWithKem.set(kemKeyPair.publicKey, 32);
+
+ const tx = await executor.initiateHandshake(
+ recipientHash,
+ hexlify(unifiedPubKeys),
+ hexlify(ephemeralWithKem),
+ toUtf8Bytes(serializedPayload)
+ );
+
+ return {
+ tx,
+ ephemeralKeyPair, // Caller must persist secretKey for ratchet session init
+ kemKeyPair, // Caller must also persist secretKey for KEM decapsulation
+ };
+}
+
+/**
+ * Responds to a handshake with unified keys and mandatory identity proof.
+ * Executor-agnostic: works with EOA, UserOp, and Direct EntryPoint
+ *
+ * Requires the initiator's extended ephemeral key (X25519 + ML-KEM); encapsulates a shared secret against the initiator's KEM public key and includes the KEM ciphertext in the encrypted response (throws if the initiator omitted the KEM key).
+ *
+ * @returns Transaction, tag, salt, ephemeral keys, and KEM secret
+ */
+export async function respondToHandshake({
+ executor,
+ initiatorEphemeralPubKey,
+ responderIdentityKeyPair,
+ note,
+ identityProof,
+}: {
+ executor: IExecutor;
+ /** Initiator's ephemeral key (32 bytes X25519) OR extended key (1216 bytes: X25519 + ML-KEM) */
+ initiatorEphemeralPubKey: Uint8Array;
+ responderIdentityKeyPair: IdentityKeyPair;
+ note?: string;
+ identityProof: IdentityProof;
+ signer?: Signer;
+}): Promise<{
+ tx: any;
+ salt: Uint8Array;
+ tag: `0x${string}`;
+ // Responder's DH ratchet secret (must persist as dhMySecretKey in ratchet session)
+ responderEphemeralSecret: Uint8Array;
+ // Responder's DH ratchet public (inside encrypted payload)
+ responderEphemeralPublic: Uint8Array;
+ // ML-KEM shared secret (32 bytes) — must be persisted for the hybrid KDF; the function throws if the initiator did not include a KEM public key
+ kemSharedSecret?: Uint8Array;
+}> {
+ if (!executor) {
+ throw new Error("Executor must be provided");
+ }
+
+ // =========================================================================
+ // TWO SEPARATE KEYPAIRS for unlinkability:
+ //
+ // 1. tagKeyPair (R, r): only for tag computation
+ // - R goes on-chain as responderEphemeralR
+ // - Used by Alice to verify the tag
+ // - not used for ratchet
+ //
+ // 2. ratchetKeyPair: For post-handshake encryption and first DH ratchet key
+ // - Public key goes inside encrypted payload
+ // - Becomes dhMySecretKey/dhMyPublicKey in ratchet session
+ //
+ // Why this matters: With a single keypair, the on-chain R would equal the
+ // first message's DH header, allowing observers to link HandshakeResponse
+ // to subsequent conversation. With two keypairs, there's no on-chain link.
+ // =========================================================================
+
+ const tagKeyPair = nacl.box.keyPair();
+ const ratchetKeyPair = nacl.box.keyPair();
+
+ // Check if initiator included KEM public key (extended format: 32 + 1184 = 1216 bytes)
+ const hasKem = initiatorEphemeralPubKey.length === 32 + kem.publicKeyBytes;
+ const initiatorX25519Pub = hasKem
+ ? initiatorEphemeralPubKey.slice(0, 32)
+ : initiatorEphemeralPubKey;
+
+ // KEM encapsulation needed for hybrid tag
+ let kemCiphertext: Uint8Array | undefined;
+ let kemSharedSecret: Uint8Array | undefined;
+
+ if (hasKem) {
+ const initiatorKemPub = initiatorEphemeralPubKey.slice(32, 32 + kem.publicKeyBytes);
+ const { ciphertext, sharedSecret } = kem.encapsulate(initiatorKemPub);
+ kemCiphertext = ciphertext;
+ kemSharedSecret = sharedSecret;
+ }
+
+ if (!kemSharedSecret) {
+ throw new Error("KEM is required for PQ-secure handshake");
+ }
+
+ // Hybrid tag combines ECDH(r, viewPubA) + kemSecret
+ const inResponseTo = computeHybridTagFromResponder(
+ tagKeyPair.secretKey,
+ initiatorX25519Pub,
+ kemSharedSecret
+ );
+ const salt: Uint8Array = getBytes(inResponseTo);
+
+ const responseContent = createHandshakeResponseContent(
+ responderIdentityKeyPair.publicKey,
+ responderIdentityKeyPair.signingPublicKey,
+ ratchetKeyPair.publicKey, // first DH ratchet key inside payload
+ note,
+ identityProof,
+ kemCiphertext
+ );
+
+ // Encrypt using ratchetKeyPair (the epk in encrypted payload = ratchetKeyPair.publicKey)
+ const payload = encryptStructuredPayload(
+ responseContent,
+ initiatorX25519Pub,
+ ratchetKeyPair.secretKey,
+ ratchetKeyPair.publicKey
+ );
+
+ // tagKeyPair.publicKey goes on-chain, not ratchetKeyPair
+ const tx = await executor.respondToHandshake(
+ inResponseTo,
+ hexlify(tagKeyPair.publicKey),
+ toUtf8Bytes(payload)
+ );
+
+ return {
+ tx,
+ salt,
+ tag: inResponseTo,
+ // Return ratchet keys for session initialization
+ responderEphemeralSecret: ratchetKeyPair.secretKey,
+ responderEphemeralPublic: ratchetKeyPair.publicKey,
+ kemSharedSecret,
+ };
+}
\ No newline at end of file
diff --git a/packages/sdk/src/identity.ts b/packages/sdk/src/identity.ts
index 611b6f2..7d785fe 100644
--- a/packages/sdk/src/identity.ts
+++ b/packages/sdk/src/identity.ts
@@ -1,11 +1,11 @@
+// packages/sdk/src/identity.ts
import { sha256 } from "@noble/hashes/sha2";
import { hkdf } from "@noble/hashes/hkdf";
-import { Signer, concat, hexlify, getBytes } from "ethers";
+import { Signer, Wallet, concat, hexlify, getBytes } from "ethers";
import nacl from "tweetnacl";
import { encodeUnifiedPubKeys } from "./payload.js";
import { IdentityContext, IdentityKeyPair, IdentityProof } from "./types.js";
-
const SECP256K1_N = BigInt(
"0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141"
);
@@ -29,8 +29,7 @@ function bigIntTo32BytesBE(x: bigint): Uint8Array {
}
/**
- * Canonicalize an Ethereum ECDSA signature (65 bytes) to low-s form.
- * This is only used as KDF input.
+ * Canonicalize an Ethereum ECDSA signature (65 bytes) to low-s form (only used as KDF input)
*/
function canonicalizeEcdsaSig65(sig: Uint8Array): Uint8Array {
if (sig.length !== 65) return sig;
@@ -45,19 +44,16 @@ function canonicalizeEcdsaSig65(sig: Uint8Array): Uint8Array {
const out = new Uint8Array(65);
out.set(r, 0);
out.set(bigIntTo32BytesBE(sLow), 32);
- out[64] = v;
+ out[64] = v;
return out;
}
-function buildSeedMessage(addrLower: string, ctx?: IdentityContext): string {
+function buildSeedMessage(addrLower: string): string {
const lines = [
"VerbEth Identity Seed v1",
`Address: ${addrLower}`,
"Context: verbeth",
- "Version: 1",
];
- if (typeof ctx?.chainId === "number") lines.push(`ChainId: ${ctx.chainId}`);
- if (ctx?.rpId) lines.push(`RpId: ${ctx.rpId}`);
return lines.join("\n");
}
@@ -65,6 +61,7 @@ function buildBindingMessage(
addrLower: string,
pkEd25519Hex: string,
pkX25519Hex: string,
+ executorAddress?: string,
ctx?: IdentityContext
): string {
const lines = [
@@ -72,41 +69,55 @@ function buildBindingMessage(
`Address: ${addrLower}`,
`PkEd25519: ${pkEd25519Hex}`,
`PkX25519: ${pkX25519Hex}`,
+ `ExecutorAddress: ${executorAddress ?? ""}`,
];
if (typeof ctx?.chainId === "number") lines.push(`ChainId: ${ctx.chainId}`);
if (ctx?.rpId) lines.push(`RpId: ${ctx.rpId}`);
- lines.push("Context: verbeth", "Version: 1");
return lines.join("\n");
}
+export interface DerivedIdentityKeys {
+ /** VerbEth identity key pair (X25519 + Ed25519) */
+ keyPair: IdentityKeyPair;
+ /** Hex-encoded secp256k1 private key for session signer */
+ sessionPrivateKey: string;
+ /** Ethereum address of the session signer */
+ sessionAddress: string;
+ /** Public key hex strings for binding message */
+ pkX25519Hex: string;
+ pkEd25519Hex: string;
+}
+
+export interface DerivedIdentityWithProof extends DerivedIdentityKeys {
+ identityProof: IdentityProof;
+}
+
+// ============================================================================
+// Derive all keys from seed signature
+// ============================================================================
+
/**
- * HKDF (RFC 5869) identity key derivation.
- * Returns a proof binding the derived keypair to the wallet address.
+ * Derive all identity keys and session key from a single seed signature.
*/
-export async function deriveIdentityKeyPairWithProof(
+export async function deriveIdentityKeys(
signer: any,
- address: string,
- ctx?: IdentityContext
-): Promise<{ keyPair: IdentityKeyPair; identityProof: IdentityProof }> {
+): Promise<DerivedIdentityKeys> {
const enc = new TextEncoder();
const addrLower = address.toLowerCase();
- // 1) Signature-based seed
- const seedMessage = buildSeedMessage(addrLower, ctx);
+ const seedMessage = buildSeedMessage(addrLower);
let seedSignature = await signer.signMessage(seedMessage);
const seedSigBytes = canonicalizeEcdsaSig65(getBytes(seedSignature));
- seedSignature = ""; // wipe from memory
+ seedSignature = ""; // wipe from memory
// IKM = HKDF( canonicalSig || H(seedMessage) || "verbeth/addr:" || address_lower )
- // salt/info are public domain labels (versioned)
const seedSalt = enc.encode("verbeth/seed-sig-v1");
const seedInfo = enc.encode("verbeth/ikm");
const seedMsgHash = sha256(enc.encode(seedMessage));
- const ikmInput = getBytes(concat([
- seedSigBytes,
- seedMsgHash,
- enc.encode("verbeth/addr:" + addrLower),
- ]));
+ const ikmInput = getBytes(
+ concat([seedSigBytes, seedMsgHash, enc.encode("verbeth/addr:" + addrLower)])
+ );
const ikm = hkdf(sha256, ikmInput, seedSalt, seedInfo, 32);
// Derive X25519 (encryption)
@@ -119,13 +130,21 @@ export async function deriveIdentityKeyPairWithProof(
const ed25519_seed = hkdf(sha256, ikm, new Uint8Array(0), info_ed25519, 32);
const signKeyPair = nacl.sign.keyPair.fromSeed(ed25519_seed);
- // wipe intermediates without affecting returned keyPair buffers
+ // Derive secp256k1 session key for txs via Safe module
+ const info_session = enc.encode("verbeth-session-secp256k1-v1");
+ const sessionSeed = hkdf(sha256, ikm, new Uint8Array(0), info_session, 32);
+ const sessionPrivateKey = hexlify(sessionSeed);
+ const sessionWallet = new Wallet(sessionPrivateKey);
+ const sessionAddress = sessionWallet.address;
+
try {
seedSigBytes.fill(0);
seedMsgHash.fill(0);
ikmInput.fill(0);
ikm.fill(0);
ed25519_seed.fill(0);
+ x25519_sk.fill(0); // NOTE(review): confirm boxKeyPair does not alias this buffer — tweetnacl fromSecretKey keeps the reference
+ sessionSeed.fill(0);
} catch {}
const pkX25519Hex = hexlify(boxKeyPair.publicKey);
@@ -138,43 +157,98 @@ export async function deriveIdentityKeyPairWithProof(
signingSecretKey: signKeyPair.secretKey,
};
- // 2) Second signature: binding both public keys (as before)
+ return {
+ keyPair,
+ sessionPrivateKey,
+ sessionAddress,
+ pkX25519Hex,
+ pkEd25519Hex,
+ };
+}
+
+// ============================================================================
+// Create binding proof with Safe address
+// ============================================================================
+
+/**
+ * Create the binding proof that ties the derived keys to the Safe address.
+ */
+export async function createBindingProof(
+ signer: any,
+ address: string,
+ derivedKeys: DerivedIdentityKeys,
+ executorAddress: string,
+ ctx?: IdentityContext
+): Promise<IdentityProof> {
+ const addrLower = address.toLowerCase();
+ const executorAddressLower = executorAddress.toLowerCase();
+
const message = buildBindingMessage(
addrLower,
- pkEd25519Hex,
- pkX25519Hex,
+ derivedKeys.pkEd25519Hex,
+ derivedKeys.pkX25519Hex,
+ executorAddressLower,
ctx
);
+
const signature = await signer.signMessage(message);
const messageRawHex = ("0x" +
Buffer.from(message, "utf-8").toString("hex")) as `0x${string}`;
return {
- keyPair,
- identityProof: {
- message,
- signature,
- messageRawHex,
- },
+ message,
+ signature,
+ messageRawHex,
+ };
+}
+
+
+// this is when the Safe address is known upfront
+export async function deriveIdentityKeyPairWithProof(
+ signer: any,
+ address: string,
+ executorAddress?: string,
+ ctx?: IdentityContext
+): Promise