diff --git a/v3/@claude-flow/cli/src/appliance/gguf-engine.ts b/v3/@claude-flow/cli/src/appliance/gguf-engine.ts
new file mode 100644
index 0000000000..8a744b9d01
--- /dev/null
+++ b/v3/@claude-flow/cli/src/appliance/gguf-engine.ts
@@ -0,0 +1,453 @@
+/**
+ * ruvLLM GGUF Inference Engine -- Pure Node.js GGUF Model Interface
+ *
+ * Provides:
+ * 1. GGUF binary header parsing (metadata without loading weights)
+ * 2. Model loading abstraction (node-llama-cpp when available, metadata-only fallback)
+ * 3. Token generation interface with async iterator streaming
+ * 4. KV-cache persistence to RVF-compatible binary format
+ *
+ * Zero external dependencies. node-llama-cpp is an optional peer.
+ *
+ * @module @claude-flow/cli/appliance/gguf-engine
+ */
+
+import { open, readFile, writeFile, stat as fsStat } from 'node:fs/promises';
+import { createHash } from 'node:crypto';
+import { basename } from 'node:path';
+
+// ── GGUF Metadata Value Types ───────────────────────────────
+
+const enum GgufValueType {
+  UINT8 = 0, INT8 = 1, UINT16 = 2, INT16 = 3, UINT32 = 4, INT32 = 5,
+  FLOAT32 = 6, BOOL = 7, STRING = 8, ARRAY = 9, UINT64 = 10, INT64 = 11, FLOAT64 = 12,
+}
+
+const GGUF_MAGIC = 0x46554747; // "GGUF" in little-endian
+const RVKV_MAGIC = 0x564B5652; // "RVKV" in little-endian
+const RVKV_VERSION = 1;
+
+// ── Public Interfaces ───────────────────────────────────────
+
+export interface GgufMetadata {
+  magic: string;
+  version: number;
+  tensorCount: number;
+  kvCount: number;
+  architecture?: string;
+  name?: string;
+  contextLength?: number;
+  embeddingLength?: number;
+  blockCount?: number;
+  vocabSize?: number;
+  quantization?: string;
+  fileSize: number;
+  metadata: Record<string, unknown>;
+}
+
+export interface GgufEngineConfig {
+  contextSize?: number;
+  maxTokens?: number;
+  temperature?: number;
+  kvCachePath?: string;
+  verbose?: boolean;
+}
+
+export interface GenerateRequest {
+  prompt: string;
+  model?: string;
+  maxTokens?: number;
+  temperature?: number;
+  stream?: 
boolean; + stopSequences?: string[]; +} + +export interface GenerateResponse { + text: string; + model: string; + tokensUsed: number; + latencyMs: number; + metadataOnly: boolean; +} + +// ── Internal Buffer Reader ────────────────────────────────── + +/** Stateful cursor over a Buffer for sequential binary reads. */ +class BufferReader { + private offset = 0; + constructor(private buf: Buffer) {} + get remaining(): number { return this.buf.length - this.offset; } + + readU8(): number { const v = this.buf.readUInt8(this.offset); this.offset += 1; return v; } + readI8(): number { const v = this.buf.readInt8(this.offset); this.offset += 1; return v; } + readU16(): number { const v = this.buf.readUInt16LE(this.offset); this.offset += 2; return v; } + readI16(): number { const v = this.buf.readInt16LE(this.offset); this.offset += 2; return v; } + readU32(): number { const v = this.buf.readUInt32LE(this.offset); this.offset += 4; return v; } + readI32(): number { const v = this.buf.readInt32LE(this.offset); this.offset += 4; return v; } + readF32(): number { const v = this.buf.readFloatLE(this.offset); this.offset += 4; return v; } + readF64(): number { const v = this.buf.readDoubleLE(this.offset); this.offset += 8; return v; } + readU64(): bigint { const v = this.buf.readBigUInt64LE(this.offset); this.offset += 8; return v; } + readI64(): bigint { const v = this.buf.readBigInt64LE(this.offset); this.offset += 8; return v; } + /** Safe for values up to 2^53. Real GGUF files never exceed this for tensor/kv counts. */ + readU64AsNumber(): number { return Number(this.readU64()); } + readBool(): boolean { return this.readU8() !== 0; } + + /** GGUF string: [length u64 LE][utf-8 bytes]. 
*/ + readString(): string { + const len = this.readU64AsNumber(); + if (len === 0) return ''; + if (len > this.remaining) throw new Error(`String length ${len} exceeds remaining buffer`); + const s = this.buf.toString('utf-8', this.offset, this.offset + len); + this.offset += len; + return s; + } +} + +// ── GGUF Value Reading ────────────────────────────────────── + +/** Read a typed scalar from the buffer (shared by value and array-element readers). */ +function readScalar(reader: BufferReader, t: number): unknown { + switch (t) { + case GgufValueType.UINT8: return reader.readU8(); + case GgufValueType.INT8: return reader.readI8(); + case GgufValueType.UINT16: return reader.readU16(); + case GgufValueType.INT16: return reader.readI16(); + case GgufValueType.UINT32: return reader.readU32(); + case GgufValueType.INT32: return reader.readI32(); + case GgufValueType.FLOAT32: return reader.readF32(); + case GgufValueType.BOOL: return reader.readBool(); + case GgufValueType.STRING: return reader.readString(); + case GgufValueType.UINT64: return Number(reader.readU64()); + case GgufValueType.INT64: return Number(reader.readI64()); + case GgufValueType.FLOAT64: return reader.readF64(); + default: return undefined; + } +} + +/** Read a single GGUF typed value (scalar or array) from the buffer. 
*/ +function readGgufValue(reader: BufferReader): unknown { + const valueType = reader.readU32(); + if (valueType === GgufValueType.ARRAY) { + const elemType = reader.readU32(); + const len = reader.readU64AsNumber(); + const arr: unknown[] = []; + for (let i = 0; i < len; i++) { + const v = readScalar(reader, elemType); + if (v === undefined) throw new Error(`Unknown GGUF array element type: ${elemType}`); + arr.push(v); + } + return arr; + } + const v = readScalar(reader, valueType); + if (v === undefined) throw new Error(`Unknown GGUF value type: ${valueType}`); + return v; +} + +// ── GGUF Header Parsing ───────────────────────────────────── + +/** + * Parse the header and metadata from a GGUF file without loading tensors. + * Reads only the first 256 KB of the file. + */ +export async function parseGgufHeader(path: string): Promise { + const fileInfo = await fsStat(path); + const readSize = Math.min(fileInfo.size, 256 * 1024); + const fh = await open(path, 'r'); + try { + const buf = Buffer.alloc(readSize); + await fh.read(buf, 0, readSize, 0); + return parseGgufBuffer(buf, fileInfo.size, path); + } finally { + await fh.close(); + } +} + +function parseGgufBuffer(buf: Buffer, fileSize: number, filePath: string): GgufMetadata { + const reader = new BufferReader(buf); + + const magic = reader.readU32(); + if (magic !== GGUF_MAGIC) { + throw new Error(`Invalid GGUF magic: 0x${magic.toString(16)} (expected 0x${GGUF_MAGIC.toString(16)})`); + } + + const version = reader.readU32(); + if (version < 2 || version > 3) { + throw new Error(`Unsupported GGUF version: ${version} (expected 2 or 3)`); + } + + const tensorCount = reader.readU64AsNumber(); + const kvCount = reader.readU64AsNumber(); + + const metadata: Record = {}; + for (let i = 0; i < kvCount; i++) { + if (reader.remaining < 12) break; + try { + const key = reader.readString(); + metadata[key] = readGgufValue(reader); + } catch { + break; // reached end of read window + } + } + + const arch = 
asString(metadata['general.architecture']);
+  const pfx = arch || 'llama'; // fallback prefix for well-known keys
+
+  return {
+    magic: 'GGUF', version, tensorCount, kvCount,
+    architecture: arch,
+    name: asString(metadata['general.name']),
+    contextLength: asNumber(metadata[`${pfx}.context_length`]),
+    embeddingLength: asNumber(metadata[`${pfx}.embedding_length`]),
+    blockCount: asNumber(metadata[`${pfx}.block_count`]),
+    vocabSize: inferVocabSize(metadata),
+    quantization: inferQuantFromMetadata(metadata, filePath),
+    fileSize, metadata,
+  };
+}
+
+// ── Metadata Helpers ────────────────────────────────────────
+
+function asString(v: unknown): string | undefined { return typeof v === 'string' ? v : undefined; }
+function asNumber(v: unknown): number | undefined { return typeof v === 'number' ? v : undefined; }
+
+const QUANT_RE: Array<[RegExp, string]> = [
+  [/q2_k/i, 'Q2_K'], [/q3_k_s/i, 'Q3_K_S'], [/q3_k_m/i, 'Q3_K_M'], [/q3_k_l/i, 'Q3_K_L'],
+  [/q4_k_s/i, 'Q4_K_S'], [/q4_k_m/i, 'Q4_K_M'], [/q4_0/i, 'Q4_0'], [/q4_1/i, 'Q4_1'],
+  [/q5_k_s/i, 'Q5_K_S'], [/q5_k_m/i, 'Q5_K_M'], [/q5_0/i, 'Q5_0'], [/q5_1/i, 'Q5_1'],
+  [/q6_k/i, 'Q6_K'], [/q8_0/i, 'Q8_0'], [/f16/i, 'F16'], [/f32/i, 'F32'],
+];
+
+function inferQuantFromMetadata(meta: Record<string, unknown>, filePath: string): string {
+  const ft = meta['general.file_type'];
+  if (typeof ft === 'number') return `file_type_${ft}`;
+  const name = basename(filePath);
+  for (const [re, label] of QUANT_RE) if (re.test(name)) return label;
+  return 'unknown';
+}
+
+function inferVocabSize(meta: Record<string, unknown>): number | undefined {
+  const tokens = meta['tokenizer.ggml.tokens'];
+  if (Array.isArray(tokens)) return tokens.length;
+  return asNumber(meta['tokenizer.ggml.vocab_size']);
+}
+
+// ── GGUF Engine ─────────────────────────────────────────────
+
+export class GgufEngine {
+  private config: Required<GgufEngineConfig>;
+  private llamaCpp: any = null;
+  private llamaModel: any = null;
+  private llamaContext: any = null;
+  private loadedModels: Map<string, GgufMetadata> = new Map();
+  private activeModelPath: string | null = null;
+  private kvCache: Map<string, Buffer> = new Map();
+
+  constructor(config: GgufEngineConfig) {
+    this.config = {
+      contextSize: config.contextSize ?? 4096,
+      maxTokens: config.maxTokens ?? 512,
+      temperature: config.temperature ?? 0.7,
+      kvCachePath: config.kvCachePath ?? '',
+      verbose: config.verbose ?? false,
+    };
+  }
+
+  /** Probe for node-llama-cpp availability. */
+  async initialize(): Promise<void> {
+    this.llamaCpp = await this.tryLoadLlamaCpp();
+    if (this.config.verbose) {
+      console.log(`[gguf-engine] node-llama-cpp: ${this.llamaCpp ? 'available' : 'not found (metadata-only mode)'}`);
+    }
+  }
+
+  /** Parse GGUF header and optionally load the model for inference. */
+  async loadModel(path: string): Promise<GgufMetadata> {
+    const meta = await parseGgufHeader(path);
+    this.loadedModels.set(path, meta);
+    this.activeModelPath = path;
+
+    if (this.llamaCpp) {
+      try {
+        const { getLlama } = this.llamaCpp;
+        const llama = await getLlama();
+        this.llamaModel = await llama.loadModel({ modelPath: path });
+        this.llamaContext = await this.llamaModel.createContext({ contextSize: this.config.contextSize });
+        if (this.config.verbose) console.log(`[gguf-engine] Model loaded: ${basename(path)}`);
+      } catch (err) {
+        if (this.config.verbose) console.warn('[gguf-engine] node-llama-cpp load failed:', err);
+        this.llamaModel = null;
+        this.llamaContext = null;
+      }
+    }
+    return meta;
+  }
+
+  /** Generate text. Delegates to node-llama-cpp or returns a metadata-only stub. */
+  async generate(request: GenerateRequest): Promise<GenerateResponse> {
+    const start = performance.now();
+    const modelPath = request.model ?? this.activeModelPath;
+    const modelName = modelPath ? basename(modelPath) : 'none';
+
+    if (this.llamaContext && this.llamaModel) {
+      try {
+        const session = new this.llamaCpp.LlamaChatSession({
+          contextSequence: this.llamaContext.getSequence(),
+        });
+        const text = await session.prompt(request.prompt, {
+          maxTokens: request.maxTokens ?? 
this.config.maxTokens, + temperature: request.temperature ?? this.config.temperature, + stopGenerationTrigger: request.stopSequences + ? request.stopSequences.map((s: string) => new this.llamaCpp.LlamaText([s])) + : undefined, + }); + // Use llama.cpp tokenizer for accurate count when available, else estimate + let tokensUsed: number; + try { + const seq = this.llamaContext.getSequence(); + tokensUsed = seq.tokenCount ?? Math.ceil(text.length / 4); + } catch { + tokensUsed = Math.ceil(text.length / 4); // ~4 chars per token heuristic + } + return { + text, model: modelName, tokensUsed, + latencyMs: performance.now() - start, metadataOnly: false, + }; + } catch (err) { + if (this.config.verbose) console.warn('[gguf-engine] Generation failed:', err); + } + } + + // Metadata-only fallback + const meta = modelPath ? this.loadedModels.get(modelPath) : undefined; + return { + text: meta + ? `[metadata-only] Model: ${meta.name ?? modelName}, arch: ${meta.architecture ?? 'unknown'}, ctx: ${meta.contextLength ?? 'unknown'}` + : '[metadata-only] No model loaded', + model: modelName, tokensUsed: 0, + latencyMs: performance.now() - start, metadataOnly: true, + }; + } + + /** Stream tokens via async iterator. Falls back to yielding full response. */ + async *stream(request: GenerateRequest): AsyncGenerator { + if (this.llamaContext && this.llamaModel) { + try { + const session = new this.llamaCpp.LlamaChatSession({ + contextSequence: this.llamaContext.getSequence(), + }); + const it = session.promptWithMeta(request.prompt, { + maxTokens: request.maxTokens ?? this.config.maxTokens, + temperature: request.temperature ?? 
this.config.temperature, + }); + if (it && typeof it[Symbol.asyncIterator] === 'function') { + for await (const chunk of it) { + if (typeof chunk === 'string') yield chunk; + else if (chunk?.text) yield chunk.text; + } + return; + } + } catch { /* fall through to single-chunk fallback */ } + } + const response = await this.generate(request); + yield response.text; + } + + /** + * Persist the KV cache to an RVF-compatible binary file. + * Format: RVKV magic | version u32 | model SHA-256 (32B) | entry count u32 + * entries: [key_len u32, key, val_len u32, val] | footer SHA-256 (32B) + */ + async persistKvCache(outputPath: string): Promise { + const path = outputPath || this.config.kvCachePath; + if (!path) throw new Error('No KV cache output path specified'); + + const modelHash = createHash('sha256').update(this.activeModelPath ?? 'no-model').digest(); + const entryBufs: Buffer[] = []; + for (const [key, value] of this.kvCache) { + const keyBuf = Buffer.from(key, 'utf-8'); + const hdr = Buffer.alloc(8); + hdr.writeUInt32LE(keyBuf.length, 0); + hdr.writeUInt32LE(value.length, 4); + entryBufs.push(hdr, keyBuf, value); + } + const entryData = Buffer.concat(entryBufs); + const footer = createHash('sha256').update(entryData).digest(); + + const header = Buffer.alloc(44); + header.writeUInt32LE(RVKV_MAGIC, 0); + header.writeUInt32LE(RVKV_VERSION, 4); + modelHash.copy(header, 8); + header.writeUInt32LE(this.kvCache.size, 40); + + await writeFile(path, Buffer.concat([header, entryData, footer])); + if (this.config.verbose) console.log(`[gguf-engine] KV cache persisted: ${this.kvCache.size} entries`); + } + + /** Restore KV cache from an RVF-compatible binary file. 
*/
+  async loadKvCache(inputPath: string): Promise<void> {
+    const data = await readFile(inputPath);
+    if (data.length < 44) throw new Error('KV cache file too small');
+
+    const magic = data.readUInt32LE(0);
+    if (magic !== RVKV_MAGIC) throw new Error(`Invalid KV cache magic: 0x${magic.toString(16)}`);
+    const version = data.readUInt32LE(4);
+    if (version !== RVKV_VERSION) throw new Error(`Unsupported KV cache version: ${version}`);
+
+    const entryCount = data.readUInt32LE(40);
+    let offset = 44;
+    const entries = new Map<string, Buffer>();
+
+    for (let i = 0; i < entryCount; i++) {
+      if (offset + 8 > data.length) throw new Error('KV cache file truncated');
+      const keyLen = data.readUInt32LE(offset);
+      const valLen = data.readUInt32LE(offset + 4);
+      offset += 8;
+      if (offset + keyLen + valLen > data.length) throw new Error('KV cache file truncated');
+      entries.set(data.toString('utf-8', offset, offset + keyLen), Buffer.from(data.subarray(offset + keyLen, offset + keyLen + valLen)));
+      offset += keyLen + valLen;
+    }
+
+    // Verify footer hash (mandatory)
+    if (offset + 32 > data.length) {
+      throw new Error('KV cache file missing SHA256 footer');
+    }
+    const stored = data.subarray(offset, offset + 32);
+    const computed = createHash('sha256').update(data.subarray(44, offset)).digest();
+    if (!stored.equals(computed)) throw new Error('KV cache integrity check failed: hash mismatch');
+
+    this.kvCache = entries;
+    if (this.config.verbose) console.log(`[gguf-engine] KV cache loaded: ${entries.size} entries`);
+  }
+
+  /** Return metadata for all loaded models. */
+  getLoadedModels(): GgufMetadata[] { return Array.from(this.loadedModels.values()); }
+
+  /** Store a key-value pair in the in-memory KV cache. */
+  setKvEntry(key: string, value: Buffer): void { this.kvCache.set(key, value); }
+
+  /** Retrieve a key-value pair from the in-memory KV cache. 
*/ + getKvEntry(key: string): Buffer | undefined { return this.kvCache.get(key); } + + /** Release resources, unload models, and optionally persist the KV cache. */ + async shutdown(): Promise { + if (this.config.kvCachePath && this.kvCache.size > 0) { + try { await this.persistKvCache(this.config.kvCachePath); } + catch (err) { if (this.config.verbose) console.warn('[gguf-engine] KV persist failed:', err); } + } + if (this.llamaContext?.dispose) { try { await this.llamaContext.dispose(); } catch { /* ignore */ } } + if (this.llamaModel?.dispose) { try { await this.llamaModel.dispose(); } catch { /* ignore */ } } + this.llamaContext = null; + this.llamaModel = null; + this.activeModelPath = null; + this.loadedModels.clear(); + this.kvCache.clear(); + if (this.config.verbose) console.log('[gguf-engine] Shutdown complete'); + } + + // ── Private ─────────────────────────────────────────────── + + private async tryLoadLlamaCpp(): Promise { + // @ts-ignore -- optional peer dependency, may not be installed + try { return await import('node-llama-cpp'); } catch { return null; } + } +} diff --git a/v3/@claude-flow/cli/src/appliance/ruvllm-bridge.ts b/v3/@claude-flow/cli/src/appliance/ruvllm-bridge.ts index 42d634e062..e892563135 100644 --- a/v3/@claude-flow/cli/src/appliance/ruvllm-bridge.ts +++ b/v3/@claude-flow/cli/src/appliance/ruvllm-bridge.ts @@ -15,6 +15,7 @@ import { readdir, stat } from 'node:fs/promises'; import { join, extname, basename } from 'node:path'; +import type { GgufEngine as GgufEngineType } from './gguf-engine.js'; // ── Configuration ─────────────────────────────────────────── @@ -128,17 +129,34 @@ export class RuvllmBridge { private ruvectorCore: any = null; private ruvectorRouter: any = null; private ruvectorSona: any = null; + private ggufEngine: GgufEngineType | null = null; constructor(config: RuvllmConfig) { if (!config.modelsDir) throw new Error('RuvllmConfig.modelsDir is required'); this.config = { ...DEFAULT_CONFIG, ...config }; } - /** 
Probe optional @ruvector packages and scan modelsDir for GGUF files. */ + /** Probe optional @ruvector packages, initialize GGUF engine, and scan modelsDir. */ async initialize(): Promise { this.ruvectorCore = await this.tryImport('@ruvector/core'); this.ruvectorRouter = await this.tryImport('@ruvector/router'); this.ruvectorSona = await this.tryImport('@ruvector/sona'); + + // Initialize GGUF engine for local model inference + try { + const { GgufEngine } = await import('./gguf-engine.js'); + this.ggufEngine = new GgufEngine({ + contextSize: this.config.contextSize, + maxTokens: this.config.maxTokens, + temperature: this.config.temperature, + kvCachePath: this.config.kvCachePath, + verbose: this.config.verbose, + }); + await this.ggufEngine.initialize(); + } catch { + // GGUF engine is optional + } + await this.scanModelsDir(); if (this.config.verbose) { @@ -146,6 +164,7 @@ export class RuvllmBridge { this.ruvectorCore && '@ruvector/core', this.ruvectorRouter && '@ruvector/router', this.ruvectorSona && '@ruvector/sona', + this.ggufEngine && 'gguf-engine', ].filter(Boolean); if (pkgs.length) console.log(`[ruvLLM] Loaded: ${pkgs.join(', ')}`); console.log(`[ruvLLM] ${this.models.size} model(s) in ${this.config.modelsDir}`); @@ -157,12 +176,17 @@ export class RuvllmBridge { return Array.from(this.models.values()); } - /** Load a model into memory (delegates to @ruvector/core when available). */ + /** Load a model into memory (delegates to GGUF engine or @ruvector/core). */ async loadModel(name: string): Promise { const info = this.models.get(name); if (!info) throw new Error(`Model "${name}" not found. 
Available: ${[...this.models.keys()].join(', ')}`); - if (this.ruvectorCore?.loadModel) { + // Prefer GGUF engine (parses header, loads via node-llama-cpp if available) + if (this.ggufEngine) { + const meta = await this.ggufEngine.loadModel(info.path); + if (meta.architecture) info.parameters = meta.architecture; + if (meta.quantization) info.quantization = meta.quantization; + } else if (this.ruvectorCore?.loadModel) { await this.ruvectorCore.loadModel(info.path, { contextSize: this.config.contextSize }); } info.loaded = true; @@ -185,17 +209,27 @@ export class RuvllmBridge { return { text: booster, model: 'agent-booster', tokensUsed: 0, latencyMs: performance.now() - start, tier: 1, cached: false }; } - // Tier 2: Local model + // Tier 2: Local model (GGUF engine preferred, then @ruvector/core) const info = this.models.get(modelName); - if (info?.loaded && this.ruvectorCore?.generate) { + if (info?.loaded) { try { - const r = await this.ruvectorCore.generate({ - model: info.path, prompt: request.prompt, - maxTokens: request.maxTokens ?? this.config.maxTokens, - temperature: request.temperature ?? this.config.temperature, - stopSequences: request.stopSequences, - }); - return { text: r.text ?? '', model: modelName, tokensUsed: r.tokensUsed ?? 0, latencyMs: performance.now() - start, tier: 2, cached: false }; + if (this.ggufEngine) { + const r = await this.ggufEngine.generate({ + prompt: request.prompt, + maxTokens: request.maxTokens ?? this.config.maxTokens, + temperature: request.temperature ?? this.config.temperature, + stopSequences: request.stopSequences, + }); + return { text: r.text, model: modelName, tokensUsed: r.tokensUsed, latencyMs: performance.now() - start, tier: 2, cached: false }; + } else if (this.ruvectorCore?.generate) { + const r = await this.ruvectorCore.generate({ + model: info.path, prompt: request.prompt, + maxTokens: request.maxTokens ?? this.config.maxTokens, + temperature: request.temperature ?? 
this.config.temperature, + stopSequences: request.stopSequences, + }); + return { text: r.text ?? '', model: modelName, tokensUsed: r.tokensUsed ?? 0, latencyMs: performance.now() - start, tier: 2, cached: false }; + } } catch (err) { if (this.config.verbose) console.warn('[ruvLLM] Local generation failed, tier 3 fallback:', err); } @@ -236,6 +270,10 @@ export class RuvllmBridge { /** Persist KV-cache, unload models, and clean up. */ async shutdown(): Promise { + if (this.ggufEngine) { + await this.ggufEngine.shutdown(); + this.ggufEngine = null; + } if (this.config.kvCachePath && this.ruvectorCore?.persistKvCache) { try { await this.ruvectorCore.persistKvCache(this.config.kvCachePath); } catch (e) { if (this.config.verbose) console.warn('[ruvLLM] KV-cache persist failed:', e); } diff --git a/v3/@claude-flow/cli/src/appliance/rvfa-distribution.ts b/v3/@claude-flow/cli/src/appliance/rvfa-distribution.ts new file mode 100644 index 0000000000..8e61f9c215 --- /dev/null +++ b/v3/@claude-flow/cli/src/appliance/rvfa-distribution.ts @@ -0,0 +1,406 @@ +/** + * RVFA Distribution & Hot-Patch Module + * + * IPFS publishing of RVFA appliances via Pinata and RVFP binary patches + * for section-level hot updates with atomic rollback. 
+ * + * RVFP layout: [4B "RVFP"] [4B version u32LE] [4B header_len u32LE] + * [header JSON] [new section data] [32B SHA256 footer] + */ + +import { createHash, sign, verify as edVerify } from 'node:crypto'; +import { readFile, writeFile, rename, unlink, copyFile, mkdir } from 'node:fs/promises'; +import { dirname } from 'node:path'; +import { request as httpsRequest } from 'node:https'; +import { gzipSync, gunzipSync } from 'node:zlib'; +import { RvfaReader, RvfaWriter } from './rvfa-format.js'; + +// ── Constants ──────────────────────────────────────────────── +const RVFP_VERSION = 1; +const PRE = 12; // preamble: 4 magic + 4 version + 4 header_len +const SHA_LEN = 32; +const DEFAULT_GW = 'https://gateway.pinata.cloud'; +const DEFAULT_API = 'https://api.pinata.cloud'; + +// ── Types ──────────────────────────────────────────────────── +export interface RvfpHeader { + magic: 'RVFP'; version: number; + targetApplianceName: string; targetApplianceVersion: string; + targetSection: string; patchVersion: string; created: string; + newSectionSize: number; newSectionSha256: string; + compression: 'none' | 'gzip'; signature?: string; signedBy?: string; +} +export interface CreatePatchOptions { + targetName: string; targetVersion: string; sectionId: string; + sectionData: Buffer; patchVersion: string; + compression?: 'none' | 'gzip'; privateKey?: Buffer; signedBy?: string; +} +export interface ApplyOptions { backup?: boolean; verify?: boolean; publicKey?: Buffer } +export interface ApplyResult { + success: boolean; backupPath?: string; newSize: number; + patchedSection: string; errors: string[]; +} +export interface PatchVerifyResult { valid: boolean; header: RvfpHeader; errors: string[] } +export interface PublishConfig { pinataJwt?: string; gatewayUrl?: string; apiUrl?: string } +export interface PublishMetadata { name?: string; description?: string; version?: string; profile?: string } +export interface PublishResult { cid: string; size: number; gatewayUrl: string; 
pinataUrl: string } +export interface PublishedItem { cid: string; name: string; size: number; date: string } + +// ── Crypto helpers ─────────────────────────────────────────── +function sha256(d: Buffer): string { return createHash('sha256').update(d).digest('hex'); } +function sha256B(d: Buffer): Buffer { return createHash('sha256').update(d).digest(); } + +function detectKeyFormat(key: Buffer): { format: 'pem' | 'der'; type: string } { + const str = key.toString('utf-8'); + if (str.includes('BEGIN PRIVATE KEY')) return { format: 'pem', type: 'pkcs8' }; + if (str.includes('BEGIN PUBLIC KEY')) return { format: 'pem', type: 'spki' }; + // Heuristic: DER-encoded keys are raw binary, never valid UTF-8 "BEGIN" + return { format: 'der', type: 'pkcs8' }; // caller must override type for public keys +} + +function edSign(data: Buffer, key: Buffer): string { + const det = detectKeyFormat(key); + return sign(null, data, { key, format: det.format, type: det.type } as any).toString('hex'); +} +function edCheck(data: Buffer, sig: string, key: Buffer): boolean { + try { + const det = detectKeyFormat(key); + const type = det.format === 'pem' ? 
det.type : 'spki'; // public key for verify + return edVerify(null, data, { key, format: det.format, type } as any, Buffer.from(sig, 'hex')); + } catch { return false; } +} + +// ── HTTP helpers ───────────────────────────────────────────── +function pinataReq( + method: string, path: string, jwt: string, body?: Buffer, ct?: string, +): Promise<{ status: number; data: unknown }> { + return new Promise((resolve, reject) => { + const u = new URL(path); + const h: Record = { Authorization: `Bearer ${jwt}` }; + if (ct) h['Content-Type'] = ct; + if (body) h['Content-Length'] = String(body.length); + const req = httpsRequest( + { hostname: u.hostname, path: u.pathname + u.search, method, headers: h }, + (res) => { + const ch: Buffer[] = []; + res.on('data', (c: Buffer) => ch.push(c)); + res.on('end', () => { + const raw = Buffer.concat(ch); + let data: unknown; + try { data = JSON.parse(raw.toString('utf-8')); } catch { data = raw; } + resolve({ status: res.statusCode ?? 0, data }); + }); + }, + ); + req.setTimeout(30_000, () => { req.destroy(new Error('Request timed out after 30s')); }); + req.on('error', reject); + if (body) req.write(body); + req.end(); + }); +} + +function httpGet(url: string, maxRedirects = 5): Promise { + return new Promise((resolve, reject) => { + if (maxRedirects <= 0) return reject(new Error('Too many redirects')); + const u = new URL(url); + const req = httpsRequest({ hostname: u.hostname, path: u.pathname + u.search, method: 'GET' }, (res) => { + if (res.statusCode && res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) { + return void httpGet(res.headers.location, maxRedirects - 1).then(resolve, reject); + } + const ch: Buffer[] = []; + res.on('data', (c: Buffer) => ch.push(c)); + res.on('end', () => resolve(Buffer.concat(ch))); + }); + req.setTimeout(30_000, () => { req.destroy(new Error('Request timed out after 30s')); }); + req.on('error', reject); + req.end(); + }); +} + +function multipart( + name: string, file: string, 
data: Buffer, meta?: string, +): { body: Buffer; ct: string } { + const b = `----Rvfa${Date.now()}${Math.random().toString(36).slice(2)}`; + const parts: Buffer[] = []; + if (meta) { + parts.push(Buffer.from( + `--${b}\r\nContent-Disposition: form-data; name="pinataMetadata"\r\n` + + `Content-Type: application/json\r\n\r\n${meta}\r\n`, + )); + } + parts.push(Buffer.from( + `--${b}\r\nContent-Disposition: form-data; name="${name}"; filename="${file}"\r\n` + + `Content-Type: application/octet-stream\r\n\r\n`, + )); + parts.push(data, Buffer.from(`\r\n--${b}--\r\n`)); + return { body: Buffer.concat(parts), ct: `multipart/form-data; boundary=${b}` }; +} + +/** Extract patch section data and bounds from a parsed patch buffer. */ +function patchData(buf: Buffer): { start: number; end: number; section: Buffer } { + const hLen = buf.readUInt32LE(8); + const start = PRE + hLen; + const end = buf.length - SHA_LEN; + return { start, end, section: buf.subarray(start, end) }; +} + +/** Canonical JSON: recursive key-sorting for deterministic serialization. */ +function canonicalJson(value: unknown): string { + return JSON.stringify(value, (_key, val) => { + if (val !== null && typeof val === 'object' && !Array.isArray(val) && !Buffer.isBuffer(val)) { + const sorted: Record = {}; + for (const k of Object.keys(val as Record).sort()) { + sorted[k] = (val as Record)[k]; + } + return sorted; + } + return val; + }); +} + +/** Build a failed ApplyResult. */ +function failResult(sec: string, errs: string[], extra?: Partial): ApplyResult { + return { success: false, newSize: 0, patchedSection: sec, errors: errs, ...extra }; +} + +// ── RvfaPatcher ────────────────────────────────────────────── +export class RvfaPatcher { + static async createPatch(opts: CreatePatchOptions): Promise { + const comp = opts.compression ?? 'none'; + const payload = comp === 'gzip' ? 
gzipSync(opts.sectionData) : opts.sectionData; + const header: RvfpHeader = { + magic: 'RVFP', version: RVFP_VERSION, + targetApplianceName: opts.targetName, targetApplianceVersion: opts.targetVersion, + targetSection: opts.sectionId, patchVersion: opts.patchVersion, + created: new Date().toISOString(), newSectionSize: payload.length, + newSectionSha256: sha256(payload), compression: comp, + }; + if (opts.privateKey && opts.signedBy) { + const signable = Buffer.concat([Buffer.from(canonicalJson(header), 'utf-8'), payload]); + header.signature = edSign(signable, opts.privateKey); + header.signedBy = opts.signedBy; + } + const hJson = Buffer.from(JSON.stringify(header), 'utf-8'); + const magic = Buffer.from('RVFP'); + const ver = Buffer.alloc(4); ver.writeUInt32LE(RVFP_VERSION, 0); + const hLen = Buffer.alloc(4); hLen.writeUInt32LE(hJson.length, 0); + return Buffer.concat([magic, ver, hLen, hJson, payload, sha256B(payload)]); + } + + static parsePatchHeader(buf: Buffer): RvfpHeader { + if (buf.length < PRE) throw new Error('Buffer too small for RVFP preamble'); + const magic = buf.subarray(0, 4).toString('ascii'); + if (magic !== 'RVFP') throw new Error(`Invalid RVFP magic: "${magic}"`); + const ver = buf.readUInt32LE(4); + if (ver !== RVFP_VERSION) throw new Error(`Unsupported RVFP version: ${ver}`); + const hLen = buf.readUInt32LE(8); + if (PRE + hLen > buf.length) throw new Error('Buffer too small for declared header'); + const h = JSON.parse(buf.subarray(PRE, PRE + hLen).toString('utf-8')) as RvfpHeader; + if (h.magic !== 'RVFP') throw new Error('RVFP header magic mismatch'); + return h; + } + + static async verifyPatch(buf: Buffer): Promise { + const errors: string[] = []; + let header: RvfpHeader; + try { header = RvfaPatcher.parsePatchHeader(buf); } catch (e) { + const empty: RvfpHeader = { + magic: 'RVFP', version: 0, targetApplianceName: '', targetApplianceVersion: '', + targetSection: '', patchVersion: '', created: '', newSectionSize: 0, + newSectionSha256: 
'', compression: 'none', + }; + return { valid: false, header: empty, errors: [(e as Error).message] }; + } + const { start, end, section } = patchData(buf); + if (end < start) { + errors.push('Patch too small: no room for section data and footer'); + return { valid: false, header, errors }; + } + if (section.length !== header.newSectionSize) + errors.push(`Size mismatch: header=${header.newSectionSize}, actual=${section.length}`); + if (sha256(section) !== header.newSectionSha256) + errors.push('Section SHA256 mismatch'); + if (!sha256B(section).equals(buf.subarray(buf.length - SHA_LEN))) + errors.push('Footer SHA256 mismatch'); + return { valid: errors.length === 0, header, errors }; + } + + static async applyPatch( + rvfaPath: string, patchBuf: Buffer, opts?: ApplyOptions, + ): Promise { + const doBackup = opts?.backup ?? true; + const doVerify = opts?.verify ?? true; + + // Parse & verify patch + let header: RvfpHeader; + try { header = RvfaPatcher.parsePatchHeader(patchBuf); } catch (e) { + return failResult('', [(e as Error).message]); + } + const sec = header.targetSection; + + // Verify signature + if (opts?.publicKey && header.signature) { + const { section } = patchData(patchBuf); + const unsigned = { ...header } as Record; + delete unsigned.signature; delete unsigned.signedBy; + const signable = Buffer.concat([Buffer.from(canonicalJson(unsigned), 'utf-8'), section]); + if (!edCheck(signable, header.signature, opts.publicKey)) + return failResult(sec, ['Patch signature verification failed']); + } + + // Verify patch integrity + const check = await RvfaPatcher.verifyPatch(patchBuf); + if (!check.valid) return failResult(sec, check.errors); + + // Read target RVFA + let reader: RvfaReader; + try { reader = await RvfaReader.fromFile(rvfaPath); } catch (e) { + return failResult(sec, [`Failed to read RVFA: ${(e as Error).message}`]); + } + const rh = reader.getHeader(); + + // Verify target matches + const errs: string[] = []; + if (rh.name !== 
header.targetApplianceName) + errs.push(`Name mismatch: patch="${header.targetApplianceName}", file="${rh.name}"`); + if (rh.appVersion !== header.targetApplianceVersion) + errs.push(`Version mismatch: patch="${header.targetApplianceVersion}", file="${rh.appVersion}"`); + if (errs.length) return failResult(sec, errs); + if (!rh.sections.find((s) => s.id === sec)) + return failResult(sec, [`Section "${sec}" not found in appliance`]); + + // Backup + let backupPath: string | undefined; + if (doBackup) { backupPath = rvfaPath + '.bak'; await copyFile(rvfaPath, backupPath); } + + // Extract new section data from patch (decompress if needed) + let newData = patchData(patchBuf).section; + if (header.compression === 'gzip') newData = gunzipSync(newData); + + // Rebuild RVFA with replaced section + const writer = new RvfaWriter({ ...rh, sections: [] }); + for (const s of rh.sections) { + const comp = s.compression === 'zstd' ? 'gzip' : s.compression; + if (s.id === sec) { + writer.addSection(s.id, newData, { compression: comp, type: s.type }); + } else { + writer.addSection(s.id, reader.extractSection(s.id), { compression: comp, type: s.type }); + } + } + const newRvfa = writer.build(); + + // Atomic write (tmp + rename) + const tmpPath = rvfaPath + `.tmp.${Date.now()}`; + try { + await writeFile(tmpPath, newRvfa); + await rename(tmpPath, rvfaPath); + } catch (e) { + await unlink(tmpPath).catch(() => {}); + if (backupPath) await copyFile(backupPath, rvfaPath).catch(() => {}); + return failResult(sec, [`Atomic write failed: ${(e as Error).message}`], { backupPath }); + } + + // Post-patch verification with rollback + if (doVerify) { + try { + const vr = await RvfaReader.fromFile(rvfaPath); + const vResult = vr.verify(); + if (!vResult.valid) { + if (backupPath) await copyFile(backupPath, rvfaPath).catch(() => {}); + return failResult(sec, [`Post-patch verification failed: ${vResult.errors.join('; ')}`], + { backupPath, newSize: newRvfa.length }); + } + } catch (e) { + if 
(backupPath) await copyFile(backupPath, rvfaPath).catch(() => {}); + return failResult(sec, [`Post-patch verify error: ${(e as Error).message}`], + { backupPath, newSize: newRvfa.length }); + } + } + + return { success: true, backupPath, newSize: newRvfa.length, patchedSection: sec, errors: [] }; + } +} + +// ── RvfaPublisher ──────────────────────────────────────────── +export class RvfaPublisher { + private jwt: string; + private gw: string; + private api: string; + + constructor(config: PublishConfig) { + this.jwt = config.pinataJwt || process.env.PINATA_API_JWT || ''; + this.gw = (config.gatewayUrl || DEFAULT_GW).replace(/\/+$/, ''); + this.api = (config.apiUrl || DEFAULT_API).replace(/\/+$/, ''); + if (!this.jwt) throw new Error('Pinata JWT required (config.pinataJwt or PINATA_API_JWT)'); + } + + private async upload( + fileName: string, data: Buffer, kv: Record, + ): Promise { + const meta = JSON.stringify({ name: fileName, keyvalues: kv }); + const { body, ct } = multipart('file', fileName, data, meta); + const res = await pinataReq('POST', `${this.api}/pinning/pinFileToIPFS`, this.jwt, body, ct); + if (res.status !== 200) + throw new Error(`Pinata upload failed (HTTP ${res.status}): ${JSON.stringify(res.data)}`); + const r = res.data as { IpfsHash: string; PinSize: number }; + return { + cid: r.IpfsHash, size: r.PinSize, + gatewayUrl: `${this.gw}/ipfs/${r.IpfsHash}`, pinataUrl: `${this.api}/pinning/pins/${r.IpfsHash}`, + }; + } + + async publish(rvfaPath: string, meta?: PublishMetadata): Promise { + const data = await readFile(rvfaPath); + const name = meta?.name || rvfaPath.split('/').pop() || 'appliance.rvf'; + return this.upload(name, data, { + type: 'rvfa-appliance', version: meta?.version || '', + profile: meta?.profile || '', description: meta?.description || '', + }); + } + + async publishPatch(patchBuf: Buffer, meta?: PublishMetadata): Promise { + const name = meta?.name || `patch-${Date.now()}.rvfp`; + return this.upload(name, patchBuf, { + type: 
'rvfp-patch', version: meta?.version || '', description: meta?.description || '', + }); + } + + async fetch(cid: string, outputPath: string): Promise { + const data = await httpGet(`${this.gw}/ipfs/${cid}`); + await mkdir(dirname(outputPath), { recursive: true }); + await writeFile(outputPath, data); + } + + async list(): Promise { + const res = await pinataReq('GET', `${this.api}/data/pinList?status=pinned&pageLimit=100`, this.jwt); + if (res.status !== 200) + throw new Error(`Pinata list failed (HTTP ${res.status}): ${JSON.stringify(res.data)}`); + const d = res.data as { rows: Array<{ + ipfs_pin_hash: string; metadata?: { name?: string }; size: number; date_pinned: string; + }> }; + return (d.rows || []).map((r) => ({ + cid: r.ipfs_pin_hash, name: r.metadata?.name || r.ipfs_pin_hash, + size: r.size, date: r.date_pinned, + })); + } + + async pin(cid: string, name?: string): Promise { + const body = Buffer.from(JSON.stringify({ hashToPin: cid, pinataMetadata: { name: name || cid } })); + const res = await pinataReq('POST', `${this.api}/pinning/pinByHash`, this.jwt, body, 'application/json'); + if (res.status !== 200) + throw new Error(`Pinata pin failed (HTTP ${res.status}): ${JSON.stringify(res.data)}`); + } +} + +// ── Convenience exports ────────────────────────────────────── +export function createPublisher(config?: Partial): RvfaPublisher { + return new RvfaPublisher({ pinataJwt: config?.pinataJwt, gatewayUrl: config?.gatewayUrl, apiUrl: config?.apiUrl }); +} + +export async function createAndVerifyPatch( + options: CreatePatchOptions, +): Promise<{ patch: Buffer; verification: PatchVerifyResult }> { + const patch = await RvfaPatcher.createPatch(options); + const verification = await RvfaPatcher.verifyPatch(patch); + return { patch, verification }; +} diff --git a/v3/@claude-flow/cli/src/appliance/rvfa-signing.ts b/v3/@claude-flow/cli/src/appliance/rvfa-signing.ts new file mode 100644 index 0000000000..37c7730152 --- /dev/null +++ 
b/v3/@claude-flow/cli/src/appliance/rvfa-signing.ts @@ -0,0 +1,461 @@ +/** + * RVFA Ed25519 Code Signing -- Digital signatures for RVFA appliance files. + * + * Provides tamper detection and publisher identity verification using + * Ed25519 (RFC 8032) via Node.js native crypto. Zero external dependencies. + * + * @module @claude-flow/cli/appliance/rvfa-signing + */ + +import { + generateKeyPairSync, createHash, sign, verify, + createPublicKey, createPrivateKey, + type KeyObject, +} from 'node:crypto'; +import { readFile, writeFile, stat, chmod, mkdir } from 'node:fs/promises'; +import { dirname } from 'node:path'; + +// ── Constants ──────────────────────────────────────────────── + +const PREAMBLE_SIZE = 12; // 4B magic + 4B version + 4B header_len +const SHA256_SIZE = 32; +const KEY_FILE_MODE = 0o600; + +// ── Public Interfaces ──────────────────────────────────────── + +export interface RvfaKeyPair { + publicKey: Buffer; + privateKey: Buffer; + fingerprint: string; +} + +export interface SignatureMetadata { + algorithm: 'ed25519'; + publicKeyFingerprint: string; + signature: string; + signedAt: string; + signedBy?: string; + scope: 'full' | 'sections'; +} + +export interface VerifyResult { + valid: boolean; + signerFingerprint?: string; + signedAt?: string; + signedBy?: string; + errors: string[]; +} + +// ── Key Management ─────────────────────────────────────────── + +/** Compute the fingerprint of a public key: first 16 hex chars of its SHA256. */ +function computeFingerprint(publicKeyPem: string): string { + return createHash('sha256') + .update(publicKeyPem, 'utf-8') + .digest('hex') + .slice(0, 16); +} + +/** + * Generate a new Ed25519 key pair for RVFA signing. 
+ */ +export async function generateKeyPair(): Promise { + const { publicKey, privateKey } = generateKeyPairSync('ed25519', { + publicKeyEncoding: { type: 'spki', format: 'pem' }, + privateKeyEncoding: { type: 'pkcs8', format: 'pem' }, + }); + + const pubBuf = Buffer.from(publicKey as string, 'utf-8'); + const privBuf = Buffer.from(privateKey as string, 'utf-8'); + const fingerprint = computeFingerprint(publicKey as string); + + return { publicKey: pubBuf, privateKey: privBuf, fingerprint }; +} + +/** + * Save a key pair to disk as PEM files. + * + * @param keyPair The key pair to persist. + * @param dir Directory to write files into. + * @param name Base name for the key files (default: 'rvfa-signing'). + * @returns Paths to the written public and private key files. + */ +export async function saveKeyPair( + keyPair: RvfaKeyPair, + dir: string, + name = 'rvfa-signing', +): Promise<{ publicKeyPath: string; privateKeyPath: string }> { + await mkdir(dir, { recursive: true }); + + const pubPath = `${dir}/${name}.pub`; + const privPath = `${dir}/${name}.key`; + + await writeFile(pubPath, keyPair.publicKey); + await writeFile(privPath, keyPair.privateKey, { mode: KEY_FILE_MODE }); + + // Ensure private key has restrictive permissions even on existing files + await chmod(privPath, KEY_FILE_MODE); + + return { publicKeyPath: pubPath, privateKeyPath: privPath }; +} + +/** + * Load a key pair from PEM files on disk. + * + * @param dir Directory containing the key files. + * @param name Base name for the key files (default: 'rvfa-signing'). 
+ */ +export async function loadKeyPair( + dir: string, + name = 'rvfa-signing', +): Promise { + const pubPath = `${dir}/${name}.pub`; + const privPath = `${dir}/${name}.key`; + + const publicKey = await readFile(pubPath); + const privateKey = await readFile(privPath); + + // Warn if private key permissions are too open + const privStat = await stat(privPath); + const mode = privStat.mode & 0o777; + if (mode & 0o077) { + console.warn( + `[rvfa-signing] WARNING: Private key ${privPath} has open permissions ` + + `(${mode.toString(8)}). Consider running: chmod 600 ${privPath}`, + ); + } + + const fingerprint = computeFingerprint(publicKey.toString('utf-8')); + return { publicKey, privateKey, fingerprint }; +} + +/** + * Load a public key from a single PEM file. + */ +export async function loadPublicKey(path: string): Promise { + return readFile(path); +} + +// ── Internal Helpers ───────────────────────────────────────── + +/** + * Recursively sort object keys for canonical JSON serialization. + * Produces deterministic output regardless of insertion order. + */ +function canonicalJson(value: unknown): string { + return JSON.stringify(value, (_key, val) => { + if (val !== null && typeof val === 'object' && !Array.isArray(val) && !Buffer.isBuffer(val)) { + const sorted: Record = {}; + for (const k of Object.keys(val as Record).sort()) { + sorted[k] = (val as Record)[k]; + } + return sorted; + } + return val; + }); +} + +/** + * Parse an RVFA binary into its components without full validation. + * Returns the header object, header JSON bytes, section data region, and footer. 
+ */ +function parseRvfaBinary(buf: Buffer): { + header: Record; + headerStart: number; + headerEnd: number; + sectionData: Buffer; + footer: Buffer; +} { + if (buf.length < PREAMBLE_SIZE + SHA256_SIZE) { + throw new Error('Buffer too small to be a valid RVFA file'); + } + + const magic = buf.subarray(0, 4).toString('ascii'); + if (magic !== 'RVFA') { + throw new Error(`Invalid RVFA magic: expected "RVFA", got "${magic}"`); + } + + const headerLen = buf.readUInt32LE(8); + const headerStart = PREAMBLE_SIZE; + const headerEnd = headerStart + headerLen; + + if (headerEnd > buf.length - SHA256_SIZE) { + throw new Error('Header length extends beyond buffer'); + } + + const headerJson = buf.subarray(headerStart, headerEnd).toString('utf-8'); + let header: Record; + try { + header = JSON.parse(headerJson) as Record; + } catch { + throw new Error('Failed to parse RVFA header JSON'); + } + + const footer = buf.subarray(buf.length - SHA256_SIZE); + const sectionData = buf.subarray(headerEnd, buf.length - SHA256_SIZE); + + return { header, headerStart, headerEnd, sectionData, footer }; +} + +/** + * Compute the signing digest for an RVFA file. + * + * The digest is SHA256 of: canonical_header_json (without signature field) + * + section_data_bytes + * + footer_32_bytes + */ +function computeSigningDigest( + header: Record, + sectionData: Buffer, + footer: Buffer, +): Buffer { + // Strip signature field from header for digest computation + const stripped = { ...header }; + delete stripped.signature; + + const canonical = Buffer.from(canonicalJson(stripped), 'utf-8'); + + return createHash('sha256') + .update(canonical) + .update(sectionData) + .update(footer) + .digest(); +} + +/** Convert a Buffer or PEM string into a KeyObject. */ +function toPrivateKeyObject(key: Buffer | string): KeyObject { + const pem = Buffer.isBuffer(key) ? key.toString('utf-8') : key; + return createPrivateKey(pem); +} + +/** Convert a Buffer or PEM string into a KeyObject. 
*/ +function toPublicKeyObject(key: Buffer | string): KeyObject { + const pem = Buffer.isBuffer(key) ? key.toString('utf-8') : key; + return createPublicKey(pem); +} + +/** + * Rebuild the RVFA binary with an updated header. + * + * Preserves the original preamble version, recalculates header length, + * and keeps section data and footer intact. + */ +function rebuildRvfa( + originalBuf: Buffer, + newHeader: Record, + sectionData: Buffer, + footer: Buffer, +): Buffer { + const headerJson = Buffer.from(JSON.stringify(newHeader), 'utf-8'); + + // Preamble: magic + version + new header length + const preamble = Buffer.alloc(PREAMBLE_SIZE); + originalBuf.copy(preamble, 0, 0, 8); // magic + version unchanged + preamble.writeUInt32LE(headerJson.length, 8); + + return Buffer.concat([preamble, headerJson, sectionData, footer]); +} + +// ── RvfaSigner ─────────────────────────────────────────────── + +/** + * Signs RVFA appliance files and data with Ed25519. + */ +export class RvfaSigner { + private readonly keyObj: KeyObject; + private readonly fingerprint: string; + + constructor(privateKey: Buffer | string) { + this.keyObj = toPrivateKeyObject(privateKey); + + // Derive public key to compute fingerprint + const pubPem = createPublicKey(this.keyObj) + .export({ type: 'spki', format: 'pem' }) as string; + this.fingerprint = computeFingerprint(pubPem); + } + + /** + * Sign an RVFA appliance file in-place. + * + * Algorithm: + * 1. Read and parse the RVFA binary + * 2. Strip any existing signature from the header + * 3. Compute SHA256 of [canonical_header + section_data + footer] + * 4. Sign the digest with Ed25519 + * 5. Embed signature metadata into the header + * 6. Write the updated binary back to the file + * + * @param rvfaPath Path to the .rvf appliance file. + * @param signedBy Optional publisher name. + * @returns The signature metadata that was embedded. 
+ */ + async signAppliance(rvfaPath: string, signedBy?: string): Promise { + const buf = await readFile(rvfaPath); + const { header, sectionData, footer } = parseRvfaBinary(buf); + + // Compute digest over header (without signature) + sections + footer + const digest = computeSigningDigest(header, sectionData, footer); + + // Ed25519 sign + const sig = sign(null, digest, this.keyObj); + + const metadata: SignatureMetadata = { + algorithm: 'ed25519', + publicKeyFingerprint: this.fingerprint, + signature: sig.toString('hex'), + signedAt: new Date().toISOString(), + signedBy, + scope: 'full', + }; + + // Embed signature in header and rebuild + header.signature = metadata; + const rebuilt = rebuildRvfa(buf, header, sectionData, footer); + await writeFile(rvfaPath, rebuilt); + + return metadata; + } + + /** + * Sign a section footer hash (detached signature). + * + * @param footerHash The 32-byte SHA256 footer hash from an RVFA file. + * @returns Hex-encoded Ed25519 signature. + */ + async signSections(footerHash: Buffer): Promise { + if (footerHash.length !== SHA256_SIZE) { + throw new Error( + `Footer hash must be ${SHA256_SIZE} bytes, got ${footerHash.length}`, + ); + } + const sig = sign(null, footerHash, this.keyObj); + return sig.toString('hex'); + } + + /** + * Sign an RVFP patch file (detached signature). + * + * @param patchData The raw patch binary data. + * @returns Hex-encoded Ed25519 signature. + */ + async signPatch(patchData: Buffer): Promise { + const digest = createHash('sha256').update(patchData).digest(); + const sig = sign(null, digest, this.keyObj); + return sig.toString('hex'); + } +} + +// ── RvfaVerifier ───────────────────────────────────────────── + +/** + * Verifies Ed25519 signatures on RVFA appliance files and data. 
+ */ +export class RvfaVerifier { + private readonly keyObj: KeyObject; + private readonly fingerprint: string; + + constructor(publicKey: Buffer | string) { + this.keyObj = toPublicKeyObject(publicKey); + const pem = Buffer.isBuffer(publicKey) ? publicKey.toString('utf-8') : publicKey; + this.fingerprint = computeFingerprint(pem); + } + + /** + * Verify the Ed25519 signature embedded in an RVFA appliance file. + * + * @param rvfaPath Path to the .rvf appliance file. + * @returns Verification result with details and any errors. + */ + async verifyAppliance(rvfaPath: string): Promise { + const errors: string[] = []; + + let buf: Buffer; + try { + buf = await readFile(rvfaPath); + } catch (err) { + return { valid: false, errors: [`Failed to read file: ${(err as Error).message}`] }; + } + + let parsed: ReturnType; + try { + parsed = parseRvfaBinary(buf); + } catch (err) { + return { valid: false, errors: [`Invalid RVFA file: ${(err as Error).message}`] }; + } + + const { header, sectionData, footer } = parsed; + + // Extract signature metadata from header + const sigRaw = header.signature; + if (!sigRaw || typeof sigRaw !== 'object') { + return { valid: false, errors: ['No signature found in RVFA header'] }; + } + + const sigMeta = sigRaw as Record; + if (sigMeta.algorithm !== 'ed25519') { + errors.push(`Unsupported algorithm: ${String(sigMeta.algorithm)}`); + return { valid: false, errors }; + } + + if (typeof sigMeta.signature !== 'string' || !sigMeta.signature) { + errors.push('Signature field is missing or empty'); + return { valid: false, errors }; + } + + // Recompute the digest the same way the signer did + const digest = computeSigningDigest(header, sectionData, footer); + + // Verify + let sigBuf: Buffer; + try { + sigBuf = Buffer.from(sigMeta.signature as string, 'hex'); + } catch { + errors.push('Signature is not valid hex'); + return { valid: false, errors }; + } + + let valid: boolean; + try { + valid = verify(null, digest, this.keyObj, sigBuf); + } catch 
(err) { + errors.push(`Verification error: ${(err as Error).message}`); + return { valid: false, errors }; + } + + if (!valid) { + errors.push('Ed25519 signature verification failed: data may be tampered'); + } + + return { + valid, + signerFingerprint: sigMeta.publicKeyFingerprint as string | undefined, + signedAt: sigMeta.signedAt as string | undefined, + signedBy: sigMeta.signedBy as string | undefined, + errors, + }; + } + + /** + * Verify a detached Ed25519 signature over arbitrary data. + * + * @param data The data that was signed. + * @param signature Hex-encoded Ed25519 signature. + */ + async verifyDetached(data: Buffer, signature: string): Promise { + const digest = createHash('sha256').update(data).digest(); + const sigBuf = Buffer.from(signature, 'hex'); + return verify(null, digest, this.keyObj, sigBuf); + } + + /** + * Verify an RVFP patch file signature. + * + * @param patchData The raw patch binary data. + * @param signature Hex-encoded Ed25519 signature. + */ + async verifyPatch(patchData: Buffer, signature: string): Promise { + const digest = createHash('sha256').update(patchData).digest(); + const sigBuf = Buffer.from(signature, 'hex'); + return verify(null, digest, this.keyObj, sigBuf); + } +} diff --git a/v3/@claude-flow/cli/src/commands/appliance-advanced.ts b/v3/@claude-flow/cli/src/commands/appliance-advanced.ts new file mode 100644 index 0000000000..2be0051d67 --- /dev/null +++ b/v3/@claude-flow/cli/src/commands/appliance-advanced.ts @@ -0,0 +1,219 @@ +/** + * V3 CLI Appliance Advanced Commands (Phase 3-4) + * Sign, publish, and hot-patch RVFA appliances. 
+ */ + +import type { Command, CommandContext, CommandResult } from '../types.js'; +import { output } from '../output.js'; + +function fmtSize(bytes: number): string { + if (bytes < 1024) return `${bytes} B`; + if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`; + if (bytes < 1024 * 1024 * 1024) return `${(bytes / (1024 * 1024)).toFixed(1)} MB`; + return `${(bytes / (1024 * 1024 * 1024)).toFixed(2)} GB`; +} + +function errMsg(err: unknown): string { + return err instanceof Error ? err.message : String(err); +} + +const fail = (msg: string, detail?: string): CommandResult => { + output.printError(msg, detail); + return { success: false, exitCode: 1 }; +}; + +function hdr(title: string): void { + output.writeln(); + output.writeln(output.bold(title)); + output.writeln(output.dim('─'.repeat(50))); + output.writeln(); +} + +async function requireFile(file: string): Promise { + const fs = await import('fs'); + if (!fs.existsSync(file)) { + output.printError(`File not found: ${file}`); + return false; + } + return true; +} + +// SIGN +export const signCommand: Command = { + name: 'sign', + description: 'Sign an RVFA appliance with Ed25519 for tamper detection', + options: [ + { name: 'file', short: 'f', type: 'string', description: 'Path to .rvf file', required: true }, + { name: 'key', short: 'k', type: 'string', description: 'Path to Ed25519 private key (PEM)' }, + { name: 'generate-keys', type: 'boolean', description: 'Generate a new key pair' }, + { name: 'key-dir', type: 'string', description: 'Directory for key storage', default: '.rvfa-keys' }, + { name: 'signer', type: 'string', description: 'Publisher name for signature metadata' }, + ], + action: async (ctx: CommandContext): Promise => { + const file = ctx.flags.file as string; + const keyPath = ctx.flags.key as string | undefined; + const genKeys = ctx.flags['generate-keys'] as boolean; + const keyDir = ctx.flags['key-dir'] as string || '.rvfa-keys'; + const signer = ctx.flags.signer as string | 
undefined; + if (!file) return fail('--file is required'); + + try { + const signing = await import('../appliance/rvfa-signing.js'); + + if (genKeys) { + hdr('Generating Ed25519 Key Pair'); + const kp = await signing.generateKeyPair(); + const paths = await signing.saveKeyPair(kp, keyDir); + output.printSuccess(`Public key: ${paths.publicKeyPath}`); + output.printSuccess(`Private key: ${paths.privateKeyPath}`); + output.printInfo(`Fingerprint: ${kp.fingerprint}`); + output.writeln(output.dim(' Keep the private key secure. Share only the public key.')); + output.writeln(); + } + + if (!(await requireFile(file))) return { success: false, exitCode: 1 }; + hdr('Signing RVFA Appliance'); + + let privateKey: Buffer; + if (keyPath) { + const fs = await import('fs'); + privateKey = fs.readFileSync(keyPath); + } else { + const kp = await signing.loadKeyPair(keyDir); + privateKey = kp.privateKey; + } + + const s = new signing.RvfaSigner(privateKey); + const meta = await s.signAppliance(file, signer); + output.printSuccess('Appliance signed successfully'); + output.printInfo(`Algorithm: ${meta.algorithm}`); + output.printInfo(`Fingerprint: ${meta.publicKeyFingerprint}`); + output.printInfo(`Signed at: ${meta.signedAt}`); + if (signer) output.printInfo(`Signed by: ${signer}`); + output.printInfo(`Signature: ${meta.signature.slice(0, 32)}...`); + return { success: true, data: meta }; + } catch (err) { + return fail('Signing failed', errMsg(err)); + } + }, +}; + +// PUBLISH +export const publishCommand: Command = { + name: 'publish', + description: 'Publish an RVFA appliance to IPFS via Pinata', + options: [ + { name: 'file', short: 'f', type: 'string', description: 'Path to .rvf file', required: true }, + { name: 'name', short: 'n', type: 'string', description: 'Publication name' }, + { name: 'description', type: 'string', description: 'Description' }, + ], + action: async (ctx: CommandContext): Promise => { + const file = ctx.flags.file as string; + if (!file) return 
fail('--file is required'); + if (!(await requireFile(file))) return { success: false, exitCode: 1 }; + + try { + const dist = await import('../appliance/rvfa-distribution.js'); + + hdr('Publishing RVFA to IPFS'); + output.printInfo(`File: ${file}`); + output.writeln(); + + const publisher = dist.createPublisher(); + const result = await publisher.publish(file, { + name: ctx.flags.name as string | undefined, + description: ctx.flags.description as string | undefined, + }); + + output.printSuccess('Published successfully'); + output.printInfo(`CID: ${output.bold(result.cid)}`); + output.printInfo(`Size: ${fmtSize(result.size)}`); + output.printInfo(`Gateway: ${result.gatewayUrl}`); + return { success: true, data: result }; + } catch (err) { + return fail('Publishing failed', errMsg(err)); + } + }, +}; + +// UPDATE (hot-patch) +export const updateAppCommand: Command = { + name: 'update', + description: 'Hot-patch a section in an RVFA appliance', + options: [ + { name: 'file', short: 'f', type: 'string', description: 'Path to .rvf file', required: true }, + { name: 'section', short: 's', type: 'string', description: 'Section to patch (e.g. 
ruflo, models)', required: true }, + { name: 'patch', short: 'p', type: 'string', description: 'Path to .rvfp patch file' }, + { name: 'data', short: 'd', type: 'string', description: 'Path to new section data (creates patch automatically)' }, + { name: 'version', type: 'string', description: 'Patch version', default: '0.0.1' }, + { name: 'no-backup', type: 'boolean', description: 'Skip backup creation' }, + { name: 'public-key', type: 'string', description: 'Path to public key for patch verification' }, + ], + action: async (ctx: CommandContext): Promise => { + const file = ctx.flags.file as string; + const section = ctx.flags.section as string; + const patchPath = ctx.flags.patch as string | undefined; + const dataPath = ctx.flags.data as string | undefined; + if (!file || !section) return fail('--file and --section are required'); + if (!patchPath && !dataPath) return fail('Provide --patch (RVFP file) or --data (raw section data)'); + if (!(await requireFile(file))) return { success: false, exitCode: 1 }; + + try { + const dist = await import('../appliance/rvfa-distribution.js'); + const { RvfaReader } = await import('../appliance/rvfa-format.js'); + const fs = await import('fs'); + + hdr('RVFA Hot-Patch Update'); + output.printInfo(`Appliance: ${file}`); + output.printInfo(`Section: ${section}`); + output.writeln(); + + let patchBuf: Buffer; + + if (patchPath) { + if (!(await requireFile(patchPath))) return { success: false, exitCode: 1 }; + patchBuf = fs.readFileSync(patchPath); + output.printInfo(`Patch file: ${patchPath} (${fmtSize(patchBuf.length)})`); + } else { + if (!(await requireFile(dataPath!))) return { success: false, exitCode: 1 }; + const newData = fs.readFileSync(dataPath!); + const reader = await RvfaReader.fromFile(file); + const appHdr = reader.getHeader(); + output.printInfo(`Creating patch for section "${section}" (${fmtSize(newData.length)} new data)`); + patchBuf = await dist.RvfaPatcher.createPatch({ + targetName: appHdr.name, + 
targetVersion: appHdr.appVersion, + sectionId: section, + sectionData: newData, + patchVersion: ctx.flags.version as string || '0.0.1', + compression: 'gzip', + }); + } + + let pubKey: Buffer | undefined; + if (ctx.flags['public-key']) { + const pkPath = ctx.flags['public-key'] as string; + if (!(await requireFile(pkPath))) return { success: false, exitCode: 1 }; + pubKey = fs.readFileSync(pkPath); + } + + const result = await dist.RvfaPatcher.applyPatch(file, patchBuf, { + backup: !(ctx.flags['no-backup'] as boolean), + verify: true, + publicKey: pubKey, + }); + + if (result.success) { + output.printSuccess(`Section "${result.patchedSection}" updated successfully`); + output.printInfo(`New size: ${fmtSize(result.newSize)}`); + if (result.backupPath) output.printInfo(`Backup: ${result.backupPath}`); + } else { + output.printError('Patch failed'); + result.errors.forEach(e => output.writeln(` ${output.error('X')} ${e}`)); + } + return { success: result.success, exitCode: result.success ? 
0 : 1, data: result }; + } catch (err) { + return fail('Update failed', errMsg(err)); + } + }, +}; diff --git a/v3/@claude-flow/cli/src/commands/appliance.ts b/v3/@claude-flow/cli/src/commands/appliance.ts index f7f0263f0a..ce2cb93874 100644 --- a/v3/@claude-flow/cli/src/commands/appliance.ts +++ b/v3/@claude-flow/cli/src/commands/appliance.ts @@ -1,10 +1,11 @@ /** * V3 CLI Appliance Command - * Self-contained RVFA appliance management (build, inspect, verify, extract, run) + * Self-contained RVFA appliance management (build, inspect, verify, extract, run, sign, publish, update) */ import type { Command, CommandContext, CommandResult } from '../types.js'; import { output } from '../output.js'; +import { signCommand, publishCommand, updateAppCommand } from './appliance-advanced.js'; interface RvfaSection { id: string; @@ -402,13 +403,16 @@ export const applianceCommand: Command = { name: 'appliance', description: 'Self-contained RVFA appliance management (build, inspect, verify, extract, run)', aliases: ['rvfa'], - subcommands: [buildCommand, inspectCommand, verifyCommand, extractCommand, runCommand], + subcommands: [buildCommand, inspectCommand, verifyCommand, extractCommand, runCommand, signCommand, publishCommand, updateAppCommand], examples: [ { command: 'ruflo appliance build -p cloud', description: 'Build a cloud appliance' }, { command: 'ruflo appliance inspect -f ruflo.rvf', description: 'Inspect appliance contents' }, { command: 'ruflo appliance verify -f ruflo.rvf', description: 'Verify integrity' }, { command: 'ruflo appliance extract -f ruflo.rvf', description: 'Extract sections' }, { command: 'ruflo appliance run -f ruflo.rvf', description: 'Boot and run appliance' }, + { command: 'ruflo appliance sign -f ruflo.rvf --generate-keys', description: 'Generate keys and sign' }, + { command: 'ruflo appliance publish -f ruflo.rvf', description: 'Publish to IPFS via Pinata' }, + { command: 'ruflo appliance update -f ruflo.rvf -s ruflo -d ./new-ruflo.bin', 
description: 'Hot-patch a section' }, ], action: async (): Promise => { output.writeln(); @@ -422,6 +426,9 @@ export const applianceCommand: Command = { 'verify - Verify appliance integrity and run capability tests', 'extract - Extract all sections from an appliance', 'run - Boot and run an RVFA appliance', + 'sign - Sign an appliance with Ed25519 for tamper detection', + 'publish - Publish an appliance to IPFS via Pinata', + 'update - Hot-patch a section in an appliance', ]); output.writeln(); output.writeln('Profiles:'); diff --git a/v3/__tests__/appliance/gguf-engine.test.ts b/v3/__tests__/appliance/gguf-engine.test.ts new file mode 100644 index 0000000000..6ecc6da13e --- /dev/null +++ b/v3/__tests__/appliance/gguf-engine.test.ts @@ -0,0 +1,541 @@ +/** + * GGUF inference engine tests. + * + * Uses the Node.js built-in test runner (node:test). + * Run: npx tsx --test v3/__tests__/appliance/gguf-engine.test.ts + */ + +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { writeFileSync, unlinkSync, mkdirSync, existsSync, readFileSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { createHash } from 'node:crypto'; +import { + parseGgufHeader, + GgufEngine, + type GgufMetadata, +} from '../../@claude-flow/cli/src/appliance/gguf-engine.js'; + +// --------------------------------------------------------------------------- +// GGUF Binary Helpers +// --------------------------------------------------------------------------- + +const GGUF_MAGIC_LE = 0x46554747; // "GGUF" in little-endian + +/** + * Write a GGUF string field: [length u64 LE][utf-8 bytes]. + */ +function ggufString(str: string): Buffer { + const strBuf = Buffer.from(str, 'utf-8'); + const lenBuf = Buffer.alloc(8); + lenBuf.writeBigUInt64LE(BigInt(strBuf.length), 0); + return Buffer.concat([lenBuf, strBuf]); +} + +/** + * Write a GGUF KV entry: [key_string][value_type u32 LE][value_data]. 
+ * Supports STRING (type 8) and UINT32 (type 4) values. + */ +function ggufKvString(key: string, value: string): Buffer { + const keyBuf = ggufString(key); + const typeBuf = Buffer.alloc(4); + typeBuf.writeUInt32LE(8, 0); // STRING type + const valueBuf = ggufString(value); + return Buffer.concat([keyBuf, typeBuf, valueBuf]); +} + +function ggufKvUint32(key: string, value: number): Buffer { + const keyBuf = ggufString(key); + const typeBuf = Buffer.alloc(4); + typeBuf.writeUInt32LE(4, 0); // UINT32 type + const valueBuf = Buffer.alloc(4); + valueBuf.writeUInt32LE(value, 0); + return Buffer.concat([keyBuf, typeBuf, valueBuf]); +} + +/** + * Build a minimal valid GGUF v3 binary buffer with the given KV pairs. + */ +function buildGgufBuffer(options?: { + version?: number; + tensorCount?: number; + kvEntries?: Buffer[]; +}): Buffer { + const version = options?.version ?? 3; + const tensorCount = options?.tensorCount ?? 0; + const kvEntries = options?.kvEntries ?? [ + ggufKvString('general.architecture', 'llama'), + ggufKvString('general.name', 'test-model'), + ]; + + const magicBuf = Buffer.alloc(4); + magicBuf.writeUInt32LE(GGUF_MAGIC_LE, 0); + + const versionBuf = Buffer.alloc(4); + versionBuf.writeUInt32LE(version, 0); + + const tensorCountBuf = Buffer.alloc(8); + tensorCountBuf.writeBigUInt64LE(BigInt(tensorCount), 0); + + const kvCountBuf = Buffer.alloc(8); + kvCountBuf.writeBigUInt64LE(BigInt(kvEntries.length), 0); + + return Buffer.concat([ + magicBuf, + versionBuf, + tensorCountBuf, + kvCountBuf, + ...kvEntries, + ]); +} + +// --------------------------------------------------------------------------- +// Temp file management +// --------------------------------------------------------------------------- + +const cleanupPaths: string[] = []; + +function tmpPath(suffix: string): string { + const p = join( + tmpdir(), + `gguf-test-${Date.now()}-${Math.random().toString(36).slice(2)}${suffix}`, + ); + cleanupPaths.push(p); + return p; +} + +function 
writeGgufFile(buf: Buffer): string { + const p = tmpPath('.gguf'); + writeFileSync(p, buf); + return p; +} + +afterEach(() => { + for (const p of cleanupPaths) { + try { unlinkSync(p); } catch { /* ignore */ } + } + cleanupPaths.length = 0; +}); + +// --------------------------------------------------------------------------- +// 1. GGUF header parsing +// --------------------------------------------------------------------------- + +describe('parseGgufHeader', () => { + it('parses a minimal GGUF v3 file with string KV entries', async () => { + const buf = buildGgufBuffer(); + const filePath = writeGgufFile(buf); + + const meta = await parseGgufHeader(filePath); + + assert.equal(meta.magic, 'GGUF'); + assert.equal(meta.version, 3); + assert.equal(meta.tensorCount, 0); + assert.equal(meta.kvCount, 2); + assert.equal(meta.architecture, 'llama'); + assert.equal(meta.name, 'test-model'); + }); + + it('supports GGUF version 2', async () => { + const buf = buildGgufBuffer({ version: 2 }); + const filePath = writeGgufFile(buf); + + const meta = await parseGgufHeader(filePath); + assert.equal(meta.version, 2); + assert.equal(meta.architecture, 'llama'); + }); + + it('supports GGUF version 3', async () => { + const buf = buildGgufBuffer({ version: 3 }); + const filePath = writeGgufFile(buf); + + const meta = await parseGgufHeader(filePath); + assert.equal(meta.version, 3); + }); + + it('parses tensor count correctly', async () => { + const buf = buildGgufBuffer({ tensorCount: 42 }); + const filePath = writeGgufFile(buf); + + const meta = await parseGgufHeader(filePath); + assert.equal(meta.tensorCount, 42); + }); + + it('parses integer KV entries (context_length, embedding_length)', async () => { + const kvEntries = [ + ggufKvString('general.architecture', 'llama'), + ggufKvString('general.name', 'test-model'), + ggufKvUint32('llama.context_length', 4096), + ggufKvUint32('llama.embedding_length', 2048), + ]; + const buf = buildGgufBuffer({ kvEntries }); + const filePath = 
writeGgufFile(buf); + + const meta = await parseGgufHeader(filePath); + assert.equal(meta.contextLength, 4096); + assert.equal(meta.embeddingLength, 2048); + }); + + it('rejects a file with invalid magic bytes', async () => { + const buf = Buffer.alloc(64); + buf.writeUInt32LE(0xDEADBEEF, 0); // wrong magic + buf.writeUInt32LE(3, 4); // version + const filePath = writeGgufFile(buf); + + await assert.rejects( + () => parseGgufHeader(filePath), + /Invalid GGUF magic/, + ); + }); + + it('rejects unsupported GGUF version (version 1)', async () => { + const buf = buildGgufBuffer({ version: 1 }); + const filePath = writeGgufFile(buf); + + await assert.rejects( + () => parseGgufHeader(filePath), + /Unsupported GGUF version/, + ); + }); + + it('rejects unsupported GGUF version (version 99)', async () => { + const buf = buildGgufBuffer({ version: 99 }); + const filePath = writeGgufFile(buf); + + await assert.rejects( + () => parseGgufHeader(filePath), + /Unsupported GGUF version/, + ); + }); + + it('handles a truncated buffer gracefully (partial KV data)', async () => { + // Build a valid header but truncate partway through KV entries + const fullBuf = buildGgufBuffer({ + kvEntries: [ + ggufKvString('general.architecture', 'llama'), + ggufKvString('general.name', 'test-model'), + ], + }); + // Truncate the buffer to cut off the second KV entry + const truncatedLen = 24 + 30; // preamble + first KV, partial second + const truncated = fullBuf.subarray(0, Math.min(truncatedLen, fullBuf.length)); + const filePath = writeGgufFile(truncated); + + // Should not throw -- metadata is partially parsed + const meta = await parseGgufHeader(filePath); + assert.equal(meta.magic, 'GGUF'); + assert.equal(meta.version, 3); + assert.equal(meta.kvCount, 2); + }); + + it('reports fileSize correctly', async () => { + const buf = buildGgufBuffer(); + const filePath = writeGgufFile(buf); + + const meta = await parseGgufHeader(filePath); + assert.equal(meta.fileSize, buf.length); + }); + + 
it('stores all parsed metadata in the metadata map', async () => { + const buf = buildGgufBuffer(); + const filePath = writeGgufFile(buf); + + const meta = await parseGgufHeader(filePath); + assert.equal(meta.metadata['general.architecture'], 'llama'); + assert.equal(meta.metadata['general.name'], 'test-model'); + }); +}); + +// --------------------------------------------------------------------------- +// 2. GgufEngine lifecycle +// --------------------------------------------------------------------------- + +describe('GgufEngine', () => { + let engine: GgufEngine; + + beforeEach(() => { + engine = new GgufEngine({ + contextSize: 2048, + maxTokens: 256, + temperature: 0.5, + verbose: false, + }); + }); + + afterEach(async () => { + await engine.shutdown(); + }); + + it('constructs with provided config', () => { + assert.ok(engine); + }); + + it('initialize succeeds even without node-llama-cpp', async () => { + await engine.initialize(); + // Should not throw -- degrades gracefully + }); + + it('loadModel parses GGUF header from a test file', async () => { + await engine.initialize(); + + const buf = buildGgufBuffer({ + kvEntries: [ + ggufKvString('general.architecture', 'llama'), + ggufKvString('general.name', 'engine-test-model'), + ], + }); + const filePath = writeGgufFile(buf); + + const meta = await engine.loadModel(filePath); + assert.equal(meta.architecture, 'llama'); + assert.equal(meta.name, 'engine-test-model'); + }); + + it('getLoadedModels returns models after loadModel', async () => { + await engine.initialize(); + + const buf = buildGgufBuffer(); + const filePath = writeGgufFile(buf); + + await engine.loadModel(filePath); + const models = engine.getLoadedModels(); + assert.equal(models.length, 1); + assert.equal(models[0].architecture, 'llama'); + }); + + it('shutdown clears loaded models', async () => { + await engine.initialize(); + + const buf = buildGgufBuffer(); + const filePath = writeGgufFile(buf); + + await engine.loadModel(filePath); + 
assert.equal(engine.getLoadedModels().length, 1); + + await engine.shutdown(); + assert.equal(engine.getLoadedModels().length, 0); + }); +}); + +// --------------------------------------------------------------------------- +// 3. Generate in metadata-only mode +// --------------------------------------------------------------------------- + +describe('GgufEngine.generate (metadata-only)', () => { + it('returns a metadata-only response when node-llama-cpp is unavailable', async () => { + const engine = new GgufEngine({ verbose: false }); + await engine.initialize(); + + const buf = buildGgufBuffer({ + kvEntries: [ + ggufKvString('general.architecture', 'llama'), + ggufKvString('general.name', 'fallback-model'), + ], + }); + const filePath = writeGgufFile(buf); + await engine.loadModel(filePath); + + const response = await engine.generate({ prompt: 'Hello world' }); + assert.equal(response.metadataOnly, true); + assert.ok(response.text.includes('metadata-only')); + assert.ok(response.text.includes('fallback-model')); + assert.equal(response.tokensUsed, 0); + assert.ok(response.latencyMs >= 0); + + await engine.shutdown(); + }); + + it('returns a no-model placeholder when no model is loaded', async () => { + const engine = new GgufEngine({ verbose: false }); + await engine.initialize(); + + const response = await engine.generate({ prompt: 'Hello world' }); + assert.equal(response.metadataOnly, true); + assert.ok(response.text.includes('No model loaded')); + + await engine.shutdown(); + }); +}); + +// --------------------------------------------------------------------------- +// 4. 
KV cache persistence (RVKV format) +// --------------------------------------------------------------------------- + +describe('KV cache persistence', () => { + it('writes and reads back KV cache entries (round-trip)', async () => { + const engine = new GgufEngine({ verbose: false }); + await engine.initialize(); + + // Load a model so there's an active model path + const buf = buildGgufBuffer(); + const filePath = writeGgufFile(buf); + await engine.loadModel(filePath); + + // Store some entries + engine.setKvEntry('key-alpha', Buffer.from('value-alpha')); + engine.setKvEntry('key-beta', Buffer.from('value-beta')); + engine.setKvEntry('key-gamma', Buffer.from('value-gamma')); + + // Persist + const cachePath = tmpPath('.rvkv'); + await engine.persistKvCache(cachePath); + + // Create a fresh engine and load the cache + const engine2 = new GgufEngine({ verbose: false }); + await engine2.initialize(); + await engine2.loadKvCache(cachePath); + + assert.deepEqual(engine2.getKvEntry('key-alpha'), Buffer.from('value-alpha')); + assert.deepEqual(engine2.getKvEntry('key-beta'), Buffer.from('value-beta')); + assert.deepEqual(engine2.getKvEntry('key-gamma'), Buffer.from('value-gamma')); + + await engine.shutdown(); + await engine2.shutdown(); + }); + + it('RVKV file starts with magic "RVKV" (0x564B5652 LE)', async () => { + const engine = new GgufEngine({ verbose: false }); + await engine.initialize(); + const buf = buildGgufBuffer(); + await engine.loadModel(writeGgufFile(buf)); + + engine.setKvEntry('test', Buffer.from('data')); + + const cachePath = tmpPath('.rvkv'); + await engine.persistKvCache(cachePath); + + const data = readFileSync(cachePath); + assert.equal(data.readUInt32LE(0), 0x564B5652, 'Magic should be RVKV'); + assert.equal(data.readUInt32LE(4), 1, 'Version should be 1'); + + await engine.shutdown(); + }); + + it('loadKvCache rejects a file with invalid magic', async () => { + const engine = new GgufEngine({ verbose: false }); + await engine.initialize(); + + 
const badFile = tmpPath('.rvkv'); + const badBuf = Buffer.alloc(64); + badBuf.writeUInt32LE(0xDEADBEEF, 0); + writeFileSync(badFile, badBuf); + + await assert.rejects( + () => engine.loadKvCache(badFile), + /Invalid KV cache magic/, + ); + + await engine.shutdown(); + }); + + it('loadKvCache rejects a file that is too small', async () => { + const engine = new GgufEngine({ verbose: false }); + await engine.initialize(); + + const smallFile = tmpPath('.rvkv'); + writeFileSync(smallFile, Buffer.alloc(10)); + + await assert.rejects( + () => engine.loadKvCache(smallFile), + /too small/, + ); + + await engine.shutdown(); + }); + + it('SHA256 footer is verified on loadKvCache', async () => { + const engine = new GgufEngine({ verbose: false }); + await engine.initialize(); + const ggufBuf = buildGgufBuffer(); + await engine.loadModel(writeGgufFile(ggufBuf)); + + engine.setKvEntry('important', Buffer.from('secret-data')); + + const cachePath = tmpPath('.rvkv'); + await engine.persistKvCache(cachePath); + + // Tamper with the SHA256 footer (last 32 bytes) to trigger hash mismatch + const data = readFileSync(cachePath); + const tampered = Buffer.from(data); + // Flip a byte in the footer hash area (last 32 bytes) + tampered[tampered.length - 1] ^= 0xFF; + writeFileSync(cachePath, tampered); + + const engine2 = new GgufEngine({ verbose: false }); + await engine2.initialize(); + + await assert.rejects( + () => engine2.loadKvCache(cachePath), + /hash mismatch/, + ); + + await engine.shutdown(); + await engine2.shutdown(); + }); + + it('handles empty KV cache (zero entries)', async () => { + const engine = new GgufEngine({ verbose: false }); + await engine.initialize(); + const ggufBuf = buildGgufBuffer(); + await engine.loadModel(writeGgufFile(ggufBuf)); + + // No entries set -- persist should still work + const cachePath = tmpPath('.rvkv'); + await engine.persistKvCache(cachePath); + + const engine2 = new GgufEngine({ verbose: false }); + await engine2.initialize(); + await 
engine2.loadKvCache(cachePath); + + assert.equal(engine2.getKvEntry('nonexistent'), undefined); + + await engine.shutdown(); + await engine2.shutdown(); + }); +}); + +// --------------------------------------------------------------------------- +// 5. Stream +// --------------------------------------------------------------------------- + +describe('GgufEngine.stream', () => { + it('yields at least one token from the async iterator', async () => { + const engine = new GgufEngine({ verbose: false }); + await engine.initialize(); + + const ggufBuf = buildGgufBuffer({ + kvEntries: [ + ggufKvString('general.architecture', 'llama'), + ggufKvString('general.name', 'stream-test'), + ], + }); + await engine.loadModel(writeGgufFile(ggufBuf)); + + const tokens: string[] = []; + for await (const token of engine.stream({ prompt: 'Hello' })) { + tokens.push(token); + } + + assert.ok(tokens.length >= 1, 'Stream should yield at least one token'); + // In metadata-only mode, it yields the full metadata response as one chunk + assert.ok(tokens[0].includes('metadata-only')); + + await engine.shutdown(); + }); + + it('yields the no-model fallback when no model is loaded', async () => { + const engine = new GgufEngine({ verbose: false }); + await engine.initialize(); + + const tokens: string[] = []; + for await (const token of engine.stream({ prompt: 'Hello' })) { + tokens.push(token); + } + + assert.ok(tokens.length >= 1); + assert.ok(tokens[0].includes('No model loaded')); + + await engine.shutdown(); + }); +}); diff --git a/v3/__tests__/appliance/rvfa-distribution.test.ts b/v3/__tests__/appliance/rvfa-distribution.test.ts new file mode 100644 index 0000000000..f748dab301 --- /dev/null +++ b/v3/__tests__/appliance/rvfa-distribution.test.ts @@ -0,0 +1,641 @@ +/** + * RVFA Distribution & Hot-Patch module tests. + * + * Uses the Node.js built-in test runner (node:test). 
+ * Run: npx tsx --test v3/__tests__/appliance/rvfa-distribution.test.ts + */ + +import { describe, it, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { writeFileSync, unlinkSync, existsSync, readFileSync, mkdirSync, rmSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { + RvfaPatcher, + RvfaPublisher, + type RvfpHeader, + type CreatePatchOptions, +} from '../../@claude-flow/cli/src/appliance/rvfa-distribution.js'; +import { + RvfaWriter, + RvfaReader, + createDefaultHeader, +} from '../../@claude-flow/cli/src/appliance/rvfa-format.js'; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +const cleanupPaths: string[] = []; + +function tmpPath(suffix: string): string { + const p = join( + tmpdir(), + `rvfa-dist-test-${Date.now()}-${Math.random().toString(36).slice(2)}${suffix}`, + ); + cleanupPaths.push(p); + return p; +} + +function tmpDir(): string { + const d = join( + tmpdir(), + `rvfa-dist-dir-${Date.now()}-${Math.random().toString(36).slice(2)}`, + ); + mkdirSync(d, { recursive: true }); + cleanupPaths.push(d); + return d; +} + +/** Build a test RVFA binary with the given sections. */ +function buildTestRvfa( + name = 'test-appliance', + version = '3.5.0', + sections?: Array<{ id: string; data: string }>, +): Buffer { + const header = createDefaultHeader('cloud'); + const writer = new RvfaWriter({ ...header, name, appVersion: version }); + const secs = sections ?? 
[ + { id: 'kernel', data: 'kernel-payload-original' }, + { id: 'runtime', data: 'runtime-payload-original' }, + { id: 'ruflo', data: 'ruflo-payload-original' }, + ]; + for (const s of secs) { + writer.addSection(s.id, Buffer.from(s.data), { compression: 'none' }); + } + return writer.build(); +} + +function writeTestRvfa( + name = 'test-appliance', + version = '3.5.0', + sections?: Array<{ id: string; data: string }>, +): string { + const buf = buildTestRvfa(name, version, sections); + const p = tmpPath('.rvf'); + writeFileSync(p, buf); + return p; +} + +afterEach(() => { + for (const p of cleanupPaths) { + try { + if (existsSync(p)) { + const s = require('node:fs').statSync(p); + if (s.isDirectory()) rmSync(p, { recursive: true, force: true }); + else unlinkSync(p); + } + } catch { /* ignore */ } + } + cleanupPaths.length = 0; +}); + +// --------------------------------------------------------------------------- +// 1. RVFP patch creation +// --------------------------------------------------------------------------- + +describe('RvfaPatcher.createPatch', () => { + it('creates a patch with RVFP magic bytes', async () => { + const patch = await RvfaPatcher.createPatch({ + targetName: 'test-appliance', + targetVersion: '3.5.0', + sectionId: 'kernel', + sectionData: Buffer.from('new-kernel-data'), + patchVersion: '1.0.0', + }); + + assert.equal(patch.subarray(0, 4).toString('ascii'), 'RVFP'); + }); + + it('creates a patch with correct version number', async () => { + const patch = await RvfaPatcher.createPatch({ + targetName: 'test-appliance', + targetVersion: '3.5.0', + sectionId: 'kernel', + sectionData: Buffer.from('new-kernel-data'), + patchVersion: '1.0.0', + }); + + assert.equal(patch.readUInt32LE(4), 1, 'Version should be 1'); + }); + + it('includes all header fields in the patch', async () => { + const patch = await RvfaPatcher.createPatch({ + targetName: 'my-app', + targetVersion: '2.0.0', + sectionId: 'runtime', + sectionData: 
Buffer.from('updated-runtime'), + patchVersion: '1.1.0', + }); + + const header = RvfaPatcher.parsePatchHeader(patch); + assert.equal(header.magic, 'RVFP'); + assert.equal(header.version, 1); + assert.equal(header.targetApplianceName, 'my-app'); + assert.equal(header.targetApplianceVersion, '2.0.0'); + assert.equal(header.targetSection, 'runtime'); + assert.equal(header.patchVersion, '1.1.0'); + assert.equal(typeof header.created, 'string'); + assert.ok(header.created.length > 0); + assert.equal(typeof header.newSectionSha256, 'string'); + assert.ok(header.newSectionSha256.length > 0); + }); +}); + +// --------------------------------------------------------------------------- +// 2. RVFP patch header +// --------------------------------------------------------------------------- + +describe('RvfaPatcher.parsePatchHeader', () => { + it('extracts all fields from a valid patch', async () => { + const sectionData = Buffer.from('test-section-content'); + const patch = await RvfaPatcher.createPatch({ + targetName: 'header-test', + targetVersion: '1.0.0', + sectionId: 'kernel', + sectionData, + patchVersion: '0.1.0', + compression: 'none', + }); + + const header = RvfaPatcher.parsePatchHeader(patch); + assert.equal(header.targetApplianceName, 'header-test'); + assert.equal(header.targetApplianceVersion, '1.0.0'); + assert.equal(header.targetSection, 'kernel'); + assert.equal(header.patchVersion, '0.1.0'); + assert.equal(header.compression, 'none'); + assert.equal(header.newSectionSize, sectionData.length); + }); + + it('rejects a buffer with wrong magic', () => { + const bad = Buffer.alloc(64); + bad.write('NOPE', 0, 'ascii'); + assert.throws( + () => RvfaPatcher.parsePatchHeader(bad), + /Invalid RVFP magic/, + ); + }); + + it('rejects a buffer that is too small', () => { + const small = Buffer.alloc(8); + small.write('RVFP', 0, 'ascii'); + assert.throws( + () => RvfaPatcher.parsePatchHeader(small), + /too small/, + ); + }); +}); + +// 
--------------------------------------------------------------------------- +// 3. Patch verification +// --------------------------------------------------------------------------- + +describe('RvfaPatcher.verifyPatch', () => { + it('returns valid=true for a well-formed patch', async () => { + const patch = await RvfaPatcher.createPatch({ + targetName: 'verify-test', + targetVersion: '1.0.0', + sectionId: 'kernel', + sectionData: Buffer.from('valid-content'), + patchVersion: '0.1.0', + }); + + const result = await RvfaPatcher.verifyPatch(patch); + assert.ok(result.valid, `Expected valid but got errors: ${result.errors.join(', ')}`); + assert.equal(result.errors.length, 0); + }); + + it('returns valid=false for a tampered patch payload', async () => { + const patch = await RvfaPatcher.createPatch({ + targetName: 'tamper-test', + targetVersion: '1.0.0', + sectionId: 'kernel', + sectionData: Buffer.from('original-payload-data'), + patchVersion: '0.1.0', + }); + + const tampered = Buffer.from(patch); + // Tamper with the payload area (after header, before footer) + const headerLen = tampered.readUInt32LE(8); + const payloadOffset = 12 + headerLen; + if (payloadOffset < tampered.length - 32) { + tampered[payloadOffset] ^= 0xFF; + } + + const result = await RvfaPatcher.verifyPatch(tampered); + assert.ok(!result.valid, 'Tampered patch should fail verification'); + assert.ok(result.errors.some((e) => e.includes('SHA256'))); + }); + + it('returns valid=false for a tampered footer', async () => { + const patch = await RvfaPatcher.createPatch({ + targetName: 'footer-test', + targetVersion: '1.0.0', + sectionId: 'kernel', + sectionData: Buffer.from('some-payload'), + patchVersion: '0.1.0', + }); + + const tampered = Buffer.from(patch); + // Tamper with the last byte of the footer + tampered[tampered.length - 1] ^= 0xFF; + + const result = await RvfaPatcher.verifyPatch(tampered); + assert.ok(!result.valid, 'Tampered footer should fail verification'); + }); +}); + +// 
--------------------------------------------------------------------------- +// 4. Patch application +// --------------------------------------------------------------------------- + +describe('RvfaPatcher.applyPatch', () => { + it('replaces the target section and preserves others', async () => { + const rvfaPath = writeTestRvfa('patch-app', '3.5.0'); + + const newKernelData = Buffer.from('brand-new-kernel-payload'); + const patch = await RvfaPatcher.createPatch({ + targetName: 'patch-app', + targetVersion: '3.5.0', + sectionId: 'kernel', + sectionData: newKernelData, + patchVersion: '1.0.0', + }); + + const result = await RvfaPatcher.applyPatch(rvfaPath, patch, { verify: true }); + + assert.ok(result.success, `Apply failed: ${result.errors.join(', ')}`); + assert.equal(result.patchedSection, 'kernel'); + assert.ok(result.newSize > 0); + + // Verify the patched file + const patchedBuf = readFileSync(rvfaPath); + const reader = RvfaReader.fromBuffer(patchedBuf); + + // Target section replaced + const kernel = reader.extractSection('kernel'); + assert.equal(kernel.toString('utf-8'), 'brand-new-kernel-payload'); + + // Other sections untouched + const runtime = reader.extractSection('runtime'); + assert.equal(runtime.toString('utf-8'), 'runtime-payload-original'); + + const ruflo = reader.extractSection('ruflo'); + assert.equal(ruflo.toString('utf-8'), 'ruflo-payload-original'); + }); + + it('creates a backup file', async () => { + const rvfaPath = writeTestRvfa('backup-test', '3.5.0'); + + const patch = await RvfaPatcher.createPatch({ + targetName: 'backup-test', + targetVersion: '3.5.0', + sectionId: 'kernel', + sectionData: Buffer.from('new-kernel'), + patchVersion: '1.0.0', + }); + + const result = await RvfaPatcher.applyPatch(rvfaPath, patch, { backup: true }); + + assert.ok(result.success); + assert.ok(result.backupPath, 'Backup path should be set'); + assert.ok(existsSync(result.backupPath!), 'Backup file should exist'); + cleanupPaths.push(result.backupPath!); 
+ }); + + it('new RVFA passes verify() after patching', async () => { + const rvfaPath = writeTestRvfa('verify-after-patch', '3.5.0'); + + const patch = await RvfaPatcher.createPatch({ + targetName: 'verify-after-patch', + targetVersion: '3.5.0', + sectionId: 'runtime', + sectionData: Buffer.from('updated-runtime-payload'), + patchVersion: '1.0.0', + }); + + const result = await RvfaPatcher.applyPatch(rvfaPath, patch, { verify: true }); + assert.ok(result.success, `Apply failed: ${result.errors.join(', ')}`); + + // Double-check: read back and verify independently + const patchedBuf = readFileSync(rvfaPath); + const reader = RvfaReader.fromBuffer(patchedBuf); + const verifyResult = reader.verify(); + assert.ok(verifyResult.valid, `Verify failed: ${verifyResult.errors.join(', ')}`); + }); + + it('footer SHA256 is updated after patching', async () => { + const rvfaPath = writeTestRvfa('footer-update', '3.5.0'); + + // Read original footer + const originalBuf = readFileSync(rvfaPath); + const originalFooter = originalBuf.subarray(originalBuf.length - 32); + + const patch = await RvfaPatcher.createPatch({ + targetName: 'footer-update', + targetVersion: '3.5.0', + sectionId: 'kernel', + sectionData: Buffer.from('different-kernel-data'), + patchVersion: '1.0.0', + }); + + await RvfaPatcher.applyPatch(rvfaPath, patch); + + // Read new footer + const patchedBuf = readFileSync(rvfaPath); + const newFooter = patchedBuf.subarray(patchedBuf.length - 32); + + assert.ok(!originalFooter.equals(newFooter), 'Footer SHA256 should change after patching'); + }); +}); + +// --------------------------------------------------------------------------- +// 5. 
Patch for wrong target +// --------------------------------------------------------------------------- + +describe('Patch target mismatch', () => { + it('fails when patch targets a different appliance name', async () => { + const rvfaPath = writeTestRvfa('correct-app', '3.5.0'); + + const patch = await RvfaPatcher.createPatch({ + targetName: 'wrong-app', + targetVersion: '3.5.0', + sectionId: 'kernel', + sectionData: Buffer.from('new-data'), + patchVersion: '1.0.0', + }); + + const result = await RvfaPatcher.applyPatch(rvfaPath, patch); + assert.ok(!result.success, 'Should fail for wrong target app'); + assert.ok( + result.errors.some((e) => e.includes('mismatch') || e.includes('wrong-app')), + `Expected mismatch error, got: ${result.errors.join(', ')}`, + ); + }); + + it('fails when patch targets a different appliance version', async () => { + const rvfaPath = writeTestRvfa('version-test', '3.5.0'); + + const patch = await RvfaPatcher.createPatch({ + targetName: 'version-test', + targetVersion: '9.9.9', + sectionId: 'kernel', + sectionData: Buffer.from('new-data'), + patchVersion: '1.0.0', + }); + + const result = await RvfaPatcher.applyPatch(rvfaPath, patch); + assert.ok(!result.success, 'Should fail for wrong target version'); + assert.ok( + result.errors.some((e) => e.includes('mismatch') || e.includes('9.9.9')), + ); + }); + + it('fails when patch targets a nonexistent section', async () => { + const rvfaPath = writeTestRvfa('section-test', '3.5.0'); + + const patch = await RvfaPatcher.createPatch({ + targetName: 'section-test', + targetVersion: '3.5.0', + sectionId: 'nonexistent-section', + sectionData: Buffer.from('new-data'), + patchVersion: '1.0.0', + }); + + const result = await RvfaPatcher.applyPatch(rvfaPath, patch); + assert.ok(!result.success, 'Should fail for nonexistent section'); + assert.ok( + result.errors.some((e) => e.includes('not found') || e.includes('nonexistent')), + ); + }); +}); + +// 
--------------------------------------------------------------------------- +// 6. Signed patch verification (via rvfa-signing integration) +// --------------------------------------------------------------------------- + +describe('Signed patch', () => { + // Note: The distribution module uses raw DER Ed25519 keys, not PEM. + // We skip direct Ed25519 signing tests here since the signing module + // is tested separately. We test that the signature field in the header + // is properly set when a private key is provided. + + it('patch header contains signature when privateKey is provided', async () => { + // Generate raw Ed25519 DER keys using crypto + const { generateKeyPairSync } = await import('node:crypto'); + const keyPair = generateKeyPairSync('ed25519', { + publicKeyEncoding: { type: 'spki', format: 'der' }, + privateKeyEncoding: { type: 'pkcs8', format: 'der' }, + }); + + const patch = await RvfaPatcher.createPatch({ + targetName: 'signed-test', + targetVersion: '1.0.0', + sectionId: 'kernel', + sectionData: Buffer.from('signed-payload'), + patchVersion: '1.0.0', + privateKey: keyPair.privateKey as Buffer, + signedBy: 'test-publisher', + }); + + const header = RvfaPatcher.parsePatchHeader(patch); + assert.ok(header.signature, 'Signed patch should have a signature field'); + assert.equal(header.signedBy, 'test-publisher'); + assert.ok(header.signature!.length > 0); + }); + + it('unsigned patch has no signature field', async () => { + const patch = await RvfaPatcher.createPatch({ + targetName: 'unsigned-test', + targetVersion: '1.0.0', + sectionId: 'kernel', + sectionData: Buffer.from('unsigned-payload'), + patchVersion: '1.0.0', + }); + + const header = RvfaPatcher.parsePatchHeader(patch); + assert.equal(header.signature, undefined); + assert.equal(header.signedBy, undefined); + }); +}); + +// --------------------------------------------------------------------------- +// 7. 
Signed patch tamper detection +// --------------------------------------------------------------------------- + +describe('Signed patch tamper detection', () => { + it('verification fails when patch data is modified after signing', async () => { + const { generateKeyPairSync } = await import('node:crypto'); + const keyPair = generateKeyPairSync('ed25519', { + publicKeyEncoding: { type: 'spki', format: 'der' }, + privateKeyEncoding: { type: 'pkcs8', format: 'der' }, + }); + + const patch = await RvfaPatcher.createPatch({ + targetName: 'tamper-sign-test', + targetVersion: '1.0.0', + sectionId: 'kernel', + sectionData: Buffer.from('original-signed-data'), + patchVersion: '1.0.0', + privateKey: keyPair.privateKey as Buffer, + signedBy: 'publisher', + }); + + // Tamper with the section payload + const tampered = Buffer.from(patch); + const headerLen = tampered.readUInt32LE(8); + const payloadOffset = 12 + headerLen; + if (payloadOffset < tampered.length - 32) { + tampered[payloadOffset] ^= 0xFF; + } + + // Integrity verification should fail (SHA256 mismatch) + const result = await RvfaPatcher.verifyPatch(tampered); + assert.ok(!result.valid, 'Tampered signed patch should fail verification'); + }); +}); + +// --------------------------------------------------------------------------- +// 8. 
parsePatchHeader edge cases +// --------------------------------------------------------------------------- + +describe('parsePatchHeader edge cases', () => { + it('rejects a buffer with unsupported version', async () => { + const patch = await RvfaPatcher.createPatch({ + targetName: 'version-test', + targetVersion: '1.0.0', + sectionId: 'kernel', + sectionData: Buffer.from('data'), + patchVersion: '1.0.0', + }); + + const tampered = Buffer.from(patch); + tampered.writeUInt32LE(99, 4); // bad version + + assert.throws( + () => RvfaPatcher.parsePatchHeader(tampered), + /Unsupported RVFP version/, + ); + }); + + it('rejects a buffer with header length exceeding buffer size', async () => { + const buf = Buffer.alloc(16); + buf.write('RVFP', 0, 'ascii'); + buf.writeUInt32LE(1, 4); // version + buf.writeUInt32LE(9999, 8); // header_len way too big + + assert.throws( + () => RvfaPatcher.parsePatchHeader(buf), + /too small/, + ); + }); +}); + +// --------------------------------------------------------------------------- +// 9. 
RvfaPublisher config
// ---------------------------------------------------------------------------

describe('RvfaPublisher', () => {
  // Constructor-only tests: no network I/O happens until a method is called.
  it('constructor accepts JWT from config', () => {
    const publisher = new RvfaPublisher({
      pinataJwt: 'test-jwt-token-from-config',
    });
    assert.ok(publisher);
  });

  it('constructor accepts JWT from process.env', () => {
    // Save/restore PINATA_API_JWT so this test does not leak environment
    // state into other tests running in the same process.
    const original = process.env.PINATA_API_JWT;
    process.env.PINATA_API_JWT = 'test-jwt-from-env';
    try {
      const publisher = new RvfaPublisher({});
      assert.ok(publisher);
    } finally {
      if (original !== undefined) {
        process.env.PINATA_API_JWT = original;
      } else {
        delete process.env.PINATA_API_JWT;
      }
    }
  });

  it('constructor throws when no JWT is available', () => {
    const original = process.env.PINATA_API_JWT;
    delete process.env.PINATA_API_JWT;
    try {
      // Empty-string JWT plus an empty environment must be rejected.
      assert.throws(
        () => new RvfaPublisher({ pinataJwt: '' }),
        /JWT/i,
      );
    } finally {
      // Only restore when a value previously existed; the variable is
      // already deleted in the other case.
      if (original !== undefined) {
        process.env.PINATA_API_JWT = original;
      }
    }
  });
});

// ---------------------------------------------------------------------------
// 10.
Publisher list (mock-safe)
// ---------------------------------------------------------------------------

describe('Publisher URL construction', () => {
  it('uses default gateway and API URLs', () => {
    // Save/restore PINATA_API_JWT around the construction.
    const original = process.env.PINATA_API_JWT;
    process.env.PINATA_API_JWT = 'test-jwt';
    try {
      const publisher = new RvfaPublisher({});
      // We cannot call .list() without network, but we can verify
      // the publisher was created successfully with defaults
      assert.ok(publisher);
    } finally {
      if (original !== undefined) {
        process.env.PINATA_API_JWT = original;
      } else {
        delete process.env.PINATA_API_JWT;
      }
    }
  });

  it('accepts custom gateway and API URLs', () => {
    const publisher = new RvfaPublisher({
      pinataJwt: 'test-jwt',
      gatewayUrl: 'https://custom-gateway.example.com',
      apiUrl: 'https://custom-api.example.com',
    });
    assert.ok(publisher);
  });

  it('strips trailing slashes from URLs', () => {
    // Constructor must tolerate (and normalize) trailing slashes; only
    // successful construction is observable here — normalization itself is
    // internal and not asserted. NOTE(review): consider exposing the
    // normalized URLs so this behavior can be asserted directly.
    const publisher = new RvfaPublisher({
      pinataJwt: 'test-jwt',
      gatewayUrl: 'https://gateway.example.com///',
      apiUrl: 'https://api.example.com//',
    });
    // The publisher should be created without error (trailing slashes stripped internally)
    assert.ok(publisher);
  });
});

// ---------------------------------------------------------------------------
// 11.
Gzip compression in patches
// ---------------------------------------------------------------------------

describe('Gzip-compressed patches', () => {
  it('creates and verifies a gzip-compressed patch', async () => {
    // 1 KiB of a single repeated byte, so the gzip path gets compressible input.
    const sectionData = Buffer.alloc(1024, 0x42); // highly compressible
    const patch = await RvfaPatcher.createPatch({
      targetName: 'gzip-test',
      targetVersion: '1.0.0',
      sectionId: 'kernel',
      sectionData,
      patchVersion: '1.0.0',
      compression: 'gzip',
    });

    // The parsed header must record the requested compression mode.
    const header = RvfaPatcher.parsePatchHeader(patch);
    assert.equal(header.compression, 'gzip');

    // Verification must succeed on the compressed payload.
    const result = await RvfaPatcher.verifyPatch(patch);
    assert.ok(result.valid, `Gzip patch should verify: ${result.errors.join(', ')}`);
  });
});
diff --git a/v3/__tests__/appliance/rvfa-signing.test.ts b/v3/__tests__/appliance/rvfa-signing.test.ts
new file mode 100644
index 0000000000..83a05512d2
--- /dev/null
+++ b/v3/__tests__/appliance/rvfa-signing.test.ts
@@ -0,0 +1,451 @@
+/**
+ * RVFA Ed25519 signing module tests.
+ *
+ * Uses the Node.js built-in test runner (node:test).
+ * Run: npx tsx --test v3/__tests__/appliance/rvfa-signing.test.ts + */ + +import { describe, it, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { writeFileSync, unlinkSync, existsSync, readFileSync, mkdirSync, rmSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { + generateKeyPair, + saveKeyPair, + loadKeyPair, + RvfaSigner, + RvfaVerifier, + type RvfaKeyPair, +} from '../../@claude-flow/cli/src/appliance/rvfa-signing.js'; +import { + RvfaWriter, + RvfaReader, + createDefaultHeader, +} from '../../@claude-flow/cli/src/appliance/rvfa-format.js'; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +const cleanupPaths: string[] = []; + +function tmpPath(suffix: string): string { + const p = join( + tmpdir(), + `rvfa-sign-test-${Date.now()}-${Math.random().toString(36).slice(2)}${suffix}`, + ); + cleanupPaths.push(p); + return p; +} + +function tmpDir(): string { + const d = join( + tmpdir(), + `rvfa-sign-dir-${Date.now()}-${Math.random().toString(36).slice(2)}`, + ); + mkdirSync(d, { recursive: true }); + cleanupPaths.push(d); + return d; +} + +function buildTestRvfa(name = 'test-appliance'): Buffer { + const header = createDefaultHeader('cloud'); + const writer = new RvfaWriter({ ...header, name }); + writer.addSection('kernel', Buffer.from('kernel-data'), { compression: 'none' }); + writer.addSection('runtime', Buffer.from('runtime-data'), { compression: 'none' }); + writer.addSection('ruflo', Buffer.from('ruflo-data'), { compression: 'none' }); + return writer.build(); +} + +function writeTestRvfa(name = 'test-appliance'): string { + const buf = buildTestRvfa(name); + const p = tmpPath('.rvf'); + writeFileSync(p, buf); + return p; +} + +afterEach(() => { + for (const p of cleanupPaths) { + try { + if (existsSync(p)) { + const s = 
require('node:fs').statSync(p); + if (s.isDirectory()) rmSync(p, { recursive: true, force: true }); + else unlinkSync(p); + } + } catch { /* ignore */ } + } + cleanupPaths.length = 0; +}); + +// --------------------------------------------------------------------------- +// 1. Key generation +// --------------------------------------------------------------------------- + +describe('generateKeyPair', () => { + it('produces a key pair with publicKey, privateKey, and fingerprint', async () => { + const kp = await generateKeyPair(); + + assert.ok(Buffer.isBuffer(kp.publicKey)); + assert.ok(Buffer.isBuffer(kp.privateKey)); + assert.equal(typeof kp.fingerprint, 'string'); + assert.ok(kp.fingerprint.length > 0); + }); + + it('public key is in PEM format (starts with BEGIN)', async () => { + const kp = await generateKeyPair(); + const pubPem = kp.publicKey.toString('utf-8'); + assert.ok(pubPem.includes('BEGIN PUBLIC KEY'), 'Public key should be PEM-encoded'); + }); + + it('private key is in PEM format (starts with BEGIN)', async () => { + const kp = await generateKeyPair(); + const privPem = kp.privateKey.toString('utf-8'); + assert.ok(privPem.includes('BEGIN PRIVATE KEY'), 'Private key should be PEM-encoded'); + }); + + it('generates distinct key pairs on each call', async () => { + const kp1 = await generateKeyPair(); + const kp2 = await generateKeyPair(); + assert.ok(!kp1.publicKey.equals(kp2.publicKey), 'Public keys should differ'); + assert.ok(!kp1.privateKey.equals(kp2.privateKey), 'Private keys should differ'); + assert.notEqual(kp1.fingerprint, kp2.fingerprint, 'Fingerprints should differ'); + }); +}); + +// --------------------------------------------------------------------------- +// 2. 
Key fingerprint
// ---------------------------------------------------------------------------

describe('Key fingerprint', () => {
  it('fingerprint is hex and 16 characters long', async () => {
    const kp = await generateKeyPair();
    // 16 lowercase hex chars (64 bits of a key hash — presumably a truncated
    // digest of the public key; TODO confirm against rvfa-signing.ts).
    assert.match(kp.fingerprint, /^[0-9a-f]{16}$/);
  });

  it('fingerprint is deterministic for the same key', async () => {
    const kp = await generateKeyPair();
    // Recreate a signer from the same private key -- fingerprint should match
    const signer = new RvfaSigner(kp.privateKey);
    // Sign something to verify signer is operational
    const rvfaPath = writeTestRvfa();
    const sigMeta = await signer.signAppliance(rvfaPath);
    assert.equal(sigMeta.publicKeyFingerprint, kp.fingerprint);
  });
});

// ---------------------------------------------------------------------------
// 3. Key save/load
// ---------------------------------------------------------------------------

describe('saveKeyPair / loadKeyPair', () => {
  it('round-trips a key pair through save and load', async () => {
    const original = await generateKeyPair();
    const dir = tmpDir();

    await saveKeyPair(original, dir, 'test-key');

    // saveKeyPair writes <name>.pub and <name>.key into the directory.
    assert.ok(existsSync(join(dir, 'test-key.pub')), 'Public key file should exist');
    assert.ok(existsSync(join(dir, 'test-key.key')), 'Private key file should exist');

    const loaded = await loadKeyPair(dir, 'test-key');

    // Keys and derived fingerprint must survive the disk round-trip intact.
    assert.ok(loaded.publicKey.equals(original.publicKey), 'Public key should round-trip');
    assert.ok(loaded.privateKey.equals(original.privateKey), 'Private key should round-trip');
    assert.equal(loaded.fingerprint, original.fingerprint, 'Fingerprint should round-trip');
  });

  it('uses default name "rvfa-signing" when none provided', async () => {
    const kp = await generateKeyPair();
    const dir = tmpDir();

    await saveKeyPair(kp, dir);

    assert.ok(existsSync(join(dir, 'rvfa-signing.pub')));
    assert.ok(existsSync(join(dir, 'rvfa-signing.key')));

    const loaded = await loadKeyPair(dir);
    assert.equal(loaded.fingerprint, kp.fingerprint);
  });
});

// ---------------------------------------------------------------------------
// 4. Sign appliance
// ---------------------------------------------------------------------------

describe('RvfaSigner.signAppliance', () => {
  it('signs an RVFA file and embeds signature metadata in the header', async () => {
    const kp = await generateKeyPair();
    const signer = new RvfaSigner(kp.privateKey);
    const rvfaPath = writeTestRvfa();

    const sigMeta = await signer.signAppliance(rvfaPath, 'test-publisher');

    // Returned metadata describes the signature just written into the file.
    assert.equal(sigMeta.algorithm, 'ed25519');
    assert.equal(typeof sigMeta.signature, 'string');
    assert.ok(sigMeta.signature.length > 0);
    assert.equal(sigMeta.publicKeyFingerprint, kp.fingerprint);
    assert.equal(sigMeta.signedBy, 'test-publisher');
    assert.equal(sigMeta.scope, 'full');
    assert.equal(typeof sigMeta.signedAt, 'string');
  });

  it('signed file is still a valid RVFA (readable by RvfaReader)', async () => {
    const kp = await generateKeyPair();
    const signer = new RvfaSigner(kp.privateKey);
    const rvfaPath = writeTestRvfa();

    await signer.signAppliance(rvfaPath);

    // Signing mutates the file in place; it must remain a parseable RVFA
    // whose header now carries a signature field.
    const buf = readFileSync(rvfaPath);
    const reader = RvfaReader.fromBuffer(buf);
    const header = reader.getHeader();
    assert.equal(header.name, 'test-appliance');
    assert.ok((header as any).signature, 'Header should contain signature field');
  });
});

// ---------------------------------------------------------------------------
// 5.
Verify valid signature
// ---------------------------------------------------------------------------

describe('RvfaVerifier.verifyAppliance', () => {
  it('returns valid=true for a correctly signed file', async () => {
    const kp = await generateKeyPair();
    const signer = new RvfaSigner(kp.privateKey);
    const verifier = new RvfaVerifier(kp.publicKey);
    const rvfaPath = writeTestRvfa();

    await signer.signAppliance(rvfaPath, 'publisher-name');

    // A clean sign/verify round-trip: result carries the signer identity
    // and an empty error list.
    const result = await verifier.verifyAppliance(rvfaPath);
    assert.ok(result.valid, `Expected valid but got errors: ${result.errors.join(', ')}`);
    assert.equal(result.signerFingerprint, kp.fingerprint);
    assert.equal(result.signedBy, 'publisher-name');
    assert.equal(result.errors.length, 0);
  });
});

// ---------------------------------------------------------------------------
// 6. Verify tampered data
// ---------------------------------------------------------------------------

describe('Tamper detection', () => {
  it('returns valid=false when section data is modified after signing', async () => {
    const kp = await generateKeyPair();
    const signer = new RvfaSigner(kp.privateKey);
    const verifier = new RvfaVerifier(kp.publicKey);
    const rvfaPath = writeTestRvfa();

    await signer.signAppliance(rvfaPath);

    // Read the signed file and tamper with section data
    const signedBuf = readFileSync(rvfaPath);
    const tampered = Buffer.from(signedBuf);
    // Tamper with a byte near the end (in section data area, before footer)
    const tamperOffset = tampered.length - 40; // before the 32-byte SHA256 footer
    if (tamperOffset > 0) {
      tampered[tamperOffset] ^= 0xFF;
    }
    writeFileSync(rvfaPath, tampered);

    const result = await verifier.verifyAppliance(rvfaPath);
    assert.ok(!result.valid, 'Tampered file should fail verification');
    assert.ok(result.errors.length > 0);
  });

  it('returns valid=false when header is modified after signing', async () => {
    const kp = await generateKeyPair();
    const signer = new RvfaSigner(kp.privateKey);
    const verifier = new RvfaVerifier(kp.publicKey);
    const rvfaPath = writeTestRvfa();

    await signer.signAppliance(rvfaPath);

    // Read the signed file, modify the header name
    // (RVFA layout per this test: 12-byte preamble with header_len u32 at
    // offset 8, then a JSON header, then section data.)
    const signedBuf = readFileSync(rvfaPath);
    const headerLen = signedBuf.readUInt32LE(8);
    const headerJson = signedBuf.subarray(12, 12 + headerLen).toString('utf-8');
    const header = JSON.parse(headerJson);
    header.name = 'tampered-name';

    // Rebuild the file with the altered header, updating header_len so the
    // file still parses — only the signature check should fail.
    const newHeaderJson = Buffer.from(JSON.stringify(header), 'utf-8');
    const preamble = Buffer.alloc(12);
    signedBuf.copy(preamble, 0, 0, 8);
    preamble.writeUInt32LE(newHeaderJson.length, 8);

    const rebuilt = Buffer.concat([
      preamble,
      newHeaderJson,
      signedBuf.subarray(12 + headerLen),
    ]);
    writeFileSync(rvfaPath, rebuilt);

    const result = await verifier.verifyAppliance(rvfaPath);
    assert.ok(!result.valid, 'Modified header should fail verification');
  });
});

// ---------------------------------------------------------------------------
// 7. Verify missing signature
// ---------------------------------------------------------------------------

describe('Missing signature', () => {
  it('returns valid=false with appropriate error for unsigned RVFA', async () => {
    const kp = await generateKeyPair();
    const verifier = new RvfaVerifier(kp.publicKey);
    const rvfaPath = writeTestRvfa();

    // File was never signed: verification must fail with a signature-related
    // error rather than, e.g., a parse error.
    const result = await verifier.verifyAppliance(rvfaPath);
    assert.ok(!result.valid);
    assert.ok(
      result.errors.some((e) => e.includes('No signature') || e.includes('signature')),
      `Expected signature-related error, got: ${result.errors.join(', ')}`,
    );
  });
});

// ---------------------------------------------------------------------------
// 8.
Sign patch (detached)
// ---------------------------------------------------------------------------

describe('RvfaSigner.signPatch / RvfaVerifier.verifyPatch', () => {
  it('signs arbitrary data and verifies with detached signature', async () => {
    const kp = await generateKeyPair();
    const signer = new RvfaSigner(kp.privateKey);
    const verifier = new RvfaVerifier(kp.publicKey);

    const patchData = Buffer.from('this-is-a-patch-payload-for-signing');
    const signature = await signer.signPatch(patchData);

    assert.equal(typeof signature, 'string');
    assert.ok(signature.length > 0);

    const valid = await verifier.verifyPatch(patchData, signature);
    assert.ok(valid, 'Detached patch signature should verify');
  });

  it('detached signature fails for tampered data', async () => {
    const kp = await generateKeyPair();
    const signer = new RvfaSigner(kp.privateKey);
    const verifier = new RvfaVerifier(kp.publicKey);

    const patchData = Buffer.from('original-patch-data');
    const signature = await signer.signPatch(patchData);

    // Verify against different bytes than were signed.
    const tampered = Buffer.from('tampered-patch-data');
    const valid = await verifier.verifyPatch(tampered, signature);
    assert.ok(!valid, 'Tampered data should fail verification');
  });

  it('signSections signs a 32-byte footer hash and returns hex signature', async () => {
    const kp = await generateKeyPair();
    const signer = new RvfaSigner(kp.privateKey);

    const footerHash = Buffer.alloc(32, 0xAB);
    const signature = await signer.signSections(footerHash);

    assert.equal(typeof signature, 'string');
    assert.ok(signature.length > 0);
    // signSections signs the raw hash directly (no re-hash),
    // so we verify manually using Node.js crypto (Ed25519 uses a null
    // digest argument to crypto.verify).
    const { verify: edVerify, createPublicKey } = await import('node:crypto');
    const pubKeyObj = createPublicKey({
      key: kp.publicKey,
      format: 'pem',
      type: 'spki',
    });
    const valid = edVerify(null, footerHash, pubKeyObj, Buffer.from(signature, 'hex'));
    assert.ok(valid, 'Signature over raw footer hash should verify');
  });

  it('signSections rejects non-32-byte input', async () => {
    const kp = await generateKeyPair();
    const signer = new RvfaSigner(kp.privateKey);

    await assert.rejects(
      () => signer.signSections(Buffer.alloc(16)),
      /32 bytes/,
    );
  });
});

// ---------------------------------------------------------------------------
// 9. Canonical JSON
// ---------------------------------------------------------------------------

describe('Canonical JSON (deterministic signing)', () => {
  it('signing produces the same signature regardless of header key order', async () => {
    const kp = await generateKeyPair();

    // Create two RVFA files with the same content
    const rvfaPath1 = writeTestRvfa('canon-test');
    const rvfaPath2 = writeTestRvfa('canon-test');

    // Sign both with the same key
    const signer = new RvfaSigner(kp.privateKey);
    const sig1 = await signer.signAppliance(rvfaPath1);
    const sig2 = await signer.signAppliance(rvfaPath2);

    // FIX: the original computed sig1/sig2 but never asserted anything about
    // them, so the test did not exercise its stated claim at all. Both
    // signatures must at least originate from the same key. Byte-for-byte
    // equality of sig1.signature and sig2.signature is NOT asserted because
    // the signed metadata includes a signedAt timestamp that differs between
    // the two calls -- TODO confirm whether the canonical form excludes
    // signedAt; if so, add assert.equal(sig1.signature, sig2.signature).
    assert.equal(sig1.publicKeyFingerprint, sig2.publicKeyFingerprint);

    // Both should verify successfully
    const verifier = new RvfaVerifier(kp.publicKey);
    const result1 = await verifier.verifyAppliance(rvfaPath1);
    const result2 = await verifier.verifyAppliance(rvfaPath2);

    assert.ok(result1.valid, 'First file should verify');
    assert.ok(result2.valid, 'Second file should verify');
  });
});

// ---------------------------------------------------------------------------
// 10.
Re-sign +// --------------------------------------------------------------------------- + +describe('Re-signing', () => { + it('re-signing replaces the old signature cleanly', async () => { + const kp1 = await generateKeyPair(); + const kp2 = await generateKeyPair(); + const rvfaPath = writeTestRvfa(); + + // Sign with first key + const signer1 = new RvfaSigner(kp1.privateKey); + await signer1.signAppliance(rvfaPath, 'publisher-one'); + + // Verify with first key + const verifier1 = new RvfaVerifier(kp1.publicKey); + const result1 = await verifier1.verifyAppliance(rvfaPath); + assert.ok(result1.valid, 'First signature should verify'); + + // Re-sign with second key + const signer2 = new RvfaSigner(kp2.privateKey); + await signer2.signAppliance(rvfaPath, 'publisher-two'); + + // Verify with second key should pass + const verifier2 = new RvfaVerifier(kp2.publicKey); + const result2 = await verifier2.verifyAppliance(rvfaPath); + assert.ok(result2.valid, 'Re-signed file should verify with new key'); + assert.equal(result2.signedBy, 'publisher-two'); + + // Verify with first key should fail + const result3 = await verifier1.verifyAppliance(rvfaPath); + assert.ok(!result3.valid, 'Old key should no longer verify'); + }); + + it('re-signed file still reads as valid RVFA and verifies', async () => { + const kp = await generateKeyPair(); + const rvfaPath = writeTestRvfa(); + + const signer = new RvfaSigner(kp.privateKey); + await signer.signAppliance(rvfaPath); + // Re-sign + const sigMeta = await signer.signAppliance(rvfaPath); + + assert.equal(sigMeta.algorithm, 'ed25519'); + assert.equal(sigMeta.scope, 'full'); + + // The re-signed file should still verify with the same key + const verifier = new RvfaVerifier(kp.publicKey); + const result = await verifier.verifyAppliance(rvfaPath); + assert.ok(result.valid, `Re-signed file should verify: ${result.errors.join(', ')}`); + }); +});