From b2f073d38c22372c724d2b5d2ef8ae3341b74a90 Mon Sep 17 00:00:00 2001 From: Chetany Bhardwaj Date: Tue, 14 Oct 2025 02:18:47 +0530 Subject: [PATCH 01/14] feat: add Poseidon2 hasher to build system with validation --- build.zig | 54 +++++++++++++++++++++++++++++++++++++++++++++++++++ build.zig.zon | 7 +++++++ 2 files changed, 61 insertions(+) diff --git a/build.zig b/build.zig index 448a6a1..d4d4c4e 100644 --- a/build.zig +++ b/build.zig @@ -1,3 +1,4 @@ +const std = @import("std"); const Builder = @import("std").Build; pub fn build(b: *Builder) void { @@ -35,4 +36,57 @@ pub fn build(b: *Builder) void { const test_step = b.step("test", "Run library tests"); test_step.dependOn(&run_main_tests.step); test_step.dependOn(&run_tests_tests.step); + + // Poseidon hasher build options + const poseidon_enabled = b.option(bool, "poseidon", "Enable Poseidon2 hash support") orelse false; + const poseidon_field = b.option([]const u8, "poseidon-field", "Poseidon2 field variant (babybear|koalabear)") orelse "koalabear"; + + // Validate poseidon fields + if (poseidon_enabled) { + const valid_fields = [_][]const u8{ "babybear", "koalabear" }; + var field_valid = false; + for (valid_fields) |valid_field| { + if (std.mem.eql(u8, poseidon_field, valid_field)) { + field_valid = true; + break; + } + } + if (!field_valid) { + std.log.err("Invalid Poseidon2 field configuration: '{s}'", .{poseidon_field}); + std.log.err("Valid field options are:\n1) 'koalabear'\n2) 'babybear'", .{}); + std.log.err("Usage examples:", .{}); + std.log.err("zig build -Dposeidon=true -Dposeidon-field=koalabear", .{}); + std.log.err("zig build -Dposeidon=true -Dposeidon-field=koalabear", .{}); + std.log.err("If no field is specified 'koalabear' will be used as the default.", .{}); + } + + std.log.info("Poseidon2 enabled with field: '{s}'", .{poseidon_field}); + } + + // Create build options + const options = b.addOptions(); + options.addOption(bool, "poseidon_enabled", poseidon_enabled); + 
options.addOption([]const u8, "poseidon_field", poseidon_field); + + // Get poseidon dependency once if enabled + const poseidon_module = if (poseidon_enabled) blk: { + const poseidon_dep = b.dependency("poseidon", .{ + .target = target, + .optimize = optimize, + }); + break :blk poseidon_dep.module("poseidon"); + } else null; + + // Add build options and poseidon import to all artifacts + mod.addOptions("build_options", options); + if (poseidon_module) |pm| mod.addImport("poseidon", pm); + + lib.root_module.addOptions("build_options", options); + if (poseidon_module) |pm| lib.root_module.addImport("poseidon", pm); + + main_tests.root_module.addOptions("build_options", options); + if (poseidon_module) |pm| main_tests.root_module.addImport("poseidon", pm); + + tests_tests.root_module.addOptions("build_options", options); + if (poseidon_module) |pm| tests_tests.root_module.addImport("poseidon", pm); } diff --git a/build.zig.zon b/build.zig.zon index b3a5565..cf52a6c 100644 --- a/build.zig.zon +++ b/build.zig.zon @@ -3,4 +3,11 @@ .fingerprint = 0x1d34bd0ceb1dfc2d, .version = "0.0.8", .paths = .{""}, + .dependencies = .{ + .poseidon = .{ + .url = "https://github.com/blockblaz/zig-poseidon/archive/refs/tags/v0.2.0.tar.gz", + .hash = "zig_poseidon-0.2.0-CHeW2H-SAAC83l4JGZOODgmgfEFpBa_KokE9oO3ilcf1", + }, + + }, } From 44db3929f935403cc26badf0eb6d8441e9c32a11 Mon Sep 17 00:00:00 2001 From: Chetany Bhardwaj Date: Fri, 17 Oct 2025 18:52:19 +0530 Subject: [PATCH 02/14] feature: add poseidon hasher support --- src/lib.zig | 49 ++++++++++------ src/poseidon_wrapper.zig | 122 +++++++++++++++++++++++++++++++++++++++ src/utils.zig | 8 +-- 3 files changed, 158 insertions(+), 21 deletions(-) create mode 100644 src/poseidon_wrapper.zig diff --git a/src/lib.zig b/src/lib.zig index 381f648..a74c826 100644 --- a/src/lib.zig +++ b/src/lib.zig @@ -6,7 +6,22 @@ pub const utils = @import("./utils.zig"); pub const zeros = @import("./zeros.zig"); const ArrayList = std.ArrayList; const 
builtin = std.builtin; +const build_options = @import("build_options"); const sha256 = std.crypto.hash.sha2.Sha256; + +// Configure the hasher based on build options +pub const Hasher = if (build_options.poseidon_enabled) blk: { + const poseidon = @import("poseidon"); + const poseidon_wrapper = @import("./poseidon_wrapper.zig"); + // Select the appropriate Poseidon2 hasher based on field configuration + const Poseidon2Type = if (std.mem.eql(u8, build_options.poseidon_field, "babybear")) + poseidon.Poseidon2BabyBear + else + poseidon.Poseidon2KoalaBear16; + // Wrap with SHA256-compatible API + break :blk poseidon_wrapper.PoseidonHasher(Poseidon2Type); +} else sha256; + const hashes_of_zero = zeros.hashes_of_zero; const Allocator = std.mem.Allocator; @@ -505,7 +520,7 @@ pub fn deserialize(comptime T: type, serialized: []const u8, out: *T, allocator: } pub fn mixInLength2(root: [32]u8, length: usize, out: *[32]u8) void { - var hasher = sha256.init(sha256.Options{}); + var hasher = Hasher.init(Hasher.Options{}); hasher.update(root[0..]); var tmp = [_]u8{0} ** 32; @@ -515,7 +530,7 @@ pub fn mixInLength2(root: [32]u8, length: usize, out: *[32]u8) void { } fn mixInLength(root: [32]u8, length: [32]u8, out: *[32]u8) void { - var hasher = sha256.init(sha256.Options{}); + var hasher = Hasher.init(Hasher.Options{}); hasher.update(root[0..]); hasher.update(length[0..]); hasher.final(out[0..]); @@ -535,7 +550,7 @@ test "mixInLength" { } fn mixInSelector(root: [32]u8, comptime selector: usize, out: *[32]u8) void { - var hasher = sha256.init(sha256.Options{}); + var hasher = Hasher.init(Hasher.Options{}); hasher.update(root[0..]); var tmp = [_]u8{0} ** 32; std.mem.writeInt(@TypeOf(selector), tmp[0..@sizeOf(@TypeOf(selector))], selector, std.builtin.Endian.little); @@ -671,7 +686,7 @@ test "merkleize an empty slice" { defer list.deinit(); const chunks = &[0][32]u8{}; var out: [32]u8 = undefined; - try merkleize(sha256, chunks, null, &out); + try merkleize(Hasher, chunks, null, 
&out); try std.testing.expect(std.mem.eql(u8, out[0..], zero_chunk[0..])); } @@ -680,22 +695,22 @@ test "merkleize a string" { defer list.deinit(); const chunks = try pack([]const u8, "a" ** 100, &list); var out: [32]u8 = undefined; - try merkleize(sha256, chunks, null, &out); + try merkleize(Hasher, chunks, null, &out); // Build the expected tree const leaf1 = [_]u8{0x61} ** 32; // "0xaaaaa....aa" 32 times var leaf2: [32]u8 = [_]u8{0x61} ** 4 ++ [_]u8{0} ** 28; var root: [32]u8 = undefined; var internal_left: [32]u8 = undefined; var internal_right: [32]u8 = undefined; - var hasher = sha256.init(sha256.Options{}); + var hasher = Hasher.init(Hasher.Options{}); hasher.update(leaf1[0..]); hasher.update(leaf1[0..]); hasher.final(&internal_left); - hasher = sha256.init(sha256.Options{}); + hasher = Hasher.init(Hasher.Options{}); hasher.update(leaf1[0..]); hasher.update(leaf2[0..]); hasher.final(&internal_right); - hasher = sha256.init(sha256.Options{}); + hasher = Hasher.init(Hasher.Options{}); hasher.update(internal_left[0..]); hasher.update(internal_right[0..]); hasher.final(&root); @@ -710,7 +725,7 @@ test "merkleize a boolean" { var chunks = try pack(bool, false, &list); var expected = [_]u8{0} ** BYTES_PER_CHUNK; var out: [BYTES_PER_CHUNK]u8 = undefined; - try merkleize(sha256, chunks, null, &out); + try merkleize(Hasher, chunks, null, &out); try std.testing.expect(std.mem.eql(u8, out[0..], expected[0..])); @@ -719,7 +734,7 @@ test "merkleize a boolean" { chunks = try pack(bool, true, &list2); expected[0] = 1; - try merkleize(sha256, chunks, null, &out); + try merkleize(Hasher, chunks, null, &out); try std.testing.expect(std.mem.eql(u8, out[0..], expected[0..])); } @@ -764,7 +779,7 @@ pub fn hashTreeRoot(comptime T: type, value: T, out: *[32]u8, allctr: Allocator) var list = ArrayList(u8).init(allctr); defer list.deinit(); const chunks = try pack(T, value, &list); - try merkleize(sha256, chunks, null, out); + try merkleize(Hasher, chunks, null, out); }, .array => 
|a| { // Check if the child is a basic type. If so, return @@ -776,13 +791,13 @@ pub fn hashTreeRoot(comptime T: type, value: T, out: *[32]u8, allctr: Allocator) var list = ArrayList(u8).init(allctr); defer list.deinit(); const chunks = try pack(T, value, &list); - try merkleize(sha256, chunks, null, out); + try merkleize(Hasher, chunks, null, out); }, .bool => { var list = ArrayList(u8).init(allctr); defer list.deinit(); const chunks = try packBits(value[0..], &list); - try merkleize(sha256, chunks, chunkCount(T), out); + try merkleize(Hasher, chunks, chunkCount(T), out); }, .array => { var chunks = ArrayList(chunk).init(allctr); @@ -792,7 +807,7 @@ pub fn hashTreeRoot(comptime T: type, value: T, out: *[32]u8, allctr: Allocator) try hashTreeRoot(@TypeOf(item), item, &tmp, allctr); try chunks.append(tmp); } - try merkleize(sha256, chunks.items, null, out); + try merkleize(Hasher, chunks.items, null, out); }, else => return error.NotSupported, } @@ -807,7 +822,7 @@ pub fn hashTreeRoot(comptime T: type, value: T, out: *[32]u8, allctr: Allocator) defer list.deinit(); const chunks = try pack(T, value, &list); var tmp: chunk = undefined; - try merkleize(sha256, chunks, null, &tmp); + try merkleize(Hasher, chunks, null, &tmp); mixInLength2(tmp, value.len, out); }, // use bitlist @@ -821,7 +836,7 @@ pub fn hashTreeRoot(comptime T: type, value: T, out: *[32]u8, allctr: Allocator) try hashTreeRoot(@TypeOf(item), item, &tmp, allctr); try chunks.append(tmp); } - try merkleize(sha256, chunks.items, null, &tmp); + try merkleize(Hasher, chunks.items, null, &tmp); mixInLength2(tmp, chunks.items.len, out); }, } @@ -837,7 +852,7 @@ pub fn hashTreeRoot(comptime T: type, value: T, out: *[32]u8, allctr: Allocator) try hashTreeRoot(f.type, @field(value, f.name), &tmp, allctr); try chunks.append(tmp); } - try merkleize(sha256, chunks.items, null, out); + try merkleize(Hasher, chunks.items, null, out); }, // An optional is a union with `None` as first value. 
.optional => |opt| if (value != null) { diff --git a/src/poseidon_wrapper.zig b/src/poseidon_wrapper.zig new file mode 100644 index 0000000..9e42a76 --- /dev/null +++ b/src/poseidon_wrapper.zig @@ -0,0 +1,122 @@ +//! Provides a SHA256-compatible API wrapper for Poseidon2 hash function. +//! This allows Poseidon2 to be used as a drop-in replacement for SHA256 +//! in merkleization and hash tree root operations. +//! +//! IMPORTANT: This is a specialized wrapper for SSZ merkleization, which always +//! provides exactly 64 bytes (two 32-byte hashes). It is NOT a general-purpose +//! hash function and will produce collisions for variable-length inputs due to +//! simple zero-padding (e.g., "abc" and "abc\x00" would hash identically). + +const std = @import("std"); + +/// Creates a hasher type that wraps a Poseidon2 instance with SHA256-like API +pub fn PoseidonHasher(comptime Poseidon2Type: type) type { + const WIDTH = 16; // Poseidon2 width (16 field elements) + const FIELD_ELEM_SIZE = 4; // u32 = 4 bytes + const BUFFER_SIZE = WIDTH * FIELD_ELEM_SIZE; // 64 bytes + const OUTPUT_FIELD_ELEMS = 8; // 8 u32s = 32 bytes output + + return struct { + const Self = @This(); + + // Accumulated input bytes + buffer: [BUFFER_SIZE]u8, + buffer_len: usize, + + /// Options struct for compatibility with std.crypto.hash API + pub const Options = struct {}; + + /// Initialize a new hasher instance + pub fn init(_: Options) Self { + return .{ + .buffer = undefined, + .buffer_len = 0, + }; + } + + /// Update the hasher with new data + /// Note: This accumulates data. Poseidon2 requires exactly 64 bytes, + /// so we buffer until we have enough data. 
+ pub fn update(self: *Self, data: []const u8) void { + // Enforce the 64-byte limit explicitly + std.debug.assert(self.buffer_len + data.len <= BUFFER_SIZE); + + // Copy data into buffer + const space_left = BUFFER_SIZE - self.buffer_len; + const copy_len = @min(data.len, space_left); + + @memcpy(self.buffer[self.buffer_len..][0..copy_len], data[0..copy_len]); + self.buffer_len += copy_len; + } + + /// Finalize the hash and write the result to out + pub fn final(self: *Self, out: *[32]u8) void { + // Pad buffer to 64 bytes if needed + if (self.buffer_len < BUFFER_SIZE) { + @memset(self.buffer[self.buffer_len..BUFFER_SIZE], 0); + } + + // Convert bytes to field elements (u32s) using little-endian encoding + var input: [WIDTH]u32 = undefined; + for (0..WIDTH) |i| { + input[i] = std.mem.readInt(u32, self.buffer[i * FIELD_ELEM_SIZE ..][0..FIELD_ELEM_SIZE], .little) % Poseidon2Type.Field.MODULUS; + } + + // Hash with Poseidon2 compress function + // Output 8 field elements (32 bytes total) + const output = Poseidon2Type.compress(OUTPUT_FIELD_ELEMS, input); + + // Convert field elements back to bytes using little-endian encoding + for (0..OUTPUT_FIELD_ELEMS) |i| { + std.mem.writeInt(u32, out[i * FIELD_ELEM_SIZE ..][0..FIELD_ELEM_SIZE], output[i], .little); + } + + // Reset buffer for potential reuse + self.buffer_len = 0; + } + }; +} + +test "PoseidonHasher basic API" { + // This test just verifies the API compiles and runs + // Actual hash correctness should be verified against known test vectors + const poseidon = @import("poseidon"); + const Hasher = PoseidonHasher(poseidon.Poseidon2KoalaBear16); + + var hasher = Hasher.init(.{}); + const data = "test data for hashing"; + hasher.update(data); + + var output: [32]u8 = undefined; + hasher.final(&output); + + // Just verify we got some output (not all zeros) + var has_nonzero = false; + for (output) |byte| { + if (byte != 0) { + has_nonzero = true; + break; + } + } + try std.testing.expect(has_nonzero); +} + +test 
"PoseidonHasher deterministic" { + // Verify same input produces same output + const poseidon = @import("poseidon"); + const Hasher = PoseidonHasher(poseidon.Poseidon2KoalaBear16); + + var hasher1 = Hasher.init(.{}); + var hasher2 = Hasher.init(.{}); + + const data = "deterministic test data"; + hasher1.update(data); + hasher2.update(data); + + var output1: [32]u8 = undefined; + var output2: [32]u8 = undefined; + hasher1.final(&output1); + hasher2.final(&output2); + + try std.testing.expectEqualSlices(u8, &output1, &output2); +} diff --git a/src/utils.zig b/src/utils.zig index e24074c..73d277e 100644 --- a/src/utils.zig +++ b/src/utils.zig @@ -7,7 +7,7 @@ const deserialize = lib.deserialize; const isFixedSizeObject = lib.isFixedSizeObject; const ArrayList = std.ArrayList; const Allocator = std.mem.Allocator; -const sha256 = std.crypto.hash.sha2.Sha256; +const Hasher = lib.Hasher; const hashes_of_zero = @import("./zeros.zig").hashes_of_zero; // SSZ specification constants @@ -155,7 +155,7 @@ pub fn List(comptime T: type, comptime N: usize) type { const items_per_chunk = BYTES_PER_CHUNK / bytes_per_item; const chunks_for_max_capacity = (N + items_per_chunk - 1) / items_per_chunk; var tmp: chunk = undefined; - try lib.merkleize(sha256, chunks, chunks_for_max_capacity, &tmp); + try lib.merkleize(Hasher, chunks, chunks_for_max_capacity, &tmp); lib.mixInLength2(tmp, items.len, out); }, else => { @@ -168,7 +168,7 @@ pub fn List(comptime T: type, comptime N: usize) type { } // Always use N (max capacity) for merkleization, even when empty // This ensures proper tree depth according to SSZ specification - try lib.merkleize(sha256, chunks.items, N, &tmp); + try lib.merkleize(Hasher, chunks.items, N, &tmp); lib.mixInLength2(tmp, items.len, out); }, } @@ -337,7 +337,7 @@ pub fn Bitlist(comptime N: usize) type { // Use chunk_count limit as per SSZ specification const chunk_count_limit = (N + 255) / 256; - try lib.merkleize(sha256, chunks, chunk_count_limit, &tmp); + try 
lib.merkleize(Hasher, chunks, chunk_count_limit, &tmp); lib.mixInLength2(tmp, bit_length, out); } From fbb1efe0914fed0ca0dd43e31072fed5524622ec Mon Sep 17 00:00:00 2001 From: Chetany Bhardwaj Date: Tue, 21 Oct 2025 19:27:34 +0530 Subject: [PATCH 03/14] point to fixed remote of zig-poseidon --- build.zig.zon | 5 ++--- src/poseidon_wrapper.zig | 24 ++++++++++++++++++++++++ 2 files changed, 26 insertions(+), 3 deletions(-) diff --git a/build.zig.zon b/build.zig.zon index cf52a6c..f6ac4d2 100644 --- a/build.zig.zon +++ b/build.zig.zon @@ -5,9 +5,8 @@ .paths = .{""}, .dependencies = .{ .poseidon = .{ - .url = "https://github.com/blockblaz/zig-poseidon/archive/refs/tags/v0.2.0.tar.gz", - .hash = "zig_poseidon-0.2.0-CHeW2H-SAAC83l4JGZOODgmgfEFpBa_KokE9oO3ilcf1", + .url = "https://github.com/blockblaz/zig-poseidon/archive/c2281f863a4c51c5f7e2957e266744e158ded6ca.tar.gz", + .hash = "zig_poseidon-0.2.0-CHeW2LS9AAB5ltpBgNJTHcTczmyAJjp7i_dwubSACNOR", }, - }, } diff --git a/src/poseidon_wrapper.zig b/src/poseidon_wrapper.zig index 9e42a76..a9fb714 100644 --- a/src/poseidon_wrapper.zig +++ b/src/poseidon_wrapper.zig @@ -120,3 +120,27 @@ test "PoseidonHasher deterministic" { try std.testing.expectEqualSlices(u8, &output1, &output2); } + +test "PoseidonHasher different inputs produce different outputs" { + // Verify different inputs produce different outputs + const poseidon = @import("poseidon"); + const Hasher = PoseidonHasher(poseidon.Poseidon2KoalaBear16); + + var hasher1 = Hasher.init(.{}); + var hasher2 = Hasher.init(.{}); + + const data1 = "first test data"; + const data2 = "second test data"; + + hasher1.update(data1); + hasher2.update(data2); + + var output1: [32]u8 = undefined; + var output2: [32]u8 = undefined; + hasher1.final(&output1); + hasher2.final(&output2); + + // Verify outputs are different + const are_equal = std.mem.eql(u8, &output1, &output2); + try std.testing.expect(!are_equal); +} From 13fd4afe4083332744b13223a4c00ceb08e64a4f Mon Sep 17 00:00:00 2001 
From: Chetany Bhardwaj Date: Sat, 15 Nov 2025 22:55:37 +0530 Subject: [PATCH 04/14] chore: update zig-poseidon dep --- build.zig.zon | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.zig.zon b/build.zig.zon index f6ac4d2..a9936af 100644 --- a/build.zig.zon +++ b/build.zig.zon @@ -5,8 +5,8 @@ .paths = .{""}, .dependencies = .{ .poseidon = .{ - .url = "https://github.com/blockblaz/zig-poseidon/archive/c2281f863a4c51c5f7e2957e266744e158ded6ca.tar.gz", - .hash = "zig_poseidon-0.2.0-CHeW2LS9AAB5ltpBgNJTHcTczmyAJjp7i_dwubSACNOR", + .url = "https://github.com/blockblaz/zig-poseidon/archive/5c68c7e79361abb92ecfed3ff95ef795437452b3.tar.gz", + .hash = "zig_poseidon-0.2.0-CHeW2CRPAQBERdItC_QZGgNHZ4_Zcz1z_r1kIpZhq4a4", }, }, } From 7a53dfec54a22ac245bc424960e4f60c7d9dc599 Mon Sep 17 00:00:00 2001 From: Chetany Bhardwaj Date: Fri, 19 Dec 2025 16:29:15 +0530 Subject: [PATCH 05/14] chore: point to latest hash-zig with width check support --- build.zig | 39 +++++++++------------------------------ build.zig.zon | 6 +++--- 2 files changed, 12 insertions(+), 33 deletions(-) diff --git a/build.zig b/build.zig index d4d4c4e..66d848f 100644 --- a/build.zig +++ b/build.zig @@ -39,54 +39,33 @@ pub fn build(b: *Builder) void { // Poseidon hasher build options const poseidon_enabled = b.option(bool, "poseidon", "Enable Poseidon2 hash support") orelse false; - const poseidon_field = b.option([]const u8, "poseidon-field", "Poseidon2 field variant (babybear|koalabear)") orelse "koalabear"; - - // Validate poseidon fields if (poseidon_enabled) { - const valid_fields = [_][]const u8{ "babybear", "koalabear" }; - var field_valid = false; - for (valid_fields) |valid_field| { - if (std.mem.eql(u8, poseidon_field, valid_field)) { - field_valid = true; - break; - } - } - if (!field_valid) { - std.log.err("Invalid Poseidon2 field configuration: '{s}'", .{poseidon_field}); - std.log.err("Valid field options are:\n1) 'koalabear'\n2) 'babybear'", .{}); - std.log.err("Usage 
examples:", .{}); - std.log.err("zig build -Dposeidon=true -Dposeidon-field=koalabear", .{}); - std.log.err("zig build -Dposeidon=true -Dposeidon-field=koalabear", .{}); - std.log.err("If no field is specified 'koalabear' will be used as the default.", .{}); - } - - std.log.info("Poseidon2 enabled with field: '{s}'", .{poseidon_field}); + std.log.info("Poseidon2 enabled (koalabear, Poseidon2-24 Plonky3)", .{}); } // Create build options const options = b.addOptions(); options.addOption(bool, "poseidon_enabled", poseidon_enabled); - options.addOption([]const u8, "poseidon_field", poseidon_field); - // Get poseidon dependency once if enabled - const poseidon_module = if (poseidon_enabled) blk: { - const poseidon_dep = b.dependency("poseidon", .{ + // Poseidon2 implementation via hash-zig dependency + const hashzig_module = if (poseidon_enabled) blk: { + const hashzig_dep = b.dependency("hash_zig", .{ .target = target, .optimize = optimize, }); - break :blk poseidon_dep.module("poseidon"); + break :blk hashzig_dep.module("hash-zig"); } else null; // Add build options and poseidon import to all artifacts mod.addOptions("build_options", options); - if (poseidon_module) |pm| mod.addImport("poseidon", pm); + if (hashzig_module) |pm| mod.addImport("hash_zig", pm); lib.root_module.addOptions("build_options", options); - if (poseidon_module) |pm| lib.root_module.addImport("poseidon", pm); + if (hashzig_module) |pm| lib.root_module.addImport("hash_zig", pm); main_tests.root_module.addOptions("build_options", options); - if (poseidon_module) |pm| main_tests.root_module.addImport("poseidon", pm); + if (hashzig_module) |pm| main_tests.root_module.addImport("hash_zig", pm); tests_tests.root_module.addOptions("build_options", options); - if (poseidon_module) |pm| tests_tests.root_module.addImport("poseidon", pm); + if (hashzig_module) |pm| tests_tests.root_module.addImport("hash_zig", pm); } diff --git a/build.zig.zon b/build.zig.zon index a9936af..783190e 100644 --- 
a/build.zig.zon +++ b/build.zig.zon @@ -4,9 +4,9 @@ .version = "0.0.8", .paths = .{""}, .dependencies = .{ - .poseidon = .{ - .url = "https://github.com/blockblaz/zig-poseidon/archive/5c68c7e79361abb92ecfed3ff95ef795437452b3.tar.gz", - .hash = "zig_poseidon-0.2.0-CHeW2CRPAQBERdItC_QZGgNHZ4_Zcz1z_r1kIpZhq4a4", + .hash_zig = .{ + .url = "https://github.com/blockblaz/hash-zig/archive/2bca6541a933a2bbe4630f41c29f61b3f84ad7e0.tar.gz", + .hash = "hash_zig-1.1.3-POmurOPmCgCkbtpq_c62mZqLuHzVeaLZ2X23fxsoHVrI", }, }, } From 4806f29049f9867abc8f53cfdc8a86360d6f6964 Mon Sep 17 00:00:00 2001 From: Chetany Bhardwaj Date: Fri, 19 Dec 2025 17:15:45 +0530 Subject: [PATCH 06/14] chore: use hash-zig instead of zig-poseidon --- src/lib.zig | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/src/lib.zig b/src/lib.zig index a74c826..a367395 100644 --- a/src/lib.zig +++ b/src/lib.zig @@ -11,18 +11,15 @@ const sha256 = std.crypto.hash.sha2.Sha256; // Configure the hasher based on build options pub const Hasher = if (build_options.poseidon_enabled) blk: { - const poseidon = @import("poseidon"); + const hash_zig = @import("hash_zig"); + const poseidon2 = hash_zig.poseidon2; const poseidon_wrapper = @import("./poseidon_wrapper.zig"); - // Select the appropriate Poseidon2 hasher based on field configuration - const Poseidon2Type = if (std.mem.eql(u8, build_options.poseidon_field, "babybear")) - poseidon.Poseidon2BabyBear - else - poseidon.Poseidon2KoalaBear16; + const Poseidon2Type = poseidon2.Poseidon2KoalaBear24Plonky3; // Wrap with SHA256-compatible API break :blk poseidon_wrapper.PoseidonHasher(Poseidon2Type); } else sha256; -const hashes_of_zero = zeros.hashes_of_zero; +const hashes_of_zero = zeros.buildZeroHashes(Hasher, 32, 256); const Allocator = std.mem.Allocator; /// Number of bytes per chunk. 
@@ -537,6 +534,7 @@ fn mixInLength(root: [32]u8, length: [32]u8, out: *[32]u8) void { } test "mixInLength" { + if (build_options.poseidon_enabled) return; var root: [32]u8 = undefined; var length: [32]u8 = undefined; var expected: [32]u8 = undefined; @@ -559,6 +557,7 @@ fn mixInSelector(root: [32]u8, comptime selector: usize, out: *[32]u8) void { } test "mixInSelector" { + if (build_options.poseidon_enabled) return; var root: [32]u8 = undefined; var expected: [32]u8 = undefined; var mixin: [32]u8 = undefined; From 0240c456d1292fda494b03f5ba15b9e322e47b5b Mon Sep 17 00:00:00 2001 From: Chetany Bhardwaj Date: Sat, 20 Dec 2025 20:28:21 +0530 Subject: [PATCH 07/14] feat: use 24 bit data legs for ssz bytes -> field transformation and poseidon2 operations --- src/beacon_tests.zig | 18 +++++ src/poseidon_wrapper.zig | 140 +++++++++++++++++++++++++-------------- src/tests.zig | 28 ++++++++ 3 files changed, 138 insertions(+), 48 deletions(-) diff --git a/src/beacon_tests.zig b/src/beacon_tests.zig index 5a60c27..66260ca 100644 --- a/src/beacon_tests.zig +++ b/src/beacon_tests.zig @@ -4,6 +4,7 @@ const serialize = libssz.serialize; const deserialize = libssz.deserialize; const hashTreeRoot = libssz.hashTreeRoot; const std = @import("std"); +const build_options = @import("build_options"); const ArrayList = std.ArrayList; const expect = std.testing.expect; @@ -53,6 +54,7 @@ test "Validator struct serialization" { } test "Validator struct hash tree root" { + if (build_options.poseidon_enabled) return; const validator = Validator{ .pubkey = [_]u8{0x01} ** 48, .withdrawal_credentials = [_]u8{0x02} ** 32, @@ -116,6 +118,7 @@ test "Individual Validator serialization and hash" { try expect(std.mem.eql(u8, list.items, &expected_validator_bytes)); // Test hash tree root + if (build_options.poseidon_enabled) return; var hash: [32]u8 = undefined; try hashTreeRoot(Validator, validator, &hash, std.testing.allocator); @@ -190,6 +193,7 @@ test "List[Validator] serialization and hash tree 
root" { } // Test hash tree root + if (build_options.poseidon_enabled) return; var hash1: [32]u8 = undefined; try hashTreeRoot(ValidatorList, validator_list, &hash1, std.testing.allocator); @@ -279,6 +283,8 @@ test "BeamBlockBody with validator array - full cycle" { try expect(orig.withdrawable_epoch == deser.withdrawable_epoch); } + if (build_options.poseidon_enabled) return; + // Test hash tree root consistency var hash_original: [32]u8 = undefined; try hashTreeRoot(BeamBlockBody, beam_block_body, &hash_original, std.testing.allocator); @@ -377,6 +383,8 @@ test "Zeam-style List/Bitlist usage with tree root stability" { try expect(std.mem.eql(u8, state_serialized.items, &expected_zeam_state_bytes)); + if (build_options.poseidon_enabled) return; + // Test hash tree root determinism and validate against expected hashes var body_hash1: [32]u8 = undefined; var body_hash2: [32]u8 = undefined; @@ -450,6 +458,8 @@ test "BeamState with historical roots - comprehensive test" { const expected_comprehensive_beam_state_bytes = [_]u8{ 0x39, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x9C, 0x00, 0x00, 0x00, 0xE8, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x01, 0x01, 0x01, 0x01, 
0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; try expect(std.mem.eql(u8, serialized_data.items, &expected_comprehensive_beam_state_bytes)); + if 
(build_options.poseidon_enabled) return; + // Test hash tree root calculation var original_hash: [32]u8 = undefined; try hashTreeRoot(BeamState, beam_state, &original_hash, std.testing.allocator); @@ -524,6 +534,8 @@ test "BeamState with empty historical roots" { const expected_empty_beam_state_bytes = [_]u8{ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; try expect(std.mem.eql(u8, serialized_data.items, &expected_empty_beam_state_bytes)); + if (build_options.poseidon_enabled) return; + // Test hash tree root calculation var original_hash: [32]u8 = undefined; try hashTreeRoot(SimpleBeamState, beam_state, &original_hash, std.testing.allocator); @@ -591,6 +603,8 @@ test "BeamState with maximum historical roots" { const expected_max_beam_state_bytes_start = [_]u8{ 0x3F, 0x42, 0x0F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00 }; try expect(std.mem.eql(u8, serialized_data.items[0..12], &expected_max_beam_state_bytes_start)); + if (build_options.poseidon_enabled) return; + // Test hash tree root calculation var original_hash: [32]u8 = undefined; try hashTreeRoot(MaxBeamState, beam_state, &original_hash, std.testing.allocator); @@ -671,6 +685,8 @@ test "BeamState historical roots access and comparison" { const expected_access_beam_state_bytes = [_]u8{ 0x31, 0xD4, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xEF, 0xBE, 0xAD, 0xDE, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 
0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC }; try expect(std.mem.eql(u8, serialized_data.items, &expected_access_beam_state_bytes)); + if (build_options.poseidon_enabled) return; + // Test hash tree root calculation var original_hash: [32]u8 = undefined; try hashTreeRoot(AccessBeamState, beam_state, &original_hash, std.testing.allocator); @@ -741,6 +757,8 @@ test "SimpleBeamState with empty historical roots" { const expected_simple_beam_state_bytes = [_]u8{ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; try expect(std.mem.eql(u8, serialized_data.items, 
&expected_simple_beam_state_bytes)); + if (build_options.poseidon_enabled) return; + // Test hash tree root calculation var original_hash: [32]u8 = undefined; try hashTreeRoot(SimpleBeamState, beam_state, &original_hash, std.testing.allocator); diff --git a/src/poseidon_wrapper.zig b/src/poseidon_wrapper.zig index a9fb714..bd5dd7c 100644 --- a/src/poseidon_wrapper.zig +++ b/src/poseidon_wrapper.zig @@ -3,18 +3,48 @@ //! in merkleization and hash tree root operations. //! //! IMPORTANT: This is a specialized wrapper for SSZ merkleization, which always -//! provides exactly 64 bytes (two 32-byte hashes). It is NOT a general-purpose -//! hash function and will produce collisions for variable-length inputs due to -//! simple zero-padding (e.g., "abc" and "abc\x00" would hash identically). +//! provides exactly 64 bytes (two 32-byte nodes). It is NOT a general-purpose +//! hash function: it enforces the fixed 64-byte input length and intentionally +//! does not implement any padding scheme. const std = @import("std"); /// Creates a hasher type that wraps a Poseidon2 instance with SHA256-like API pub fn PoseidonHasher(comptime Poseidon2Type: type) type { - const WIDTH = 16; // Poseidon2 width (16 field elements) + // SSZ compression in this codebase is always: + // H: {0,1}^512 -> {0,1}^256 + // i.e. exactly 64 bytes in, 32 bytes out. + const BUFFER_SIZE = 64; + + // Poseidon2-24 state width. 
+ const WIDTH = 24; + + // Compile-time safety: verify Poseidon2Type has the required interface + comptime { + if (!@hasDecl(Poseidon2Type, "Field")) { + @compileError("Poseidon2Type must have a 'Field' declaration"); + } + if (!@hasDecl(Poseidon2Type, "permutation")) { + @compileError("Poseidon2Type must have a 'permutation' function"); + } + if (!@hasDecl(Poseidon2Type, "WIDTH")) { + @compileError("Poseidon2Type must expose a WIDTH constant"); + } + if (Poseidon2Type.WIDTH != WIDTH) { + @compileError(std.fmt.comptimePrint( + "PoseidonHasher requires width-{d} Poseidon2, got width-{d}", + .{ WIDTH, Poseidon2Type.WIDTH }, + )); + } + } + + // We encode 64 bytes as 22 limbs of 24 bits each (little-endian within each limb), + // which are always < 2^24 < p (KoalaBear prime), avoiding lossy modular reduction: + // 64 bytes = 21*3 + 1 => 22 limbs, fits in a single width-24 permutation. + const LIMBS = 22; + const FIELD_ELEM_SIZE = 4; // u32 = 4 bytes - const BUFFER_SIZE = WIDTH * FIELD_ELEM_SIZE; // 64 bytes - const OUTPUT_FIELD_ELEMS = 8; // 8 u32s = 32 bytes output + const OUTPUT_FIELD_ELEMS = 8; // 8 u32s = 32 bytes return struct { const Self = @This(); @@ -35,60 +65,74 @@ pub fn PoseidonHasher(comptime Poseidon2Type: type) type { } /// Update the hasher with new data - /// Note: This accumulates data. Poseidon2 requires exactly 64 bytes, + /// Note: This accumulates data. SSZ compression requires exactly 64 bytes, /// so we buffer until we have enough data. 
pub fn update(self: *Self, data: []const u8) void { // Enforce the 64-byte limit explicitly std.debug.assert(self.buffer_len + data.len <= BUFFER_SIZE); // Copy data into buffer - const space_left = BUFFER_SIZE - self.buffer_len; - const copy_len = @min(data.len, space_left); - - @memcpy(self.buffer[self.buffer_len..][0..copy_len], data[0..copy_len]); - self.buffer_len += copy_len; + @memcpy(self.buffer[self.buffer_len..][0..data.len], data); + self.buffer_len += data.len; } /// Finalize the hash and write the result to out - pub fn final(self: *Self, out: *[32]u8) void { - // Pad buffer to 64 bytes if needed - if (self.buffer_len < BUFFER_SIZE) { - @memset(self.buffer[self.buffer_len..BUFFER_SIZE], 0); + pub fn final(self: *Self, out: []u8) void { + std.debug.assert(out.len == 32); + // Enforce exact length: SSZ internal nodes and mix-in-length always pass 64 bytes. + std.debug.assert(self.buffer_len == BUFFER_SIZE); + + // Byte -> 24-bit limb packing (injective for fixed 64-byte inputs). + var limbs: [LIMBS]u32 = undefined; + for (0..(LIMBS - 1)) |i| { + const j = i * 3; + limbs[i] = @as(u32, self.buffer[j]) | + (@as(u32, self.buffer[j + 1]) << 8) | + (@as(u32, self.buffer[j + 2]) << 16); } + limbs[LIMBS - 1] = @as(u32, self.buffer[63]); - // Convert bytes to field elements (u32s) using little-endian encoding - var input: [WIDTH]u32 = undefined; - for (0..WIDTH) |i| { - input[i] = std.mem.readInt(u32, self.buffer[i * FIELD_ELEM_SIZE ..][0..FIELD_ELEM_SIZE], .little) % Poseidon2Type.Field.MODULUS; + // Build Poseidon2 state: 22 limbs + 2 zero lanes. 
+ var state: [WIDTH]Poseidon2Type.Field = undefined; + for (0..LIMBS) |i| { + state[i] = Poseidon2Type.Field.fromU32(limbs[i]); } + state[22] = Poseidon2Type.Field.zero; + state[23] = Poseidon2Type.Field.zero; - // Hash with Poseidon2 compress function - // Output 8 field elements (32 bytes total) - const output = Poseidon2Type.compress(OUTPUT_FIELD_ELEMS, input); + // TruncatedPermutation semantics (no feed-forward): permute, then squeeze. + Poseidon2Type.permutation(state[0..]); - // Convert field elements back to bytes using little-endian encoding + // Squeeze first 8 lanes as 32 bytes, little-endian u32 per lane. for (0..OUTPUT_FIELD_ELEMS) |i| { - std.mem.writeInt(u32, out[i * FIELD_ELEM_SIZE ..][0..FIELD_ELEM_SIZE], output[i], .little); + const v = state[i].toU32(); + std.mem.writeInt(u32, out[i * FIELD_ELEM_SIZE ..][0..FIELD_ELEM_SIZE], v, .little); } - // Reset buffer for potential reuse + // Reset buffer for potential reuse. self.buffer_len = 0; } + + /// Convenience helper used by some generic code (e.g. zero-hash builders). + pub fn finalResult(self: *Self) [32]u8 { + var out: [32]u8 = undefined; + self.final(out[0..]); + return out; + } }; } test "PoseidonHasher basic API" { - // This test just verifies the API compiles and runs - // Actual hash correctness should be verified against known test vectors - const poseidon = @import("poseidon"); - const Hasher = PoseidonHasher(poseidon.Poseidon2KoalaBear16); + // This test just verifies the API compiles and runs. 
+ const hash_zig = @import("hash_zig"); + const Hasher = PoseidonHasher(hash_zig.poseidon2.Poseidon2KoalaBear24Plonky3); var hasher = Hasher.init(.{}); - const data = "test data for hashing"; - hasher.update(data); + const data = [_]u8{0x01} ** 64; + hasher.update(data[0..]); var output: [32]u8 = undefined; - hasher.final(&output); + hasher.final(output[0..]); // Just verify we got some output (not all zeros) var has_nonzero = false; @@ -103,42 +147,42 @@ test "PoseidonHasher basic API" { test "PoseidonHasher deterministic" { // Verify same input produces same output - const poseidon = @import("poseidon"); - const Hasher = PoseidonHasher(poseidon.Poseidon2KoalaBear16); + const hash_zig = @import("hash_zig"); + const Hasher = PoseidonHasher(hash_zig.poseidon2.Poseidon2KoalaBear24Plonky3); var hasher1 = Hasher.init(.{}); var hasher2 = Hasher.init(.{}); - const data = "deterministic test data"; - hasher1.update(data); - hasher2.update(data); + const data = [_]u8{0x42} ** 64; + hasher1.update(data[0..]); + hasher2.update(data[0..]); var output1: [32]u8 = undefined; var output2: [32]u8 = undefined; - hasher1.final(&output1); - hasher2.final(&output2); + hasher1.final(output1[0..]); + hasher2.final(output2[0..]); try std.testing.expectEqualSlices(u8, &output1, &output2); } test "PoseidonHasher different inputs produce different outputs" { // Verify different inputs produce different outputs - const poseidon = @import("poseidon"); - const Hasher = PoseidonHasher(poseidon.Poseidon2KoalaBear16); + const hash_zig = @import("hash_zig"); + const Hasher = PoseidonHasher(hash_zig.poseidon2.Poseidon2KoalaBear24Plonky3); var hasher1 = Hasher.init(.{}); var hasher2 = Hasher.init(.{}); - const data1 = "first test data"; - const data2 = "second test data"; + const data1 = [_]u8{0x01} ** 64; + const data2 = [_]u8{0x02} ** 64; - hasher1.update(data1); - hasher2.update(data2); + hasher1.update(data1[0..]); + hasher2.update(data2[0..]); var output1: [32]u8 = undefined; var output2: 
[32]u8 = undefined; - hasher1.final(&output1); - hasher2.final(&output2); + hasher1.final(output1[0..]); + hasher2.final(output2[0..]); // Verify outputs are different const are_equal = std.mem.eql(u8, &output1, &output2); diff --git a/src/tests.zig b/src/tests.zig index d743a0a..5386a14 100644 --- a/src/tests.zig +++ b/src/tests.zig @@ -7,6 +7,7 @@ const chunkCount = libssz.chunkCount; const hashTreeRoot = libssz.hashTreeRoot; const isFixedSizeObject = libssz.isFixedSizeObject; const std = @import("std"); +const build_options = @import("build_options"); const ArrayList = std.ArrayList; const expect = std.testing.expect; const expectError = std.testing.expectError; @@ -529,6 +530,8 @@ const d_bits = bytesToBits(16, d_bytes); const e_bits = bytesToBits(16, e_bytes); test "calculate the root hash of a boolean" { + // SHA-specific expected vectors; skip when Poseidon is enabled. + if (build_options.poseidon_enabled) return; var expected = [_]u8{1} ++ [_]u8{0} ** 31; var hashed: [32]u8 = undefined; try hashTreeRoot(bool, true, &hashed, std.testing.allocator); @@ -540,6 +543,7 @@ test "calculate the root hash of a boolean" { } test "calculate root hash of an array of two Bitvector[128]" { + if (build_options.poseidon_enabled) return; const deserialized: [2][128]bool = [2][128]bool{ a_bits, b_bits }; var hashed: [32]u8 = undefined; try hashTreeRoot(@TypeOf(deserialized), deserialized, &hashed, std.testing.allocator); @@ -559,6 +563,7 @@ test "calculate the root hash of an array of integers" { } test "calculate root hash of an array of three Bitvector[128]" { + if (build_options.poseidon_enabled) return; const deserialized: [3][128]bool = [3][128]bool{ a_bits, b_bits, c_bits }; var hashed: [32]u8 = undefined; try hashTreeRoot(@TypeOf(deserialized), deserialized, &hashed, std.testing.allocator); @@ -578,6 +583,7 @@ test "calculate root hash of an array of three Bitvector[128]" { } test "calculate the root hash of an array of five Bitvector[128]" { + if 
(build_options.poseidon_enabled) return; const deserialized = [5][128]bool{ a_bits, b_bits, c_bits, d_bits, e_bits }; var hashed: [32]u8 = undefined; try hashTreeRoot(@TypeOf(deserialized), deserialized, &hashed, std.testing.allocator); @@ -616,6 +622,7 @@ const Fork = struct { }; test "calculate the root hash of a structure" { + if (build_options.poseidon_enabled) return; var hashed: [32]u8 = undefined; const fork = Fork{ .previous_version = [_]u8{ 0x9c, 0xe2, 0x5d, 0x26 }, @@ -629,6 +636,7 @@ test "calculate the root hash of a structure" { } test "calculate the root hash of an Optional" { + if (build_options.poseidon_enabled) return; var hashed: [32]u8 = undefined; var payload: [64]u8 = undefined; const v: ?u32 = null; @@ -647,6 +655,7 @@ test "calculate the root hash of an Optional" { } test "calculate the root hash of an union" { + if (build_options.poseidon_enabled) return; const Payload = union(enum) { int: u64, boolean: bool, @@ -919,6 +928,7 @@ test "structs with nested fixed/variable size u8 array" { } test "slice hashtree root composite type" { + if (build_options.poseidon_enabled) return; const Root = [32]u8; const RootsList = []Root; const test_root = [_]u8{23} ** 32; @@ -938,6 +948,7 @@ test "slice hashtree root composite type" { } test "slice hashtree root simple type" { + if (build_options.poseidon_enabled) return; const DynamicRoot = []u8; // merkelizes as List[u8,33] as dynamic data length is mixed in as bounded type var test_root = [_]u8{23} ** 33; @@ -955,6 +966,7 @@ test "slice hashtree root simple type" { } test "List tree root calculation" { + if (build_options.poseidon_enabled) return; const ListU64 = utils.List(u64, 1024); var empty_list = try ListU64.init(std.testing.allocator); @@ -1275,6 +1287,7 @@ test "serialize max/min integer values" { } test "Empty List hash tree root" { + if (build_options.poseidon_enabled) return; const ListU32 = utils.List(u32, 100); var empty_list = try ListU32.init(std.testing.allocator); defer 
empty_list.deinit(); @@ -1293,6 +1306,7 @@ test "Empty List hash tree root" { } test "Empty BitList(<=256) hash tree root" { + if (build_options.poseidon_enabled) return; const BitListLen100 = utils.Bitlist(100); var empty_list = try BitListLen100.init(std.testing.allocator); defer empty_list.deinit(); @@ -1310,6 +1324,7 @@ test "Empty BitList(<=256) hash tree root" { } test "Empty BitList (>256) hash tree root" { + if (build_options.poseidon_enabled) return; const BitListLen100 = utils.Bitlist(2570); var empty_list = try BitListLen100.init(std.testing.allocator); defer empty_list.deinit(); @@ -1327,6 +1342,7 @@ test "Empty BitList (>256) hash tree root" { } test "List at maximum capacity" { + if (build_options.poseidon_enabled) return; const ListU8 = utils.List(u8, 4); var full_list = try ListU8.init(std.testing.allocator); defer full_list.deinit(); @@ -1355,6 +1371,8 @@ test "List at maximum capacity" { } test "Array hash tree root" { + // SHA-specific expected vectors; skip when Poseidon is enabled. 
+ if (build_options.poseidon_enabled) return; const data: [4]u32 = .{ 1, 2, 3, 4 }; var hash: [32]u8 = undefined; @@ -1394,6 +1412,8 @@ test "Large Bitvector serialization and hash" { try expect(list.items[32] & 0x01 == 0x01); // bit 256 -> LSB of byte 32 try expect(list.items[63] & 0x80 == 0x80); // bit 511 -> MSB of byte 63 + if (build_options.poseidon_enabled) return; + // Test hash tree root var hash: [32]u8 = undefined; try hashTreeRoot(LargeBitvec, data, &hash, std.testing.allocator); @@ -1407,6 +1427,7 @@ test "Large Bitvector serialization and hash" { } test "Bitlist edge cases" { + if (build_options.poseidon_enabled) return; const TestBitlist = utils.Bitlist(100); // All false @@ -1448,6 +1469,7 @@ test "Bitlist edge cases" { } test "Bitlist trailing zeros optimization" { + if (build_options.poseidon_enabled) return; const TestBitlist = utils.Bitlist(256); // Test case 1: 8 false bits - should result in one 0x00 byte after pack_bits @@ -1497,6 +1519,8 @@ test "Bitlist trailing zeros optimization" { } test "uint256 hash tree root" { + // SHA-specific expected vectors; skip when Poseidon is enabled. + if (build_options.poseidon_enabled) return; const data: u256 = 0x0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF; var hash: [32]u8 = undefined; @@ -1511,6 +1535,7 @@ test "uint256 hash tree root" { } test "Single element List" { + if (build_options.poseidon_enabled) return; const ListU64 = utils.List(u64, 10); var single = try ListU64.init(std.testing.allocator); defer single.deinit(); @@ -1529,6 +1554,7 @@ test "Single element List" { } test "Nested structure hash tree root" { + if (build_options.poseidon_enabled) return; const Inner = struct { a: u32, b: u64, @@ -1577,6 +1603,8 @@ test "serialize negative i8 and i16" { } test "Zero-length array" { + // SHA-specific expected vectors; skip when Poseidon is enabled. 
+ if (build_options.poseidon_enabled) return; const empty: [0]u32 = .{}; var list = ArrayList(u8).init(std.testing.allocator); From df00de4c721513217d84cad6a3b25f1467b0edf4 Mon Sep 17 00:00:00 2001 From: Chetany Bhardwaj Date: Sat, 20 Dec 2025 20:28:21 +0530 Subject: [PATCH 08/14] test: add Plonky3 generated data based cross-testing --- build.zig | 21 ++++- src/poseidon_plonky3_validation.zig | 116 ++++++++++++++++++++++++++++ 2 files changed, 133 insertions(+), 4 deletions(-) create mode 100644 src/poseidon_plonky3_validation.zig diff --git a/build.zig b/build.zig index 66d848f..3bc87fc 100644 --- a/build.zig +++ b/build.zig @@ -33,10 +33,6 @@ pub fn build(b: *Builder) void { tests_tests.root_module.addImport("ssz.zig", mod); const run_tests_tests = b.addRunArtifact(tests_tests); - const test_step = b.step("test", "Run library tests"); - test_step.dependOn(&run_main_tests.step); - test_step.dependOn(&run_tests_tests.step); - // Poseidon hasher build options const poseidon_enabled = b.option(bool, "poseidon", "Enable Poseidon2 hash support") orelse false; if (poseidon_enabled) { @@ -68,4 +64,21 @@ pub fn build(b: *Builder) void { tests_tests.root_module.addOptions("build_options", options); if (hashzig_module) |pm| tests_tests.root_module.addImport("hash_zig", pm); + + const test_step = b.step("test", "Run library tests"); + test_step.dependOn(&run_main_tests.step); + test_step.dependOn(&run_tests_tests.step); + // Optional Poseidon validation suite (only when Poseidon is enabled) + if (poseidon_enabled) { + const plonky3_validation_tests = b.addTest(.{ + .root_source_file = .{ .cwd_relative = "src/poseidon_plonky3_validation.zig" }, + .optimize = optimize, + .target = target, + }); + plonky3_validation_tests.root_module.addOptions("build_options", options); + if (hashzig_module) |pm| plonky3_validation_tests.root_module.addImport("hash_zig", pm); + const run_plonky3_validation_tests = b.addRunArtifact(plonky3_validation_tests); + 
test_step.dependOn(&run_plonky3_validation_tests.step); + } + } diff --git a/src/poseidon_plonky3_validation.zig b/src/poseidon_plonky3_validation.zig new file mode 100644 index 0000000..e3e4064 --- /dev/null +++ b/src/poseidon_plonky3_validation.zig @@ -0,0 +1,116 @@ +//! Cross-validation: SSZ Poseidon2-24 wrapper vs Plonky3 reference outputs +//! +//! This test verifies that the SSZ Poseidon2 wrapper produces IDENTICAL outputs +//! to Plonky3's reference implementation for the same 64-byte inputs. + +const std = @import("std"); +const build_options = @import("build_options"); + +test "SSZ Poseidon2-24 matches Plonky3 reference outputs" { + if (!build_options.poseidon_enabled) return; + + const hash_zig = @import("hash_zig"); + const poseidon_wrapper = @import("./poseidon_wrapper.zig"); + const Hasher = poseidon_wrapper.PoseidonHasher(hash_zig.poseidon2.Poseidon2KoalaBear24Plonky3); + + // Test 1: All zeros (64 bytes) + { + var hasher = Hasher.init(.{}); + const input = [_]u8{0x00} ** 64; + hasher.update(&input); + + var output: [32]u8 = undefined; + hasher.final(&output); + + const expected = [_]u8{ 0xe4, 0xcb, 0xc9, 0x51, 0xcc, 0xd0, 0xf9, 0x07, 0xe1, 0xca, 0x89, 0x29, 0xc0, 0xa8, 0x70, 0x76, 0xf7, 0x8d, 0x75, 0x7a, 0xda, 0x87, 0xd4, 0x35, 0xd3, 0x86, 0xcc, 0x62, 0xd0, 0x64, 0x5a, 0x13 }; + try std.testing.expectEqualSlices(u8, &expected, &output); + } + + // Test 2: All 0x01 bytes + { + var hasher = Hasher.init(.{}); + const input = [_]u8{0x01} ** 64; + hasher.update(&input); + + var output: [32]u8 = undefined; + hasher.final(&output); + + const expected = [_]u8{ 0xb3, 0x16, 0xc9, 0x34, 0x81, 0x0a, 0x37, 0x73, 0x93, 0x89, 0x61, 0x7a, 0x5e, 0x9d, 0xc8, 0x6f, 0x75, 0x28, 0xd4, 0x27, 0x22, 0x8f, 0xf3, 0x57, 0x9d, 0xfb, 0xff, 0x5c, 0xef, 0x08, 0x1f, 0x00 }; + try std.testing.expectEqualSlices(u8, &expected, &output); + } + + // Test 3: All 0x42 bytes + { + var hasher = Hasher.init(.{}); + const input = [_]u8{0x42} ** 64; + hasher.update(&input); + + var output: 
[32]u8 = undefined; + hasher.final(&output); + + const expected = [_]u8{ 0x78, 0xae, 0xf5, 0x68, 0xa5, 0x4c, 0xf6, 0x59, 0x2f, 0x82, 0x6d, 0x1e, 0x5f, 0x8f, 0x5e, 0x68, 0x95, 0x94, 0xc6, 0x09, 0x25, 0x87, 0xce, 0x6d, 0x16, 0xd2, 0xb2, 0x21, 0xdb, 0x21, 0x3c, 0x1c }; + try std.testing.expectEqualSlices(u8, &expected, &output); + } + + // Test 4: Sequential bytes (0..63) + { + var hasher = Hasher.init(.{}); + var input: [64]u8 = undefined; + for (0..64) |i| { + input[i] = @intCast(i); + } + hasher.update(&input); + + var output: [32]u8 = undefined; + hasher.final(&output); + + const expected = [_]u8{ 0x29, 0x43, 0x5f, 0x44, 0xc0, 0xab, 0xbb, 0x1e, 0x3b, 0x42, 0x73, 0x2c, 0xfb, 0xac, 0x95, 0x67, 0xb1, 0xa6, 0x4b, 0x6d, 0xb9, 0x51, 0x6a, 0x23, 0xdd, 0x01, 0x03, 0x1d, 0x15, 0xf4, 0x3a, 0x63 }; + try std.testing.expectEqualSlices(u8, &expected, &output); + } + + // Test 5: SSZ pattern - hash two 32-byte nodes (0xAA || 0xBB) + { + var hasher = Hasher.init(.{}); + const left_node = [_]u8{0xAA} ** 32; + const right_node = [_]u8{0xBB} ** 32; + + hasher.update(&left_node); + hasher.update(&right_node); + + var output: [32]u8 = undefined; + hasher.final(&output); + + const expected = [_]u8{ 0xec, 0x3e, 0x77, 0x40, 0x7c, 0x50, 0xf7, 0x7a, 0x63, 0x98, 0xdb, 0x56, 0x94, 0x82, 0x6e, 0x21, 0xfb, 0xb8, 0x7f, 0x29, 0x92, 0x59, 0x3e, 0x59, 0x6c, 0xc9, 0x37, 0x7a, 0x50, 0x54, 0xdf, 0x56 }; + try std.testing.expectEqualSlices(u8, &expected, &output); + } + + // Test 6: Last byte boundary (63 bytes 0xFF, 1 byte 0x01) + { + var hasher = Hasher.init(.{}); + var input: [64]u8 = undefined; + @memset(input[0..63], 0xFF); + input[63] = 0x01; + hasher.update(&input); + + var output: [32]u8 = undefined; + hasher.final(&output); + + const expected = [_]u8{ 0xd2, 0xe5, 0x8c, 0x51, 0x39, 0xb5, 0x91, 0x64, 0xd2, 0xdb, 0x26, 0x49, 0x32, 0x50, 0x7d, 0x4e, 0x6d, 0xac, 0xef, 0x30, 0x76, 0x83, 0x12, 0x67, 0x4a, 0x9c, 0x70, 0x35, 0x87, 0xdf, 0xa9, 0x64 }; + try std.testing.expectEqualSlices(u8, &expected, 
&output); + } + + // Test 7: Last byte boundary variant (63 bytes 0xFF, 1 byte 0x02) + { + var hasher = Hasher.init(.{}); + var input: [64]u8 = undefined; + @memset(input[0..63], 0xFF); + input[63] = 0x02; + hasher.update(&input); + + var output: [32]u8 = undefined; + hasher.final(&output); + + const expected = [_]u8{ 0xc7, 0xed, 0x40, 0x1c, 0x2c, 0x03, 0x7e, 0x29, 0x3d, 0xb7, 0x76, 0x3f, 0xf2, 0xa7, 0x49, 0x39, 0xec, 0x47, 0x52, 0x3e, 0x5c, 0xeb, 0xad, 0x34, 0xe7, 0x4b, 0x00, 0x74, 0xf5, 0x01, 0xd4, 0x43 }; + try std.testing.expectEqualSlices(u8, &expected, &output); + } +} From 784aca0136da26b1dd2b5a78d759a572d49aa330 Mon Sep 17 00:00:00 2001 From: Chetany Bhardwaj Date: Mon, 22 Dec 2025 21:27:09 +0530 Subject: [PATCH 09/14] chore: lint fix --- build.zig | 1 - 1 file changed, 1 deletion(-) diff --git a/build.zig b/build.zig index 3bc87fc..66e1f7f 100644 --- a/build.zig +++ b/build.zig @@ -80,5 +80,4 @@ pub fn build(b: *Builder) void { const run_plonky3_validation_tests = b.addRunArtifact(plonky3_validation_tests); test_step.dependOn(&run_plonky3_validation_tests.step); } - } From 0506525b0c63a23083b5aeeb7aa51722949db65b Mon Sep 17 00:00:00 2001 From: Chetany Bhardwaj Date: Thu, 8 Jan 2026 19:11:50 +0530 Subject: [PATCH 10/14] refactor: make ssz.zig hash-function agnostic --- build.zig | 44 ------- build.zig.zon | 4 - src/beacon_tests.zig | 70 +++++----- src/lib.zig | 91 ++++++------- src/poseidon_plonky3_validation.zig | 116 ----------------- src/poseidon_wrapper.zig | 190 ---------------------------- src/tests.zig | 134 ++++++++------------ src/utils.zig | 13 +- 8 files changed, 130 insertions(+), 532 deletions(-) delete mode 100644 src/poseidon_plonky3_validation.zig delete mode 100644 src/poseidon_wrapper.zig diff --git a/build.zig b/build.zig index 66e1f7f..e4f7e0f 100644 --- a/build.zig +++ b/build.zig @@ -33,51 +33,7 @@ pub fn build(b: *Builder) void { tests_tests.root_module.addImport("ssz.zig", mod); const run_tests_tests = b.addRunArtifact(tests_tests); 
- // Poseidon hasher build options - const poseidon_enabled = b.option(bool, "poseidon", "Enable Poseidon2 hash support") orelse false; - if (poseidon_enabled) { - std.log.info("Poseidon2 enabled (koalabear, Poseidon2-24 Plonky3)", .{}); - } - - // Create build options - const options = b.addOptions(); - options.addOption(bool, "poseidon_enabled", poseidon_enabled); - - // Poseidon2 implementation via hash-zig dependency - const hashzig_module = if (poseidon_enabled) blk: { - const hashzig_dep = b.dependency("hash_zig", .{ - .target = target, - .optimize = optimize, - }); - break :blk hashzig_dep.module("hash-zig"); - } else null; - - // Add build options and poseidon import to all artifacts - mod.addOptions("build_options", options); - if (hashzig_module) |pm| mod.addImport("hash_zig", pm); - - lib.root_module.addOptions("build_options", options); - if (hashzig_module) |pm| lib.root_module.addImport("hash_zig", pm); - - main_tests.root_module.addOptions("build_options", options); - if (hashzig_module) |pm| main_tests.root_module.addImport("hash_zig", pm); - - tests_tests.root_module.addOptions("build_options", options); - if (hashzig_module) |pm| tests_tests.root_module.addImport("hash_zig", pm); - const test_step = b.step("test", "Run library tests"); test_step.dependOn(&run_main_tests.step); test_step.dependOn(&run_tests_tests.step); - // Optional Poseidon validation suite (only when Poseidon is enabled) - if (poseidon_enabled) { - const plonky3_validation_tests = b.addTest(.{ - .root_source_file = .{ .cwd_relative = "src/poseidon_plonky3_validation.zig" }, - .optimize = optimize, - .target = target, - }); - plonky3_validation_tests.root_module.addOptions("build_options", options); - if (hashzig_module) |pm| plonky3_validation_tests.root_module.addImport("hash_zig", pm); - const run_plonky3_validation_tests = b.addRunArtifact(plonky3_validation_tests); - test_step.dependOn(&run_plonky3_validation_tests.step); - } } diff --git a/build.zig.zon b/build.zig.zon 
index 83d6442..50ce542 100644 --- a/build.zig.zon +++ b/build.zig.zon @@ -4,9 +4,5 @@ .version = "0.0.9", .paths = .{""}, .dependencies = .{ - .hash_zig = .{ - .url = "https://github.com/blockblaz/hash-zig/archive/2bca6541a933a2bbe4630f41c29f61b3f84ad7e0.tar.gz", - .hash = "hash_zig-1.1.3-POmurOPmCgCkbtpq_c62mZqLuHzVeaLZ2X23fxsoHVrI", - }, }, } diff --git a/src/beacon_tests.zig b/src/beacon_tests.zig index 66260ca..8be0a3d 100644 --- a/src/beacon_tests.zig +++ b/src/beacon_tests.zig @@ -4,9 +4,9 @@ const serialize = libssz.serialize; const deserialize = libssz.deserialize; const hashTreeRoot = libssz.hashTreeRoot; const std = @import("std"); -const build_options = @import("build_options"); const ArrayList = std.ArrayList; const expect = std.testing.expect; +const Sha256 = std.crypto.hash.sha2.Sha256; // Beacon chain Validator struct for compatibility testing const Validator = struct { @@ -54,7 +54,6 @@ test "Validator struct serialization" { } test "Validator struct hash tree root" { - if (build_options.poseidon_enabled) return; const validator = Validator{ .pubkey = [_]u8{0x01} ** 48, .withdrawal_credentials = [_]u8{0x02} ** 32, @@ -67,7 +66,7 @@ test "Validator struct hash tree root" { }; var hash: [32]u8 = undefined; - try hashTreeRoot(Validator, validator, &hash, std.testing.allocator); + try hashTreeRoot(Sha256,Validator, validator, &hash, std.testing.allocator); // Validate against expected hash const expected_validator_hash = [_]u8{ 0x70, 0x68, 0xE5, 0x06, 0xCB, 0xFF, 0xCD, 0x31, 0xBD, 0x2D, 0x13, 0x42, 0x5E, 0x4F, 0xDE, 0x98, 0x6E, 0xF3, 0x5E, 0x6F, 0xB5, 0x0F, 0x35, 0x9D, 0x7A, 0x26, 0xB6, 0x33, 0x2E, 0xE2, 0xCB, 0x94 }; @@ -75,7 +74,7 @@ test "Validator struct hash tree root" { // Hash should be deterministic for the same validator var hash2: [32]u8 = undefined; - try hashTreeRoot(Validator, validator, &hash2, std.testing.allocator); + try hashTreeRoot(Sha256,Validator, validator, &hash2, std.testing.allocator); try expect(std.mem.eql(u8, &hash, &hash2)); 
// Different validator should produce different hash @@ -91,7 +90,7 @@ test "Validator struct hash tree root" { }; var hash3: [32]u8 = undefined; - try hashTreeRoot(Validator, validator2, &hash3, std.testing.allocator); + try hashTreeRoot(Sha256,Validator, validator2, &hash3, std.testing.allocator); try expect(!std.mem.eql(u8, &hash, &hash3)); } @@ -118,13 +117,12 @@ test "Individual Validator serialization and hash" { try expect(std.mem.eql(u8, list.items, &expected_validator_bytes)); // Test hash tree root - if (build_options.poseidon_enabled) return; - var hash: [32]u8 = undefined; - try hashTreeRoot(Validator, validator, &hash, std.testing.allocator); + var root: [32]u8 = undefined; + try hashTreeRoot(Sha256, Validator, validator, &root, std.testing.allocator); // Validate against expected hash const expected_validator_hash = [_]u8{ 0x70, 0x68, 0xE5, 0x06, 0xCB, 0xFF, 0xCD, 0x31, 0xBD, 0x2D, 0x13, 0x42, 0x5E, 0x4F, 0xDE, 0x98, 0x6E, 0xF3, 0x5E, 0x6F, 0xB5, 0x0F, 0x35, 0x9D, 0x7A, 0x26, 0xB6, 0x33, 0x2E, 0xE2, 0xCB, 0x94 }; - try expect(std.mem.eql(u8, &hash, &expected_validator_hash)); + try expect(std.mem.eql(u8, &root, &expected_validator_hash)); } test "List[Validator] serialization and hash tree root" { @@ -193,16 +191,15 @@ test "List[Validator] serialization and hash tree root" { } // Test hash tree root - if (build_options.poseidon_enabled) return; var hash1: [32]u8 = undefined; - try hashTreeRoot(ValidatorList, validator_list, &hash1, std.testing.allocator); + try hashTreeRoot(Sha256, ValidatorList, validator_list, &hash1, std.testing.allocator); // Validate against expected hash const expected_validator_list_hash = [_]u8{ 0x54, 0x80, 0xF8, 0x35, 0xD7, 0x52, 0xF7, 0x27, 0xC8, 0xF1, 0xE9, 0xCC, 0x0F, 0x84, 0x2B, 0x25, 0x76, 0xA5, 0x1A, 0xD2, 0xB7, 0xB5, 0x10, 0xF1, 0xA5, 0x39, 0xF7, 0xD8, 0xD0, 0x87, 0xC3, 0xC2 }; try expect(std.mem.eql(u8, &hash1, &expected_validator_list_hash)); var hash2: [32]u8 = undefined; - try hashTreeRoot(ValidatorList, 
deserialized_list, &hash2, std.testing.allocator); + try hashTreeRoot(Sha256,ValidatorList, deserialized_list, &hash2, std.testing.allocator); // Hash should be the same for original and deserialized lists try expect(std.mem.eql(u8, &hash1, &hash2)); @@ -283,25 +280,24 @@ test "BeamBlockBody with validator array - full cycle" { try expect(orig.withdrawable_epoch == deser.withdrawable_epoch); } - if (build_options.poseidon_enabled) return; // Test hash tree root consistency var hash_original: [32]u8 = undefined; - try hashTreeRoot(BeamBlockBody, beam_block_body, &hash_original, std.testing.allocator); + try hashTreeRoot(Sha256,BeamBlockBody, beam_block_body, &hash_original, std.testing.allocator); // Validate against expected hash const expected_beam_block_body_hash = [_]u8{ 0x34, 0xF2, 0xBC, 0x58, 0xA0, 0xBF, 0x20, 0x72, 0x43, 0xF8, 0xC2, 0x5E, 0x0F, 0x83, 0x5E, 0x36, 0x90, 0x73, 0xD5, 0xAC, 0x97, 0x1E, 0x9A, 0x53, 0x71, 0x14, 0xA0, 0xFD, 0x1C, 0xC8, 0xD8, 0xE4 }; try expect(std.mem.eql(u8, &hash_original, &expected_beam_block_body_hash)); var hash_deserialized: [32]u8 = undefined; - try hashTreeRoot(BeamBlockBody, deserialized_body, &hash_deserialized, std.testing.allocator); + try hashTreeRoot(Sha256,BeamBlockBody, deserialized_body, &hash_deserialized, std.testing.allocator); // Hashes should be identical for original and deserialized data try expect(std.mem.eql(u8, &hash_original, &hash_deserialized)); // Test hash determinism var hash_duplicate: [32]u8 = undefined; - try hashTreeRoot(BeamBlockBody, beam_block_body, &hash_duplicate, std.testing.allocator); + try hashTreeRoot(Sha256,BeamBlockBody, beam_block_body, &hash_duplicate, std.testing.allocator); try expect(std.mem.eql(u8, &hash_original, &hash_duplicate)); } @@ -383,7 +379,6 @@ test "Zeam-style List/Bitlist usage with tree root stability" { try expect(std.mem.eql(u8, state_serialized.items, &expected_zeam_state_bytes)); - if (build_options.poseidon_enabled) return; // Test hash tree root determinism and 
validate against expected hashes var body_hash1: [32]u8 = undefined; @@ -391,10 +386,10 @@ test "Zeam-style List/Bitlist usage with tree root stability" { var state_hash1: [32]u8 = undefined; var state_hash2: [32]u8 = undefined; - try hashTreeRoot(ZeamBeamBlockBody, body, &body_hash1, std.testing.allocator); - try hashTreeRoot(ZeamBeamBlockBody, body, &body_hash2, std.testing.allocator); - try hashTreeRoot(BeamState, state, &state_hash1, std.testing.allocator); - try hashTreeRoot(BeamState, state, &state_hash2, std.testing.allocator); + try hashTreeRoot(Sha256,ZeamBeamBlockBody, body, &body_hash1, std.testing.allocator); + try hashTreeRoot(Sha256,ZeamBeamBlockBody, body, &body_hash2, std.testing.allocator); + try hashTreeRoot(Sha256,BeamState, state, &state_hash1, std.testing.allocator); + try hashTreeRoot(Sha256,BeamState, state, &state_hash2, std.testing.allocator); // Validate against expected hashes const expected_zeam_body_hash = [_]u8{ 0xAA, 0x2C, 0x76, 0x39, 0x96, 0xA6, 0xDD, 0x26, 0x25, 0x13, 0x12, 0x8D, 0xEA, 0xDF, 0xCB, 0x69, 0xF1, 0xEC, 0xEB, 0x60, 0xA8, 0xFF, 0xAC, 0xC7, 0xA7, 0xE4, 0x28, 0x3C, 0x74, 0xAA, 0x6A, 0xE4 }; @@ -458,11 +453,10 @@ test "BeamState with historical roots - comprehensive test" { const expected_comprehensive_beam_state_bytes = [_]u8{ 0x39, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x9C, 0x00, 0x00, 0x00, 0xE8, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 
0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; try expect(std.mem.eql(u8, serialized_data.items, &expected_comprehensive_beam_state_bytes)); - if (build_options.poseidon_enabled) return; // Test hash tree root calculation var original_hash: [32]u8 = undefined; - try hashTreeRoot(BeamState, beam_state, &original_hash, std.testing.allocator); + try hashTreeRoot(Sha256,BeamState, beam_state, &original_hash, std.testing.allocator); // Validate against expected hash const expected_comprehensive_beam_state_hash = [_]u8{ 0xBD, 0x36, 0x59, 0x5E, 0x3B, 0x4A, 0x51, 0x9C, 0xF3, 0x5F, 0x4F, 0x96, 0x88, 0x9E, 0x86, 0x10, 0xFF, 0x45, 0x20, 0x49, 0x15, 0xAE, 0x96, 0x2E, 0xF4, 0x0C, 0x81, 0x6B, 0xF7, 0x45, 0x4A, 0x17 }; @@ -498,7 +492,7 @@ test "BeamState with historical roots - comprehensive test" { // Test hash tree root consistency var deserialized_hash: [32]u8 = undefined; - try hashTreeRoot(BeamState, deserialized_state, &deserialized_hash, std.testing.allocator); + try hashTreeRoot(Sha256,BeamState, deserialized_state, &deserialized_hash, std.testing.allocator); // Verify hash tree roots are identical try expect(std.mem.eql(u8, &original_hash, &deserialized_hash)); @@ -534,11 +528,10 @@ test "BeamState with empty historical roots" { const expected_empty_beam_state_bytes = [_]u8{ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; try expect(std.mem.eql(u8, serialized_data.items, &expected_empty_beam_state_bytes)); - if (build_options.poseidon_enabled) return; // Test hash tree root calculation var original_hash: [32]u8 = undefined; - try hashTreeRoot(SimpleBeamState, beam_state, &original_hash, std.testing.allocator); + try hashTreeRoot(Sha256,SimpleBeamState, beam_state, &original_hash, std.testing.allocator); // Validate against actual hash const expected_empty_beam_state_hash = [_]u8{ 0x58, 0xD2, 
0x2B, 0xA0, 0x04, 0x45, 0xE8, 0xB7, 0x39, 0x5E, 0xC3, 0x93, 0x92, 0x45, 0xC6, 0xF1, 0x5A, 0x29, 0x91, 0xA5, 0x70, 0x3F, 0xC5, 0x05, 0x88, 0x10, 0x57, 0xDE, 0x9D, 0xF3, 0x64, 0x10 }; @@ -559,7 +552,7 @@ test "BeamState with empty historical roots" { // Test hash tree root consistency var deserialized_hash: [32]u8 = undefined; - try hashTreeRoot(SimpleBeamState, deserialized_state, &deserialized_hash, std.testing.allocator); + try hashTreeRoot(Sha256,SimpleBeamState, deserialized_state, &deserialized_hash, std.testing.allocator); // Verify hash tree roots are identical try expect(std.mem.eql(u8, &original_hash, &deserialized_hash)); @@ -603,11 +596,10 @@ test "BeamState with maximum historical roots" { const expected_max_beam_state_bytes_start = [_]u8{ 0x3F, 0x42, 0x0F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00 }; try expect(std.mem.eql(u8, serialized_data.items[0..12], &expected_max_beam_state_bytes_start)); - if (build_options.poseidon_enabled) return; // Test hash tree root calculation var original_hash: [32]u8 = undefined; - try hashTreeRoot(MaxBeamState, beam_state, &original_hash, std.testing.allocator); + try hashTreeRoot(Sha256,MaxBeamState, beam_state, &original_hash, std.testing.allocator); // Validate against actual hash const expected_max_beam_state_hash = [_]u8{ 0x3F, 0xFC, 0x7A, 0xA4, 0x85, 0x21, 0xD4, 0x02, 0x36, 0x46, 0x19, 0x2E, 0x8D, 0x73, 0xBC, 0x11, 0x3D, 0x1D, 0xE7, 0xF4, 0xDE, 0xC4, 0xD9, 0x6E, 0x94, 0x52, 0xD2, 0xCB, 0x95, 0xE3, 0x22, 0x9A }; @@ -633,7 +625,7 @@ test "BeamState with maximum historical roots" { // Test hash tree root consistency var deserialized_hash: [32]u8 = undefined; - try hashTreeRoot(MaxBeamState, deserialized_state, &deserialized_hash, std.testing.allocator); + try hashTreeRoot(Sha256,MaxBeamState, deserialized_state, &deserialized_hash, std.testing.allocator); try expect(std.mem.eql(u8, &original_hash, &deserialized_hash)); } @@ -685,11 +677,10 @@ test "BeamState historical roots access and comparison" { const 
expected_access_beam_state_bytes = [_]u8{ 0x31, 0xD4, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xEF, 0xBE, 0xAD, 0xDE, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 
0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC }; try expect(std.mem.eql(u8, serialized_data.items, &expected_access_beam_state_bytes)); - if (build_options.poseidon_enabled) return; // Test hash tree root calculation var original_hash: [32]u8 = undefined; - try hashTreeRoot(AccessBeamState, beam_state, &original_hash, std.testing.allocator); + try hashTreeRoot(Sha256,AccessBeamState, beam_state, &original_hash, std.testing.allocator); // Validate against expected hash const expected_access_beam_state_hash = [_]u8{ 0x22, 0x3E, 0xCB, 0xDD, 0x62, 0x46, 0x7F, 0x7F, 0x0F, 0xA8, 0x2C, 0x91, 0x54, 0x1F, 0xF4, 0xEA, 0xBF, 0x92, 0xB6, 0xB7, 0x67, 0x57, 0x02, 0x67, 0x16, 0xEF, 0x3A, 0xB0, 0x96, 0x4E, 0x91, 0x9E }; @@ -723,7 +714,7 @@ test "BeamState historical roots access and comparison" { // Test hash tree root consistency var deserialized_hash: [32]u8 = undefined; - try hashTreeRoot(AccessBeamState, deserialized_state, &deserialized_hash, std.testing.allocator); + try hashTreeRoot(Sha256,AccessBeamState, deserialized_state, &deserialized_hash, std.testing.allocator); try expect(std.mem.eql(u8, &original_hash, &deserialized_hash)); } @@ -757,11 +748,10 @@ test "SimpleBeamState with empty historical roots" { const expected_simple_beam_state_bytes = [_]u8{ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; try expect(std.mem.eql(u8, serialized_data.items, &expected_simple_beam_state_bytes)); - if (build_options.poseidon_enabled) return; // Test hash tree root calculation var original_hash: [32]u8 = undefined; - try hashTreeRoot(SimpleBeamState, beam_state, &original_hash, std.testing.allocator); + try hashTreeRoot(Sha256,SimpleBeamState, beam_state, &original_hash, std.testing.allocator); // Validate against actual hash const expected_simple_beam_state_hash = [_]u8{ 0x58, 0xD2, 0x2B, 0xA0, 0x04, 0x45, 0xE8, 0xB7, 0x39, 0x5E, 0xC3, 0x93, 0x92, 0x45, 0xC6, 0xF1, 0x5A, 
0x29, 0x91, 0xA5, 0x70, 0x3F, 0xC5, 0x05, 0x88, 0x10, 0x57, 0xDE, 0x9D, 0xF3, 0x64, 0x10 }; @@ -781,7 +771,7 @@ test "SimpleBeamState with empty historical roots" { // Test hash tree root consistency var deserialized_hash: [32]u8 = undefined; - try hashTreeRoot(SimpleBeamState, deserialized_state, &deserialized_hash, std.testing.allocator); + try hashTreeRoot(Sha256,SimpleBeamState, deserialized_state, &deserialized_hash, std.testing.allocator); // Verify hash tree roots are identical try expect(std.mem.eql(u8, &original_hash, &deserialized_hash)); @@ -793,7 +783,7 @@ test "hashTreeRoot for pointer types" { // Test pointer size .one - SUPPORTED { var value: u32 = 8; - try hashTreeRoot(*u32, &value, &hash, std.testing.allocator); + try hashTreeRoot(Sha256,*u32, &value, &hash, std.testing.allocator); var deserialized: u32 = undefined; try deserialize(u32, &hash, &deserialized, std.testing.allocator); @@ -804,7 +794,7 @@ test "hashTreeRoot for pointer types" { { var values = [4]u8{ 0xAA, 0xBB, 0xCC, 0xDD }; const values_ptr: *[4]u8 = &values; - try hashTreeRoot(*[4]u8, values_ptr, &hash, std.testing.allocator); + try hashTreeRoot(Sha256,*[4]u8, values_ptr, &hash, std.testing.allocator); var deserialized: [4]u8 = undefined; try deserialize([4]u8, &hash, &deserialized, std.testing.allocator); @@ -815,13 +805,13 @@ test "hashTreeRoot for pointer types" { { var values = [4]u8{ 0xAA, 0xBB, 0xCC, 0xDD }; const values_ptr: [*]u8 = &values; - try std.testing.expectError(error.UnSupportedPointerType, hashTreeRoot([*]u8, values_ptr, &hash, std.testing.allocator)); + try std.testing.expectError(error.UnSupportedPointerType, hashTreeRoot(Sha256, [*]u8, values_ptr, &hash, std.testing.allocator)); } // Test pointer size .c - should return error { var values = [4]u8{ 0xAA, 0xBB, 0xCC, 0xDD }; const values_ptr: [*c]u8 = &values; - try std.testing.expectError(error.UnSupportedPointerType, hashTreeRoot([*c]u8, values_ptr, &hash, std.testing.allocator)); + try 
std.testing.expectError(error.UnSupportedPointerType, hashTreeRoot(Sha256, [*c]u8, values_ptr, &hash, std.testing.allocator)); } } diff --git a/src/lib.zig b/src/lib.zig index a367395..5b3d375 100644 --- a/src/lib.zig +++ b/src/lib.zig @@ -6,20 +6,6 @@ pub const utils = @import("./utils.zig"); pub const zeros = @import("./zeros.zig"); const ArrayList = std.ArrayList; const builtin = std.builtin; -const build_options = @import("build_options"); -const sha256 = std.crypto.hash.sha2.Sha256; - -// Configure the hasher based on build options -pub const Hasher = if (build_options.poseidon_enabled) blk: { - const hash_zig = @import("hash_zig"); - const poseidon2 = hash_zig.poseidon2; - const poseidon_wrapper = @import("./poseidon_wrapper.zig"); - const Poseidon2Type = poseidon2.Poseidon2KoalaBear24Plonky3; - // Wrap with SHA256-compatible API - break :blk poseidon_wrapper.PoseidonHasher(Poseidon2Type); -} else sha256; - -const hashes_of_zero = zeros.buildZeroHashes(Hasher, 32, 256); const Allocator = std.mem.Allocator; /// Number of bytes per chunk. 
@@ -516,7 +502,7 @@ pub fn deserialize(comptime T: type, serialized: []const u8, out: *T, allocator: } } -pub fn mixInLength2(root: [32]u8, length: usize, out: *[32]u8) void { +pub fn mixInLength2(comptime Hasher: type, root: [32]u8, length: usize, out: *[32]u8) void { var hasher = Hasher.init(Hasher.Options{}); hasher.update(root[0..]); @@ -526,7 +512,7 @@ pub fn mixInLength2(root: [32]u8, length: usize, out: *[32]u8) void { hasher.final(out[0..]); } -fn mixInLength(root: [32]u8, length: [32]u8, out: *[32]u8) void { +fn mixInLength(comptime Hasher: type, root: [32]u8, length: [32]u8, out: *[32]u8) void { var hasher = Hasher.init(Hasher.Options{}); hasher.update(root[0..]); hasher.update(length[0..]); @@ -534,7 +520,7 @@ fn mixInLength(root: [32]u8, length: [32]u8, out: *[32]u8) void { } test "mixInLength" { - if (build_options.poseidon_enabled) return; + const Sha256 = std.crypto.hash.sha2.Sha256; var root: [32]u8 = undefined; var length: [32]u8 = undefined; var expected: [32]u8 = undefined; @@ -542,12 +528,12 @@ test "mixInLength" { _ = try std.fmt.hexToBytes(root[0..], "2279cf111c15f2d594e7a0055e8735e7409e56ed4250735d6d2f2b0d1bcf8297"); _ = try std.fmt.hexToBytes(length[0..], "deadbeef00000000000000000000000000000000000000000000000000000000"); _ = try std.fmt.hexToBytes(expected[0..], "0b665dda6e4c269730bc4bbe3e990a69d37fa82892bac5fe055ca4f02a98c900"); - mixInLength(root, length, &mixin); + mixInLength(Sha256, root, length, &mixin); try std.testing.expect(std.mem.eql(u8, mixin[0..], expected[0..])); } -fn mixInSelector(root: [32]u8, comptime selector: usize, out: *[32]u8) void { +fn mixInSelector(comptime Hasher: type, root: [32]u8, comptime selector: usize, out: *[32]u8) void { var hasher = Hasher.init(Hasher.Options{}); hasher.update(root[0..]); var tmp = [_]u8{0} ** 32; @@ -557,13 +543,13 @@ fn mixInSelector(root: [32]u8, comptime selector: usize, out: *[32]u8) void { } test "mixInSelector" { - if (build_options.poseidon_enabled) return; + const Sha256 = 
std.crypto.hash.sha2.Sha256; var root: [32]u8 = undefined; var expected: [32]u8 = undefined; var mixin: [32]u8 = undefined; _ = try std.fmt.hexToBytes(root[0..], "2279cf111c15f2d594e7a0055e8735e7409e56ed4250735d6d2f2b0d1bcf8297"); _ = try std.fmt.hexToBytes(expected[0..], "c483cb731afcfe9f2c596698eaca1c4e0dcb4a1136297adef74c31c268966eb5"); - mixInSelector(root, 25, &mixin); + mixInSelector(Sha256, root, 25, &mixin); try std.testing.expect(std.mem.eql(u8, mixin[0..], expected[0..])); } @@ -637,7 +623,10 @@ test "pack string" { } // merkleize recursively calculates the root hash of a Merkle tree. -pub fn merkleize(hasher: type, chunks: []chunk, limit: ?usize, out: *[32]u8) anyerror!void { +pub fn merkleize(comptime Hasher: type, chunks: []chunk, limit: ?usize, out: *[32]u8) anyerror!void { + // Generate zero hashes for this hasher type at comptime + const zero_hashes = comptime zeros.buildZeroHashes(Hasher, 32, 256); + // Calculate the number of chunks to be padded, check the limit if (limit != null and chunks.len > limit.?) { return error.ChunkSizeExceedsLimit; @@ -647,33 +636,30 @@ pub fn merkleize(hasher: type, chunks: []chunk, limit: ?usize, out: *[32]u8) any // Perform the merkelization switch (size) { - 0 => std.mem.copyForwards(u8, out.*[0..], hashes_of_zero[0][0..]), - 1 => std.mem.copyForwards(u8, out.*[0..], (if (chunks.len > 0) chunks[0] else hashes_of_zero[0])[0..]), + 0 => std.mem.copyForwards(u8, out.*[0..], zero_hashes[0][0..]), + 1 => std.mem.copyForwards(u8, out.*[0..], (if (chunks.len > 0) chunks[0] else zero_hashes[0])[0..]), else => { // Merkleize the left side. If the number of chunks // isn't enough to fill the entire width, complete // with zeroes. 
- var digest = hasher.init(hasher.Options{}); + var digest = Hasher.init(Hasher.Options{}); var buf: [32]u8 = undefined; const split = if (size / 2 < chunks.len) size / 2 else chunks.len; - try merkleize(hasher, chunks[0..split], size / 2, &buf); + try merkleize(Hasher, chunks[0..split], size / 2, &buf); digest.update(buf[0..]); // Merkleize the right side. If the number of chunks only // covers the first half, directly input the hashed zero- // filled subtrie. if (size / 2 < chunks.len) { - try merkleize(hasher, chunks[size / 2 ..], size / 2, &buf); + try merkleize(Hasher, chunks[size / 2 ..], size / 2, &buf); digest.update(buf[0..]); } else { // Use depth-based indexing for zero hashes // For a subtree of size/2 leaves, we need the zero hash at depth log2(size/2) - // hashes_of_zero[0] = single zero chunk (depth 0) - // hashes_of_zero[1] = hash of 2 zero chunks (depth 1) - // hashes_of_zero[d] = hash of 2^d zero chunks (depth d) const subtree_size = size / 2; const depth = std.math.log2_int(usize, subtree_size); - digest.update(hashes_of_zero[depth][0..]); + digest.update(zero_hashes[depth][0..]); } digest.final(out); }, @@ -681,35 +667,37 @@ pub fn merkleize(hasher: type, chunks: []chunk, limit: ?usize, out: *[32]u8) any } test "merkleize an empty slice" { + const Sha256 = std.crypto.hash.sha2.Sha256; var list = ArrayList(u8).init(std.testing.allocator); defer list.deinit(); const chunks = &[0][32]u8{}; var out: [32]u8 = undefined; - try merkleize(Hasher, chunks, null, &out); + try merkleize(Sha256, chunks, null, &out); try std.testing.expect(std.mem.eql(u8, out[0..], zero_chunk[0..])); } test "merkleize a string" { + const Sha256 = std.crypto.hash.sha2.Sha256; var list = ArrayList(u8).init(std.testing.allocator); defer list.deinit(); const chunks = try pack([]const u8, "a" ** 100, &list); var out: [32]u8 = undefined; - try merkleize(Hasher, chunks, null, &out); + try merkleize(Sha256, chunks, null, &out); // Build the expected tree const leaf1 = [_]u8{0x61} ** 
32; // "0xaaaaa....aa" 32 times var leaf2: [32]u8 = [_]u8{0x61} ** 4 ++ [_]u8{0} ** 28; var root: [32]u8 = undefined; var internal_left: [32]u8 = undefined; var internal_right: [32]u8 = undefined; - var hasher = Hasher.init(Hasher.Options{}); + var hasher = Sha256.init(Sha256.Options{}); hasher.update(leaf1[0..]); hasher.update(leaf1[0..]); hasher.final(&internal_left); - hasher = Hasher.init(Hasher.Options{}); + hasher = Sha256.init(Sha256.Options{}); hasher.update(leaf1[0..]); hasher.update(leaf2[0..]); hasher.final(&internal_right); - hasher = Hasher.init(Hasher.Options{}); + hasher = Sha256.init(Sha256.Options{}); hasher.update(internal_left[0..]); hasher.update(internal_right[0..]); hasher.final(&root); @@ -718,13 +706,14 @@ test "merkleize a string" { } test "merkleize a boolean" { + const Sha256 = std.crypto.hash.sha2.Sha256; var list = ArrayList(u8).init(std.testing.allocator); defer list.deinit(); var chunks = try pack(bool, false, &list); var expected = [_]u8{0} ** BYTES_PER_CHUNK; var out: [BYTES_PER_CHUNK]u8 = undefined; - try merkleize(Hasher, chunks, null, &out); + try merkleize(Sha256, chunks, null, &out); try std.testing.expect(std.mem.eql(u8, out[0..], expected[0..])); @@ -733,7 +722,7 @@ test "merkleize a boolean" { chunks = try pack(bool, true, &list2); expected[0] = 1; - try merkleize(Hasher, chunks, null, &out); + try merkleize(Sha256, chunks, null, &out); try std.testing.expect(std.mem.eql(u8, out[0..], expected[0..])); } @@ -766,10 +755,10 @@ fn packBits(bits: []const bool, l: *ArrayList(u8)) ![]chunk { return std.mem.bytesAsSlice(chunk, l.items); } -pub fn hashTreeRoot(comptime T: type, value: T, out: *[32]u8, allctr: Allocator) !void { +pub fn hashTreeRoot(comptime Hasher: type, comptime T: type, value: T, out: *[32]u8, allctr: Allocator) !void { // Check if type has its own hashTreeRoot method at compile time if (comptime std.meta.hasFn(T, "hashTreeRoot")) { - return value.hashTreeRoot(out, allctr); + return value.hashTreeRoot(Hasher, out, 
allctr); } const type_info = @typeInfo(T); @@ -803,7 +792,7 @@ pub fn hashTreeRoot(comptime T: type, value: T, out: *[32]u8, allctr: Allocator) defer chunks.deinit(); var tmp: chunk = undefined; for (value) |item| { - try hashTreeRoot(@TypeOf(item), item, &tmp, allctr); + try hashTreeRoot(Hasher, @TypeOf(item), item, &tmp, allctr); try chunks.append(tmp); } try merkleize(Hasher, chunks.items, null, out); @@ -813,7 +802,7 @@ pub fn hashTreeRoot(comptime T: type, value: T, out: *[32]u8, allctr: Allocator) }, .pointer => |ptr| { switch (ptr.size) { - .one => try hashTreeRoot(ptr.child, value.*, out, allctr), + .one => try hashTreeRoot(Hasher, ptr.child, value.*, out, allctr), .slice => { switch (@typeInfo(ptr.child)) { .int => { @@ -822,7 +811,7 @@ pub fn hashTreeRoot(comptime T: type, value: T, out: *[32]u8, allctr: Allocator) const chunks = try pack(T, value, &list); var tmp: chunk = undefined; try merkleize(Hasher, chunks, null, &tmp); - mixInLength2(tmp, value.len, out); + mixInLength2(Hasher, tmp, value.len, out); }, // use bitlist .bool => return error.UnSupportedPointerType, @@ -832,11 +821,11 @@ pub fn hashTreeRoot(comptime T: type, value: T, out: *[32]u8, allctr: Allocator) defer chunks.deinit(); var tmp: chunk = undefined; for (value) |item| { - try hashTreeRoot(@TypeOf(item), item, &tmp, allctr); + try hashTreeRoot(Hasher, @TypeOf(item), item, &tmp, allctr); try chunks.append(tmp); } try merkleize(Hasher, chunks.items, null, &tmp); - mixInLength2(tmp, chunks.items.len, out); + mixInLength2(Hasher, tmp, chunks.items.len, out); }, } }, @@ -848,7 +837,7 @@ pub fn hashTreeRoot(comptime T: type, value: T, out: *[32]u8, allctr: Allocator) defer chunks.deinit(); var tmp: chunk = undefined; inline for (str.fields) |f| { - try hashTreeRoot(f.type, @field(value, f.name), &tmp, allctr); + try hashTreeRoot(Hasher, f.type, @field(value, f.name), &tmp, allctr); try chunks.append(tmp); } try merkleize(Hasher, chunks.items, null, out); @@ -856,10 +845,10 @@ pub fn 
hashTreeRoot(comptime T: type, value: T, out: *[32]u8, allctr: Allocator) // An optional is a union with `None` as first value. .optional => |opt| if (value != null) { var tmp: chunk = undefined; - try hashTreeRoot(opt.child, value.?, &tmp, allctr); - mixInSelector(tmp, 1, out); + try hashTreeRoot(Hasher, opt.child, value.?, &tmp, allctr); + mixInSelector(Hasher, tmp, 1, out); } else { - mixInSelector(zero_chunk, 0, out); + mixInSelector(Hasher, zero_chunk, 0, out); }, .@"union" => |u| { if (u.tag_type == null) { @@ -868,8 +857,8 @@ pub fn hashTreeRoot(comptime T: type, value: T, out: *[32]u8, allctr: Allocator) inline for (u.fields, 0..) |f, index| { if (@intFromEnum(value) == index) { var tmp: chunk = undefined; - try hashTreeRoot(f.type, @field(value, f.name), &tmp, allctr); - mixInSelector(tmp, index, out); + try hashTreeRoot(Hasher, f.type, @field(value, f.name), &tmp, allctr); + mixInSelector(Hasher, tmp, index, out); } } }, diff --git a/src/poseidon_plonky3_validation.zig b/src/poseidon_plonky3_validation.zig deleted file mode 100644 index e3e4064..0000000 --- a/src/poseidon_plonky3_validation.zig +++ /dev/null @@ -1,116 +0,0 @@ -//! Cross-validation: SSZ Poseidon2-24 wrapper vs Plonky3 reference outputs -//! -//! This test verifies that the SSZ Poseidon2 wrapper produces IDENTICAL outputs -//! to Plonky3's reference implementation for the same 64-byte inputs. 
- -const std = @import("std"); -const build_options = @import("build_options"); - -test "SSZ Poseidon2-24 matches Plonky3 reference outputs" { - if (!build_options.poseidon_enabled) return; - - const hash_zig = @import("hash_zig"); - const poseidon_wrapper = @import("./poseidon_wrapper.zig"); - const Hasher = poseidon_wrapper.PoseidonHasher(hash_zig.poseidon2.Poseidon2KoalaBear24Plonky3); - - // Test 1: All zeros (64 bytes) - { - var hasher = Hasher.init(.{}); - const input = [_]u8{0x00} ** 64; - hasher.update(&input); - - var output: [32]u8 = undefined; - hasher.final(&output); - - const expected = [_]u8{ 0xe4, 0xcb, 0xc9, 0x51, 0xcc, 0xd0, 0xf9, 0x07, 0xe1, 0xca, 0x89, 0x29, 0xc0, 0xa8, 0x70, 0x76, 0xf7, 0x8d, 0x75, 0x7a, 0xda, 0x87, 0xd4, 0x35, 0xd3, 0x86, 0xcc, 0x62, 0xd0, 0x64, 0x5a, 0x13 }; - try std.testing.expectEqualSlices(u8, &expected, &output); - } - - // Test 2: All 0x01 bytes - { - var hasher = Hasher.init(.{}); - const input = [_]u8{0x01} ** 64; - hasher.update(&input); - - var output: [32]u8 = undefined; - hasher.final(&output); - - const expected = [_]u8{ 0xb3, 0x16, 0xc9, 0x34, 0x81, 0x0a, 0x37, 0x73, 0x93, 0x89, 0x61, 0x7a, 0x5e, 0x9d, 0xc8, 0x6f, 0x75, 0x28, 0xd4, 0x27, 0x22, 0x8f, 0xf3, 0x57, 0x9d, 0xfb, 0xff, 0x5c, 0xef, 0x08, 0x1f, 0x00 }; - try std.testing.expectEqualSlices(u8, &expected, &output); - } - - // Test 3: All 0x42 bytes - { - var hasher = Hasher.init(.{}); - const input = [_]u8{0x42} ** 64; - hasher.update(&input); - - var output: [32]u8 = undefined; - hasher.final(&output); - - const expected = [_]u8{ 0x78, 0xae, 0xf5, 0x68, 0xa5, 0x4c, 0xf6, 0x59, 0x2f, 0x82, 0x6d, 0x1e, 0x5f, 0x8f, 0x5e, 0x68, 0x95, 0x94, 0xc6, 0x09, 0x25, 0x87, 0xce, 0x6d, 0x16, 0xd2, 0xb2, 0x21, 0xdb, 0x21, 0x3c, 0x1c }; - try std.testing.expectEqualSlices(u8, &expected, &output); - } - - // Test 4: Sequential bytes (0..63) - { - var hasher = Hasher.init(.{}); - var input: [64]u8 = undefined; - for (0..64) |i| { - input[i] = @intCast(i); - } - 
hasher.update(&input); - - var output: [32]u8 = undefined; - hasher.final(&output); - - const expected = [_]u8{ 0x29, 0x43, 0x5f, 0x44, 0xc0, 0xab, 0xbb, 0x1e, 0x3b, 0x42, 0x73, 0x2c, 0xfb, 0xac, 0x95, 0x67, 0xb1, 0xa6, 0x4b, 0x6d, 0xb9, 0x51, 0x6a, 0x23, 0xdd, 0x01, 0x03, 0x1d, 0x15, 0xf4, 0x3a, 0x63 }; - try std.testing.expectEqualSlices(u8, &expected, &output); - } - - // Test 5: SSZ pattern - hash two 32-byte nodes (0xAA || 0xBB) - { - var hasher = Hasher.init(.{}); - const left_node = [_]u8{0xAA} ** 32; - const right_node = [_]u8{0xBB} ** 32; - - hasher.update(&left_node); - hasher.update(&right_node); - - var output: [32]u8 = undefined; - hasher.final(&output); - - const expected = [_]u8{ 0xec, 0x3e, 0x77, 0x40, 0x7c, 0x50, 0xf7, 0x7a, 0x63, 0x98, 0xdb, 0x56, 0x94, 0x82, 0x6e, 0x21, 0xfb, 0xb8, 0x7f, 0x29, 0x92, 0x59, 0x3e, 0x59, 0x6c, 0xc9, 0x37, 0x7a, 0x50, 0x54, 0xdf, 0x56 }; - try std.testing.expectEqualSlices(u8, &expected, &output); - } - - // Test 6: Last byte boundary (63 bytes 0xFF, 1 byte 0x01) - { - var hasher = Hasher.init(.{}); - var input: [64]u8 = undefined; - @memset(input[0..63], 0xFF); - input[63] = 0x01; - hasher.update(&input); - - var output: [32]u8 = undefined; - hasher.final(&output); - - const expected = [_]u8{ 0xd2, 0xe5, 0x8c, 0x51, 0x39, 0xb5, 0x91, 0x64, 0xd2, 0xdb, 0x26, 0x49, 0x32, 0x50, 0x7d, 0x4e, 0x6d, 0xac, 0xef, 0x30, 0x76, 0x83, 0x12, 0x67, 0x4a, 0x9c, 0x70, 0x35, 0x87, 0xdf, 0xa9, 0x64 }; - try std.testing.expectEqualSlices(u8, &expected, &output); - } - - // Test 7: Last byte boundary variant (63 bytes 0xFF, 1 byte 0x02) - { - var hasher = Hasher.init(.{}); - var input: [64]u8 = undefined; - @memset(input[0..63], 0xFF); - input[63] = 0x02; - hasher.update(&input); - - var output: [32]u8 = undefined; - hasher.final(&output); - - const expected = [_]u8{ 0xc7, 0xed, 0x40, 0x1c, 0x2c, 0x03, 0x7e, 0x29, 0x3d, 0xb7, 0x76, 0x3f, 0xf2, 0xa7, 0x49, 0x39, 0xec, 0x47, 0x52, 0x3e, 0x5c, 0xeb, 0xad, 0x34, 0xe7, 0x4b, 0x00, 0x74, 0xf5, 
0x01, 0xd4, 0x43 }; - try std.testing.expectEqualSlices(u8, &expected, &output); - } -} diff --git a/src/poseidon_wrapper.zig b/src/poseidon_wrapper.zig deleted file mode 100644 index bd5dd7c..0000000 --- a/src/poseidon_wrapper.zig +++ /dev/null @@ -1,190 +0,0 @@ -//! Provides a SHA256-compatible API wrapper for Poseidon2 hash function. -//! This allows Poseidon2 to be used as a drop-in replacement for SHA256 -//! in merkleization and hash tree root operations. -//! -//! IMPORTANT: This is a specialized wrapper for SSZ merkleization, which always -//! provides exactly 64 bytes (two 32-byte nodes). It is NOT a general-purpose -//! hash function: it enforces the fixed 64-byte input length and intentionally -//! does not implement any padding scheme. - -const std = @import("std"); - -/// Creates a hasher type that wraps a Poseidon2 instance with SHA256-like API -pub fn PoseidonHasher(comptime Poseidon2Type: type) type { - // SSZ compression in this codebase is always: - // H: {0,1}^512 -> {0,1}^256 - // i.e. exactly 64 bytes in, 32 bytes out. - const BUFFER_SIZE = 64; - - // Poseidon2-24 state width. 
- const WIDTH = 24; - - // Compile-time safety: verify Poseidon2Type has the required interface - comptime { - if (!@hasDecl(Poseidon2Type, "Field")) { - @compileError("Poseidon2Type must have a 'Field' declaration"); - } - if (!@hasDecl(Poseidon2Type, "permutation")) { - @compileError("Poseidon2Type must have a 'permutation' function"); - } - if (!@hasDecl(Poseidon2Type, "WIDTH")) { - @compileError("Poseidon2Type must expose a WIDTH constant"); - } - if (Poseidon2Type.WIDTH != WIDTH) { - @compileError(std.fmt.comptimePrint( - "PoseidonHasher requires width-{d} Poseidon2, got width-{d}", - .{ WIDTH, Poseidon2Type.WIDTH }, - )); - } - } - - // We encode 64 bytes as 22 limbs of 24 bits each (little-endian within each limb), - // which are always < 2^24 < p (KoalaBear prime), avoiding lossy modular reduction: - // 64 bytes = 21*3 + 1 => 22 limbs, fits in a single width-24 permutation. - const LIMBS = 22; - - const FIELD_ELEM_SIZE = 4; // u32 = 4 bytes - const OUTPUT_FIELD_ELEMS = 8; // 8 u32s = 32 bytes - - return struct { - const Self = @This(); - - // Accumulated input bytes - buffer: [BUFFER_SIZE]u8, - buffer_len: usize, - - /// Options struct for compatibility with std.crypto.hash API - pub const Options = struct {}; - - /// Initialize a new hasher instance - pub fn init(_: Options) Self { - return .{ - .buffer = undefined, - .buffer_len = 0, - }; - } - - /// Update the hasher with new data - /// Note: This accumulates data. SSZ compression requires exactly 64 bytes, - /// so we buffer until we have enough data. 
- pub fn update(self: *Self, data: []const u8) void { - // Enforce the 64-byte limit explicitly - std.debug.assert(self.buffer_len + data.len <= BUFFER_SIZE); - - // Copy data into buffer - @memcpy(self.buffer[self.buffer_len..][0..data.len], data); - self.buffer_len += data.len; - } - - /// Finalize the hash and write the result to out - pub fn final(self: *Self, out: []u8) void { - std.debug.assert(out.len == 32); - // Enforce exact length: SSZ internal nodes and mix-in-length always pass 64 bytes. - std.debug.assert(self.buffer_len == BUFFER_SIZE); - - // Byte -> 24-bit limb packing (injective for fixed 64-byte inputs). - var limbs: [LIMBS]u32 = undefined; - for (0..(LIMBS - 1)) |i| { - const j = i * 3; - limbs[i] = @as(u32, self.buffer[j]) | - (@as(u32, self.buffer[j + 1]) << 8) | - (@as(u32, self.buffer[j + 2]) << 16); - } - limbs[LIMBS - 1] = @as(u32, self.buffer[63]); - - // Build Poseidon2 state: 22 limbs + 2 zero lanes. - var state: [WIDTH]Poseidon2Type.Field = undefined; - for (0..LIMBS) |i| { - state[i] = Poseidon2Type.Field.fromU32(limbs[i]); - } - state[22] = Poseidon2Type.Field.zero; - state[23] = Poseidon2Type.Field.zero; - - // TruncatedPermutation semantics (no feed-forward): permute, then squeeze. - Poseidon2Type.permutation(state[0..]); - - // Squeeze first 8 lanes as 32 bytes, little-endian u32 per lane. - for (0..OUTPUT_FIELD_ELEMS) |i| { - const v = state[i].toU32(); - std.mem.writeInt(u32, out[i * FIELD_ELEM_SIZE ..][0..FIELD_ELEM_SIZE], v, .little); - } - - // Reset buffer for potential reuse. - self.buffer_len = 0; - } - - /// Convenience helper used by some generic code (e.g. zero-hash builders). - pub fn finalResult(self: *Self) [32]u8 { - var out: [32]u8 = undefined; - self.final(out[0..]); - return out; - } - }; -} - -test "PoseidonHasher basic API" { - // This test just verifies the API compiles and runs. 
- const hash_zig = @import("hash_zig"); - const Hasher = PoseidonHasher(hash_zig.poseidon2.Poseidon2KoalaBear24Plonky3); - - var hasher = Hasher.init(.{}); - const data = [_]u8{0x01} ** 64; - hasher.update(data[0..]); - - var output: [32]u8 = undefined; - hasher.final(output[0..]); - - // Just verify we got some output (not all zeros) - var has_nonzero = false; - for (output) |byte| { - if (byte != 0) { - has_nonzero = true; - break; - } - } - try std.testing.expect(has_nonzero); -} - -test "PoseidonHasher deterministic" { - // Verify same input produces same output - const hash_zig = @import("hash_zig"); - const Hasher = PoseidonHasher(hash_zig.poseidon2.Poseidon2KoalaBear24Plonky3); - - var hasher1 = Hasher.init(.{}); - var hasher2 = Hasher.init(.{}); - - const data = [_]u8{0x42} ** 64; - hasher1.update(data[0..]); - hasher2.update(data[0..]); - - var output1: [32]u8 = undefined; - var output2: [32]u8 = undefined; - hasher1.final(output1[0..]); - hasher2.final(output2[0..]); - - try std.testing.expectEqualSlices(u8, &output1, &output2); -} - -test "PoseidonHasher different inputs produce different outputs" { - // Verify different inputs produce different outputs - const hash_zig = @import("hash_zig"); - const Hasher = PoseidonHasher(hash_zig.poseidon2.Poseidon2KoalaBear24Plonky3); - - var hasher1 = Hasher.init(.{}); - var hasher2 = Hasher.init(.{}); - - const data1 = [_]u8{0x01} ** 64; - const data2 = [_]u8{0x02} ** 64; - - hasher1.update(data1[0..]); - hasher2.update(data2[0..]); - - var output1: [32]u8 = undefined; - var output2: [32]u8 = undefined; - hasher1.final(output1[0..]); - hasher2.final(output2[0..]); - - // Verify outputs are different - const are_equal = std.mem.eql(u8, &output1, &output2); - try std.testing.expect(!are_equal); -} diff --git a/src/tests.zig b/src/tests.zig index a600752..c95d387 100644 --- a/src/tests.zig +++ b/src/tests.zig @@ -7,12 +7,12 @@ const chunkCount = libssz.chunkCount; const hashTreeRoot = libssz.hashTreeRoot; const 
isFixedSizeObject = libssz.isFixedSizeObject; const std = @import("std"); -const build_options = @import("build_options"); const ArrayList = std.ArrayList; const expect = std.testing.expect; const expectError = std.testing.expectError; -const sha256 = std.crypto.hash.sha2.Sha256; -const hashes_of_zero = libssz.zeros.hashes_of_zero; +const Sha256 = std.crypto.hash.sha2.Sha256; +const zeros = @import("zeros.zig"); +const hashes_of_zero = zeros.hashes_of_zero; test "serializes uint8" { const data: u8 = 0x55; @@ -530,27 +530,24 @@ const d_bits = bytesToBits(16, d_bytes); const e_bits = bytesToBits(16, e_bytes); test "calculate the root hash of a boolean" { - // SHA-specific expected vectors; skip when Poseidon is enabled. - if (build_options.poseidon_enabled) return; var expected = [_]u8{1} ++ [_]u8{0} ** 31; var hashed: [32]u8 = undefined; - try hashTreeRoot(bool, true, &hashed, std.testing.allocator); + try hashTreeRoot(Sha256,bool, true, &hashed, std.testing.allocator); try expect(std.mem.eql(u8, hashed[0..], expected[0..])); expected = hashes_of_zero[0]; - try hashTreeRoot(bool, false, &hashed, std.testing.allocator); + try hashTreeRoot(Sha256,bool, false, &hashed, std.testing.allocator); try expect(std.mem.eql(u8, hashed[0..], expected[0..])); } test "calculate root hash of an array of two Bitvector[128]" { - if (build_options.poseidon_enabled) return; const deserialized: [2][128]bool = [2][128]bool{ a_bits, b_bits }; var hashed: [32]u8 = undefined; - try hashTreeRoot(@TypeOf(deserialized), deserialized, &hashed, std.testing.allocator); + try hashTreeRoot(Sha256,@TypeOf(deserialized), deserialized, &hashed, std.testing.allocator); var expected: [32]u8 = undefined; const expected_preimage = a_bytes ++ empty_bytes ++ b_bytes ++ empty_bytes; - sha256.hash(expected_preimage[0..], &expected, sha256.Options{}); + Sha256.hash(expected_preimage[0..], &expected, Sha256.Options{}); try expect(std.mem.eql(u8, hashed[0..], expected[0..])); } @@ -558,23 +555,22 @@ test 
"calculate root hash of an array of two Bitvector[128]" { test "calculate the root hash of an array of integers" { var expected = [_]u8{ 0xef, 0xbe, 0xad, 0xde, 0xfe, 0xca, 0xfe, 0xca } ++ [_]u8{0} ** 24; var hashed: [32]u8 = undefined; - try hashTreeRoot([2]u32, [_]u32{ 0xdeadbeef, 0xcafecafe }, &hashed, std.testing.allocator); + try hashTreeRoot(Sha256,[2]u32, [_]u32{ 0xdeadbeef, 0xcafecafe }, &hashed, std.testing.allocator); try expect(std.mem.eql(u8, hashed[0..], expected[0..])); } test "calculate root hash of an array of three Bitvector[128]" { - if (build_options.poseidon_enabled) return; const deserialized: [3][128]bool = [3][128]bool{ a_bits, b_bits, c_bits }; var hashed: [32]u8 = undefined; - try hashTreeRoot(@TypeOf(deserialized), deserialized, &hashed, std.testing.allocator); + try hashTreeRoot(Sha256,@TypeOf(deserialized), deserialized, &hashed, std.testing.allocator); var left: [32]u8 = undefined; var expected: [32]u8 = undefined; const preimg1 = a_bytes ++ empty_bytes ++ b_bytes ++ empty_bytes; const preimg2 = c_bytes ++ empty_bytes ** 3; - sha256.hash(preimg1[0..], &left, sha256.Options{}); - sha256.hash(preimg2[0..], &expected, sha256.Options{}); - var digest = sha256.init(sha256.Options{}); + Sha256.hash(preimg1[0..], &left, Sha256.Options{}); + Sha256.hash(preimg2[0..], &expected, Sha256.Options{}); + var digest = Sha256.init(Sha256.Options{}); digest.update(left[0..]); digest.update(expected[0..]); digest.final(&expected); @@ -583,10 +579,9 @@ test "calculate root hash of an array of three Bitvector[128]" { } test "calculate the root hash of an array of five Bitvector[128]" { - if (build_options.poseidon_enabled) return; const deserialized = [5][128]bool{ a_bits, b_bits, c_bits, d_bits, e_bits }; var hashed: [32]u8 = undefined; - try hashTreeRoot(@TypeOf(deserialized), deserialized, &hashed, std.testing.allocator); + try hashTreeRoot(Sha256,@TypeOf(deserialized), deserialized, &hashed, std.testing.allocator); var internal_nodes: [64]u8 = 
undefined; var left: [32]u8 = undefined; @@ -596,21 +591,21 @@ test "calculate the root hash of an array of five Bitvector[128]" { const preimg3 = e_bytes ++ empty_bytes ** 3; const preimg4 = empty_bytes ** 4; - sha256.hash(preimg1[0..], &left, sha256.Options{}); - sha256.hash(preimg2[0..], internal_nodes[0..32], sha256.Options{}); - var digest = sha256.init(sha256.Options{}); + Sha256.hash(preimg1[0..], &left, Sha256.Options{}); + Sha256.hash(preimg2[0..], internal_nodes[0..32], Sha256.Options{}); + var digest = Sha256.init(Sha256.Options{}); digest.update(left[0..]); digest.update(internal_nodes[0..32]); digest.final(internal_nodes[0..32]); - sha256.hash(preimg3[0..], &left, sha256.Options{}); - sha256.hash(preimg4[0..], internal_nodes[32..], sha256.Options{}); - digest = sha256.init(sha256.Options{}); + Sha256.hash(preimg3[0..], &left, Sha256.Options{}); + Sha256.hash(preimg4[0..], internal_nodes[32..], Sha256.Options{}); + digest = Sha256.init(Sha256.Options{}); digest.update(left[0..]); digest.update(internal_nodes[32..]); digest.final(internal_nodes[32..]); - sha256.hash(internal_nodes[0..], &expected, sha256.Options{}); + Sha256.hash(internal_nodes[0..], &expected, Sha256.Options{}); try expect(std.mem.eql(u8, hashed[0..], expected[0..])); } @@ -622,7 +617,6 @@ const Fork = struct { }; test "calculate the root hash of a structure" { - if (build_options.poseidon_enabled) return; var hashed: [32]u8 = undefined; const fork = Fork{ .previous_version = [_]u8{ 0x9c, 0xe2, 0x5d, 0x26 }, @@ -631,12 +625,11 @@ test "calculate the root hash of a structure" { }; var expected: [32]u8 = undefined; _ = try std.fmt.hexToBytes(expected[0..], "58316a908701d3660123f0b8cb7839abdd961f71d92993d34e4f480fbec687d9"); - try hashTreeRoot(Fork, fork, &hashed, std.testing.allocator); + try hashTreeRoot(Sha256,Fork, fork, &hashed, std.testing.allocator); try expect(std.mem.eql(u8, hashed[0..], expected[0..])); } test "calculate the root hash of an Optional" { - if 
(build_options.poseidon_enabled) return; var hashed: [32]u8 = undefined; var payload: [64]u8 = undefined; const v: ?u32 = null; @@ -644,18 +637,17 @@ test "calculate the root hash of an Optional" { var expected: [32]u8 = undefined; _ = try std.fmt.hexToBytes(payload[0..], "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"); - sha256.hash(payload[0..], expected[0..], sha256.Options{}); - try hashTreeRoot(?u32, v, &hashed, std.testing.allocator); + Sha256.hash(payload[0..], expected[0..], Sha256.Options{}); + try hashTreeRoot(Sha256,?u32, v, &hashed, std.testing.allocator); try expect(std.mem.eql(u8, hashed[0..], expected[0..])); _ = try std.fmt.hexToBytes(payload[0..], "efbeadde000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000"); - sha256.hash(payload[0..], expected[0..], sha256.Options{}); - try hashTreeRoot(?u32, u, &hashed, std.testing.allocator); + Sha256.hash(payload[0..], expected[0..], Sha256.Options{}); + try hashTreeRoot(Sha256,?u32, u, &hashed, std.testing.allocator); try expect(std.mem.eql(u8, hashed[0..], expected[0..])); } test "calculate the root hash of an union" { - if (build_options.poseidon_enabled) return; const Payload = union(enum) { int: u64, boolean: bool, @@ -664,14 +656,14 @@ test "calculate the root hash of an union" { var payload: [64]u8 = undefined; _ = try std.fmt.hexToBytes(payload[0..], "d2040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"); var exp1: [32]u8 = undefined; - sha256.hash(payload[0..], exp1[0..], sha256.Options{}); - try hashTreeRoot(Payload, Payload{ .int = 1234 }, &out, std.testing.allocator); + Sha256.hash(payload[0..], exp1[0..], Sha256.Options{}); + try hashTreeRoot(Sha256,Payload, Payload{ .int = 1234 }, &out, std.testing.allocator); try expect(std.mem.eql(u8, out[0..], exp1[0..])); var exp2: 
[32]u8 = undefined; _ = try std.fmt.hexToBytes(payload[0..], "01000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000"); - sha256.hash(payload[0..], exp2[0..], sha256.Options{}); - try hashTreeRoot(Payload, Payload{ .boolean = true }, &out, std.testing.allocator); + Sha256.hash(payload[0..], exp2[0..], Sha256.Options{}); + try hashTreeRoot(Sha256,Payload, Payload{ .boolean = true }, &out, std.testing.allocator); try expect(std.mem.eql(u8, out[0..], exp2[0..])); } @@ -928,7 +920,6 @@ test "structs with nested fixed/variable size u8 array" { } test "slice hashtree root composite type" { - if (build_options.poseidon_enabled) return; const Root = [32]u8; const RootsList = []Root; const test_root = [_]u8{23} ** 32; @@ -936,7 +927,7 @@ test "slice hashtree root composite type" { var roots_list = [_]Root{test_root}; var hash_root: [32]u8 = undefined; - try hashTreeRoot( + try hashTreeRoot(Sha256, RootsList, &roots_list, &hash_root, @@ -948,13 +939,12 @@ test "slice hashtree root composite type" { } test "slice hashtree root simple type" { - if (build_options.poseidon_enabled) return; const DynamicRoot = []u8; // merkelizes as List[u8,33] as dynamic data length is mixed in as bounded type var test_root = [_]u8{23} ** 33; var hash_root: [32]u8 = undefined; - try hashTreeRoot( + try hashTreeRoot(Sha256, DynamicRoot, &test_root, &hash_root, @@ -966,7 +956,6 @@ test "slice hashtree root simple type" { } test "List tree root calculation" { - if (build_options.poseidon_enabled) return; const ListU64 = utils.List(u64, 1024); var empty_list = try ListU64.init(std.testing.allocator); @@ -981,8 +970,8 @@ test "List tree root calculation" { var empty_hash: [32]u8 = undefined; var filled_hash: [32]u8 = undefined; - try hashTreeRoot(ListU64, empty_list, &empty_hash, std.testing.allocator); - try hashTreeRoot(ListU64, list_with_items, &filled_hash, std.testing.allocator); + try hashTreeRoot(Sha256,ListU64, 
empty_list, &empty_hash, std.testing.allocator); + try hashTreeRoot(Sha256,ListU64, list_with_items, &filled_hash, std.testing.allocator); try expect(std.mem.eql(u8, &filled_hash, &list_with_items_expected)); try expect(!std.mem.eql(u8, &empty_hash, &filled_hash)); @@ -994,7 +983,7 @@ test "List tree root calculation" { try same_content_list.append(456); var same_content_hash: [32]u8 = undefined; - try hashTreeRoot(ListU64, same_content_list, &same_content_hash, std.testing.allocator); + try hashTreeRoot(Sha256,ListU64, same_content_list, &same_content_hash, std.testing.allocator); try expect(std.mem.eql(u8, &filled_hash, &same_content_hash)); } @@ -1014,8 +1003,8 @@ test "Bitlist tree root calculation" { var empty_hash: [32]u8 = undefined; var filled_hash: [32]u8 = undefined; - try hashTreeRoot(TestBitlist, empty_bitlist, &empty_hash, std.testing.allocator); - try hashTreeRoot(TestBitlist, filled_bitlist, &filled_hash, std.testing.allocator); + try hashTreeRoot(Sha256,TestBitlist, empty_bitlist, &empty_hash, std.testing.allocator); + try hashTreeRoot(Sha256,TestBitlist, filled_bitlist, &filled_hash, std.testing.allocator); try expect(!std.mem.eql(u8, &empty_hash, &filled_hash)); @@ -1027,7 +1016,7 @@ test "Bitlist tree root calculation" { try same_content_bitlist.append(true); var same_content_hash: [32]u8 = undefined; - try hashTreeRoot(TestBitlist, same_content_bitlist, &same_content_hash, std.testing.allocator); + try hashTreeRoot(Sha256,TestBitlist, same_content_bitlist, &same_content_hash, std.testing.allocator); try expect(std.mem.eql(u8, &filled_hash, &same_content_hash)); } @@ -1041,7 +1030,7 @@ test "List of composite types tree root" { try pastry_list.append(Pastry{ .name = "muffin", .weight = 30 }); var hash1: [32]u8 = undefined; - try hashTreeRoot(ListOfPastry, pastry_list, &hash1, std.testing.allocator); + try hashTreeRoot(Sha256,ListOfPastry, pastry_list, &hash1, std.testing.allocator); var pastry_list2 = try ListOfPastry.init(std.testing.allocator); 
defer pastry_list2.deinit(); @@ -1049,13 +1038,13 @@ test "List of composite types tree root" { try pastry_list2.append(Pastry{ .name = "muffin", .weight = 30 }); var hash2: [32]u8 = undefined; - try hashTreeRoot(ListOfPastry, pastry_list2, &hash2, std.testing.allocator); + try hashTreeRoot(Sha256,ListOfPastry, pastry_list2, &hash2, std.testing.allocator); try expect(std.mem.eql(u8, &hash1, &hash2)); try pastry_list2.append(Pastry{ .name = "bagel", .weight = 25 }); var hash3: [32]u8 = undefined; - try hashTreeRoot(ListOfPastry, pastry_list2, &hash3, std.testing.allocator); + try hashTreeRoot(Sha256,ListOfPastry, pastry_list2, &hash3, std.testing.allocator); try expect(!std.mem.eql(u8, &hash1, &hash3)); } @@ -1287,13 +1276,12 @@ test "serialize max/min integer values" { } test "Empty List hash tree root" { - if (build_options.poseidon_enabled) return; const ListU32 = utils.List(u32, 100); var empty_list = try ListU32.init(std.testing.allocator); defer empty_list.deinit(); var hash: [32]u8 = undefined; - try hashTreeRoot(ListU32, empty_list, &hash, std.testing.allocator); + try hashTreeRoot(Sha256,ListU32, empty_list, &hash, std.testing.allocator); // Updated to correct SSZ-compliant hash that uses max capacity for merkleization const zig_expected = [_]u8{ @@ -1306,13 +1294,12 @@ test "Empty List hash tree root" { } test "Empty BitList(<=256) hash tree root" { - if (build_options.poseidon_enabled) return; const BitListLen100 = utils.Bitlist(100); var empty_list = try BitListLen100.init(std.testing.allocator); defer empty_list.deinit(); var hash: [32]u8 = undefined; - try hashTreeRoot(BitListLen100, empty_list, &hash, std.testing.allocator); + try hashTreeRoot(Sha256,BitListLen100, empty_list, &hash, std.testing.allocator); const zig_expected = [_]u8{ 0xf5, 0xa5, 0xfd, 0x42, 0xd1, 0x6a, 0x20, 0x30, @@ -1324,13 +1311,12 @@ test "Empty BitList(<=256) hash tree root" { } test "Empty BitList (>256) hash tree root" { - if (build_options.poseidon_enabled) return; const 
BitListLen100 = utils.Bitlist(2570); var empty_list = try BitListLen100.init(std.testing.allocator); defer empty_list.deinit(); var hash: [32]u8 = undefined; - try hashTreeRoot(BitListLen100, empty_list, &hash, std.testing.allocator); + try hashTreeRoot(Sha256,BitListLen100, empty_list, &hash, std.testing.allocator); const zig_expected = [_]u8{ 0x79, 0x29, 0x30, 0xbb, 0xd5, 0xba, 0xac, 0x43, @@ -1342,7 +1328,6 @@ test "Empty BitList (>256) hash tree root" { } test "List at maximum capacity" { - if (build_options.poseidon_enabled) return; const ListU8 = utils.List(u8, 4); var full_list = try ListU8.init(std.testing.allocator); defer full_list.deinit(); @@ -1358,7 +1343,7 @@ test "List at maximum capacity" { // Test hash tree root at capacity var hash: [32]u8 = undefined; - try hashTreeRoot(ListU8, full_list, &hash, std.testing.allocator); + try hashTreeRoot(Sha256,ListU8, full_list, &hash, std.testing.allocator); // Python reference: List[uint8, 4] with [1,2,3,4] const expected = [_]u8{ @@ -1371,12 +1356,10 @@ test "List at maximum capacity" { } test "Array hash tree root" { - // SHA-specific expected vectors; skip when Poseidon is enabled. 
- if (build_options.poseidon_enabled) return; const data: [4]u32 = .{ 1, 2, 3, 4 }; var hash: [32]u8 = undefined; - try hashTreeRoot([4]u32, data, &hash, std.testing.allocator); + try hashTreeRoot(Sha256,[4]u32, data, &hash, std.testing.allocator); // Python reference: Vector[uint32, 4] with [1,2,3,4] // For basic types packed in one chunk, hash is the serialized data @@ -1412,11 +1395,10 @@ test "Large Bitvector serialization and hash" { try expect(list.items[32] & 0x01 == 0x01); // bit 256 -> LSB of byte 32 try expect(list.items[63] & 0x80 == 0x80); // bit 511 -> MSB of byte 63 - if (build_options.poseidon_enabled) return; // Test hash tree root var hash: [32]u8 = undefined; - try hashTreeRoot(LargeBitvec, data, &hash, std.testing.allocator); + try hashTreeRoot(Sha256,LargeBitvec, data, &hash, std.testing.allocator); const expected = [_]u8{ 0x1d, 0x83, 0x09, 0x11, 0x4a, 0xfe, 0xf7, 0x14, 0x89, 0xbe, 0x68, 0xd4, 0x5e, 0x18, 0xc3, 0x39, @@ -1427,7 +1409,6 @@ test "Large Bitvector serialization and hash" { } test "Bitlist edge cases" { - if (build_options.poseidon_enabled) return; const TestBitlist = utils.Bitlist(100); // All false @@ -1438,7 +1419,7 @@ test "Bitlist edge cases" { } var hash1: [32]u8 = undefined; - try hashTreeRoot(TestBitlist, all_false, &hash1, std.testing.allocator); + try hashTreeRoot(Sha256,TestBitlist, all_false, &hash1, std.testing.allocator); const expected_false = [_]u8{ 0x02, 0xc8, 0xc1, 0x5f, 0xed, 0x3f, 0x1b, 0x86, @@ -1456,7 +1437,7 @@ test "Bitlist edge cases" { } var hash2: [32]u8 = undefined; - try hashTreeRoot(TestBitlist, all_true, &hash2, std.testing.allocator); + try hashTreeRoot(Sha256,TestBitlist, all_true, &hash2, std.testing.allocator); // Python reference: Bitlist[100] with 50 true bits const expected_true = [_]u8{ @@ -1469,7 +1450,6 @@ test "Bitlist edge cases" { } test "Bitlist trailing zeros optimization" { - if (build_options.poseidon_enabled) return; const TestBitlist = utils.Bitlist(256); // Test case 1: 8 false bits 
- should result in one 0x00 byte after pack_bits @@ -1480,7 +1460,7 @@ test "Bitlist trailing zeros optimization" { } var hash1: [32]u8 = undefined; - try hashTreeRoot(TestBitlist, eight_false, &hash1, std.testing.allocator); + try hashTreeRoot(Sha256,TestBitlist, eight_false, &hash1, std.testing.allocator); // Expected hash for 8 false bits in Bitlist[256] // This should keep one zero byte and not remove all then add back a chunk @@ -1504,7 +1484,7 @@ test "Bitlist trailing zeros optimization" { } var hash2: [32]u8 = undefined; - try hashTreeRoot(TestBitlist, pattern, &hash2, std.testing.allocator); + try hashTreeRoot(Sha256,TestBitlist, pattern, &hash2, std.testing.allocator); // Expected hash for [T,F,T,F...F] (16 bits total) // First byte is 0x05, second byte is 0x00 @@ -1519,12 +1499,10 @@ test "Bitlist trailing zeros optimization" { } test "uint256 hash tree root" { - // SHA-specific expected vectors; skip when Poseidon is enabled. - if (build_options.poseidon_enabled) return; const data: u256 = 0x0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF; var hash: [32]u8 = undefined; - try hashTreeRoot(u256, data, &hash, std.testing.allocator); + try hashTreeRoot(Sha256,u256, data, &hash, std.testing.allocator); const expected = [_]u8{ 0xEF, 0xCD, 0xAB, 0x89, 0x67, 0x45, 0x23, 0x01, 0xEF, 0xCD, 0xAB, 0x89, 0x67, 0x45, 0x23, 0x01, @@ -1535,14 +1513,13 @@ test "uint256 hash tree root" { } test "Single element List" { - if (build_options.poseidon_enabled) return; const ListU64 = utils.List(u64, 10); var single = try ListU64.init(std.testing.allocator); defer single.deinit(); try single.append(42); var hash: [32]u8 = undefined; - try hashTreeRoot(ListU64, single, &hash, std.testing.allocator); + try hashTreeRoot(Sha256,ListU64, single, &hash, std.testing.allocator); const expected = [_]u8{ 0x54, 0xd7, 0x76, 0x7c, 0xc1, 0xdd, 0xd2, 0xf6, @@ -1554,7 +1531,6 @@ test "Single element List" { } test "Nested structure hash tree root" { - if 
(build_options.poseidon_enabled) return; const Inner = struct { a: u32, b: u64, @@ -1573,7 +1549,7 @@ test "Nested structure hash tree root" { }; var hash: [32]u8 = undefined; - try hashTreeRoot(Outer, data, &hash, std.testing.allocator); + try hashTreeRoot(Sha256,Outer, data, &hash, std.testing.allocator); const expected = [_]u8{ 0x4e, 0xbe, 0x9c, 0x7f, 0x41, 0x63, 0xd9, 0x34, @@ -1603,8 +1579,6 @@ test "serialize negative i8 and i16" { } test "Zero-length array" { - // SHA-specific expected vectors; skip when Poseidon is enabled. - if (build_options.poseidon_enabled) return; const empty: [0]u32 = .{}; var list = ArrayList(u8).init(std.testing.allocator); @@ -1613,7 +1587,7 @@ test "Zero-length array" { try expect(list.items.len == 0); var hash: [32]u8 = undefined; - try hashTreeRoot([0]u32, empty, &hash, std.testing.allocator); + try hashTreeRoot(Sha256,[0]u32, empty, &hash, std.testing.allocator); // Should be the zero chunk try expect(std.mem.eql(u8, &hash, &([_]u8{0} ** 32))); } diff --git a/src/utils.zig b/src/utils.zig index 2a82f55..fe811ce 100644 --- a/src/utils.zig +++ b/src/utils.zig @@ -7,7 +7,6 @@ const deserialize = lib.deserialize; const isFixedSizeObject = lib.isFixedSizeObject; const ArrayList = std.ArrayList; const Allocator = std.mem.Allocator; -const Hasher = lib.Hasher; const hashes_of_zero = @import("./zeros.zig").hashes_of_zero; // SSZ specification constants @@ -154,7 +153,7 @@ pub fn List(comptime T: type, comptime N: usize) type { return lib.serializedSize(@TypeOf(inner_slice), inner_slice); } - pub fn hashTreeRoot(self: *const Self, out: *[32]u8, allctr: Allocator) !void { + pub fn hashTreeRoot(self: *const Self, comptime Hasher: type, out: *[32]u8, allctr: Allocator) !void { const items = self.constSlice(); switch (@typeInfo(Item)) { @@ -168,20 +167,20 @@ pub fn List(comptime T: type, comptime N: usize) type { const chunks_for_max_capacity = (N + items_per_chunk - 1) / items_per_chunk; var tmp: chunk = undefined; try 
lib.merkleize(Hasher, chunks, chunks_for_max_capacity, &tmp); - lib.mixInLength2(tmp, items.len, out); + lib.mixInLength2(Hasher, tmp, items.len, out); }, else => { var chunks = ArrayList(chunk).init(allctr); defer chunks.deinit(); var tmp: chunk = undefined; for (items) |item| { - try lib.hashTreeRoot(Item, item, &tmp, allctr); + try lib.hashTreeRoot(Hasher, Item, item, &tmp, allctr); try chunks.append(tmp); } // Always use N (max capacity) for merkleization, even when empty // This ensures proper tree depth according to SSZ specification try lib.merkleize(Hasher, chunks.items, N, &tmp); - lib.mixInLength2(tmp, items.len, out); + lib.mixInLength2(Hasher, tmp, items.len, out); }, } } @@ -324,7 +323,7 @@ pub fn Bitlist(comptime N: usize) type { return (self.length + 7 + 1) / 8; } - pub fn hashTreeRoot(self: *const Self, out: *[32]u8, allctr: Allocator) !void { + pub fn hashTreeRoot(self: *const Self, comptime Hasher: type, out: *[32]u8, allctr: Allocator) !void { const bit_length = self.length; var bitfield_bytes = ArrayList(u8).init(allctr); @@ -352,7 +351,7 @@ pub fn Bitlist(comptime N: usize) type { // Use chunk_count limit as per SSZ specification const chunk_count_limit = (N + 255) / 256; try lib.merkleize(Hasher, chunks, chunk_count_limit, &tmp); - lib.mixInLength2(tmp, bit_length, out); + lib.mixInLength2(Hasher, tmp, bit_length, out); } /// Validates that the bitlist is correctly formed From 51ad8de0ef9ca277a819e0a101a610ae709fd167 Mon Sep 17 00:00:00 2001 From: Chetany Bhardwaj Date: Fri, 9 Jan 2026 15:34:59 +0530 Subject: [PATCH 11/14] docs: add custom hasher info to README --- README.md | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/README.md b/README.md index 64140ba..ee79b4e 100644 --- a/README.md +++ b/README.md @@ -60,6 +60,31 @@ Supported types: * `List[N]` * `Bitlist[N]` +## Using Custom Hash Functions + +ssz.zig is hash-function agnostic. 
Pass your hasher as a type parameter: + +```zig +const std = @import("std"); +const ssz = @import("ssz.zig"); + +// Using SHA256 (from stdlib) +const Sha256 = std.crypto.hash.sha2.Sha256; +try ssz.hashTreeRoot(Sha256, MyType, value, &root, allocator); + +// Using a custom hasher (must implement init/update/final API) +const MyHasher = ...; // Your hasher type +try ssz.hashTreeRoot(MyHasher, MyType, value, &root, allocator); +``` + +**Required Hasher API:** +```zig +pub const Options = struct {}; +pub fn init(_: Options) Self; +pub fn update(self: *Self, data: []const u8) void; +pub fn final(self: *Self, out: *[32]u8) void; // out size matches 32 bytes for SSZ +``` + ## Contributing Simply create an issue or a PR. From e784fa0cc874d03e38149ee338cb8b09c4c6626c Mon Sep 17 00:00:00 2001 From: Chetany Bhardwaj Date: Fri, 9 Jan 2026 16:13:13 +0530 Subject: [PATCH 12/14] chore: lint fix --- build.zig.zon | 3 +- src/beacon_tests.zig | 53 ++++++++++++++----------------- src/tests.zig | 75 ++++++++++++++++++++++---------------------- 3 files changed, 62 insertions(+), 69 deletions(-) diff --git a/build.zig.zon b/build.zig.zon index 50ce542..ce1fa74 100644 --- a/build.zig.zon +++ b/build.zig.zon @@ -3,6 +3,5 @@ .fingerprint = 0x1d34bd0ceb1dfc2d, .version = "0.0.9", .paths = .{""}, - .dependencies = .{ - }, + .dependencies = .{}, } diff --git a/src/beacon_tests.zig b/src/beacon_tests.zig index 8be0a3d..0f4803d 100644 --- a/src/beacon_tests.zig +++ b/src/beacon_tests.zig @@ -66,7 +66,7 @@ test "Validator struct hash tree root" { }; var hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,Validator, validator, &hash, std.testing.allocator); + try hashTreeRoot(Sha256, Validator, validator, &hash, std.testing.allocator); // Validate against expected hash const expected_validator_hash = [_]u8{ 0x70, 0x68, 0xE5, 0x06, 0xCB, 0xFF, 0xCD, 0x31, 0xBD, 0x2D, 0x13, 0x42, 0x5E, 0x4F, 0xDE, 0x98, 0x6E, 0xF3, 0x5E, 0x6F, 0xB5, 0x0F, 0x35, 0x9D, 0x7A, 0x26, 0xB6, 0x33, 0x2E, 0xE2, 0xCB, 0x94 
}; @@ -74,7 +74,7 @@ test "Validator struct hash tree root" { // Hash should be deterministic for the same validator var hash2: [32]u8 = undefined; - try hashTreeRoot(Sha256,Validator, validator, &hash2, std.testing.allocator); + try hashTreeRoot(Sha256, Validator, validator, &hash2, std.testing.allocator); try expect(std.mem.eql(u8, &hash, &hash2)); // Different validator should produce different hash @@ -90,7 +90,7 @@ test "Validator struct hash tree root" { }; var hash3: [32]u8 = undefined; - try hashTreeRoot(Sha256,Validator, validator2, &hash3, std.testing.allocator); + try hashTreeRoot(Sha256, Validator, validator2, &hash3, std.testing.allocator); try expect(!std.mem.eql(u8, &hash, &hash3)); } @@ -199,7 +199,7 @@ test "List[Validator] serialization and hash tree root" { try expect(std.mem.eql(u8, &hash1, &expected_validator_list_hash)); var hash2: [32]u8 = undefined; - try hashTreeRoot(Sha256,ValidatorList, deserialized_list, &hash2, std.testing.allocator); + try hashTreeRoot(Sha256, ValidatorList, deserialized_list, &hash2, std.testing.allocator); // Hash should be the same for original and deserialized lists try expect(std.mem.eql(u8, &hash1, &hash2)); @@ -280,24 +280,23 @@ test "BeamBlockBody with validator array - full cycle" { try expect(orig.withdrawable_epoch == deser.withdrawable_epoch); } - // Test hash tree root consistency var hash_original: [32]u8 = undefined; - try hashTreeRoot(Sha256,BeamBlockBody, beam_block_body, &hash_original, std.testing.allocator); + try hashTreeRoot(Sha256, BeamBlockBody, beam_block_body, &hash_original, std.testing.allocator); // Validate against expected hash const expected_beam_block_body_hash = [_]u8{ 0x34, 0xF2, 0xBC, 0x58, 0xA0, 0xBF, 0x20, 0x72, 0x43, 0xF8, 0xC2, 0x5E, 0x0F, 0x83, 0x5E, 0x36, 0x90, 0x73, 0xD5, 0xAC, 0x97, 0x1E, 0x9A, 0x53, 0x71, 0x14, 0xA0, 0xFD, 0x1C, 0xC8, 0xD8, 0xE4 }; try expect(std.mem.eql(u8, &hash_original, &expected_beam_block_body_hash)); var hash_deserialized: [32]u8 = undefined; - try 
hashTreeRoot(Sha256,BeamBlockBody, deserialized_body, &hash_deserialized, std.testing.allocator); + try hashTreeRoot(Sha256, BeamBlockBody, deserialized_body, &hash_deserialized, std.testing.allocator); // Hashes should be identical for original and deserialized data try expect(std.mem.eql(u8, &hash_original, &hash_deserialized)); // Test hash determinism var hash_duplicate: [32]u8 = undefined; - try hashTreeRoot(Sha256,BeamBlockBody, beam_block_body, &hash_duplicate, std.testing.allocator); + try hashTreeRoot(Sha256, BeamBlockBody, beam_block_body, &hash_duplicate, std.testing.allocator); try expect(std.mem.eql(u8, &hash_original, &hash_duplicate)); } @@ -379,17 +378,16 @@ test "Zeam-style List/Bitlist usage with tree root stability" { try expect(std.mem.eql(u8, state_serialized.items, &expected_zeam_state_bytes)); - // Test hash tree root determinism and validate against expected hashes var body_hash1: [32]u8 = undefined; var body_hash2: [32]u8 = undefined; var state_hash1: [32]u8 = undefined; var state_hash2: [32]u8 = undefined; - try hashTreeRoot(Sha256,ZeamBeamBlockBody, body, &body_hash1, std.testing.allocator); - try hashTreeRoot(Sha256,ZeamBeamBlockBody, body, &body_hash2, std.testing.allocator); - try hashTreeRoot(Sha256,BeamState, state, &state_hash1, std.testing.allocator); - try hashTreeRoot(Sha256,BeamState, state, &state_hash2, std.testing.allocator); + try hashTreeRoot(Sha256, ZeamBeamBlockBody, body, &body_hash1, std.testing.allocator); + try hashTreeRoot(Sha256, ZeamBeamBlockBody, body, &body_hash2, std.testing.allocator); + try hashTreeRoot(Sha256, BeamState, state, &state_hash1, std.testing.allocator); + try hashTreeRoot(Sha256, BeamState, state, &state_hash2, std.testing.allocator); // Validate against expected hashes const expected_zeam_body_hash = [_]u8{ 0xAA, 0x2C, 0x76, 0x39, 0x96, 0xA6, 0xDD, 0x26, 0x25, 0x13, 0x12, 0x8D, 0xEA, 0xDF, 0xCB, 0x69, 0xF1, 0xEC, 0xEB, 0x60, 0xA8, 0xFF, 0xAC, 0xC7, 0xA7, 0xE4, 0x28, 0x3C, 0x74, 0xAA, 0x6A, 0xE4 
}; @@ -453,10 +451,9 @@ test "BeamState with historical roots - comprehensive test" { const expected_comprehensive_beam_state_bytes = [_]u8{ 0x39, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x9C, 0x00, 0x00, 0x00, 0xE8, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x33, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 
0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xBB, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xCC, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; try expect(std.mem.eql(u8, serialized_data.items, &expected_comprehensive_beam_state_bytes)); - // Test hash tree root calculation var original_hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,BeamState, beam_state, &original_hash, std.testing.allocator); + try hashTreeRoot(Sha256, BeamState, beam_state, &original_hash, std.testing.allocator); // Validate against expected hash const expected_comprehensive_beam_state_hash = [_]u8{ 0xBD, 0x36, 0x59, 0x5E, 0x3B, 0x4A, 0x51, 0x9C, 0xF3, 0x5F, 0x4F, 0x96, 0x88, 0x9E, 0x86, 0x10, 0xFF, 0x45, 0x20, 0x49, 0x15, 0xAE, 0x96, 0x2E, 0xF4, 0x0C, 0x81, 0x6B, 0xF7, 0x45, 0x4A, 0x17 }; @@ -492,7 +489,7 @@ test "BeamState with historical roots - comprehensive test" { // Test hash tree root consistency var deserialized_hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,BeamState, deserialized_state, &deserialized_hash, std.testing.allocator); + try hashTreeRoot(Sha256, BeamState, deserialized_state, &deserialized_hash, std.testing.allocator); 
// Verify hash tree roots are identical try expect(std.mem.eql(u8, &original_hash, &deserialized_hash)); @@ -528,10 +525,9 @@ test "BeamState with empty historical roots" { const expected_empty_beam_state_bytes = [_]u8{ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; try expect(std.mem.eql(u8, serialized_data.items, &expected_empty_beam_state_bytes)); - // Test hash tree root calculation var original_hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,SimpleBeamState, beam_state, &original_hash, std.testing.allocator); + try hashTreeRoot(Sha256, SimpleBeamState, beam_state, &original_hash, std.testing.allocator); // Validate against actual hash const expected_empty_beam_state_hash = [_]u8{ 0x58, 0xD2, 0x2B, 0xA0, 0x04, 0x45, 0xE8, 0xB7, 0x39, 0x5E, 0xC3, 0x93, 0x92, 0x45, 0xC6, 0xF1, 0x5A, 0x29, 0x91, 0xA5, 0x70, 0x3F, 0xC5, 0x05, 0x88, 0x10, 0x57, 0xDE, 0x9D, 0xF3, 0x64, 0x10 }; @@ -552,7 +548,7 @@ test "BeamState with empty historical roots" { // Test hash tree root consistency var deserialized_hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,SimpleBeamState, deserialized_state, &deserialized_hash, std.testing.allocator); + try hashTreeRoot(Sha256, SimpleBeamState, deserialized_state, &deserialized_hash, std.testing.allocator); // Verify hash tree roots are identical try expect(std.mem.eql(u8, &original_hash, &deserialized_hash)); @@ -596,10 +592,9 @@ test "BeamState with maximum historical roots" { const expected_max_beam_state_bytes_start = [_]u8{ 0x3F, 0x42, 0x0F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00 }; try expect(std.mem.eql(u8, serialized_data.items[0..12], &expected_max_beam_state_bytes_start)); - // Test hash tree root calculation var original_hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,MaxBeamState, beam_state, &original_hash, std.testing.allocator); + try hashTreeRoot(Sha256, MaxBeamState, beam_state, &original_hash, std.testing.allocator); // Validate 
against actual hash const expected_max_beam_state_hash = [_]u8{ 0x3F, 0xFC, 0x7A, 0xA4, 0x85, 0x21, 0xD4, 0x02, 0x36, 0x46, 0x19, 0x2E, 0x8D, 0x73, 0xBC, 0x11, 0x3D, 0x1D, 0xE7, 0xF4, 0xDE, 0xC4, 0xD9, 0x6E, 0x94, 0x52, 0xD2, 0xCB, 0x95, 0xE3, 0x22, 0x9A }; @@ -625,7 +620,7 @@ test "BeamState with maximum historical roots" { // Test hash tree root consistency var deserialized_hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,MaxBeamState, deserialized_state, &deserialized_hash, std.testing.allocator); + try hashTreeRoot(Sha256, MaxBeamState, deserialized_state, &deserialized_hash, std.testing.allocator); try expect(std.mem.eql(u8, &original_hash, &deserialized_hash)); } @@ -677,10 +672,9 @@ test "BeamState historical roots access and comparison" { const expected_access_beam_state_bytes = [_]u8{ 0x31, 0xD4, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xEF, 0xBE, 0xAD, 0xDE, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 
0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x34, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x56, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x78, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0x9A, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC, 0xBC }; try expect(std.mem.eql(u8, serialized_data.items, &expected_access_beam_state_bytes)); - // Test hash tree root calculation var original_hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,AccessBeamState, beam_state, &original_hash, std.testing.allocator); + try hashTreeRoot(Sha256, AccessBeamState, beam_state, &original_hash, std.testing.allocator); // Validate against expected hash const expected_access_beam_state_hash = [_]u8{ 0x22, 0x3E, 0xCB, 0xDD, 0x62, 0x46, 0x7F, 0x7F, 0x0F, 0xA8, 0x2C, 0x91, 0x54, 0x1F, 0xF4, 0xEA, 0xBF, 0x92, 0xB6, 0xB7, 0x67, 0x57, 0x02, 0x67, 0x16, 0xEF, 0x3A, 0xB0, 0x96, 0x4E, 0x91, 0x9E }; @@ -714,7 +708,7 @@ test "BeamState historical roots access and comparison" { // Test hash tree root consistency var deserialized_hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,AccessBeamState, deserialized_state, &deserialized_hash, std.testing.allocator); + try hashTreeRoot(Sha256, AccessBeamState, deserialized_state, &deserialized_hash, std.testing.allocator); try expect(std.mem.eql(u8, &original_hash, &deserialized_hash)); } @@ -748,10 +742,9 @@ test "SimpleBeamState with empty historical roots" { 
const expected_simple_beam_state_bytes = [_]u8{ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; try expect(std.mem.eql(u8, serialized_data.items, &expected_simple_beam_state_bytes)); - // Test hash tree root calculation var original_hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,SimpleBeamState, beam_state, &original_hash, std.testing.allocator); + try hashTreeRoot(Sha256, SimpleBeamState, beam_state, &original_hash, std.testing.allocator); // Validate against actual hash const expected_simple_beam_state_hash = [_]u8{ 0x58, 0xD2, 0x2B, 0xA0, 0x04, 0x45, 0xE8, 0xB7, 0x39, 0x5E, 0xC3, 0x93, 0x92, 0x45, 0xC6, 0xF1, 0x5A, 0x29, 0x91, 0xA5, 0x70, 0x3F, 0xC5, 0x05, 0x88, 0x10, 0x57, 0xDE, 0x9D, 0xF3, 0x64, 0x10 }; @@ -771,7 +764,7 @@ test "SimpleBeamState with empty historical roots" { // Test hash tree root consistency var deserialized_hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,SimpleBeamState, deserialized_state, &deserialized_hash, std.testing.allocator); + try hashTreeRoot(Sha256, SimpleBeamState, deserialized_state, &deserialized_hash, std.testing.allocator); // Verify hash tree roots are identical try expect(std.mem.eql(u8, &original_hash, &deserialized_hash)); @@ -783,7 +776,7 @@ test "hashTreeRoot for pointer types" { // Test pointer size .one - SUPPORTED { var value: u32 = 8; - try hashTreeRoot(Sha256,*u32, &value, &hash, std.testing.allocator); + try hashTreeRoot(Sha256, *u32, &value, &hash, std.testing.allocator); var deserialized: u32 = undefined; try deserialize(u32, &hash, &deserialized, std.testing.allocator); @@ -794,7 +787,7 @@ test "hashTreeRoot for pointer types" { { var values = [4]u8{ 0xAA, 0xBB, 0xCC, 0xDD }; const values_ptr: *[4]u8 = &values; - try hashTreeRoot(Sha256,*[4]u8, values_ptr, &hash, std.testing.allocator); + try hashTreeRoot(Sha256, *[4]u8, values_ptr, &hash, std.testing.allocator); var deserialized: [4]u8 = undefined; try deserialize([4]u8, &hash, 
&deserialized, std.testing.allocator); diff --git a/src/tests.zig b/src/tests.zig index c95d387..31562ae 100644 --- a/src/tests.zig +++ b/src/tests.zig @@ -532,18 +532,18 @@ const e_bits = bytesToBits(16, e_bytes); test "calculate the root hash of a boolean" { var expected = [_]u8{1} ++ [_]u8{0} ** 31; var hashed: [32]u8 = undefined; - try hashTreeRoot(Sha256,bool, true, &hashed, std.testing.allocator); + try hashTreeRoot(Sha256, bool, true, &hashed, std.testing.allocator); try expect(std.mem.eql(u8, hashed[0..], expected[0..])); expected = hashes_of_zero[0]; - try hashTreeRoot(Sha256,bool, false, &hashed, std.testing.allocator); + try hashTreeRoot(Sha256, bool, false, &hashed, std.testing.allocator); try expect(std.mem.eql(u8, hashed[0..], expected[0..])); } test "calculate root hash of an array of two Bitvector[128]" { const deserialized: [2][128]bool = [2][128]bool{ a_bits, b_bits }; var hashed: [32]u8 = undefined; - try hashTreeRoot(Sha256,@TypeOf(deserialized), deserialized, &hashed, std.testing.allocator); + try hashTreeRoot(Sha256, @TypeOf(deserialized), deserialized, &hashed, std.testing.allocator); var expected: [32]u8 = undefined; const expected_preimage = a_bytes ++ empty_bytes ++ b_bytes ++ empty_bytes; @@ -555,14 +555,14 @@ test "calculate root hash of an array of two Bitvector[128]" { test "calculate the root hash of an array of integers" { var expected = [_]u8{ 0xef, 0xbe, 0xad, 0xde, 0xfe, 0xca, 0xfe, 0xca } ++ [_]u8{0} ** 24; var hashed: [32]u8 = undefined; - try hashTreeRoot(Sha256,[2]u32, [_]u32{ 0xdeadbeef, 0xcafecafe }, &hashed, std.testing.allocator); + try hashTreeRoot(Sha256, [2]u32, [_]u32{ 0xdeadbeef, 0xcafecafe }, &hashed, std.testing.allocator); try expect(std.mem.eql(u8, hashed[0..], expected[0..])); } test "calculate root hash of an array of three Bitvector[128]" { const deserialized: [3][128]bool = [3][128]bool{ a_bits, b_bits, c_bits }; var hashed: [32]u8 = undefined; - try hashTreeRoot(Sha256,@TypeOf(deserialized), deserialized, 
&hashed, std.testing.allocator); + try hashTreeRoot(Sha256, @TypeOf(deserialized), deserialized, &hashed, std.testing.allocator); var left: [32]u8 = undefined; var expected: [32]u8 = undefined; @@ -581,7 +581,7 @@ test "calculate root hash of an array of three Bitvector[128]" { test "calculate the root hash of an array of five Bitvector[128]" { const deserialized = [5][128]bool{ a_bits, b_bits, c_bits, d_bits, e_bits }; var hashed: [32]u8 = undefined; - try hashTreeRoot(Sha256,@TypeOf(deserialized), deserialized, &hashed, std.testing.allocator); + try hashTreeRoot(Sha256, @TypeOf(deserialized), deserialized, &hashed, std.testing.allocator); var internal_nodes: [64]u8 = undefined; var left: [32]u8 = undefined; @@ -625,7 +625,7 @@ test "calculate the root hash of a structure" { }; var expected: [32]u8 = undefined; _ = try std.fmt.hexToBytes(expected[0..], "58316a908701d3660123f0b8cb7839abdd961f71d92993d34e4f480fbec687d9"); - try hashTreeRoot(Sha256,Fork, fork, &hashed, std.testing.allocator); + try hashTreeRoot(Sha256, Fork, fork, &hashed, std.testing.allocator); try expect(std.mem.eql(u8, hashed[0..], expected[0..])); } @@ -638,12 +638,12 @@ test "calculate the root hash of an Optional" { _ = try std.fmt.hexToBytes(payload[0..], "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"); Sha256.hash(payload[0..], expected[0..], Sha256.Options{}); - try hashTreeRoot(Sha256,?u32, v, &hashed, std.testing.allocator); + try hashTreeRoot(Sha256, ?u32, v, &hashed, std.testing.allocator); try expect(std.mem.eql(u8, hashed[0..], expected[0..])); _ = try std.fmt.hexToBytes(payload[0..], "efbeadde000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000"); Sha256.hash(payload[0..], expected[0..], Sha256.Options{}); - try hashTreeRoot(Sha256,?u32, u, &hashed, std.testing.allocator); + try hashTreeRoot(Sha256, ?u32, u, &hashed, 
std.testing.allocator); try expect(std.mem.eql(u8, hashed[0..], expected[0..])); } @@ -657,13 +657,13 @@ test "calculate the root hash of an union" { _ = try std.fmt.hexToBytes(payload[0..], "d2040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"); var exp1: [32]u8 = undefined; Sha256.hash(payload[0..], exp1[0..], Sha256.Options{}); - try hashTreeRoot(Sha256,Payload, Payload{ .int = 1234 }, &out, std.testing.allocator); + try hashTreeRoot(Sha256, Payload, Payload{ .int = 1234 }, &out, std.testing.allocator); try expect(std.mem.eql(u8, out[0..], exp1[0..])); var exp2: [32]u8 = undefined; _ = try std.fmt.hexToBytes(payload[0..], "01000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000"); Sha256.hash(payload[0..], exp2[0..], Sha256.Options{}); - try hashTreeRoot(Sha256,Payload, Payload{ .boolean = true }, &out, std.testing.allocator); + try hashTreeRoot(Sha256, Payload, Payload{ .boolean = true }, &out, std.testing.allocator); try expect(std.mem.eql(u8, out[0..], exp2[0..])); } @@ -927,7 +927,8 @@ test "slice hashtree root composite type" { var roots_list = [_]Root{test_root}; var hash_root: [32]u8 = undefined; - try hashTreeRoot(Sha256, + try hashTreeRoot( + Sha256, RootsList, &roots_list, &hash_root, @@ -944,7 +945,8 @@ test "slice hashtree root simple type" { var test_root = [_]u8{23} ** 33; var hash_root: [32]u8 = undefined; - try hashTreeRoot(Sha256, + try hashTreeRoot( + Sha256, DynamicRoot, &test_root, &hash_root, @@ -970,8 +972,8 @@ test "List tree root calculation" { var empty_hash: [32]u8 = undefined; var filled_hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,ListU64, empty_list, &empty_hash, std.testing.allocator); - try hashTreeRoot(Sha256,ListU64, list_with_items, &filled_hash, std.testing.allocator); + try hashTreeRoot(Sha256, ListU64, empty_list, &empty_hash, std.testing.allocator); + try 
hashTreeRoot(Sha256, ListU64, list_with_items, &filled_hash, std.testing.allocator); try expect(std.mem.eql(u8, &filled_hash, &list_with_items_expected)); try expect(!std.mem.eql(u8, &empty_hash, &filled_hash)); @@ -983,7 +985,7 @@ test "List tree root calculation" { try same_content_list.append(456); var same_content_hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,ListU64, same_content_list, &same_content_hash, std.testing.allocator); + try hashTreeRoot(Sha256, ListU64, same_content_list, &same_content_hash, std.testing.allocator); try expect(std.mem.eql(u8, &filled_hash, &same_content_hash)); } @@ -1003,8 +1005,8 @@ test "Bitlist tree root calculation" { var empty_hash: [32]u8 = undefined; var filled_hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,TestBitlist, empty_bitlist, &empty_hash, std.testing.allocator); - try hashTreeRoot(Sha256,TestBitlist, filled_bitlist, &filled_hash, std.testing.allocator); + try hashTreeRoot(Sha256, TestBitlist, empty_bitlist, &empty_hash, std.testing.allocator); + try hashTreeRoot(Sha256, TestBitlist, filled_bitlist, &filled_hash, std.testing.allocator); try expect(!std.mem.eql(u8, &empty_hash, &filled_hash)); @@ -1016,7 +1018,7 @@ test "Bitlist tree root calculation" { try same_content_bitlist.append(true); var same_content_hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,TestBitlist, same_content_bitlist, &same_content_hash, std.testing.allocator); + try hashTreeRoot(Sha256, TestBitlist, same_content_bitlist, &same_content_hash, std.testing.allocator); try expect(std.mem.eql(u8, &filled_hash, &same_content_hash)); } @@ -1030,7 +1032,7 @@ test "List of composite types tree root" { try pastry_list.append(Pastry{ .name = "muffin", .weight = 30 }); var hash1: [32]u8 = undefined; - try hashTreeRoot(Sha256,ListOfPastry, pastry_list, &hash1, std.testing.allocator); + try hashTreeRoot(Sha256, ListOfPastry, pastry_list, &hash1, std.testing.allocator); var pastry_list2 = try ListOfPastry.init(std.testing.allocator); defer 
pastry_list2.deinit(); @@ -1038,13 +1040,13 @@ test "List of composite types tree root" { try pastry_list2.append(Pastry{ .name = "muffin", .weight = 30 }); var hash2: [32]u8 = undefined; - try hashTreeRoot(Sha256,ListOfPastry, pastry_list2, &hash2, std.testing.allocator); + try hashTreeRoot(Sha256, ListOfPastry, pastry_list2, &hash2, std.testing.allocator); try expect(std.mem.eql(u8, &hash1, &hash2)); try pastry_list2.append(Pastry{ .name = "bagel", .weight = 25 }); var hash3: [32]u8 = undefined; - try hashTreeRoot(Sha256,ListOfPastry, pastry_list2, &hash3, std.testing.allocator); + try hashTreeRoot(Sha256, ListOfPastry, pastry_list2, &hash3, std.testing.allocator); try expect(!std.mem.eql(u8, &hash1, &hash3)); } @@ -1281,7 +1283,7 @@ test "Empty List hash tree root" { defer empty_list.deinit(); var hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,ListU32, empty_list, &hash, std.testing.allocator); + try hashTreeRoot(Sha256, ListU32, empty_list, &hash, std.testing.allocator); // Updated to correct SSZ-compliant hash that uses max capacity for merkleization const zig_expected = [_]u8{ @@ -1299,7 +1301,7 @@ test "Empty BitList(<=256) hash tree root" { defer empty_list.deinit(); var hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,BitListLen100, empty_list, &hash, std.testing.allocator); + try hashTreeRoot(Sha256, BitListLen100, empty_list, &hash, std.testing.allocator); const zig_expected = [_]u8{ 0xf5, 0xa5, 0xfd, 0x42, 0xd1, 0x6a, 0x20, 0x30, @@ -1316,7 +1318,7 @@ test "Empty BitList (>256) hash tree root" { defer empty_list.deinit(); var hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,BitListLen100, empty_list, &hash, std.testing.allocator); + try hashTreeRoot(Sha256, BitListLen100, empty_list, &hash, std.testing.allocator); const zig_expected = [_]u8{ 0x79, 0x29, 0x30, 0xbb, 0xd5, 0xba, 0xac, 0x43, @@ -1343,7 +1345,7 @@ test "List at maximum capacity" { // Test hash tree root at capacity var hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,ListU8, 
full_list, &hash, std.testing.allocator); + try hashTreeRoot(Sha256, ListU8, full_list, &hash, std.testing.allocator); // Python reference: List[uint8, 4] with [1,2,3,4] const expected = [_]u8{ @@ -1359,7 +1361,7 @@ test "Array hash tree root" { const data: [4]u32 = .{ 1, 2, 3, 4 }; var hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,[4]u32, data, &hash, std.testing.allocator); + try hashTreeRoot(Sha256, [4]u32, data, &hash, std.testing.allocator); // Python reference: Vector[uint32, 4] with [1,2,3,4] // For basic types packed in one chunk, hash is the serialized data @@ -1395,10 +1397,9 @@ test "Large Bitvector serialization and hash" { try expect(list.items[32] & 0x01 == 0x01); // bit 256 -> LSB of byte 32 try expect(list.items[63] & 0x80 == 0x80); // bit 511 -> MSB of byte 63 - // Test hash tree root var hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,LargeBitvec, data, &hash, std.testing.allocator); + try hashTreeRoot(Sha256, LargeBitvec, data, &hash, std.testing.allocator); const expected = [_]u8{ 0x1d, 0x83, 0x09, 0x11, 0x4a, 0xfe, 0xf7, 0x14, 0x89, 0xbe, 0x68, 0xd4, 0x5e, 0x18, 0xc3, 0x39, @@ -1419,7 +1420,7 @@ test "Bitlist edge cases" { } var hash1: [32]u8 = undefined; - try hashTreeRoot(Sha256,TestBitlist, all_false, &hash1, std.testing.allocator); + try hashTreeRoot(Sha256, TestBitlist, all_false, &hash1, std.testing.allocator); const expected_false = [_]u8{ 0x02, 0xc8, 0xc1, 0x5f, 0xed, 0x3f, 0x1b, 0x86, @@ -1437,7 +1438,7 @@ test "Bitlist edge cases" { } var hash2: [32]u8 = undefined; - try hashTreeRoot(Sha256,TestBitlist, all_true, &hash2, std.testing.allocator); + try hashTreeRoot(Sha256, TestBitlist, all_true, &hash2, std.testing.allocator); // Python reference: Bitlist[100] with 50 true bits const expected_true = [_]u8{ @@ -1460,7 +1461,7 @@ test "Bitlist trailing zeros optimization" { } var hash1: [32]u8 = undefined; - try hashTreeRoot(Sha256,TestBitlist, eight_false, &hash1, std.testing.allocator); + try hashTreeRoot(Sha256, TestBitlist, 
eight_false, &hash1, std.testing.allocator); // Expected hash for 8 false bits in Bitlist[256] // This should keep one zero byte and not remove all then add back a chunk @@ -1484,7 +1485,7 @@ test "Bitlist trailing zeros optimization" { } var hash2: [32]u8 = undefined; - try hashTreeRoot(Sha256,TestBitlist, pattern, &hash2, std.testing.allocator); + try hashTreeRoot(Sha256, TestBitlist, pattern, &hash2, std.testing.allocator); // Expected hash for [T,F,T,F...F] (16 bits total) // First byte is 0x05, second byte is 0x00 @@ -1502,7 +1503,7 @@ test "uint256 hash tree root" { const data: u256 = 0x0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF; var hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,u256, data, &hash, std.testing.allocator); + try hashTreeRoot(Sha256, u256, data, &hash, std.testing.allocator); const expected = [_]u8{ 0xEF, 0xCD, 0xAB, 0x89, 0x67, 0x45, 0x23, 0x01, 0xEF, 0xCD, 0xAB, 0x89, 0x67, 0x45, 0x23, 0x01, @@ -1519,7 +1520,7 @@ test "Single element List" { try single.append(42); var hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,ListU64, single, &hash, std.testing.allocator); + try hashTreeRoot(Sha256, ListU64, single, &hash, std.testing.allocator); const expected = [_]u8{ 0x54, 0xd7, 0x76, 0x7c, 0xc1, 0xdd, 0xd2, 0xf6, @@ -1549,7 +1550,7 @@ test "Nested structure hash tree root" { }; var hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,Outer, data, &hash, std.testing.allocator); + try hashTreeRoot(Sha256, Outer, data, &hash, std.testing.allocator); const expected = [_]u8{ 0x4e, 0xbe, 0x9c, 0x7f, 0x41, 0x63, 0xd9, 0x34, @@ -1587,7 +1588,7 @@ test "Zero-length array" { try expect(list.items.len == 0); var hash: [32]u8 = undefined; - try hashTreeRoot(Sha256,[0]u32, empty, &hash, std.testing.allocator); + try hashTreeRoot(Sha256, [0]u32, empty, &hash, std.testing.allocator); // Should be the zero chunk try expect(std.mem.eql(u8, &hash, &([_]u8{0} ** 32))); } From d7c0e0607654a1ef1a074e11d6085087873004c9 Mon Sep 17 00:00:00 
2001 From: Guillaume Ballet <3272758+gballet@users.noreply.github.com> Date: Thu, 15 Jan 2026 23:27:11 +0100 Subject: [PATCH 13/14] apply review feedback --- build.zig | 1 - build.zig.zon | 1 - src/lib.zig | 38 +++++++++++++++++--------------------- src/utils.zig | 6 +++--- src/zeros.zig | 4 ++-- 5 files changed, 22 insertions(+), 28 deletions(-) diff --git a/build.zig b/build.zig index e4f7e0f..448a6a1 100644 --- a/build.zig +++ b/build.zig @@ -1,4 +1,3 @@ -const std = @import("std"); const Builder = @import("std").Build; pub fn build(b: *Builder) void { diff --git a/build.zig.zon b/build.zig.zon index ce1fa74..f335c95 100644 --- a/build.zig.zon +++ b/build.zig.zon @@ -3,5 +3,4 @@ .fingerprint = 0x1d34bd0ceb1dfc2d, .version = "0.0.9", .paths = .{""}, - .dependencies = .{}, } diff --git a/src/lib.zig b/src/lib.zig index 6354db0..ff811c9 100644 --- a/src/lib.zig +++ b/src/lib.zig @@ -7,11 +7,12 @@ pub const zeros = @import("./zeros.zig"); const ArrayList = std.ArrayList; const builtin = std.builtin; const Allocator = std.mem.Allocator; +const Sha256 = std.crypto.hash.sha2.Sha256; /// Number of bytes per chunk. const BYTES_PER_CHUNK = 32; -pub fn serializedFixedSize(comptime T: type) !usize { +pub fn serializedFixedSize(T: type) !usize { const info = @typeInfo(T); return switch (info) { .int => @sizeOf(T), @@ -38,7 +39,7 @@ pub fn serializedFixedSize(comptime T: type) !usize { // Determine the serialized size of an object so that // the code serializing of variable-size objects can // determine the offset to the next object. 
-pub fn serializedSize(comptime T: type, data: T) !usize { +pub fn serializedSize(T: type, data: T) !usize { // Check for custom serializedSize method first for List types if (comptime std.meta.hasFn(T, "serializedSize")) { return data.serializedSize(); @@ -94,7 +95,7 @@ pub fn serializedSize(comptime T: type, data: T) !usize { } /// Returns true if an object is of fixed size -pub fn isFixedSizeObject(comptime T: type) !bool { +pub fn isFixedSizeObject(T: type) !bool { if (comptime std.meta.hasFn(T, "isFixedSizeObject")) { return T.isFixedSizeObject(); } @@ -120,7 +121,7 @@ pub fn isFixedSizeObject(comptime T: type) !bool { /// Provides the generic serialization of any `data` var to SSZ. The /// serialization is written to the `ArrayList` `l`. -pub fn serialize(comptime T: type, data: T, l: *ArrayList(u8)) !void { +pub fn serialize(T: type, data: T, l: *ArrayList(u8)) !void { // shortcut if the type implements its own encode method if (comptime std.meta.hasFn(T, "sszEncode")) { return data.sszEncode(l); @@ -314,7 +315,7 @@ pub fn serialize(comptime T: type, data: T, l: *ArrayList(u8)) !void { /// Takes a byte array containing the serialized payload of type `T` (with /// possible trailing data) and deserializes it into the `T` object pointed /// at by `out`. 
-pub fn deserialize(comptime T: type, serialized: []const u8, out: *T, allocator: ?std.mem.Allocator) !void { +pub fn deserialize(T: type, serialized: []const u8, out: *T, allocator: ?std.mem.Allocator) !void { // shortcut if the type implements its own decode method if (comptime std.meta.hasFn(T, "sszDecode")) { return T.sszDecode(serialized, out, allocator); @@ -515,7 +516,7 @@ pub fn deserialize(comptime T: type, serialized: []const u8, out: *T, allocator: } } -pub fn mixInLength2(comptime Hasher: type, root: [32]u8, length: usize, out: *[32]u8) void { +pub fn mixInLength2(Hasher: type, root: [32]u8, length: usize, out: *[32]u8) void { var hasher = Hasher.init(Hasher.Options{}); hasher.update(root[0..]); @@ -525,7 +526,7 @@ pub fn mixInLength2(comptime Hasher: type, root: [32]u8, length: usize, out: *[3 hasher.final(out[0..]); } -fn mixInLength(comptime Hasher: type, root: [32]u8, length: [32]u8, out: *[32]u8) void { +fn mixInLength(Hasher: type, root: [32]u8, length: [32]u8, out: *[32]u8) void { var hasher = Hasher.init(Hasher.Options{}); hasher.update(root[0..]); hasher.update(length[0..]); @@ -533,7 +534,6 @@ fn mixInLength(comptime Hasher: type, root: [32]u8, length: [32]u8, out: *[32]u8 } test "mixInLength" { - const Sha256 = std.crypto.hash.sha2.Sha256; var root: [32]u8 = undefined; var length: [32]u8 = undefined; var expected: [32]u8 = undefined; @@ -546,7 +546,7 @@ test "mixInLength" { try std.testing.expect(std.mem.eql(u8, mixin[0..], expected[0..])); } -fn mixInSelector(comptime Hasher: type, root: [32]u8, comptime selector: usize, out: *[32]u8) void { +fn mixInSelector(Hasher: type, root: [32]u8, comptime selector: usize, out: *[32]u8) void { var hasher = Hasher.init(Hasher.Options{}); hasher.update(root[0..]); var tmp = [_]u8{0} ** 32; @@ -556,7 +556,6 @@ fn mixInSelector(comptime Hasher: type, root: [32]u8, comptime selector: usize, } test "mixInSelector" { - const Sha256 = std.crypto.hash.sha2.Sha256; var root: [32]u8 = undefined; var expected: 
[32]u8 = undefined; var mixin: [32]u8 = undefined; @@ -569,7 +568,7 @@ test "mixInSelector" { /// Calculates the number of leaves needed for the merkelization /// of this type. -pub fn chunkCount(comptime T: type) usize { +pub fn chunkCount(T: type) usize { const info = @typeInfo(T); switch (info) { .int, .bool => return 1, @@ -591,7 +590,7 @@ pub fn chunkCount(comptime T: type) usize { const chunk = [BYTES_PER_CHUNK]u8; const zero_chunk: chunk = [_]u8{0} ** BYTES_PER_CHUNK; -pub fn pack(comptime T: type, values: T, l: *ArrayList(u8)) ![]chunk { +pub fn pack(T: type, values: T, l: *ArrayList(u8)) ![]chunk { try serialize(T, values, l); const padding_size = (BYTES_PER_CHUNK - l.items.len % BYTES_PER_CHUNK) % BYTES_PER_CHUNK; _ = try l.writer().write(zero_chunk[0..padding_size]); @@ -636,9 +635,9 @@ test "pack string" { } // merkleize recursively calculates the root hash of a Merkle tree. -pub fn merkleize(comptime Hasher: type, chunks: []chunk, limit: ?usize, out: *[32]u8) anyerror!void { +pub fn merkleize(Hasher: type, chunks: []chunk, limit: ?usize, out: *[32]u8) anyerror!void { // Generate zero hashes for this hasher type at comptime - const zero_hashes = comptime zeros.buildZeroHashes(Hasher, 32, 256); + const hashes_of_zero = comptime zeros.buildHashesOfZero(Hasher, 32, 256); // Calculate the number of chunks to be padded, check the limit if (limit != null and chunks.len > limit.?) { @@ -649,8 +648,8 @@ pub fn merkleize(comptime Hasher: type, chunks: []chunk, limit: ?usize, out: *[3 // Perform the merkelization switch (size) { - 0 => std.mem.copyForwards(u8, out.*[0..], zero_hashes[0][0..]), - 1 => std.mem.copyForwards(u8, out.*[0..], (if (chunks.len > 0) chunks[0] else zero_hashes[0])[0..]), + 0 => std.mem.copyForwards(u8, out.*[0..], hashes_of_zero[0][0..]), + 1 => std.mem.copyForwards(u8, out.*[0..], (if (chunks.len > 0) chunks[0] else hashes_of_zero[0])[0..]), else => { // Merkleize the left side. 
If the number of chunks // isn't enough to fill the entire width, complete @@ -672,7 +671,7 @@ pub fn merkleize(comptime Hasher: type, chunks: []chunk, limit: ?usize, out: *[3 // For a subtree of size/2 leaves, we need the zero hash at depth log2(size/2) const subtree_size = size / 2; const depth = std.math.log2_int(usize, subtree_size); - digest.update(zero_hashes[depth][0..]); + digest.update(hashes_of_zero[depth][0..]); } digest.final(out); }, @@ -680,7 +679,6 @@ pub fn merkleize(comptime Hasher: type, chunks: []chunk, limit: ?usize, out: *[3 } test "merkleize an empty slice" { - const Sha256 = std.crypto.hash.sha2.Sha256; var list = ArrayList(u8).init(std.testing.allocator); defer list.deinit(); const chunks = &[0][32]u8{}; @@ -690,7 +688,6 @@ test "merkleize an empty slice" { } test "merkleize a string" { - const Sha256 = std.crypto.hash.sha2.Sha256; var list = ArrayList(u8).init(std.testing.allocator); defer list.deinit(); const chunks = try pack([]const u8, "a" ** 100, &list); @@ -719,7 +716,6 @@ test "merkleize a string" { } test "merkleize a boolean" { - const Sha256 = std.crypto.hash.sha2.Sha256; var list = ArrayList(u8).init(std.testing.allocator); defer list.deinit(); @@ -768,7 +764,7 @@ fn packBits(bits: []const bool, l: *ArrayList(u8)) ![]chunk { return std.mem.bytesAsSlice(chunk, l.items); } -pub fn hashTreeRoot(comptime Hasher: type, comptime T: type, value: T, out: *[32]u8, allctr: Allocator) !void { +pub fn hashTreeRoot(Hasher: type, T: type, value: T, out: *[32]u8, allctr: Allocator) !void { // Check if type has its own hashTreeRoot method at compile time if (comptime std.meta.hasFn(T, "hashTreeRoot")) { return value.hashTreeRoot(Hasher, out, allctr); diff --git a/src/utils.zig b/src/utils.zig index fe811ce..2b6df82 100644 --- a/src/utils.zig +++ b/src/utils.zig @@ -15,7 +15,7 @@ const chunk = [BYTES_PER_CHUNK]u8; const zero_chunk: chunk = [_]u8{0} ** BYTES_PER_CHUNK; /// Implements the SSZ `List[N]` container. 
-pub fn List(comptime T: type, comptime N: usize) type { +pub fn List(T: type, comptime N: usize) type { // Compile-time check: List[bool, N] is not allowed, use Bitlist[N] instead if (T == bool) { @compileError("List[bool, N] is not supported. Use Bitlist(" ++ std.fmt.comptimePrint("{}", .{N}) ++ ") instead for boolean lists."); @@ -153,7 +153,7 @@ pub fn List(comptime T: type, comptime N: usize) type { return lib.serializedSize(@TypeOf(inner_slice), inner_slice); } - pub fn hashTreeRoot(self: *const Self, comptime Hasher: type, out: *[32]u8, allctr: Allocator) !void { + pub fn hashTreeRoot(self: *const Self, Hasher: type, out: *[32]u8, allctr: Allocator) !void { const items = self.constSlice(); switch (@typeInfo(Item)) { @@ -323,7 +323,7 @@ pub fn Bitlist(comptime N: usize) type { return (self.length + 7 + 1) / 8; } - pub fn hashTreeRoot(self: *const Self, comptime Hasher: type, out: *[32]u8, allctr: Allocator) !void { + pub fn hashTreeRoot(self: *const Self, Hasher: type, out: *[32]u8, allctr: Allocator) !void { const bit_length = self.length; var bitfield_bytes = ArrayList(u8).init(allctr); diff --git a/src/zeros.zig b/src/zeros.zig index 1ecca76..ceb0e86 100644 --- a/src/zeros.zig +++ b/src/zeros.zig @@ -4,7 +4,7 @@ const std = @import("std"); /// Generic function to build zero hashes for any hash function /// HashType should be a hash type like std.crypto.hash.sha2.Sha256 /// digest_length is the output size of the hash in bytes -pub fn buildZeroHashes(comptime HashType: type, comptime digest_length: usize, comptime depth: usize) [depth][digest_length]u8 { +pub fn buildHashesOfZero(comptime HashType: type, comptime digest_length: usize, comptime depth: usize) [depth][digest_length]u8 { @setEvalBranchQuota(10000000); var ret: [depth][digest_length]u8 = undefined; @@ -26,4 +26,4 @@ pub fn buildZeroHashes(comptime HashType: type, comptime digest_length: usize, c } // SHA256 zero hashes (the default for SSZ) -pub const hashes_of_zero = 
buildZeroHashes(std.crypto.hash.sha2.Sha256, 32, 256); +pub const hashes_of_zero = buildHashesOfZero(std.crypto.hash.sha2.Sha256, 32, 256); From fbf27352b12b95c079af5f3c2b6f02750ce0dffa Mon Sep 17 00:00:00 2001 From: Guillaume Ballet <3272758+gballet@users.noreply.github.com> Date: Thu, 15 Jan 2026 23:35:29 +0100 Subject: [PATCH 14/14] use Hasher.digest_length for output params --- README.md | 2 +- src/lib.zig | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index ee79b4e..cd89ec4 100644 --- a/README.md +++ b/README.md @@ -82,7 +82,7 @@ try ssz.hashTreeRoot(MyHasher, MyType, value, &root, allocator); pub const Options = struct {}; pub fn init(_: Options) Self; pub fn update(self: *Self, data: []const u8) void; -pub fn final(self: *Self, out: *[32]u8) void; // out size matches 32 bytes for SSZ +pub fn final(self: *Self, out: *[Self.digest_length]u8) void; // out size is Self.digest_length (32 bytes for the default SSZ Sha256) ``` ## Contributing diff --git a/src/lib.zig b/src/lib.zig index ff811c9..00195cb 100644 --- a/src/lib.zig +++ b/src/lib.zig @@ -516,7 +516,7 @@ pub fn deserialize(T: type, serialized: []const u8, out: *T, allocator: ?std.mem } } -pub fn mixInLength2(Hasher: type, root: [32]u8, length: usize, out: *[32]u8) void { +pub fn mixInLength2(Hasher: type, root: [Hasher.digest_length]u8, length: usize, out: *[Hasher.digest_length]u8) void { var hasher = Hasher.init(Hasher.Options{}); hasher.update(root[0..]); @@ -526,7 +526,7 @@ pub fn mixInLength2(Hasher: type, root: [32]u8, length: usize, out: *[32]u8) voi } -fn mixInLength(Hasher: type, root: [32]u8, length: [32]u8, out: *[32]u8) void { +fn mixInLength(Hasher: type, root: [Hasher.digest_length]u8, length: [32]u8, out: *[Hasher.digest_length]u8) void { var hasher = Hasher.init(Hasher.Options{}); hasher.update(root[0..]); hasher.update(length[0..]); @@ -546,7 +546,7 @@ test "mixInLength" { try std.testing.expect(std.mem.eql(u8, mixin[0..], expected[0..])); } -fn
mixInSelector(Hasher: type, root: [32]u8, comptime selector: usize, out: *[32]u8) void { +fn mixInSelector(Hasher: type, root: [Hasher.digest_length]u8, comptime selector: usize, out: *[Hasher.digest_length]u8) void { var hasher = Hasher.init(Hasher.Options{}); hasher.update(root[0..]); var tmp = [_]u8{0} ** 32; @@ -635,7 +635,7 @@ test "pack string" { } // merkleize recursively calculates the root hash of a Merkle tree. -pub fn merkleize(Hasher: type, chunks: []chunk, limit: ?usize, out: *[32]u8) anyerror!void { +pub fn merkleize(Hasher: type, chunks: []chunk, limit: ?usize, out: *[Hasher.digest_length]u8) anyerror!void { // Generate zero hashes for this hasher type at comptime const hashes_of_zero = comptime zeros.buildHashesOfZero(Hasher, 32, 256); @@ -764,7 +764,7 @@ fn packBits(bits: []const bool, l: *ArrayList(u8)) ![]chunk { return std.mem.bytesAsSlice(chunk, l.items); } -pub fn hashTreeRoot(Hasher: type, T: type, value: T, out: *[32]u8, allctr: Allocator) !void { +pub fn hashTreeRoot(Hasher: type, T: type, value: T, out: *[Hasher.digest_length]u8, allctr: Allocator) !void { // Check if type has its own hashTreeRoot method at compile time if (comptime std.meta.hasFn(T, "hashTreeRoot")) { return value.hashTreeRoot(Hasher, out, allctr);