diff --git a/.circleci/config.yml b/.circleci/config.yml index f866b94c45..4c3c93b573 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -287,15 +287,15 @@ jobs: pip install -e ".[dev]" - restore_cache: - key: test-vectors-e0b55b1fd3d72532b87a53fa6c313c8a09e34164 + key: test-vectors-233b764514a3cd35adf986e0b9967946d486c987 - run: name: Checkout Test Vectors command: | if [ ! -d "test-vectors" ]; then git clone https://github.com/firedancer-io/test-vectors --depth 1 cd test-vectors - git fetch origin e0b55b1fd3d72532b87a53fa6c313c8a09e34164 - git checkout e0b55b1fd3d72532b87a53fa6c313c8a09e34164 + git fetch origin 233b764514a3cd35adf986e0b9967946d486c987 + git checkout 233b764514a3cd35adf986e0b9967946d486c987 # We need each fixture file to have a unique name so that we can put them into the test-inputs directory # in a "flat" manner. The problem is that there are just too many files and it exceeds ARG_MAX for a single @@ -307,7 +307,7 @@ jobs: find vm_interp/fixtures/v2 -type f -name '*.fix' -print0 | xargs -0 rename "s/\.fix\$/-v2.fix/" fi - save_cache: - key: test-vectors-e0b55b1fd3d72532b87a53fa6c313c8a09e34164 + key: test-vectors-233b764514a3cd35adf986e0b9967946d486c987 paths: - test-vectors/ - run: diff --git a/conformance/scripts/download_artifacts.sh b/conformance/scripts/download_artifacts.sh index 1bf8664e29..e924846c86 100755 --- a/conformance/scripts/download_artifacts.sh +++ b/conformance/scripts/download_artifacts.sh @@ -1,8 +1,8 @@ #!/bin/bash set -e -# Specifically this commit of solfuzz-agave: 7e7b4935d66e17b478910b8a1e43b461cbb43f7d -artifact_id=3999155212 +# Specifically this commit of solfuzz-agave: cef278ea3216bde6ea7fe760a89217560c2da856 +artifact_id=4077531638 artifact_name="libsolfuzz-agave.so" artifacts_url="https://api.github.com/repos/firedancer-io/solfuzz-agave/actions/artifacts" diff --git a/conformance/src/txn_execute.zig b/conformance/src/txn_execute.zig index 9be83cfd1d..2d319e7291 100644 --- 
a/conformance/src/txn_execute.zig +++ b/conformance/src/txn_execute.zig @@ -667,9 +667,10 @@ fn executeTxnContext( // .accounts_db = &accounts_db, // }); - // Remove address lookup table and config program accounts by inserting empty accounts (zero-lamports) + // Remove address lookup table, stake, and config program accounts by inserting empty accounts (zero-lamports) try accounts_db.putAccount(slot, program.address_lookup_table.ID, .EMPTY); try accounts_db.putAccount(slot, program.config.ID, .EMPTY); + try accounts_db.putAccount(slot, program.stake.ID, .EMPTY); // Load accounts into accounts db for (accounts_map.keys(), accounts_map.values()) |pubkey, account| { @@ -1205,7 +1206,7 @@ fn loadTransaction( ); for (pb_txn.signatures.items, 0..) |pb_signature, i| - signatures[i] = .{ .data = pb_signature.getSlice()[0..Signature.SIZE].* }; + signatures[i] = .fromBytes(pb_signature.getSlice()[0..Signature.SIZE].*); if (pb_txn.signatures.items.len == 0) signatures[0] = Signature.ZEROES; diff --git a/scripts/gen_wycheproof.zig b/scripts/gen_wycheproof.zig new file mode 100644 index 0000000000..112c07d33c --- /dev/null +++ b/scripts/gen_wycheproof.zig @@ -0,0 +1,119 @@ +const std = @import("std"); + +const Inputs = struct { + algorithm: []const u8, + numberOfTests: u32, + testGroups: []const Group, + + const Group = struct { + type: []const u8, + publicKey: PublicKey, + tests: []const Test, + + const PublicKey = struct { + keySize: u32, + pk: []const u8, + }; + + const Test = struct { + tcId: u32, + comment: []const u8, + + msg: []const u8, + sig: []const u8, + + result: []const u8, + }; + }; +}; + +pub fn main() !void { + const allocator = std.heap.smp_allocator; + + const args = try std.process.argsAlloc(allocator); + defer std.process.argsFree(allocator, args); + + if (args.len != 3) @panic("gen_wycheproof [path to test case json] [path to output zig]"); + + const proof_path = args[1]; + const output_path = args[2]; + + const contents = try 
std.fs.cwd().readFileAlloc(allocator, proof_path, 1 * 1024 * 1024); + defer allocator.free(contents); + + const parsed: Inputs = try std.json.parseFromSliceLeaky( + Inputs, + allocator, + contents, + .{ .ignore_unknown_fields = true }, + ); + + const output_file = try std.fs.cwd().createFile(output_path, .{}); + defer output_file.close(); + const output = output_file.writer(); + + try output.print( + \\//! DO NOT EDIT! File generated by `gen_wycheproof.zig`. + \\//! algorithm: {s} + \\//! numberOfTests: {d} + \\ + , .{ parsed.algorithm, parsed.numberOfTests }); + + try output.writeAll( + \\ + \\const Group = struct { + \\ pubkey: []const u8, + \\ cases: []const Case, + \\ + \\ const Case = struct { + \\ msg: []const u8, + \\ sig: []const u8, + \\ expected: enum { valid, invalid }, + \\ }; + \\}; + \\ + \\pub const groups: []const Group = &.{ + \\ + ); + + for (parsed.testGroups) |group| { + try output.print( + \\ .{{ + \\ .pubkey = "{s}", + \\ .cases = &.{{ + \\ + , .{group.publicKey.pk}); + + for (group.tests) |case| { + if (case.comment.len != 0) try output.print( + \\ // {s} + \\ + , .{std.mem.trimRight(u8, case.comment, &.{' '})}); + + const expected = if (std.mem.eql(u8, case.result, "invalid")) + "invalid" + else if (std.mem.eql(u8, case.result, "valid")) + "valid" + else + std.debug.panic("unknown result: '{s}'", .{case.result}); + + try output.print( + \\ .{{ .msg = "{s}", .sig = "{s}", .expected = .{s} }}, + \\ + , .{ + case.msg, case.sig, expected, + }); + } + + try output.writeAll( + \\ }, + \\ }, + \\ + ); + } + + try output.writeAll( + \\}; + \\ + ); +} diff --git a/scripts/generator_chain.zig b/scripts/generator_chain.zig index 3462782a9d..e3dc1457d3 100644 --- a/scripts/generator_chain.zig +++ b/scripts/generator_chain.zig @@ -27,7 +27,6 @@ pub fn main() !void { \\ \\const std = @import("std"); \\const Ristretto255 = std.crypto.ecc.Ristretto255; - \\const Edwards25519 = std.crypto.ecc.Edwards25519; \\ ); @@ -37,7 +36,7 @@ pub fn main() !void { fn 
writeChain(comptime name: []const u8, writer: anytype) !void { try writer.print( - \\pub const {s}: [256]Edwards25519 = .{{ + \\pub const {s}: [256]Ristretto255 = .{{ \\ , .{name}); @@ -45,7 +44,7 @@ fn writeChain(comptime name: []const u8, writer: anytype) !void { for (0..256) |_| { const next = generator.next(); - try writer.writeAll(" .{ .x = "); + try writer.writeAll(" .{ .p = .{ .x = "); try printPoint(next.p.x, writer); try writer.writeAll(", .y = "); try printPoint(next.p.y, writer); @@ -54,19 +53,10 @@ fn writeChain(comptime name: []const u8, writer: anytype) !void { try writer.writeAll(", .t = "); try printPoint(next.p.t, writer); - try writer.writeAll(" },\n"); + try writer.writeAll(" }},\n"); } - try writer.print( - \\}}; - \\ - \\pub const {[0]s}_ristretto = blk: {{ - \\ var points: [256]Ristretto255 = undefined; - \\ for (&points, {[0]s}) |*p, e| p.* = .{{ .p = e }}; - \\ break :blk points; - \\}}; - \\ - , .{name}); + try writer.writeAll("};\n"); } fn printPoint(p: Ristretto255.Curve.Fe, writer: anytype) !void { diff --git a/scripts/style.py b/scripts/style.py index b96cfdb4a7..984ce70386 100644 --- a/scripts/style.py +++ b/scripts/style.py @@ -169,8 +169,9 @@ def unused_imports(args, files_to_check): "src/transaction_sender/service.zig", "src/transaction_sender/transaction_pool.zig", - # Generated file, will not conform to style guide. + # Generated files, will not conform to style guide. 
"src/crypto/bn254/bn254_64.zig", + "src/crypto/ed25519/wycheproof.zig", ] diff --git a/src/benchmarks.zig b/src/benchmarks.zig index 235c4a9a5e..825029c042 100644 --- a/src/benchmarks.zig +++ b/src/benchmarks.zig @@ -29,6 +29,7 @@ const Benchmark = enum { accounts_db_snapshot, // expensive accounts_db, bincode, + crypto, geyser, gossip, ledger, @@ -360,6 +361,17 @@ pub fn main() !void { ); } + if (filter == .crypto or run_all_benchmarks) { + try benchmark( + allocator, + .from(logger), + @import("crypto/benchmark.zig").Benchmark, + max_time_per_bench, + .micros, + &maybe_metrics, + ); + } + // save metrics if (collect_metrics) { try saveMetricsJson( diff --git a/src/consensus/vote_listener.zig b/src/consensus/vote_listener.zig index 6981316dff..bf659f86f0 100644 --- a/src/consensus/vote_listener.zig +++ b/src/consensus/vote_listener.zig @@ -1610,7 +1610,7 @@ fn newTowerSyncTransaction( const pos = vote_tx_msg.getSigningKeypairPosition(pubkey) orelse return error.MissingOrInvalidSigner; const signature = try keypairs[i].sign(msg_serialized.constSlice(), null); - signatures[pos] = .{ .data = signature.toBytes() }; + signatures[pos] = .fromSignature(signature); } const tx: Transaction = .{ diff --git a/src/core/entry.zig b/src/core/entry.zig index 1169e96d92..d67325ee9b 100644 --- a/src/core/entry.zig +++ b/src/core/entry.zig @@ -163,7 +163,7 @@ pub fn hashTransactions( try nodes.ensureTotalCapacity(allocator, capacity); for (transactions) |tx| for (tx.signatures) |signature| { - const hash = Hash.generateSha256(.{ LEAF_PREFIX, &signature.data }); + const hash = Hash.generateSha256(.{ LEAF_PREFIX, &signature.toBytes() }); nodes.appendAssumeCapacity(hash); }; diff --git a/src/core/features.zon b/src/core/features.zon index 2317bbc2b8..8d908e545d 100644 --- a/src/core/features.zon +++ b/src/core/features.zon @@ -234,7 +234,7 @@ .{ .name = "vote_only_retransmitter_signed_fec_sets", .pubkey = "RfEcA95xnhuwooVAhUUksEJLZBF7xKCLuqrJoqk4Zph" }, .{ .name = 
"mask_out_rent_epoch_in_vm_serialization", .pubkey = "RENtePQcDLrAbxAsP3k8dwVcnNYQ466hi2uKvALjnXx" }, .{ .name = "disable_zk_elgamal_proof_program", .pubkey = "zkdoVwnSFnSLtGJG7irJPEYUpmb4i7sGMGcnN6T9rnC" }, - .{ .name = "reenable_zk_elgamal_proof_program", .pubkey = "zkemPXcuM3G4wpMDZ36Cpw34EjUpvm1nuioiSGbGZPR" }, + .{ .name = "reenable_zk_elgamal_proof_program", .pubkey = "zkesAyFB19sTkX8i9ReoKaMNDA4YNTPYJpZKPDt7FMW" }, .{ .name = "formalize_loaded_transaction_data_size", .pubkey = "DeS7sR48ZcFTUmt5FFEVDr1v1bh73aAbZiZq3SYr8Eh8" }, .{ .name = "enable_extend_program_checked", .pubkey = "2oMRZEDWT2tqtYMofhmmfQ8SsjqUFzT6sYXppQDavxwz" }, .{ .name = "require_static_nonce_account", .pubkey = "7VVhpg5oAjAmnmz1zCcSHb2Z9ecZB2FQqpnEwReka9Zm" }, diff --git a/src/core/poh.zig b/src/core/poh.zig index a92a9554ed..acb1c7ad06 100644 --- a/src/core/poh.zig +++ b/src/core/poh.zig @@ -128,7 +128,7 @@ pub fn testPoh(valid_signatures: bool) !struct { Poh, std.BoundedArray(sig.core. const sigs = try allocator.dupe(sig.core.Signature, a_transaction.signatures); allocator.free(a_transaction.signatures); a_transaction.signatures = sigs; - sigs[0].data[0] +%= 1; + sigs[0].r[0] +%= 1; } const transactions = [_]Transaction{ diff --git a/src/core/pubkey.zig b/src/core/pubkey.zig index 8745948a6e..9f9c7817ff 100644 --- a/src/core/pubkey.zig +++ b/src/core/pubkey.zig @@ -2,6 +2,8 @@ const std = @import("std"); const base58 = @import("base58"); const BASE58_ENDEC = base58.Table.BITCOIN; +const Edwards25519 = std.crypto.ecc.Edwards25519; + pub const Pubkey = extern struct { data: [SIZE]u8, @@ -9,6 +11,11 @@ pub const Pubkey = extern struct { pub const ZEROES: Pubkey = .{ .data = .{0} ** SIZE }; + pub fn fromBytes(data: [SIZE]u8) !Pubkey { + try Edwards25519.rejectNonCanonical(data); + return .{ .data = data }; + } + pub fn fromPublicKey(public_key: *const std.crypto.sign.Ed25519.PublicKey) Pubkey { return .{ .data = public_key.bytes }; } diff --git a/src/core/signature.zig 
b/src/core/signature.zig index ada51e07d2..9f43eef8f3 100644 --- a/src/core/signature.zig +++ b/src/core/signature.zig @@ -1,47 +1,42 @@ const std = @import("std"); -const core = @import("lib.zig"); +const sig = @import("../sig.zig"); const base58 = @import("base58"); const BASE58_ENDEC = base58.Table.BITCOIN; -const Ed25519 = std.crypto.sign.Ed25519; -const Verifier = std.crypto.sign.Ed25519.Verifier; -const e = std.crypto.errors; +const ed25519 = sig.crypto.ed25519; +const Pubkey = sig.core.Pubkey; -const Pubkey = core.Pubkey; - -pub const Signature = struct { - data: [SIZE]u8, +pub const Signature = extern struct { + r: [32]u8, + s: [32]u8, pub const SIZE: usize = 64; - pub const ZEROES: Signature = .{ .data = .{0} ** SIZE }; + pub const ZEROES: Signature = .{ .r = @splat(0), .s = @splat(0) }; - pub const VerifyError = e.NonCanonicalError; - pub fn verify( - self: Signature, - pubkey: Pubkey, - msg: []const u8, - ) VerifyError!bool { - const signature = Ed25519.Signature.fromBytes(self.data); - const byte_pubkey = try Ed25519.PublicKey.fromBytes(pubkey.data); - signature.verify(msg, byte_pubkey) catch return false; - return true; + pub fn fromBytes(data: [SIZE]u8) Signature { + return .{ + .r = data[0..32].*, + .s = data[32..64].*, + }; } - pub const VerifierError = - e.NonCanonicalError || - e.EncodingError || - e.IdentityElementError; - pub fn verifier( - self: Signature, - pubkey: Pubkey, - ) VerifierError!Verifier { - const signature = Ed25519.Signature.fromBytes(self.data); - return signature.verifier(try Ed25519.PublicKey.fromBytes(pubkey.data)); + pub fn toBytes(self: Signature) [SIZE]u8 { + return self.r ++ self.s; + } + + pub fn fromSignature(signature: std.crypto.sign.Ed25519.Signature) Signature { + return .{ .r = signature.r, .s = signature.s }; + } + + pub fn verify(self: Signature, pubkey: Pubkey, message: []const u8) !void { + try ed25519.verifySignature(self, pubkey, message, true); } pub fn eql(self: *const Signature, other: *const Signature) 
bool { - return std.mem.eql(u8, self.data[0..], other.data[0..]); + const x: @Vector(SIZE, u8) = self.toBytes(); + const y: @Vector(SIZE, u8) = other.toBytes(); + return @reduce(.And, x == y); } pub inline fn parse(comptime str: []const u8) Signature { @@ -61,14 +56,14 @@ pub const Signature = struct { }; if (decoded.len != SIZE) return error.InvalidSignature; - return .{ .data = decoded.constSlice()[0..SIZE].* }; + return .fromBytes(decoded.constSlice()[0..SIZE].*); } pub const BASE58_MAX_SIZE = base58.encodedMaxSize(SIZE); pub const Base58String = std.BoundedArray(u8, BASE58_MAX_SIZE); pub fn base58String(self: Signature) Base58String { - return BASE58_ENDEC.encodeArray(SIZE, self.data); + return BASE58_ENDEC.encodeArray(SIZE, self.toBytes()); } pub fn format( diff --git a/src/core/status_cache.zig b/src/core/status_cache.zig index 3e655885b9..496b82e3bc 100644 --- a/src/core/status_cache.zig +++ b/src/core/status_cache.zig @@ -250,7 +250,7 @@ test "status cache empty" { try std.testing.expectEqual( null, status_cache.getStatus( - &signature.data, + &signature.toBytes(), &block_hash, &Ancestors{}, ), @@ -273,11 +273,11 @@ test "status cache find with ancestor fork" { var status_cache: StatusCache = .DEFAULT; defer status_cache.deinit(allocator); - try status_cache.insert(allocator, random, &blockhash, &signature.data, 0); + try status_cache.insert(allocator, random, &blockhash, &signature.toBytes(), 0); try std.testing.expectEqual( Fork{ .slot = 0 }, - status_cache.getStatus(&signature.data, &blockhash, &ancestors), + status_cache.getStatus(&signature.toBytes(), &blockhash, &ancestors), ); } @@ -294,11 +294,11 @@ test "status cache find without ancestor fork" { var status_cache: StatusCache = .DEFAULT; defer status_cache.deinit(allocator); - try status_cache.insert(allocator, random, &blockhash, &signature.data, 1); + try status_cache.insert(allocator, random, &blockhash, &signature.toBytes(), 1); try std.testing.expectEqual( null, - 
status_cache.getStatus(&signature.data, &blockhash, &ancestors), + status_cache.getStatus(&signature.toBytes(), &blockhash, &ancestors), ); } @@ -315,12 +315,12 @@ test "status cache find with root ancestor fork" { var status_cache: StatusCache = .DEFAULT; defer status_cache.deinit(allocator); - try status_cache.insert(allocator, random, &blockhash, &signature.data, 0); + try status_cache.insert(allocator, random, &blockhash, &signature.toBytes(), 0); try status_cache.addRoot(allocator, 0); try std.testing.expectEqual( Fork{ .slot = 0 }, - status_cache.getStatus(&signature.data, &blockhash, &ancestors), + status_cache.getStatus(&signature.toBytes(), &blockhash, &ancestors), ); } @@ -340,13 +340,13 @@ test "status cache insert picks latest blockhash fork" { var status_cache: StatusCache = .DEFAULT; defer status_cache.deinit(allocator); - try status_cache.insert(allocator, random, &blockhash, &signature.data, 0); - try status_cache.insert(allocator, random, &blockhash, &signature.data, 1); + try status_cache.insert(allocator, random, &blockhash, &signature.toBytes(), 0); + try status_cache.insert(allocator, random, &blockhash, &signature.toBytes(), 1); for (0..StatusCache.MAX_CACHE_ENTRIES + 1) |i| try status_cache.addRoot(allocator, i); try std.testing.expect( - status_cache.getStatus(&signature.data, &blockhash, &ancestors) != null, + status_cache.getStatus(&signature.toBytes(), &blockhash, &ancestors) != null, ); } @@ -363,11 +363,11 @@ test "status cache root expires" { var status_cache: StatusCache = .DEFAULT; defer status_cache.deinit(allocator); - try status_cache.insert(allocator, random, &blockhash, &signature.data, 0); + try status_cache.insert(allocator, random, &blockhash, &signature.toBytes(), 0); for (0..StatusCache.MAX_CACHE_ENTRIES + 1) |i| try status_cache.addRoot(allocator, i); try std.testing.expectEqual( null, - status_cache.getStatus(&signature.data, &blockhash, &ancestors), + status_cache.getStatus(&signature.toBytes(), &blockhash, &ancestors), 
); } diff --git a/src/core/transaction.zig b/src/core/transaction.zig index d6d97b4903..3eb3c7264b 100644 --- a/src/core/transaction.zig +++ b/src/core/transaction.zig @@ -142,7 +142,7 @@ pub const Transaction = struct { for (signatures, keypairs) |*signature, keypair| { const msg_signature = keypair.sign(msg_bytes, null) catch return error.SigningError; - signature.* = .{ .data = msg_signature.toBytes() }; + signature.* = .fromSignature(msg_signature); } return .{ @@ -155,7 +155,7 @@ pub const Transaction = struct { pub fn serialize(writer: anytype, data: anytype, _: sig.bincode.Params) !void { std.debug.assert(data.signatures.len <= std.math.maxInt(u16)); try leb.writeULEB128(writer, @as(u16, @intCast(data.signatures.len))); - for (data.signatures) |sgn| try writer.writeAll(&sgn.data); + for (data.signatures) |sgn| try writer.writeAll(&sgn.toBytes()); try data.msg.serialize(writer, data.version); } @@ -164,7 +164,7 @@ pub const Transaction = struct { const signatures = try allocator.alloc(Signature, try leb.readULEB128(u16, reader)); errdefer allocator.free(signatures); - for (signatures) |*sgn| sgn.* = .{ .data = try reader.readBytesNoEof(Signature.SIZE) }; + for (signatures) |*sgn| sgn.* = .fromBytes(try reader.readBytesNoEof(Signature.SIZE)); var peekable = sig.utils.io.peekableReader(reader); const version = try Version.deserialize(&peekable); return .{ @@ -203,11 +203,13 @@ pub const Transaction = struct { /// verify signatures. Call `validate` to ensure full consistency. 
pub fn verifySignatures(self: Transaction, serialized_message: []const u8) VerifyError!void { if (self.msg.account_keys.len < self.signatures.len) return error.NotEnoughAccounts; - for (self.signatures, self.msg.account_keys[0..self.signatures.len]) |signature, pubkey| { - if (!try signature.verify(pubkey, serialized_message)) { - return error.SignatureVerificationFailed; - } - } + + sig.crypto.ed25519.verifyBatchOverSingleMessage( + 16, + self.signatures, + self.msg.account_keys[0..self.signatures.len], + serialized_message, + ) catch return error.SignatureVerificationFailed; } /// Count the number of accounts in the slice of transactions, diff --git a/src/crypto/benchmark.zig b/src/crypto/benchmark.zig new file mode 100644 index 0000000000..3b3e32d8af --- /dev/null +++ b/src/crypto/benchmark.zig @@ -0,0 +1,65 @@ +const std = @import("std"); +const sig = @import("../sig.zig"); + +const Ed25519 = std.crypto.sign.Ed25519; + +pub const Benchmark = struct { + pub const min_iterations = 100; + pub const max_iterations = 1_000; + + pub fn naiveBatchVerify() !sig.time.Duration { + const message = "test!"; + + const keypair = Ed25519.KeyPair.generate(); + const signature = try keypair.sign(message, null); + + const inputs: [100]Ed25519.Signature = @splat(signature); + + var start = try sig.time.Timer.start(); + for (inputs) |s| { + std.mem.doNotOptimizeAway(s.verify(message, keypair.public_key)); + } + return start.read(); + } + + pub fn stdBatchVerify() !sig.time.Duration { + const message = "test!"; + + const keypair = Ed25519.KeyPair.generate(); + const signature = try keypair.sign(message, null); + + const inputs: [100]Ed25519.Signature = @splat(signature); + + var batch: [100]Ed25519.BatchElement = undefined; + for (&batch, inputs) |*element, input| { + element.* = .{ + .public_key = keypair.public_key, + .msg = message, + .sig = input, + }; + } + + var start = try sig.time.Timer.start(); + std.mem.doNotOptimizeAway(Ed25519.verifyBatch(100, batch)); + return 
start.read(); + } + + pub fn sigBatchVerify() !sig.time.Duration { + const message = "test!"; + + const keypair = Ed25519.KeyPair.generate(); + const signature = try keypair.sign(message, null); + + const signatures: [100]sig.core.Signature = @splat(.fromSignature(signature)); + const pubkey: [100]sig.core.Pubkey = @splat(.{ .data = keypair.public_key.toBytes() }); + + var start = try sig.time.Timer.start(); + std.mem.doNotOptimizeAway(sig.crypto.ed25519.verifyBatchOverSingleMessage( + 100, + &signatures, + &pubkey, + message, + )); + return start.read(); + } +}; diff --git a/src/crypto/bn254/fields.zig b/src/crypto/bn254/fields.zig index 04c07cb642..7bd78c761d 100644 --- a/src/crypto/bn254/fields.zig +++ b/src/crypto/bn254/fields.zig @@ -804,22 +804,6 @@ pub const Fp12 = struct { .c0 = c2.mulByGamma().add(c0), .c1 = c2.dbl(), }; - - // const c0 = a.c0.sub(a.c1) - - // var c0: Fp6 = undefined; - // var c2: Fp6 = undefined; - // var c3: Fp6 = undefined; - - // c0.sub(a.c0, a.c1); - // c3.mulByGamma(a.c1); - // c3.sub(a.c0, c3); - // c2.mul(a.c0, a.c1); - // c0.mul(c0, c3); - // c0.add(c0, c2); - // r.c1.add(c2, c2); - // r.c0.mulByGamma(c2); - // r.c0.add(r.c0, c0); } pub fn conj(a: Fp12) Fp12 { diff --git a/src/crypto/bn254/lib.zig b/src/crypto/bn254/lib.zig index 6ffb31da5b..bbe91fd091 100644 --- a/src/crypto/bn254/lib.zig +++ b/src/crypto/bn254/lib.zig @@ -221,7 +221,7 @@ pub const G2 = struct { // G2 does *not* have prime order, so we need to perform a secondary subgroup membership check. // https://eprint.iacr.org/2022/348, Sec 3.1. 
// [r]P == 0 <==> [x+1]P + ψ([x]P) + ψ²([x]P) = ψ³([2x]P) - const xp: G2 = mulScalar(p, @bitCast(Fp.constants.x)); + const xp: G2 = mulScalar(p, Fp.constants.x); const psi = xp.frob(); const psi2 = xp.frob2(); @@ -464,16 +464,17 @@ fn dbl(p: anytype) @TypeOf(p) { /// /// https://encrypt.a41.io/primitives/abstract-algebra/elliptic-curve/scalar-multiplication/double-and-add /// https://en.wikipedia.org/wiki/Elliptic_curve_point_multiplication#Double-and-add -fn mulScalar(a: anytype, scalar: [4]u64) @TypeOf(a) { +fn mulScalar(a: anytype, scalar: u256) @TypeOf(a) { // TODO: can be further optimized with GLV and wNAF - const leading = @clz(@as(u256, @bitCast(scalar))); + const limbs: [4]u64 = @bitCast(scalar); + const leading = @clz(scalar); if (leading == 256) return .zero; var i: u8 = @intCast(256 - 1 - leading); var r = a; while (i > 0) { i -= 1; r = dbl(r); - if (bit(scalar, i)) r = addMixed(r, a); + if (bit(limbs, i)) r = addMixed(r, a); } return r; } @@ -488,7 +489,7 @@ pub fn addSyscall(out: *[64]u8, input: *const [128]u8) !void { pub fn mulSyscall(out: *[64]u8, input: *const [96]u8) !void { const a: G1 = try .fromBytes(input[0..64]); // Scalar is provided in big-endian and we do *not* validate it. - const b: [4]u64 = @bitCast(Fp.byteSwap(input[64..][0..32].*)); + const b: u256 = @bitCast(Fp.byteSwap(input[64..][0..32].*)); const result = mulScalar(a, b); result.toBytes(out); } diff --git a/src/crypto/avx512.zig b/src/crypto/ed25519/avx512.zig similarity index 92% rename from src/crypto/avx512.zig rename to src/crypto/ed25519/avx512.zig index 7d4384a667..527a1b9ce9 100644 --- a/src/crypto/avx512.zig +++ b/src/crypto/ed25519/avx512.zig @@ -8,6 +8,7 @@ const Fe = Ed25519.Fe; const u32x8 = @Vector(8, u32); const i32x8 = @Vector(8, i32); const u64x4 = @Vector(4, u64); +const u52x4 = @Vector(4, u52); // TODO: there's no inherent limitation from using inline assembly instead, // however this currently (Zig 0.14.1) crashes both LLVM and the self-hosted backend. 
@@ -15,8 +16,28 @@ const u64x4 = @Vector(4, u64); // better codegen as opposed to inline assembly. extern fn @"llvm.x86.avx512.vpmadd52l.uq.256"(u64x4, u64x4, u64x4) u64x4; extern fn @"llvm.x86.avx512.vpmadd52h.uq.256"(u64x4, u64x4, u64x4) u64x4; -const madd52lo = @"llvm.x86.avx512.vpmadd52l.uq.256"; -const madd52hi = @"llvm.x86.avx512.vpmadd52h.uq.256"; + +inline fn madd52lo(x: u64x4, y: u64x4, z: u64x4) u64x4 { + if (@inComptime()) { + const V = @Vector(4, u128); + const tsrc2: u52x4 = @truncate(z); + const temp128 = @as(V, @as(u52x4, @truncate(y))) * @as(V, tsrc2); + return x + @as(u52x4, @truncate(temp128)); + } else { + return @"llvm.x86.avx512.vpmadd52l.uq.256"(x, y, z); + } +} + +inline fn madd52hi(x: u64x4, y: u64x4, z: u64x4) u64x4 { + if (@inComptime()) { + const V = @Vector(4, u128); + const tsrc2: u52x4 = @truncate(z); + const temp128 = @as(V, @as(u52x4, @truncate(y))) * @as(V, tsrc2); + return x + @as(u52x4, @truncate(temp128 >> @splat(52))); + } else { + return @"llvm.x86.avx512.vpmadd52h.uq.256"(x, y, z); + } +} /// A vector of four field elements. 
pub const ExtendedPoint = struct { @@ -93,7 +114,11 @@ pub const ExtendedPoint = struct { } }; } - fn add(self: ExtendedPoint, other: ExtendedPoint) ExtendedPoint { + pub fn add(self: ExtendedPoint, other: ExtendedPoint) ExtendedPoint { + return self.addCached(.fromExtended(other)); + } + + fn addLimbs(self: ExtendedPoint, other: ExtendedPoint) ExtendedPoint { return .{ .limbs = .{ self.limbs[0] + other.limbs[0], self.limbs[1] + other.limbs[1], @@ -117,7 +142,7 @@ pub const ExtendedPoint = struct { } pub fn subCached(self: ExtendedPoint, cp: CachedPoint) ExtendedPoint { - return self.addCached(cp.negate()); + return self.addCached(cp.neg()); } fn shuffle(self: ExtendedPoint, comptime control: Shuffle) ExtendedPoint { @@ -143,7 +168,7 @@ pub const ExtendedPoint = struct { fn diffSum(self: ExtendedPoint) ExtendedPoint { const tmp1 = self.shuffle(.BADC); const tmp2 = self.blend(self.negateLazy(), .AC); - return tmp1.add(tmp2); + return tmp1.addLimbs(tmp2); } fn negateLazy(self: ExtendedPoint) ExtendedPoint { @@ -160,7 +185,7 @@ pub const ExtendedPoint = struct { pub fn dbl(self: ExtendedPoint) ExtendedPoint { var tmp0 = self.shuffle(.BADC); - var tmp1 = self.add(tmp0).shuffle(.ABAB); + var tmp1 = self.addLimbs(tmp0).shuffle(.ABAB); tmp0 = self.blend(tmp1, .D); tmp1 = tmp0.reduce().square(); @@ -170,18 +195,34 @@ pub const ExtendedPoint = struct { const S2_S2_S2_S4 = S2_S2_S2_S2.blend(tmp1, .D).negateLazy(); - tmp0 = S1_S1_S1_S1.add(zero.blend(tmp1.add(tmp1), .C)); - tmp0 = tmp0.add(zero.blend(S2_S2_S2_S2, .AD)); - tmp0 = tmp0.add(zero.blend(S2_S2_S2_S4, .BCD)); + tmp0 = S1_S1_S1_S1.addLimbs(zero.blend(tmp1.addLimbs(tmp1), .C)); + tmp0 = tmp0.addLimbs(zero.blend(S2_S2_S2_S2, .AD)); + tmp0 = tmp0.addLimbs(zero.blend(S2_S2_S2_S4, .BCD)); const tmp2 = tmp0.reduce(); return tmp2.shuffle(.DBBD).mul(tmp2.shuffle(.CACA)); } + + pub fn mulByPow2(self: ExtendedPoint, comptime k: u32) ExtendedPoint { + var s = self; + for (0..k) |_| s = s.dbl(); + return s; + } }; pub const 
CachedPoint = struct { limbs: [5]u64x4, + // zig fmt: off + pub const identityElement: CachedPoint = .{ .limbs = .{ + .{ 121647, 121666, 243332, 2251799813685229 }, + .{ 2251799813685248, 0, 0, 2251799813685247 }, + .{ 2251799813685247, 0, 0, 2251799813685247 }, + .{ 2251799813685247, 0, 0, 2251799813685247 }, + .{ 2251799813685247, 0, 0, 2251799813685247 }, + } }; + // zig fmt: on + fn mul(self: CachedPoint, b: CachedPoint) ExtendedPoint { const x = self.limbs; const y = b.limbs; @@ -485,7 +526,7 @@ pub const CachedPoint = struct { } }; } - fn negate(self: CachedPoint) CachedPoint { + pub fn neg(self: CachedPoint) CachedPoint { const swapped = self.shuffle(.BACD); const negated = ExtendedPoint.fromCached(self).negateLazy().reduce(); return swapped.blend(negated, .D); @@ -582,8 +623,8 @@ test "vpmadd52luq" { const z: u64x4 = @splat(5); try std.testing.expectEqual( - madd52lo(z, x, y), - @as(u64x4, @splat(5 + 2 * 3)), + madd52lo(x, y, z), + @as(u64x4, @splat(2 + 3 * 5)), ); } diff --git a/src/crypto/generic.zig b/src/crypto/ed25519/generic.zig similarity index 95% rename from src/crypto/generic.zig rename to src/crypto/ed25519/generic.zig index 2e1d151873..b5e96a7ddb 100644 --- a/src/crypto/generic.zig +++ b/src/crypto/ed25519/generic.zig @@ -363,17 +363,17 @@ pub const ExtendedPoint = struct { var tmp0 = self.shuffle(.ABAB); var tmp1 = tmp0.shuffle(.BADC); - tmp0 = self.blend(tmp0.add(tmp1), .D); + tmp0 = self.blend(tmp0.addLimbs(tmp1), .D); tmp1 = tmp0.squareAndNegateD(); const S_1 = tmp1.shuffle(.AAAA); const S_2 = tmp1.shuffle(.BBBB); - tmp0 = zero.blend(tmp1.add(tmp1), .C); + tmp0 = zero.blend(tmp1.addLimbs(tmp1), .C); tmp0 = tmp0.blend(tmp1, .D); - tmp0 = tmp0.add(S_1); - tmp0 = tmp0.add(zero.blend(S_2, .AD)); - tmp0 = tmp0.add(zero.blend(S_2.negateLazy(), .BC)); + tmp0 = tmp0.addLimbs(S_1); + tmp0 = tmp0.addLimbs(zero.blend(S_2, .AD)); + tmp0 = tmp0.addLimbs(zero.blend(S_2.negateLazy(), .BC)); tmp1 = tmp0.shuffle(.DBBD); tmp0 = tmp0.shuffle(.CACA); @@ -527,6 
+527,10 @@ pub const ExtendedPoint = struct { } pub fn add(self: ExtendedPoint, other: ExtendedPoint) ExtendedPoint { + return self.addCached(.fromExtended(other)); + } + + pub fn addLimbs(self: ExtendedPoint, other: ExtendedPoint) ExtendedPoint { return .{ .limbs = .{ self.limbs[0] + other.limbs[0], self.limbs[1] + other.limbs[1], @@ -562,7 +566,7 @@ pub const ExtendedPoint = struct { // tmp2 = (-A, B, -C, D) const tmp2 = self.blend(self.negateLazy(), .AC); // (B - A, B + A, D - C, D + C) - return tmp1.add(tmp2); + return tmp1.addLimbs(tmp2); } /// Square the field element and negate the result's `D` value. @@ -631,11 +635,27 @@ pub const ExtendedPoint = struct { return reduce64(.{ z0, z1, z2, z3, z4, z5, z6, z7, z8, z9 }); } + + pub fn mulByPow2(self: ExtendedPoint, comptime k: u32) ExtendedPoint { + var s = self; + for (0..k) |_| s = s.dbl(); + return s; + } }; pub const CachedPoint = struct { element: ExtendedPoint, + // zig fmt: off + pub const identityElement: CachedPoint = .{ .element = .{ .limbs = .{ + .{ 121647, 121666, 0, 0, 243332, 67108845, 0, 33554431 }, + .{ 67108864, 0, 33554431, 0, 0, 67108863, 0, 33554431 }, + .{ 67108863, 0, 33554431, 0, 0, 67108863, 0, 33554431 }, + .{ 67108863, 0, 33554431, 0, 0, 67108863, 0, 33554431 }, + .{ 67108863, 0, 33554431, 0, 0, 67108863, 0, 33554431 }, + } } }; + // zig fmt: on + pub fn fromExtended(p: ExtendedPoint) CachedPoint { var x = p; @@ -646,7 +666,7 @@ pub const CachedPoint = struct { return .{ .element = x }; } - fn neg(self: CachedPoint) CachedPoint { + pub fn neg(self: CachedPoint) CachedPoint { const swapped = self.element.shuffle(.BACD); const element = swapped.blend(swapped.negateLazy(), .D); return .{ .element = element }; @@ -726,7 +746,7 @@ test "add vs serial" { const vec = ExtendedPoint.init(x0, x1, x2, x3); const vecprime = vec; - const result = vec.add(vecprime).split(); + const result = vec.addLimbs(vecprime).split(); try std.testing.expectEqual(x0.add(x0), result[0]); try 
std.testing.expectEqual(x1.add(x1), result[1]); diff --git a/src/crypto/ed25519/lib.zig b/src/crypto/ed25519/lib.zig new file mode 100644 index 0000000000..5c6a76946d --- /dev/null +++ b/src/crypto/ed25519/lib.zig @@ -0,0 +1,697 @@ +const sig = @import("../../sig.zig"); +const std = @import("std"); +const builtin = @import("builtin"); + +pub const pippenger = @import("pippenger.zig"); +pub const straus = @import("straus.zig"); + +pub const mul = straus.mul; +pub const mulManyWithSameScalar = straus.mulManyWithSameScalar; +pub const mulMulti = straus.mulMulti; + +const convention: std.builtin.CallingConvention = switch (builtin.mode) { + .ReleaseFast => .@"inline", + else => .auto, +}; + +const generic = @import("generic.zig"); +const avx512 = @import("avx512.zig"); +const has_avx512 = builtin.cpu.arch == .x86_64 and + std.Target.x86.featureSetHas(builtin.cpu.features, .avx512ifma) and + std.Target.x86.featureSetHas(builtin.cpu.features, .avx512vl); +pub const use_avx125 = has_avx512 and builtin.zig_backend == .stage2_llvm; + +// avx512 implementation relies on llvm specific tricks +const namespace = if (use_avx125) avx512 else generic; +pub const ExtendedPoint = namespace.ExtendedPoint; +pub const CachedPoint = namespace.CachedPoint; + +const Edwards25519 = std.crypto.ecc.Edwards25519; +const Ristretto255 = std.crypto.ecc.Ristretto255; +const Sha512 = std.crypto.hash.sha2.Sha512; +const CompressedScalar = [32]u8; + +/// Mainly used for transaction signature verification. +/// +/// Verifies signatures in a somewhat batched manner in order to retain conformance with Agave's +/// loop of verify_stricts. Due to the unfortunately inconsistent nature of EdDSA, while +/// a batched method would be faster and remain compliant with the RFC, it would fail to +/// catch certain types of invalid signatures, incorrectly allowing them, thus breaking consensus +/// with the rest of the network. 
+/// +/// Perhaps in the future we can move Solana over to using ed25519-zebra or move from a `verify_strict` +/// loop to a `verify` one, allowing batched verification. +pub fn verifyBatchOverSingleMessage( + max: comptime_int, + signatures: []const sig.core.Signature, + public_keys: []const sig.core.Pubkey, + message: []const u8, +) !void { + std.debug.assert(signatures.len <= max); + std.debug.assert(public_keys.len <= max); + std.debug.assert(signatures.len == public_keys.len); + + var s_batch: std.BoundedArray(CompressedScalar, max) = .{}; + var a_batch: std.BoundedArray(Edwards25519, max) = .{}; + var hram_batch: std.BoundedArray(CompressedScalar, max) = .{}; + var expected_r_batch: std.BoundedArray(Edwards25519, max) = .{}; + + for (signatures, public_keys) |signature, pubkey| { + const r = signature.r; + const s = signature.s; + + try Edwards25519.scalar.rejectNonCanonical(s); + + const a = try Edwards25519.fromBytes(pubkey.data); + const expected_r = try Edwards25519.fromBytes(r); + + try affineLowOrder(a); + try affineLowOrder(expected_r); + + var h = Sha512.init(.{}); + h.update(&r); + h.update(&pubkey.data); + h.update(message); + var hram64: [Sha512.digest_length]u8 = undefined; + h.final(&hram64); + + expected_r_batch.appendAssumeCapacity(expected_r); + s_batch.appendAssumeCapacity(s); + a_batch.appendAssumeCapacity(a); + hram_batch.appendAssumeCapacity(Edwards25519.scalar.reduce64(hram64)); + } + + for ( + a_batch.constSlice(), + hram_batch.constSlice(), + s_batch.constSlice(), + expected_r_batch.constSlice(), + ) |a, k, s, expected_r| { + const r = doubleBaseMul(k, a.neg(), s); + if (!affineEqual(r, expected_r)) return error.InvalidSignature; + } +} + +/// See the doc-comment above `verifyBatchOverSingleMessage` for further detail, +/// but this is that same thing, just for single messages, and with the ability to toggle +/// between `verify` and `verify_strict` semantics (used in ed25519 precompile). 
+pub fn verifySignature( + signature: sig.core.Signature, + pubkey: sig.core.Pubkey, + message: []const u8, + strict: bool, +) !void { + const s = signature.s; + const r = signature.r; + try Edwards25519.scalar.rejectNonCanonical(s); + + const a = try Edwards25519.fromBytes(pubkey.data); + const expected_r = try Edwards25519.fromBytes(r); + + if (strict) { + try affineLowOrder(a); + try affineLowOrder(expected_r); + } + + var h = Sha512.init(.{}); + h.update(&r); + h.update(&pubkey.data); + h.update(message); + var hram64: [Sha512.digest_length]u8 = undefined; + h.final(&hram64); + + const computed = doubleBaseMul(Edwards25519.scalar.reduce64(hram64), a.neg(), s); + if (!affineEqual(computed, expected_r)) return error.InvalidSignature; +} + +/// Equate two ed25519 points with the assumption that b.z is 1. +/// b.z == 1 is common when we have just deserialized a point from the wire +pub fn affineEqual(a: Edwards25519, b: Edwards25519) bool { + const x1 = b.x.mul(a.z); + const y1 = b.y.mul(a.z); + return x1.equivalent(a.x) and y1.equivalent(a.y); +} + +/// Determines whether `a` is of small order (in the torsion subgroup E[8]), but with the +/// assumption that `a.Z == 1`. +/// +/// There are 8 points with an order <= 8: +/// Order | Point | Serialize Point +/// 1 (0, 1) 0100000000000000000000000000000000000000000000000000000000000000 +/// 2 (0, 2^255 - 20) ecffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f +/// 4 (-sqrt(-1), 0) 0000000000000000000000000000000000000000000000000000000000000080 +/// 4 (sqrt(-1), 0) 0000000000000000000000000000000000000000000000000000000000000000 +/// 8 ... c7176a703d4dd84fba3c0b760d10670f2a2053fa2c39ccc64ec7fd7792ac037a +/// 8 ... c7176a703d4dd84fba3c0b760d10670f2a2053fa2c39ccc64ec7fd7792ac03fa +/// 8 ... 26e8958fc2b227b045c3f489f2ef98f0d5dfac05d3c63339b13802886d53fc05 +/// 8 ...
26e8958fc2b227b045c3f489f2ef98f0d5dfac05d3c63339b13802886d53fc85 +/// +/// Since in this function we know that Z will be 1, we don't need to perform any +/// normalization to cancel out the projective denominator, instead just directly performing +/// checks on the x,y coordinates. You'll notice that low-order points when negated still +/// retain their low-order nature, so there are 4 "pairs" of low order points. This means +/// just checking a single coordinate of the point is enough to determine if it's in the blacklist, +/// meaning we only need 4 equivalence checks to cover all of the pairs. +/// +pub fn affineLowOrder(a: Edwards25519) !void { + // y coordinate of points 5 and 6 + const y0: Edwards25519.Fe = .{ .limbs = .{ + 0x4d3d706a17c7, + 0x1aec1679749fb, + 0x14c80a83d9c40, + 0x3a763661c967d, + 0x7a03ac9277fdc, + } }; + // y coordinate of points 7 and 8 + const y1: Edwards25519.Fe = .{ .limbs = .{ + 0x7b2c28f95e826, + 0x6513e9868b604, + 0x6b37f57c263bf, + 0x4589c99e36982, + 0x5fc536d88023, + } }; + + if (a.x.isZero() // first pair + or a.y.isZero() // second pair + or a.y.equivalent(y0) // third pair + or a.y.equivalent(y1) // fourth pair + ) return error.WeakPublicKey; +} + +pub fn ReturnType(encoded: bool, ristretto: bool) type { + const Base = if (ristretto) Ristretto255 else Edwards25519; + return if (encoded) (error{NonCanonical} || std.crypto.errors.EncodingError)!Base else Base; +} + +pub fn PointType(encoded: bool, ristretto: bool) type { + if (encoded) return [32]u8; + return if (ristretto) Ristretto255 else Edwards25519; +} + +/// MSM in variable time with a runtime known (but comptime bounded) number +/// of points. useful for things such as bulletproofs where we are generic over +/// the bitsize and it can change between being more optimal to use straus or pippenger. +/// +/// Generally speaking, `mulMulti` will be more useful as in most cases the number of points +/// and scalars is known ahead of time. 
+pub fn mulMultiRuntime( + comptime max_elements: comptime_int, + /// Set to true if the input is in wire-format. This lets us usually save + /// an extra stack copy and loop when buffering the decoding process, + /// instead just doing it once here straight into the extended point form. + /// + /// Changes the return type of the function to an error union, in case + /// the encoded points decode into a non-canonical form. + comptime encoded: bool, + /// (Option only applies if we're decoding from a wire format). + /// + /// Set to true if the wire format we're decoding from is Ristretto instead + /// of Edwards25519. The actual MSM itself still happens on the underlying + /// Edwards25519 element, since there's no difference between the operation + /// on Ristretto and Edwards25519, but the decoding is different. + comptime ristretto: bool, + ed_points: []const PointType(encoded, ristretto), + compressed_scalars: []const CompressedScalar, +) ReturnType(encoded, ristretto) { + // through empirical benchmarking, we see that pippenger's MSM becomes faster around + // the 190 element mark. + // TODO: maybe consider checking the `max_elements < 190` here instead + // in order to avoid generating both versions? probably would be slower, not sure about the + // code size impact. + + if (ed_points.len < 190) { + return straus.mulMultiRuntime( + max_elements, + encoded, + ristretto, + ed_points, + compressed_scalars, + ); + } else { + return pippenger.mulMultiRuntime( + max_elements, + encoded, + ristretto, + ed_points, + compressed_scalars, + ); + } +} + +/// Stores a lookup table of multiplications of a point over radix-16 scalars, which is the most +/// common usecase for straus' method. table contains 1P, 2P, 3P, 4P, 5P, 6P, 7P, 8P, and +/// our window for the scalar indexes into it.
Since we want radix-16 (i.e one nibble per byte), +/// we need 16 points, however we can optimize further by centering the radix at 0 (-8..8) and +/// negating the cached point if the radix is below zero. Thus our initialization for the table +/// is twice as cheap while retaining the same effect. +pub const LookupTable = struct { + table: [8]CachedPoint, + + pub fn init(point: Edwards25519) callconv(convention) LookupTable { + const e: ExtendedPoint = .fromPoint(point); + var points: [8]CachedPoint = @splat(.fromExtended(e)); + for (0..7) |i| points[i + 1] = .fromExtended(e.addCached(points[i])); + return .{ .table = points }; + } + + /// NOTE: variable time! + pub fn select(self: LookupTable, index: i8) callconv(convention) CachedPoint { + // ensure we're in radix + std.debug.assert(index >= -8); + std.debug.assert(index <= 8); + + const abs = @abs(index); + + // t == |x| * P + var t: CachedPoint = if (abs == 0) .identityElement else self.table[abs - 1]; + // if index was negative, negate the point + if (index < 0) t = t.neg(); + + return t; + } +}; + +/// Similar structure to `LookupTable` but it holds odd multiples of the root point: +/// 1A, 3A, 5A, 7A, 9A, 11A, 13A, 15A.
+const NafLookupTable5 = struct { + table: [8]CachedPoint, + + fn init(point: Edwards25519) callconv(convention) NafLookupTable5 { + const A: ExtendedPoint = .fromPoint(point); + var Ai: [8]CachedPoint = @splat(.fromExtended(A)); + const A2 = A.dbl(); + for (0..7) |i| Ai[i + 1] = .fromExtended(A2.addCached(Ai[i])); + return .{ .table = Ai }; + } + + fn select(self: NafLookupTable5, index: u64) CachedPoint { + std.debug.assert(index & 1 == 1); // make sure the index is odd + std.debug.assert(index < 16); // fits inside + return self.table[index / 2]; + } +}; + +/// Same thing as `NafLookupTable5` but just stores points for radix 2^8 instead of 2^5 +const NafLookupTable8 = struct { + table: [64]CachedPoint, + + fn init(point: Edwards25519) callconv(convention) NafLookupTable8 { + const A: ExtendedPoint = .fromPoint(point); + var Ai: [64]CachedPoint = @splat(.fromExtended(A)); + const A2 = A.dbl(); + for (0..63) |i| Ai[i + 1] = .fromExtended(A2.addCached(Ai[i])); + return .{ .table = Ai }; + } + + fn select(self: NafLookupTable8, index: u64) CachedPoint { + std.debug.assert(index & 1 == 1); // make sure the index is odd + std.debug.assert(index < 128); + return self.table[index / 2]; + } +}; + +/// Compute `(aA + bB)`, in variable time, where `B` is the Ed25519 basepoint. +pub fn doubleBaseMul(a: CompressedScalar, A: Edwards25519, b: CompressedScalar) Edwards25519 { + const a_naf = asNaf(a, 5); + const b_naf = asNaf(b, 8); + + // Search through our NAFs to find the first index that will actually affect the outcome. + // Otherwise the prepending 0s added by `asNaf` will just keep doubling the identityElement. 
+ var i: u64 = std.math.maxInt(u8); + for (0..256) |rev| { + i = 256 - rev - 1; + if (a_naf[i] != 0 or b_naf[i] != 0) break; + } + + const table_A: NafLookupTable5 = .init(A); + + // avx512 backend only needs ~25k quota, but avx2 one needs ~100k + // TODO: make comptime precompilation stuff use the avx512 one because of this + @setEvalBranchQuota(100_000); + + // Since we are pre-computing the basePoint lookup table, we might as well pre-compute it + // for a larger amount of points in order to make it fast. + const table_B: NafLookupTable8 = comptime .init(.basePoint); + + var Q: ExtendedPoint = .identityElement; + while (true) { + Q = Q.dbl(); + + switch (std.math.order(a_naf[i], 0)) { + .gt => Q = Q.addCached(table_A.select(@intCast(a_naf[i]))), + .lt => Q = Q.subCached(table_A.select(@intCast(-a_naf[i]))), + .eq => {}, + } + + switch (std.math.order(b_naf[i], 0)) { + .gt => Q = Q.addCached(table_B.select(@intCast(b_naf[i]))), + .lt => Q = Q.subCached(table_B.select(@intCast(-b_naf[i]))), + .eq => {}, + } + + if (i == 0) break; + i -= 1; + } + + return Q.toPoint(); +} + +/// Ported from: https://github.com/dalek-cryptography/curve25519-dalek/blob/c3a82a8a38a58aee500a20bde1664012fcfa83ba/curve25519-dalek/src/scalar.rs#L958 +fn asNaf(a: CompressedScalar, w: comptime_int) [256]i8 { + std.debug.assert(w >= 2); + std.debug.assert(w <= 8); + + var naf: [256]i8 = @splat(0); + + var x: [5]u64 = @splat(0); + @memcpy(std.mem.asBytes(x[0..4]), &a); + + const width = 1 << w; + const window_mask = width - 1; + + var pos: u64 = 0; + var carry: u64 = 0; + while (pos < 256) { + const idx = pos / 64; + const bit_idx: std.math.Log2Int(u64) = @intCast(pos % 64); + + const bit_buf: u64 = switch (bit_idx) { + 0...63 - w => x[idx] >> bit_idx, + else => x[idx] >> bit_idx | x[1 + idx] << @intCast(64 - @as(u7, bit_idx)), + }; + + const window = carry + (bit_buf & window_mask); + + if (window & 1 == 0) { + pos += 1; + continue; + } + + if (window < width / 2) { + carry = 0; + naf[pos] = 
@intCast(window); + } else { + carry = 1; + const signed: i64 = @bitCast(window); + naf[pos] = @as(i8, @truncate(signed)) -% @as(i8, @truncate(width)); + } + + pos += w; + } + + return naf; +} + +test asNaf { + // https://github.com/dalek-cryptography/curve25519-dalek/blob/c3a82a8a38a58aee500a20bde1664012fcfa83ba/curve25519-dalek/src/scalar.rs#L1495-L1513 + const A_SCALAR: [32]u8 = .{ + 0x1a, 0x0e, 0x97, 0x8a, 0x90, 0xf6, 0x62, 0x2d, 0x37, 0x47, 0x02, 0x3f, 0x8a, 0xd8, + 0x26, 0x4d, 0xa7, 0x58, 0xaa, 0x1b, 0x88, 0xe0, 0x40, 0xd1, 0x58, 0x9e, 0x7b, 0x7f, + 0x23, 0x76, 0xef, 0x09, + }; + const A_NAF: [256]i8 = .{ + 0, 13, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, -9, 0, 0, 0, 0, -11, + 0, 0, 0, 0, 3, 0, 0, 0, 0, 1, 0, 0, 0, 0, 9, 0, 0, 0, 0, -5, 0, 0, + 0, 0, 0, 0, 3, 0, 0, 0, 0, 11, 0, 0, 0, 0, 11, 0, 0, 0, 0, 0, -9, 0, + 0, 0, 0, 0, -3, 0, 0, 0, 0, 9, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, + -1, 0, 0, 0, 0, 0, 9, 0, 0, 0, 0, -15, 0, 0, 0, 0, -7, 0, 0, 0, 0, -9, + 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 13, 0, 0, 0, 0, 0, -3, 0, 0, 0, 0, -11, + 0, 0, 0, 0, -7, 0, 0, 0, 0, -13, 0, 0, 0, 0, 11, 0, 0, 0, 0, -9, 0, 0, + 0, 0, 0, 1, 0, 0, 0, 0, 0, -15, 0, 0, 0, 0, 1, 0, 0, 0, 0, 7, 0, 0, + 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 13, 0, 0, 0, 0, 0, 0, 11, 0, 0, + 0, 0, 0, 15, 0, 0, 0, 0, 0, -9, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, + 0, 0, 0, 7, 0, 0, 0, 0, 0, -15, 0, 0, 0, 0, 0, 15, 0, 0, 0, 0, 15, 0, + 0, 0, 0, 15, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, + }; + + const result = asNaf(A_SCALAR, 5); + try std.testing.expectEqualSlices(i8, &A_NAF, &result); +} + +test "wnaf reconstruction" { + const Scalar = Edwards25519.scalar.Scalar; + for (0..1000) |_| { + const scalar: Scalar = .random(); + inline for (.{ 5, 6, 7, 8 }) |w| { + const naf = asNaf(scalar.toBytes(), w); + var y: Scalar = .fromBytes(@splat(0)); + for (0..256) |rev| { + const i = 256 - rev - 1; + y = y.add(y); + + const n = @abs(naf[i]); + var limbs: [32]u8 = @splat(0); + std.mem.writeInt(u64, limbs[0..8], n, .little); + + const digit: 
Scalar = .fromBytes(if (naf[i] < 0) + Edwards25519.scalar.neg(limbs) + else + limbs); + + y = y.add(digit); + } + + try std.testing.expectEqual(y, scalar); + } + } +} + +test doubleBaseMul { + // https://github.com/dalek-cryptography/curve25519-dalek/blob/c3a82a8a38a58aee500a20bde1664012fcfa83ba/curve25519-dalek/src/edwards.rs#L1812-L1835 + const A_TIMES_BASEPOINT: [32]u8 = .{ + 0xea, 0x27, 0xe2, 0x60, 0x53, 0xdf, 0x1b, 0x59, 0x56, 0xf1, 0x4d, 0x5d, 0xec, 0x3c, 0x34, + 0xc3, 0x84, 0xa2, 0x69, 0xb7, 0x4c, 0xc3, 0x80, 0x3e, 0xa8, 0xe2, 0xe7, 0xc9, 0x42, 0x5e, + 0x40, 0xa5, + }; + const A_SCALAR: [32]u8 = .{ + 0x1a, 0x0e, 0x97, 0x8a, 0x90, 0xf6, 0x62, 0x2d, 0x37, 0x47, 0x02, 0x3f, 0x8a, 0xd8, + 0x26, 0x4d, 0xa7, 0x58, 0xaa, 0x1b, 0x88, 0xe0, 0x40, 0xd1, 0x58, 0x9e, 0x7b, 0x7f, + 0x23, 0x76, 0xef, 0x09, + }; + const B_SCALAR: [32]u8 = .{ + 0x91, 0x26, 0x7a, 0xcf, 0x25, 0xc2, 0x09, 0x1b, 0xa2, 0x17, 0x74, 0x7b, 0x66, 0xf0, + 0xb3, 0x2e, 0x9d, 0xf2, 0xa5, 0x67, 0x41, 0xcf, 0xda, 0xc4, 0x56, 0xa7, 0xd4, 0xaa, + 0xb8, 0x60, 0x8a, 0x05, + }; + const DOUBLE_BASE_MUL_RESULT: [32]u8 = .{ + 0x7d, 0xfd, 0x6c, 0x45, 0xaf, 0x6d, 0x6e, 0x0e, 0xba, 0x20, 0x37, 0x1a, 0x23, 0x64, 0x59, + 0xc4, 0xc0, 0x46, 0x83, 0x43, 0xde, 0x70, 0x4b, 0x85, 0x09, 0x6f, 0xfe, 0x35, 0x4f, 0x13, + 0x2b, 0x42, + }; + + const A: Edwards25519 = try .fromBytes(A_TIMES_BASEPOINT); + const result = doubleBaseMul(A_SCALAR, A, B_SCALAR); + + try std.testing.expectEqualSlices(u8, &result.toBytes(), &DOUBLE_BASE_MUL_RESULT); +} + +test "eddsa test cases" { + const Vec = struct { + msg_hex: []const u8, + public_key_hex: *const [64:0]u8, + sig_hex: *const [128:0]u8, + expected: ?anyerror, + }; + + // Entries based off of ed25519-dalek 2.0 `verify_strict`. Dalek sometimes returns slightly + // different types of errors, due to differences in the order of input parsing, but the + // main factor we care about is whether or not it accepts the signature. 
+ // sig fmt: off + const entries = [_]Vec{ + Vec{ + .msg_hex = "8c93255d71dcab10e8f379c26200f3c7bd5f09d9bc3068d3ef4edeb4853022b6", + .public_key_hex = "c7176a703d4dd84fba3c0b760d10670f2a2053fa2c39ccc64ec7fd7792ac03fa", + .sig_hex = "c7176a703d4dd84fba3c0b760d10670f2a2053fa2c39ccc64ec7fd7792ac037a0000000000000000000000000000000000000000000000000000000000000000", + .expected = error.WeakPublicKey, // 0 + }, + Vec{ + .msg_hex = "9bd9f44f4dcc75bd531b56b2cd280b0bb38fc1cd6d1230e14861d861de092e79", + .public_key_hex = "c7176a703d4dd84fba3c0b760d10670f2a2053fa2c39ccc64ec7fd7792ac03fa", + .sig_hex = "f7badec5b8abeaf699583992219b7b223f1df3fbbea919844e3f7c554a43dd43a5bb704786be79fc476f91d3f3f89b03984d8068dcf1bb7dfc6637b45450ac04", + .expected = error.WeakPublicKey, // 1 + }, + Vec{ + .msg_hex = "48656c6c6f", + .public_key_hex = "7d4d0e7f6153a69b6242b522abbee685fda4420f8834b108c3bdae369ef549fa", + .sig_hex = "1c1ad976cbaae3b31dee07971cf92c928ce2091a85f5899f5e11ecec90fc9f8e93df18c5037ec9b29c07195ad284e63d548cd0a6fe358cc775bd6c1608d2c905", + .expected = null, + }, + Vec{ + .msg_hex = "9bd9f44f4dcc75bd531b56b2cd280b0bb38fc1cd6d1230e14861d861de092e79", + .public_key_hex = "cdb267ce40c5cd45306fa5d2f29731459387dbf9eb933b7bd5aed9a765b88d4d", + .sig_hex = "9046a64750444938de19f227bb80485e92b83fdb4b6506c160484c016cc1852f87909e14428a7a1d62e9f22f3d3ad7802db02eb2e688b6c52fcd6648a98bd009", + .expected = null, // 3 - mixed orders + }, + Vec{ + .msg_hex = "e47d62c63f830dc7a6851a0b1f33ae4bb2f507fb6cffec4011eaccd55b53f56c", + .public_key_hex = "cdb267ce40c5cd45306fa5d2f29731459387dbf9eb933b7bd5aed9a765b88d4d", + .sig_hex = "160a1cb0dc9c0258cd0a7d23e94d8fa878bcb1925f2c64246b2dee1796bed5125ec6bc982a269b723e0668e540911a9a6a58921d6925e434ab10aa7940551a09", + .expected = error.InvalidSignature, // 4 - cofactored verification + }, + Vec{ + .msg_hex = "e47d62c63f830dc7a6851a0b1f33ae4bb2f507fb6cffec4011eaccd55b53f56c", + .public_key_hex = 
"cdb267ce40c5cd45306fa5d2f29731459387dbf9eb933b7bd5aed9a765b88d4d", + .sig_hex = "21122a84e0b5fca4052f5b1235c80a537878b38f3142356b2c2384ebad4668b7e40bc836dac0f71076f9abe3a53f9c03c1ceeeddb658d0030494ace586687405", + .expected = error.InvalidSignature, // 5 - cofactored verification + }, + Vec{ + .msg_hex = "85e241a07d148b41e47d62c63f830dc7a6851a0b1f33ae4bb2f507fb6cffec40", + .public_key_hex = "442aad9f089ad9e14647b1ef9099a1ff4798d78589e66f28eca69c11f582a623", + .sig_hex = "e96f66be976d82e60150baecff9906684aebb1ef181f67a7189ac78ea23b6c0e547f7690a0e2ddcd04d87dbc3490dc19b3b3052f7ff0538cb68afb369ba3a514", + .expected = error.NonCanonical, // 6 - S > L + }, + Vec{ + .msg_hex = "85e241a07d148b41e47d62c63f830dc7a6851a0b1f33ae4bb2f507fb6cffec40", + .public_key_hex = "442aad9f089ad9e14647b1ef9099a1ff4798d78589e66f28eca69c11f582a623", + .sig_hex = "8ce5b96c8f26d0ab6c47958c9e68b937104cd36e13c33566acd2fe8d38aa19427e71f98a473474f2f13f06f97c20d58cc3f54b8bd0d272f42b695dd7e89a8c22", + .expected = error.NonCanonical, // 7 - S >> L + }, + Vec{ + .msg_hex = "9bedc267423725d473888631ebf45988bad3db83851ee85c85e241a07d148b41", + .public_key_hex = "f7badec5b8abeaf699583992219b7b223f1df3fbbea919844e3f7c554a43dd43", + .sig_hex = "ecffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff03be9678ac102edcd92b0210bb34d7428d12ffc5df5f37e359941266a4e35f0f", + .expected = error.WeakPublicKey, // 8 - non-canonical R + }, + Vec{ + .msg_hex = "9bedc267423725d473888631ebf45988bad3db83851ee85c85e241a07d148b41", + .public_key_hex = "f7badec5b8abeaf699583992219b7b223f1df3fbbea919844e3f7c554a43dd43", + .sig_hex = "ecffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffca8c5b64cd208982aa38d4936621a4775aa233aa0505711d8fdcfdaa943d4908", + .expected = error.WeakPublicKey, // 9 - non-canonical R + }, + Vec{ + .msg_hex = "e96b7021eb39c1a163b6da4e3093dcd3f21387da4cc4572be588fafae23c155b", + .public_key_hex = "ecffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + .sig_hex = 
"a9d55260f765261eb9b84e106f665e00b867287a761990d7135963ee0a7d59dca5bb704786be79fc476f91d3f3f89b03984d8068dcf1bb7dfc6637b45450ac04", + .expected = error.WeakPublicKey, // 10 - small-order A + }, + Vec{ + .msg_hex = "39a591f5321bbe07fd5a23dc2f39d025d74526615746727ceefd6e82ae65c06f", + .public_key_hex = "ecffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + .sig_hex = "a9d55260f765261eb9b84e106f665e00b867287a761990d7135963ee0a7d59dca5bb704786be79fc476f91d3f3f89b03984d8068dcf1bb7dfc6637b45450ac04", + .expected = error.WeakPublicKey, // 11 - small-order A + }, + }; + // sig fmt: on + + for (entries) |entry| { + var msg: [64 / 2]u8 = undefined; + const msg_len = entry.msg_hex.len / 2; + _ = try std.fmt.hexToBytes(msg[0..msg_len], entry.msg_hex); + var public_key_bytes: [32]u8 = undefined; + _ = try std.fmt.hexToBytes(&public_key_bytes, entry.public_key_hex); + var sig_bytes: [64]u8 = undefined; + _ = try std.fmt.hexToBytes(&sig_bytes, entry.sig_hex); + + const public_key: sig.core.Pubkey = try .fromBytes(public_key_bytes); + const signature: sig.core.Signature = .fromBytes(sig_bytes); + + const result = verifyBatchOverSingleMessage( + 1, + &.{signature}, + &.{public_key}, + msg[0..msg_len], + ); + + if (entry.expected) |error_type| { + try std.testing.expectError(error_type, result); + } else { + try result; + } + } +} + +test "batch verification" { + for (0..100) |_| { + const key_pair1 = std.crypto.sign.Ed25519.KeyPair.generate(); + const key_pair2 = std.crypto.sign.Ed25519.KeyPair.generate(); + var msg1: [64]u8 = undefined; + var msg2: [64]u8 = undefined; + std.crypto.random.bytes(&msg1); + std.crypto.random.bytes(&msg2); + const sig1 = try key_pair1.sign(&msg1, null); + const sig2 = try key_pair2.sign(&msg1, null); + + try verifyBatchOverSingleMessage(2, &.{ + .fromSignature(sig1), + .fromSignature(sig2), + }, &.{ + .fromPublicKey(&key_pair1.public_key), + .fromPublicKey(&key_pair2.public_key), + }, &msg1); + + try std.testing.expectError( + 
error.InvalidSignature, + verifyBatchOverSingleMessage(2, &.{ + .fromSignature(sig1), + .fromSignature(sig2), + }, &.{ + .fromPublicKey(&key_pair1.public_key), + .fromPublicKey(&key_pair1.public_key), + }, &msg1), + ); + + try std.testing.expectError( + error.InvalidSignature, + verifyBatchOverSingleMessage(2, &.{ + .fromSignature(sig1), + .fromSignature(sig2), + }, &.{ + .fromPublicKey(&key_pair1.public_key), + .fromPublicKey(&key_pair2.public_key), + }, &msg2), + ); + } +} + +test "wycheproof" { + const groups = @import("wycheproof.zig").groups; + for (groups) |group| { + var public_key_buffer: [32]u8 = undefined; + const public_key = try std.fmt.hexToBytes(&public_key_buffer, group.pubkey); + if (public_key.len != 32) continue; + + for (group.cases) |case| { + var msg_buffer: [1024]u8 = undefined; + const msg_len = case.msg.len / 2; + const message = try std.fmt.hexToBytes(msg_buffer[0..msg_len], case.msg); + + var sig_buffer: [64]u8 = undefined; + if (case.sig.len > 64 * 2) continue; + const signature_bytes = try std.fmt.hexToBytes(&sig_buffer, case.sig); + if (signature_bytes.len != 64) continue; + + const pubkey = sig.core.Pubkey.fromBytes(public_key_buffer) catch continue; + const signature: sig.core.Signature = .fromBytes(sig_buffer); + + // Single verify + { + const result = verifyBatchOverSingleMessage( + 1, + &.{signature}, + &.{pubkey}, + message, + ); + + switch (case.expected) { + .valid => try result, + .invalid => try std.testing.expect(std.meta.isError(result)), + } + } + + // Multi verify + { + const result = verifyBatchOverSingleMessage( + 10, // more max than inputs + &.{ signature, signature, signature, signature }, + &.{ pubkey, pubkey, pubkey, pubkey }, + message, + ); + + switch (case.expected) { + .valid => try result, + .invalid => try std.testing.expect(std.meta.isError(result)), + } + } + } + } +} diff --git a/src/crypto/pippenger.zig b/src/crypto/ed25519/pippenger.zig similarity index 79% rename from src/crypto/pippenger.zig rename to 
src/crypto/ed25519/pippenger.zig index 12c68e4d97..dc7ed703ef 100644 --- a/src/crypto/pippenger.zig +++ b/src/crypto/ed25519/pippenger.zig @@ -3,7 +3,7 @@ //! See Section 4. of const std = @import("std"); -const sig = @import("../sig.zig"); +const sig = @import("../../sig.zig"); const crypto = std.crypto; const Ed25519 = crypto.ecc.Edwards25519; const Ristretto255 = crypto.ecc.Ristretto255; @@ -22,21 +22,47 @@ fn PointType(encoded: bool, ristretto: bool) type { return if (ristretto) Ristretto255 else Ed25519; } -pub fn mulMulti( +fn asRadix2w(c: CompressedScalar, w: u6) [64]i8 { + var scalars: [4]u64 = @splat(0); + @memcpy(scalars[0..4], std.mem.bytesAsSlice(u64, &c)); + + const radix = @as(u64, 1) << w; + const window_mask = radix - 1; + + var carry: u64 = 0; + const digits_count = (@as(u64, 256) + w - 1) / w; + var digits: [64]i8 = @splat(0); + + for (0..digits_count) |i| { + const bit_offset = i * w; + const u64_idx = bit_offset / 64; + const bit_idx: u6 = @truncate(bit_offset); + const element = scalars[u64_idx] >> bit_idx; + + const below = bit_idx < @as(u64, 64) - w or u64_idx == 3; + const bit_buf: u64 = switch (below) { + true => element, + else => element | (scalars[1 + u64_idx] << @intCast(@as(u64, 64) - bit_idx)), + }; + + const coef = carry + (bit_buf & window_mask); + carry = (coef + (radix / 2)) >> w; + const signed_coef: i64 = @bitCast(coef); + const cindex: i64 = @bitCast(carry << w); + digits[i] = @truncate(signed_coef - cindex); + } + + switch (w) { + 8 => digits[digits_count] += @intCast(@as(i64, @bitCast(carry))), + else => digits[digits_count - 1] += @intCast(@as(i64, @bitCast(carry << w))), + } + + return digits; +} + +pub fn mulMultiRuntime( comptime max_elements: comptime_int, - /// Set to true if the input is in wire-format. This lets us usually save - /// an extra stack copy and loop when buffering the decoding process, - /// instead just doing it once here straight into the extended point form. 
- /// - /// Changes the return type of the function to an error union, in case - /// the encoded points decode into a non-canonical form. comptime encoded: bool, - /// (Option only applies if we're decoding from a wire format). - /// - /// Set to true if the wire format we're decoding from is Ristretto instead - /// of Edwards25519. The actual MSM itself still happens on the underlying - /// Edwards25519 element, since there's no difference between the operation - /// on Ristretto and Edwards25519, but the decoding is different. comptime ristretto: bool, ed_points: []const PointType(encoded, ristretto), compressed_scalars: []const CompressedScalar, @@ -62,7 +88,7 @@ pub fn mulMulti( var points: std.BoundedArray(CachedPoint, max_elements) = .{}; for (compressed_scalars) |s| { - scalars.appendAssumeCapacity(asRadix(s, w)); + scalars.appendAssumeCapacity(asRadix2w(s, w)); } for (ed_points) |l| { // Translate from whatever the input format is to a decompressed Ed25519 point. @@ -105,8 +131,8 @@ pub fn mulMulti( var sum = buckets[buckets_count - 1]; for (0..buckets_count - 1) |bucket_fwd| { const i = buckets_count - 2 - bucket_fwd; - interm_sum = interm_sum.addCached(.fromExtended(buckets[i])); - sum = sum.addCached(.fromExtended(interm_sum)); + interm_sum = interm_sum.add(buckets[i]); + sum = sum.add(interm_sum); } column.* = sum; @@ -114,7 +140,7 @@ pub fn mulMulti( var hi_column = columns[0]; for (columns[1..]) |p| { - hi_column = mulByPow2(hi_column, w).addCached(.fromExtended(p)); + hi_column = hi_column.mulByPow2(w).add(p); } return switch (ristretto) { @@ -122,47 +148,3 @@ pub fn mulMulti( else => hi_column.toPoint(), }; } - -inline fn mulByPow2(p: ExtendedPoint, k: u32) ExtendedPoint { - var s = p; - for (0..k) |_| s = s.dbl(); - return s; -} - -fn asRadix(c: CompressedScalar, w: u6) [64]i8 { - var scalars: [4]u64 = @splat(0); - @memcpy(scalars[0..4], std.mem.bytesAsSlice(u64, &c)); - - const radix = @as(u64, 1) << w; - const window_mask = radix - 1; - - var 
carry: u64 = 0; - const digits_count = (@as(u64, 256) + w - 1) / w; - var digits: [64]i8 = @splat(0); - - for (0..digits_count) |i| { - const bit_offset = i * w; - const u64_idx = bit_offset / 64; - const bit_idx: u6 = @truncate(bit_offset); - const element = scalars[u64_idx] >> bit_idx; - - const below = bit_idx < @as(u64, 64) - w or u64_idx == 3; - const bit_buf: u64 = switch (below) { - true => element, - else => element | (scalars[1 + u64_idx] << @intCast(@as(u64, 64) - bit_idx)), - }; - - const coef = carry + (bit_buf & window_mask); - carry = (coef + (radix / 2)) >> w; - const signed_coef: i64 = @bitCast(coef); - const cindex: i64 = @bitCast(carry << w); - digits[i] = @truncate(signed_coef - cindex); - } - - switch (w) { - 8 => digits[digits_count] += @intCast(@as(i64, @bitCast(carry))), - else => digits[digits_count - 1] += @intCast(@as(i64, @bitCast(carry << w))), - } - - return digits; -} diff --git a/src/crypto/ed25519/straus.zig b/src/crypto/ed25519/straus.zig new file mode 100644 index 0000000000..1f35373a97 --- /dev/null +++ b/src/crypto/ed25519/straus.zig @@ -0,0 +1,161 @@ +const std = @import("std"); +const sig = @import("../../sig.zig"); + +const ed25519 = sig.crypto.ed25519; +const Edwards25519 = std.crypto.ecc.Edwards25519; +const Ristretto255 = std.crypto.ecc.Ristretto255; +const CompressedScalar = Edwards25519.scalar.CompressedScalar; + +const ExtendedPoint = ed25519.ExtendedPoint; +const LookupTable = ed25519.LookupTable; + +fn asRadix16(c: CompressedScalar) [64]i8 { + std.debug.assert(c[31] <= 127); + + var output: [64]i8 = @splat(0); + + // radix 256 -> radix 16 + for (0..32) |i| { + output[i * 2] = @intCast(c[i] & 0b1111); + output[i * 2 + 1] = @intCast(c[i] >> 4); + } + + // recenter + for (0..63) |i| { + const carry = (output[i] + 8) >> 4; + output[i] -= carry << 4; + output[i + 1] += carry; + } + + return output; +} + +pub fn mulMultiRuntime( + comptime max: comptime_int, + comptime encoded: bool, + comptime ristretto: bool, + points: 
[]const ed25519.PointType(encoded, ristretto), + scalars: []const CompressedScalar, +) ed25519.ReturnType(encoded, ristretto) { + std.debug.assert(points.len <= max); + std.debug.assert(scalars.len <= max); + std.debug.assert(points.len == scalars.len); + + var scalars_in_radix: std.BoundedArray([64]i8, max) = .{}; + for (scalars) |scalar| { + scalars_in_radix.appendAssumeCapacity(asRadix16(scalar)); + } + + var lookup_tables: std.BoundedArray(LookupTable, max) = .{}; + for (points) |point| { + // Translate from whatever the input format is to a decompressed Ed25519 point. + const decompressed = switch (encoded) { + true => switch (ristretto) { + true => (try Ristretto255.fromBytes(point)).p, + else => try Edwards25519.fromBytes(point), + }, + else => switch (ristretto) { + true => point.p, + else => point, + }, + }; + lookup_tables.appendAssumeCapacity(.init(decompressed)); + } + + var q: ExtendedPoint = .identityElement; + for (0..64) |rev| { + const i = 64 - rev - 1; + q = q.mulByPow2(4); + for (scalars_in_radix.constSlice(), lookup_tables.constSlice()) |s, lt| { + q = q.addCached(lt.select(s[i])); + } + } + + return switch (ristretto) { + true => .{ .p = q.toPoint() }, + else => q.toPoint(), + }; +} + +/// Same as `mulMulti` except it takes a comptime known amount of points/scalars. Seems +/// to help with inlining part of the precomputation steps to the callsite. 
+pub fn mulMulti( + comptime N: comptime_int, + points: [N]Ristretto255, + scalars: [N]CompressedScalar, +) Ristretto255 { + var radix: [N][64]i8 = undefined; + for (&radix, scalars) |*r, s| { + r.* = asRadix16(s); + } + + var lts: [N]LookupTable = undefined; + for (<s, points) |*lt, p| { + lt.* = .init(p.p); + } + + var q: ExtendedPoint = .identityElement; + for (0..64) |rev| { + const i = 64 - rev - 1; + q = q.mulByPow2(4); + for (&radix, <s) |s, lt| { + q = q.addCached(lt.select(s[i])); + } + } + + return .{ .p = q.toPoint() }; +} + +/// variable time, variable base scalar multiplication +pub fn mul( + comptime ristretto: bool, + point: ed25519.PointType(false, ristretto), + scalar: CompressedScalar, +) ed25519.PointType(false, ristretto) { + const lookup_table: LookupTable = .init(if (ristretto) point.p else point); + const radix = asRadix16(scalar); + + const q = step(lookup_table, radix); + + return switch (ristretto) { + true => .{ .p = q }, + else => q, + }; +} + +/// Variable-base multiplication of `scalar` by a comptime known point. +pub fn mulByKnown(comptime point: Ristretto255, scalar: CompressedScalar) Ristretto255 { + @setEvalBranchQuota(9_000); + const lookup_table: LookupTable = comptime .init(point.p); + const radix = asRadix16(scalar); + return .{ .p = step(lookup_table, radix) }; +} + +/// Small optimization, sometimes we need to multiply a few points with the same scalar. +/// By batching them into this one function, we can save on converting the scalar into radix-16 +/// for each point and instead just reuse the single transformation. 
+pub fn mulManyWithSameScalar( + comptime N: comptime_int, + points: [N]Ristretto255, + scalar: CompressedScalar, +) [N]Ristretto255 { + const radix = asRadix16(scalar); + var output: [N]Ristretto255 = undefined; + for (points, &output) |point, *out| { + out.* = .{ .p = step(.init(point.p), radix) }; + } + return output; +} + +inline fn step( + lookup_table: LookupTable, + radix: [64]i8, +) Edwards25519 { + var q: ExtendedPoint = .identityElement; + for (0..64) |rev| { + const i = 64 - rev - 1; + q = q.mulByPow2(4); + q = q.addCached(lookup_table.select(radix[i])); + } + return q.toPoint(); +} diff --git a/src/crypto/ed25519/wycheproof.zig b/src/crypto/ed25519/wycheproof.zig new file mode 100644 index 0000000000..370742e7ba --- /dev/null +++ b/src/crypto/ed25519/wycheproof.zig @@ -0,0 +1,684 @@ +//! DO NOT EDIT! File generated by `gen_wycheproof.zig`. +//! algorithm: EDDSA +//! numberOfTests: 150 + +const Group = struct { + pubkey: []const u8, + cases: []const Case, + + const Case = struct { + msg: []const u8, + sig: []const u8, + expected: enum { valid, invalid }, + }; +}; + +pub const groups: []const Group = &.{ + .{ + .pubkey = "7d4d0e7f6153a69b6242b522abbee685fda4420f8834b108c3bdae369ef549fa", + .cases = &.{ + .{ .msg = "", .sig = "d4fbdb52bfa726b44d1786a8c0d171c3e62ca83c9e5bbe63de0bb2483f8fd6cc1429ab72cafc41ab56af02ff8fcc43b99bfe4c7ae940f60f38ebaa9d311c4007", .expected = .valid }, + .{ .msg = "78", .sig = "d80737358ede548acb173ef7e0399f83392fe8125b2ce877de7975d8b726ef5b1e76632280ee38afad12125ea44b961bf92f1178c9fa819d020869975bcbe109", .expected = .valid }, + .{ .msg = "54657374", .sig = "7c38e026f29e14aabd059a0f2db8b0cd783040609a8be684db12f82a27774ab07a9155711ecfaf7f99f277bad0c6ae7e39d4eef676573336a5c51eb6f946b30d", .expected = .valid }, + .{ .msg = "48656c6c6f", .sig = "1c1ad976cbaae3b31dee07971cf92c928ce2091a85f5899f5e11ecec90fc9f8e93df18c5037ec9b29c07195ad284e63d548cd0a6fe358cc775bd6c1608d2c905", .expected = .valid }, + .{ .msg = "313233343030", .sig = 
"657c1492402ab5ce03e2c3a7f0384d051b9cf3570f1207fc78c1bcc98c281c2bf0cf5b3a289976458a1be6277a5055545253b45b07dcc1abd96c8b989c00f301", .expected = .valid }, + .{ .msg = "000000000000000000000000", .sig = "d46543bfb892f84ec124dcdfc847034c19363bf3fc2fa89b1267833a14856e52e60736918783f950b6f1dd8d40dc343247cd43ce054c2d68ef974f7ed0f3c60f", .expected = .valid }, + .{ .msg = "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161", .sig = "879350045543bc14ed2c08939b68c30d22251d83e018cacbaf0c9d7a48db577e80bdf76ce99e5926762bc13b7b3483260a5ef63d07e34b58eb9c14621ac92f00", .expected = .valid }, + .{ .msg = "202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f60", .sig = "7bdc3f9919a05f1d5db4a3ada896094f6871c1f37afc75db82ec3147d84d6f237b7e5ecc26b59cfea0c7eaf1052dc427b0f724615be9c3d3e01356c65b9b5109", .expected = .valid }, + .{ .msg = "ffffffffffffffffffffffffffffffff", .sig = "5dbd7360e55aa38e855d6ad48c34bd35b7871628508906861a7c4776765ed7d1e13d910faabd689ec8618b78295c8ab8f0e19c8b4b43eb8685778499e943ae04", .expected = .valid }, + }, + }, + .{ + .pubkey = "7d4d0e7f6153a69b6242b522abbee685fda4420f8834b108c3bdae369ef549fa", + .cases = &.{ + // special values for r and s + .{ .msg = "3f", .sig = "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = "00000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = "0000000000000000000000000000000000000000000000000000000000000000ecd3f55c1a631258d69cf7a2def9de1400000000000000000000000000000010", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = 
"0000000000000000000000000000000000000000000000000000000000000000edd3f55c1a631258d69cf7a2def9de1400000000000000000000000000000010", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = "0000000000000000000000000000000000000000000000000000000000000000edffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = "01000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = "0100000000000000000000000000000000000000000000000000000000000000ecd3f55c1a631258d69cf7a2def9de1400000000000000000000000000000010", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = "0100000000000000000000000000000000000000000000000000000000000000edd3f55c1a631258d69cf7a2def9de1400000000000000000000000000000010", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = "0100000000000000000000000000000000000000000000000000000000000000edffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = "edd3f55c1a631258d69cf7a2def9de14000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = "edd3f55c1a631258d69cf7a2def9de14000000000000000000000000000000100100000000000000000000000000000000000000000000000000000000000000", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = 
"edd3f55c1a631258d69cf7a2def9de1400000000000000000000000000000010ecd3f55c1a631258d69cf7a2def9de1400000000000000000000000000000010", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = "edd3f55c1a631258d69cf7a2def9de1400000000000000000000000000000010edd3f55c1a631258d69cf7a2def9de1400000000000000000000000000000010", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = "edd3f55c1a631258d69cf7a2def9de1400000000000000000000000000000010edffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = "edffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = "edffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f0100000000000000000000000000000000000000000000000000000000000000", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = "edffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7fecd3f55c1a631258d69cf7a2def9de1400000000000000000000000000000010", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = "edffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7fedd3f55c1a631258d69cf7a2def9de1400000000000000000000000000000010", .expected = .invalid }, + // special values for r and s + .{ .msg = "3f", .sig = "edffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7fedffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f", .expected = .invalid }, + }, + }, + .{ + .pubkey = "7d4d0e7f6153a69b6242b522abbee685fda4420f8834b108c3bdae369ef549fa", + .cases = &.{ + // empty signature + .{ .msg = "54657374", .sig = "", .expected = .invalid }, + // s missing + .{ .msg = "54657374", .sig = "7c38e026f29e14aabd059a0f2db8b0cd783040609a8be684db12f82a27774ab0", .expected = .invalid }, + // signature too short 
+ .{ .msg = "54657374", .sig = "7c38e026f29e14aabd059a0f2db8b0cd783040609a8be684db12f82a27774ab07a9155711ecfaf7f99f277bad0c6ae7e39d4eef676573336a5c51eb6f946", .expected = .invalid }, + // signature too long + .{ .msg = "54657374", .sig = "7c38e026f29e14aabd059a0f2db8b0cd783040609a8be684db12f82a27774ab07a9155711ecfaf7f99f277bad0c6ae7e39d4eef676573336a5c51eb6f946b30d2020", .expected = .invalid }, + // include pk in signature + .{ .msg = "54657374", .sig = "7c38e026f29e14aabd059a0f2db8b0cd783040609a8be684db12f82a27774ab07a9155711ecfaf7f99f277bad0c6ae7e39d4eef676573336a5c51eb6f946b30d7d4d0e7f6153a69b6242b522abbee685fda4420f8834b108c3bdae369ef549fa", .expected = .invalid }, + // prepending 0 byte to signature + .{ .msg = "54657374", .sig = "007c38e026f29e14aabd059a0f2db8b0cd783040609a8be684db12f82a27774ab07a9155711ecfaf7f99f277bad0c6ae7e39d4eef676573336a5c51eb6f946b30d", .expected = .invalid }, + // prepending 0 byte to s + .{ .msg = "54657374", .sig = "7c38e026f29e14aabd059a0f2db8b0cd783040609a8be684db12f82a27774ab0007a9155711ecfaf7f99f277bad0c6ae7e39d4eef676573336a5c51eb6f946b30d", .expected = .invalid }, + // appending 0 byte to signature + .{ .msg = "54657374", .sig = "7c38e026f29e14aabd059a0f2db8b0cd783040609a8be684db12f82a27774ab07a9155711ecfaf7f99f277bad0c6ae7e39d4eef676573336a5c51eb6f946b30d00", .expected = .invalid }, + // removing 0 byte from signature + .{ .msg = "546573743137", .sig = "93de3ca252426c95f735cb9edd92e83321ac62372d5aa5b379786bae111ab6b17251330e8f9a7c30d6993137c596007d7b001409287535ac4804e662bc58a3", .expected = .invalid }, + // removing 0 byte from signature + .{ .msg = "54657374313236", .sig = "dffed33a7f420b62bb1731cfd03be805affd18a281ec02b1067ba6e9d20826569e742347df59c88ae96db1f1969fb189b0ec34381d85633e1889da48d95e0e", .expected = .invalid }, + // removing leading 0 byte from signature + .{ .msg = "546573743530", .sig = 
"6e170c719577c25e0e1e8b8aa7a6346f8b109f37385cc2e85dc3b4c0f46a9c6bcafd67f52324c5dbaf40a1b673fb29c4a56052d2d6999d0838a8337bccb502", .expected = .invalid }, + // dropping byte from signature + .{ .msg = "54657374333437", .sig = "b0928b46e99fbbad3f5cb502d2cd309d94a7e86cfd4d84b1fcf4cea18075a9c36993c0582dba1e9e519fae5a8654f454201ae0c3cb397c37b8f4f8eef18400", .expected = .invalid }, + }, + }, + .{ + .pubkey = "7d4d0e7f6153a69b6242b522abbee685fda4420f8834b108c3bdae369ef549fa", + .cases = &.{ + // modified bit 0 in R + .{ .msg = "313233343030", .sig = "647c1492402ab5ce03e2c3a7f0384d051b9cf3570f1207fc78c1bcc98c281c2b1d125e5538f38afbcc1c84e489521083041d24bc6240767029da063271a1ff0c", .expected = .invalid }, + // modified bit 1 in R + .{ .msg = "313233343030", .sig = "677c1492402ab5ce03e2c3a7f0384d051b9cf3570f1207fc78c1bcc98c281c2bc108ca4b87a49c9ed2cf383aecad8f54a962b2899da891e12004d7993a627e01", .expected = .invalid }, + // modified bit 2 in R + .{ .msg = "313233343030", .sig = "617c1492402ab5ce03e2c3a7f0384d051b9cf3570f1207fc78c1bcc98c281c2b9ce23fc6213ed5b87912e9bbf92f5e2c780eae26d15c50a112d1e97d2ea33c06", .expected = .invalid }, + // modified bit 7 in R + .{ .msg = "313233343030", .sig = "e57c1492402ab5ce03e2c3a7f0384d051b9cf3570f1207fc78c1bcc98c281c2bbb3eb51cd98dddb235a5f46f2bded6af184a58d09cce928bda43f41d69118a03", .expected = .invalid }, + // modified bit 8 in R + .{ .msg = "313233343030", .sig = "657d1492402ab5ce03e2c3a7f0384d051b9cf3570f1207fc78c1bcc98c281c2bcd237dda9a116501f67a5705a854b9adc304f34720803a91b324f2c13e0f5a09", .expected = .invalid }, + // modified bit 16 in R + .{ .msg = "313233343030", .sig = "657c1592402ab5ce03e2c3a7f0384d051b9cf3570f1207fc78c1bcc98c281c2b6b167bbdc0d881cc04d28905552c1876f3709851abc5007376940cc8a435c300", .expected = .invalid }, + // modified bit 31 in R + .{ .msg = "313233343030", .sig = "657c1412402ab5ce03e2c3a7f0384d051b9cf3570f1207fc78c1bcc98c281c2b7fd2ac7da14afffcceeb13f2a0d6b887941cb1a5eb57a52f3cb131a16cce7b0e", .expected = .invalid 
}, + // modified bit 32 in R + .{ .msg = "313233343030", .sig = "657c1492412ab5ce03e2c3a7f0384d051b9cf3570f1207fc78c1bcc98c281c2b7373ba13ebbef99cd2a8ead55ce735c987d85a35320925a8e871702dc7c5c40d", .expected = .invalid }, + // modified bit 63 in R + .{ .msg = "313233343030", .sig = "657c1492402ab54e03e2c3a7f0384d051b9cf3570f1207fc78c1bcc98c281c2bd35bd331c03f0855504ca1cab87b83c36a028425a3cf007ede4f4254c261cb00", .expected = .invalid }, + // modified bit 64 in R + .{ .msg = "313233343030", .sig = "657c1492402ab5ce02e2c3a7f0384d051b9cf3570f1207fc78c1bcc98c281c2bcb35101f73cf467deac8c1a03b6c3dc35af544132734b7e57ab20c89b2e4750d", .expected = .invalid }, + // modified bit 97 in R + .{ .msg = "313233343030", .sig = "657c1492402ab5ce03e2c3a7f2384d051b9cf3570f1207fc78c1bcc98c281c2bb58d2e8878290bff8d3355fdd4ea381924ee578752354eb6dee678ab4011c301", .expected = .invalid }, + // modified bit 127 in R + .{ .msg = "313233343030", .sig = "657c1492402ab5ce03e2c3a7f0384d851b9cf3570f1207fc78c1bcc98c281c2bb978c866187ffb1cc7b29a0b4045aefc08768df65717194ff0c6e63f4dea0d02", .expected = .invalid }, + // modified bit 240 in R + .{ .msg = "313233343030", .sig = "657c1492402ab5ce03e2c3a7f0384d051b9cf3570f1207fc78c1bcc98c281d2b0576ecf8eaf675f00f3dfbe19f75b83b7607a6c96414f6821af920a2498d0305", .expected = .invalid }, + // modified bit 247 in R + .{ .msg = "313233343030", .sig = "657c1492402ab5ce03e2c3a7f0384d051b9cf3570f1207fc78c1bcc98c289c2be5241a345c7b5428054c74b7c382fa10d4a5f1e8f8b79a71d3fdea2254f1ff0e", .expected = .invalid }, + // modified bit 248 in R + .{ .msg = "313233343030", .sig = "657c1492402ab5ce03e2c3a7f0384d051b9cf3570f1207fc78c1bcc98c281c2a63950c85cd6dc96364e768de50ff7732b538f8a0b1615d799190ab600849230e", .expected = .invalid }, + // modified bit 253 in R + .{ .msg = "313233343030", .sig = "657c1492402ab5ce03e2c3a7f0384d051b9cf3570f1207fc78c1bcc98c281c0b543bd3da0a56a8c9c152f59c9fec12f31fa66434d48b817b30d90cb4efa8b501", .expected = .invalid }, + // modified bit 254 in R + .{ .msg = 
"313233343030", .sig = "657c1492402ab5ce03e2c3a7f0384d051b9cf3570f1207fc78c1bcc98c281c6b8da07efd07a6dafb015ed6a32fe136319a972ffbc341f3a0beae97ccf8136505", .expected = .invalid }, + // modified bit 255 in R + .{ .msg = "313233343030", .sig = "657c1492402ab5ce03e2c3a7f0384d051b9cf3570f1207fc78c1bcc98c281cab227aedf259f910f0f3a759a335062665217925d019173b88917eae294f75d40f", .expected = .invalid }, + // R==0 + .{ .msg = "313233343030", .sig = "0000000000000000000000000000000000000000000000000000000000000000e0b8e7770d51c7a36375d006c5bffd6af43ff54aaf47e4330dc118c71d61ec02", .expected = .invalid }, + // invalid R + .{ .msg = "313233343030", .sig = "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff463a1908382e7eb7693acef9884f7cf931a215e0791876be22c631a59881fd0e", .expected = .invalid }, + // all bits flipped in R + .{ .msg = "313233343030", .sig = "9a83eb6dbfd54a31fc1d3c580fc7b2fae4630ca8f0edf803873e433673d7e3d40e94254586cb6188c5386c3febed477cb9a6cb29e3979adc4cb27cf5278fb70a", .expected = .invalid }, + }, + }, + .{ + .pubkey = "7d4d0e7f6153a69b6242b522abbee685fda4420f8834b108c3bdae369ef549fa", + .cases = &.{ + // checking malleability + .{ .msg = "54657374", .sig = "7c38e026f29e14aabd059a0f2db8b0cd783040609a8be684db12f82a27774ab067654bce3832c2d76f8f6f5dafc08d9339d4eef676573336a5c51eb6f946b31d", .expected = .invalid }, + // checking malleability + .{ .msg = "54657374", .sig = "7c38e026f29e14aabd059a0f2db8b0cd783040609a8be684db12f82a27774ab05439412b5395d42f462c67008eba6ca839d4eef676573336a5c51eb6f946b32d", .expected = .invalid }, + // checking malleability + .{ .msg = "54657374", .sig = "7c38e026f29e14aabd059a0f2db8b0cd783040609a8be684db12f82a27774ab02ee12ce5875bf9dff26556464bae2ad239d4eef676573336a5c51eb6f946b34d", .expected = .invalid }, + // checking malleability + .{ .msg = "54657374", .sig = "7c38e026f29e14aabd059a0f2db8b0cd783040609a8be684db12f82a27774ab0e2300459f1e742404cd934d2c595a6253ad4eef676573336a5c51eb6f946b38d", .expected = .invalid }, + // 
checking malleability + .{ .msg = "54657374", .sig = "7c38e026f29e14aabd059a0f2db8b0cd783040609a8be684db12f82a27774ab07a9155711ecfaf7f99f277bad0c6ae7e39d4eef676573336a5c51eb6f946b32d", .expected = .invalid }, + // checking malleability + .{ .msg = "54657374", .sig = "7c38e026f29e14aabd059a0f2db8b0cd783040609a8be684db12f82a27774ab07a9155711ecfaf7f99f277bad0c6ae7e39d4eef676573336a5c51eb6f946b34d", .expected = .invalid }, + // checking malleability + .{ .msg = "54657374", .sig = "7c38e026f29e14aabd059a0f2db8b0cd783040609a8be684db12f82a27774ab07a9155711ecfaf7f99f277bad0c6ae7e39d4eef676573336a5c51eb6f946b38d", .expected = .invalid }, + // checking malleability + .{ .msg = "54657374", .sig = "7c38e026f29e14aabd059a0f2db8b0cd783040609a8be684db12f82a27774ab0679155711ecfaf7f99f277bad0c6ae7e39d4eef676573336a5c51eb6f946b38d", .expected = .invalid }, + }, + }, + .{ + .pubkey = "a12c2beb77265f2aac953b5009349d94155a03ada416aad451319480e983ca4c", + .cases = &.{ + .{ .msg = "", .sig = "5056325d2ab440bf30bbf0f7173199aa8b4e6fbc091cf3eb6bc6cf87cd73d992ffc216c85e4ab5b8a0bbc7e9a6e9f8d33b7f6e5ac0ffdc22d9fcaf784af84302", .expected = .valid }, + .{ .msg = "78", .sig = "481fafbf4364d7b682475282f517a3ac0538c9a6b6a562e99a3d8e5afb4f90a559b056b9f07af023905753b02d95eb329a35c77f154b79abbcd291615ce42f02", .expected = .valid }, + .{ .msg = "54657374", .sig = "8a9bb4c465a3863abc9fd0dd35d80bb28f7d33d37d74679802d63f82b20da114b8d765a1206b3e9ad7cf2b2d8d778bb8651f1fa992db293c0039eacb6161480f", .expected = .valid }, + .{ .msg = "48656c6c6f", .sig = "d839c20abfda1fd429531831c64f813f84b913e9928540310cf060b44c3dbf9457d44a7721fdc0d67724ff81cb450dd39b10cfb65db15dda4b8bf09d26bd3801", .expected = .valid }, + .{ .msg = "313233343030", .sig = "9bbb1052dcfa8ad2715c2eb716ae4f1902dea353d42ee09fd4c0b4fcb8b52b5219e2200016e1199d0061891c263e31b0bc3b55673c19610c4e0fa5408004160b", .expected = .valid }, + .{ .msg = "000000000000000000000000", .sig = 
"f63b5c0667c7897fc283296416f7f60e84bbde9cbd832e56be463ed9f568069702b17a2f7c341ebf590706a6388ac76ac613c1675ec0f2c7118f2573422a500b", .expected = .valid }, + .{ .msg = "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161", .sig = "1bc44d7001e6b5b9090fef34b2ca480f9786bbefa7d279353e5881e8dfb91b803ccd46500e270ef0109bfd741037558832120bc2a4f20fbe7b5fb3c3aaf23e08", .expected = .valid }, + .{ .msg = "202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f60", .sig = "ea8e22143b02372e76e99aece3ed36aec529768a27e2bb49bdc135d44378061e1f62d1ac518f33ebf37b2ee8cc6dde68a4bd7d4a2f4d6cb77f015f71ca9fc30d", .expected = .valid }, + .{ .msg = "ffffffffffffffffffffffffffffffff", .sig = "8acd679e1a914fc45d5fa83d3021f0509c805c8d271df54e52f43cfbd00cb6222bf81d58fe1de2de378df67ee9f453786626961fe50a9b05f12b6f0899ebdd0a", .expected = .valid }, + }, + }, + .{ + .pubkey = "d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a", + .cases = &.{ + // draft-josefsson-eddsa-ed25519-02: Test 1 + .{ .msg = "", .sig = "e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b", .expected = .valid }, + }, + }, + .{ + .pubkey = "3d4017c3e843895a92b70aa74d1b7ebc9c982ccf2ec4968cc0cd55f12af4660c", + .cases = &.{ + // draft-josefsson-eddsa-ed25519-02: Test 2 + .{ .msg = "72", .sig = "92a009a9f0d4cab8720e820b5f642540a2b27b5416503f8fb3762223ebdb69da085ac1e43e15996e458f3613d0f11d8c387b2eaeb4302aeeb00d291612bb0c00", .expected = .valid }, + }, + }, + .{ + .pubkey = "fc51cd8e6218a1a38da47ed00230f0580816ed13ba3303ac5deb911548908025", + .cases = &.{ + // draft-josefsson-eddsa-ed25519-02: Test 3 + .{ .msg = "af82", .sig = "6291d657deec24024827e69c3abe01a30ce548a284743a445e3680d7db5ac3ac18ff9b538d16f290ae67f760984dc6594a7c15e9716ed28dc027beceea1ec40a", .expected = .valid }, + }, + }, + .{ + 
.pubkey = "278117fc144c72340f67d0f2316e8386ceffbf2b2428c9c51fef7c597f1d426e", + .cases = &.{ + // draft-josefsson-eddsa-ed25519-02: Test 1024 + .{ .msg = "08b8b2b733424243760fe426a4b54908632110a66c2f6591eabd3345e3e4eb98fa6e264bf09efe12ee50f8f54e9f77b1e355f6c50544e23fb1433ddf73be84d879de7c0046dc4996d9e773f4bc9efe5738829adb26c81b37c93a1b270b20329d658675fc6ea534e0810a4432826bf58c941efb65d57a338bbd2e26640f89ffbc1a858efcb8550ee3a5e1998bd177e93a7363c344fe6b199ee5d02e82d522c4feba15452f80288a821a579116ec6dad2b3b310da903401aa62100ab5d1a36553e06203b33890cc9b832f79ef80560ccb9a39ce767967ed628c6ad573cb116dbefefd75499da96bd68a8a97b928a8bbc103b6621fcde2beca1231d206be6cd9ec7aff6f6c94fcd7204ed3455c68c83f4a41da4af2b74ef5c53f1d8ac70bdcb7ed185ce81bd84359d44254d95629e9855a94a7c1958d1f8ada5d0532ed8a5aa3fb2d17ba70eb6248e594e1a2297acbbb39d502f1a8c6eb6f1ce22b3de1a1f40cc24554119a831a9aad6079cad88425de6bde1a9187ebb6092cf67bf2b13fd65f27088d78b7e883c8759d2c4f5c65adb7553878ad575f9fad878e80a0c9ba63bcbcc2732e69485bbc9c90bfbd62481d9089beccf80cfe2df16a2cf65bd92dd597b0707e0917af48bbb75fed413d238f5555a7a569d80c3414a8d0859dc65a46128bab27af87a71314f318c782b23ebfe808b82b0ce26401d2e22f04d83d1255dc51addd3b75a2b1ae0784504df543af8969be3ea7082ff7fc9888c144da2af58429ec96031dbcad3dad9af0dcbaaaf268cb8fcffead94f3c7ca495e056a9b47acdb751fb73e666c6c655ade8297297d07ad1ba5e43f1bca32301651339e22904cc8c42f58c30c04aafdb038dda0847dd988dcda6f3bfd15c4b4c4525004aa06eeff8ca61783aacec57fb3d1f92b0fe2fd1a85f6724517b65e614ad6808d6f6ee34dff7310fdc82aebfd904b01e1dc54b2927094b2db68d6f903b68401adebf5a7e08d78ff4ef5d63653a65040cf9bfd4aca7984a74d37145986780fc0b16ac451649de6188a7dbdf191f64b5fc5e2ab47b57f7f7276cd419c17a3ca8e1b939ae49e488acba6b965610b5480109c8b17b80e1b7b750dfc7598d5d5011fd2dcc5600a32ef5b52a1ecc820e308aa342721aac0943bf6686b64b2579376504ccc493d97e6aed3fb0f9cd71a43dd497f01f17c0e2cb3797aa2a2f256656168e6c496afc5fb93246f6b1116398a346f1a641f3b041e989f7914f90cc2c7fff357876e506b50d334ba77c225bc307ba537152f3f1610e4eafe595f6d9d90d11f
aa933a15ef1369546868a7f3a45a96768d40fd9d03412c091c6315cf4fde7cb68606937380db2eaaa707b4c4185c32eddcdd306705e4dc1ffc872eeee475a64dfac86aba41c0618983f8741c5ef68d3a101e8a3b8cac60c905c15fc910840b94c00a0b9d0", .sig = "0aab4c900501b3e24d7cdf4663326a3a87df5e4843b2cbdb67cbf6e460fec350aa5371b1508f9f4528ecea23c436d94b5e8fcd4f681e30a6ac00a9704a188a03", .expected = .valid }, + }, + }, + .{ + .pubkey = "100fdf47fb94f1536a4f7c3fda27383fa03375a8f527c537e6f1703c47f94f86", + .cases = &.{ + // Signature with S just under the bound. [David Benjamin] + .{ .msg = "124e583f8b8eca58bb29c271b41d36986bbc45541f8e51f9cb0133eca447601e", .sig = "dac119d6ca87fc59ae611c157048f4d4fc932a149dbe20ec6effd1436abf83ea05c7df0fef06147241259113909bc71bd3c53ba4464ffcad3c0968f2ffffff0f", .expected = .valid }, + }, + }, + .{ + .pubkey = "100fdf47fb94f1536a4f7c3fda27383fa03375a8f527c537e6f1703c47f94f86", + .cases = &.{ + // Signature with S just above the bound. [David Benjamin] + .{ .msg = "6a0bc2b0057cedfc0fa2e3f7f7d39279b30f454a69dfd1117c758d86b19d85e0", .sig = "0971f86d2c9c78582524a103cb9cf949522ae528f8054dc20107d999be673ff4e25ebf2f2928766b1248bec6e91697775f8446639ede46ad4df4053000000010", .expected = .invalid }, + }, + }, + .{ + .pubkey = "8fd659b77b558ed93882c1157438450ac86ec62d421d568e98ee236f3810295a", + .cases = &.{ + // Random test failure 1 + .{ .msg = "b0729a713593a92e46b56eaa66b9e435f7a09a8e7de03b078f6f282285276635f301e7aaafe42187c45d6f5b13f9f16b11195cc125c05b90d24dfe4c", .sig = "7db17557ac470c0eda4eedaabce99197ab62565653cf911f632ee8be0e5ffcfc88fb94276b42e0798fd3aa2f0318be7fc6a29fae75f70c3dcdc414a0ad866601", .expected = .valid }, + }, + }, + .{ + .pubkey = "2a606bf67ac770c607038b004101b325edb569efd3413d2d1f2c3e6b4e6e3082", + .cases = &.{ + // Random test failure 2 + .{ .msg = 
"a8546e50ba31cae3234310d32672447be213fad91a227a19669c53d309b959782b0e6b71f8791fdb470043b58122003157d2d96a43a6cbd7d3a8d86bf4c97391883e268d50af80e1e6e12939c2bd50ca746cdadfad4edf1bda875299740724148efb1ebe73fb60088cda890317658627a5f7ab5a0c075d9d8f3f97b6492b35519e50ff6b38377432a7081f9176bb1c29a862deac1336ca20b097a47829cec10a6a7cec178eda2d12f6dc6c87f910454af0123555ba184e68804d9cced60fd5c8c90943e56599c8f0ba59a38491ba5e5a53460682474c07e40ca142983314fd762856bb1093f359da6eb0a756bd93a3160c10dd8feea6b97e7c6a17cb54bd5d7649c05c66d7bdee056671dfdaf689fa3945bb8e29a429f4bd5d355dce9687b06f01d5e33e3999f0e8", .sig = "67d84d4c3945aaf06e06d524be63acbfb5dbb1988c4aea96a5ee9f7a9b9eecc29df4f66b8aa1d9e8607a58fb1ef0c2ad69aac005b4f58e34103344a9c8871a09", .expected = .valid }, + }, + }, + .{ + .pubkey = "c9c946cbc5544ac74eef491f07c5881c16faf7ec31ce4aa91bb60ae7b4539051", + .cases = &.{ + // Random test failure 3 + .{ .msg = "cd2212eddb0706f62c995cef958634f0cb7793444cbf4d30e81c27c41ebea6cb02607510131f9c015692dfd521b148841e9a2d3564d20ac401f6cb8e40f520fe0cafbeaa88840b83013369d879f013463fe52a13267aa0c8c59c45cde9399cd1e6be8cc64cf48315ac2eb31a1c567a4fb7d601746d1f63b5ac020712adbbe07519bded6f", .sig = "24087d47f3e20af51b9668ae0a88ce76586802d0ec75d8c0f28fc30962b5e1d1a1d509571a1624ed125a8df92a6e963728d6b5de99200b8e285f70feb6f05207", .expected = .valid }, + }, + }, + .{ + .pubkey = "32ad026f693d0d2afe7f4388d91c4c964426fcb9e3665c3ebd8650009b815c8e", + .cases = &.{ + // Random test failure 4 + .{ .msg = "ec5c7cb078", .sig = "d920d421a5956b69bfe1ba834c025e2babb6c7a6d78c97de1d9bb1116dfdd1185147b2887e34e15578172e150774275ea2aad9e02106f7e8ca1caa669a066f0c", .expected = .valid }, + }, + }, + .{ + .pubkey = "32ad026f693d0d2afe7f4388d91c4c964426fcb9e3665c3ebd8650009b815c8e", + .cases = &.{ + // Random test failure 5 + .{ .msg = "4668c6a76f0e482190a7175b9f3806a5fe4314a004fa69f988373f7a", .sig = 
"4f62daf7f7c162038552ad7d306e195baa37ecf6ca7604142679d7d1128e1f8af52e4cb3545748c44ef1ff1c64e877e4f4d248259b7f6eb56e3ef72097dc8e0c", .expected = .valid }, + }, + }, + .{ + .pubkey = "c29ec1894e06d27b4e40486b4fa5063d66a746c7f9c323b12203c03b72b8b78a", + .cases = &.{ + // Random test failure 6 + .{ .msg = "0f325ffd87e58131ffa23c05ea4579513b287fdba87b44", .sig = "6669acf94667c5b541afe5307bde9476b13ae7e0e6058a772101ac8eb0a94331428eb4db0a2c68a9b6c1763b8624dab259b0876cdcfaeacc17b21a18e3fc010a", .expected = .valid }, + }, + }, + .{ + .pubkey = "cfda5b899e35764c5229e59295fe1222b7ddce176643697c29e46ecbba10cf10", + .cases = &.{ + // Random test failure 7 + .{ .msg = "ec5c7cb078", .sig = "30490c28f806298225df62103521dcee047153912c33ab8ab8bbdd1ffabd70fd4fdb360f05be535b067d1cf4e78c2cb432206bf280aab3bd21aaa1cb894c5b06", .expected = .valid }, + }, + }, + .{ + .pubkey = "32ad026f693d0d2afe7f4388d91c4c964426fcb9e3665c3ebd8650009b815c8e", + .cases = &.{ + // Random test failure 8 + .{ .msg = "5dc9bb87eb11621a93f92abe53515697d2611b2eef73", .sig = "deecafb6f2ede73fec91a6f10e45b9c1c61c4b9bfbe6b6147e2de0b1df6938971f7896c3ab83851fb5d9e537037bff0fca0ccb4a3cc38f056f91f7d7a0557e08", .expected = .valid }, + }, + }, + .{ + .pubkey = "cfda5b899e35764c5229e59295fe1222b7ddce176643697c29e46ecbba10cf10", + .cases = &.{ + // Random test failure 9 + .{ .msg = "67484059b2490b1a0a4f8dee77979e26", .sig = "4cd4f77ed473a6647387f3163541c67a1708a3c3bd1673247cb87f0cb68b3c56f04bfa72970c8a483efe659c87009ab4020b590b6641316b3deddb5450544e02", .expected = .valid }, + }, + }, + .{ + .pubkey = "32ad026f693d0d2afe7f4388d91c4c964426fcb9e3665c3ebd8650009b815c8e", + .cases = &.{ + // Random test failure 10 + .{ .msg = "7dcfe60f881e1285676f35b68a1b2dbcdd7be6f719a288ababc28d36e3a42ac3010a1ca54b32760e74", .sig = "7f8663cf98cbd39d5ff553f00bcf3d0d520605794f8866ce75714d77cc51e66c91818b657d7b0dae430a68353506edc4a714c345f5ddb5c8b958ba3d035f7a01", .expected = .valid }, + }, + }, + .{ + .pubkey = 
"cfda5b899e35764c5229e59295fe1222b7ddce176643697c29e46ecbba10cf10", + .cases = &.{ + // Random test failure 11 + .{ .msg = "a020a4381dc9141f47ee508871ab7a8b5a3648727c4281ae9932376f23a8e1bcda0626b7129197d864178631ec89c4332dbb18", .sig = "1e41a24fe732bd7cab14c2a2f5134ee8c87fcbd2e987e60957ed9239e5c32404d56977e1b4282871896cb10625a1937468e4dc266e16a9c1b8e9891177eca802", .expected = .valid }, + }, + }, + .{ + .pubkey = "32ad026f693d0d2afe7f4388d91c4c964426fcb9e3665c3ebd8650009b815c8e", + .cases = &.{ + // Random test failure 12 + .{ .msg = "58e456064dff471109def4ca27fa8310a1df32739655b624f27e6418d34b7f007173f3faa5", .sig = "6aab49e5c0bc309b783378ee03ffda282f0185cdf94c847701ff307a6ee8d0865411c44e0a8206f6a5f606107451940c2593af790ce1860f4c14ab25b2deae08", .expected = .valid }, + }, + }, + .{ + .pubkey = "529919c9c780985a841c42ba6c180ff2d67a276ccfbe281080e47ab71a758f56", + .cases = &.{ + // Random test failure 13 + .{ .msg = "e1cbf2d86827825613fb7a85811d", .sig = "01abfa4d6bbc726b196928ec84fd03f0c953a4fa2b228249562ff1442a4f63a7150b064f3712b51c2af768d2c2711a71aabf8d186833e941a0301b82f0502905", .expected = .valid }, + }, + }, + .{ + .pubkey = "cfda5b899e35764c5229e59295fe1222b7ddce176643697c29e46ecbba10cf10", + .cases = &.{ + // Random test failure 14 + .{ .msg = "a25176b3afea318b2ec11ddacb10caf7179c0b3f8eabbfa2895581138d3c1e0e", .sig = "2a833aadecd9f28235cb5896bf3781521dc71f28af2e91dbe1735a61dce3e31ac15ca24b3fc47817a59d386bbbb2ce60a6adc0a2703bb2bdea8f70f91051f706", .expected = .valid }, + }, + }, + .{ + .pubkey = "32ad026f693d0d2afe7f4388d91c4c964426fcb9e3665c3ebd8650009b815c8e", + .cases = &.{ + // Random test failure 15 + .{ .msg = "a1", .sig = "1a74ed2cbdc7d8f3827014e8e6ecf8fd2698ac8f86833acccdd400df710fe0d6b0543c9cfa00d52bf024ab7ce0d91981944097233ec134d5c7abbd44bfd32d0d", .expected = .valid }, + }, + }, + .{ + .pubkey = "2252b3d57c74cbf8bc460dc2e082847926bc022f09ab6ae95756362bfd1167c1", + .cases = &.{ + // Random test failure 16 + .{ .msg = 
"975ef941710071a9e1e6325a0c860becd7c695b5117c3107b686e330e5", .sig = "af0fd9dda7e03e12313410d8d8844ebb6fe6b7f65141f22d7bcba5695a25414a9e54326fb44d59fb14707899a8aae70857b23d4080d7ab2c396ef3a36d45ce02", .expected = .valid }, + }, + }, + .{ + .pubkey = "c0a773110f975de3732355bb7ec7f0c41c091c0252966070205516693b992a4a", + .cases = &.{ + // Random test failure 17 + .{ .msg = "", .sig = "0280427e713378f49d478df6373c6cac847b622b567daa2376c839e7ac10e22c380ab0fa8617c9dcfe76c4d9db5459b21dc1413726e46cc8f387d359e344f407", .expected = .valid }, + }, + }, + .{ + .pubkey = "cfda5b899e35764c5229e59295fe1222b7ddce176643697c29e46ecbba10cf10", + .cases = &.{ + // Random test failure 18 + .{ .msg = "a9e6d94870a67a9fe1cf13b1e6f9150cdd407bf6480ec841ea586ae3935e9787163cf419c1", .sig = "c97e3190f83bae7729ba473ad46b420b8aad735f0808ea42c0f898ccfe6addd4fd9d9fa3355d5e67ee21ab7e1f805cd07f1fce980e307f4d7ad36cc924eef00c", .expected = .valid }, + }, + }, + .{ + .pubkey = "32ad026f693d0d2afe7f4388d91c4c964426fcb9e3665c3ebd8650009b815c8e", + .cases = &.{ + // Random test failure 19 + .{ .msg = "11cb1eafa4c42a8402c4193c4696f7b2e6d4585e4b42dcf1a8b67a80b2da80bc9d4b649fb2f35eaf1f56c426fd0b", .sig = "14ceb2eaf4688d995d482f44852d71ad878cd7c77b41e60b0065fd01a59b054ee74759224187dbde9e59a763a70277c960892ef89fba997aba2576b2c54ba608", .expected = .valid }, + }, + }, + .{ + .pubkey = "c9c946cbc5544ac74eef491f07c5881c16faf7ec31ce4aa91bb60ae7b4539051", + .cases = &.{ + // Random test failure 20 + .{ .msg = "27d465bc632743522aefa23c", .sig = "c2656951e2a0285585a51ff0eda7e9a23c2dfd2ffa273aee7808f4604e8f9a8c8ea49e9fce4eb2d8d75d36b7238fe6fc13b6c5d9427dd58f8c6615d033c0bd0f", .expected = .valid }, + }, + }, + .{ + .pubkey = "c29ec1894e06d27b4e40486b4fa5063d66a746c7f9c323b12203c03b72b8b78a", + .cases = &.{ + // Random test failure 21 + .{ .msg = "5ffa", .sig = "931e5152fcef078c22cc5d6a3a65f06e396289f6f5f2d1efa6340254a53526ef5dc6874eeddf35c3f50991c53cd02bf06313e37d93ee1f7022128ffa3b8f300b", .expected = .valid }, + }, + 
}, + .{ + .pubkey = "529919c9c780985a841c42ba6c180ff2d67a276ccfbe281080e47ab71a758f56", + .cases = &.{ + // Random test failure 22 + .{ .msg = "25", .sig = "e4ae21f7a8f4b3b325c161a8c6e53e2edd7005b9c2f8a2e3b0ac4ba94aa80be6f2ee22ac8d4a96b9a3eb73a825e7bb5aff4a3393bf5b4a38119e9c9b1b041106", .expected = .valid }, + }, + }, + .{ + .pubkey = "2252b3d57c74cbf8bc460dc2e082847926bc022f09ab6ae95756362bfd1167c1", + .cases = &.{ + // Random test failure 23 + .{ .msg = "80fdd6218f29c8c8f6bd820945f9b0854e3a8824", .sig = "e097e0bd0370bff5bde359175a11b728ee9639095d5df8eda496395565616edfe079977f7d4dc8c75d6113a83d6a55e6e1676408c0967a2906339b43337dcb01", .expected = .valid }, + }, + }, + .{ + .pubkey = "2a606bf67ac770c607038b004101b325edb569efd3413d2d1f2c3e6b4e6e3082", + .cases = &.{ + // Random test failure 24 + .{ .msg = "b477b0480bb84642608b908d29a51cf2fce63f24ee95", .sig = "28fafbb62b4d688fa79e1ac92851f46e319b161f801d4dc09acc21fdd6780a2c4292b8c1003c61c2bcebe7f3f88ccc4bb26d407387c5f27cb8c94cf6ce810405", .expected = .valid }, + }, + }, + .{ + .pubkey = "32ad026f693d0d2afe7f4388d91c4c964426fcb9e3665c3ebd8650009b815c8e", + .cases = &.{ + // Random test failure 25 + .{ .msg = "aa365b442d12b7f3c925", .sig = "83c40ce13d483cc58ff65844875862d93df4bd367af77efa469ec06a8ed9e6d7905a04879535708ddf225567a815c9b941d405c98e918fd0c151165cea7fb101", .expected = .valid }, + }, + }, + .{ + .pubkey = "54cda623245759ad6d43e620a606908befc633d60792bc7798447a0ef38e7311", + .cases = &.{ + // Random test failure 26 + .{ .msg = "27e792b28b2f1702", .sig = "14d9b497c19b91d43481c55bb6f5056de252d9ecb637575c807e58e9b4c5eac8b284089d97e2192dc242014363208e2c9a3435edf8928fb1d893553e9be4c703", .expected = .valid }, + }, + }, + .{ + .pubkey = "2362bac514d5fad33802642e979a1e82de6eb6f1bcbf6a5b304f2bb02b9e57fe", + .cases = &.{ + // Random test failure 27 + .{ .msg = "eef3bb0f617c17d0420c115c21c28e3762edc7b7fb048529b84a9c2bc6", .sig = 
"242ddb3a5d938d07af690b1b0ef0fa75842c5f9549bf39c8750f75614c712e7cbaf2e37cc0799db38b858d41aec5b9dd2fca6a3c8e082c10408e2cf3932b9d08", .expected = .valid }, + }, + }, + .{ + .pubkey = "32ad026f693d0d2afe7f4388d91c4c964426fcb9e3665c3ebd8650009b815c8e", + .cases = &.{ + // Random test failure 28 + .{ .msg = "475f", .sig = "71a4a06a34075f2fd47bc3abf4714d46db7e97b08cb6180d3f1539ac50b18ce51f8af8ae95ed21d4fa0daab7235925631ecea1fd9d0d8a2ba7a7583fd04b900c", .expected = .valid }, + }, + }, + .{ + .pubkey = "037b55b427dc8daa0f80fcebaf0846902309f8a6cf18b465c0ce9b6539629ac8", + .cases = &.{ + // Test case for overflow in signature generation + .{ .msg = "01234567", .sig = "c964e100033ce8888b23466677da4f4aea29923f642ae508f9d0888d788150636ab9b2c3765e91bbb05153801114d9e52dc700df377212222bb766be4b8c020d", .expected = .valid }, + }, + }, + .{ + .pubkey = "9c0007698f177998a7666c7cf7973e2b88e9c4946e33804a7bbe8968d2394b2e", + .cases = &.{ + // Test case for overflow in signature generation + .{ .msg = "9399a6db9433d2a28d2b0c11c8794ab7d108c95b", .sig = "176065c6d64a136a2227687d77f61f3fca3b16122c966276fd9a8b14a1a2cea4c33b3533d11101717016684e3810efbea63bb23773f7cc480174199abd734f08", .expected = .valid }, + }, + }, + .{ + .pubkey = "ed3a6f9721dc9729c1f76635bcf080d7036e1c2f0228654ccbbe1e738c17b963", + .cases = &.{ + // Test case for overflow in signature generation + .{ .msg = "7af783afbbd44c1833ab7237ecaf63b94ffdd003", .sig = "7ca69331eec8610d38f00e2cdbd46966cb359dcde98a257ac6f362cc00c8f4fe85c02285fe4d66e31a44cadb2bf474e1a7957609eb4fe95a71473fe6699aa70d", .expected = .valid }, + }, + }, + .{ + .pubkey = "4abfb535313705a6570018440cdec1a3ae33e51f352112fa6acbd0c6bc3ea859", + .cases = &.{ + // Test case for overflow in signature generation + .{ .msg = "321b5f663c19e30ee7bbb85e48ecf44db9d3f512", .sig = "f296715e855d8aecccba782b670163dedc4458fe4eb509a856bcac450920fd2e95a3a3eb212d2d9ccaf948c39ae46a2548af125f8e2ad9b77bd18f92d59f9200", .expected = .valid }, + }, + }, + .{ + .pubkey = 
"4f2162e6bf03a712db0efa418b7e7006e23871d9d7ec555a313885c4afd96385", + .cases = &.{ + // Test case for overflow in signature generation + .{ .msg = "c48890e92aeeb3af04858a8dc1d34f16a4347b91", .sig = "367d07253a9d5a77d054b9c1a82d3c0a448a51905343320b3559325ef41839608aa45564978da1b2968c556cfb23b0c98a9be83e594d5e769d69d1156e1b1506", .expected = .valid }, + }, + }, + .{ + .pubkey = "4abfb535313705a6570018440cdec1a3ae33e51f352112fa6acbd0c6bc3ea859", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "321b5f663c19e30ee7bbb85e48ecf44db9d3f512", .sig = "f296715e855d8aecccba782b670163dedc4458fe4eb509a856bcac450920fd2e95a3a3eb212d2d9ccaf948c39ae46a2548af125f8e2ad9b77bd18f92d59f9200", .expected = .valid }, + }, + }, + .{ + .pubkey = "4f2162e6bf03a712db0efa418b7e7006e23871d9d7ec555a313885c4afd96385", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "c48890e92aeeb3af04858a8dc1d34f16a4347b91", .sig = "367d07253a9d5a77d054b9c1a82d3c0a448a51905343320b3559325ef41839608aa45564978da1b2968c556cfb23b0c98a9be83e594d5e769d69d1156e1b1506", .expected = .valid }, + }, + }, + .{ + .pubkey = "0717d75ce27ea181ed5a30e6456c649b5cf453a6b4c12cd3f9fd16b31e0c25cd", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "26d5f0631f49106db58c4cfc903691134811b33c", .sig = "9588e02bc815649d359ce710cdc69814556dd8c8bab1c468f40a49ebefb7f0de7ed49725edfd1b708fa1bad277c35d6c1b9c5ec25990997645780f9203d7dd08", .expected = .valid }, + }, + }, + .{ + .pubkey = "db5b9eab7e84e5a13505865fa711c9c896c898609fc11fc9bc1e55028f9496df", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "2a71f064af982a3a1103a75cef898732d7881981", .sig = "2217a0be57dd0d6c0090641496bcb65e37213f02a0df50aff0368ee2808e1376504f37b37494132dfc4d4887f58b9e86eff924040db3925ee4f8e1428c4c500e", .expected = .valid }, + }, + }, + .{ + .pubkey = "7bac18f6d2625d3915f233434cda38a577247a7332a5170b37142a34644145e0", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = 
"bf26796cef4ddafcf5033c8d105057db0210b6ad", .sig = "1fda6dd4519fdbefb515bfa39e8e5911f4a0a8aa65f40ef0c542b8b34b87f9c249dc57f320718ff457ed5915c4d0fc352affc1287724d3f3a9de1ff777a02e01", .expected = .valid }, + }, + }, + .{ + .pubkey = "7bac18f6d2625d3915f233434cda38a577247a7332a5170b37142a34644145e0", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "bf26796cef4ddafcf5033c8d105057db0210b6ad", .sig = "1fda6dd4519fdbefb515bfa39e8e5911f4a0a8aa65f40ef0c542b8b34b87f9c249dc57f320718ff457ed5915c4d0fc352affc1287724d3f3a9de1ff777a02e01", .expected = .valid }, + }, + }, + .{ + .pubkey = "38ead304624abebf3e2b31e20e5629531e3fc659008887c9106f5e55adbbc62a", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "ae03da6997e40cea67935020152d3a9a365cc055", .sig = "068eafdc2f36b97f9bae7fbda88b530d16b0e35054d3a351e3a4c914b22854c711505e49682e1a447e10a69e3b04d0759c859897b64f71137acf355b63faf100", .expected = .valid }, + }, + }, + .{ + .pubkey = "e9bc95049af7e4817b17c402269ba5e767b7348757ac8002fec9e08390c0a9cf", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "489d473f7fb83c7f6823baf65482517bccd8f4ea", .sig = "43670abc9f09a8a415e76f4a21c6a46156f066b5a37b3c1e867cf67248c7b927e8d13a763e37abf936f5f27f7a8aa290539d21f740efd26b65fd5ad27085f400", .expected = .valid }, + }, + }, + .{ + .pubkey = "ee8155ca4e8fe7bc5bca5992044eab7f8c3c6a13db1176f42f46c29da5b064f4", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "1b704d6692d60a07ad1e1d047b65e105a80d3459", .sig = "56388f2228893b14ce4f2a5e0cc626591061de3a57c50a5ecab7b9d5bb2caeea191560a1cf2344c75fdb4a085444aa68d727b39f498169eaa82cf64a31f59803", .expected = .valid }, + }, + }, + .{ + .pubkey = "db507bfcc9576393f7157bb360532b05c5fcf2e764b690cc6698a4a30d349095", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "dc87030862c4c32f56261e93a367caf458c6be27", .sig = 
"553e5845fc480a577da6544e602caadaa00ae3e5aa3dce9ef332b1541b6d5f21bdf1d01e98baf80b8435f9932f89b3eb70f02da24787aac8e77279e797d0bd0b", .expected = .valid }, + }, + }, + .{ + .pubkey = "994eaf03309d6ad9d95a656bc1744e2886f029023a3750b34f35086b3c7227f8", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "7f41ef68508343ef18813cb2fb332445ec6480cd", .sig = "bc10f88081b7be1f2505b6e76c5c82e358cf21ec11b7df1f334fb587bada465b53d9f7b4d4fec964432ee91ead1bc32ed3c82f2167da1c834a37515df7fe130e", .expected = .valid }, + }, + }, + .{ + .pubkey = "127d37e406e0d83e4b55a09e21e8f50fb88af47e4a43f018cdebffc1948757f0", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "e1ce107971534bc46a42ac609a1a37b4ca65791d", .sig = "00c11e76b5866b7c37528b0670188c1a0473fb93c33b72ae604a8865a7d6e094ff722e8ede3cb18389685ff3c4086c29006047466f81e71a329711e0b9294709", .expected = .valid }, + }, + }, + .{ + .pubkey = "d83ba84edfb4bec49f29be31d80a64b7c0b5a502438cdb1d0dd1e0e3e55786de", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "869a827397c585cf35acf88a8728833ab1c8c81e", .sig = "0a6f0ac47ea136cb3ff00f7a96638e4984048999ee2da0af6e5c86bffb0e70bb97406b6ad5a4b764f7c99ebb6ec0fd434b8efe253b0423ef876c037998e8ab07", .expected = .valid }, + }, + }, + .{ + .pubkey = "d3c9aa2f3d6ef217a166e8ae403ed436c37facbbe3beceb78df6eb439f8fa04a", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "619d8c4f2c93104be01cd574a385ceca08c33a9e", .sig = "b7cbb942a6661e2312f79548224f3e44f5841c6e880c68340756a00ce94a914e8404858265985e6bb97ef01d2d7e5e41340309606bfc43c8c6a8f925126b3d09", .expected = .valid }, + }, + }, + .{ + .pubkey = "d53280367c1c0b95ac4112218b92c6a71c51fb6312ce668de196c7d52a136155", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "5257a0bae8326d259a6ce97420c65e6c2794afe2", .sig = "27a4f24009e579173ff3064a6eff2a4d20224f8f85fdec982a9cf2e6a3b51537348a1d7851a3a932128a923a393ea84e6b35eb3473c32dceb9d7e9cab03a0f0d", .expected = .valid }, 
+ }, + }, + .{ + .pubkey = "94ac2336ba97a476fb4c9f2b5563e4167ca292c6e99e422350a911ae3172c315", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "5acb6afc9b368f7acac0e71f6a4831c72d628405", .sig = "985b605fe3f449f68081197a68c714da0bfbf6ac2ab9abb0508b6384ea4999cb8d79af98e86f589409e8d2609a8f8bd7e80aaa8d92a84e7737fbe8dcef41920a", .expected = .valid }, + }, + }, + .{ + .pubkey = "e1e7316d231f7f275bdf403360304da1509fdf1af1fd25ca214eaac0a289398f", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "3c87b3453277b353941591fc7eaa7dd37604b42a", .sig = "1c8fbda3d39e2b441f06da6071c13115cb4115c7c3341704cf6513324d4cf1ef4a1dd7678a048b0dde84e48994d080befcd70854079d44b6a0b0f9fa002d130c", .expected = .valid }, + }, + }, + .{ + .pubkey = "fffbeea71215efaf9888fec2cc68edb3703ff11a66fd629b53cbda5eabc18750", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "0a68e27ef6847bfd9e398b328a0ded3679d4649d", .sig = "59097233eb141ed948b4f3c28a9496b9a7eca77454ecfe7e46737d1449a0b76b15aacf77cf48af27a668aa4434cfa26c504d75a2bcc4feac46465446234c0508", .expected = .valid }, + }, + }, + .{ + .pubkey = "19ccc0527599cb032e0b4c4d74e60f13901768a99df041c3bc1bf6c0ef271169", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "4e9bef60737c7d4dd10bd52567e1473a36d3573d", .sig = "519105608508fe2f1b6da4cc8b23e39798b1d18d25972beed0404cec722e01ba1b6a0f85e99e092cca8076b101b60d4ac5035684357f4d0daacdc642da742a06", .expected = .valid }, + }, + }, + .{ + .pubkey = "0e726e27047563aa0a1a9c2e085d8d26af2acba129d0869c65031e3e6cac329a", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "cc82b3163efda3ba7e9240e765112caa69113694", .sig = "d8b03ee579e73f16477527fc9dc37a72eaac0748a733772c483ba013944f01ef64fb4ec5e3a95021dc22f4ae282baff6e9b9cc8433c6b6710d82e7397d72ef04", .expected = .valid }, + }, + }, + .{ + .pubkey = "e77717b54a2b5e5bce5bccb8f0c5fdb5fd7df77ac254020fc9120dc0d4df4178", + .cases = &.{ + // regression test for arithmetic error + 
.{ .msg = "923a5c9e7b5635bb6c32c5a408a4a15b652450eb", .sig = "26da61fdfd38e6d01792813f27840c8b4766b0faaed39d0ee898cb450d94a5d5f57e58b6a003d7f9b56b20561954c6edcf66492d116b8b5e91f205a3a6449d0b", .expected = .valid }, + }, + }, + .{ + .pubkey = "6220972d3f7d150b36790d7d522384876d64d640cd9913186815e1629582ed36", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "6f2f0245de4587062979d0422d349f93ccdc3af2", .sig = "4adeaff7a58c5010a5a067feea0ae504d37b0c6a76c6c153e222f13409dff2df0fab69bc5059b97d925dc1b89e9851d7c627cb82d65585f9fd976124553f8902", .expected = .valid }, + }, + }, + .{ + .pubkey = "7b64a28c50ec7678a90e3e1a21522e30ac9db7b5215aea2bfb33bea037eab987", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "6e911edb27a170b983d4dee1110554f804330f41", .sig = "4204d620cde0c3008c0b2901f5d6b44f88f0e3cb4f4d62252bf6f3cb37c1fb150a9ccb296afe5e7c75f65b5c8edd13dc4910ffe1e1265b3707c59042cf9a5902", .expected = .valid }, + }, + }, + .{ + .pubkey = "724452210a9e4c994819229bf12bf84e95768a3a97c08d8d8f5f939a4cad34c5", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "b8cf807eea809aaf739aa091f3b7a3f2fd39fb51", .sig = "f8a69d3fd8c2ff0a9dec41e4c6b43675ce08366a35e220b1185ffc246c339e22c20ac661e866f52054015efd04f42eca2adcee6834c4df923b4a62576e4dff0e", .expected = .valid }, + }, + }, + .{ + .pubkey = "bad265b294ed2f422cb6a141694086238fbfe987571aa765d8b4f3a24105aa01", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "01a2b5f7fee813b4e9bd7fc25137648004795010", .sig = "61792c9442bc6338ac41fd42a40bee9b02ec1836503d60ff725128c63d72808880c36e6190b7da525cbee5d12900aa043547dd14a2709ef9e49d628f37f6b70c", .expected = .valid }, + }, + }, + .{ + .pubkey = "0aaee4b723db9b51ba7d22eb23eb8a76a5ac02f4fc9dd06f77bea42e1d37ec5a", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "0fbf5d47cb5d498feace8f98f1896208da38a885", .sig = 
"fa3cd41e3a8c00b19eecd404a63c3cb787cd30de0dfc936966cff2117f5aff18db6bef80fcfd8856f3fb2e9c3dc47593e9471103032af918feee638a33d40505", .expected = .valid }, + }, + }, + .{ + .pubkey = "812344af15a91ba83c2c91e96f1727ac0f3c4c41385b9fa84efa399ada5168be", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "36e67c1939750bffb3e4ba6cb85562612275e862", .sig = "97fbbcd7a1d0eb42d2f8c42448ef35a2c2472740556b645547865330d6c57068af377fced08aaf810c08cd3c43d296f1975710312e9334c98b485f831efa4103", .expected = .valid }, + }, + }, + .{ + .pubkey = "0ee5cb5597fbdf8dccc48b01485e39b33aa133b52d30d23740277267cfec3e3e", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "13945c894c1d3fe8562e8b20e5f0efaa26ade8e3", .sig = "d7dbaa337ffd2a5fd8d5fd8ad5aeccc0c0f83795c2c59fe62a40b87903b1ae62ed748a8df5af4d32f9f822a65d0e498b6f40eaf369a9342a1164ee7d08b58103", .expected = .valid }, + }, + }, + .{ + .pubkey = "9fba1de92b60b5b4703089763d0d6f9125e4dd7efae41f08a22882aef96892c4", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "4de142af4b8402f80a47fa812df84f42e283cee7", .sig = "09a2ed303a2fa7027a1dd7c3b0d25121eeed2b644a2fbc17aa0c8aea4524071ede7e7dd7a536d5497f8165d29e4e1b63200f74bbae39fbbbccb29889c62c1f09", .expected = .valid }, + }, + }, + .{ + .pubkey = "7582ab1b52e1316e5c13671f43b39ca36b28133cd0832831bcddd0b0f23398cb", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "563357f41b8b23b1d83f19f5667177a67da20b18", .sig = "e6884a6e6b2e60a0b5862251c001e7c79d581d777d6fc11d218d0aecd79f26a30e2ca22cc7c4674f8b72655bc4ee5cb5494ca07c05177656142ac55cc9d33e02", .expected = .valid }, + }, + }, + .{ + .pubkey = "dd2d678bae222f3fb6e8278f08cc9e1a66339c926c29ac0a16f9717f5ee18cd8", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "931bbf9c877a6571cf7d4609fc3eb867edd43f51", .sig = "6124c206d864507ea5d984b363b4cf583314db6856a45ded5e61eebff4d5e337e0b4c82b445ae2e52d549d2d961eace2ea01f81158e09a9686baa040db65ad08", .expected = .valid }, 
+ }, + }, + .{ + .pubkey = "ccbe7cb2e4bc215cee2f885e1d22f7e0d582b2bbbd782c104e548b152d26fc69", + .cases = &.{ + // regression test for arithmetic error + .{ .msg = "44530b0b34f598767a7b875b0caee3c7b9c502d1", .sig = "cfbd450a2c83cb8436c348822fe3ee347d4ee937b7f2ea11ed755cc52852407c9eec2c1fa30d2f9aef90e89b2cc3bcef2b1b9ca59f712110d19894a9cf6a2802", .expected = .valid }, + }, + }, +}; diff --git a/src/crypto/lib.zig b/src/crypto/lib.zig index 7ef14853ca..f3aedbe572 100644 --- a/src/crypto/lib.zig +++ b/src/crypto/lib.zig @@ -1,22 +1,8 @@ const std = @import("std"); -const builtin = @import("builtin"); pub const FnvHasher = @import("fnv.zig").FnvHasher; pub const bn254 = @import("bn254/lib.zig"); - -pub const ed25519 = struct { - const generic = @import("generic.zig"); - const avx512 = @import("avx512.zig"); - const has_avx512 = builtin.cpu.arch == .x86_64 and - std.Target.x86.featureSetHas(builtin.cpu.features, .avx512ifma) and - std.Target.x86.featureSetHas(builtin.cpu.features, .avx512vl); - - // avx512 implementation relies on llvm specific tricks - const namespace = if (has_avx512 and builtin.zig_backend == .stage2_llvm) avx512 else generic; - pub const ExtendedPoint = namespace.ExtendedPoint; - pub const CachedPoint = namespace.CachedPoint; - pub const pippenger = @import("pippenger.zig"); -}; +pub const ed25519 = @import("ed25519/lib.zig"); /// Extern definition of Ecdsa signature. 
pub const EcdsaSignature = extern struct { diff --git a/src/gossip/data.zig b/src/gossip/data.zig index 6b33561cee..ecf0e1f661 100644 --- a/src/gossip/data.zig +++ b/src/gossip/data.zig @@ -109,7 +109,7 @@ pub const SignedGossipData = struct { error.NonCanonical => unreachable, }; return .{ - .signature = .{ .data = signature.toBytes() }, + .signature = .fromSignature(signature), .data = data, }; } @@ -125,11 +125,11 @@ pub const SignedGossipData = struct { self.data.deinit(allocator); } - pub fn verify(self: *const Self, pubkey: Pubkey) !bool { + pub fn verify(self: *const Self, pubkey: Pubkey) !void { // should always be enough space or is invalid msg var buf: [PACKET_DATA_SIZE]u8 = undefined; const msg = try bincode.writeToSlice(&buf, self.data, bincode.Params.standard); - return self.signature.verify(pubkey, msg); + return try self.signature.verify(pubkey, msg); } pub fn id(self: *const Self) Pubkey { @@ -1881,7 +1881,7 @@ test "sig verify duplicateShreds" { data.from = pubkey; const value = SignedGossipData.initSigned(&keypair, .{ .DuplicateShred = .{ 0, data } }); - try std.testing.expect(try value.verify(pubkey)); + try value.verify(pubkey); } test "sanitize GossipData" { diff --git a/src/gossip/message.zig b/src/gossip/message.zig index 2c0464471d..387bd6789b 100644 --- a/src/gossip/message.zig +++ b/src/gossip/message.zig @@ -33,27 +33,18 @@ pub const GossipMessage = union(enum(u32)) { switch (self.*) { .PullRequest => |*pull| { var value = pull[1]; - const is_verified = try value.verify(value.id()); - if (!is_verified) { - return error.InvalidPullRequest; - } + value.verify(value.id()) catch return error.InvalidPullRequest; }, .PullResponse => |*pull| { const values = pull[1]; for (values) |*value| { - const is_verified = try value.verify(value.id()); - if (!is_verified) { - return error.InvalidPullResponse; - } + value.verify(value.id()) catch return error.InvalidPullResponse; } }, .PushMessage => |*push| { const values = push[1]; for (values) |*value| { 
- const is_verified = try value.verify(value.id()); - if (!is_verified) { - return error.InvalidPushMessage; - } + value.verify(value.id()) catch return error.InvalidPushMessage; } }, .PruneMessage => |*prune| { diff --git a/src/gossip/ping_pong.zig b/src/gossip/ping_pong.zig index 8a1e41b859..72fafb1b36 100644 --- a/src/gossip/ping_pong.zig +++ b/src/gossip/ping_pong.zig @@ -30,7 +30,7 @@ pub const Ping = struct { return .{ .from = Pubkey.fromPublicKey(&keypair.public_key), .token = token, - .signature = .{ .data = signature.toBytes() }, + .signature = .fromSignature(signature), }; } @@ -39,17 +39,16 @@ pub const Ping = struct { random.bytes(&token); const signature = try keypair.sign(&token, null); - return Ping{ + return .{ .from = Pubkey.fromPublicKey(&keypair.public_key), .token = token, - .signature = .{ .data = signature.toBytes() }, + .signature = .fromSignature(signature), }; } pub fn verify(self: *const Ping) !void { - if (!try self.signature.verify(self.from, &self.token)) { + self.signature.verify(self.from, &self.token) catch return error.InvalidSignature; - } } }; @@ -66,14 +65,13 @@ pub const Pong = struct { return .{ .from = Pubkey.fromPublicKey(&keypair.public_key), .hash = hash, - .signature = .{ .data = signature.toBytes() }, + .signature = .fromSignature(signature), }; } pub fn verify(self: *const Pong) !void { - if (!try self.signature.verify(self.from, &self.hash.data)) { + self.signature.verify(self.from, &self.hash.data) catch return error.InvalidSignature; - } } pub fn initRandom(random: std.Random, keypair: *const KeyPair) !Pong { @@ -84,7 +82,7 @@ pub const Pong = struct { pub fn eql(self: *const Pong, other: *const @This()) bool { return std.mem.eql(u8, &self.from.data, &other.from.data) and std.mem.eql(u8, &self.hash.data, &other.hash.data) and - std.mem.eql(u8, &self.signature.data, &other.signature.data); + std.mem.eql(u8, &self.signature.toBytes(), &other.signature.toBytes()); } }; @@ -274,14 +272,15 @@ test "PingCache works" { } 
test "ping signatures match rust" { - var keypair = try KeyPair.fromSecretKey(try std.crypto.sign.Ed25519.SecretKey.fromBytes([_]u8{ + var keypair = try KeyPair.fromSecretKey(try .fromBytes(.{ 125, 52, 162, 97, 231, 139, 58, 13, 185, 212, 57, 142, 136, 12, 21, 127, 228, 71, 115, 126, 138, 52, 102, 69, 103, 185, 45, 255, 132, 222, 243, 138, 25, 117, 21, 11, 61, 170, 38, 18, 67, 196, 242, 219, 50, 154, 4, 254, 79, 227, 253, 229, 188, 230, 121, 12, 227, 248, 199, 156, 253, 144, 175, 67, })); - var ping = Ping.init([_]u8{0} ** PING_TOKEN_SIZE, &keypair) catch unreachable; - const signature = ping.signature.data; + var ping = Ping.init(@splat(0), &keypair) catch unreachable; + + const signature = ping.signature.toBytes(); const rust_sig = [_]u8{ 52, 171, 91, 205, 183, 211, 38, 219, 53, 155, 163, 118, 202, 169, 15, 237, 147, 87, 209, 20, 6, 115, 24, 114, 196, 41, 217, 55, 123, 245, 35, 138, 126, 47, 233, 182, 90, 206, 13, 173, 212, 107, 94, 120, 167, 254, 14, 11, 253, 199, 158, 4, 203, 42, 173, 143, 214, 209, 132, 158, 223, 62, 214, 11 }; try testing.expect(std.mem.eql(u8, &signature, &rust_sig)); diff --git a/src/gossip/prune.zig b/src/gossip/prune.zig index edb318db24..73f8f8fce8 100644 --- a/src/gossip/prune.zig +++ b/src/gossip/prune.zig @@ -84,8 +84,8 @@ pub const PruneData = struct { var d: [PACKET_DATA_SIZE]u8 = undefined; const data = try bincode.writeToSlice(&d, signable_data, .{}); // sign - var signature = try keypair.sign(data, null); - self.signature.data = signature.toBytes(); + const signature = try keypair.sign(data, null); + self.signature = .fromSignature(signature); } pub fn signWithPrefix(self: *PruneData, keypair: *const KeyPair) !void { @@ -100,8 +100,8 @@ pub const PruneData = struct { var d: [PACKET_DATA_SIZE]u8 = undefined; const data = try bincode.writeToSlice(&d, signable_data, .{}); // sign - var signature = try keypair.sign(data, null); - self.signature.data = signature.toBytes(); + const signature = try keypair.sign(data, null); + 
self.signature = .fromSignature(signature); } pub fn verify(self: *const PruneData) !void { @@ -123,8 +123,7 @@ pub const PruneData = struct { var d: [PACKET_DATA_SIZE]u8 = undefined; const data = try bincode.writeToSlice(&d, signable_data, .{}); // verify - if (!try self.signature.verify(self.pubkey, data)) - return error.InvalidSignature; + try self.signature.verify(self.pubkey, data); } pub fn verifyWithPrefix(self: *const PruneData) !void { @@ -139,8 +138,7 @@ pub const PruneData = struct { var d: [PACKET_DATA_SIZE]u8 = undefined; const data = try bincode.writeToSlice(&d, signable_data, .{}); // verify - if (!try self.signature.verify(self.pubkey, data)) - return error.InvalidSignature; + try self.signature.verify(self.pubkey, data); } }; @@ -179,7 +177,10 @@ test "sign/verify PruneData with prefix" { // check if signing works try prune_data.signWithPrefix(&keypair); - try std.testing.expectEqual(expected_signature.data, prune_data.signature.data); + try std.testing.expectEqual( + expected_signature.toBytes(), + prune_data.signature.toBytes(), + ); } test "PruneData sig verify" { @@ -211,6 +212,6 @@ test "PruneData sig verify" { }; try prune_v2.signWithoutPrefix(&keypair); - var sig_bytes = prune_v2.signature.data; + var sig_bytes = prune_v2.signature.toBytes(); try std.testing.expectEqualSlices(u8, &rust_bytes, &sig_bytes); } diff --git a/src/gossip/service.zig b/src/gossip/service.zig index 08e5ec8740..c8ac4c7acd 100644 --- a/src/gossip/service.zig +++ b/src/gossip/service.zig @@ -3105,7 +3105,7 @@ test "test packet verification" { data.LegacyContactInfo.wallclock = 0; var value = SignedGossipData.initSigned(&keypair, data); - try std.testing.expect(try value.verify(id)); + try value.verify(id); var values = [_]SignedGossipData{value}; const message = GossipMessage{ diff --git a/src/ledger/benchmarks.zig b/src/ledger/benchmarks.zig index 2fdb44c6ee..79a37f4af2 100644 --- a/src/ledger/benchmarks.zig +++ b/src/ledger/benchmarks.zig @@ -310,7 +310,7 @@ pub const 
BenchmarkLedger = struct { for (random_bytes[0..]) |*byte| { byte.* = rng.random().int(u8); } - signatures.appendAssumeCapacity(.{ .data = random_bytes }); + signatures.appendAssumeCapacity(.fromBytes(random_bytes)); } const slot = 5; diff --git a/src/ledger/shred.zig b/src/ledger/shred.zig index 106ced32a2..984a7ad385 100644 --- a/src/ledger/shred.zig +++ b/src/ledger/shred.zig @@ -121,7 +121,7 @@ pub const Shred = union(ShredType) { }; } - pub fn verify(self: Shred, signer: sig.core.Pubkey) bool { + pub fn verify(self: Shred, signer: sig.core.Pubkey) !void { return switch (self) { inline .data, .code => |s| @TypeOf(s).Generic.verify(s, signer), }; @@ -327,7 +327,7 @@ pub const DataShred = struct { const payload = try allocator.alloc(u8, constants.payload_size); errdefer allocator.free(payload); - @memcpy(payload[0..Signature.SIZE], &leader_signature.data); + @memcpy(payload[0..Signature.SIZE], &leader_signature.toBytes()); @memcpy(payload[Signature.SIZE..][0..shard.len], shard); @memset(payload[Signature.SIZE + shard.len ..], 0); @@ -527,10 +527,11 @@ fn GenericShred(shred_type: ShredType) type { return self.payload[start..end]; } - fn verify(self: Self, leader: sig.core.Pubkey) bool { - const signed_data = self.merkleRoot() catch return false; - const signature = layout.getLeaderSignature(self.payload) orelse return false; - return signature.verify(leader, &signed_data.data) catch return false; + fn verify(self: Self, leader: sig.core.Pubkey) !void { + const root = try self.merkleRoot(); + const signature = layout.getLeaderSignature(self.payload) orelse + return error.InvalidSignature; + return try signature.verify(leader, &root.data); } /// this is the data that is signed by the signature @@ -546,12 +547,8 @@ fn GenericShred(shred_type: ShredType) type { fn retransmitterSignature(self: Self) !Signature { const offset = try retransmitterSignatureOffset(self.common.variant); const end = offset + Signature.SIZE; - if (self.payload.len < end) { - return 
error.InvalidPayloadSize; - } - var sig_bytes: [Signature.SIZE]u8 = undefined; - @memcpy(&sig_bytes, self.payload[offset..end]); - return .{ .data = sig_bytes }; + if (self.payload.len < end) return error.InvalidPayloadSize; + return .fromBytes(self.payload[offset..][0..64].*); } }; } @@ -872,7 +869,7 @@ fn setRetransmitterSignatureFor(shred: []u8, variant: ShredVariant, signature: S if (shred.len < end) { return error.InvalidPayloadSize; } - @memcpy(shred[offset..end], &signature.data); + @memcpy(shred[offset..end], &signature.toBytes()); } pub const CommonHeader = struct { @@ -889,7 +886,7 @@ pub const CommonHeader = struct { pub const @"!bincode-config:variant" = ShredVariantConfig; const ZEROED_FOR_TEST: CommonHeader = .{ - .leader_signature = Signature{ .data = .{0} ** Signature.SIZE }, + .leader_signature = .ZEROES, .variant = ShredVariant{ .shred_type = .data, .proof_size = 0, .chained = false, .resigned = false }, .slot = 0, .index = 0, @@ -1130,10 +1127,8 @@ pub const layout = struct { } pub fn getLeaderSignature(shred: []const u8) ?Signature { - if (shred.len < Signature.SIZE) { - return null; - } - return .{ .data = shred[0..SIZE_OF_SIGNATURE].* }; + if (shred.len < Signature.SIZE) return null; + return .fromBytes(shred[0..SIZE_OF_SIGNATURE].*); } pub fn merkleRoot(shred: []const u8) ?Hash { @@ -1271,7 +1266,7 @@ test "getLeaderSignature" { 193, 194, 143, 192, 142, 141, 134, 85, 93, 252, 43, 200, 224, 101, 12, 28, 97, 202, 230, 215, 34, 217, 20, 7, }; - try std.testing.expect(std.mem.eql(u8, &expected_signature, &signature.data)); + try std.testing.expect(std.mem.eql(u8, &expected_signature, &signature.toBytes())); } test "layout.merkleRoot" { diff --git a/src/ledger/shred_inserter/shred_inserter.zig b/src/ledger/shred_inserter/shred_inserter.zig index e16807fadf..849f9544ed 100644 --- a/src/ledger/shred_inserter/shred_inserter.zig +++ b/src/ledger/shred_inserter/shred_inserter.zig @@ -318,7 +318,7 @@ pub const ShredInserter = struct { if (leader == 
null) { continue; } - if (!shred.verify(leader.?)) { + if (std.meta.isError(shred.verify(leader.?))) { self.metrics.num_recovered_failed_sig.inc(); continue; } diff --git a/src/replay/commit.zig b/src/replay/commit.zig index b31794ae85..7477410f14 100644 --- a/src/replay/commit.zig +++ b/src/replay/commit.zig @@ -109,10 +109,20 @@ pub const Committer = struct { const recent_blockhash = &transaction.transaction.msg.recent_blockhash; const signature = transaction.transaction.signatures[0]; - try self.status_cache - .insert(allocator, rng.random(), recent_blockhash, &message_hash.data, slot); - try self.status_cache - .insert(allocator, rng.random(), recent_blockhash, &signature.data, slot); + try self.status_cache.insert( + allocator, + rng.random(), + recent_blockhash, + &message_hash.data, + slot, + ); + try self.status_cache.insert( + allocator, + rng.random(), + recent_blockhash, + &signature.toBytes(), + slot, + ); // NOTE: we'll need to store the actual status at some point, probably for rpc. 
} diff --git a/src/replay/exec_async.zig b/src/replay/exec_async.zig index 2e19075f55..65e086d378 100644 --- a/src/replay/exec_async.zig +++ b/src/replay/exec_async.zig @@ -690,7 +690,7 @@ test "TransactionScheduler: signature verification failure" { const replaced_sigs = try tx_arena.allocator() .dupe(sig.core.Signature, transactions[5].signatures); - replaced_sigs[0].data[0] +%= 1; + replaced_sigs[0].r[0] +%= 1; transactions[5].signatures = replaced_sigs; const slot_hashes = try sig.runtime.sysvar.SlotHashes.init(allocator); diff --git a/src/replay/preprocess_transaction.zig b/src/replay/preprocess_transaction.zig index 5b3caeedf4..a3a1b921a8 100644 --- a/src/replay/preprocess_transaction.zig +++ b/src/replay/preprocess_transaction.zig @@ -43,9 +43,11 @@ pub fn preprocessTransaction( }; if (sig_verify == .run_sig_verify) { - txn.verifySignatures(msg_bytes.constSlice()) catch |err| return switch (err) { - error.SignatureVerificationFailed => .{ .err = .SignatureFailure }, - else => .{ .err = .SanitizeFailure }, + txn.verifySignatures(msg_bytes.constSlice()) catch |err| { + return switch (err) { + error.SignatureVerificationFailed => .{ .err = .SignatureFailure }, + else => .{ .err = .SanitizeFailure }, + }; }; } diff --git a/src/runtime/program/precompiles/ed25519.zig b/src/runtime/program/precompiles/ed25519.zig index 59e0b773b9..082fb591de 100644 --- a/src/runtime/program/precompiles/ed25519.zig +++ b/src/runtime/program/precompiles/ed25519.zig @@ -13,8 +13,6 @@ const TransactionError = sig.ledger.transaction_status.TransactionError; const getInstructionData = sig.runtime.program.precompiles.getInstructionData; const Ed25519 = std.crypto.sign.Ed25519; -const Curve = std.crypto.ecc.Edwards25519; -const Sha512 = std.crypto.hash.sha2.Sha512; pub const ID: Pubkey = .parse("Ed25519SigVerify111111111111111111111111111"); @@ -90,7 +88,7 @@ pub fn verify( sig_offsets.signature_offset, 32 * 2, // 1 scalar + 1 point ); - const signature: Ed25519.Signature = 
.fromBytes(signature_bytes[0..64].*); + const signature: sig.core.Signature = .fromBytes(signature_bytes[0..64].*); const pubkey_bytes = try getInstructionData( data, @@ -99,8 +97,9 @@ pub fn verify( sig_offsets.public_key_offset, 32, ); - // identity is rejected in verifySignature() - const pubkey: Ed25519.PublicKey = .{ .bytes = pubkey_bytes[0..32].* }; + // specifically not using `fromBytes`, since we want the encoding error to happen inside of + // the `verifySignature` call. + const pubkey: sig.core.Pubkey = .{ .data = pubkey_bytes[0..32].* }; const msg = try getInstructionData( data, @@ -110,61 +109,15 @@ pub fn verify( sig_offsets.message_data_size, ); - verifySignature( - &signature, - &pubkey, + sig.crypto.ed25519.verifySignature( + signature, + pubkey, msg, - feature_set, - slot, + feature_set.active(.ed25519_precompile_verify_strict, slot), ) catch return error.InvalidSignature; } } -fn verifySignature( - signature: *const Ed25519.Signature, - pubkey: *const Ed25519.PublicKey, - msg: []const u8, - feature_set: *const FeatureSet, - slot: Slot, -) !void { - try Curve.scalar.rejectNonCanonical(signature.s); - - const a = try Ed25519.Curve.fromBytes(pubkey.bytes); - try a.rejectIdentity(); - - // this guarantees that `st.expected_r` Z coordinate will be 1, which allows us to equate - // without that pesky inversion. - const r = try Ed25519.Curve.fromBytes(signature.r); - try r.rejectIdentity(); - - // https://github.com/dalek-cryptography/ed25519-dalek/blob/02001d8c3422fb0314b541fdb09d04760f7ab4ba/src/verifying.rs#L424-L427 - if (feature_set.active(.ed25519_precompile_verify_strict, slot)) { - try a.rejectLowOrder(); - try r.rejectLowOrder(); - } - - var h = Sha512.init(.{}); - h.update(&signature.r); - h.update(&pubkey.bytes); - h.update(msg); - - var hram64: [Sha512.digest_length]u8 = undefined; - h.final(&hram64); - const hram = Ed25519.Curve.scalar.reduce64(hram64); - - // We can use [8][S]B = [8]R + [8][k]A' or [S]B = R + [k]A' verification here. 
- // We opt for cofactorless since it's faster. - const computed_r = try Curve.basePoint.mulDoubleBasePublic(signature.s, a.neg(), hram); - if (!fastEqual(computed_r, r)) return error.InvalidSignature; -} - -/// Equate two ed25519 points with the assumption that b.z is 1. -fn fastEqual(a: Curve, b: Curve) bool { - const x1 = b.x.mul(a.z); - const y1 = b.y.mul(a.z); - return x1.equivalent(a.x) and y1.equivalent(a.y); -} - // https://github.com/anza-xyz/agave/blob/a8aef04122068ec36a7af0721e36ee58efa0bef2/sdk/src/ed25519_instruction.rs#L35 pub fn newInstruction( allocator: std.mem.Allocator, @@ -400,17 +353,18 @@ test "ed25519 signature offset" { // https://github.com/anza-xyz/agave/blob/2d834361c096198176dbdc4524d5003bccf6c192/precompiles/src/ed25519.rs#L446 test "ed25519_malleability" { + const allocator = std.testing.allocator; { const message = "hello"; const keypair = Ed25519.KeyPair.generate(); const signature = try keypair.sign(message, null); const instruction = try newInstruction( - std.testing.allocator, + allocator, &signature, &keypair.public_key, message, ); - defer std.testing.allocator.free(instruction.data); + defer allocator.free(instruction.data); const tx: sig.core.Transaction = .{ .msg = .{ .account_keys = &.{ID}, @@ -426,8 +380,8 @@ test "ed25519_malleability" { .signatures = &.{}, }; - _ = try verifyPrecompiles(std.testing.allocator, &tx, &FeatureSet.ALL_DISABLED, 0); - _ = try verifyPrecompiles(std.testing.allocator, &tx, &FeatureSet.ALL_ENABLED_AT_GENESIS, 0); + _ = try verifyPrecompiles(allocator, &tx, &FeatureSet.ALL_DISABLED, 0); + _ = try verifyPrecompiles(allocator, &tx, &FeatureSet.ALL_ENABLED_AT_GENESIS, 0); } { @@ -452,8 +406,8 @@ test "ed25519_malleability" { 0x71, 0x6f, 0xb8, 0x96, 0xff, 0xee, 0xac, 0x09, }, ); - const instruction = try newInstruction(std.testing.allocator, &signature, &pubkey, message); - defer std.testing.allocator.free(instruction.data); + const instruction = try newInstruction(allocator, &signature, &pubkey, 
message); + defer allocator.free(instruction.data); const tx: sig.core.Transaction = .{ .msg = .{ .account_keys = &.{ID}, @@ -469,10 +423,10 @@ test "ed25519_malleability" { .signatures = &.{}, }; - _ = try verifyPrecompiles(std.testing.allocator, &tx, &FeatureSet.ALL_DISABLED, 0); + _ = try verifyPrecompiles(allocator, &tx, &FeatureSet.ALL_DISABLED, 0); try std.testing.expectEqual( TransactionError{ .InstructionError = .{ 0, .{ .Custom = 0 } } }, - try verifyPrecompiles(std.testing.allocator, &tx, &FeatureSet.ALL_ENABLED_AT_GENESIS, 0), + try verifyPrecompiles(allocator, &tx, &FeatureSet.ALL_ENABLED_AT_GENESIS, 0), ); } } diff --git a/src/runtime/program_loader.zig b/src/runtime/program_loader.zig index 0cf3345f4b..a64d646047 100644 --- a/src/runtime/program_loader.zig +++ b/src/runtime/program_loader.zig @@ -41,7 +41,11 @@ pub fn loadPrograms( errdefer programs.deinit(allocator); for (accounts.keys(), accounts.values()) |pubkey, account| { - if (!account.executable) continue; + // https://github.com/firedancer-io/solfuzz-agave/blob/agave-v3.0.3/src/lib.rs#L771-L800 + if (!account.owner.equals(&bpf_loader.v1.ID) and + !account.owner.equals(&bpf_loader.v2.ID) and + !account.owner.equals(&bpf_loader.v3.ID) and + !account.owner.equals(&bpf_loader.v4.ID)) continue; var loaded_program = try loadProgram( allocator, @@ -506,10 +510,8 @@ test "loadPrograms: bad owner" { loaded_programs.deinit(allocator); } - switch (loaded_programs.get(program_key).?) 
{ - .failed => {}, - .loaded => std.debug.panic("Program should not load!", .{}), - } + if (loaded_programs.get(program_key) != null) + std.debug.panic("Program should not load!", .{}); } } diff --git a/src/shred_network/repair_message.zig b/src/shred_network/repair_message.zig index e40261d955..750325c0e6 100644 --- a/src/shred_network/repair_message.zig +++ b/src/shred_network/repair_message.zig @@ -21,17 +21,15 @@ const SIGNED_REPAIR_TIME_WINDOW_SECS: u64 = 600; /// Analogous to [ShredRepairType](https://github.com/anza-xyz/agave/blob/8c5a33a81a0504fd25d0465bed35d153ff84819f/core/src/repair/serve_repair.rs#L95) pub const RepairRequest = union(enum) { /// Requesting `MAX_ORPHAN_REPAIR_RESPONSES` parent shreds - Orphan: Slot, + orphan: Slot, /// Requesting any shred with index greater than or equal to the particular index HighestShred: struct { Slot, u64 }, /// Requesting the missing shred at a particular index Shred: struct { Slot, u64 }, - const Self = @This(); - - pub fn slot(self: *const Self) Slot { - return switch (self.*) { - .Orphan => |x| x, + pub fn slot(self: RepairRequest) Slot { + return switch (self) { + .orphan => |x| x, .HighestShred => |x| x[0], .Shred => |x| x[0], }; @@ -56,94 +54,106 @@ pub fn serializeRepairRequest( defer zone.deinit(); const header: RepairRequestHeader = .{ - .signature = .{ .data = undefined }, + .signature = .ZEROES, .sender = .{ .data = keypair.public_key.bytes }, .recipient = recipient, .timestamp = timestamp, .nonce = nonce, }; const msg: RepairMessage = switch (request) { - .Shred => |r| .{ .WindowIndex = .{ + .Shred => |r| .{ .window_index = .{ .header = header, .slot = r[0], .shred_index = r[1], } }, - .HighestShred => |r| .{ .HighestWindowIndex = .{ + .HighestShred => |r| .{ .highest_window_index = .{ .header = header, .slot = r[0], .shred_index = r[1], } }, - .Orphan => |r| .{ .Orphan = .{ + .orphan => |r| .{ .orphan = .{ .header = header, .slot = r, } }, }; + var serialized = try bincode.writeToSlice(buf, msg, .{}); 
- var signer = try keypair.signer(null); // TODO noise + var signer = try keypair.signer(null); signer.update(serialized[0..4]); signer.update(serialized[4 + Signature.SIZE ..]); - @memcpy(serialized[4 .. 4 + Signature.SIZE], &signer.finalize().toBytes()); + + const signature = signer.finalize(); + @memcpy(serialized[4..][0..Signature.SIZE], &signature.toBytes()); return serialized; } +pub const RepairMessageType = enum(u8) { + pong = 7, + window_index, + highest_window_index, + orphan, + ancestor_hashes, +}; + /// Messaging data that is directly serialized and sent over repair sockets. /// Contains any header/identification as needed. /// /// Analogous to [RepairProtocol](https://github.com/anza-xyz/agave/blob/8c5a33a81a0504fd25d0465bed35d153ff84819f/core/src/repair/serve_repair.rs#L221) -pub const RepairMessage = union(enum(u8)) { - Pong: Pong = 7, - WindowIndex: struct { +pub const RepairMessage = union(RepairMessageType) { + pong: Pong, + window_index: struct { header: RepairRequestHeader, slot: Slot, shred_index: u64, }, - HighestWindowIndex: struct { + highest_window_index: struct { header: RepairRequestHeader, slot: Slot, shred_index: u64, }, - Orphan: struct { + orphan: struct { header: RepairRequestHeader, slot: Slot, }, - AncestorHashes: struct { + ancestor_hashes: struct { header: RepairRequestHeader, slot: Slot, }, - pub const Tag: type = @typeInfo(Self).@"union".tag_type.?; - const Self = @This(); - const MAX_SERIALIZED_SIZE: usize = 160; - pub fn eql(self: *const Self, other: *const Self) bool { - if (!std.mem.eql(u8, @tagName(self.*), @tagName(other.*))) { - return false; - } - switch (self.*) { - .Pong => |*s| return s.eql(&other.Pong), - .WindowIndex => |*s| { - const o = other.WindowIndex; - return s.header.eql(&o.header) and s.slot == o.slot and s.shred_index == o.shred_index; + pub fn eql(self: RepairMessage, other: RepairMessage) bool { + if (std.meta.activeTag(self) != std.meta.activeTag(other)) return false; + switch (self) { + .pong => |s| 
return s.eql(&other.pong), + .window_index => |s| { + const o = other.window_index; + return s.header.eql(o.header) and + s.slot == o.slot and + s.shred_index == o.shred_index; }, - .HighestWindowIndex => |*s| { - const o = other.HighestWindowIndex; - return s.header.eql(&o.header) and s.slot == o.slot and s.shred_index == o.shred_index; + .highest_window_index => |s| { + const o = other.highest_window_index; + return s.header.eql(o.header) and + s.slot == o.slot and + s.shred_index == o.shred_index; }, - .Orphan => |*s| { - return s.header.eql(&other.Orphan.header) and s.slot == other.Orphan.slot; + .orphan => |*s| { + return s.header.eql(other.orphan.header) and + s.slot == other.orphan.slot; }, - .AncestorHashes => |*s| { - return s.header.eql(&other.AncestorHashes.header) and s.slot == other.AncestorHashes.slot; + .ancestor_hashes => |s| { + return s.header.eql(other.ancestor_hashes.header) and + s.slot == other.ancestor_hashes.slot; }, } } /// Analogous to [ServeRepair::verify_signed_packet](https://github.com/anza-xyz/agave/blob/8c5a33a81a0504fd25d0465bed35d153ff84819f/core/src/repair/serve_repair.rs#L847) pub fn verify( - self: *const Self, + self: RepairMessage, /// bincode serialized data, from which this struct was deserialized serialized: []u8, /// to compare to the header. 
typically is this validator's own pubkey @@ -151,36 +161,27 @@ pub const RepairMessage = union(enum(u8)) { /// unix timestamp in milliseconds when this function is called current_timestamp_millis: u64, ) error{ IdMismatch, InvalidSignature, Malformed, TimeSkew }!void { - switch (self.*) { - .Pong => |pong| pong.verify() catch { - return error.InvalidSignature; - }, + switch (self) { + .pong => |p| try p.verify(), inline else => |msg| { // i am the intended recipient const header: RepairRequestHeader = msg.header; - if (!header.recipient.equals(&expected_recipient)) { - return error.IdMismatch; - } - - // message was generated recently - const time_diff = @as(i128, current_timestamp_millis) - @as(i128, header.timestamp); - const time_diff_abs = if (time_diff >= 0) time_diff else -time_diff; - if (time_diff_abs > SIGNED_REPAIR_TIME_WINDOW_SECS) { - return error.TimeSkew; - } - - // signature is valid - if (serialized.len < 4 + Signature.SIZE) { - return error.Malformed; - } - var verifier = header.signature.verifier(header.sender) catch { - return error.InvalidSignature; - }; - verifier.update(serialized[0..4]); - verifier.update(serialized[4 + Signature.SIZE ..]); - verifier.verify() catch { + if (!header.recipient.equals(&expected_recipient)) return error.IdMismatch; + + // ensure that the message was generated recently + const time_difference = + @abs(@as(i128, current_timestamp_millis) - @as(i128, header.timestamp)); + if (time_difference > SIGNED_REPAIR_TIME_WINDOW_SECS) return error.TimeSkew; + + // verify the signature is valid + if (serialized.len < 4 + Signature.SIZE) return error.Malformed; + + // Part of the message is the signature itself, and we omit that. + var message: [MAX_SERIALIZED_SIZE - Signature.SIZE]u8 = undefined; + @memcpy(message[0..4], serialized[0..4]); + @memcpy(message[4..].ptr, serialized[4 + Signature.SIZE ..]); + header.signature.verify(header.sender, message[0 .. 
serialized.len - Signature.SIZE]) catch return error.InvalidSignature; - }; }, } } @@ -193,7 +194,7 @@ pub const RepairRequestHeader = struct { timestamp: u64, nonce: Nonce, - fn eql(self: *const @This(), other: *const @This()) bool { + fn eql(self: RepairRequestHeader, other: RepairRequestHeader) bool { return self.signature.eql(&other.signature) and self.sender.equals(&other.sender) and self.recipient.equals(&other.recipient) and @@ -207,20 +208,21 @@ test "signed/serialized RepairRequest is valid" { var prng = std.Random.DefaultPrng.init(392138); const random = prng.random(); - inline for (.{ - RepairRequest{ .Orphan = random.int(Slot) }, - RepairRequest{ .Shred = .{ random.int(Slot), random.int(u64) } }, - RepairRequest{ .HighestShred = .{ random.int(Slot), random.int(u64) } }, + inline for ([_]RepairRequest{ + .{ .orphan = random.int(Slot) }, + .{ .Shred = .{ random.int(Slot), random.int(u64) } }, + .{ .HighestShred = .{ random.int(Slot), random.int(u64) } }, }) |request| { - var kp_noise: [32]u8 = undefined; - random.bytes(&kp_noise); - const keypair = try KeyPair.generateDeterministic(kp_noise); - const recipient = Pubkey.initRandom(random); const timestamp = random.int(u64); const nonce = random.int(Nonce); + // keypair with which we sign the repair request + const keypair = KeyPair.generate(); + + const recipient = Pubkey.initRandom(random); + var buf: [1232]u8 = undefined; - var serialized = try serializeRepairRequest( + const serialized = try serializeRepairRequest( &buf, request, &keypair, @@ -229,12 +231,13 @@ test "signed/serialized RepairRequest is valid" { nonce, ); - // TODO: `serializeRepairRequest` is currently non-deterministic because keypair.signer uses csprng. - // figure out a way to make it deterministic! - + // deserializing the repair request should return an identical struct and verification should succeed. 
var deserialized = try bincode.readFromSlice(allocator, RepairMessage, serialized, .{}); try deserialized.verify(serialized, recipient, timestamp); - random.bytes(serialized[10..15]); // >99.99% chance that this invalidates the signature + + // modify the signature of the request, then it will fail to verify it + serialized[4] +%= 10; + var bad = try bincode.readFromSlice(allocator, RepairMessage, serialized, .{}); if (bad.verify(serialized, recipient, timestamp)) |_| @panic("should err") else |_| {} } @@ -246,7 +249,7 @@ test "RepairRequestHeader serialization round trip" { prng.fill(&signature); const header: RepairRequestHeader = .{ - .signature = .{ .data = signature }, + .signature = .fromBytes(signature), .sender = Pubkey.initRandom(prng.random()), .recipient = Pubkey.initRandom(prng.random()), .timestamp = 5924, @@ -276,11 +279,11 @@ test "RepairRequestHeader serialization round trip" { serialized, .{}, ); - try std.testing.expect(header.eql(&roundtripped)); + try std.testing.expect(header.eql(roundtripped)); } -test "RepairProtocolMessage.Pong serialization round trip" { - try testHelpers.assertMessageSerializesCorrectly(57340, .Pong, &[_]u8{ +test "RepairProtocolMessage.pong serialization round trip" { + try testHelpers.assertMessageSerializesCorrectly(57340, .pong, &[_]u8{ 7, 0, 0, 0, 252, 143, 181, 36, 240, 87, 69, 104, 157, 159, 242, 94, 101, 48, 187, 120, 173, 241, 68, 167, 217, 67, 141, 46, 105, 85, 179, 69, 249, 140, 6, 145, 6, 201, 32, 10, 11, 24, 157, 240, 245, 65, 91, 80, 255, 89, 18, @@ -292,8 +295,8 @@ test "RepairProtocolMessage.Pong serialization round trip" { }); } -test "RepairProtocolMessage.WindowIndex serialization round trip" { - try testHelpers.assertMessageSerializesCorrectly(4823794, .WindowIndex, &[_]u8{ +test "RepairProtocolMessage.window_index serialization round trip" { + try testHelpers.assertMessageSerializesCorrectly(4823794, .window_index, &[_]u8{ 8, 0, 0, 0, 100, 7, 241, 74, 194, 88, 24, 128, 85, 15, 149, 108, 142, 133, 234, 
217, 3, 79, 124, 171, 68, 30, 189, 219, 173, 11, 184, 159, 208, 104, 206, 31, 233, 86, 166, 102, 235, 97, 198, 145, 62, 149, 19, 202, 91, 237, 153, @@ -307,8 +310,8 @@ test "RepairProtocolMessage.WindowIndex serialization round trip" { }); } -test "RepairProtocolMessage.HighestWindowIndex serialization round trip" { - try testHelpers.assertMessageSerializesCorrectly(636345, .HighestWindowIndex, &[_]u8{ +test "RepairProtocolMessage.highest_window_index serialization round trip" { + try testHelpers.assertMessageSerializesCorrectly(636345, .highest_window_index, &[_]u8{ 9, 0, 0, 0, 44, 123, 16, 108, 173, 151, 229, 132, 4, 0, 5, 215, 25, 179, 235, 166, 181, 42, 30, 231, 218, 43, 166, 238, 92, 80, 234, 87, 30, 123, 140, 27, 65, 165, 32, 139, 235, 225, 146, 239, 107, 162, 4, 80, 215, 131, 42, @@ -322,8 +325,8 @@ test "RepairProtocolMessage.HighestWindowIndex serialization round trip" { }); } -test "RepairProtocolMessage.Orphan serialization round trip" { - try testHelpers.assertMessageSerializesCorrectly(734566, .Orphan, &[_]u8{ +test "RepairProtocolMessage.orphan serialization round trip" { + try testHelpers.assertMessageSerializesCorrectly(734566, .orphan, &[_]u8{ 10, 0, 0, 0, 52, 54, 182, 49, 197, 238, 253, 118, 145, 61, 198, 235, 42, 211, 229, 42, 2, 33, 5, 161, 179, 171, 26, 243, 51, 240, 82, 98, 121, 90, 210, 244, 120, 168, 226, 131, 209, 42, 251, 16, 90, 129, 113, 90, 195, 130, 55, @@ -336,8 +339,8 @@ test "RepairProtocolMessage.Orphan serialization round trip" { }); } -test "RepairProtocolMessage.AncestorHashes serialization round trip" { - try testHelpers.assertMessageSerializesCorrectly(6236757, .AncestorHashes, &[_]u8{ +test "RepairProtocolMessage.ancestor_hashes serialization round trip" { + try testHelpers.assertMessageSerializesCorrectly(6236757, .ancestor_hashes, &[_]u8{ 11, 0, 0, 0, 192, 86, 218, 156, 168, 139, 216, 200, 30, 181, 244, 121, 90, 41, 177, 117, 55, 40, 199, 207, 62, 118, 56, 134, 73, 88, 74, 2, 139, 189, 201, 150, 22, 75, 239, 15, 35, 125, 
154, 130, 165, 120, 24, 154, 159, 42, 222, @@ -353,8 +356,8 @@ test "RepairProtocolMessage.AncestorHashes serialization round trip" { test "RepairProtocolMessage serializes to size <= MAX_SERIALIZED_SIZE" { var prng = std.Random.DefaultPrng.init(184837); for (0..10) |_| { - inline for (@typeInfo(RepairMessage.Tag).@"enum".fields) |enum_field| { - const tag = @field(RepairMessage.Tag, enum_field.name); + inline for (@typeInfo(RepairMessageType).@"enum".fields) |enum_field| { + const tag = @field(RepairMessageType, enum_field.name); const msg = testHelpers.randomRepairProtocolMessage(prng.random(), tag); var buf: [RepairMessage.MAX_SERIALIZED_SIZE]u8 = undefined; _ = try bincode.writeToSlice(&buf, msg, .{}); @@ -365,7 +368,7 @@ test "RepairProtocolMessage serializes to size <= MAX_SERIALIZED_SIZE" { const testHelpers = struct { fn assertMessageSerializesCorrectly( seed: u64, - tag: RepairMessage.Tag, + tag: RepairMessageType, expected: []const u8, ) !void { var prng = std.Random.DefaultPrng.init(seed); @@ -377,10 +380,10 @@ const testHelpers = struct { try std.testing.expect(std.mem.eql(u8, expected, serialized)); switch (msg) { - .Pong => |_| try msg.verify(serialized, undefined, 0), + .pong => |_| try msg.verify(serialized, undefined, 0), inline else => |m| { const result = msg.verify(serialized, m.header.recipient, m.header.timestamp); - if (result) |_| @panic("should fail due to signature") else |_| {} + try std.testing.expect(std.meta.isError(result)); }, } @@ -390,7 +393,7 @@ const testHelpers = struct { serialized, .{}, ); - try std.testing.expect(msg.eql(&roundtripped)); + try std.testing.expect(msg.eql(roundtripped)); // // rust template to generate expectation: // let header = RepairRequestHeader { @@ -400,7 +403,7 @@ const testHelpers = struct { // timestamp: , // nonce: , // }; - // let msg = RepairProtocol::AncestorHashes { + // let msg = RepairProtocol::ancestor_hashes { // header, // slot: , // }; @@ -413,7 +416,7 @@ const testHelpers = struct { 
random.bytes(&signature); return .{ - .signature = .{ .data = signature }, + .signature = .fromBytes(signature), .sender = Pubkey.initRandom(random), .recipient = Pubkey.initRandom(random), .timestamp = random.int(u64), @@ -423,30 +426,30 @@ const testHelpers = struct { fn randomRepairProtocolMessage( random: std.Random, - message_type: RepairMessage.Tag, + message_type: RepairMessageType, ) RepairMessage { return switch (message_type) { - .Pong => x: { + .pong => x: { var buf: [32]u8 = undefined; random.bytes(&buf); const kp = KeyPair.generateDeterministic(buf) catch unreachable; - break :x .{ .Pong = Pong.initRandom(random, &kp) catch unreachable }; + break :x .{ .pong = Pong.initRandom(random, &kp) catch unreachable }; }, - .WindowIndex => .{ .WindowIndex = .{ + .window_index => .{ .window_index = .{ .header = randomRepairRequestHeader(random), .slot = random.int(Slot), .shred_index = random.int(u64), } }, - .HighestWindowIndex => .{ .HighestWindowIndex = .{ + .highest_window_index => .{ .highest_window_index = .{ .header = randomRepairRequestHeader(random), .slot = random.int(Slot), .shred_index = random.int(u64), } }, - .Orphan => .{ .Orphan = .{ + .orphan => .{ .orphan = .{ .header = randomRepairRequestHeader(random), .slot = random.int(Slot), } }, - .AncestorHashes => .{ .AncestorHashes = .{ + .ancestor_hashes => .{ .ancestor_hashes = .{ .header = randomRepairRequestHeader(random), .slot = random.int(Slot), } }, @@ -459,21 +462,21 @@ const testHelpers = struct { if (!DEBUG) return; std.debug.print("_\n\n", .{}); switch (message.*) { - .Pong => |*msg| { + .pong => |*msg| { std.debug.print("from: {any}\n\n", .{msg.from}); std.debug.print("hash: {any}\n\n", .{msg.hash}); std.debug.print("signature: {any}\n\n", .{msg.signature}); }, - .WindowIndex => |*msg| { + .window_index => |*msg| { debugHeader(msg.header); }, - .HighestWindowIndex => |*msg| { + .highest_window_index => |*msg| { debugHeader(msg.header); }, - .Orphan => |*msg| { + .orphan => |*msg| { 
debugHeader(msg.header); }, - .AncestorHashes => |*msg| { + .ancestor_hashes => |*msg| { debugHeader(msg.header); }, } @@ -485,7 +488,7 @@ const testHelpers = struct { std.debug.print("nonce: {any}\n\n", .{header.nonce}); std.debug.print("recipient: {any}\n\n", .{header.recipient.data}); std.debug.print("sender: {any}\n\n", .{header.sender.data}); - std.debug.print("signature: {any}\n\n", .{header.signature.data}); + std.debug.print("signature: {any}\n\n", .{header.signature.toBytes()}); std.debug.print("timestamp: {any}\n\n", .{header.timestamp}); } }; diff --git a/src/shred_network/repair_service.zig b/src/shred_network/repair_service.zig index ce6522d7b5..f151dc6da1 100644 --- a/src/shred_network/repair_service.zig +++ b/src/shred_network/repair_service.zig @@ -675,8 +675,8 @@ test "RepairService sends repair request to gossip peer" { try std.testing.expect(160 == size); const msg = try bincode.readFromSlice(allocator, RepairMessage, buf[0..160], .{}); try msg.verify(buf[0..160], Pubkey.fromPublicKey(&peer_keypair.public_key), @intCast(std.time.milliTimestamp())); - try std.testing.expect(msg.HighestWindowIndex.slot == 13579); - try std.testing.expect(msg.HighestWindowIndex.shred_index == 0); + try std.testing.expect(msg.highest_window_index.slot == 13579); + try std.testing.expect(msg.highest_window_index.shred_index == 0); } test "RepairPeerProvider selects correct peers" { diff --git a/src/shred_network/shred_receiver.zig b/src/shred_network/shred_receiver.zig index 080e45a12c..13926f25a5 100644 --- a/src/shred_network/shred_receiver.zig +++ b/src/shred_network/shred_receiver.zig @@ -163,7 +163,7 @@ pub const ShredReceiver = struct { return null; }; const ping = switch (repair_ping) { - .Ping => |ping| ping, + .ping => |ping| ping, }; ping.verify() catch { metrics.ping_verify_fail_count.inc(); @@ -171,7 +171,7 @@ pub const ShredReceiver = struct { }; metrics.valid_ping_count.inc(); - const reply: RepairMessage = .{ .Pong = try Pong.init(&ping, keypair) }; + 
const reply: RepairMessage = .{ .pong = try Pong.init(&ping, keypair) }; return try Packet.initFromBincode( sig.net.SocketAddr.fromEndpoint(&packet.addr), @@ -192,9 +192,9 @@ test "handlePing" { const pong = try Pong.init(&ping, &my_keypair); const addr = sig.net.SocketAddr.initIpv4(.{ 127, 0, 0, 1 }, 88); - const input_ping_packet = try Packet.initFromBincode(addr, RepairPing{ .Ping = ping }); + const input_ping_packet = try Packet.initFromBincode(addr, RepairPing{ .ping = ping }); - const expected_pong_packet = try Packet.initFromBincode(addr, RepairMessage{ .Pong = pong }); + const expected_pong_packet = try Packet.initFromBincode(addr, RepairMessage{ .pong = pong }); const actual_pong_packet = try ShredReceiver.handlePingInner( allocator, &input_ping_packet, @@ -207,7 +207,7 @@ test "handlePing" { const evil_keypair = try sig.identity.KeyPair.generateDeterministic(.{64} ** 32); var evil_ping = ping; evil_ping.from = sig.core.Pubkey.fromPublicKey(&evil_keypair.public_key); - const evil_ping_packet = try Packet.initFromBincode(addr, RepairPing{ .Ping = evil_ping }); + const evil_ping_packet = try Packet.initFromBincode(addr, RepairPing{ .ping = evil_ping }); try std.testing.expectEqual(null, try ShredReceiver.handlePingInner( allocator, &evil_ping_packet, @@ -270,7 +270,7 @@ fn verifyShredSlots(slot: Slot, parent: Slot, root: Slot) bool { const REPAIR_RESPONSE_SERIALIZED_PING_BYTES = 132; -const RepairPing = union(enum) { Ping: Ping }; +const RepairPing = union(enum) { ping: Ping }; pub const ShredReceiverMetrics = struct { received_count: *Counter, diff --git a/src/shred_network/shred_verifier.zig b/src/shred_network/shred_verifier.zig index 56d7a874a6..2570856482 100644 --- a/src/shred_network/shred_verifier.zig +++ b/src/shred_network/shred_verifier.zig @@ -50,6 +50,16 @@ pub fn runShredVerifier( } } +pub const ShredVerificationFailure = error{ + InsufficientShredSize, + SlotMissing, + SignatureMissing, + SignedDataMissing, + LeaderUnknown, + 
FailedVerification, + FailedCaching, +}; + /// Analogous to [verify_shred_cpu](https://github.com/anza-xyz/agave/blob/83e7d84bcc4cf438905d07279bc07e012a49afd9/ledger/src/sigverify_shreds.rs#L35) fn verifyShred( packet: *const Packet, @@ -57,31 +67,19 @@ fn verifyShred( verified_merkle_roots: *VerifiedMerkleRoots, metrics: Metrics, ) ShredVerificationFailure!void { - const shred = shred_layout.getShred(packet) orelse return error.insufficient_shred_size; - const slot = shred_layout.getSlot(shred) orelse return error.slot_missing; - const signature = shred_layout.getLeaderSignature(shred) orelse return error.signature_missing; - const signed_data = shred_layout.merkleRoot(shred) orelse return error.signed_data_missing; + const shred = shred_layout.getShred(packet) orelse return error.InsufficientShredSize; + const slot = shred_layout.getSlot(shred) orelse return error.SlotMissing; + const signature = shred_layout.getLeaderSignature(shred) orelse return error.SignatureMissing; + const signed_data = shred_layout.merkleRoot(shred) orelse return error.SignedDataMissing; + + if (verified_merkle_roots.get(signed_data) != null) return; - if (verified_merkle_roots.get(signed_data)) |_| { - return; - } metrics.cache_miss_count.inc(); - const leader = leader_schedule.get(slot) orelse return error.leader_unknown; - const valid = signature.verify(leader, &signed_data.data) catch - return error.failed_verification; - if (!valid) return error.failed_verification; - verified_merkle_roots.insert(signed_data, {}) catch return error.failed_caching; -} + const leader = leader_schedule.get(slot) orelse return error.LeaderUnknown; -pub const ShredVerificationFailure = error{ - insufficient_shred_size, - slot_missing, - signature_missing, - signed_data_missing, - leader_unknown, - failed_verification, - failed_caching, -}; + signature.verify(leader, &signed_data.data) catch return error.FailedVerification; + verified_merkle_roots.insert(signed_data, {}) catch return error.FailedCaching; 
+} const Metrics = struct { received_count: *Counter, diff --git a/src/transaction_sender/mock_transfer_generator.zig b/src/transaction_sender/mock_transfer_generator.zig index 376ed86341..8546443c1a 100644 --- a/src/transaction_sender/mock_transfer_generator.zig +++ b/src/transaction_sender/mock_transfer_generator.zig @@ -416,7 +416,7 @@ pub const MockTransferService = struct { random.bytes(&noise); const signature = try from_keypair.sign(signable, noise); - break :blk .{ .data = signature.toBytes() }; + break :blk .fromSignature(signature); }; const signatures = try allocator.dupe(Signature, &.{signature}); diff --git a/src/vm/syscalls/cpi.zig b/src/vm/syscalls/cpi.zig index 0af0cac754..ceb224aad2 100644 --- a/src/vm/syscalls/cpi.zig +++ b/src/vm/syscalls/cpi.zig @@ -146,8 +146,6 @@ fn RefCell(comptime T: type) type { /// [agave] https://github.com/anza-xyz/agave/blob/359d7eb2b68639443d750ffcec0c7e358f138975/programs/bpf_loader/src/syscalls/cpi.rs#L57 fn VmValue(comptime T: type) type { return union(enum) { - const Self = @This(); - vm_address: struct { vm_addr: u64, memory_map: *const MemoryMap, @@ -155,16 +153,19 @@ fn VmValue(comptime T: type) type { }, translated_addr: usize, - pub fn get(self: Self, comptime state: memory.MemoryState) !(switch (state) { + pub fn get(self: @This(), comptime state: memory.MemoryState) !(switch (state) { .constant => *align(1) const T, .mutable => *align(1) T, }) { - switch (self) { - .translated_addr => |ptr| return @ptrFromInt(ptr), - .vm_address => |vma| { - return vma.memory_map.translateType(T, state, vma.vm_addr, vma.check_aligned); - }, - } + return switch (self) { + .translated_addr => |ptr| @ptrFromInt(ptr), + .vm_address => |vma| vma.memory_map.translateType( + T, + state, + vma.vm_addr, + vma.check_aligned, + ), + }; } }; } diff --git a/src/vm/syscalls/ecc.zig b/src/vm/syscalls/ecc.zig index 6f3c26bf50..4964ddbb71 100644 --- a/src/vm/syscalls/ecc.zig +++ b/src/vm/syscalls/ecc.zig @@ -15,6 +15,7 @@ const FeatureSet = 
sig.core.FeatureSet; const Edwards25519 = std.crypto.ecc.Edwards25519; const Ristretto255 = std.crypto.ecc.Ristretto255; +const ed25519 = sig.crypto.ed25519; const Keccak256 = std.crypto.hash.sha3.Keccak256; const Secp256k1 = std.crypto.ecc.Secp256k1; @@ -150,129 +151,6 @@ pub fn curveGroupOp( } } -pub const weak_mul = struct { - inline fn cMov(p: *Edwards25519, a: Edwards25519, c: u64) void { - p.x.cMov(a.x, c); - p.y.cMov(a.y, c); - p.z.cMov(a.z, c); - p.t.cMov(a.t, c); - } - - inline fn pcSelect(comptime n: usize, pc: *const [n]Edwards25519, b: u8) Edwards25519 { - var t = Edwards25519.identityElement; - comptime var i: u8 = 1; - inline while (i < pc.len) : (i += 1) { - cMov(&t, pc[i], ((@as(usize, b ^ i) -% 1) >> 8) & 1); - } - return t; - } - - fn pcMul16(pc: *const [16]Edwards25519, s: [32]u8, comptime vartime: bool) Edwards25519 { - var q = Edwards25519.identityElement; - var pos: usize = 252; - while (true) : (pos -= 4) { - const slot: u4 = @truncate((s[pos >> 3] >> @as(u3, @truncate(pos)))); - if (vartime) { - if (slot != 0) { - q = q.add(pc[slot]); - } - } else { - q = q.add(pcSelect(16, pc, slot)); - } - if (pos == 0) break; - q = q.dbl().dbl().dbl().dbl(); - } - // try q.rejectIdentity(); - return q; - } - - fn precompute(p: Edwards25519, comptime count: usize) [1 + count]Edwards25519 { - var pc: [1 + count]Edwards25519 = undefined; - pc[0] = Edwards25519.identityElement; - pc[1] = p; - var i: usize = 2; - while (i <= count) : (i += 1) { - pc[i] = if (i % 2 == 0) pc[i / 2].dbl() else pc[i - 1].add(p); - } - return pc; - } - - fn slide(s: [32]u8) [2 * 32]i8 { - const reduced = if ((s[s.len - 1] & 0x80) == 0) s else Edwards25519.scalar.reduce(s); - var e: [2 * 32]i8 = undefined; - for (reduced, 0..) 
|x, i| { - e[i * 2 + 0] = @as(i8, @as(u4, @truncate(x))); - e[i * 2 + 1] = @as(i8, @as(u4, @truncate(x >> 4))); - } - // Now, e[0..63] is between 0 and 15, e[63] is between 0 and 7 - var carry: i8 = 0; - for (e[0..63]) |*x| { - x.* += carry; - carry = (x.* + 8) >> 4; - x.* -= carry * 16; - } - e[63] += carry; - // Now, e[*] is between -8 and 8, including e[63] - return e; - } - - const basePointPc = pc: { - @setEvalBranchQuota(10000); - break :pc precompute(Edwards25519.basePoint, 15); - }; - - /// NOTE: Does not perform checks for weak points! - pub fn mul(p: Edwards25519, s: [32]u8) Edwards25519 { - const xpc = if (p.is_base) basePointPc else precompute(p, 15); - // xpc[4].rejectIdentity() catch return error.WeakPublicKey; - return pcMul16(&xpc, s, false); - } - - /// Multiscalar multiplication *IN VARIABLE TIME* for public data - /// Computes ps0*ss0 + ps1*ss1 + ps2*ss2... faster than doing many of these operations individually - /// - /// NOTE: Does not perform checks for weak points! - pub fn mulMulti( - comptime count: usize, - ps: [count]Edwards25519, - ss: [count][32]u8, - ) Edwards25519 { - var pcs: [count][9]Edwards25519 = undefined; - - var bpc: [9]Edwards25519 = undefined; - @memcpy(&bpc, basePointPc[0..bpc.len]); - - for (ps, 0..) |p, i| { - if (p.is_base) { - pcs[i] = bpc; - } else { - pcs[i] = precompute(p, 8); - // pcs[i][4].rejectIdentity() catch return error.WeakPublicKey; - } - } - var es: [count][2 * 32]i8 = undefined; - for (ss, 0..) |s, i| { - es[i] = slide(s); - } - var q = Edwards25519.identityElement; - var pos: usize = 2 * 32 - 1; - while (true) : (pos -= 1) { - for (es, 0..) 
|e, i| { - const slot = e[pos]; - if (slot > 0) { - q = q.add(pcs[i][@as(usize, @intCast(slot))]); - } else if (slot < 0) { - q = q.sub(pcs[i][@as(usize, @intCast(-slot))]); - } - } - if (pos == 0) break; - q = q.dbl().dbl().dbl().dbl(); - } - // try q.rejectIdentity(); - return q; - } -}; - fn groupOp(comptime T: type, group_op: GroupOp, left: [32]u8, right: [32]u8) !T { switch (group_op) { .add, .subtract => { @@ -292,11 +170,7 @@ fn groupOp(comptime T: type, group_op: GroupOp, left: [32]u8, right: [32]u8) !T .multiply => { try Edwards25519.scalar.rejectNonCanonical(left); const input_point = try T.fromBytes(right); - return switch (T) { - Edwards25519 => weak_mul.mul(input_point, left), - Ristretto255 => .{ .p = weak_mul.mul(input_point.p, left) }, - else => unreachable, - }; + return ed25519.mul(T == Ristretto255, input_point, left); }, } } @@ -359,7 +233,7 @@ pub fn curveMultiscalarMul( }; } - const msm = sig.crypto.ed25519.pippenger.mulMulti( + const msm = sig.crypto.ed25519.mulMultiRuntime( 512, true, id == .ristretto, diff --git a/src/zksdk/el_gamal.zig b/src/zksdk/el_gamal.zig index 90f6f4283a..b3e720c230 100644 --- a/src/zksdk/el_gamal.zig +++ b/src/zksdk/el_gamal.zig @@ -19,6 +19,7 @@ const sig = @import("../sig.zig"); const Ristretto255 = std.crypto.ecc.Ristretto255; const Edwards25519 = std.crypto.ecc.Edwards25519; const Scalar = Edwards25519.scalar.Scalar; +const ed25519 = sig.crypto.ed25519; const pedersen = sig.zksdk.pedersen; pub const Pubkey = struct { @@ -28,11 +29,11 @@ pub const Pubkey = struct { pub fn fromSecret(secret: Keypair.Secret) Pubkey { const scalar = secret.scalar; std.debug.assert(!scalar.isZero()); - // unreachable because `H` is known to not be an identity and `scalar` cannot be zero. 
- return .{ .point = Ristretto255.mul( + return .{ .point = ed25519.mul( + true, pedersen.H, scalar.invert().toBytes(), - ) catch unreachable }; + ) }; } pub fn fromBytes(bytes: [32]u8) !Pubkey { diff --git a/src/zksdk/merlin.zig b/src/zksdk/merlin.zig index e35021035d..7f74658a31 100644 --- a/src/zksdk/merlin.zig +++ b/src/zksdk/merlin.zig @@ -3,7 +3,11 @@ //! https://merlin.cool/use/protocol.html const std = @import("std"); +const builtin = @import("builtin"); const sig = @import("../sig.zig"); + +const zksdk = sig.zksdk; + const Keccak1600 = std.crypto.core.keccak.KeccakF(1600); const Ed25519 = std.crypto.ecc.Edwards25519; const Scalar = Ed25519.scalar.Scalar; @@ -186,36 +190,141 @@ pub const Strobe128 = struct { pub const Transcript = struct { strobe: Strobe128, - pub fn init(comptime label: []const u8) Transcript { - var transcript: Transcript = .{ - .strobe = Strobe128.init("Merlin v1.0"), - }; - transcript.appendMessage("dom-sep", label); + const DomainSeperator = enum { + @"zero-ciphertext-instruction", + @"zero-ciphertext-proof", + @"pubkey-validity-instruction", + @"pubkey-proof", + @"percentage-with-cap-proof", + @"percentage-with-cap-instruction", + @"ciphertext-commitment-equality-proof", + @"ciphertext-commitment-equality-instruction", + @"ciphertext-ciphertext-equality-proof", + @"ciphertext-ciphertext-equality-instruction", + + @"inner-product", + @"range-proof", + @"batched-range-proof-instruction", + + @"validity-proof", + @"batched-validity-proof", + + @"grouped-ciphertext-validity-2-handles-instruction", + @"batched-grouped-ciphertext-validity-2-handles-instruction", + + @"grouped-ciphertext-validity-3-handles-instruction", + @"batched-grouped-ciphertext-validity-3-handles-instruction", + }; + const TranscriptInput = struct { + label: []const u8, + message: Message, + }; + + const Message = union(enum) { + bytes: []const u8, + + point: Ristretto255, + pubkey: zksdk.el_gamal.Pubkey, + scalar: Scalar, + ciphertext: zksdk.el_gamal.Ciphertext, + 
commitment: zksdk.pedersen.Commitment, + u64: u64, + + grouped_2: zksdk.el_gamal.GroupedElGamalCiphertext(2), + grouped_3: zksdk.el_gamal.GroupedElGamalCiphertext(3), + }; + + pub fn init(comptime seperator: DomainSeperator, inputs: []const TranscriptInput) Transcript { + var transcript: Transcript = .{ .strobe = Strobe128.init("Merlin v1.0") }; + transcript.appendDomSep(seperator); + for (inputs) |input| transcript.appendMessage(input.label, input.message); return transcript; } - /// NOTE: be very careful with this function, there are only a specific few - /// usages of it. generally speaking, use the a helper function if it exists. - pub fn appendMessage( - self: *Transcript, - comptime label: []const u8, - message: []const u8, - ) void { + pub fn initTest(label: []const u8) Transcript { + comptime if (!builtin.is_test) @compileError("should only be used during tests"); + var transcript: Transcript = .{ .strobe = Strobe128.init("Merlin v1.0") }; + transcript.appendBytes("dom-sep", label); + return transcript; + } + + fn appendBytes(self: *Transcript, label: []const u8, bytes: []const u8) void { var data_len: [4]u8 = undefined; - std.mem.writeInt(u32, &data_len, @intCast(message.len), .little); + std.mem.writeInt(u32, &data_len, @intCast(bytes.len), .little); self.strobe.metaAd(label, false); self.strobe.metaAd(&data_len, true); - self.strobe.ad(message, false); + self.strobe.ad(bytes, false); } - pub fn appendDomSep(self: *Transcript, comptime label: []const u8) void { - self.appendMessage("dom-sep", label); + fn appendMessage(self: *Transcript, label: []const u8, message: Message) void { + var buffer: [64]u8 = @splat(0); + const bytes: []const u8 = switch (message) { + .bytes => |b| b, + .point => |*point| &point.toBytes(), + .pubkey => |*pubkey| &pubkey.toBytes(), + .scalar => |*scalar| &scalar.toBytes(), + .ciphertext => |*ct| b: { + @memcpy(buffer[0..32], &ct.commitment.point.toBytes()); + @memcpy(buffer[32..64], &ct.handle.point.toBytes()); + break :b 
&buffer; + }, + .commitment => |*c| &c.toBytes(), + .u64 => |x| b: { + std.mem.writeInt(u64, buffer[0..8], x, .little); + break :b buffer[0..8]; + }, + inline .grouped_2, .grouped_3 => |*g| &g.toBytes(), + }; + self.appendBytes(label, bytes); + } + + pub inline fn append( + self: *Transcript, + comptime session: *Session, + comptime t: Input.Type, + comptime label: []const u8, + data: t.Data(), + ) if (t == .validate_point) error{IdentityElement}!void else void { + // if validate_point fails to validate, we no longer want to check the contract + // because the function calling append will now return early. + errdefer session.cancel(); + + if (t == .bytes and !builtin.is_test) + @compileError("message type `bytes` only allowed in tests"); + + // assert correctness + const input = comptime session.nextInput(t, label); + if (t == .validate_point) try data.rejectIdentity(); + + // add the message + self.appendMessage(input.label, @unionInit( + Message, + @tagName(switch (t) { + .validate_point => .point, + else => t, + }), + data, + )); } - pub fn challengeBytes( + /// Helper function to be used in proof creation. We often need to test what will + /// happen if points are zeroed, and to make sure that the verification fails. + /// Shouldn't be used outside of the `init` functions. 
+ pub inline fn appendNoValidate( self: *Transcript, + comptime session: *Session, comptime label: []const u8, + point: Ristretto255, + ) void { + const input = comptime session.nextInput(.validate_point, label); + point.rejectIdentity() catch {}; // ignore the error + self.appendMessage(input.label, .{ .point = point }); + } + + fn challengeBytes( + self: *Transcript, + label: []const u8, destination: []u8, ) void { var data_len: [4]u8 = undefined; @@ -226,75 +335,134 @@ pub const Transcript = struct { self.strobe.prf(destination, false); } - pub fn challengeScalar( + pub inline fn challengeScalar( self: *Transcript, + comptime session: *Session, comptime label: []const u8, ) Scalar { - var buffer: [64]u8 = .{0} ** 64; - self.challengeBytes(label, &buffer); + const input = comptime session.nextInput(.challenge, label); + var buffer: [64]u8 = @splat(0); + self.challengeBytes(input.label, &buffer); // Specifically need reduce64 instead of Scalar.fromBytes64, since // we need the Barret reduction to be done with 10 limbs, not 5. 
const compressed = Ed25519.scalar.reduce64(buffer); return Scalar.fromBytes(compressed); } - pub fn validateAndAppendPoint( - self: *Transcript, - comptime label: []const u8, - point: Ristretto255, - ) !void { - try point.rejectIdentity(); - self.appendPoint(label, point); - } - - // helper functions + // domain seperation helpers - pub fn appendPoint(self: *Transcript, comptime label: []const u8, point: Ristretto255) void { - self.appendMessage(label, &point.toBytes()); + pub fn appendDomSep(self: *Transcript, comptime seperator: DomainSeperator) void { + self.appendBytes("dom-sep", @tagName(seperator)); } - pub fn appendScalar(self: *Transcript, comptime label: []const u8, scalar: Scalar) void { - self.appendMessage(label, &scalar.toBytes()); - } - - pub fn appendPubkey( + pub fn appendHandleDomSep( self: *Transcript, - comptime label: []const u8, - pubkey: sig.zksdk.ElGamalPubkey, + comptime mode: enum { batched, unbatched }, + comptime handles: enum { two, three }, ) void { - self.appendPoint(label, pubkey.point); + self.appendDomSep(switch (mode) { + .batched => .@"batched-validity-proof", + .unbatched => .@"validity-proof", + }); + self.appendMessage("handles", .{ .u64 = switch (handles) { + .two => 2, + .three => 3, + } }); } - pub fn appendCiphertext( + pub fn appendRangeProof( self: *Transcript, - comptime label: []const u8, - ciphertext: sig.zksdk.ElGamalCiphertext, + comptime mode: enum { range, inner }, + n: comptime_int, ) void { - var buffer: [64]u8 = .{0} ** 64; - @memcpy(buffer[0..32], &ciphertext.commitment.point.toBytes()); - @memcpy(buffer[32..64], &ciphertext.handle.point.toBytes()); - self.appendMessage(label, &buffer); + self.appendDomSep(switch (mode) { + .range => .@"range-proof", + .inner => .@"inner-product", + }); + self.appendMessage("n", .{ .u64 = n }); } - pub fn appendCommitment( - self: *Transcript, - comptime label: []const u8, - commitment: sig.zksdk.pedersen.Commitment, - ) void { - self.appendMessage(label, 
&commitment.point.toBytes()); - } + // sessions + + pub const Input = struct { + label: []const u8, + type: Type, + + const Type = enum { + bytes, + scalar, + challenge, + point, + validate_point, + pubkey, + + pub fn Data(comptime t: Type) type { + return switch (t) { + .bytes => []const u8, + .scalar => Scalar, + .validate_point, .point => Ristretto255, + .pubkey => zksdk.el_gamal.Pubkey, + .challenge => unreachable, // call `challenge*` + }; + } + }; - pub fn appendU64(self: *Transcript, comptime label: []const u8, x: u64) void { - var buffer: [8]u8 = .{0} ** 8; - std.mem.writeInt(u64, &buffer, x, .little); - self.appendMessage(label, &buffer); + fn check(self: Input, t: Type, label: []const u8) void { + std.debug.assert(self.type == t); + std.debug.assert(std.mem.eql(u8, self.label, label)); + } + }; + + pub const Contract = []const Input; + + pub const Session = struct { + i: u8, + contract: Contract, + err: bool, // if validate_point errors, we skip the finish() check + + pub inline fn nextInput(comptime self: *Session, t: Input.Type, label: []const u8) Input { + comptime { + defer self.i += 1; + const input = self.contract[self.i]; + input.check(t, label); + return input; + } + } + + pub inline fn finish(comptime self: *Session) void { + // For performance, we have certain computations (specifically in `init` functions) + // which skip the last parts of transcript when they aren't needed (i.e ciphertext_ciphertext proof). + // + // By performing this check, we still ensure that they do those extra computations when in Debug mode, + // but are allowed to skip them in a release build. 
+ if (builtin.mode == .Debug and !self.err and self.i != self.contract.len) { + @compileError("contract unfulfilled"); + } + } + + inline fn cancel(comptime self: *Session) void { + comptime self.err = true; + } + }; + + pub inline fn getSession(comptime contract: []const Input) Session { + comptime { + // contract should always end in a challenge + const last_contract = contract[contract.len - 1]; + std.debug.assert(last_contract.type == .challenge); + return .{ .i = 0, .contract = contract, .err = false }; + } } }; test "equivalence" { - var transcript = Transcript.init("test protocol"); + var transcript = Transcript.initTest("test protocol"); - transcript.appendMessage("some label", "some data"); + comptime var session = Transcript.getSession(&.{ + .{ .label = "some label", .type = .bytes }, + .{ .label = "challenge", .type = .challenge }, + }); + transcript.append(&session, .bytes, "some label", "some data"); var bytes: [32]u8 = undefined; transcript.challengeBytes("challenge", &bytes); diff --git a/src/zksdk/pedersen.zig b/src/zksdk/pedersen.zig index c9e71a96b8..4d322b139b 100644 --- a/src/zksdk/pedersen.zig +++ b/src/zksdk/pedersen.zig @@ -4,7 +4,7 @@ const sig = @import("../sig.zig"); const Ristretto255 = std.crypto.ecc.Ristretto255; const Edwards25519 = std.crypto.ecc.Edwards25519; const Scalar = Edwards25519.scalar.Scalar; -const weak_mul = sig.vm.syscalls.ecc.weak_mul; +const ed25519 = sig.crypto.ed25519; const Pubkey = sig.zksdk.ElGamalPubkey; /// Pedersen basepoint. 
@@ -72,8 +72,8 @@ pub const DecryptHandle = struct { point: Ristretto255, pub fn init(pubkey: *const Pubkey, opening: *const Opening) DecryptHandle { - const point = weak_mul.mul(pubkey.point.p, opening.scalar.toBytes()); - return .{ .point = .{ .p = point } }; + const point = ed25519.mul(true, pubkey.point, opening.scalar.toBytes()); + return .{ .point = point }; } pub fn fromBytes(bytes: [32]u8) !DecryptHandle { @@ -100,12 +100,12 @@ pub const DecryptHandle = struct { pub fn init(s: Scalar, opening: *const Opening) Commitment { // G and H are not identities and opening.scalar cannot be zero, // so this function cannot return an error. - const point = Edwards25519.mulMulti( + const point = ed25519.mulMulti( 2, - .{ G.p, H.p }, + .{ G, H }, .{ s.toBytes(), opening.scalar.toBytes() }, - ) catch unreachable; - return .{ .point = .{ .p = point } }; + ); + return .{ .point = point }; } pub fn initScalar(s: Scalar) struct { Commitment, Opening } { diff --git a/src/zksdk/range_proof/bulletproofs.zig b/src/zksdk/range_proof/bulletproofs.zig index 42f275dc36..80150aa370 100644 --- a/src/zksdk/range_proof/bulletproofs.zig +++ b/src/zksdk/range_proof/bulletproofs.zig @@ -5,16 +5,16 @@ //! [paper](https://eprint.iacr.org/2017/1066) (Section 4.3). 
const std = @import("std"); +const builtin = @import("builtin"); const sig = @import("../../sig.zig"); const table = @import("table"); pub const InnerProductProof = @import("ipp.zig").Proof; // pub so tests can run -const pippenger = sig.crypto.ed25519.pippenger; const pedersen = sig.zksdk.pedersen; const Edwards25519 = std.crypto.ecc.Edwards25519; const Ristretto255 = std.crypto.ecc.Ristretto255; const Scalar = std.crypto.ecc.Edwards25519.scalar.Scalar; -const weak_mul = sig.vm.syscalls.ecc.weak_mul; +const ed25519 = sig.crypto.ed25519; const Transcript = sig.zksdk.Transcript; const ProofType = sig.runtime.program.zk_elgamal.ProofType; @@ -29,6 +29,30 @@ pub fn Proof(bit_size: comptime_int) type { const logn: u64 = std.math.log2_int(u64, bit_size); const max = (2 * bit_size) + (2 * logn) + 5 + 8; + const contract: Transcript.Contract = &[_]Transcript.Input{ + .{ .label = "A", .type = .validate_point }, + .{ .label = "S", .type = .validate_point }, + .{ .label = "y", .type = .challenge }, + .{ .label = "z", .type = .challenge }, + + .{ .label = "T_1", .type = .validate_point }, + .{ .label = "T_2", .type = .validate_point }, + .{ .label = "x", .type = .challenge }, + + .{ .label = "t_x", .type = .scalar }, + .{ .label = "t_x_blinding", .type = .scalar }, + .{ .label = "e_blinding", .type = .scalar }, + .{ .label = "w", .type = .challenge }, + + .{ .label = "c", .type = .challenge }, + + // InnerProductProof(bit_size).contract runs here + + .{ .label = "ipp_a", .type = .scalar }, + .{ .label = "ipp_b", .type = .scalar }, + .{ .label = "d", .type = .challenge }, + }; + return struct { A: Ristretto255, S: Ristretto255, @@ -123,8 +147,7 @@ pub fn Proof(bit_size: comptime_int) type { } std.debug.assert(nm == bit_size); - transcript.appendDomSep("range-proof"); - transcript.appendU64("n", bit_size); + transcript.appendRangeProof(.range, bit_size); // bit-decompose values and generate their Pedersen vector commitment const a_blinding: Scalar = .random(); @@ -135,8 +158,12 
@@ pub fn Proof(bit_size: comptime_int) type { for (0..n) |j| { // init functions aren't exposed, so doesn't need to be constant time. const v = (amount >> @intCast(j)) & 0b1 != 0; - const point = if (v) table.G[bit] else table.H[bit].neg(); - A = A.add(.{ .p = point }); + const point: Ristretto255 = if (v) + table.G[bit] + else + // TODO: use ristretto neg() alias when added to stdlib + .{ .p = table.H[bit].p.neg() }; + A = A.add(point); bit += 1; } } @@ -149,18 +176,21 @@ pub fn Proof(bit_size: comptime_int) type { } const s_blinding = Scalar.random(); - const S: Ristretto255 = .{ .p = Edwards25519.mulMulti( + const S = sig.crypto.ed25519.mulMulti( 1 + bit_size * 2, - .{pedersen.H.p} ++ table.G[0..bit_size].* ++ table.H[0..bit_size].*, + .{pedersen.H} ++ table.G[0..bit_size].* ++ table.H[0..bit_size].*, .{s_blinding.toBytes()} ++ s_L ++ s_R, - ) catch unreachable }; + ); - transcript.appendPoint("A", A); - transcript.appendPoint("S", S); + comptime var session = Transcript.getSession(contract); + defer session.finish(); + + transcript.appendNoValidate(&session, "A", A); + transcript.appendNoValidate(&session, "S", S); // y and z are used to merge multiple inner product relations into one inner product - const y = transcript.challengeScalar("y"); - const z = transcript.challengeScalar("z"); + const y = transcript.challengeScalar(&session, "y"); + const z = transcript.challengeScalar(&session, "z"); var l_poly: VecPoly1 = .zero; var r_poly: VecPoly1 = .zero; @@ -196,12 +226,12 @@ pub fn Proof(bit_size: comptime_int) type { const T_1, const t_1_blinding = pedersen.initScalar(t_poly.b); const T_2, const t_2_blinding = pedersen.initScalar(t_poly.c); - transcript.appendPoint("T_1", T_1.point); - transcript.appendPoint("T_2", T_2.point); + transcript.appendNoValidate(&session, "T_1", T_1.point); + transcript.appendNoValidate(&session, "T_2", T_2.point); // evaluate t(x) on challenge x and homomorphically compute the openings for // z^2 * V_1 + z^3 * V_2 + ... 
+ z^{m+1} * V_m + delta(y, z)*G + x*T_1 + x^2*T_2 - const x = transcript.challengeScalar("x"); + const x = transcript.challengeScalar(&session, "x"); var agg_opening = ZERO; var agg_scalar = z; @@ -219,27 +249,27 @@ pub fn Proof(bit_size: comptime_int) type { const t_x = t_poly.evaluate(x); const t_x_blinding = t_binding_poly.evaluate(x); - transcript.appendScalar("t_x", t_x); - transcript.appendScalar("t_x_blinding", t_x_blinding); + transcript.append(&session, .scalar, "t_x", t_x); + transcript.append(&session, .scalar, "t_x_blinding", t_x_blinding); // homomorphically compuate the openings for A + x*S const e_blinding = s_blinding.mul(x).add(a_blinding); - transcript.appendScalar("e_blinding", e_blinding); + transcript.append(&session, .scalar, "e_blinding", e_blinding); // compute the inner product argument on the commitment: // P = + + *Q - const w = transcript.challengeScalar("w"); - const Q = weak_mul.mul(pedersen.G.p, w.toBytes()); + const w = transcript.challengeScalar(&session, "w"); + const Q = ed25519.straus.mulByKnown(pedersen.G, w.toBytes()); const G_factors: [bit_size]Scalar = @splat(ONE); const H_factors = genPowers(bit_size, y.invert()); - _ = transcript.challengeScalar("c"); + _ = transcript.challengeScalar(&session, "c"); var l_vec = l_poly.eval(x); var r_vec = r_poly.eval(x); const ipp_proof = InnerProductProof(bit_size).init( - .{ .p = Q }, + Q, &G_factors, &H_factors, &l_vec, @@ -247,6 +277,12 @@ pub fn Proof(bit_size: comptime_int) type { transcript, ); + if (builtin.mode == .Debug) { + transcript.append(&session, .scalar, "ipp_a", ipp_proof.a); + transcript.append(&session, .scalar, "ipp_b", ipp_proof.b); + _ = transcript.challengeScalar(&session, "d"); + } + return .{ .A = A, .S = S, @@ -269,27 +305,29 @@ pub fn Proof(bit_size: comptime_int) type { ) !void { std.debug.assert(commitments.len == bit_lengths.len); - transcript.appendDomSep("range-proof"); - transcript.appendU64("n", bit_size); + transcript.appendRangeProof(.range, bit_size); + 
+ comptime var session = Transcript.getSession(contract); + defer session.finish(); - try transcript.validateAndAppendPoint("A", self.A); - try transcript.validateAndAppendPoint("S", self.S); + try transcript.append(&session, .validate_point, "A", self.A); + try transcript.append(&session, .validate_point, "S", self.S); - const y = transcript.challengeScalar("y"); - const z = transcript.challengeScalar("z"); + const y = transcript.challengeScalar(&session, "y"); + const z = transcript.challengeScalar(&session, "z"); - try transcript.validateAndAppendPoint("T_1", self.T_1); - try transcript.validateAndAppendPoint("T_2", self.T_2); + try transcript.append(&session, .validate_point, "T_1", self.T_1); + try transcript.append(&session, .validate_point, "T_2", self.T_2); - const x = transcript.challengeScalar("x"); + const x = transcript.challengeScalar(&session, "x"); - transcript.appendScalar("t_x", self.t_x); - transcript.appendScalar("t_x_blinding", self.t_x_blinding); - transcript.appendScalar("e_blinding", self.e_blinding); + transcript.append(&session, .scalar, "t_x", self.t_x); + transcript.append(&session, .scalar, "t_x_blinding", self.t_x_blinding); + transcript.append(&session, .scalar, "e_blinding", self.e_blinding); - const w = transcript.challengeScalar("w"); + const w = transcript.challengeScalar(&session, "w"); // only left for legacy reasons, use `d` instead - _ = transcript.challengeScalar("c"); + _ = transcript.challengeScalar(&session, "c"); const x_sq, // const x_inv_sq, // @@ -299,10 +337,10 @@ pub fn Proof(bit_size: comptime_int) type { const a = self.ipp.a; const b = self.ipp.b; - transcript.appendScalar("ipp_a", a); - transcript.appendScalar("ipp_b", b); + transcript.append(&session, .scalar, "ipp_a", a); + transcript.append(&session, .scalar, "ipp_b", b); - const d = transcript.challengeScalar("d"); + const d = transcript.challengeScalar(&session, "d"); // (numbers use u128 as the example) // points scalars @@ -345,8 +383,8 @@ pub fn 
Proof(bit_size: comptime_int) type { for (self.ipp.L_vec) |l| points.appendAssumeCapacity(l); for (self.ipp.R_vec) |r| points.appendAssumeCapacity(r); - points.appendSliceAssumeCapacity(table.H_ristretto[0..bit_size]); - points.appendSliceAssumeCapacity(table.G_ristretto[0..bit_size]); + points.appendSliceAssumeCapacity(table.H[0..bit_size]); + points.appendSliceAssumeCapacity(table.G[0..bit_size]); const d_txb = d.mul(self.t_x_blinding); const H = Edwards25519.scalar.neg(d_txb.add(self.e_blinding).toBytes()); @@ -412,7 +450,7 @@ pub fn Proof(bit_size: comptime_int) type { scalars.appendAssumeCapacity(basepoint_scalar.toBytes()); // G points.appendAssumeCapacity(pedersen.G); - const check: Ristretto255 = pippenger.mulMulti( + const check: Ristretto255 = sig.crypto.ed25519.mulMultiRuntime( max, false, true, @@ -658,18 +696,14 @@ pub fn Data(bit_size: comptime_int) type { return @bitCast(self); } + // sig fmt: off fn newTranscript(self: Context) Transcript { - var transcript = Transcript.init("batched-range-proof-instruction"); - transcript.appendMessage( - "commitments", - std.mem.sliceAsBytes(&self.commitments), - ); - transcript.appendMessage( - "bit-lengths", - std.mem.sliceAsBytes(&self.bit_lengths), - ); - return transcript; + return .init(.@"batched-range-proof-instruction", &.{ + .{ .label = "commitments", .message = .{ .bytes = std.mem.sliceAsBytes(&self.commitments) } }, + .{ .label = "bit-lengths", .message = .{ .bytes = std.mem.sliceAsBytes(&self.bit_lengths) } }, + }); } + // sig fmt: on }; }; } @@ -699,8 +733,8 @@ pub fn genPowers(comptime n: usize, x: Scalar) [n]Scalar { test "single rangeproof" { const commitment, const opening = pedersen.initValue(u64, 55); - var creation_transcript = Transcript.init("Test"); - var verification_transcript = Transcript.init("Test"); + var creation_transcript = Transcript.initTest("Test"); + var verification_transcript = Transcript.initTest("Test"); const proof = try Proof(32).init( &.{55}, @@ -721,8 +755,8 @@ test 
"aggregated rangeproof" { const comm2, const opening2 = pedersen.initValue(u64, 77); const comm3, const opening3 = pedersen.initValue(u64, 99); - var creation_transcript = Transcript.init("Test"); - var verification_transcript = Transcript.init("Test"); + var creation_transcript = Transcript.initTest("Test"); + var verification_transcript = Transcript.initTest("Test"); const proof = try Proof(128).init( &.{ 55, 77, 99 }, @@ -753,7 +787,7 @@ test "proof string" { const proof = try Proof(128).fromBase64(proof_string); // zig fmt: on - var verification_transcript = Transcript.init("Test"); + var verification_transcript = Transcript.initTest("Test"); try proof.verify( &.{ commitment_1, commitment_2, commitment_3 }, &.{ 64, 32, 32 }, diff --git a/src/zksdk/range_proof/ipp.zig b/src/zksdk/range_proof/ipp.zig index 15ec770423..d41ea2ba3a 100644 --- a/src/zksdk/range_proof/ipp.zig +++ b/src/zksdk/range_proof/ipp.zig @@ -6,9 +6,8 @@ const Edwards25519 = std.crypto.ecc.Edwards25519; const Sha3 = std.crypto.hash.sha3.Sha3_512; const Ristretto255 = std.crypto.ecc.Ristretto255; const Scalar = std.crypto.ecc.Edwards25519.scalar.Scalar; -const weak_mul = sig.vm.syscalls.ecc.weak_mul; +const ed25519 = sig.crypto.ed25519; const Transcript = sig.zksdk.Transcript; -const pippenger = sig.crypto.ed25519.pippenger; const bp = sig.zksdk.bulletproofs; /// Inner-Product (Sub)Proof @@ -36,7 +35,7 @@ const bp = sig.zksdk.bulletproofs; /// - Bulletproofs paper (Bünz et al., 2018): https://eprint.iacr.org/2017/1066 /// - Dalek Bulletproofs implementation and docs: https://doc.dalek.rs/bulletproofs/ /// - Agave IPP implementation: https://github.com/anza-xyz/agave/blob/93699947720534741b2b4d9b6e1696d81e386dcc/zk-sdk/src/range_proof/inner_product.rs -pub fn Proof(bit_size: comptime_int) type { +pub fn Proof(comptime bit_size: u64) type { const logn: u64 = std.math.log2_int(u64, bit_size); const max_elements = bit_size * 2 + // g_times_a_times_s and h_times_b_div_s @@ -53,6 +52,15 @@ pub fn 
Proof(bit_size: comptime_int) type { const Self = @This(); pub const BYTE_LEN = (2 * logn * 32) + 64; + pub const contract: Transcript.Contract = c: { + const triple: [3]Transcript.Input = .{ + .{ .label = "L", .type = .validate_point }, + .{ .label = "R", .type = .validate_point }, + .{ .label = "u", .type = .challenge }, + }; + break :c (&triple) ** logn; + }; + /// Modifies the mutable array pointers in undefined ways, so don't rely on the value /// of them after `init`. pub fn init( @@ -65,21 +73,23 @@ pub fn Proof(bit_size: comptime_int) type { ) Self { var G_buffer = table.G[0..bit_size].*; var H_buffer = table.H[0..bit_size].*; - var G: []Edwards25519 = &G_buffer; - var H: []Edwards25519 = &H_buffer; + var G: []Ristretto255 = &G_buffer; + var H: []Ristretto255 = &H_buffer; var a: []Scalar = a_vec; var b: []Scalar = b_vec; - transcript.appendDomSep("inner-product"); - transcript.appendU64("n", bit_size); + transcript.appendRangeProof(.inner, bit_size); + + comptime var session = Transcript.getSession(contract); + defer session.finish(); var L_vec: std.BoundedArray(Ristretto255, logn) = .{}; var R_vec: std.BoundedArray(Ristretto255, logn) = .{}; - var n: u64 = bit_size; - while (n != 1) { - const first_round = n == bit_size; - n = n / 2; + const rounds = @ctz(bit_size); + inline for (0..rounds) |i| { + const first_round = (i == 0); + const n = bit_size >> @intCast(i + 1); const a_L = a[0..n]; const a_R = a[n..]; @@ -96,7 +106,7 @@ pub fn Proof(bit_size: comptime_int) type { // after the first round, the size has been divded by two, meaning we // only need to have bit_size / 2 + 1 elements in the arrays. var scalars: std.BoundedArray([32]u8, bit_size + 1) = .{}; - var points: std.BoundedArray(Edwards25519, bit_size + 1) = .{}; + var points: std.BoundedArray(Ristretto255, bit_size + 1) = .{}; if (first_round) { for (a_L, G_factors[n .. 
n * 2]) |ai, gi| { @@ -113,17 +123,15 @@ pub fn Proof(bit_size: comptime_int) type { for (G_R) |gi| points.appendAssumeCapacity(gi); for (H_L) |hi| points.appendAssumeCapacity(hi); - points.appendAssumeCapacity(Q.p); - - const L: Ristretto255 = .{ - .p = pippenger.mulMulti( - 257, // 128 + 128 + 1 - false, - false, - points.constSlice(), - scalars.constSlice(), - ), - }; + points.appendAssumeCapacity(Q); + + const L = sig.crypto.ed25519.pippenger.mulMultiRuntime( + 257, // 128 + 128 + 1 + false, + true, + points.constSlice(), + scalars.constSlice(), + ); // reset the arrays points.len = 0; @@ -144,48 +152,46 @@ pub fn Proof(bit_size: comptime_int) type { for (G_L) |gi| points.appendAssumeCapacity(gi); for (H_R) |hi| points.appendAssumeCapacity(hi); - points.appendAssumeCapacity(Q.p); - - const R: Ristretto255 = .{ - .p = pippenger.mulMulti( - 257, // 128 + 128 + 1 - false, - false, - points.constSlice(), - scalars.constSlice(), - ), - }; + points.appendAssumeCapacity(Q); + + const R = sig.crypto.ed25519.pippenger.mulMultiRuntime( + 257, // 128 + 128 + 1 + false, + true, + points.constSlice(), + scalars.constSlice(), + ); L_vec.appendAssumeCapacity(L); R_vec.appendAssumeCapacity(R); - transcript.appendPoint("L", L); - transcript.appendPoint("R", R); + transcript.appendNoValidate(&session, "L", L); + transcript.appendNoValidate(&session, "R", R); - const u = transcript.challengeScalar("u"); + const u = transcript.challengeScalar(&session, "u"); const u_inv = u.invert(); - for (0..n) |i| { - a_L[i] = a_L[i].mul(u).add(u_inv.mul(a_R[i])); - b_L[i] = b_L[i].mul(u_inv).add(u.mul(b_R[i])); + for (0..n) |j| { + a_L[j] = a_L[j].mul(u).add(u_inv.mul(a_R[j])); + b_L[j] = b_L[j].mul(u_inv).add(u.mul(b_R[j])); // For the first round, unroll the Hprime = H * y_inv scalar multiplications // into multiscalar multiplications, for performance. 
// zig fmt: off - const first = if (first_round) u_inv.mul(G_factors[i]) else u_inv; - const second = if (first_round) u.mul(G_factors[n + i]) else u; - const third = if (first_round) u.mul(H_factors[i]) else u; - const fourth = if (first_round) u_inv.mul(H_factors[n + i]) else u_inv; + const first = if (first_round) u_inv.mul(G_factors[j]) else u_inv; + const second = if (first_round) u.mul(G_factors[n + j]) else u; + const third = if (first_round) u.mul(H_factors[j]) else u; + const fourth = if (first_round) u_inv.mul(H_factors[n + j]) else u_inv; // zig fmt: on - G_L[i] = weak_mul.mulMulti( + G_L[j] = ed25519.mulMulti( 2, - .{ G_L[i], G_R[i] }, + .{ G_L[j], G_R[j] }, .{ first.toBytes(), second.toBytes() }, ); - H_L[i] = weak_mul.mulMulti( + H_L[j] = ed25519.mulMulti( 2, - .{ H_L[i], H_R[i] }, + .{ H_L[j], H_R[j] }, .{ third.toBytes(), fourth.toBytes() }, ); } @@ -215,7 +221,9 @@ pub fn Proof(bit_size: comptime_int) type { Q: Ristretto255, transcript: *Transcript, ) !void { - const u_sq, const u_inv_sq, const s = try self.verificationScalars(transcript); + const u_sq, // + const u_inv_sq, // + const s = try self.verificationScalars(transcript); var scalars: std.BoundedArray([32]u8, max_elements) = .{}; var points: std.BoundedArray(Ristretto255, max_elements) = .{}; @@ -241,12 +249,12 @@ pub fn Proof(bit_size: comptime_int) type { } points.appendAssumeCapacity(Q); - for (table.G[0..bit_size]) |g| points.appendAssumeCapacity(.{ .p = g }); - for (table.H[0..bit_size]) |h| points.appendAssumeCapacity(.{ .p = h }); + for (table.G[0..bit_size]) |g| points.appendAssumeCapacity(g); + for (table.H[0..bit_size]) |h| points.appendAssumeCapacity(h); for (self.L_vec) |l| points.appendAssumeCapacity(l); for (self.R_vec) |r| points.appendAssumeCapacity(r); - const check = pippenger.mulMulti( + const check = sig.crypto.ed25519.mulMultiRuntime( max_elements, false, true, @@ -264,15 +272,17 @@ pub fn Proof(bit_size: comptime_int) type { [logn]Scalar, // u_inv_sq [bit_size]Scalar, 
// s } { - transcript.appendDomSep("inner-product"); - transcript.appendU64("n", bit_size); + transcript.appendRangeProof(.inner, bit_size); + + comptime var session = Transcript.getSession(contract); + defer session.finish(); // 1. Recompute x_k,...,x_1 based on the proof transcript var challenges: [logn]Scalar = undefined; - for (&challenges, self.L_vec, self.R_vec) |*c, L, R| { - try transcript.validateAndAppendPoint("L", L); - try transcript.validateAndAppendPoint("R", R); - c.* = transcript.challengeScalar("u"); + inline for (&challenges, self.L_vec, self.R_vec) |*c, L, R| { + try transcript.append(&session, .validate_point, "L", L); + try transcript.append(&session, .validate_point, "R", R); + c.* = transcript.challengeScalar(&session, "u"); } // 2. Compute 1/(u_k...u_1) and 1/u_k, ..., 1/u_1 @@ -386,19 +396,21 @@ test "basic correctness" { for (b, H_factors) |bi, yi| try scalars.append(bi.mul(yi).toBytes()); try scalars.append(c.toBytes()); - var points: std.BoundedArray(Edwards25519, P_len) = .{}; + var points: std.BoundedArray(Ristretto255, P_len) = .{}; try points.appendSlice(table.G[0..n]); try points.appendSlice(table.H[0..n]); - try points.append(Q.p); + try points.append(Q); - const P: Ristretto255 = .{ .p = weak_mul.mulMulti( + const P = ed25519.mulMultiRuntime( P_len, - points.buffer, - scalars.buffer, - ) }; + false, + true, + points.constSlice(), + scalars.constSlice(), + ); - var prover_transcript = Transcript.init("innerproducttest"); - var verifier_transcript = Transcript.init("innerproducttest"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof(32).init( Q, diff --git a/src/zksdk/sigma_proofs/ciphertext_ciphertext.zig b/src/zksdk/sigma_proofs/ciphertext_ciphertext.zig index eaaee9ec22..25e59b6c7e 100644 --- a/src/zksdk/sigma_proofs/ciphertext_ciphertext.zig +++ b/src/zksdk/sigma_proofs/ciphertext_ciphertext.zig @@ -2,6 +2,7 @@ //! 
[agave](https://github.com/anza-xyz/agave/blob/5a9906ebf4f24cd2a2b15aca638d609ceed87797/zk-sdk/src/sigma_proofs/ciphertext_ciphertext_equality.rs) const std = @import("std"); +const builtin = @import("builtin"); const sig = @import("../../sig.zig"); const Edwards25519 = std.crypto.ecc.Edwards25519; @@ -13,7 +14,7 @@ const ElGamalPubkey = sig.zksdk.ElGamalPubkey; const Ristretto255 = std.crypto.ecc.Ristretto255; const Scalar = std.crypto.ecc.Edwards25519.scalar.Scalar; const Transcript = sig.zksdk.Transcript; -const weak_mul = sig.vm.syscalls.ecc.weak_mul; +const ed25519 = sig.crypto.ed25519; const ProofType = sig.runtime.program.zk_elgamal.ProofType; pub const Proof = struct { @@ -25,6 +26,19 @@ pub const Proof = struct { z_x: Scalar, z_r: Scalar, + const contract: Transcript.Contract = &.{ + .{ .label = "Y_0", .type = .validate_point }, + .{ .label = "Y_1", .type = .validate_point }, + .{ .label = "Y_2", .type = .validate_point }, + .{ .label = "Y_3", .type = .validate_point }, + .{ .label = "c", .type = .challenge }, + + .{ .label = "z_s", .type = .scalar }, + .{ .label = "z_x", .type = .scalar }, + .{ .label = "z_r", .type = .scalar }, + .{ .label = "w", .type = .challenge }, // w used for batch verification + }; + pub fn init( first_kp: *const ElGamalKeypair, second_pubkey: *const ElGamalPubkey, @@ -33,55 +47,68 @@ pub const Proof = struct { amount: u64, transcript: *Transcript, ) Proof { - transcript.appendDomSep("ciphertext-ciphertext-equality-proof"); + transcript.appendDomSep(.@"ciphertext-ciphertext-equality-proof"); const P_first = first_kp.public.point; const D_first = first_ciphertext.handle.point; const P_second = second_pubkey.point; - const s = first_kp.secret.scalar; - const x = pedersen.scalarFromInt(u64, amount); const r = second_opening.scalar; + const s = first_kp.secret.scalar; + var x = pedersen.scalarFromInt(u64, amount); var y_s = Scalar.random(); var y_x = Scalar.random(); var y_r = Scalar.random(); defer { + std.crypto.secureZero(u64, 
&x.limbs); std.crypto.secureZero(u64, &y_s.limbs); std.crypto.secureZero(u64, &y_x.limbs); std.crypto.secureZero(u64, &y_r.limbs); } - const Y_0 = weak_mul.mul(P_first.p, y_s.toBytes()); - const Y_1 = weak_mul.mulMulti( + const Y_0 = ed25519.mul(true, P_first, y_s.toBytes()); + // TODO: another optimization to explore is pre-computing the `G` and `H` straus lookup + // tables here, we need some way of checking that they are in-fact the G and H point + // inside of the `mulMulti`. maybe have a wrapper ristretto struct? + const Y_1 = ed25519.mulMulti( 2, - .{ pedersen.G.p, D_first.p }, + .{ pedersen.G, D_first }, .{ y_x.toBytes(), y_s.toBytes() }, ); - const Y_2 = weak_mul.mulMulti( + const Y_2 = ed25519.mulMulti( 2, - .{ pedersen.G.p, pedersen.H.p }, + .{ pedersen.G, pedersen.H }, .{ y_x.toBytes(), y_r.toBytes() }, ); - const Y_3 = weak_mul.mul(P_second.p, y_r.toBytes()); + const Y_3 = ed25519.mul(true, P_second, y_r.toBytes()); - transcript.appendPoint("Y_0", .{ .p = Y_0 }); - transcript.appendPoint("Y_1", .{ .p = Y_1 }); - transcript.appendPoint("Y_2", .{ .p = Y_2 }); - transcript.appendPoint("Y_3", .{ .p = Y_3 }); + comptime var session = Transcript.getSession(contract); + defer session.finish(); - const c = transcript.challengeScalar("c"); - _ = transcript.challengeScalar("w"); + transcript.appendNoValidate(&session, "Y_0", Y_0); + transcript.appendNoValidate(&session, "Y_1", Y_1); + transcript.appendNoValidate(&session, "Y_2", Y_2); + transcript.appendNoValidate(&session, "Y_3", Y_3); + + const c = transcript.challengeScalar(&session, "c"); const z_s = c.mul(s).add(y_s); const z_x = c.mul(x).add(y_x); const z_r = c.mul(r).add(y_r); + if (builtin.mode == .Debug) { + transcript.append(&session, .scalar, "z_s", z_s); + transcript.append(&session, .scalar, "z_x", z_x); + transcript.append(&session, .scalar, "z_r", z_r); + _ = transcript.challengeScalar(&session, "w"); + } + return .{ - .Y_0 = .{ .p = Y_0 }, - .Y_1 = .{ .p = Y_1 }, - .Y_2 = .{ .p = Y_2 }, - .Y_3 = 
.{ .p = Y_3 }, + .Y_0 = Y_0, + .Y_1 = Y_1, + .Y_2 = Y_2, + .Y_3 = Y_3, .z_s = z_s, .z_x = z_x, .z_r = z_r, @@ -96,7 +123,7 @@ pub const Proof = struct { second_ciphertext: *const ElGamalCiphertext, transcript: *Transcript, ) !void { - transcript.appendDomSep("ciphertext-ciphertext-equality-proof"); + transcript.appendDomSep(.@"ciphertext-ciphertext-equality-proof"); const P_first = first_pubkey.point; const C_first = first_ciphertext.commitment.point; @@ -106,18 +133,22 @@ pub const Proof = struct { const C_second = second_ciphertext.commitment.point; const D_second = second_ciphertext.handle.point; - try transcript.validateAndAppendPoint("Y_0", self.Y_0); - try transcript.validateAndAppendPoint("Y_1", self.Y_1); - try transcript.validateAndAppendPoint("Y_2", self.Y_2); - try transcript.validateAndAppendPoint("Y_3", self.Y_3); + comptime var session = Transcript.getSession(contract); + defer session.finish(); + + try transcript.append(&session, .validate_point, "Y_0", self.Y_0); + try transcript.append(&session, .validate_point, "Y_1", self.Y_1); + try transcript.append(&session, .validate_point, "Y_2", self.Y_2); + try transcript.append(&session, .validate_point, "Y_3", self.Y_3); - const c = transcript.challengeScalar("c").toBytes(); + const c = transcript.challengeScalar(&session, "c").toBytes(); - transcript.appendScalar("z_s", self.z_s); - transcript.appendScalar("z_x", self.z_x); - transcript.appendScalar("z_r", self.z_r); + transcript.append(&session, .scalar, "z_s", self.z_s); + transcript.append(&session, .scalar, "z_x", self.z_x); + transcript.append(&session, .scalar, "z_r", self.z_r); + + const w = transcript.challengeScalar(&session, "w"); - const w = transcript.challengeScalar("w"); // w used for batch verification const ww = w.mul(w); const www = ww.mul(w); @@ -149,18 +180,18 @@ pub const Proof = struct { // Y_0 // zig fmt: off - const check = weak_mul.mulMulti(11, .{ - pedersen.G.p, - pedersen.H.p, - P_first.p, - D_first.p, - Y_1.p, - C_first.p, - 
Y_2.p, - C_second.p, - Y_3.p, - D_second.p, - P_second.p, + const check = ed25519.mulMulti(11, .{ + pedersen.G, + pedersen.H, + P_first, + D_first, + Y_1, + C_first, + Y_2, + C_second, + Y_3, + D_second, + P_second, }, .{ w.add(ww).mul(self.z_x).toBytes(), // z_x * (w + ww) Edwards25519.scalar.sub(self.z_r.mul(ww).toBytes(), c), // -c + (z_r * ww) @@ -176,7 +207,7 @@ pub const Proof = struct { }); // zig fmt: on - if (!Y_0.equivalent(.{ .p = check })) { + if (!Y_0.equivalent(check)) { return error.AlgebraicRelation; } } @@ -254,14 +285,17 @@ pub const Data = struct { self.first_ciphertext.toBytes() ++ self.second_ciphertext.toBytes(); } + // zig fmt: off fn newTranscript(self: Context) Transcript { - var transcript = Transcript.init("ciphertext-ciphertext-equality-instruction"); - transcript.appendPubkey("first-pubkey", self.first_pubkey); - transcript.appendPubkey("second-pubkey", self.second_pubkey); - transcript.appendCiphertext("first-ciphertext", self.first_ciphertext); - transcript.appendCiphertext("second-ciphertext", self.second_ciphertext); - return transcript; + return .init(.@"ciphertext-ciphertext-equality-instruction", &.{ + .{ .label = "first-pubkey", .message = .{ .pubkey = self.first_pubkey } }, + .{ .label = "second-pubkey", .message = .{ .pubkey = self.second_pubkey } }, + + .{ .label = "first-ciphertext", .message = .{ .ciphertext = self.first_ciphertext } }, + .{ .label = "second-ciphertext", .message = .{ .ciphertext = self.second_ciphertext } }, + }); } + // zig fmt: on }; pub fn init( @@ -402,8 +436,8 @@ test "correctness" { &second_opening, ); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.init( &first_kp, @@ -438,8 +472,8 @@ test "different messages" { &second_opening, ); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = 
Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.init( &first_kp, @@ -479,7 +513,7 @@ test "proof string" { const proof = try Proof.fromBase64(proof_string); // zig fmt: on - var verifier_transcript = Transcript.init("Test"); + var verifier_transcript = Transcript.initTest("Test"); try proof.verify( &first_pubkey, diff --git a/src/zksdk/sigma_proofs/ciphertext_commitment.zig b/src/zksdk/sigma_proofs/ciphertext_commitment.zig index 08a226819d..13922f62a8 100644 --- a/src/zksdk/sigma_proofs/ciphertext_commitment.zig +++ b/src/zksdk/sigma_proofs/ciphertext_commitment.zig @@ -2,6 +2,7 @@ //! [agave](https://github.com/anza-xyz/agave/blob/5a9906ebf4f24cd2a2b15aca638d609ceed87797/zk-sdk/src/sigma_proofs/ciphertext_commitment_equality.rs) const std = @import("std"); +const builtin = @import("builtin"); const sig = @import("../../sig.zig"); const Edwards25519 = std.crypto.ecc.Edwards25519; @@ -13,7 +14,7 @@ const ElGamalPubkey = sig.zksdk.ElGamalPubkey; const Ristretto255 = std.crypto.ecc.Ristretto255; const Scalar = std.crypto.ecc.Edwards25519.scalar.Scalar; const Transcript = sig.zksdk.Transcript; -const weak_mul = sig.vm.syscalls.ecc.weak_mul; +const ed25519 = sig.crypto.ed25519; const ProofType = sig.runtime.program.zk_elgamal.ProofType; pub const Proof = struct { @@ -24,6 +25,18 @@ pub const Proof = struct { z_x: Scalar, z_r: Scalar, + const contract: Transcript.Contract = &.{ + .{ .label = "Y_0", .type = .validate_point }, + .{ .label = "Y_1", .type = .validate_point }, + .{ .label = "Y_2", .type = .validate_point }, + .{ .label = "c", .type = .challenge }, + + .{ .label = "z_s", .type = .scalar }, + .{ .label = "z_x", .type = .scalar }, + .{ .label = "z_r", .type = .scalar }, + .{ .label = "w", .type = .challenge }, // w used for batch verification + }; + pub fn init( kp: *const ElGamalKeypair, ciphertext: *const ElGamalCiphertext, @@ -31,51 +44,61 @@ pub 
const Proof = struct { amount: u64, transcript: *Transcript, ) Proof { - transcript.appendDomSep("ciphertext-commitment-equality-proof"); + transcript.appendDomSep(.@"ciphertext-commitment-equality-proof"); const P = kp.public; const D = ciphertext.handle.point; - const s = kp.secret.scalar; - const x = pedersen.scalarFromInt(u64, amount); const r = opening.scalar; + const s = kp.secret.scalar; + var x = pedersen.scalarFromInt(u64, amount); var y_s = Scalar.random(); var y_x = Scalar.random(); var y_r = Scalar.random(); defer { + std.crypto.secureZero(u64, &x.limbs); std.crypto.secureZero(u64, &y_s.limbs); std.crypto.secureZero(u64, &y_x.limbs); std.crypto.secureZero(u64, &y_r.limbs); } - const Y_0 = weak_mul.mul(P.point.p, y_s.toBytes()); - const Y_1 = weak_mul.mulMulti( + const Y_0 = ed25519.mul(true, P.point, y_s.toBytes()); + const Y_1 = ed25519.mulMulti( 2, - .{ pedersen.G.p, D.p }, + .{ pedersen.G, D }, .{ y_x.toBytes(), y_s.toBytes() }, ); - const Y_2 = weak_mul.mulMulti( + const Y_2 = ed25519.mulMulti( 2, - .{ pedersen.G.p, pedersen.H.p }, + .{ pedersen.G, pedersen.H }, .{ y_x.toBytes(), y_r.toBytes() }, ); - transcript.appendPoint("Y_0", .{ .p = Y_0 }); - transcript.appendPoint("Y_1", .{ .p = Y_1 }); - transcript.appendPoint("Y_2", .{ .p = Y_2 }); + comptime var session = Transcript.getSession(contract); + defer session.finish(); + + transcript.appendNoValidate(&session, "Y_0", Y_0); + transcript.appendNoValidate(&session, "Y_1", Y_1); + transcript.appendNoValidate(&session, "Y_2", Y_2); - const c = transcript.challengeScalar("c"); - _ = transcript.challengeScalar("w"); + const c = transcript.challengeScalar(&session, "c"); const z_s = c.mul(s).add(y_s); const z_x = c.mul(x).add(y_x); const z_r = c.mul(r).add(y_r); + if (builtin.mode == .Debug) { + transcript.append(&session, .scalar, "z_s", z_s); + transcript.append(&session, .scalar, "z_x", z_x); + transcript.append(&session, .scalar, "z_r", z_r); + _ = transcript.challengeScalar(&session, "w"); + } + 
return .{ - .Y_0 = .{ .p = Y_0 }, - .Y_1 = .{ .p = Y_1 }, - .Y_2 = .{ .p = Y_2 }, + .Y_0 = Y_0, + .Y_1 = Y_1, + .Y_2 = Y_2, .z_s = z_s, .z_x = z_x, .z_r = z_r, @@ -89,19 +112,26 @@ pub const Proof = struct { commitment: *const pedersen.Commitment, transcript: *Transcript, ) !void { - transcript.appendDomSep("ciphertext-commitment-equality-proof"); + transcript.appendDomSep(.@"ciphertext-commitment-equality-proof"); const P = pubkey.point; const C_ciphertext = ciphertext.commitment.point; const D = ciphertext.handle.point; const C_commitment = commitment.point; - try transcript.validateAndAppendPoint("Y_0", self.Y_0); - try transcript.validateAndAppendPoint("Y_1", self.Y_1); - try transcript.validateAndAppendPoint("Y_2", self.Y_2); + comptime var session = Transcript.getSession(contract); + defer session.finish(); + + try transcript.append(&session, .validate_point, "Y_0", self.Y_0); + try transcript.append(&session, .validate_point, "Y_1", self.Y_1); + try transcript.append(&session, .validate_point, "Y_2", self.Y_2); + + const c = transcript.challengeScalar(&session, "c").toBytes(); - const c = transcript.challengeScalar("c").toBytes(); - const w = transcript.challengeScalar("w"); + transcript.append(&session, .scalar, "z_s", self.z_s); + transcript.append(&session, .scalar, "z_x", self.z_x); + transcript.append(&session, .scalar, "z_r", self.z_r); + const w = transcript.challengeScalar(&session, "w"); const c_negated = Scalar.fromBytes(Edwards25519.scalar.neg(c)); const z_s_w = self.z_s.mul(w); @@ -120,16 +150,15 @@ pub const Proof = struct { // ----------------------- MSM // Y_2 - // zig fmt: off - const check = weak_mul.mulMulti(8, .{ - pedersen.G.p, - pedersen.H.p, - self.Y_0.p, - self.Y_1.p, - P.p, - C_ciphertext.p, - D.p, - C_commitment.p, + const check = ed25519.mulMulti(8, .{ + pedersen.G, + pedersen.H, + self.Y_0, + self.Y_1, + P, + C_ciphertext, + D, + C_commitment, }, .{ self.z_x.mul(w).add(self.z_x).toBytes(), 
c_negated_w.mul(w).add(self.z_r).toBytes(), @@ -140,9 +169,8 @@ pub const Proof = struct { z_s_w.toBytes(), c_negated.toBytes(), }); - // zig fmt: on - if (!self.Y_2.equivalent(.{ .p = check })) { + if (!self.Y_2.equivalent(check)) { return error.AlgebraicRelation; } } @@ -216,11 +244,11 @@ pub const Data = struct { } fn newTranscript(self: Context) Transcript { - var transcript = Transcript.init("ciphertext-commitment-equality-instruction"); - transcript.appendPubkey("pubkey", self.pubkey); - transcript.appendCiphertext("ciphertext", self.ciphertext); - transcript.appendCommitment("commitment", self.commitment); - return transcript; + return .init(.@"ciphertext-commitment-equality-instruction", &.{ + .{ .label = "pubkey", .message = .{ .pubkey = self.pubkey } }, + .{ .label = "ciphertext", .message = .{ .ciphertext = self.ciphertext } }, + .{ .label = "commitment", .message = .{ .commitment = self.commitment } }, + }); } }; @@ -294,8 +322,8 @@ test "success case" { const ciphertext = el_gamal.encrypt(u64, message, &kp.public); const commitment, const opening = pedersen.initValue(u64, message); - var prover_transcript = Transcript.init("Test"); - var verifier_transcript = Transcript.init("Test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.init( &kp, @@ -320,8 +348,8 @@ test "fail case" { const ciphertext = el_gamal.encrypt(u64, encrypted_message, &kp.public); const commitment, const opening = pedersen.initValue(u64, committed_message); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.init( &kp, @@ -352,8 +380,8 @@ test "public key zeroed" { const ciphertext = el_gamal.encrypt(u64, message, &kp.public); const commitment, const opening = pedersen.initValue(u64, message); - var prover_transcript = 
Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.init( &kp, @@ -384,8 +412,8 @@ test "all zoered" { const commitment = try pedersen.Commitment.fromBytes(.{0} ** 32); const opening = try pedersen.Opening.fromBytes(.{0} ** 32); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.init( &kp, @@ -413,8 +441,8 @@ test "commitment zeroed" { const commitment = try pedersen.Commitment.fromBytes(.{0} ** 32); const opening = try pedersen.Opening.fromBytes(.{0} ** 32); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.init( &kp, @@ -441,8 +469,8 @@ test "ciphertext zeroed" { const ciphertext = try ElGamalCiphertext.fromBytes(.{0} ** 64); const commitment, const opening = pedersen.initValue(u64, message); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.init( &kp, @@ -475,7 +503,7 @@ test "proof strings" { const proof = try Proof.fromBase64(proof_string); // zig fmt: on - var verifier_transcript = Transcript.init("Test"); + var verifier_transcript = Transcript.initTest("Test"); try proof.verify( &pubkey, &ciphertext, diff --git a/src/zksdk/sigma_proofs/grouped_ciphertext/handles_2.zig b/src/zksdk/sigma_proofs/grouped_ciphertext/handles_2.zig index cc9cd67409..c087412be6 100644 --- a/src/zksdk/sigma_proofs/grouped_ciphertext/handles_2.zig +++ 
b/src/zksdk/sigma_proofs/grouped_ciphertext/handles_2.zig @@ -2,6 +2,7 @@ //! [agave](https://github.com/anza-xyz/agave/blob/5a9906ebf4f24cd2a2b15aca638d609ceed87797/zk-sdk/src/sigma_proofs/grouped_ciphertext_validity/handles_2.rs) const std = @import("std"); +const builtin = @import("builtin"); const sig = @import("../../../sig.zig"); const Edwards25519 = std.crypto.ecc.Edwards25519; @@ -12,7 +13,7 @@ const ElGamalPubkey = sig.zksdk.ElGamalPubkey; const Ristretto255 = std.crypto.ecc.Ristretto255; const Scalar = std.crypto.ecc.Edwards25519.scalar.Scalar; const Transcript = sig.zksdk.Transcript; -const weak_mul = sig.vm.syscalls.ecc.weak_mul; +const ed25519 = sig.crypto.ed25519; const GroupedElGamalCiphertext = el_gamal.GroupedElGamalCiphertext; const ProofType = sig.runtime.program.zk_elgamal.ProofType; @@ -23,6 +24,22 @@ pub const Proof = struct { z_r: Scalar, z_x: Scalar, + // the extra contract on top of the base `contract` used in `init`. + const batched_contract: Transcript.Contract = &.{ + .{ .label = "t", .type = .challenge }, + }; + + const contract: Transcript.Contract = &.{ + .{ .label = "Y_0", .type = .validate_point }, + .{ .label = "Y_1", .type = .validate_point }, + .{ .label = "Y_2", .type = .point }, + .{ .label = "c", .type = .challenge }, + + .{ .label = "z_r", .type = .scalar }, + .{ .label = "z_x", .type = .scalar }, + .{ .label = "w", .type = .challenge }, + }; + pub fn initBatched( first_pubkey: *const ElGamalPubkey, second_pubkey: *const ElGamalPubkey, @@ -32,10 +49,11 @@ pub const Proof = struct { opening_hi: *const pedersen.Opening, transcript: *Transcript, ) Proof { - transcript.appendDomSep("batched-validity-proof"); - transcript.appendU64("handles", 2); + transcript.appendHandleDomSep(.batched, .two); - const t = transcript.challengeScalar("t"); + comptime var session = Transcript.getSession(batched_contract); + defer session.finish(); + const t = transcript.challengeScalar(&session, "t"); const scalar_lo = pedersen.scalarFromInt(u64, 
amount_lo); const scalar_hi = pedersen.scalarFromInt(u64, amount_hi); @@ -61,13 +79,12 @@ pub const Proof = struct { opening: *const pedersen.Opening, transcript: *Transcript, ) Proof { - transcript.appendDomSep("validity-proof"); - transcript.appendU64("handles", 2); + transcript.appendHandleDomSep(.unbatched, .two); const P_first = first_pubkey.point; const P_second = second_pubkey.point; - const x: Scalar = switch (@TypeOf(amount)) { + var x: Scalar = switch (@TypeOf(amount)) { Scalar => amount, u64 => pedersen.scalarFromInt(u64, amount), else => unreachable, @@ -77,29 +94,37 @@ pub const Proof = struct { var y_r = Scalar.random(); var y_x = Scalar.random(); defer { + std.crypto.secureZero(u64, &x.limbs); std.crypto.secureZero(u64, &y_r.limbs); std.crypto.secureZero(u64, &y_x.limbs); } - const Y_0: Ristretto255 = .{ .p = weak_mul.mulMulti( + const Y_0 = ed25519.mulMulti( 2, - .{ pedersen.H.p, pedersen.G.p }, + .{ pedersen.H, pedersen.G }, .{ y_r.toBytes(), y_x.toBytes() }, - ) }; - const Y_1: Ristretto255 = .{ .p = weak_mul.mul(P_first.p, y_r.toBytes()) }; - const Y_2: Ristretto255 = .{ .p = weak_mul.mul(P_second.p, y_r.toBytes()) }; + ); + const Y_1: Ristretto255 = ed25519.mul(true, P_first, y_r.toBytes()); + const Y_2: Ristretto255 = ed25519.mul(true, P_second, y_r.toBytes()); - transcript.appendPoint("Y_0", Y_0); - transcript.appendPoint("Y_1", Y_1); - transcript.appendPoint("Y_2", Y_2); + comptime var session = Transcript.getSession(contract); + defer session.finish(); - const c = transcript.challengeScalar("c"); - _ = transcript.challengeScalar("w"); + transcript.appendNoValidate(&session, "Y_0", Y_0); + transcript.appendNoValidate(&session, "Y_1", Y_1); + transcript.append(&session, .point, "Y_2", Y_2); + const c = transcript.challengeScalar(&session, "c"); // masked message and opening const z_r = c.mul(r).add(y_r); const z_x = c.mul(x).add(y_x); + if (builtin.mode == .Debug) { + transcript.append(&session, .scalar, "z_r", z_r); + 
transcript.append(&session, .scalar, "z_x", z_x); + _ = transcript.challengeScalar(&session, "w"); + } + return .{ .Y_0 = Y_0, .Y_1 = Y_1, @@ -134,25 +159,31 @@ pub const Proof = struct { params: Params(batched), transcript: *Transcript, ) !void { + // for batched we have the batched contract which includes the initial + // `t` challenge, and then the base one that's shared between batched and non batched. + comptime var session = Transcript.getSession(if (batched) + batched_contract ++ contract + else + contract); + defer session.finish(); + const t = if (batched) t: { - transcript.appendDomSep("batched-validity-proof"); - transcript.appendU64("handles", 2); - break :t transcript.challengeScalar("t"); + transcript.appendHandleDomSep(.batched, .two); + break :t transcript.challengeScalar(&session, "t"); } else void; // shouldn't be referenced - transcript.appendDomSep("validity-proof"); - transcript.appendU64("handles", 2); + transcript.appendHandleDomSep(.unbatched, .two); - try transcript.validateAndAppendPoint("Y_0", self.Y_0); - try transcript.validateAndAppendPoint("Y_1", self.Y_1); + try transcript.append(&session, .validate_point, "Y_0", self.Y_0); + try transcript.append(&session, .validate_point, "Y_1", self.Y_1); // Y_2 can be all zero point if the second public key is all zero - transcript.appendPoint("Y_2", self.Y_2); + transcript.append(&session, .point, "Y_2", self.Y_2); - const c = transcript.challengeScalar("c").toBytes(); + const c = transcript.challengeScalar(&session, "c").toBytes(); - transcript.appendScalar("z_r", self.z_r); - transcript.appendScalar("z_x", self.z_x); - const w = transcript.challengeScalar("w"); + transcript.append(&session, .scalar, "z_r", self.z_r); + transcript.append(&session, .scalar, "z_x", self.z_x); + const w = transcript.challengeScalar(&session, "w"); const c_negated = Scalar.fromBytes(Edwards25519.scalar.neg(c)); const w_negated = Scalar.fromBytes(Edwards25519.scalar.neg(w.toBytes())); @@ -187,17 +218,17 @@ pub const 
Proof = struct { const c_negated_w = c_negated.mul(w); const z_r_w = self.z_r.mul(w); - var points: std.BoundedArray(Edwards25519, 12) = .{}; + var points: std.BoundedArray(Ristretto255, 12) = .{}; var scalars: std.BoundedArray([32]u8, 12) = .{}; try points.appendSlice(&.{ - pedersen.G.p, - pedersen.H.p, - self.Y_1.p, - self.Y_2.p, - params.first_pubkey.point.p, - params.commitment.point.p, - params.first_handle.point.p, + pedersen.G, + pedersen.H, + self.Y_1, + self.Y_2, + params.first_pubkey.point, + params.commitment.point, + params.first_handle.point, }); try scalars.appendSlice(&.{ @@ -212,8 +243,8 @@ pub const Proof = struct { if (batched) { try points.appendSlice(&.{ - params.commitment_hi.point.p, - params.first_handle_hi.point.p, + params.commitment_hi.point, + params.first_handle_hi.point, }); try scalars.appendSlice(&.{ c_negated.mul(t).toBytes(), @@ -223,8 +254,8 @@ pub const Proof = struct { if (second_pubkey_not_zero) { try points.appendSlice(&.{ - params.second_pubkey.point.p, - params.second_handle.point.p, + params.second_pubkey.point, + params.second_handle.point, }); try scalars.appendSlice(&.{ z_r_w.mul(w).toBytes(), @@ -233,28 +264,31 @@ pub const Proof = struct { } if (batched and second_pubkey_not_zero) { - try points.append(params.second_handle_hi.point.p); + try points.append(params.second_handle_hi.point); try scalars.append(c_negated_w.mul(w).mul(t).toBytes()); } - const check = switch (points.len) { - inline + // assert the only possible lengths to help the optimizer a bit + switch (points.len) { // batched is false + pubkey2_not_zero is false - 7, + 7 => {}, // batched is true + pubkey2_not_zero is false // batched is false + pubkey2_not_zero is true - 9, + 9 => {}, // batched is true + pubkey2_not_zero is true + 12 => {}, + else => unreachable, // nothing else should be possible! 
+ } + + const check = ed25519.straus.mulMultiRuntime( 12, - => |N| weak_mul.mulMulti( - N, - points.constSlice()[0..N].*, - scalars.constSlice()[0..N].*, - ), - else => unreachable, - }; + false, + true, + points.constSlice(), + scalars.constSlice(), + ); - if (!self.Y_0.equivalent(.{ .p = check })) { + if (!self.Y_0.equivalent(check)) { return error.AlgebraicRelation; } } @@ -323,13 +357,15 @@ pub const Data = struct { self.grouped_ciphertext.toBytes(); } + // zig fmt: off fn newTranscript(self: Context) Transcript { - var transcript = Transcript.init("grouped-ciphertext-validity-2-handles-instruction"); - transcript.appendPubkey("first-pubkey", self.first_pubkey); - transcript.appendPubkey("second-pubkey", self.second_pubkey); - transcript.appendMessage("grouped-ciphertext", &self.grouped_ciphertext.toBytes()); - return transcript; + return .init(.@"grouped-ciphertext-validity-2-handles-instruction", &.{ + .{ .label = "first-pubkey", .message = .{ .pubkey = self.first_pubkey } }, + .{ .label = "second-pubkey", .message = .{ .pubkey = self.second_pubkey } }, + .{ .label = "grouped-ciphertext", .message = .{ .grouped_2 = self.grouped_ciphertext } }, + }); } + // zig fmt: on }; pub fn init( @@ -445,16 +481,16 @@ pub const BatchedData = struct { self.grouped_ciphertext_hi.toBytes(); } + // zig fmt: off fn newTranscript(self: Context) Transcript { - var transcript = Transcript.init( - "batched-grouped-ciphertext-validity-2-handles-instruction", - ); - transcript.appendPubkey("first-pubkey", self.first_pubkey); - transcript.appendPubkey("second-pubkey", self.second_pubkey); - transcript.appendMessage("grouped-ciphertext-lo", &self.grouped_ciphertext_lo.toBytes()); - transcript.appendMessage("grouped-ciphertext-hi", &self.grouped_ciphertext_hi.toBytes()); - return transcript; + return .init(.@"batched-grouped-ciphertext-validity-2-handles-instruction", &.{ + .{ .label = "first-pubkey", .message = .{ .pubkey = self.first_pubkey } }, + .{ .label = "second-pubkey", 
.message = .{ .pubkey = self.second_pubkey } }, + .{ .label = "grouped-ciphertext-lo", .message = .{ .grouped_2 = self.grouped_ciphertext_lo } }, + .{ .label = "grouped-ciphertext-hi", .message = .{ .grouped_2 = self.grouped_ciphertext_hi } }, + }); } + // zig fmt: on }; pub fn init( @@ -577,8 +613,8 @@ test "correctness" { const first_handle = pedersen.DecryptHandle.init(&first_pubkey, &opening); const second_handle = pedersen.DecryptHandle.init(&second_pubkey, &opening); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.init( &first_pubkey, @@ -614,8 +650,8 @@ test "first pubkey zeroed" { const first_handle = pedersen.DecryptHandle.init(&first_pubkey, &opening); const second_handle = pedersen.DecryptHandle.init(&second_pubkey, &opening); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.init( &first_pubkey, @@ -656,8 +692,8 @@ test "zeroed ciphertext" { const first_handle = pedersen.DecryptHandle.init(&first_pubkey, &opening); const second_handle = pedersen.DecryptHandle.init(&second_pubkey, &opening); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.init( &first_pubkey, @@ -695,8 +731,8 @@ test "zeroed decryption handle" { const first_handle = pedersen.DecryptHandle.init(&first_pubkey, &zeroed_opening); const second_handle = pedersen.DecryptHandle.init(&second_pubkey, &zeroed_opening); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = 
Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.init( &first_pubkey, @@ -740,7 +776,7 @@ test "proof string" { const proof = try Proof.fromBase64(proof_string); // zig fmt: on - var verifier_transcript = Transcript.init("Test"); + var verifier_transcript = Transcript.initTest("Test"); try proof.verify( false, .{ @@ -773,8 +809,8 @@ test "batched sanity" { const second_handle_lo = pedersen.DecryptHandle.init(&second_pubkey, &opening_lo); const second_handle_hi = pedersen.DecryptHandle.init(&second_pubkey, &opening_hi); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.initBatched( &first_pubkey, @@ -832,7 +868,7 @@ test "batched proof string" { const proof = try Proof.fromBase64(proof_string); // zig fmt: on - var verifier_transcript = Transcript.init("Test"); + var verifier_transcript = Transcript.initTest("Test"); try proof.verify( true, diff --git a/src/zksdk/sigma_proofs/grouped_ciphertext/handles_3.zig b/src/zksdk/sigma_proofs/grouped_ciphertext/handles_3.zig index acaf5b1b09..bd70b30827 100644 --- a/src/zksdk/sigma_proofs/grouped_ciphertext/handles_3.zig +++ b/src/zksdk/sigma_proofs/grouped_ciphertext/handles_3.zig @@ -2,6 +2,7 @@ //! 
[agave](https://github.com/anza-xyz/agave/blob/5a9906ebf4f24cd2a2b15aca638d609ceed87797/zk-sdk/src/sigma_proofs/grouped_ciphertext_validity/handles_3.rs) const std = @import("std"); +const builtin = @import("builtin"); const sig = @import("../../../sig.zig"); const Edwards25519 = std.crypto.ecc.Edwards25519; @@ -12,7 +13,7 @@ const ElGamalPubkey = sig.zksdk.ElGamalPubkey; const Ristretto255 = std.crypto.ecc.Ristretto255; const Scalar = std.crypto.ecc.Edwards25519.scalar.Scalar; const Transcript = sig.zksdk.Transcript; -const weak_mul = sig.vm.syscalls.ecc.weak_mul; +const ed25519 = sig.crypto.ed25519; const GroupedElGamalCiphertext = sig.zksdk.GroupedElGamalCiphertext; const ProofType = sig.runtime.program.zk_elgamal.ProofType; @@ -24,6 +25,23 @@ pub const Proof = struct { z_r: Scalar, z_x: Scalar, + // the extra contract on top of the base `contract` used in `init`. + const batched_contract: Transcript.Contract = &.{ + .{ .label = "t", .type = .challenge }, + }; + + const contract: Transcript.Contract = &.{ + .{ .label = "Y_0", .type = .validate_point }, + .{ .label = "Y_1", .type = .validate_point }, + .{ .label = "Y_2", .type = .validate_point }, + .{ .label = "Y_3", .type = .point }, + .{ .label = "c", .type = .challenge }, + + .{ .label = "z_r", .type = .scalar }, + .{ .label = "z_x", .type = .scalar }, + .{ .label = "w", .type = .challenge }, + }; + pub fn initBatched( first_pubkey: *const ElGamalPubkey, second_pubkey: *const ElGamalPubkey, @@ -34,10 +52,11 @@ pub const Proof = struct { opening_hi: *const pedersen.Opening, transcript: *Transcript, ) Proof { - transcript.appendDomSep("batched-validity-proof"); - transcript.appendU64("handles", 3); + transcript.appendHandleDomSep(.batched, .three); - const t = transcript.challengeScalar("t"); + comptime var session = Transcript.getSession(batched_contract); + defer session.finish(); + const t = transcript.challengeScalar(&session, "t"); const scalar_lo = pedersen.scalarFromInt(u64, amount_lo); const scalar_hi = 
pedersen.scalarFromInt(u64, amount_hi); @@ -65,14 +84,16 @@ pub const Proof = struct { opening: *const pedersen.Opening, transcript: *Transcript, ) Proof { - transcript.appendDomSep("validity-proof"); - transcript.appendU64("handles", 3); + transcript.appendHandleDomSep(.unbatched, .three); + + comptime var session = Transcript.getSession(contract); + defer session.finish(); const P_first = first_pubkey.point; const P_second = second_pubkey.point; const P_third = third_pubkey.point; - const x: Scalar = switch (@TypeOf(amount)) { + var x: Scalar = switch (@TypeOf(amount)) { u64 => pedersen.scalarFromInt(u64, amount), Scalar => amount, else => unreachable, @@ -82,30 +103,36 @@ pub const Proof = struct { var y_r = Scalar.random(); var y_x = Scalar.random(); defer { + std.crypto.secureZero(u64, &x.limbs); std.crypto.secureZero(u64, &y_r.limbs); std.crypto.secureZero(u64, &y_x.limbs); } - const Y_0: Ristretto255 = .{ .p = weak_mul.mulMulti( + const Y_0 = ed25519.mulMulti( 2, - .{ pedersen.H.p, pedersen.G.p }, + .{ pedersen.H, pedersen.G }, .{ y_r.toBytes(), y_x.toBytes() }, - ) }; - const Y_1: Ristretto255 = .{ .p = weak_mul.mul(P_first.p, y_r.toBytes()) }; - const Y_2: Ristretto255 = .{ .p = weak_mul.mul(P_second.p, y_r.toBytes()) }; - const Y_3: Ristretto255 = .{ .p = weak_mul.mul(P_third.p, y_r.toBytes()) }; + ); + const Y_1: Ristretto255 = ed25519.mul(true, P_first, y_r.toBytes()); + const Y_2: Ristretto255 = ed25519.mul(true, P_second, y_r.toBytes()); + const Y_3: Ristretto255 = ed25519.mul(true, P_third, y_r.toBytes()); - transcript.appendPoint("Y_0", Y_0); - transcript.appendPoint("Y_1", Y_1); - transcript.appendPoint("Y_2", Y_2); - transcript.appendPoint("Y_3", Y_3); + transcript.appendNoValidate(&session, "Y_0", Y_0); + transcript.appendNoValidate(&session, "Y_1", Y_1); + transcript.appendNoValidate(&session, "Y_2", Y_2); + transcript.append(&session, .point, "Y_3", Y_3); - const c = transcript.challengeScalar("c"); - _ = transcript.challengeScalar("w"); + 
const c = transcript.challengeScalar(&session, "c"); const z_r = c.mul(r).add(y_r); const z_x = c.mul(x).add(y_x); + if (builtin.mode == .Debug) { + transcript.append(&session, .scalar, "z_r", z_r); + transcript.append(&session, .scalar, "z_x", z_x); + _ = transcript.challengeScalar(&session, "w"); + } + return .{ .Y_0 = Y_0, .Y_1 = Y_1, @@ -149,26 +176,30 @@ pub const Proof = struct { params: Params(batched), transcript: *Transcript, ) !void { + comptime var session = Transcript.getSession(if (batched) + batched_contract ++ contract + else + contract); + defer session.finish(); + const t = if (batched) t: { - transcript.appendDomSep("batched-validity-proof"); - transcript.appendU64("handles", 3); - break :t transcript.challengeScalar("t"); + transcript.appendHandleDomSep(.batched, .three); + break :t transcript.challengeScalar(&session, "t"); } else void; // shouldn't be referenced - transcript.appendDomSep("validity-proof"); - transcript.appendU64("handles", 3); + transcript.appendHandleDomSep(.unbatched, .three); - try transcript.validateAndAppendPoint("Y_0", self.Y_0); - try transcript.validateAndAppendPoint("Y_1", self.Y_1); - try transcript.validateAndAppendPoint("Y_2", self.Y_2); - transcript.appendPoint("Y_3", self.Y_3); + try transcript.append(&session, .validate_point, "Y_0", self.Y_0); + try transcript.append(&session, .validate_point, "Y_1", self.Y_1); + try transcript.append(&session, .validate_point, "Y_2", self.Y_2); + transcript.append(&session, .point, "Y_3", self.Y_3); - const c = transcript.challengeScalar("c"); + const c = transcript.challengeScalar(&session, "c"); const c_negated = Scalar.fromBytes(Edwards25519.scalar.neg(c.toBytes())); - transcript.appendScalar("z_r", self.z_r); - transcript.appendScalar("z_x", self.z_x); - const w = transcript.challengeScalar("w"); + transcript.append(&session, .scalar, "z_r", self.z_r); + transcript.append(&session, .scalar, "z_x", self.z_x); + const w = transcript.challengeScalar(&session, "w"); const ww = 
w.mul(w); const www = ww.mul(w); @@ -196,22 +227,22 @@ pub const Proof = struct { // ----------------------- MSM // Y_0 - var points: std.BoundedArray(Edwards25519, 16) = .{}; + var points: std.BoundedArray(Ristretto255, 16) = .{}; var scalars: std.BoundedArray([32]u8, 16) = .{}; try points.appendSlice(&.{ - pedersen.G.p, - pedersen.H.p, - params.commitment.point.p, - params.first_pubkey.point.p, - self.Y_1.p, - params.first_handle.point.p, - params.second_pubkey.point.p, - self.Y_2.p, - params.second_handle.point.p, - params.third_pubkey.point.p, - self.Y_3.p, - params.third_handle.point.p, + pedersen.G, + pedersen.H, + params.commitment.point, + params.first_pubkey.point, + self.Y_1, + params.first_handle.point, + params.second_pubkey.point, + self.Y_2, + params.second_handle.point, + params.third_pubkey.point, + self.Y_3, + params.third_handle.point, }); // zig fmt: off try scalars.appendSlice(&.{ @@ -232,10 +263,10 @@ pub const Proof = struct { if (batched) { try points.appendSlice(&.{ - params.commitment_hi.point.p, - params.first_handle_hi.point.p, - params.second_handle_hi.point.p, - params.third_handle_hi.point.p, + params.commitment_hi.point, + params.first_handle_hi.point, + params.second_handle_hi.point, + params.third_handle_hi.point, }); try scalars.appendSlice(&.{ c_negated.mul(t).toBytes(), // -c * t @@ -245,19 +276,21 @@ pub const Proof = struct { }); } - const check = switch (points.len) { - inline // - 12, - 16, - => |N| weak_mul.mulMulti( - N, - points.constSlice()[0..N].*, - scalars.constSlice()[0..N].*, - ), + // give the optimizer a little hint on the two possible lengths + switch (points.len) { + 12, 16 => {}, else => unreachable, - }; + } + + const check = ed25519.straus.mulMultiRuntime( + 16, + false, + true, + points.constSlice(), + scalars.constSlice(), + ); - if (!self.Y_0.equivalent(.{ .p = check })) { + if (!self.Y_0.equivalent(check)) { return error.AlgebraicRelation; } } @@ -331,14 +364,16 @@ pub const Data = struct { 
self.grouped_ciphertext.toBytes(); } + // zig fmt: off fn newTranscript(self: Context) Transcript { - var transcript = Transcript.init("grouped-ciphertext-validity-3-handles-instruction"); - transcript.appendPubkey("first-pubkey", self.first_pubkey); - transcript.appendPubkey("second-pubkey", self.second_pubkey); - transcript.appendPubkey("third-pubkey", self.third_pubkey); - transcript.appendMessage("grouped-ciphertext", &self.grouped_ciphertext.toBytes()); - return transcript; + return .init(.@"grouped-ciphertext-validity-3-handles-instruction", &.{ + .{ .label = "first-pubkey", .message = .{ .pubkey = self.first_pubkey } }, + .{ .label = "second-pubkey", .message = .{ .pubkey = self.second_pubkey } }, + .{ .label = "third-pubkey", .message = .{ .pubkey = self.third_pubkey } }, + .{ .label = "grouped-ciphertext", .message = .{ .grouped_3 = self.grouped_ciphertext } }, + }); } + // zig fmt: on }; pub fn init( @@ -467,17 +502,17 @@ pub const BatchedData = struct { self.grouped_ciphertext_hi.toBytes(); } + // zig fmt: off fn newTranscript(self: Context) Transcript { - var transcript = Transcript.init( - "batched-grouped-ciphertext-validity-3-handles-instruction", - ); - transcript.appendPubkey("first-pubkey", self.first_pubkey); - transcript.appendPubkey("second-pubkey", self.second_pubkey); - transcript.appendPubkey("third-pubkey", self.third_pubkey); - transcript.appendMessage("grouped-ciphertext-lo", &self.grouped_ciphertext_lo.toBytes()); - transcript.appendMessage("grouped-ciphertext-hi", &self.grouped_ciphertext_hi.toBytes()); - return transcript; + return .init(.@"batched-grouped-ciphertext-validity-3-handles-instruction", &.{ + .{ .label = "first-pubkey", .message = .{ .pubkey = self.first_pubkey } }, + .{ .label = "second-pubkey", .message = .{ .pubkey = self.second_pubkey } }, + .{ .label = "third-pubkey", .message = .{ .pubkey = self.third_pubkey } }, + .{ .label = "grouped-ciphertext-lo", .message = .{ .grouped_3 = self.grouped_ciphertext_lo } }, + .{ 
.label = "grouped-ciphertext-hi", .message = .{ .grouped_3 = self.grouped_ciphertext_hi } }, + }); } + // zig fmt: on }; pub fn init( @@ -617,8 +652,8 @@ test "correctness" { const second_handle = pedersen.DecryptHandle.init(&second_pubkey, &opening); const third_handle = pedersen.DecryptHandle.init(&third_pubkey, &opening); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.init( &first_pubkey, @@ -660,8 +695,8 @@ test "first/second pubkey zeroed" { const second_handle = pedersen.DecryptHandle.init(&second_pubkey, &opening); const third_handle = pedersen.DecryptHandle.init(&third_pubkey, &opening); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.init( &first_pubkey, @@ -709,8 +744,8 @@ test "zeroed ciphertext" { const second_handle = pedersen.DecryptHandle.init(&second_pubkey, &opening); const third_handle = pedersen.DecryptHandle.init(&third_pubkey, &opening); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.init( &first_pubkey, @@ -755,8 +790,8 @@ test "zeroed decryption handle" { const second_handle = pedersen.DecryptHandle.init(&second_pubkey, &zeroed_opening); const third_handle = pedersen.DecryptHandle.init(&third_pubkey, &zeroed_opening); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.init( &first_pubkey, @@ -809,7 +844,7 
@@ test "proof string" { const proof = try Proof.fromBase64(proof_string); // zig fmt: on - var verifier_transcript = Transcript.init("Test"); + var verifier_transcript = Transcript.initTest("Test"); try proof.verify( false, @@ -851,8 +886,8 @@ test "batched correctness" { const third_handle_lo = pedersen.DecryptHandle.init(&third_pubkey, &opening_lo); const third_handle_hi = pedersen.DecryptHandle.init(&third_pubkey, &opening_hi); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.initBatched( &first_pubkey, @@ -923,7 +958,7 @@ test "batched proof string" { const proof = try Proof.fromBase64(proof_string); // zig fmt: on - var verifier_transcript = Transcript.init("Test"); + var verifier_transcript = Transcript.initTest("Test"); try proof.verify( true, diff --git a/src/zksdk/sigma_proofs/percentage_with_cap.zig b/src/zksdk/sigma_proofs/percentage_with_cap.zig index aab30b616c..beddf60229 100644 --- a/src/zksdk/sigma_proofs/percentage_with_cap.zig +++ b/src/zksdk/sigma_proofs/percentage_with_cap.zig @@ -2,6 +2,7 @@ //! 
[agave](https://github.com/anza-xyz/agave/blob/5a9906ebf4f24cd2a2b15aca638d609ceed87797/zk-sdk/src/sigma_proofs/percentage_with_cap.rs) const std = @import("std"); +const builtin = @import("builtin"); const sig = @import("../../sig.zig"); const Edwards25519 = std.crypto.ecc.Edwards25519; @@ -9,13 +10,27 @@ const pedersen = sig.zksdk.pedersen; const Ristretto255 = std.crypto.ecc.Ristretto255; const Scalar = std.crypto.ecc.Edwards25519.scalar.Scalar; const Transcript = sig.zksdk.Transcript; -const weak_mul = sig.vm.syscalls.ecc.weak_mul; +const ed25519 = sig.crypto.ed25519; const ProofType = sig.runtime.program.zk_elgamal.ProofType; pub const Proof = struct { max_proof: MaxProof, equality_proof: EqualityProof, + const contract: Transcript.Contract = &.{ + .{ .label = "Y_max_proof", .type = .validate_point }, + .{ .label = "Y_delta", .type = .validate_point }, + .{ .label = "Y_claimed", .type = .validate_point }, + .{ .label = "c", .type = .challenge }, + + .{ .label = "z_max", .type = .scalar }, + .{ .label = "c_max_proof", .type = .scalar }, + .{ .label = "z_x", .type = .scalar }, + .{ .label = "z_delta_real", .type = .scalar }, + .{ .label = "z_claimed", .type = .scalar }, + .{ .label = "w", .type = .challenge }, + }; + pub fn init( percentage_commitment: *const pedersen.Commitment, percentage_opening: *const pedersen.Opening, @@ -28,7 +43,7 @@ pub const Proof = struct { max_value: u64, transcript: *Transcript, ) Proof { - transcript.appendDomSep("percentage-with-cap-proof"); + transcript.appendDomSep(.@"percentage-with-cap-proof"); var transcript_percentage_above_max = transcript.*; var transcript_percentage_below_max = transcript.*; @@ -54,12 +69,22 @@ pub const Proof = struct { const below_max = percentage_amount <= max_value; const active = if (below_max) proof_below_max else proof_above_max; - transcript.appendPoint("Y_max_proof", active.max_proof.Y_max_proof); - transcript.appendPoint("Y_delta", active.equality_proof.Y_delta); - 
transcript.appendPoint("Y_claimed", active.equality_proof.Y_claimed); - - _ = transcript.challengeScalar("c"); - _ = transcript.challengeScalar("w"); + if (builtin.mode == .Debug) { + comptime var session = Transcript.getSession(contract); + defer session.finish(); + + transcript.appendNoValidate(&session, "Y_max_proof", active.max_proof.Y_max_proof); + transcript.appendNoValidate(&session, "Y_delta", active.equality_proof.Y_delta); + transcript.appendNoValidate(&session, "Y_claimed", active.equality_proof.Y_claimed); + _ = transcript.challengeScalar(&session, "c"); + + transcript.append(&session, .scalar, "z_max", active.max_proof.z_max_proof); + transcript.append(&session, .scalar, "c_max_proof", active.max_proof.c_max_proof); + transcript.append(&session, .scalar, "z_x", active.equality_proof.z_x); + transcript.append(&session, .scalar, "z_delta_real", active.equality_proof.z_delta); + transcript.append(&session, .scalar, "z_claimed", active.equality_proof.z_claimed); + _ = transcript.challengeScalar(&session, "w"); + } return .{ .max_proof = active.max_proof, @@ -79,22 +104,23 @@ pub const Proof = struct { const z_x = Scalar.random(); const z_delta = Scalar.random(); const z_claimed = Scalar.random(); - const c_equality = Scalar.random(); + var c_equality = Scalar.random(); + defer std.crypto.secureZero(u64, &c_equality.limbs); - const Y_delta = weak_mul.mulMulti(3, .{ - pedersen.G.p, - pedersen.H.p, - C_delta.p, + const Y_delta = ed25519.mulMulti(3, .{ + pedersen.G, + pedersen.H, + C_delta, }, .{ z_x.toBytes(), z_delta.toBytes(), Edwards25519.scalar.neg(c_equality.toBytes()), }); - const Y_claimed = weak_mul.mulMulti(3, .{ - pedersen.G.p, - pedersen.H.p, - C_claimed.p, + const Y_claimed = ed25519.mulMulti(3, .{ + pedersen.G, + pedersen.H, + C_claimed, }, .{ z_x.toBytes(), z_claimed.toBytes(), @@ -102,8 +128,8 @@ pub const Proof = struct { }); const equality_proof: EqualityProof = .{ - .Y_delta = .{ .p = Y_delta }, - .Y_claimed = .{ .p = Y_claimed }, + .Y_delta 
= Y_delta, + .Y_claimed = Y_claimed, .z_x = z_x, .z_delta = z_delta, .z_claimed = z_claimed, @@ -111,18 +137,20 @@ pub const Proof = struct { const r_percentage = percentage_opening.scalar; - const y_max_proof = Scalar.random(); + var y_max_proof = Scalar.random(); // Scalar.random() cannot return zero, and H isn't an identity. const Y_max_proof = pedersen.H.mul(y_max_proof.toBytes()) catch unreachable; + defer std.crypto.secureZero(u64, &y_max_proof.limbs); - transcript.appendPoint("Y_max_proof", Y_max_proof); - transcript.appendPoint("Y_delta", .{ .p = Y_delta }); - transcript.appendPoint("Y_claimed", .{ .p = Y_claimed }); + comptime var session = Transcript.getSession(contract); + defer session.finish(); - const c = transcript.challengeScalar("c").toBytes(); - const c_max_proof = Edwards25519.scalar.sub(c, c_equality.toBytes()); + transcript.appendNoValidate(&session, "Y_max_proof", Y_max_proof); + transcript.appendNoValidate(&session, "Y_delta", Y_delta); + transcript.appendNoValidate(&session, "Y_claimed", Y_claimed); - _ = transcript.challengeScalar("w"); + const c = transcript.challengeScalar(&session, "c").toBytes(); + const c_max_proof = Edwards25519.scalar.sub(c, c_equality.toBytes()); const z_max_proof = Scalar.fromBytes(c_max_proof).mul(r_percentage).add(y_max_proof); @@ -132,6 +160,15 @@ pub const Proof = struct { .c_max_proof = Scalar.fromBytes(c_max_proof), }; + if (builtin.mode == .Debug) { + transcript.append(&session, .scalar, "z_max", z_max_proof); + transcript.append(&session, .scalar, "c_max_proof", Scalar.fromBytes(c_max_proof)); + transcript.append(&session, .scalar, "z_x", z_x); + transcript.append(&session, .scalar, "z_delta_real", z_delta); + transcript.append(&session, .scalar, "z_claimed", z_claimed); + _ = transcript.challengeScalar(&session, "w"); + } + return .{ .max_proof = max_proof, .equality_proof = equality_proof, @@ -152,10 +189,10 @@ pub const Proof = struct { const z_max_proof = Scalar.random(); const c_max_proof = 
Scalar.random(); - const Y_max_proof = weak_mul.mulMulti(3, .{ - pedersen.H.p, - C_percentage.p, - pedersen.G.p, + const Y_max_proof = ed25519.mulMulti(3, .{ + pedersen.H, + C_percentage, + pedersen.G, }, .{ z_max_proof.toBytes(), Edwards25519.scalar.neg(c_max_proof.toBytes()), @@ -163,57 +200,74 @@ pub const Proof = struct { }); const max_proof: MaxProof = .{ - .Y_max_proof = .{ .p = Y_max_proof }, + .Y_max_proof = Y_max_proof, .z_max_proof = z_max_proof, .c_max_proof = c_max_proof, }; - const x = pedersen.scalarFromInt(u64, delta_amount); + var x = pedersen.scalarFromInt(u64, delta_amount); + defer std.crypto.secureZero(u64, &x.limbs); const r_delta = delta_opening.scalar; const r_claimed = claimed_opening.scalar; - const y_x = Scalar.random(); - const y_delta = Scalar.random(); - const y_claimed = Scalar.random(); + var y_x = Scalar.random(); + var y_delta = Scalar.random(); + var y_claimed = Scalar.random(); + defer { + std.crypto.secureZero(u64, &y_x.limbs); + std.crypto.secureZero(u64, &y_delta.limbs); + std.crypto.secureZero(u64, &y_claimed.limbs); + } - const Y_delta = weak_mul.mulMulti(2, .{ - pedersen.G.p, - pedersen.H.p, + const Y_delta = ed25519.mulMulti(2, .{ + pedersen.G, + pedersen.H, }, .{ y_x.toBytes(), y_delta.toBytes(), }); - const Y_claimed = weak_mul.mulMulti(2, .{ - pedersen.G.p, - pedersen.H.p, + const Y_claimed = ed25519.mulMulti(2, .{ + pedersen.G, + pedersen.H, }, .{ y_x.toBytes(), y_claimed.toBytes(), }); - transcript.appendPoint("Y_max_proof", .{ .p = Y_max_proof }); - transcript.appendPoint("Y_delta", .{ .p = Y_delta }); - transcript.appendPoint("Y_claimed", .{ .p = Y_claimed }); + comptime var session = Transcript.getSession(contract); + defer session.finish(); - const c = transcript.challengeScalar("c").toBytes(); - const c_equality = Scalar.fromBytes(Edwards25519.scalar.sub(c, c_max_proof.toBytes())); + transcript.appendNoValidate(&session, "Y_max_proof", Y_max_proof); + transcript.appendNoValidate(&session, "Y_delta", Y_delta); + 
transcript.appendNoValidate(&session, "Y_claimed", Y_claimed); - _ = transcript.challengeScalar("w"); + const c = transcript.challengeScalar(&session, "c").toBytes(); + var c_equality = Scalar.fromBytes(Edwards25519.scalar.sub(c, c_max_proof.toBytes())); + defer std.crypto.secureZero(u64, &c_equality.limbs); const z_x = c_equality.mul(x).add(y_x); const z_delta = c_equality.mul(r_delta).add(y_delta); const z_claimed = c_equality.mul(r_claimed).add(y_claimed); const equality_proof: EqualityProof = .{ - .Y_delta = .{ .p = Y_delta }, - .Y_claimed = .{ .p = Y_claimed }, + .Y_delta = Y_delta, + .Y_claimed = Y_claimed, .z_x = z_x, .z_delta = z_delta, .z_claimed = z_claimed, }; + if (builtin.mode == .Debug) { + transcript.append(&session, .scalar, "z_max", z_max_proof); + transcript.append(&session, .scalar, "c_max_proof", c_max_proof); + transcript.append(&session, .scalar, "z_x", z_x); + transcript.append(&session, .scalar, "z_delta_real", z_delta); + transcript.append(&session, .scalar, "z_claimed", z_claimed); + _ = transcript.challengeScalar(&session, "w"); + } + return .{ .max_proof = max_proof, .equality_proof = equality_proof, @@ -228,7 +282,7 @@ pub const Proof = struct { max_value: u64, transcript: *Transcript, ) !void { - transcript.appendDomSep("percentage-with-cap-proof"); + transcript.appendDomSep(.@"percentage-with-cap-proof"); const m = pedersen.scalarFromInt(u64, max_value); @@ -236,9 +290,12 @@ pub const Proof = struct { const C_delta = delta_commitment.point; const C_claimed = claimed_commitment.point; - try transcript.validateAndAppendPoint("Y_max_proof", self.max_proof.Y_max_proof); - try transcript.validateAndAppendPoint("Y_delta", self.equality_proof.Y_delta); - try transcript.validateAndAppendPoint("Y_claimed", self.equality_proof.Y_claimed); + comptime var session = Transcript.getSession(contract); + defer session.finish(); + + try transcript.append(&session, .validate_point, "Y_max_proof", self.max_proof.Y_max_proof); + try 
transcript.append(&session, .validate_point, "Y_delta", self.equality_proof.Y_delta); + try transcript.append(&session, .validate_point, "Y_claimed", self.equality_proof.Y_claimed); const Y_max = self.max_proof.Y_max_proof; const z_max = self.max_proof.z_max_proof; @@ -250,16 +307,17 @@ pub const Proof = struct { const z_delta_real = self.equality_proof.z_delta; const z_claimed = self.equality_proof.z_claimed; - const c = transcript.challengeScalar("c").toBytes(); + const c = transcript.challengeScalar(&session, "c").toBytes(); const c_max_proof = self.max_proof.c_max_proof; const c_equality = Edwards25519.scalar.sub(c, c_max_proof.toBytes()); - transcript.appendScalar("z_max", z_max); - transcript.appendScalar("z_x", z_x); - transcript.appendScalar("z_delta_real", z_delta_real); - transcript.appendScalar("z_claimed", z_claimed); + transcript.append(&session, .scalar, "z_max", z_max); + transcript.append(&session, .scalar, "c_max_proof", c_max_proof); + transcript.append(&session, .scalar, "z_x", z_x); + transcript.append(&session, .scalar, "z_delta_real", z_delta_real); + transcript.append(&session, .scalar, "z_claimed", z_claimed); - const w = transcript.challengeScalar("w"); + const w = transcript.challengeScalar(&session, "w"); const ww = w.mul(w); // We store points and scalars in the following arrays: @@ -289,14 +347,14 @@ pub const Proof = struct { break :h Edwards25519.scalar.sub(z_max.toBytes(), b.toBytes()); }; - const check = weak_mul.mulMulti(7, .{ - pedersen.G.p, - pedersen.H.p, - C_max.p, - Y_delta_real.p, - C_delta.p, - Y_claimed.p, - C_claimed.p, + const check = ed25519.mulMulti(7, .{ + pedersen.G, + pedersen.H, + C_max, + Y_delta_real, + C_delta, + Y_claimed, + C_claimed, }, .{ g, // c_max * m - (w + ww) z_x h, // z_max - (w z_delta + ww z_claimed) @@ -307,7 +365,7 @@ pub const Proof = struct { ww.mul(Scalar.fromBytes(c_equality)).toBytes(), // ww * c_eq }); - if (!Y_max.equivalent(.{ .p = check })) { + if (!Y_max.equivalent(check)) { return 
error.AlgebraicRelation; } } @@ -395,14 +453,16 @@ pub const Data = struct { self.claimed_commitment.toBytes() ++ @as([8]u8, @bitCast(self.max_value)); } + // zig fmt: off fn newTranscript(self: Context) Transcript { - var transcript = Transcript.init("percentage-with-cap-instruction"); - transcript.appendCommitment("percentage-commitment", self.percentage_commitment); - transcript.appendCommitment("delta-commitment", self.delta_commitment); - transcript.appendCommitment("claimed-commitment", self.claimed_commitment); - transcript.appendU64("max-value", self.max_value); - return transcript; + return .init(.@"percentage-with-cap-instruction", &.{ + .{ .label = "percentage-commitment", .message = .{ .commitment = self.percentage_commitment } }, + .{ .label = "delta-commitment", .message = .{ .commitment = self.delta_commitment } }, + .{ .label = "claimed-commitment", .message = .{ .commitment = self.claimed_commitment } }, + .{ .label = "max-value", .message = .{ .u64 = self.max_value } }, + }); } + // zig fmt: on }; pub fn init( @@ -593,8 +653,8 @@ test "above max proof" { const claimed_commitment, const claimed_opening = pedersen.initValue(u64, 0); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); var proof = Proof.init( &percentage_commitment, @@ -657,8 +717,8 @@ test "below max proof" { try std.testing.expect(b.equivalent(d)); } - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); var proof = Proof.init( &percentage_commitment, @@ -711,8 +771,8 @@ test "is zero" { }; const claimed_commitment, const claimed_opening = pedersen.initValue(u64, delta); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = 
Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); var proof = Proof.init( &percentage_commitment, @@ -753,7 +813,7 @@ test "proof string" { const proof = try Proof.fromBase64(proof_string); // zig fmt: on - var verifier_transcript = Transcript.init("test"); + var verifier_transcript = Transcript.initTest("test"); try proof.verify( &percentage_commitment, diff --git a/src/zksdk/sigma_proofs/pubkey_validity.zig b/src/zksdk/sigma_proofs/pubkey_validity.zig index 024bb10da1..f439c49113 100644 --- a/src/zksdk/sigma_proofs/pubkey_validity.zig +++ b/src/zksdk/sigma_proofs/pubkey_validity.zig @@ -11,31 +11,41 @@ const ElGamalPubkey = sig.zksdk.ElGamalPubkey; const Ristretto255 = std.crypto.ecc.Ristretto255; const Scalar = std.crypto.ecc.Edwards25519.scalar.Scalar; const Transcript = sig.zksdk.Transcript; -const weak_mul = sig.vm.syscalls.ecc.weak_mul; +const ed25519 = sig.crypto.ed25519; const ProofType = sig.runtime.program.zk_elgamal.ProofType; pub const Proof = struct { Y: Ristretto255, z: Scalar, + const contract: Transcript.Contract = &.{ + .{ .label = "Y", .type = .validate_point }, + .{ .label = "c", .type = .challenge }, + }; + pub fn init( kp: *const ElGamalKeypair, transcript: *Transcript, ) Proof { - transcript.appendDomSep("pubkey-proof"); + transcript.appendDomSep(.@"pubkey-proof"); const s = kp.secret.scalar; std.debug.assert(!s.isZero()); + var s_inv = s.invert(); + defer std.crypto.secureZero(u64, &s_inv.limbs); var y = Scalar.random(); defer std.crypto.secureZero(u64, &y.limbs); // Scalar.random() cannot return zero, and H isn't an identity - const Y = pedersen.H.mul(y.toBytes()) catch unreachable; - transcript.appendPoint("Y", Y); + const Y = ed25519.straus.mulByKnown(pedersen.H, y.toBytes()); + + comptime var session = Transcript.getSession(contract); + defer session.finish(); - const c = transcript.challengeScalar("c"); - const z = c.mul(s.invert()).add(y); + 
transcript.appendNoValidate(&session, "Y", Y); + const c = transcript.challengeScalar(&session, "c"); + const z = c.mul(s_inv).add(y); return .{ .Y = Y, @@ -48,10 +58,16 @@ pub const Proof = struct { pubkey: *const ElGamalPubkey, transcript: *Transcript, ) !void { - transcript.appendDomSep("pubkey-proof"); + transcript.appendDomSep(.@"pubkey-proof"); + + // [agave] https://github.com/solana-program/zk-elgamal-proof/blob/8c84822593d393c2305eea917fdffd1ec2525aa7/zk-sdk/src/sigma_proofs/pubkey_validity.rs#L107-L109 + try pubkey.point.rejectIdentity(); + + comptime var session = Transcript.getSession(contract); + defer session.finish(); - try transcript.validateAndAppendPoint("Y", self.Y); - const c = transcript.challengeScalar("c"); + try transcript.append(&session, .validate_point, "Y", self.Y); + const c = transcript.challengeScalar(&session, "c"); // points scalars // 0 H z @@ -59,15 +75,15 @@ pub const Proof = struct { // ----------------------- MSM // Y - const check = weak_mul.mulMulti(2, .{ - pedersen.H.p, - pubkey.point.p, + const check = ed25519.mulMulti(2, .{ + pedersen.H, + pubkey.point, }, .{ self.z.toBytes(), Edwards25519.scalar.neg(c.toBytes()), }); - if (!self.Y.equivalent(.{ .p = check })) { + if (!self.Y.equivalent(check)) { return error.AlgebraicRelation; } } @@ -119,9 +135,9 @@ pub const Data = struct { } fn newTranscript(self: Context) Transcript { - var transcript = Transcript.init("pubkey-validity-instruction"); - transcript.appendPubkey("pubkey", self.pubkey); - return transcript; + return .init(.@"pubkey-validity-instruction", &.{ + .{ .label = "pubkey", .message = .{ .pubkey = self.pubkey } }, + }); } }; @@ -162,8 +178,8 @@ pub const Data = struct { test "correctness" { const kp = ElGamalKeypair.random(); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = 
Proof.init(&kp, &prover_transcript); try proof.verify(&kp.public, &verifier_transcript); @@ -173,8 +189,8 @@ test "incorrect pubkey" { const kp = ElGamalKeypair.random(); const incorrect_kp = ElGamalKeypair.random(); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const proof = Proof.init(&kp, &prover_transcript); @@ -193,6 +209,6 @@ test "proof string" { const proof = try Proof.fromBase64(proof_string); // zig fmt: on - var verifier_transcript = Transcript.init("test"); + var verifier_transcript = Transcript.initTest("test"); try proof.verify(&pubkey, &verifier_transcript); } diff --git a/src/zksdk/sigma_proofs/zero_ciphertext.zig b/src/zksdk/sigma_proofs/zero_ciphertext.zig index eb2b55555e..e6ddccfdda 100644 --- a/src/zksdk/sigma_proofs/zero_ciphertext.zig +++ b/src/zksdk/sigma_proofs/zero_ciphertext.zig @@ -13,7 +13,7 @@ const ElGamalPubkey = sig.zksdk.ElGamalPubkey; const Ristretto255 = std.crypto.ecc.Ristretto255; const Scalar = std.crypto.ecc.Edwards25519.scalar.Scalar; const Transcript = sig.zksdk.Transcript; -const weak_mul = sig.vm.syscalls.ecc.weak_mul; +const ed25519 = sig.crypto.ed25519; const ProofType = sig.runtime.program.zk_elgamal.ProofType; pub const Proof = struct { @@ -21,12 +21,21 @@ pub const Proof = struct { D: Ristretto255, z: Scalar, + const contract: Transcript.Contract = &.{ + .{ .label = "Y_P", .type = .validate_point }, + .{ .label = "Y_D", .type = .point }, + .{ .label = "c", .type = .challenge }, + + .{ .label = "z", .type = .scalar }, + .{ .label = "w", .type = .challenge }, // w used for batch verification + }; + pub fn init( kp: *const ElGamalKeypair, ciphertext: *const ElGamalCiphertext, transcript: *Transcript, ) Proof { - transcript.appendDomSep("zero-ciphertext-proof"); + transcript.appendDomSep(.@"zero-ciphertext-proof"); const P = kp.public.point; const s = 
kp.secret.scalar; @@ -36,19 +45,26 @@ pub const Proof = struct { var y = Scalar.random(); defer std.crypto.secureZero(u64, &y.limbs); - // random() guarantees that y isn't zero and P must not be zero. - const Y_P = P.mul(y.toBytes()) catch unreachable; - const Y_D: Ristretto255 = .{ .p = weak_mul.mul(D.p, y.toBytes()) }; + const Y_P, const Y_D = ed25519.mulManyWithSameScalar( + 2, + .{ P, D }, + y.toBytes(), + ); - transcript.appendPoint("Y_P", Y_P); - transcript.appendPoint("Y_D", Y_D); + comptime var session = Transcript.getSession(contract); + defer session.finish(); - const c = transcript.challengeScalar("c"); - _ = transcript.challengeScalar("w"); + transcript.appendNoValidate(&session, "Y_P", Y_P); + transcript.append(&session, .point, "Y_D", Y_D); + + const c = transcript.challengeScalar(&session, "c"); // compute the masked secret key const z = s.mul(c).add(y); + transcript.append(&session, .scalar, "z", z); + _ = transcript.challengeScalar(&session, "w"); + return .{ .P = Y_P, .D = Y_D, @@ -62,21 +78,23 @@ pub const Proof = struct { ciphertext: *const ElGamalCiphertext, transcript: *Transcript, ) !void { - transcript.appendDomSep("zero-ciphertext-proof"); + transcript.appendDomSep(.@"zero-ciphertext-proof"); const P = pubkey.point; const C = ciphertext.commitment.point; const D = ciphertext.handle.point; const Y_P = self.P; - // record Y in transcript and receieve challenge scalars - try transcript.validateAndAppendPoint("Y_P", self.P); - transcript.appendPoint("Y_D", self.D); + comptime var session = Transcript.getSession(contract); + defer session.finish(); + + try transcript.append(&session, .validate_point, "Y_P", self.P); + transcript.append(&session, .point, "Y_D", self.D); - const c = transcript.challengeScalar("c"); + const c = transcript.challengeScalar(&session, "c"); - transcript.appendScalar("z", self.z); - const w = transcript.challengeScalar("w"); // w used for batch verification + transcript.append(&session, .scalar, "z", self.z); + const w 
= transcript.challengeScalar(&session, "w"); const w_negated = Edwards25519.scalar.neg(w.toBytes()); const Y_D = self.D; @@ -90,15 +108,13 @@ pub const Proof = struct { // ----------------------- MSM // Y_P - // we need to use weak_mul since the protocol itself relies - // on producing identity points in order to indicate that the proof was valid. // zig fmt: off - const check = weak_mul.mulMulti(5, .{ - pedersen.H.p, - P.p, - C.p, - D.p, - Y_D.p, + const check = ed25519.mulMulti(5, .{ + pedersen.H, + P, + C, + D, + Y_D, }, .{ Edwards25519.scalar.neg(c.toBytes()), // -c self.z.toBytes(), // z @@ -108,7 +124,7 @@ pub const Proof = struct { }); // zig fmt: on - if (!Y_P.equivalent(.{ .p = check })) { + if (!Y_P.equivalent(check)) { return error.AlgebraicRelation; } } @@ -166,10 +182,10 @@ pub const Data = struct { } fn newTranscript(self: Context) Transcript { - var transcript = Transcript.init("zero-ciphertext-instruction"); - transcript.appendPubkey("pubkey", self.pubkey); - transcript.appendCiphertext("ciphertext", self.ciphertext); - return transcript; + return .init(.@"zero-ciphertext-instruction", &.{ + .{ .label = "pubkey", .message = .{ .pubkey = self.pubkey } }, + .{ .label = "ciphertext", .message = .{ .ciphertext = self.ciphertext } }, + }); } }; @@ -229,8 +245,8 @@ pub const Data = struct { test "sanity" { var kp = ElGamalKeypair.random(); - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); // general case: encryption of 0 { @@ -254,8 +270,8 @@ test "edge case" { var kp = ElGamalKeypair.random(); { - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); // All zero ciphertext should be a valid encoding for the scalar "0" var ciphertext = 
try ElGamalCiphertext.fromBytes(.{0} ** 64); @@ -269,11 +285,11 @@ test "edge case" { { // zeroed commitment - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const zeroed_commitment: pedersen.Commitment = .{ - .point = try Ristretto255.fromBytes(.{0} ** 32), + .point = try Ristretto255.fromBytes(@splat(0)), }; const opening = pedersen.Opening.random(); const handle = pedersen.DecryptHandle.init(&kp.public, &opening); @@ -293,13 +309,13 @@ test "edge case" { { // zeroed handle - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); const commitment, _ = pedersen.initValue(u64, 0); const ciphertext: ElGamalCiphertext = .{ .commitment = commitment, - .handle = .{ .point = try Ristretto255.fromBytes(.{0} ** 32) }, + .handle = .{ .point = try Ristretto255.fromBytes(@splat(0)) }, }; const proof = Proof.init(&kp, &ciphertext, &prover_transcript); @@ -311,10 +327,10 @@ test "edge case" { // if the public key is zeroed, then the proof should always reject { - var prover_transcript = Transcript.init("test"); - var verifier_transcript = Transcript.init("test"); + var prover_transcript = Transcript.initTest("Test"); + var verifier_transcript = Transcript.initTest("Test"); - const public: ElGamalPubkey = .{ .point = try Ristretto255.fromBytes(.{0} ** 32) }; + const public: ElGamalPubkey = .{ .point = try Ristretto255.fromBytes(@splat(0)) }; const ciphertext = el_gamal.encrypt(u64, 0, &public); const proof = Proof.init(&kp, &ciphertext, &prover_transcript); @@ -337,6 +353,6 @@ test "proof string" { const proof = try Proof.fromBase64(proof_string); // zig fmt: on - var verifier_transcript = Transcript.init("test"); + var verifier_transcript = 
Transcript.initTest("test"); try proof.verify(&pubkey, &ciphertext, &verifier_transcript); }