diff --git a/build/main.ses.cjs b/build/main.ses.cjs
new file mode 100644
index 00000000..bcb46e79
--- /dev/null
+++ b/build/main.ses.cjs
@@ -0,0 +1,7742 @@
+'use strict';
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+var binFileUtils = require('@iden3/binfileutils');
+var ffjavascript = require('ffjavascript');
+var Blake2b = require('blake2b-wasm');
+require('readline');
+var crypto = require('crypto');
+var fastFile = require('fastfile');
+var circom_runtime = require('circom_runtime');
+var r1csfile = require('r1csfile');
+var jsSha3 = require('js-sha3');
+
+function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
+
+function _interopNamespace(e) {
+ if (e && e.__esModule) return e;
+ var n = Object.create(null);
+ if (e) {
+ Object.keys(e).forEach(function (k) {
+ if (k !== 'default') {
+ var d = Object.getOwnPropertyDescriptor(e, k);
+ Object.defineProperty(n, k, d.get ? d : {
+ enumerable: true,
+ get: function () { return e[k]; }
+ });
+ }
+ });
+ }
+ n["default"] = e;
+ return Object.freeze(n);
+}
+
+var binFileUtils__namespace = /*#__PURE__*/_interopNamespace(binFileUtils);
+var Blake2b__default = /*#__PURE__*/_interopDefaultLegacy(Blake2b);
+var crypto__default = /*#__PURE__*/_interopDefaultLegacy(crypto);
+var fastFile__namespace = /*#__PURE__*/_interopNamespace(fastFile);
+var jsSha3__default = /*#__PURE__*/_interopDefaultLegacy(jsSha3);
+
+ffjavascript.Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
+ffjavascript.Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");
+
+const bls12381q = ffjavascript.Scalar.e("1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab", 16);
+const bn128q = ffjavascript.Scalar.e("21888242871839275222246405745257275088696311157297823662689037894645226208583");
+
+const singleThread = true;
+
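+// Selects the ffjavascript curve implementation by matching the base field
+// modulus q against the BN128/BLS12-381 primes defined above. Both curves are
+// built in single-thread mode (see `singleThread`), so this build spawns no workers.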
+async function getCurveFromQ(q) {
+ let curve;
+ if (ffjavascript.Scalar.eq(q, bn128q)) {
+ curve = await ffjavascript.buildBn128(singleThread);
+ } else if (ffjavascript.Scalar.eq(q, bls12381q)) {
+ curve = await ffjavascript.buildBls12381(singleThread);
+ } else {
+ throw new Error(`Curve not supported: ${ffjavascript.Scalar.toString(q)}`);
+ }
+ return curve;
+}
+
+async function getCurveFromName(name) {
+ let curve;
+ const normName = normalizeName(name);
+ if (["BN128", "BN254", "ALTBN128"].indexOf(normName) >= 0) {
+ curve = await ffjavascript.buildBn128(singleThread);
+ } else if (["BLS12381"].indexOf(normName) >= 0) {
+ curve = await ffjavascript.buildBls12381(singleThread);
+ } else {
+ throw new Error(`Curve not supported: ${name}`);
+ }
+ return curve;
+
+ function normalizeName(n) {
+ return n.toUpperCase().match(/[A-Za-z0-9]+/g).join("");
+ }
+
+}
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+const _revTable = [];
+for (let i=0; i<256; i++) {
+ _revTable[i] = _revSlow(i, 8);
+}
+
+function _revSlow(idx, bits) {
+ let res =0;
+ let a = idx;
+    for (let i=0; i<bits; i++) {
+        res <<= 1;
+        res = res | (a & 1);
+        a >>= 1;
+ }
+ return res;
+}
+
+
+function log2( V )
+{
+ return( ( ( V & 0xFFFF0000 ) !== 0 ? ( V &= 0xFFFF0000, 16 ) : 0 ) | ( ( V & 0xFF00FF00 ) !== 0 ? ( V &= 0xFF00FF00, 8 ) : 0 ) | ( ( V & 0xF0F0F0F0 ) !== 0 ? ( V &= 0xF0F0F0F0, 4 ) : 0 ) | ( ( V & 0xCCCCCCCC ) !== 0 ? ( V &= 0xCCCCCCCC, 2 ) : 0 ) | ( ( V & 0xAAAAAAAA ) !== 0 ) );
+}
+
+
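+// Renders a 64-byte digest as four rows of four big-endian 32-bit words,
+// the layout snarkjs uses when printing challenge and response hashes.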
+function formatHash(b, title) {
+ const a = new DataView(b.buffer, b.byteOffset, b.byteLength);
+ let S = "";
+ for (let i=0; i<4; i++) {
+ if (i>0) S += "\n";
+ S += "\t\t";
+ for (let j=0; j<4; j++) {
+ if (j>0) S += " ";
+ S += a.getUint32(i*16+j*4).toString(16).padStart(8, "0");
+ }
+ }
+ if (title) S = title + "\n" + S;
+ return S;
+}
+
+function hashIsEqual(h1, h2) {
+ if (h1.byteLength != h2.byteLength) return false;
+ var dv1 = new Int8Array(h1);
+ var dv2 = new Int8Array(h2);
+ for (var i = 0 ; i != h1.byteLength ; i++)
+ {
+ if (dv1[i] != dv2[i]) return false;
+ }
+ return true;
+}
+
+function cloneHasher(h) {
+ const ph = h.getPartialHash();
+ const res = Blake2b__default["default"](64);
+ res.setPartialHash(ph);
+ return res;
+}
+
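+// Checks that g1s/g1sx and g2s/g2sx share the same discrete-log ratio using a
+// single multi-pairing, e(g1s, g2sx) * e(-g1sx, g2s) == 1 (the two-pairing form
+// is kept in the comment below). Zero points are rejected up front so a
+// degenerate input cannot satisfy the check trivially.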
+async function sameRatio$2(curve, g1s, g1sx, g2s, g2sx) {
+ if (curve.G1.isZero(g1s)) return false;
+ if (curve.G1.isZero(g1sx)) return false;
+ if (curve.G2.isZero(g2s)) return false;
+ if (curve.G2.isZero(g2sx)) return false;
+ // return curve.F12.eq(curve.pairing(g1s, g2sx), curve.pairing(g1sx, g2s));
+ const res = await curve.pairingEq(g1s, g2sx, curve.G1.neg(g1sx), g2s);
+ return res;
+}
+
+
+function askEntropy() {
+ {
+ return window.prompt("Enter a random text. (Entropy): ", "");
+ }
+}
+
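+// Builds a ChaCha RNG from Blake2b(64 bytes of OS randomness || the
+// user-supplied entropy text); the first eight big-endian u32 words of the
+// digest form the seed.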
+async function getRandomRng(entropy) {
+ // Generate a random Rng
+ while (!entropy) {
+ entropy = await askEntropy();
+ }
+ const hasher = Blake2b__default["default"](64);
+ hasher.update(crypto__default["default"].randomBytes(64));
+ const enc = new TextEncoder(); // always utf-8
+ hasher.update(enc.encode(entropy));
+ const hash = Buffer.from(hasher.digest());
+
+ const seed = [];
+ for (let i=0;i<8;i++) {
+ seed[i] = hash.readUInt32BE(i*4);
+ }
+ const rng = new ffjavascript.ChaCha(seed);
+ return rng;
+}
+
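+// Derives a deterministic RNG from a public random beacon: the beacon hash is
+// hashed repeatedly 2^numIterationsExp times (split into inner/outer loops to
+// stay within 32-bit shifts) and the final digest seeds ChaCha.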
+function rngFromBeaconParams(beaconHash, numIterationsExp) {
+ let nIterationsInner;
+ let nIterationsOuter;
+ if (numIterationsExp<32) {
+ nIterationsInner = (1 << numIterationsExp) >>> 0;
+ nIterationsOuter = 1;
+ } else {
+ nIterationsInner = 0x100000000;
+ nIterationsOuter = (1 << (numIterationsExp-32)) >>> 0;
+ }
+
+ let curHash = beaconHash;
+    for (let i=0; i<nIterationsOuter; i++) {
+        for (let j=0; j<nIterationsInner; j++) {
+            curHash = crypto__default["default"].createHash("sha256").update(curHash).digest();
+        }
+    }
+
+    const curHashV = new DataView(curHash.buffer, curHash.byteOffset, curHash.byteLength);
+    const seed = [];
+    for (let i=0; i<8; i++) {
+        seed[i] = curHashV.getUint32(i*4, false);
+    }
+
+    const rng = new ffjavascript.ChaCha(seed);
+    return rng;
+}
+
+/*
+    Copyright 2018 0KIMS association.
+
+    This file is part of snarkJS.
+
+    snarkJS is a free software: you can redistribute it and/or modify it
+    under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    snarkJS is distributed in the hope that it will be useful, but WITHOUT
+    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+    License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+async function writeHeader(fd, zkey) {
+
+ // Write the header
+ ///////////
+ await binFileUtils__namespace.startWriteSection(fd, 1);
+ await fd.writeULE32(1); // Groth
+ await binFileUtils__namespace.endWriteSection(fd);
+
+ // Write the Groth header section
+ ///////////
+
+ const curve = await getCurveFromQ(zkey.q);
+
+ await binFileUtils__namespace.startWriteSection(fd, 2);
+ const primeQ = curve.q;
+ const n8q = (Math.floor( (ffjavascript.Scalar.bitLength(primeQ) - 1) / 64) +1)*8;
+
+ const primeR = curve.r;
+ const n8r = (Math.floor( (ffjavascript.Scalar.bitLength(primeR) - 1) / 64) +1)*8;
+
+ await fd.writeULE32(n8q);
+ await binFileUtils__namespace.writeBigInt(fd, primeQ, n8q);
+ await fd.writeULE32(n8r);
+ await binFileUtils__namespace.writeBigInt(fd, primeR, n8r);
+    await fd.writeULE32(zkey.nVars); // Total number of vars
+ await fd.writeULE32(zkey.nPublic); // Total number of public vars (not including ONE)
+ await fd.writeULE32(zkey.domainSize); // domainSize
+ await writeG1(fd, curve, zkey.vk_alpha_1);
+ await writeG1(fd, curve, zkey.vk_beta_1);
+ await writeG2(fd, curve, zkey.vk_beta_2);
+ await writeG2(fd, curve, zkey.vk_gamma_2);
+ await writeG1(fd, curve, zkey.vk_delta_1);
+ await writeG2(fd, curve, zkey.vk_delta_2);
+
+ await binFileUtils__namespace.endWriteSection(fd);
+
+
+}
+
+async function writeG1(fd, curve, p) {
+ const buff = new Uint8Array(curve.G1.F.n8*2);
+ curve.G1.toRprLEM(buff, 0, p);
+ await fd.write(buff);
+}
+
+async function writeG2(fd, curve, p) {
+ const buff = new Uint8Array(curve.G2.F.n8*2);
+ curve.G2.toRprLEM(buff, 0, p);
+ await fd.write(buff);
+}
+
+async function readG1(fd, curve, toObject) {
+ const buff = await fd.read(curve.G1.F.n8*2);
+ const res = curve.G1.fromRprLEM(buff, 0);
+ return toObject ? curve.G1.toObject(res) : res;
+}
+
+async function readG2(fd, curve, toObject) {
+ const buff = await fd.read(curve.G2.F.n8*2);
+ const res = curve.G2.fromRprLEM(buff, 0);
+ return toObject ? curve.G2.toObject(res) : res;
+}
+
+
+async function readHeader$1(fd, sections, toObject) {
+ // Read Header
+ /////////////////////
+ await binFileUtils__namespace.startReadUniqueSection(fd, sections, 1);
+ const protocolId = await fd.readULE32();
+ await binFileUtils__namespace.endReadSection(fd);
+
+ if (protocolId == 1) {
+ return await readHeaderGroth16(fd, sections, toObject);
+ } else if (protocolId == 2) {
+        return await readHeaderPlonk(fd, sections, toObject);
+ } else {
+ throw new Error("Protocol not supported: ");
+ }
+}
+
+
+
+
+async function readHeaderGroth16(fd, sections, toObject) {
+ const zkey = {};
+
+ zkey.protocol = "groth16";
+
+ // Read Groth Header
+ /////////////////////
+ await binFileUtils__namespace.startReadUniqueSection(fd, sections, 2);
+ const n8q = await fd.readULE32();
+ zkey.n8q = n8q;
+ zkey.q = await binFileUtils__namespace.readBigInt(fd, n8q);
+
+ const n8r = await fd.readULE32();
+ zkey.n8r = n8r;
+ zkey.r = await binFileUtils__namespace.readBigInt(fd, n8r);
+
+ let curve = await getCurveFromQ(zkey.q);
+
+ zkey.nVars = await fd.readULE32();
+ zkey.nPublic = await fd.readULE32();
+ zkey.domainSize = await fd.readULE32();
+ zkey.power = log2(zkey.domainSize);
+ zkey.vk_alpha_1 = await readG1(fd, curve, toObject);
+ zkey.vk_beta_1 = await readG1(fd, curve, toObject);
+ zkey.vk_beta_2 = await readG2(fd, curve, toObject);
+ zkey.vk_gamma_2 = await readG2(fd, curve, toObject);
+ zkey.vk_delta_1 = await readG1(fd, curve, toObject);
+ zkey.vk_delta_2 = await readG2(fd, curve, toObject);
+ await binFileUtils__namespace.endReadSection(fd);
+
+ return zkey;
+
+}
+
+
+
+
+async function readHeaderPlonk(fd, sections, toObject) {
+ const zkey = {};
+
+ zkey.protocol = "plonk";
+
+ // Read Plonk Header
+ /////////////////////
+ await binFileUtils__namespace.startReadUniqueSection(fd, sections, 2);
+ const n8q = await fd.readULE32();
+ zkey.n8q = n8q;
+ zkey.q = await binFileUtils__namespace.readBigInt(fd, n8q);
+
+ const n8r = await fd.readULE32();
+ zkey.n8r = n8r;
+ zkey.r = await binFileUtils__namespace.readBigInt(fd, n8r);
+
+ let curve = await getCurveFromQ(zkey.q);
+
+ zkey.nVars = await fd.readULE32();
+ zkey.nPublic = await fd.readULE32();
+ zkey.domainSize = await fd.readULE32();
+ zkey.power = log2(zkey.domainSize);
+ zkey.nAdditions = await fd.readULE32();
+ zkey.nConstrains = await fd.readULE32();
+ zkey.k1 = await fd.read(n8r);
+ zkey.k2 = await fd.read(n8r);
+
+ zkey.Qm = await readG1(fd, curve, toObject);
+ zkey.Ql = await readG1(fd, curve, toObject);
+ zkey.Qr = await readG1(fd, curve, toObject);
+ zkey.Qo = await readG1(fd, curve, toObject);
+ zkey.Qc = await readG1(fd, curve, toObject);
+ zkey.S1 = await readG1(fd, curve, toObject);
+ zkey.S2 = await readG1(fd, curve, toObject);
+ zkey.S3 = await readG1(fd, curve, toObject);
+ zkey.X_2 = await readG2(fd, curve, toObject);
+
+ await binFileUtils__namespace.endReadSection(fd);
+
+ return zkey;
+}
+
+async function readZKey(fileName, toObject) {
+ const {fd, sections} = await binFileUtils__namespace.readBinFile(fileName, "zkey", 1);
+
+ const zkey = await readHeader$1(fd, sections, "groth16");
+
+ const Fr = new ffjavascript.F1Field(zkey.r);
+ const Rr = ffjavascript.Scalar.mod(ffjavascript.Scalar.shl(1, zkey.n8r*8), zkey.r);
+ const Rri = Fr.inv(Rr);
+ const Rri2 = Fr.mul(Rri, Rri);
+
+ let curve = await getCurveFromQ(zkey.q);
+
+ // Read IC Section
+ ///////////
+ await binFileUtils__namespace.startReadUniqueSection(fd, sections, 3);
+ zkey.IC = [];
+ for (let i=0; i<= zkey.nPublic; i++) {
+ const P = await readG1(fd, curve, toObject);
+ zkey.IC.push(P);
+ }
+ await binFileUtils__namespace.endReadSection(fd);
+
+
+ // Read Coefs
+ ///////////
+ await binFileUtils__namespace.startReadUniqueSection(fd, sections, 4);
+ const nCCoefs = await fd.readULE32();
+ zkey.ccoefs = [];
+    for (let i=0; i<nCCoefs; i++) {
+        const m = await fd.readULE32();
+        const c = await fd.readULE32();
+        const s = await fd.readULE32();
+        const v = await readFr2();
+        zkey.ccoefs.push({
+            matrix: m,
+            constraint: c,
+            signal: s,
+            value: v
+        });
+    }
+    await binFileUtils__namespace.endReadSection(fd);
+
+    await fd.close();
+
+    return zkey;
+
+    async function readFr2() {
+        const n = await binFileUtils__namespace.readBigInt(fd, zkey.n8r);
+        return Fr.mul(n, Rri2);
+    }
+}
+
+async function writeContribution$1(fd, curve, c) {
+    await writeG1(fd, curve, c.deltaAfter);
+    await writeG1(fd, curve, c.delta.g1_s);
+    await writeG1(fd, curve, c.delta.g1_sx);
+    await writeG2(fd, curve, c.delta.g2_spx);
+    await fd.write(c.transcript);
+
+    const params = [];
+    if (c.name) {
+        params.push(1);      // Param Name
+        const nameData = new TextEncoder("utf-8").encode(c.name.substring(0, 64));
+        params.push(nameData.byteLength);
+        for (let i=0; i<nameData.byteLength; i++) params.push(nameData[i]);
+    }
+    if (c.type == 1) {
+        params.push(2);      // Param numIterationsExp
+        params.push(c.numIterationsExp);
+        params.push(3);      // Param Beacon Hash
+        params.push(c.beaconHash.byteLength);
+        for (let i=0; i<c.beaconHash.byteLength; i++) params.push(c.beaconHash[i]);
+    }
+    if (params.length>0) {
+ const paramsBuff = new Uint8Array(params);
+ await fd.writeULE32(paramsBuff.byteLength);
+ await fd.write(paramsBuff);
+ } else {
+ await fd.writeULE32(0);
+ }
+
+}
+
+async function writeMPCParams(fd, curve, mpcParams) {
+ await binFileUtils__namespace.startWriteSection(fd, 10);
+ await fd.write(mpcParams.csHash);
+ await fd.writeULE32(mpcParams.contributions.length);
+    for (let i=0; i<mpcParams.contributions.length; i++) {
+        await writeContribution$1(fd, curve, mpcParams.contributions[i]);
+    }
+    await binFileUtils__namespace.endWriteSection(fd);
+}
+
+/*
+    Copyright 2018 0KIMS association.
+
+    This file is part of snarkJS.
+
+    snarkJS is a free software: you can redistribute it and/or modify it
+    under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    snarkJS is distributed in the hope that it will be useful, but WITHOUT
+    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+    License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+
+async function write(fd, witness, prime) {
+
+ await binFileUtils__namespace.startWriteSection(fd, 1);
+ const n8 = (Math.floor( (ffjavascript.Scalar.bitLength(prime) - 1) / 64) +1)*8;
+ await fd.writeULE32(n8);
+ await binFileUtils__namespace.writeBigInt(fd, prime, n8);
+ await fd.writeULE32(witness.length);
+ await binFileUtils__namespace.endWriteSection(fd);
+
+ await binFileUtils__namespace.startWriteSection(fd, 2);
+    for (let i=0; i<witness.length; i++) {
+        await binFileUtils__namespace.writeBigInt(fd, witness[i], n8);
+    }
+    await binFileUtils__namespace.endWriteSection(fd);
+}
+
+async function writeBin(fd, witnessBin, prime) {
+
+    await binFileUtils__namespace.startWriteSection(fd, 1);
+    const n8 = (Math.floor( (ffjavascript.Scalar.bitLength(prime) - 1) / 64) +1)*8;
+    await fd.writeULE32(n8);
+    await binFileUtils__namespace.writeBigInt(fd, prime, n8);
+    if (witnessBin.byteLength % n8 != 0) {
+        throw new Error("Invalid witness length");
+    }
+    await fd.writeULE32(witnessBin.byteLength / n8);
+    await binFileUtils__namespace.endWriteSection(fd);
+
+    await binFileUtils__namespace.startWriteSection(fd, 2);
+    await fd.write(witnessBin);
+    await binFileUtils__namespace.endWriteSection(fd);
+}
+
+async function readHeader(fd, sections) {
+
+    await binFileUtils__namespace.startReadUniqueSection(fd, sections, 1);
+    const n8 = await fd.readULE32();
+    const q = await binFileUtils__namespace.readBigInt(fd, n8);
+    const nWitness = await fd.readULE32();
+    await binFileUtils__namespace.endReadSection(fd);
+
+    return {n8, q, nWitness};
+}
+
+/*
+    Copyright 2018 0KIMS association.
+
+    This file is part of snarkJS.
+
+    snarkJS is a free software: you can redistribute it and/or modify it
+    under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    snarkJS is distributed in the hope that it will be useful, but WITHOUT
+    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+    License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+const {stringifyBigInts: stringifyBigInts$3} = ffjavascript.utils;
+
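+// Groth16 prover. Reads the witness and the proving key, evaluates the QAP
+// polynomials A, B, C over the witness, moves them to a shifted domain
+// (ifft -> batchApplyKey -> fft) to obtain the quotient values, and builds
+// pi_a, pi_b, pi_c from multi-exponentiations, blinded with fresh r and s.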
+async function groth16Prove(zkeyFileName, witnessFileName, logger) {
+ const {fd: fdWtns, sections: sectionsWtns} = await binFileUtils__namespace.readBinFile(witnessFileName, "wtns", 2, 1<<25, 1<<23);
+
+ const wtns = await readHeader(fdWtns, sectionsWtns);
+
+ const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils__namespace.readBinFile(zkeyFileName, "zkey", 2, 1<<25, 1<<23);
+
+ const zkey = await readHeader$1(fdZKey, sectionsZKey);
+
+ if (zkey.protocol != "groth16") {
+ throw new Error("zkey file is not groth16");
+ }
+
+ if (!ffjavascript.Scalar.eq(zkey.r, wtns.q)) {
+ throw new Error("Curve of the witness does not match the curve of the proving key");
+ }
+
+ if (wtns.nWitness != zkey.nVars) {
+ throw new Error(`Invalid witness length. Circuit: ${zkey.nVars}, witness: ${wtns.nWitness}`);
+ }
+
+ const curve = await getCurveFromQ(zkey.q);
+ const Fr = curve.Fr;
+ const G1 = curve.G1;
+ const G2 = curve.G2;
+
+ const power = log2(zkey.domainSize);
+
+ if (logger) logger.debug("Reading Wtns");
+ const buffWitness = await binFileUtils__namespace.readSection(fdWtns, sectionsWtns, 2);
+ if (logger) logger.debug("Reading Coeffs");
+ const buffCoeffs = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 4);
+
+ if (logger) logger.debug("Building ABC");
+ const [buffA_T, buffB_T, buffC_T] = await buildABC1(curve, zkey, buffWitness, buffCoeffs, logger);
+
+ const inc = power == Fr.s ? curve.Fr.shift : curve.Fr.w[power+1];
+
+ const buffA = await Fr.ifft(buffA_T, "", "", logger, "IFFT_A");
+ const buffAodd = await Fr.batchApplyKey(buffA, Fr.e(1), inc);
+ const buffAodd_T = await Fr.fft(buffAodd, "", "", logger, "FFT_A");
+
+ const buffB = await Fr.ifft(buffB_T, "", "", logger, "IFFT_B");
+ const buffBodd = await Fr.batchApplyKey(buffB, Fr.e(1), inc);
+ const buffBodd_T = await Fr.fft(buffBodd, "", "", logger, "FFT_B");
+
+ const buffC = await Fr.ifft(buffC_T, "", "", logger, "IFFT_C");
+ const buffCodd = await Fr.batchApplyKey(buffC, Fr.e(1), inc);
+ const buffCodd_T = await Fr.fft(buffCodd, "", "", logger, "FFT_C");
+
+ if (logger) logger.debug("Join ABC");
+ const buffPodd_T = await joinABC(curve, zkey, buffAodd_T, buffBodd_T, buffCodd_T, logger);
+
+ let proof = {};
+
+ if (logger) logger.debug("Reading A Points");
+ const buffBasesA = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 5);
+ proof.pi_a = await curve.G1.multiExpAffine(buffBasesA, buffWitness, logger, "multiexp A");
+
+ if (logger) logger.debug("Reading B1 Points");
+ const buffBasesB1 = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 6);
+ let pib1 = await curve.G1.multiExpAffine(buffBasesB1, buffWitness, logger, "multiexp B1");
+
+ if (logger) logger.debug("Reading B2 Points");
+ const buffBasesB2 = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 7);
+ proof.pi_b = await curve.G2.multiExpAffine(buffBasesB2, buffWitness, logger, "multiexp B2");
+
+ if (logger) logger.debug("Reading C Points");
+ const buffBasesC = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 8);
+ proof.pi_c = await curve.G1.multiExpAffine(buffBasesC, buffWitness.slice((zkey.nPublic+1)*curve.Fr.n8), logger, "multiexp C");
+
+ if (logger) logger.debug("Reading H Points");
+ const buffBasesH = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 9);
+ const resH = await curve.G1.multiExpAffine(buffBasesH, buffPodd_T, logger, "multiexp H");
+
+ const r = curve.Fr.random();
+ const s = curve.Fr.random();
+
+ proof.pi_a = G1.add( proof.pi_a, zkey.vk_alpha_1 );
+ proof.pi_a = G1.add( proof.pi_a, G1.timesFr( zkey.vk_delta_1, r ));
+
+ proof.pi_b = G2.add( proof.pi_b, zkey.vk_beta_2 );
+ proof.pi_b = G2.add( proof.pi_b, G2.timesFr( zkey.vk_delta_2, s ));
+
+ pib1 = G1.add( pib1, zkey.vk_beta_1 );
+ pib1 = G1.add( pib1, G1.timesFr( zkey.vk_delta_1, s ));
+
+ proof.pi_c = G1.add(proof.pi_c, resH);
+
+
+ proof.pi_c = G1.add( proof.pi_c, G1.timesFr( proof.pi_a, s ));
+ proof.pi_c = G1.add( proof.pi_c, G1.timesFr( pib1, r ));
+ proof.pi_c = G1.add( proof.pi_c, G1.timesFr( zkey.vk_delta_1, Fr.neg(Fr.mul(r,s) )));
+
+
+ let publicSignals = [];
+
+ for (let i=1; i<= zkey.nPublic; i++) {
+ const b = buffWitness.slice(i*Fr.n8, i*Fr.n8+Fr.n8);
+ publicSignals.push(ffjavascript.Scalar.fromRprLE(b));
+ }
+
+ proof.pi_a = G1.toObject(G1.toAffine(proof.pi_a));
+ proof.pi_b = G2.toObject(G2.toAffine(proof.pi_b));
+ proof.pi_c = G1.toObject(G1.toAffine(proof.pi_c));
+
+ proof.protocol = "groth16";
+ proof.curve = curve.name;
+
+ await fdZKey.close();
+ await fdWtns.close();
+
+ proof = stringifyBigInts$3(proof);
+ publicSignals = stringifyBigInts$3(publicSignals);
+
+ return {proof, publicSignals};
+}
+
+
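+// Evaluates A and B over the witness and C = A*B pointwise. Each coefficient
+// record in the zkey is sCoef bytes: three little-endian u32s (matrix,
+// constraint, signal) followed by an n8r-byte field element.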
+async function buildABC1(curve, zkey, witness, coeffs, logger) {
+ const n8 = curve.Fr.n8;
+ const sCoef = 4*3 + zkey.n8r;
+ const nCoef = (coeffs.byteLength-4) / sCoef;
+
+ const outBuffA = new ffjavascript.BigBuffer(zkey.domainSize * n8);
+ const outBuffB = new ffjavascript.BigBuffer(zkey.domainSize * n8);
+ const outBuffC = new ffjavascript.BigBuffer(zkey.domainSize * n8);
+
+ const outBuf = [ outBuffA, outBuffB ];
+    for (let i=0; i<nCoef; i++) {
+        if ((logger)&&(i%1000000 == 0)) logger.debug(`QAP AB: ${i}/${nCoef}`);
+        const buffCoef = coeffs.slice(4+i*sCoef, 4+i*sCoef+sCoef);
+        const buffCoefV = new DataView(buffCoef.buffer);
+        const m = buffCoefV.getUint32(0, true);
+        const c = buffCoefV.getUint32(4, true);
+        const s = buffCoefV.getUint32(8, true);
+        const coef = buffCoef.slice(12, 12+n8);
+        outBuf[m].set(
+            curve.Fr.mul(
+                witness.slice(s*n8, s*n8+n8),
+                coef
+            ),
+            c*n8
+        );
+    }
+
+    for (let i=0; i<zkey.domainSize; i++) {
+        if ((logger)&&(i%1000000 == 0)) logger.debug(`QAP C: ${i}/${zkey.domainSize}`);
+        outBuffC.set(
+            curve.Fr.mul(
+                outBuffA.slice(i*n8, i*n8+n8),
+                outBuffB.slice(i*n8, i*n8+n8)
+            ),
+            i*n8
+        );
+    }
+
+    return [outBuffA, outBuffB, outBuffC];
+}
+
+/*
+async function buildABC(curve, zkey, witness, coeffs, logger) {
+    ...
+        const promises2 = [];
+        for (let i=0; i<curve.tm.concurrency; i++) {
+    ...
+    function getCutPoint(v) {
+        let m = 0;
+        let n = nCoef - 1;
+        while (m <= n) {
+            const k = (n + m) >> 1;
+            const va = coeffsDV.getUint32(4 + k*sCoef, true);
+            if (va > v) {
+                n = k - 1;
+            } else if (va < v) {
+                m = k + 1;
+            } else {
+                n = k;
+            }
+        }
+        return 4 + m*sCoef;
+    }
+}
+*/
+
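+// Combines the shifted-domain evaluations into the quotient numerator
+// (A*B - C), chunk by chunk, delegating the field arithmetic to the curve's
+// wasm thread manager and converting the result out of Montgomery form.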
+async function joinABC(curve, zkey, a, b, c, logger) {
+ const MAX_CHUNK_SIZE = 1 << 22;
+
+ const n8 = curve.Fr.n8;
+ const nElements = Math.floor(a.byteLength / curve.Fr.n8);
+
+ const promises = [];
+
+    for (let i=0; i<nElements; i += MAX_CHUNK_SIZE) {
+        if (logger) logger.debug(`JoinABC: ${i}/${nElements}`);
+        const n = Math.min(nElements - i, MAX_CHUNK_SIZE);
+
+        const task = [];
+        task.push({cmd: "ALLOCSET", var: 0, buff: a.slice(i*n8, (i+n)*n8)});
+        task.push({cmd: "ALLOCSET", var: 1, buff: b.slice(i*n8, (i+n)*n8)});
+        task.push({cmd: "ALLOCSET", var: 2, buff: c.slice(i*n8, (i+n)*n8)});
+        task.push({cmd: "ALLOC", var: 3, len: n*n8});
+        task.push({cmd: "CALL", fnName: "qap_joinABC", params: [
+            {var: 0}, {var: 1}, {var: 2}, {val: n}, {var: 3}
+        ]});
+        task.push({cmd: "CALL", fnName: "frm_batchFromMontgomery", params: [
+            {var: 3}, {val: n}, {var: 3}
+        ]});
+        task.push({cmd: "GET", out: 0, var: 3, len: n*n8});
+        promises.push(curve.tm.queueAction(task));
+    }
+
+    const result = await Promise.all(promises);
+
+    let outBuff;
+    if (a instanceof ffjavascript.BigBuffer) {
+        outBuff = new ffjavascript.BigBuffer(a.byteLength);
+    } else {
+        outBuff = new Uint8Array(a.byteLength);
+    }
+
+    let p=0;
+    for (let i=0; i<result.length; i++) {
+        outBuff.set(result[i][0], p);
+        p += result[i][0].byteLength;
+    }
+
+    return outBuff;
+}
+
+/*
+    Copyright 2018 0KIMS association.
+
+    This file is part of snarkJS.
+
+    snarkJS is a free software: you can redistribute it and/or modify it
+    under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    snarkJS is distributed in the hope that it will be useful, but WITHOUT
+    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+    License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
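+// Computes a witness for the given inputs. circom v1 runtimes return a raw
+// binary witness that is wrapped into a wtns container here; newer runtimes
+// emit the wtns file content directly.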
+async function wtnsCalculate(input, wasmFileName, wtnsFileName, options) {
+
+ const fdWasm = await fastFile__namespace.readExisting(wasmFileName);
+ const wasm = await fdWasm.read(fdWasm.totalSize);
+ await fdWasm.close();
+
+ const wc = await circom_runtime.WitnessCalculatorBuilder(wasm);
+ if (wc.circom_version() == 1) {
+ const w = await wc.calculateBinWitness(input);
+
+ const fdWtns = await binFileUtils__namespace.createBinFile(wtnsFileName, "wtns", 2, 2);
+
+ await writeBin(fdWtns, w, wc.prime);
+ await fdWtns.close();
+ } else {
+ const fdWtns = await fastFile__namespace.createOverride(wtnsFileName);
+
+ const w = await wc.calculateWTNSBin(input);
+
+ await fdWtns.write(w);
+ await fdWtns.close();
+ }
+}
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+async function groth16FullProve(input, wasmFile, zkeyFileName, logger) {
+ const wtns= {
+ type: "mem"
+ };
+ await wtnsCalculate(input, wasmFile, wtns);
+ return await groth16Prove(zkeyFileName, wtns, logger);
+}
+
+/*
+ Copyright 2018 0kims association.
+
+ This file is part of snarkjs.
+
+ snarkjs is a free software: you can redistribute it and/or
+ modify it under the terms of the GNU General Public License as published by the
+ Free Software Foundation, either version 3 of the License, or (at your option)
+ any later version.
+
+ snarkjs is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ more details.
+
+ You should have received a copy of the GNU General Public License along with
+    snarkjs. If not, see <https://www.gnu.org/licenses/>.
+*/
+const {unstringifyBigInts: unstringifyBigInts$1} = ffjavascript.utils;
+
+async function groth16Verify(vk_verifier, publicSignals, proof, logger) {
+/*
+ let cpub = vk_verifier.IC[0];
+ for (let s= 0; s< vk_verifier.nPublic; s++) {
+ cpub = G1.add( cpub, G1.timesScalar( vk_verifier.IC[s+1], publicSignals[s]));
+ }
+*/
+
+ vk_verifier = unstringifyBigInts$1(vk_verifier);
+ proof = unstringifyBigInts$1(proof);
+ publicSignals = unstringifyBigInts$1(publicSignals);
+
+ const curve = await getCurveFromName(vk_verifier.curve);
+
+ const IC0 = curve.G1.fromObject(vk_verifier.IC[0]);
+ const IC = new Uint8Array(curve.G1.F.n8*2 * publicSignals.length);
+ const w = new Uint8Array(curve.Fr.n8 * publicSignals.length);
+
+    for (let i=0; i<publicSignals.length; i++) {
+        const buffP = curve.G1.fromObject(vk_verifier.IC[i+1]);
+        IC.set(buffP, i*curve.G1.F.n8*2);
+        ffjavascript.Scalar.toRprLE(w, curve.Fr.n8*i, publicSignals[i], curve.Fr.n8);
+    }
+
+    let cpub = await curve.G1.multiExpAffine(IC, w);
+    cpub = curve.G1.add(cpub, IC0);
+
+    const pi_a = curve.G1.fromObject(proof.pi_a);
+    const pi_b = curve.G2.fromObject(proof.pi_b);
+    const pi_c = curve.G1.fromObject(proof.pi_c);
+
+    const vk_gamma_2 = curve.G2.fromObject(vk_verifier.vk_gamma_2);
+    const vk_delta_2 = curve.G2.fromObject(vk_verifier.vk_delta_2);
+    const vk_alpha_1 = curve.G1.fromObject(vk_verifier.vk_alpha_1);
+    const vk_beta_2 = curve.G2.fromObject(vk_verifier.vk_beta_2);
+
+    const res = await curve.pairingEq(
+        curve.G1.neg(pi_a), pi_b,
+        cpub, vk_gamma_2,
+        pi_c, vk_delta_2,
+        vk_alpha_1, vk_beta_2
+    );
+
+    if (!res) {
+        if (logger) logger.error("Invalid proof");
+        return false;
+    }
+
+    if (logger) logger.info("OK!");
+    return true;
+}
+
+/*
+    Copyright 2018 0KIMS association.
+
+    This file is part of snarkJS.
+
+    snarkJS is a free software: you can redistribute it and/or modify it
+    under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    snarkJS is distributed in the hope that it will be useful, but WITHOUT
+    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+    License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+function p256$1(n) {
+ let nstr = n.toString(16);
+ while (nstr.length < 64) nstr = "0"+nstr;
+ nstr = `"0x${nstr}"`;
+ return nstr;
+}
+
+async function groth16ExportSolidityCallData(proof, pub) {
+
+ let inputs = "";
+    for (let i=0; i<pub.length; i++) {
+        if (inputs != "") inputs = inputs + ",";
+        inputs = inputs + p256$1(pub[i]);
+    }
+
+    let S;
+    S = `[${p256$1(proof.pi_a[0])}, ${p256$1(proof.pi_a[1])}],` +
+        `[[${p256$1(proof.pi_b[0][1])}, ${p256$1(proof.pi_b[0][0])}],[${p256$1(proof.pi_b[1][1])}, ${p256$1(proof.pi_b[1][0])}]],` +
+        `[${p256$1(proof.pi_c[0])}, ${p256$1(proof.pi_c[1])}],` +
+        `[${inputs}]`;
+
+    return S;
+}
+
+/*
+    Copyright 2018 0KIMS association.
+
+    This file is part of snarkJS.
+
+    snarkJS is a free software: you can redistribute it and/or modify it
+    under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    snarkJS is distributed in the hope that it will be useful, but WITHOUT
+    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+    License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+var groth16 = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ fullProve: groth16FullProve,
+ prove: groth16Prove,
+ verify: groth16Verify,
+ exportSolidityCallData: groth16ExportSolidityCallData
+});
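+// Illustrative use of the exported groth16 API (input, file names and vkey
+// are placeholders, not part of this build):
+//
+//   const { proof, publicSignals } = await snarkjs.groth16.fullProve(
+//       input, "circuit.wasm", "circuit_final.zkey");
+//   const ok = await snarkjs.groth16.verify(vkey, publicSignals, proof);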
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
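+// Hashes to a deterministic G2 point: the first eight u32 words of the
+// 64-byte hash seed a ChaCha stream from which the point is sampled.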
+function hashToG2(curve, hash) {
+ const hashV = new DataView(hash.buffer, hash.byteOffset, hash.byteLength);
+ const seed = [];
+ for (let i=0; i<8; i++) {
+ seed[i] = hashV.getUint32(i*4);
+ }
+
+ const rng = new ffjavascript.ChaCha(seed);
+
+ const g2_sp = curve.G2.fromRng(rng);
+
+ return g2_sp;
+}
+
+function getG2sp(curve, personalization, challenge, g1s, g1sx) {
+
+ const h = Blake2b__default["default"](64);
+    const b1 = new Uint8Array([personalization]);
+ h.update(b1);
+ h.update(challenge);
+ const b3 = curve.G1.toUncompressed(g1s);
+ h.update( b3);
+ const b4 = curve.G1.toUncompressed(g1sx);
+ h.update( b4);
+ const hash =h.digest();
+
+ return hashToG2(curve, hash);
+}
+
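+// Computes the public part of a contribution key: a random g1_s, its scaled
+// g1_sx = prvKey*g1_s, and the matching G2 pair, where g2_sp is derived from
+// the personalization byte, the challenge hash and the G1 pair. createPTauKey
+// below uses personalization 0, 1, 2 for tau, alpha and beta respectively.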
+function calculatePubKey(k, curve, personalization, challengeHash, rng ) {
+ k.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
+ k.g1_sx = curve.G1.toAffine(curve.G1.timesFr(k.g1_s, k.prvKey));
+ k.g2_sp = curve.G2.toAffine(getG2sp(curve, personalization, challengeHash, k.g1_s, k.g1_sx));
+ k.g2_spx = curve.G2.toAffine(curve.G2.timesFr(k.g2_sp, k.prvKey));
+ return k;
+}
+
+function createPTauKey(curve, challengeHash, rng) {
+ const key = {
+ tau: {},
+ alpha: {},
+ beta: {}
+ };
+ key.tau.prvKey = curve.Fr.fromRng(rng);
+ key.alpha.prvKey = curve.Fr.fromRng(rng);
+ key.beta.prvKey = curve.Fr.fromRng(rng);
+ calculatePubKey(key.tau, curve, 0, challengeHash, rng);
+ calculatePubKey(key.alpha, curve, 1, challengeHash, rng);
+ calculatePubKey(key.beta, curve, 2, challengeHash, rng);
+ return key;
+}
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
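+// The ptau header (section 1) stores the base field element size and prime,
+// the power of this file, and the power at which the ceremony is run.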
+async function writePTauHeader(fd, curve, power, ceremonyPower) {
+ // Write the header
+ ///////////
+
+ if (! ceremonyPower) ceremonyPower = power;
+ await fd.writeULE32(1); // Header type
+ const pHeaderSize = fd.pos;
+    await fd.writeULE64(0); // Temporarily set to 0 length
+
+ await fd.writeULE32(curve.F1.n64*8);
+
+ const buff = new Uint8Array(curve.F1.n8);
+ ffjavascript.Scalar.toRprLE(buff, 0, curve.q, curve.F1.n8);
+ await fd.write(buff);
+ await fd.writeULE32(power); // power
+    await fd.writeULE32(ceremonyPower); // ceremony power
+
+ const headerSize = fd.pos - pHeaderSize - 8;
+
+ const oldPos = fd.pos;
+
+ await fd.writeULE64(headerSize, pHeaderSize);
+
+ fd.pos = oldPos;
+}
+
+async function readPTauHeader(fd, sections) {
+ if (!sections[1]) throw new Error(fd.fileName + ": File has no header");
+ if (sections[1].length>1) throw new Error(fd.fileName +": File has more than one header");
+
+ fd.pos = sections[1][0].p;
+ const n8 = await fd.readULE32();
+ const buff = await fd.read(n8);
+ const q = ffjavascript.Scalar.fromRprLE(buff);
+
+ const curve = await getCurveFromQ(q);
+
+ if (curve.F1.n64*8 != n8) throw new Error(fd.fileName +": Invalid size");
+
+ const power = await fd.readULE32();
+ const ceremonyPower = await fd.readULE32();
+
+ if (fd.pos-sections[1][0].p != sections[1][0].size) throw new Error("Invalid PTau header size");
+
+ return {curve, power, ceremonyPower};
+}
+
+
+async function readPtauPubKey(fd, curve, montgomery) {
+
+ const buff = await fd.read(curve.F1.n8*2*6 + curve.F2.n8*2*3);
+
+ return fromPtauPubKeyRpr(buff, 0, curve, montgomery);
+}
+
+function fromPtauPubKeyRpr(buff, pos, curve, montgomery) {
+
+ const key = {
+ tau: {},
+ alpha: {},
+ beta: {}
+ };
+
+ key.tau.g1_s = readG1();
+ key.tau.g1_sx = readG1();
+ key.alpha.g1_s = readG1();
+ key.alpha.g1_sx = readG1();
+ key.beta.g1_s = readG1();
+ key.beta.g1_sx = readG1();
+ key.tau.g2_spx = readG2();
+ key.alpha.g2_spx = readG2();
+ key.beta.g2_spx = readG2();
+
+ return key;
+
+ function readG1() {
+ let p;
+ if (montgomery) {
+ p = curve.G1.fromRprLEM( buff, pos );
+ } else {
+ p = curve.G1.fromRprUncompressed( buff, pos );
+ }
+ pos += curve.G1.F.n8*2;
+ return p;
+ }
+
+ function readG2() {
+ let p;
+ if (montgomery) {
+ p = curve.G2.fromRprLEM( buff, pos );
+ } else {
+ p = curve.G2.fromRprUncompressed( buff, pos );
+ }
+ pos += curve.G2.F.n8*2;
+ return p;
+ }
+}
+
+function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) {
+
+ writeG1(key.tau.g1_s);
+ writeG1(key.tau.g1_sx);
+ writeG1(key.alpha.g1_s);
+ writeG1(key.alpha.g1_sx);
+ writeG1(key.beta.g1_s);
+ writeG1(key.beta.g1_sx);
+ writeG2(key.tau.g2_spx);
+ writeG2(key.alpha.g2_spx);
+ writeG2(key.beta.g2_spx);
+
+ async function writeG1(p) {
+ if (montgomery) {
+ curve.G1.toRprLEM(buff, pos, p);
+ } else {
+ curve.G1.toRprUncompressed(buff, pos, p);
+ }
+ pos += curve.F1.n8*2;
+ }
+
+ async function writeG2(p) {
+ if (montgomery) {
+ curve.G2.toRprLEM(buff, pos, p);
+ } else {
+ curve.G2.toRprUncompressed(buff, pos, p);
+ }
+ pos += curve.F2.n8*2;
+ }
+
+ return buff;
+}
+
+async function writePtauPubKey(fd, curve, key, montgomery) {
+ const buff = new Uint8Array(curve.F1.n8*2*6 + curve.F2.n8*2*3);
+ toPtauPubKeyRpr(buff, 0, curve, key, montgomery);
+ await fd.write(buff);
+}
+
+async function readContribution(fd, curve) {
+ const c = {};
+
+ c.tauG1 = await readG1();
+ c.tauG2 = await readG2();
+ c.alphaG1 = await readG1();
+ c.betaG1 = await readG1();
+ c.betaG2 = await readG2();
+ c.key = await readPtauPubKey(fd, curve, true);
+ c.partialHash = await fd.read(216);
+ c.nextChallenge = await fd.read(64);
+ c.type = await fd.readULE32();
+
+ const buffV = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3);
+ toPtauPubKeyRpr(buffV, 0, curve, c.key, false);
+
+ const responseHasher = Blake2b__default["default"](64);
+ responseHasher.setPartialHash(c.partialHash);
+ responseHasher.update(buffV);
+ c.responseHash = responseHasher.digest();
+
+ const paramLength = await fd.readULE32();
+ const curPos = fd.pos;
+ let lastType =0;
+ while (fd.pos-curPos < paramLength) {
+ const buffType = await readDV(1);
+ if (buffType[0]<= lastType) throw new Error("Parameters in the contribution must be sorted");
+ lastType = buffType[0];
+ if (buffType[0]==1) { // Name
+ const buffLen = await readDV(1);
+ const buffStr = await readDV(buffLen[0]);
+ c.name = new TextDecoder().decode(buffStr);
+ } else if (buffType[0]==2) {
+ const buffExp = await readDV(1);
+ c.numIterationsExp = buffExp[0];
+ } else if (buffType[0]==3) {
+ const buffLen = await readDV(1);
+ c.beaconHash = await readDV(buffLen[0]);
+ } else {
+ throw new Error("Parameter not recognized");
+ }
+ }
+ if (fd.pos != curPos + paramLength) {
+ throw new Error("Parametes do not match");
+ }
+
+ return c;
+
+ async function readG1() {
+ const pBuff = await fd.read(curve.G1.F.n8*2);
+ return curve.G1.fromRprLEM( pBuff );
+ }
+
+ async function readG2() {
+ const pBuff = await fd.read(curve.G2.F.n8*2);
+ return curve.G2.fromRprLEM( pBuff );
+ }
+
+ async function readDV(n) {
+ const b = await fd.read(n);
+ return new Uint8Array(b);
+ }
+}
+
+async function readContributions(fd, curve, sections) {
+ if (!sections[7]) throw new Error(fd.fileName + ": File has no contributions");
+    if (sections[7].length>1) throw new Error(fd.fileName +": File has more than one contributions section");
+
+ fd.pos = sections[7][0].p;
+ const nContributions = await fd.readULE32();
+ const contributions = [];
+    for (let i=0; i<nContributions; i++) {
+        const c = await readContribution(fd, curve);
+        c.id = i+1;
+        contributions.push(c);
+    }
+
+    if (fd.pos-sections[7][0].p != sections[7][0].size) throw new Error("Invalid contribution section size");
+
+    return contributions;
+}
+
+async function writeContribution(fd, curve, contribution) {
+
+    const buffG1 = new Uint8Array(curve.F1.n8*2);
+    const buffG2 = new Uint8Array(curve.F2.n8*2);
+
+    await writeG1(contribution.tauG1);
+    await writeG2(contribution.tauG2);
+    await writeG1(contribution.alphaG1);
+    await writeG1(contribution.betaG1);
+    await writeG2(contribution.betaG2);
+    await writePtauPubKey(fd, curve, contribution.key, true);
+    await fd.write(contribution.partialHash);
+    await fd.write(contribution.nextChallenge);
+    await fd.writeULE32(contribution.type || 0);
+
+    const params = [];
+    if (contribution.name) {
+        params.push(1);      // Param Name
+        const nameData = new TextEncoder("utf-8").encode(contribution.name.substring(0, 64));
+        params.push(nameData.byteLength);
+        for (let i=0; i<nameData.byteLength; i++) params.push(nameData[i]);
+    }
+    if (contribution.type == 1) {
+        params.push(2);      // Param numIterationsExp
+        params.push(contribution.numIterationsExp);
+        params.push(3);      // Param Beacon Hash
+        params.push(contribution.beaconHash.byteLength);
+        for (let i=0; i<contribution.beaconHash.byteLength; i++) params.push(contribution.beaconHash[i]);
+    }
+    if (params.length>0) {
+ const paramsBuff = new Uint8Array(params);
+ await fd.writeULE32(paramsBuff.byteLength);
+ await fd.write(paramsBuff);
+ } else {
+ await fd.writeULE32(0);
+ }
+
+
+ async function writeG1(p) {
+ curve.G1.toRprLEM(buffG1, 0, p);
+ await fd.write(buffG1);
+ }
+
+ async function writeG2(p) {
+ curve.G2.toRprLEM(buffG2, 0, p);
+ await fd.write(buffG2);
+ }
+
+}
+
+async function writeContributions(fd, curve, contributions) {
+
+ await fd.writeULE32(7); // Header type
+ const pContributionsSize = fd.pos;
+    await fd.writeULE64(0); // Temporarily set to 0 length
+
+ await fd.writeULE32(contributions.length);
+ for (let i=0; i< contributions.length; i++) {
+ await writeContribution(fd, curve, contributions[i]);
+ }
+ const contributionsSize = fd.pos - pContributionsSize - 8;
+
+ const oldPos = fd.pos;
+
+ await fd.writeULE64(contributionsSize, pContributionsSize);
+ fd.pos = oldPos;
+}
+
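+// The first challenge is the hash of a blank contribution (an empty Blake2b
+// digest) followed by the uncompressed generator repeated for every point:
+// 2^power*2-1 tauG1 points, 2^power each of tauG2, alphaTauG1 and betaTauG1,
+// and a single betaG2.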
+function calculateFirstChallengeHash(curve, power, logger) {
+ if (logger) logger.debug("Calculating First Challenge Hash");
+
+ const hasher = new Blake2b__default["default"](64);
+
+ const vG1 = new Uint8Array(curve.G1.F.n8*2);
+ const vG2 = new Uint8Array(curve.G2.F.n8*2);
+ curve.G1.toRprUncompressed(vG1, 0, curve.G1.g);
+ curve.G2.toRprUncompressed(vG2, 0, curve.G2.g);
+
+ hasher.update(Blake2b__default["default"](64).digest());
+
+ let n;
+
+ n=(2 ** power)*2 -1;
+ if (logger) logger.debug("Calculate Initial Hash: tauG1");
+ hashBlock(vG1, n);
+ n= 2 ** power;
+ if (logger) logger.debug("Calculate Initial Hash: tauG2");
+ hashBlock(vG2, n);
+ if (logger) logger.debug("Calculate Initial Hash: alphaTauG1");
+ hashBlock(vG1, n);
+ if (logger) logger.debug("Calculate Initial Hash: betaTauG1");
+ hashBlock(vG1, n);
+ hasher.update(vG2);
+
+ return hasher.digest();
+
+ function hashBlock(buff, n) {
+ const blockSize = 500000;
+ const nBlocks = Math.floor(n / blockSize);
+ const rem = n % blockSize;
+ const bigBuff = new Uint8Array(blockSize * buff.byteLength);
+        for (let i=0; i<blockSize; i++) {
+            bigBuff.set(buff, i*buff.byteLength);
+        }
+        for (let i=0; i<nBlocks; i++) {
+            hasher.update(bigBuff);
+            if (logger) logger.debug("Initial hash: " + i*blockSize);
+        }
+        for (let i=0; i<rem; i++) {
+            hasher.update(buff);
+        }
+    }
+}
+
+/*
+    Copyright 2018 0KIMS association.
+
+    This file is part of snarkJS.
+
+    snarkJS is a free software: you can redistribute it and/or modify it
+    under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    snarkJS is distributed in the hope that it will be useful, but WITHOUT
+    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+    License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+async function newAccumulator(curve, power, fileName, logger) {
+
+ await Blake2b__default["default"].ready();
+
+ const fd = await binFileUtils__namespace.createBinFile(fileName, "ptau", 1, 7);
+
+ await writePTauHeader(fd, curve, power, 0);
+
+ const buffG1 = curve.G1.oneAffine;
+ const buffG2 = curve.G2.oneAffine;
+
+ // Write tauG1
+ ///////////
+ await binFileUtils__namespace.startWriteSection(fd, 2);
+ const nTauG1 = (2 ** power) * 2 -1;
+ for (let i=0; i< nTauG1; i++) {
+ await fd.write(buffG1);
+ if ((logger)&&((i%100000) == 0)&&i) logger.log("tauG1: " + i);
+ }
+ await binFileUtils__namespace.endWriteSection(fd);
+
+ // Write tauG2
+ ///////////
+ await binFileUtils__namespace.startWriteSection(fd, 3);
+ const nTauG2 = (2 ** power);
+ for (let i=0; i< nTauG2; i++) {
+ await fd.write(buffG2);
+ if ((logger)&&((i%100000) == 0)&&i) logger.log("tauG2: " + i);
+ }
+ await binFileUtils__namespace.endWriteSection(fd);
+
+ // Write alphaTauG1
+ ///////////
+ await binFileUtils__namespace.startWriteSection(fd, 4);
+ const nAlfaTauG1 = (2 ** power);
+ for (let i=0; i< nAlfaTauG1; i++) {
+ await fd.write(buffG1);
+ if ((logger)&&((i%100000) == 0)&&i) logger.log("alphaTauG1: " + i);
+ }
+ await binFileUtils__namespace.endWriteSection(fd);
+
+ // Write betaTauG1
+ ///////////
+ await binFileUtils__namespace.startWriteSection(fd, 5);
+ const nBetaTauG1 = (2 ** power);
+ for (let i=0; i< nBetaTauG1; i++) {
+ await fd.write(buffG1);
+ if ((logger)&&((i%100000) == 0)&&i) logger.log("betaTauG1: " + i);
+ }
+ await binFileUtils__namespace.endWriteSection(fd);
+
+ // Write betaG2
+ ///////////
+ await binFileUtils__namespace.startWriteSection(fd, 6);
+ await fd.write(buffG2);
+ await binFileUtils__namespace.endWriteSection(fd);
+
+ // Contributions
+ ///////////
+ await binFileUtils__namespace.startWriteSection(fd, 7);
+ await fd.writeULE32(0); // 0 Contributions
+ await binFileUtils__namespace.endWriteSection(fd);
+
+ await fd.close();
+
+ const firstChallengeHash = calculateFirstChallengeHash(curve, power, logger);
+
+ if (logger) logger.debug(formatHash(Blake2b__default["default"](64).digest(), "Blank Contribution Hash:"));
+
+ if (logger) logger.info(formatHash(firstChallengeHash, "First Contribution Hash:"));
+
+ return firstChallengeHash;
+
+}
+
+// Format of the output:
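+//     Hash of the last response              64 bytes
+//     tauG1        (2^power)*2-1 points      (uncompressed)
+//     tauG2        2^power points            (uncompressed)
+//     alphaTauG1   2^power points            (uncompressed)
+//     betaTauG1    2^power points            (uncompressed)
+//     betaG2       1 point                   (uncompressed)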
+
+async function exportChallenge(pTauFilename, challengeFilename, logger) {
+ await Blake2b__default["default"].ready();
+ const {fd: fdFrom, sections} = await binFileUtils__namespace.readBinFile(pTauFilename, "ptau", 1);
+
+ const {curve, power} = await readPTauHeader(fdFrom, sections);
+
+ const contributions = await readContributions(fdFrom, curve, sections);
+ let lastResponseHash, curChallengeHash;
+ if (contributions.length == 0) {
+ lastResponseHash = Blake2b__default["default"](64).digest();
+ curChallengeHash = calculateFirstChallengeHash(curve, power);
+ } else {
+ lastResponseHash = contributions[contributions.length-1].responseHash;
+ curChallengeHash = contributions[contributions.length-1].nextChallenge;
+ }
+
+ if (logger) logger.info(formatHash(lastResponseHash, "Last Response Hash: "));
+
+ if (logger) logger.info(formatHash(curChallengeHash, "New Challenge Hash: "));
+
+
+ const fdTo = await fastFile__namespace.createOverride(challengeFilename);
+
+ const toHash = Blake2b__default["default"](64);
+ await fdTo.write(lastResponseHash);
+ toHash.update(lastResponseHash);
+
+ await exportSection(2, "G1", (2 ** power) * 2 -1, "tauG1");
+ await exportSection(3, "G2", (2 ** power) , "tauG2");
+ await exportSection(4, "G1", (2 ** power) , "alphaTauG1");
+ await exportSection(5, "G1", (2 ** power) , "betaTauG1");
+ await exportSection(6, "G2", 1 , "betaG2");
+
+ await fdFrom.close();
+ await fdTo.close();
+
+ const calcCurChallengeHash = toHash.digest();
+
+ if (!hashIsEqual (curChallengeHash, calcCurChallengeHash)) {
+        if (logger) logger.info(formatHash(calcCurChallengeHash, "Calculated Current Challenge Hash: "));
+
+        if (logger) logger.error("PTau file is corrupted. Calculated new challenge hash does not match the declared one");
+        throw new Error("PTau file is corrupted. Calculated new challenge hash does not match the declared one");
+ }
+
+ return curChallengeHash;
+
+ async function exportSection(sectionId, groupName, nPoints, sectionName) {
+ const G = curve[groupName];
+ const sG = G.F.n8*2;
+ const nPointsChunk = Math.floor((1<<24)/sG);
+
+ await binFileUtils__namespace.startReadUniqueSection(fdFrom, sections, sectionId);
+ for (let i=0; i< nPoints; i+= nPointsChunk) {
+ if (logger) logger.debug(`Exporting ${sectionName}: ${i}/${nPoints}`);
+ const n = Math.min(nPoints-i, nPointsChunk);
+ let buff;
+ buff = await fdFrom.read(n*sG);
+ buff = await G.batchLEMtoU(buff);
+ await fdTo.write(buff);
+ toHash.update(buff);
+ }
+ await binFileUtils__namespace.endReadSection(fdFrom);
+ }
+
+
+}
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
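+// Imports a response (compressed points) into a new ptau file: points are
+// converted to Montgomery LE form, the response is checked to chain from the
+// previous challenge, and, when importPoints is set, the next challenge hash
+// is recomputed over the uncompressed points.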
+async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, logger) {
+
+ await Blake2b__default["default"].ready();
+
+ const noHash = new Uint8Array(64);
+ for (let i=0; i<64; i++) noHash[i] = 0xFF;
+
+ const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
+ const {curve, power} = await readPTauHeader(fdOld, sections);
+ const contributions = await readContributions(fdOld, curve, sections);
+ const currentContribution = {};
+
+ if (name) currentContribution.name = name;
+
+ const sG1 = curve.F1.n8*2;
+    const scG1 = curve.F1.n8; // Compressed size
+ const sG2 = curve.F2.n8*2;
+    const scG2 = curve.F2.n8; // Compressed size
+
+ const fdResponse = await fastFile__namespace.readExisting(contributionFilename);
+
+ if (fdResponse.totalSize !=
+ 64 + // Old Hash
+ ((2 ** power)*2-1)*scG1 +
+ (2 ** power)*scG2 +
+ (2 ** power)*scG1 +
+ (2 ** power)*scG1 +
+ scG2 +
+ sG1*6 + sG2*3)
+ throw new Error("Size of the contribution is invalid");
+
+ let lastChallengeHash;
+
+ if (contributions.length>0) {
+ lastChallengeHash = contributions[contributions.length-1].nextChallenge;
+ } else {
+ lastChallengeHash = calculateFirstChallengeHash(curve, power, logger);
+ }
+
+ const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, importPoints ? 7: 2);
+ await writePTauHeader(fdNew, curve, power);
+
+ const contributionPreviousHash = await fdResponse.read(64);
+
+ if (hashIsEqual(noHash,lastChallengeHash)) {
+ lastChallengeHash = contributionPreviousHash;
+ contributions[contributions.length-1].nextChallenge = lastChallengeHash;
+ }
+
+ if(!hashIsEqual(contributionPreviousHash,lastChallengeHash))
+ throw new Error("Wrong contribution. this contribution is not based on the previus hash");
+
+ const hasherResponse = new Blake2b__default["default"](64);
+ hasherResponse.update(contributionPreviousHash);
+
+ const startSections = [];
+ let res;
+ res = await processSection(fdResponse, fdNew, "G1", 2, (2 ** power) * 2 -1, [1], "tauG1");
+ currentContribution.tauG1 = res[0];
+ res = await processSection(fdResponse, fdNew, "G2", 3, (2 ** power) , [1], "tauG2");
+ currentContribution.tauG2 = res[0];
+ res = await processSection(fdResponse, fdNew, "G1", 4, (2 ** power) , [0], "alphaG1");
+ currentContribution.alphaG1 = res[0];
+ res = await processSection(fdResponse, fdNew, "G1", 5, (2 ** power) , [0], "betaG1");
+ currentContribution.betaG1 = res[0];
+ res = await processSection(fdResponse, fdNew, "G2", 6, 1 , [0], "betaG2");
+ currentContribution.betaG2 = res[0];
+
+ currentContribution.partialHash = hasherResponse.getPartialHash();
+
+
+ const buffKey = await fdResponse.read(curve.F1.n8*2*6+curve.F2.n8*2*3);
+
+ currentContribution.key = fromPtauPubKeyRpr(buffKey, 0, curve, false);
+
+ hasherResponse.update(new Uint8Array(buffKey));
+ const hashResponse = hasherResponse.digest();
+
+ if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash imported: "));
+
+ if (importPoints) {
+ const nextChallengeHasher = new Blake2b__default["default"](64);
+ nextChallengeHasher.update(hashResponse);
+
+ await hashSection(nextChallengeHasher, fdNew, "G1", 2, (2 ** power) * 2 -1, "tauG1", logger);
+ await hashSection(nextChallengeHasher, fdNew, "G2", 3, (2 ** power) , "tauG2", logger);
+ await hashSection(nextChallengeHasher, fdNew, "G1", 4, (2 ** power) , "alphaTauG1", logger);
+ await hashSection(nextChallengeHasher, fdNew, "G1", 5, (2 ** power) , "betaTauG1", logger);
+ await hashSection(nextChallengeHasher, fdNew, "G2", 6, 1 , "betaG2", logger);
+
+ currentContribution.nextChallenge = nextChallengeHasher.digest();
+
+ if (logger) logger.info(formatHash(currentContribution.nextChallenge, "Next Challenge Hash: "));
+ } else {
+ currentContribution.nextChallenge = noHash;
+ }
+
+ contributions.push(currentContribution);
+
+ await writeContributions(fdNew, curve, contributions);
+
+ await fdResponse.close();
+ await fdNew.close();
+ await fdOld.close();
+
+ return currentContribution.nextChallenge;
+
+ async function processSection(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) {
+ if (importPoints) {
+ return await processSectionImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName);
+ } else {
+ return await processSectionNoImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName);
+ }
+ }
+
+ async function processSectionImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) {
+
+ const G = curve[groupName];
+ const scG = G.F.n8;
+ const sG = G.F.n8*2;
+
+ const singularPoints = [];
+
+ await binFileUtils__namespace.startWriteSection(fdTo, sectionId);
+ const nPointsChunk = Math.floor((1<<24)/sG);
+
+ startSections[sectionId] = fdTo.pos;
+
+ for (let i=0; i< nPoints; i += nPointsChunk) {
+ if (logger) logger.debug(`Importing ${sectionName}: ${i}/${nPoints}`);
+ const n = Math.min(nPoints-i, nPointsChunk);
+
+ const buffC = await fdFrom.read(n * scG);
+ hasherResponse.update(buffC);
+
+ const buffLEM = await G.batchCtoLEM(buffC);
+
+ await fdTo.write(buffLEM);
+            for (let j=0; j<singularPointIndexes.length; j++) {
+                const sp = singularPointIndexes[j];
+                if ((sp >= i) && (sp < i+n)) {
+ const P = G.fromRprLEM(buffLEM, (sp-i)*sG);
+ singularPoints.push(P);
+ }
+ }
+ }
+
+ await binFileUtils__namespace.endWriteSection(fdTo);
+
+ return singularPoints;
+ }
+
+
+ async function processSectionNoImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) {
+
+ const G = curve[groupName];
+ const scG = G.F.n8;
+
+ const singularPoints = [];
+
+ const nPointsChunk = Math.floor((1<<24)/scG);
+
+ for (let i=0; i< nPoints; i += nPointsChunk) {
+ if (logger) logger.debug(`Importing ${sectionName}: ${i}/${nPoints}`);
+ const n = Math.min(nPoints-i, nPointsChunk);
+
+ const buffC = await fdFrom.read(n * scG);
+ hasherResponse.update(buffC);
+
+            for (let j=0; j<singularPointIndexes.length; j++) {
+                const sp = singularPointIndexes[j];
+                if ((sp >= i) && (sp < i+n)) {
+ const P = G.fromRprCompressed(buffC, (sp-i)*scG);
+ singularPoints.push(P);
+ }
+ }
+ }
+
+ return singularPoints;
+ }
+
+
+ async function hashSection(nextChallengeHasher, fdTo, groupName, sectionId, nPoints, sectionName, logger) {
+
+ const G = curve[groupName];
+ const sG = G.F.n8*2;
+ const nPointsChunk = Math.floor((1<<24)/sG);
+
+ const oldPos = fdTo.pos;
+ fdTo.pos = startSections[sectionId];
+
+ for (let i=0; i< nPoints; i += nPointsChunk) {
+ if (logger) logger.debug(`Hashing ${sectionName}: ${i}/${nPoints}`);
+ const n = Math.min(nPoints-i, nPointsChunk);
+
+ const buffLEM = await fdTo.read(n * sG);
+
+ const buffU = await G.batchLEMtoU(buffLEM);
+
+ nextChallengeHasher.update(buffU);
+ }
+
+ fdTo.pos = oldPos;
+ }
+
+}
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+const sameRatio$1 = sameRatio$2;
+
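+// A contribution is accepted when each key pair (tau, alpha, beta) passes the
+// same-ratio pairing check against the g2_sp point derived from the previous
+// challenge, and the new section heads follow the previous contribution by
+// those same ratios. Beacon contributions must also reproduce the key derived
+// from the published beacon parameters.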
+async function verifyContribution(curve, cur, prev, logger) {
+ let sr;
+ if (cur.type == 1) { // Verify the beacon.
+ const beaconKey = keyFromBeacon(curve, prev.nextChallenge, cur.beaconHash, cur.numIterationsExp);
+
+ if (!curve.G1.eq(cur.key.tau.g1_s, beaconKey.tau.g1_s)) {
+ if (logger) logger.error(`BEACON key (tauG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
+ return false;
+ }
+ if (!curve.G1.eq(cur.key.tau.g1_sx, beaconKey.tau.g1_sx)) {
+ if (logger) logger.error(`BEACON key (tauG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
+ return false;
+ }
+ if (!curve.G2.eq(cur.key.tau.g2_spx, beaconKey.tau.g2_spx)) {
+ if (logger) logger.error(`BEACON key (tauG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
+ return false;
+ }
+
+ if (!curve.G1.eq(cur.key.alpha.g1_s, beaconKey.alpha.g1_s)) {
+ if (logger) logger.error(`BEACON key (alphaG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
+ return false;
+ }
+ if (!curve.G1.eq(cur.key.alpha.g1_sx, beaconKey.alpha.g1_sx)) {
+ if (logger) logger.error(`BEACON key (alphaG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
+ return false;
+ }
+ if (!curve.G2.eq(cur.key.alpha.g2_spx, beaconKey.alpha.g2_spx)) {
+ if (logger) logger.error(`BEACON key (alphaG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
+ return false;
+ }
+
+ if (!curve.G1.eq(cur.key.beta.g1_s, beaconKey.beta.g1_s)) {
+ if (logger) logger.error(`BEACON key (betaG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
+ return false;
+ }
+ if (!curve.G1.eq(cur.key.beta.g1_sx, beaconKey.beta.g1_sx)) {
+ if (logger) logger.error(`BEACON key (betaG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
+ return false;
+ }
+ if (!curve.G2.eq(cur.key.beta.g2_spx, beaconKey.beta.g2_spx)) {
+ if (logger) logger.error(`BEACON key (betaG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
+ return false;
+ }
+ }
+
+ cur.key.tau.g2_sp = curve.G2.toAffine(getG2sp(curve, 0, prev.nextChallenge, cur.key.tau.g1_s, cur.key.tau.g1_sx));
+ cur.key.alpha.g2_sp = curve.G2.toAffine(getG2sp(curve, 1, prev.nextChallenge, cur.key.alpha.g1_s, cur.key.alpha.g1_sx));
+ cur.key.beta.g2_sp = curve.G2.toAffine(getG2sp(curve, 2, prev.nextChallenge, cur.key.beta.g1_s, cur.key.beta.g1_sx));
+
+ sr = await sameRatio$1(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, cur.key.tau.g2_sp, cur.key.tau.g2_spx);
+ if (sr !== true) {
+ if (logger) logger.error("INVALID key (tau) in challenge #"+cur.id);
+ return false;
+ }
+
+ sr = await sameRatio$1(curve, cur.key.alpha.g1_s, cur.key.alpha.g1_sx, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx);
+ if (sr !== true) {
+ if (logger) logger.error("INVALID key (alpha) in challenge #"+cur.id);
+ return false;
+ }
+
+ sr = await sameRatio$1(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, cur.key.beta.g2_sp, cur.key.beta.g2_spx);
+ if (sr !== true) {
+ if (logger) logger.error("INVALID key (beta) in challenge #"+cur.id);
+ return false;
+ }
+
+ sr = await sameRatio$1(curve, prev.tauG1, cur.tauG1, cur.key.tau.g2_sp, cur.key.tau.g2_spx);
+ if (sr !== true) {
+ if (logger) logger.error("INVALID tau*G1. challenge #"+cur.id+" It does not follow the previous contribution");
+ return false;
+ }
+
+ sr = await sameRatio$1(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, prev.tauG2, cur.tauG2);
+ if (sr !== true) {
+ if (logger) logger.error("INVALID tau*G2. challenge #"+cur.id+" It does not follow the previous contribution");
+ return false;
+ }
+
+ sr = await sameRatio$1(curve, prev.alphaG1, cur.alphaG1, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx);
+ if (sr !== true) {
+ if (logger) logger.error("INVALID alpha*G1. challenge #"+cur.id+" It does not follow the previous contribution");
+ return false;
+ }
+
+ sr = await sameRatio$1(curve, prev.betaG1, cur.betaG1, cur.key.beta.g2_sp, cur.key.beta.g2_spx);
+ if (sr !== true) {
+ if (logger) logger.error("INVALID beta*G1. challenge #"+cur.id+" It does not follow the previous contribution");
+ return false;
+ }
+
+ sr = await sameRatio$1(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, prev.betaG2, cur.betaG2);
+ if (sr !== true) {
+ if (logger) logger.error("INVALID beta*G2. challenge #"+cur.id+"It does not follow the previous contribution");
+ return false;
+ }
+
+ if (logger) logger.info("Powers Of tau file OK!");
+ return true;
+}
+
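+// Full ptau verification: validate the latest contribution, re-hash every
+// section while checking consecutive powers of tau with random linear
+// combinations, then walk the earlier contributions backwards. Phase2
+// Lagrange sections (12-15), when present, are checked against the tau
+// sections.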
+async function verify(tauFilename, logger) {
+ let sr;
+ await Blake2b__default["default"].ready();
+
+ const {fd, sections} = await binFileUtils__namespace.readBinFile(tauFilename, "ptau", 1);
+ const {curve, power, ceremonyPower} = await readPTauHeader(fd, sections);
+ const contrs = await readContributions(fd, curve, sections);
+
+ if (logger) logger.debug("power: 2**" + power);
+ // Verify Last contribution
+
+ if (logger) logger.debug("Computing initial contribution hash");
+ const initialContribution = {
+ tauG1: curve.G1.g,
+ tauG2: curve.G2.g,
+ alphaG1: curve.G1.g,
+ betaG1: curve.G1.g,
+ betaG2: curve.G2.g,
+ nextChallenge: calculateFirstChallengeHash(curve, ceremonyPower, logger),
+ responseHash: Blake2b__default["default"](64).digest()
+ };
+
+ if (contrs.length == 0) {
+ if (logger) logger.error("This file has no contribution! It cannot be used in production");
+ return false;
+ }
+
+ let prevContr;
+ if (contrs.length>1) {
+ prevContr = contrs[contrs.length-2];
+ } else {
+ prevContr = initialContribution;
+ }
+ const curContr = contrs[contrs.length-1];
+ if (logger) logger.debug("Validating contribution #"+contrs[contrs.length-1].id);
+ const res = await verifyContribution(curve, curContr, prevContr, logger);
+ if (!res) return false;
+
+
+ const nextContributionHasher = Blake2b__default["default"](64);
+ nextContributionHasher.update(curContr.responseHash);
+
+ // Verify powers and compute nextChallengeHash
+
+ // await test();
+
+ // Verify Section tau*G1
+ if (logger) logger.debug("Verifying powers in tau*G1 section");
+ const rTau1 = await processSection(2, "G1", "tauG1", (2 ** power)*2-1, [0, 1], logger);
+ sr = await sameRatio$1(curve, rTau1.R1, rTau1.R2, curve.G2.g, curContr.tauG2);
+ if (sr !== true) {
+ if (logger) logger.error("tauG1 section. Powers do not match");
+ return false;
+ }
+ if (!curve.G1.eq(curve.G1.g, rTau1.singularPoints[0])) {
+ if (logger) logger.error("First element of tau*G1 section must be the generator");
+ return false;
+ }
+ if (!curve.G1.eq(curContr.tauG1, rTau1.singularPoints[1])) {
+ if (logger) logger.error("Second element of tau*G1 section does not match the one in the contribution section");
+ return false;
+ }
+
+ // await test();
+
+ // Verify Section tau*G2
+ if (logger) logger.debug("Verifying powers in tau*G2 section");
+ const rTau2 = await processSection(3, "G2", "tauG2", 2 ** power, [0, 1], logger);
+ sr = await sameRatio$1(curve, curve.G1.g, curContr.tauG1, rTau2.R1, rTau2.R2);
+ if (sr !== true) {
+ if (logger) logger.error("tauG2 section. Powers do not match");
+ return false;
+ }
+ if (!curve.G2.eq(curve.G2.g, rTau2.singularPoints[0])) {
+ if (logger) logger.error("First element of tau*G2 section must be the generator");
+ return false;
+ }
+ if (!curve.G2.eq(curContr.tauG2, rTau2.singularPoints[1])) {
+ if (logger) logger.error("Second element of tau*G2 section does not match the one in the contribution section");
+ return false;
+ }
+
+ // Verify Section alpha*tau*G1
+ if (logger) logger.debug("Verifying powers in alpha*tau*G1 section");
+ const rAlphaTauG1 = await processSection(4, "G1", "alphatauG1", 2 ** power, [0], logger);
+ sr = await sameRatio$1(curve, rAlphaTauG1.R1, rAlphaTauG1.R2, curve.G2.g, curContr.tauG2);
+ if (sr !== true) {
+ if (logger) logger.error("alphaTauG1 section. Powers do not match");
+ return false;
+ }
+ if (!curve.G1.eq(curContr.alphaG1, rAlphaTauG1.singularPoints[0])) {
+ if (logger) logger.error("First element of alpha*tau*G1 section (alpha*G1) does not match the one in the contribution section");
+ return false;
+ }
+
+ // Verify Section beta*tau*G1
+ if (logger) logger.debug("Verifying powers in beta*tau*G1 section");
+ const rBetaTauG1 = await processSection(5, "G1", "betatauG1", 2 ** power, [0], logger);
+ sr = await sameRatio$1(curve, rBetaTauG1.R1, rBetaTauG1.R2, curve.G2.g, curContr.tauG2);
+ if (sr !== true) {
+ if (logger) logger.error("betaTauG1 section. Powers do not match");
+ return false;
+ }
+ if (!curve.G1.eq(curContr.betaG1, rBetaTauG1.singularPoints[0])) {
+ if (logger) logger.error("First element of beta*tau*G1 section (beta*G1) does not match the one in the contribution section");
+ return false;
+ }
+
+ //Verify Beta G2
+ const betaG2 = await processSectionBetaG2(logger);
+ if (!curve.G2.eq(curContr.betaG2, betaG2)) {
+ if (logger) logger.error("betaG2 element in betaG2 section does not match the one in the contribution section");
+ return false;
+ }
+
+
+ const nextContributionHash = nextContributionHasher.digest();
+
+ // Check the nextChallengeHash
+ if (power == ceremonyPower) {
+ if (!hashIsEqual(nextContributionHash,curContr.nextChallenge)) {
+ if (logger) logger.error("Hash of the values does not match the next challenge of the last contributor in the contributions section");
+ return false;
+ }
+ }
+
+ if (logger) logger.info(formatHash(nextContributionHash, "Next challenge hash: "));
+
+ // Verify Previous contributions
+
+ printContribution(curContr, prevContr);
+ for (let i = contrs.length-2; i>=0; i--) {
+ const curContr = contrs[i];
+ const prevContr = (i>0) ? contrs[i-1] : initialContribution;
+ const res = await verifyContribution(curve, curContr, prevContr, logger);
+ if (!res) return false;
+ printContribution(curContr, prevContr);
+ }
+ if (logger) logger.info("-----------------------------------------------------");
+
+ if ((!sections[12]) || (!sections[13]) || (!sections[14]) || (!sections[15])) {
+ if (logger) logger.warn(
+ "this file does not contain phase2 precalculated values. Please run: \n" +
+ " snarkjs \"powersoftau preparephase2\" to prepare this file to be used in the phase2 ceremony."
+ );
+ } else {
+ let res;
+ res = await verifyLagrangeEvaluations("G1", 2, 12, "tauG1", logger);
+ if (!res) return false;
+ res = await verifyLagrangeEvaluations("G2", 3, 13, "tauG2", logger);
+ if (!res) return false;
+ res = await verifyLagrangeEvaluations("G1", 4, 14, "alphaTauG1", logger);
+ if (!res) return false;
+ res = await verifyLagrangeEvaluations("G1", 5, 15, "betaTauG1", logger);
+ if (!res) return false;
+ }
+
+ await fd.close();
+
+ if (logger) logger.info("Powers of Tau Ok!");
+
+ return true;
+
+ function printContribution(curContr, prevContr) {
+ if (!logger) return;
+ logger.info("-----------------------------------------------------");
+ logger.info(`Contribution #${curContr.id}: ${curContr.name ||""}`);
+
+ logger.info(formatHash(curContr.nextChallenge, "Next Challenge: "));
+
+ const buffV = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3);
+ toPtauPubKeyRpr(buffV, 0, curve, curContr.key, false);
+
+ const responseHasher = Blake2b__default["default"](64);
+ responseHasher.setPartialHash(curContr.partialHash);
+ responseHasher.update(buffV);
+ const responseHash = responseHasher.digest();
+
+ logger.info(formatHash(responseHash, "Response Hash:"));
+
+        logger.info(formatHash(prevContr.nextChallenge, "Based on Challenge Hash:"));
+
+ if (curContr.type == 1) {
+ logger.info(`Beacon generator: ${byteArray2hex(curContr.beaconHash)}`);
+ logger.info(`Beacon iterations Exp: ${curContr.numIterationsExp}`);
+ }
+
+ }
+
+ async function processSectionBetaG2(logger) {
+ const G = curve.G2;
+ const sG = G.F.n8*2;
+ const buffUv = new Uint8Array(sG);
+
+ if (!sections[6]) {
+ if (logger) logger.error("File has no BetaG2 section");
+ throw new Error("File has no BetaG2 section");
+ }
+ if (sections[6].length>1) {
+ if (logger) logger.error("File has more than one BetaG2 section");
+ throw new Error("File has more than one BetaG2 section");
+ }
+ fd.pos = sections[6][0].p;
+
+ const buff = await fd.read(sG);
+ const P = G.fromRprLEM(buff);
+
+ G.toRprUncompressed(buffUv, 0, P);
+ nextContributionHasher.update(buffUv);
+
+ return P;
+ }
+
+ async function processSection(idSection, groupName, sectionName, nPoints, singularPointIndexes, logger) {
+ const MAX_CHUNK_SIZE = 1<<16;
+ const G = curve[groupName];
+ const sG = G.F.n8*2;
+ await binFileUtils__namespace.startReadUniqueSection(fd, sections, idSection);
+
+ const singularPoints = [];
+
+ let R1 = G.zero;
+ let R2 = G.zero;
+
+ let lastBase = G.zero;
+
+ for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
+ if (logger) logger.debug(`points relations: ${sectionName}: ${i}/${nPoints} `);
+ const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
+ const bases = await fd.read(n*sG);
+
+ const basesU = await G.batchLEMtoU(bases);
+ nextContributionHasher.update(basesU);
+
+ const scalars = new Uint8Array(4*(n-1));
+ crypto__default["default"].randomFillSync(scalars);
+
+ if (i>0) {
+ const firstBase = G.fromRprLEM(bases, 0);
+ const r = crypto__default["default"].randomBytes(4).readUInt32BE(0, true);
+
+ R1 = G.add(R1, G.timesScalar(lastBase, r));
+ R2 = G.add(R2, G.timesScalar(firstBase, r));
+ }
+
+ const r1 = await G.multiExpAffine(bases.slice(0, (n-1)*sG), scalars);
+ const r2 = await G.multiExpAffine(bases.slice(sG), scalars);
+
+ R1 = G.add(R1, r1);
+ R2 = G.add(R2, r2);
+
+ lastBase = G.fromRprLEM( bases, (n-1)*sG);
+
+ for (let j=0; j<singularPointIndexes.length; j++) {
+ const sp = singularPointIndexes[j];
+ if ((sp >= i) && (sp < i+n)) {
+ const P = G.fromRprLEM(bases, (sp-i)*sG);
+ singularPoints.push(P);
+ }
+ }
+
+ }
+ await binFileUtils__namespace.endReadSection(fd);
+
+ return {
+ R1: R1,
+ R2: R2,
+ singularPoints: singularPoints
+ };
+
+ }
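+ /*
+    Illustrative sketch of the check above (not part of the original sources):
+    processSection batches the per-pair ratio checks. It draws random 32-bit
+    scalars r_i and accumulates
+        R1 = sum r_i * s_i      (every point but the last)
+        R2 = sum r_i * s_(i+1)  (every point but the first)
+    so a single sameRatio test implies, with overwhelming probability, that
+    s_(i+1) = tau * s_i for all i. The naive per-pair version it replaces
+    would look like:
+
+        for (let i=0; i<nPoints-1; i++) {
+            const ok = await sameRatio$1(curve, points[i], points[i+1], curve.G2.g, curContr.tauG2);
+            if (!ok) return false;
+        }
+ */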
+
+ async function verifyLagrangeEvaluations(gName, tauSection, lagrangeSection, sectionName, logger) {
+
+ if (logger) logger.debug(`Verifying phase2 calculated values ${sectionName}...`);
+ const G = curve[gName];
+ const sG = G.F.n8*2;
+
+ const seed= new Array(8);
+ for (let i=0; i<8; i++) {
+ seed[i] = crypto__default["default"].randomBytes(4).readUInt32BE(0, true);
+ }
+
+ for (let p=0; p<= power; p ++) {
+ const res = await verifyPower(p);
+ if (!res) return false;
+ }
+
+ if (tauSection == 2) {
+ const res = await verifyPower(power+1);
+ if (!res) return false;
+ }
+
+ return true;
+
+ async function verifyPower(p) {
+ if (logger) logger.debug(`Power ${p}...`);
+ const n8r = curve.Fr.n8;
+ const nPoints = 2 ** p;
+ let buff_r = new Uint32Array(nPoints);
+ let buffG;
+
+ let rng = new ffjavascript.ChaCha(seed);
+
+ if (logger) logger.debug(`Creating random numbers Powers${p}...`);
+ for (let i=0; i<nPoints; i++) buff_r[i] = rng.nextU32();
+
+ buff_r = new Uint8Array(buff_r.buffer, buff_r.byteOffset, buff_r.byteLength);
+
+ if (logger) logger.debug(`reading points Powers${p}...`);
+ await binFileUtils__namespace.startReadUniqueSection(fd, sections, tauSection);
+ buffG = new ffjavascript.BigBuffer(nPoints*sG);
+ if (p == power+1) {
+ await fd.readToBuffer(buffG, 0, (nPoints-1)*sG);
+ buffG.set(curve.G1.zeroAffine, (nPoints-1)*sG);
+ } else {
+ await fd.readToBuffer(buffG, 0, nPoints*sG);
+ }
+ await binFileUtils__namespace.endReadSection(fd, true);
+
+ const resTau = await G.multiExpAffine(buffG, buff_r, logger, sectionName + "_" + p);
+
+ buff_r = new ffjavascript.BigBuffer(nPoints * n8r);
+
+ rng = new ffjavascript.ChaCha(seed);
+ const buff4 = new Uint8Array(4);
+ const buff4V = new DataView(buff4.buffer);
+
+ if (logger) logger.debug(`Creating random numbers Powers${p}...`);
+ for (let i=0; i<nPoints; i++) {
+ buff4V.setUint32(0, rng.nextU32(), true);
+ buff_r.set(buff4, i*n8r);
+ }
+
+ if (logger) logger.debug(`batchToMontgomery ${p}...`);
+ buff_r = await curve.Fr.batchToMontgomery(buff_r);
+ if (logger) logger.debug(`fft ${p}...`);
+ buff_r = await curve.Fr.fft(buff_r);
+ if (logger) logger.debug(`batchFromMontgomery ${p}...`);
+ buff_r = await curve.Fr.batchFromMontgomery(buff_r);
+
+ if (logger) logger.debug(`reading points Lagrange${p}...`);
+ await binFileUtils__namespace.startReadUniqueSection(fd, sections, lagrangeSection);
+ fd.pos += sG*((2 ** p)-1);
+ await fd.readToBuffer(buffG, 0, nPoints*sG);
+ await binFileUtils__namespace.endReadSection(fd, true);
+
+ const resLagrange = await G.multiExpAffine(buffG, buff_r, logger, sectionName + "_" + p + "_transformed");
+
+ if (!G.eq(resTau, resLagrange)) {
+ if (logger) logger.error("Phase2 calculation does not match with powers of tau");
+ return false;
+ }
+
+ return true;
+ }
+ }
+}
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+/*
+ This function creates a new section in the fdNew file with id idSection.
+ It multiplies the points in fdOld by first, first*inc, first*inc^2, ...
+ nPoints times.
+ It also updates the challenge hasher with the new points.
+*/
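+
+/*
+ Illustrative sketch of the key application (assumes a loaded curve and a
+ section already read into `points`; not part of the original sources):
+
+     let t = first;
+     for (let i=0; i<points.length; i++) {
+         points[i] = G.timesFr(points[i], t);   // P_i becomes first * inc^i * P_i
+         t = curve.Fr.mul(t, inc);
+     }
+*/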
+
+async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, groupName, first, inc, sectionName, logger) {
+ const MAX_CHUNK_SIZE = 1 << 16;
+ const G = curve[groupName];
+ const sG = G.F.n8*2;
+ const nPoints = sections[idSection][0].size / sG;
+
+ await binFileUtils__namespace.startReadUniqueSection(fdOld, sections,idSection );
+ await binFileUtils__namespace.startWriteSection(fdNew, idSection);
+
+ let t = first;
+ for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
+ if (logger) logger.debug(`Applying key: ${sectionName}: ${i}/${nPoints}`);
+ const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
+ let buff;
+ buff = await fdOld.read(n*sG);
+ buff = await G.batchApplyKey(buff, t, inc);
+ await fdNew.write(buff);
+ t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
+ }
+
+ await binFileUtils__namespace.endWriteSection(fdNew);
+ await binFileUtils__namespace.endReadSection(fdOld);
+}
+
+async function applyKeyToChallengeSection(fdOld, fdNew, responseHasher, curve, groupName, nPoints, first, inc, formatOut, sectionName, logger) {
+ const G = curve[groupName];
+ const sG = G.F.n8*2;
+ const chunkSize = Math.floor((1<<20) / sG); // ~1MB of points per chunk
+ let t = first;
+ for (let i=0 ; i<nPoints ; i+= chunkSize) {
+ if (logger) logger.debug(`Applying key ${sectionName}: ${i}/${nPoints}`);
+ const n = Math.min(nPoints-i, chunkSize);
+ const buffInU = await fdOld.read(n * sG);
+ const buffInLEM = await G.batchUtoLEM(buffInU);
+ const buffOutLEM = await G.batchApplyKey(buffInLEM, t, inc);
+ let buffOut;
+ if (formatOut == "COMPRESSED") {
+ buffOut = await G.batchLEMtoC(buffOutLEM);
+ } else {
+ buffOut = await G.batchLEMtoU(buffOutLEM);
+ }
+
+ if (responseHasher) responseHasher.update(buffOut);
+ await fdNew.write(buffOut);
+ t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
+ }
+}
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+async function challengeContribute(curve, challengeFilename, responseFileName, entropy, logger) {
+ await Blake2b__default["default"].ready();
+
+ const fdFrom = await fastFile__namespace.readExisting(challengeFilename);
+
+
+ const sG1 = curve.F1.n64*8*2;
+ const sG2 = curve.F2.n64*8*2;
+ const domainSize = (fdFrom.totalSize + sG1 - 64 - sG2) / (4*sG1 + sG2);
+ let e = domainSize;
+ let power = 0;
+ while (e>1) {
+ e = e /2;
+ power += 1;
+ }
+
+ if (2 ** power != domainSize) throw new Error("Invalid file size");
+ if (logger) logger.debug("Power to tau size: "+power);
+
+ const rng = await getRandomRng(entropy);
+
+ const fdTo = await fastFile__namespace.createOverride(responseFileName);
+
+ // Calculate the hash
+ const challengeHasher = Blake2b__default["default"](64);
+ for (let i=0; i<fdFrom.totalSize; i+= fdFrom.pageSize) {
+ if (logger) logger.debug(`Hashing challenge ${i}/${fdFrom.totalSize}`);
+ const s = Math.min(fdFrom.totalSize - i, fdFrom.pageSize);
+ const buff = await fdFrom.read(s);
+ challengeHasher.update(buff);
+ }
+
+ const claimedHash = await fdFrom.read(64, 0);
+ if (logger) logger.info(formatHash(claimedHash, "Claimed Previous Response Hash: "));
+
+ const challengeHash = challengeHasher.digest();
+ if (logger) logger.info(formatHash(challengeHash, "Current Challenge Hash: "));
+
+ const key = createPTauKey(curve, challengeHash, rng);
+
+ if (logger) {
+ ["tau", "alpha", "beta"].forEach( (k) => {
+ logger.debug(k + ".g1_s: " + curve.G1.toString(key[k].g1_s, 16));
+ logger.debug(k + ".g1_sx: " + curve.G1.toString(key[k].g1_sx, 16));
+ logger.debug(k + ".g2_sp: " + curve.G2.toString(key[k].g2_sp, 16));
+ logger.debug(k + ".g2_spx: " + curve.G2.toString(key[k].g2_spx, 16));
+ logger.debug("");
+ });
+ }
+
+ const responseHasher = Blake2b__default["default"](64);
+
+ await fdTo.write(challengeHash);
+ responseHasher.update(challengeHash);
+
+ await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G1", (2 ** power)*2-1, curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG1" , logger );
+ await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G2", (2 ** power) , curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG2" , logger );
+ await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G1", (2 ** power) , key.alpha.prvKey, key.tau.prvKey, "COMPRESSED", "alphaTauG1", logger );
+ await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G1", (2 ** power) , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG1" , logger );
+ await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G2", 1 , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG2" , logger );
+
+ // Write and hash key
+ const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3);
+ toPtauPubKeyRpr(buffKey, 0, curve, key, false);
+ await fdTo.write(buffKey);
+ responseHasher.update(buffKey);
+ const responseHash = responseHasher.digest();
+ if (logger) logger.info(formatHash(responseHash, "Contribution Response Hash: "));
+
+ await fdTo.close();
+ await fdFrom.close();
+}
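+
+/*
+ Example use (a sketch; the challenge/response file names are hypothetical):
+
+     const curve = await getCurveFromName("bn128");
+     await challengeContribute(curve, "challenge_0003", "response_0003", "my entropy", console);
+*/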
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+async function beacon$1(oldPtauFilename, newPTauFilename, name, beaconHashStr,numIterationsExp, logger) {
+ const beaconHash = hex2ByteArray(beaconHashStr);
+ if ( (beaconHash.byteLength == 0)
+ || (beaconHash.byteLength*2 !=beaconHashStr.length))
+ {
+ if (logger) logger.error("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
+ return false;
+ }
+ if (beaconHash.length>=256) {
+ if (logger) logger.error("Maximum lenght of beacon hash is 255 bytes");
+ return false;
+ }
+
+ numIterationsExp = parseInt(numIterationsExp);
+ if ((numIterationsExp<10)||(numIterationsExp>63)) {
+ if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)");
+ return false;
+ }
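+
+ // A beacon contribution derives its key from a public random value (e.g. a
+ // block hash) iterated 2^numIterationsExp times through a hash, so anyone
+ // can recompute and audit it; see keyFromBeacon below. Sketch of the call:
+ //     const key = keyFromBeacon(curve, lastChallengeHash, beaconHash, numIterationsExp);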
+
+
+ await Blake2b__default["default"].ready();
+
+ const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
+ const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
+ if (power != ceremonyPower) {
+ if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
+ return false;
+ }
+ if (sections[12]) {
+ if (logger) logger.warn("Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
+ }
+ const contributions = await readContributions(fdOld, curve, sections);
+ const curContribution = {
+ name: name,
+ type: 1, // Beacon
+ numIterationsExp: numIterationsExp,
+ beaconHash: beaconHash
+ };
+
+ let lastChallengeHash;
+
+ if (contributions.length>0) {
+ lastChallengeHash = contributions[contributions.length-1].nextChallenge;
+ } else {
+ lastChallengeHash = calculateFirstChallengeHash(curve, power, logger);
+ }
+
+ curContribution.key = keyFromBeacon(curve, lastChallengeHash, beaconHash, numIterationsExp);
+
+ const responseHasher = new Blake2b__default["default"](64);
+ responseHasher.update(lastChallengeHash);
+
+ const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, 7);
+ await writePTauHeader(fdNew, curve, power);
+
+ const startSections = [];
+
+ let firstPoints;
+ firstPoints = await processSection(2, "G1", (2 ** power) * 2 -1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1", logger );
+ curContribution.tauG1 = firstPoints[1];
+ firstPoints = await processSection(3, "G2", (2 ** power) , curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2", logger );
+ curContribution.tauG2 = firstPoints[1];
+ firstPoints = await processSection(4, "G1", (2 ** power) , curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1", logger );
+ curContribution.alphaG1 = firstPoints[0];
+ firstPoints = await processSection(5, "G1", (2 ** power) , curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1", logger );
+ curContribution.betaG1 = firstPoints[0];
+ firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2", logger );
+ curContribution.betaG2 = firstPoints[0];
+
+ curContribution.partialHash = responseHasher.getPartialHash();
+
+ const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3);
+
+ toPtauPubKeyRpr(buffKey, 0, curve, curContribution.key, false);
+
+ responseHasher.update(new Uint8Array(buffKey));
+ const hashResponse = responseHasher.digest();
+
+ if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash imported: "));
+
+ const nextChallengeHasher = new Blake2b__default["default"](64);
+ nextChallengeHasher.update(hashResponse);
+
+ await hashSection(fdNew, "G1", 2, (2 ** power) * 2 -1, "tauG1", logger);
+ await hashSection(fdNew, "G2", 3, (2 ** power) , "tauG2", logger);
+ await hashSection(fdNew, "G1", 4, (2 ** power) , "alphaTauG1", logger);
+ await hashSection(fdNew, "G1", 5, (2 ** power) , "betaTauG1", logger);
+ await hashSection(fdNew, "G2", 6, 1 , "betaG2", logger);
+
+ curContribution.nextChallenge = nextChallengeHasher.digest();
+
+ if (logger) logger.info(formatHash(curContribution.nextChallenge, "Next Challenge Hash: "));
+
+ contributions.push(curContribution);
+
+ await writeContributions(fdNew, curve, contributions);
+
+ await fdOld.close();
+ await fdNew.close();
+
+ return hashResponse;
+
+ async function processSection(sectionId, groupName, NPoints, first, inc, sectionName, logger) {
+ const res = [];
+ fdOld.pos = sections[sectionId][0].p;
+
+ await binFileUtils__namespace.startWriteSection(fdNew, sectionId);
+
+ startSections[sectionId] = fdNew.pos;
+
+ const G = curve[groupName];
+ const sG = G.F.n8*2;
+ const chunkSize = Math.floor((1<<20) / sG); // ~1MB of points per chunk
+ let t = first;
+ for (let i=0 ; i.
+*/
+
+async function contribute(oldPtauFilename, newPTauFilename, name, entropy, logger) {
+ await Blake2b__default["default"].ready();
+
+ const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
+ const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
+ if (power != ceremonyPower) {
+ if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
+ throw new Error("This file has been reduced. You cannot contribute into a reduced file.");
+ }
+ if (sections[12]) {
+ if (logger) logger.warn("WARNING: Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
+ }
+ const contributions = await readContributions(fdOld, curve, sections);
+ const curContribution = {
+ name: name,
+ type: 0, // Regular contribution
+ };
+
+ let lastChallengeHash;
+
+ const rng = await getRandomRng(entropy);
+
+ if (contributions.length>0) {
+ lastChallengeHash = contributions[contributions.length-1].nextChallenge;
+ } else {
+ lastChallengeHash = calculateFirstChallengeHash(curve, power, logger);
+ }
+
+ // Generate a random key
+
+
+ curContribution.key = createPTauKey(curve, lastChallengeHash, rng);
+
+
+ const responseHasher = new Blake2b__default["default"](64);
+ responseHasher.update(lastChallengeHash);
+
+ const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, 7);
+ await writePTauHeader(fdNew, curve, power);
+
+ const startSections = [];
+
+ let firstPoints;
+ firstPoints = await processSection(2, "G1", (2 ** power) * 2 -1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1" );
+ curContribution.tauG1 = firstPoints[1];
+ firstPoints = await processSection(3, "G2", (2 ** power) , curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2" );
+ curContribution.tauG2 = firstPoints[1];
+ firstPoints = await processSection(4, "G1", (2 ** power) , curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1" );
+ curContribution.alphaG1 = firstPoints[0];
+ firstPoints = await processSection(5, "G1", (2 ** power) , curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1" );
+ curContribution.betaG1 = firstPoints[0];
+ firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2" );
+ curContribution.betaG2 = firstPoints[0];
+
+ curContribution.partialHash = responseHasher.getPartialHash();
+
+ const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3);
+
+ toPtauPubKeyRpr(buffKey, 0, curve, curContribution.key, false);
+
+ responseHasher.update(new Uint8Array(buffKey));
+ const hashResponse = responseHasher.digest();
+
+ if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash imported: "));
+
+ const nextChallengeHasher = new Blake2b__default["default"](64);
+ nextChallengeHasher.update(hashResponse);
+
+ await hashSection(fdNew, "G1", 2, (2 ** power) * 2 -1, "tauG1");
+ await hashSection(fdNew, "G2", 3, (2 ** power) , "tauG2");
+ await hashSection(fdNew, "G1", 4, (2 ** power) , "alphaTauG1");
+ await hashSection(fdNew, "G1", 5, (2 ** power) , "betaTauG1");
+ await hashSection(fdNew, "G2", 6, 1 , "betaG2");
+
+ curContribution.nextChallenge = nextChallengeHasher.digest();
+
+ if (logger) logger.info(formatHash(curContribution.nextChallenge, "Next Challenge Hash: "));
+
+ contributions.push(curContribution);
+
+ await writeContributions(fdNew, curve, contributions);
+
+ await fdOld.close();
+ await fdNew.close();
+
+ return hashResponse;
+
+ async function processSection(sectionId, groupName, NPoints, first, inc, sectionName) {
+ const res = [];
+ fdOld.pos = sections[sectionId][0].p;
+
+ await binFileUtils__namespace.startWriteSection(fdNew, sectionId);
+
+ startSections[sectionId] = fdNew.pos;
+
+ const G = curve[groupName];
+ const sG = G.F.n8*2;
+ const chunkSize = Math.floor((1<<20) / sG); // ~1MB of points per chunk
+ let t = first;
+ for (let i=0 ; i.
+*/
+
+async function preparePhase2(oldPtauFilename, newPTauFilename, logger) {
+
+ const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
+ const {curve, power} = await readPTauHeader(fdOld, sections);
+
+ const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, 11);
+ await writePTauHeader(fdNew, curve, power);
+
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 2);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 3);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 4);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 5);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 6);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 7);
+
+ await processSection(2, 12, "G1", "tauG1" );
+ await processSection(3, 13, "G2", "tauG2" );
+ await processSection(4, 14, "G1", "alphaTauG1" );
+ await processSection(5, 15, "G1", "betaTauG1" );
+
+ await fdOld.close();
+ await fdNew.close();
+
+ // await fs.promises.unlink(newPTauFilename+ ".tmp");
+
+ return;
+
+ async function processSection(oldSectionId, newSectionId, Gstr, sectionName) {
+ if (logger) logger.debug("Starting section: "+sectionName);
+
+ await binFileUtils__namespace.startWriteSection(fdNew, newSectionId);
+
+ for (let p=0; p<=power; p++) {
+ await processSectionPower(p);
+ }
+
+ if (oldSectionId == 2) {
+ await processSectionPower(power+1);
+ }
+
+ await binFileUtils__namespace.endWriteSection(fdNew);
+
+
+ async function processSectionPower(p) {
+ const nPoints = 2 ** p;
+ const G = curve[Gstr];
+ curve.Fr;
+ const sGin = G.F.n8*2;
+ G.F.n8*3;
+
+ let buff;
+ buff = new ffjavascript.BigBuffer(nPoints*sGin);
+
+ await binFileUtils__namespace.startReadUniqueSection(fdOld, sections, oldSectionId);
+ if ((oldSectionId == 2)&&(p==power+1)) {
+ await fdOld.readToBuffer(buff, 0,(nPoints-1)*sGin );
+ buff.set(curve.G1.zeroAffine, (nPoints-1)*sGin );
+ } else {
+ await fdOld.readToBuffer(buff, 0,nPoints*sGin );
+ }
+ await binFileUtils__namespace.endReadSection(fdOld, true);
+
+
+ buff = await G.lagrangeEvaluations(buff, "affine", "affine", logger, sectionName);
+ await fdNew.write(buff);
+
+/*
+ if (p <= curve.Fr.s) {
+ buff = await G.ifft(buff, "affine", "affine", logger, sectionName);
+ await fdNew.write(buff);
+ } else if (p == curve.Fr.s+1) {
+ const smallM = 1<.
+*/
+
+async function truncate(ptauFilename, template, logger) {
+
+ const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(ptauFilename, "ptau", 1);
+ const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
+
+ const sG1 = curve.G1.F.n8*2;
+ const sG2 = curve.G2.F.n8*2;
+
+ for (let p=1; p.
+*/
+
+async function convert(oldPtauFilename, newPTauFilename, logger) {
+
+ const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
+ const {curve, power} = await readPTauHeader(fdOld, sections);
+
+ const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, 11);
+ await writePTauHeader(fdNew, curve, power);
+
+ // const fdTmp = await fastFile.createOverride(newPTauFilename+ ".tmp");
+
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 2);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 3);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 4);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 5);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 6);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 7);
+
+ await processSection(2, 12, "G1", "tauG1" );
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 13);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 14);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 15);
+
+ await fdOld.close();
+ await fdNew.close();
+
+ // await fs.promises.unlink(newPTauFilename+ ".tmp");
+
+ return;
+
+ async function processSection(oldSectionId, newSectionId, Gstr, sectionName) {
+ if (logger) logger.debug("Starting section: "+sectionName);
+
+ await binFileUtils__namespace.startWriteSection(fdNew, newSectionId);
+
+ const size = sections[newSectionId][0].size;
+ const chunkSize = fdOld.pageSize;
+ await binFileUtils__namespace.startReadUniqueSection(fdOld, sections, newSectionId);
+ for (let p=0; p.
+*/
+
+async function exportJson(pTauFilename, verbose) {
+ const {fd, sections} = await binFileUtils__namespace.readBinFile(pTauFilename, "ptau", 1);
+
+ const {curve, power} = await readPTauHeader(fd, sections);
+
+ const pTau = {};
+ pTau.q = curve.q;
+ pTau.power = power;
+ pTau.contributions = await readContributions(fd, curve, sections);
+
+ pTau.tauG1 = await exportSection(2, "G1", (2 ** power)*2 -1, "tauG1");
+ pTau.tauG2 = await exportSection(3, "G2", (2 ** power), "tauG2");
+ pTau.alphaTauG1 = await exportSection(4, "G1", (2 ** power), "alphaTauG1");
+ pTau.betaTauG1 = await exportSection(5, "G1", (2 ** power), "betaTauG1");
+ pTau.betaG2 = await exportSection(6, "G2", 1, "betaG2");
+
+ pTau.lTauG1 = await exportLagrange(12, "G1", "lTauG1");
+ pTau.lTauG2 = await exportLagrange(13, "G2", "lTauG2");
+ pTau.lAlphaTauG1 = await exportLagrange(14, "G1", "lAlphaTauG2");
+ pTau.lBetaTauG1 = await exportLagrange(15, "G1", "lBetaTauG2");
+
+ await fd.close();
+
+ return pTau;
+
+
+
+ async function exportSection(sectionId, groupName, nPoints, sectionName) {
+ const G = curve[groupName];
+ const sG = G.F.n8*2;
+
+ const res = [];
+ await binFileUtils__namespace.startReadUniqueSection(fd, sections, sectionId);
+ for (let i=0; i< nPoints; i++) {
+ if ((verbose)&&i&&(i%10000 == 0)) console.log(`${sectionName}: ` + i);
+ const buff = await fd.read(sG);
+ res.push(G.fromRprLEM(buff, 0));
+ }
+ await binFileUtils__namespace.endReadSection(fd);
+
+ return res;
+ }
+
+ async function exportLagrange(sectionId, groupName, sectionName) {
+ const G = curve[groupName];
+ const sG = G.F.n8*2;
+
+ const res = [];
+ await binFileUtils__namespace.startReadUniqueSection(fd, sections, sectionId);
+ for (let p=0; p<=power; p++) {
+ if (verbose) console.log(`${sectionName}: Power: ${p}`);
+ res[p] = [];
+ const nPoints = (2 ** p);
+ for (let i=0; i<nPoints; i++) {
+ if ((verbose)&&i&&(i%10000 == 0)) console.log(`${sectionName}: ${i}/${nPoints}`);
+ const buff = await fd.read(sG);
+ res[p].push(G.fromRprLEM(buff, 0));
+ }
+ }
+ await binFileUtils__namespace.endReadSection(fd);
+ return res;
+ }
+}
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+var powersoftau = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ newAccumulator: newAccumulator,
+ exportChallenge: exportChallenge,
+ importResponse: importResponse,
+ verify: verify,
+ challengeContribute: challengeContribute,
+ beacon: beacon$1,
+ contribute: contribute,
+ preparePhase2: preparePhase2,
+ truncate: truncate,
+ convert: convert,
+ exportJson: exportJson
+});
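+
+/*
+ Typical end-to-end ceremony using the functions frozen above (a sketch;
+ file names, power 12 and the console logger are placeholders):
+
+     const curve = await getCurveFromName("bn128");
+     await newAccumulator(curve, 12, "pot12_0000.ptau", console);
+     await contribute("pot12_0000.ptau", "pot12_0001.ptau", "first", "xxx", console);
+     await beacon$1("pot12_0001.ptau", "pot12_beacon.ptau", "final beacon", "0102030405", 10, console);
+     await preparePhase2("pot12_beacon.ptau", "pot12_final.ptau", console);
+     await verify("pot12_final.ptau", console);
+*/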
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+function r1csPrint(r1cs, syms, logger) {
+ for (let i=0; i<r1cs.constraints.length; i++) {
+ printConstraint(r1cs.constraints[i]);
+ }
+
+ function printConstraint(c) {
+ const lc2str = (lc) => {
+ let S = "";
+ const keys = Object.keys(lc);
+ keys.forEach( (k) => {
+ let name = syms.varIdx2Name[k];
+ if (name == "one") name = "";
+
+ let vs = r1cs.curve.Fr.toString(lc[k]);
+ if (vs == "1") vs = ""; // Do not show ones
+ if (vs == "-1") vs = "-"; // Do not show ones
+ if ((S!="")&&(vs[0]!="-")) vs = "+"+vs;
+ if (S!="") vs = " "+vs;
+ S= S + vs + name;
+ });
+ return S;
+ };
+ const S = `[ ${lc2str(c[0])} ] * [ ${lc2str(c[1])} ] - [ ${lc2str(c[2])} ] = 0`;
+ if (logger) logger.info(S);
+ }
+
+}
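+
+/*
+ For reference, each constraint prints as one line of the form
+ (illustrative): [ main.a ] * [ main.b ] - [ main.out ] = 0
+*/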
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+const bls12381r = ffjavascript.Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
+const bn128r = ffjavascript.Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");
+
+async function r1csInfo(r1csName, logger) {
+
+ const cir = await r1csfile.readR1cs(r1csName);
+
+ if (ffjavascript.Scalar.eq(cir.prime, bn128r)) {
+ if (logger) logger.info("Curve: bn-128");
+ } else if (ffjavascript.Scalar.eq(cir.prime, bls12381r)) {
+ if (logger) logger.info("Curve: bls12-381");
+ } else {
+ if (logger) logger.info(`Unknown Curve. Prime: ${ffjavascript.Scalar.toString(cir.prime)}`);
+ }
+ if (logger) logger.info(`# of Wires: ${cir.nVars}`);
+ if (logger) logger.info(`# of Constraints: ${cir.nConstraints}`);
+ if (logger) logger.info(`# of Private Inputs: ${cir.nPrvInputs}`);
+ if (logger) logger.info(`# of Public Inputs: ${cir.nPubInputs}`);
+ if (logger) logger.info(`# of Labels: ${cir.nLabels}`);
+ if (logger) logger.info(`# of Outputs: ${cir.nOutputs}`);
+
+ return cir;
+}
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+function stringifyBigInts$2(Fr, o) {
+ if (o instanceof Uint8Array) {
+ return Fr.toString(o);
+ } else if (Array.isArray(o)) {
+ return o.map(stringifyBigInts$2.bind(null, Fr));
+ } else if (typeof o == "object") {
+ const res = {};
+ const keys = Object.keys(o);
+ keys.forEach( (k) => {
+ res[k] = stringifyBigInts$2(Fr, o[k]);
+ });
+ return res;
+ } else if ((typeof(o) == "bigint") || o.eq !== undefined) {
+ return o.toString(10);
+ } else {
+ return o;
+ }
+}
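+
+/*
+ Illustrative behaviour (not part of the original sources):
+
+     stringifyBigInts$2(Fr, { a: Fr.e(3), b: [Fr.e(1), 2n] })
+     // -> { a: "3", b: ["1", "2"] }
+*/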
+
+
+async function r1csExportJson(r1csFileName, logger) {
+
+ const cir = await r1csfile.readR1cs(r1csFileName, true, true, true, logger);
+ const Fr=cir.curve.Fr;
+ delete cir.curve;
+
+ return stringifyBigInts$2(Fr, cir);
+}
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+var r1cs = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ print: r1csPrint,
+ info: r1csInfo,
+ exportJson: r1csExportJson
+});
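+
+/*
+ Example use of the frozen r1cs API (a sketch; the file name is hypothetical):
+
+     const header = await r1csInfo("circuit.r1cs", console); // logs curve, #wires, #constraints...
+     const json = await r1csExportJson("circuit.r1cs", console);
+*/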
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with snarkJS. If not, see .
+*/
+
+async function loadSymbols(symFileName) {
+ const sym = {
+ labelIdx2Name: [ "one" ],
+ varIdx2Name: [ "one" ],
+ componentIdx2Name: []
+ };
+ const fd = await fastFile__namespace.readExisting(symFileName);
+ const buff = await fd.read(fd.totalSize);
+ const symsStr = new TextDecoder("utf-8").decode(buff);
+ const lines = symsStr.split("\n");
+ for (let i=0; i<lines.length; i++) {
+ const arr = lines[i].split(",");
+ if (arr.length!=4) continue;
+ if (sym.labelIdx2Name[arr[0]]) continue;
+ sym.labelIdx2Name[arr[0]] = arr[3];
+ sym.varIdx2Name[arr[1]] = arr[3];
+ if (!sym.componentIdx2Name[arr[2]]) {
+ const p = arr[3].lastIndexOf(".");
+ sym.componentIdx2Name[arr[2]] = arr[3].substring(0, p);
+ }
+ }
+
+ await fd.close();
+
+ return sym;
+}
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+async function wtnsDebug(input, wasmFileName, wtnsFileName, symName, options, logger) {
+
+ const fdWasm = await fastFile__namespace.readExisting(wasmFileName);
+ const wasm = await fdWasm.read(fdWasm.totalSize);
+ await fdWasm.close();
+
+
+ let wcOps = {
+ sanityCheck: true
+ };
+ let sym = await loadSymbols(symName);
+ if (options.set) {
+ if (!sym) sym = await loadSymbols(symName);
+ wcOps.logSetSignal= function(labelIdx, value) {
+ if (logger) logger.info("SET " + sym.labelIdx2Name[labelIdx] + " <-- " + value.toString());
+ };
+ }
+ if (options.get) {
+ if (!sym) sym = await loadSymbols(symName);
+ wcOps.logGetSignal= function(varIdx, value) {
+ if (logger) logger.info("GET " + sym.labelIdx2Name[varIdx] + " --> " + value.toString());
+ };
+ }
+ if (options.trigger) {
+ if (!sym) sym = await loadSymbols(symName);
+ wcOps.logStartComponent= function(cIdx) {
+ if (logger) logger.info("START: " + sym.componentIdx2Name[cIdx]);
+ };
+ wcOps.logFinishComponent= function(cIdx) {
+ if (logger) logger.info("FINISH: " + sym.componentIdx2Name[cIdx]);
+ };
+ }
+ wcOps.sym = sym;
+
+ const wc = await circom_runtime.WitnessCalculatorBuilder(wasm, wcOps);
+ const w = await wc.calculateWitness(input);
+
+ const fdWtns = await binFileUtils__namespace.createBinFile(wtnsFileName, "wtns", 2, 2);
+
+ await write(fdWtns, w, wc.prime);
+
+ await fdWtns.close();
+}
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+async function wtnsExportJson(wtnsFileName) {
+
+ const w = await read(wtnsFileName);
+
+ return w;
+}
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+var wtns = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ calculate: wtnsCalculate,
+ debug: wtnsDebug,
+ exportJson: wtnsExportJson
+});
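+
+/*
+ Example use of the frozen wtns API (a sketch; inputs and file names are
+ hypothetical):
+
+     await wtnsCalculate({ a: 3, b: 11 }, "circuit.wasm", "witness.wtns");
+     const w = await wtnsExportJson("witness.wtns"); // array of field elements
+*/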
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+const SUBARRAY_SIZE = 0x40000;
+
+const BigArrayHandler = {
+ get: function(obj, prop) {
+ if (!isNaN(prop)) {
+ return obj.getElement(prop);
+ } else return obj[prop];
+ },
+ set: function(obj, prop, value) {
+ if (!isNaN(prop)) {
+ return obj.setElement(prop, value);
+ } else {
+ obj[prop] = value;
+ return true;
+ }
+ }
+};
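+
+// The Proxy handler above makes a BigArray indexable like a normal array:
+// numeric gets/sets are routed to getElement/setElement over fixed-size
+// subarrays, sidestepping engine limits on huge plain arrays. Sketch:
+//
+//     const a = new BigArray();
+//     a.push("x");      // stored via setElement(0, "x")
+//     a[0];             // "x", served by getElement(0)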
+
+class _BigArray {
+ constructor (initSize) {
+ this.length = initSize || 0;
+ this.arr = new Array(SUBARRAY_SIZE);
+
+ for (let i=0; i<initSize; i+=SUBARRAY_SIZE) {
+ this.arr[i/SUBARRAY_SIZE] = new Array(Math.min(SUBARRAY_SIZE, initSize - i));
+ }
+ }
+ push () {
+ for (let i=0; i<arguments.length; i++) {
+ this.setElement(this.length, arguments[i]);
+ }
+ }
+ slice (f, t) {
+ const arr = new Array(t-f);
+ for (let i=f; i<t; i++) arr[i-f] = this.getElement(i);
+ return arr;
+ }
+ getElement(idx) {
+ idx = parseInt(idx);
+ const idx1 = Math.floor(idx / SUBARRAY_SIZE);
+ const idx2 = idx % SUBARRAY_SIZE;
+ return this.arr[idx1] ? this.arr[idx1][idx2] : undefined;
+ }
+ setElement(idx, value) {
+ idx = parseInt(idx);
+ const idx1 = Math.floor(idx / SUBARRAY_SIZE);
+ if (!this.arr[idx1]) {
+ this.arr[idx1] = new Array(SUBARRAY_SIZE);
+ }
+ this.arr[idx1][idx % SUBARRAY_SIZE] = value;
+ if (idx >= this.length) this.length = idx+1;
+ return true;
+ }
+ getKeys() {
+ const newA = new BigArray();
+ for (let i=0; i<this.arr.length; i++) {
+ if (this.arr[i]) {
+ for (let j=0; j<this.arr[i].length; j++) {
+ if (typeof this.arr[i][j] !== "undefined") newA.push(i*SUBARRAY_SIZE+j);
+ }
+ }
+ }
+ return newA;
+ }
+}
+
+class BigArray {
+ constructor( initSize ) {
+ const obj = new _BigArray(initSize);
+ return new Proxy(obj, BigArrayHandler);
+ }
+}
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+
+async function newZKey(r1csName, ptauName, zkeyName, logger) {
+
+ const TAU_G1 = 0;
+ const TAU_G2 = 1;
+ const ALPHATAU_G1 = 2;
+ const BETATAU_G1 = 3;
+ await Blake2b__default["default"].ready();
+ const csHasher = Blake2b__default["default"](64);
+
+ const {fd: fdPTau, sections: sectionsPTau} = await binFileUtils.readBinFile(ptauName, "ptau", 1, 1<<22, 1<<24);
+ const {curve, power} = await readPTauHeader(fdPTau, sectionsPTau);
+ const {fd: fdR1cs, sections: sectionsR1cs} = await binFileUtils.readBinFile(r1csName, "r1cs", 1, 1<<22, 1<<24);
+ const r1cs = await r1csfile.readR1csHeader(fdR1cs, sectionsR1cs, false);
+
+ const fdZKey = await binFileUtils.createBinFile(zkeyName, "zkey", 1, 10, 1<<22, 1<<24);
+
+ const sG1 = curve.G1.F.n8*2;
+ const sG2 = curve.G2.F.n8*2;
+
+ if (r1cs.prime != curve.r) {
+ if (logger) logger.error("r1cs curve does not match powers of tau ceremony curve");
+ return -1;
+ }
+
+ const cirPower = log2(r1cs.nConstraints + r1cs.nPubInputs + r1cs.nOutputs +1 -1) +1;
+
+ if (cirPower > power) {
+ if (logger) logger.error(`circuit too big for this power of tau ceremony. ${r1cs.nConstraints}*2 > 2**${power}`);
+ return -1;
+ }
+
+ if (!sectionsPTau[12]) {
+ if (logger) logger.error("Powers of tau is not prepared.");
+ return -1;
+ }
+
+ const nPublic = r1cs.nOutputs + r1cs.nPubInputs;
+ const domainSize = 2 ** cirPower;
+
+ // Write the header
+ ///////////
+ await binFileUtils.startWriteSection(fdZKey, 1);
+ await fdZKey.writeULE32(1); // Groth
+ await binFileUtils.endWriteSection(fdZKey);
+
+ // Write the Groth header section
+ ///////////
+
+ await binFileUtils.startWriteSection(fdZKey, 2);
+ const primeQ = curve.q;
+ const n8q = (Math.floor( (ffjavascript.Scalar.bitLength(primeQ) - 1) / 64) +1)*8;
+
+ const primeR = curve.r;
+ const n8r = (Math.floor( (ffjavascript.Scalar.bitLength(primeR) - 1) / 64) +1)*8;
+ const Rr = ffjavascript.Scalar.mod(ffjavascript.Scalar.shl(1, n8r*8), primeR);
+ const R2r = curve.Fr.e(ffjavascript.Scalar.mod(ffjavascript.Scalar.mul(Rr,Rr), primeR));
+
+ await fdZKey.writeULE32(n8q);
+ await binFileUtils.writeBigInt(fdZKey, primeQ, n8q);
+ await fdZKey.writeULE32(n8r);
+ await binFileUtils.writeBigInt(fdZKey, primeR, n8r);
+ await fdZKey.writeULE32(r1cs.nVars); // Total number of vars
+ await fdZKey.writeULE32(nPublic); // Total number of public vars (not including ONE)
+ await fdZKey.writeULE32(domainSize); // domainSize
+
+ let bAlpha1;
+ bAlpha1 = await fdPTau.read(sG1, sectionsPTau[4][0].p);
+ await fdZKey.write(bAlpha1);
+ bAlpha1 = await curve.G1.batchLEMtoU(bAlpha1);
+ csHasher.update(bAlpha1);
+
+ let bBeta1;
+ bBeta1 = await fdPTau.read(sG1, sectionsPTau[5][0].p);
+ await fdZKey.write(bBeta1);
+ bBeta1 = await curve.G1.batchLEMtoU(bBeta1);
+ csHasher.update(bBeta1);
+
+ let bBeta2;
+ bBeta2 = await fdPTau.read(sG2, sectionsPTau[6][0].p);
+ await fdZKey.write(bBeta2);
+ bBeta2 = await curve.G2.batchLEMtoU(bBeta2);
+ csHasher.update(bBeta2);
+
+ const bg1 = new Uint8Array(sG1);
+ curve.G1.toRprLEM(bg1, 0, curve.G1.g);
+ const bg2 = new Uint8Array(sG2);
+ curve.G2.toRprLEM(bg2, 0, curve.G2.g);
+ const bg1U = new Uint8Array(sG1);
+ curve.G1.toRprUncompressed(bg1U, 0, curve.G1.g);
+ const bg2U = new Uint8Array(sG2);
+ curve.G2.toRprUncompressed(bg2U, 0, curve.G2.g);
+
+ await fdZKey.write(bg2); // gamma2
+ await fdZKey.write(bg1); // delta1
+ await fdZKey.write(bg2); // delta2
+ csHasher.update(bg2U); // gamma2
+ csHasher.update(bg1U); // delta1
+ csHasher.update(bg2U); // delta2
+ await binFileUtils.endWriteSection(fdZKey);
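+
+ // Note: the zkey stores points in Montgomery form (LEM) for fast loading,
+ // while the circuit hash is computed over the uncompressed normalized form
+ // (the batchLEMtoU / toRprUncompressed calls above), so the hash does not
+ // depend on the internal point representation.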
+
+ if (logger) logger.info("Reading r1cs");
+ let sR1cs = await binFileUtils.readSection(fdR1cs, sectionsR1cs, 2);
+
+ const A = new BigArray(r1cs.nVars);
+ const B1 = new BigArray(r1cs.nVars);
+ const B2 = new BigArray(r1cs.nVars);
+ const C = new BigArray(r1cs.nVars- nPublic -1);
+ const IC = new Array(nPublic+1);
+
+ if (logger) logger.info("Reading tauG1");
+ let sTauG1 = await binFileUtils.readSection(fdPTau, sectionsPTau, 12, (domainSize -1)*sG1, domainSize*sG1);
+ if (logger) logger.info("Reading tauG2");
+ let sTauG2 = await binFileUtils.readSection(fdPTau, sectionsPTau, 13, (domainSize -1)*sG2, domainSize*sG2);
+ if (logger) logger.info("Reading alphatauG1");
+ let sAlphaTauG1 = await binFileUtils.readSection(fdPTau, sectionsPTau, 14, (domainSize -1)*sG1, domainSize*sG1);
+ if (logger) logger.info("Reading betatauG1");
+ let sBetaTauG1 = await binFileUtils.readSection(fdPTau, sectionsPTau, 15, (domainSize -1)*sG1, domainSize*sG1);
+
+ await processConstraints();
+
+ await composeAndWritePoints(3, "G1", IC, "IC");
+
+ await writeHs();
+
+ await hashHPoints();
+
+ await composeAndWritePoints(8, "G1", C, "C");
+ await composeAndWritePoints(5, "G1", A, "A");
+ await composeAndWritePoints(6, "G1", B1, "B1");
+ await composeAndWritePoints(7, "G2", B2, "B2");
+
+ const csHash = csHasher.digest();
+ // Contributions section
+ await binFileUtils.startWriteSection(fdZKey, 10);
+ await fdZKey.write(csHash);
+ await fdZKey.writeULE32(0);
+ await binFileUtils.endWriteSection(fdZKey);
+
+ if (logger) logger.info(formatHash(csHash, "Circuit hash: "));
+
+
+ await fdZKey.close();
+ await fdR1cs.close();
+ await fdPTau.close();
+
+ return csHash;
+
+ async function writeHs() {
+ await binFileUtils.startWriteSection(fdZKey, 9);
+ const buffOut = new ffjavascript.BigBuffer(domainSize*sG1);
+ if (cirPower < curve.Fr.s) {
+ let sTauG1 = await binFileUtils.readSection(fdPTau, sectionsPTau, 12, (domainSize*2-1)*sG1, domainSize*2*sG1);
+ for (let i=0; i< domainSize; i++) {
+ if ((logger)&&(i%10000 == 0)) logger.debug(`splitting buffer: ${i}/${domainSize}`);
+ const buff = sTauG1.slice( (i*2+1)*sG1, (i*2+1)*sG1 + sG1 );
+ buffOut.set(buff, i*sG1);
+ }
+ } else if (cirPower == curve.Fr.s) {
+ const o = sectionsPTau[12][0].p + ((2 ** (cirPower+1)) -1)*sG1;
+ await fdPTau.readToBuffer(buffOut, 0, domainSize*sG1, o + domainSize*sG1);
+ } else {
+ if (logger) logger.error("Circuit too big");
+ throw new Error("Circuit too big for this curve");
+ }
+ await fdZKey.write(buffOut);
+ await binFileUtils.endWriteSection(fdZKey);
+ }
+
+ async function processConstraints() {
+ const buffCoeff = new Uint8Array(12 + curve.Fr.n8);
+ const buffCoeffV = new DataView(buffCoeff.buffer);
+ const bOne = new Uint8Array(curve.Fr.n8);
+ curve.Fr.toRprLE(bOne, 0, curve.Fr.e(1));
+
+ let r1csPos = 0;
+
+ function r1cs_readULE32() {
+ const buff = sR1cs.slice(r1csPos, r1csPos+4);
+ r1csPos += 4;
+ const buffV = new DataView(buff.buffer);
+ return buffV.getUint32(0, true);
+ }
+
+ const coefs = new BigArray();
+ for (let c=0; c=0) {
+ n = curve.Fr.fromRprLE(sR1cs.slice(c[3], c[3] + curve.Fr.n8), 0);
+ } else {
+ n = curve.Fr.fromRprLE(bOne, 0);
+ }
+ const nR2 = curve.Fr.mul(n, R2r);
+ curve.Fr.toRprLE(buffCoeff, 12, nR2);
+ buffSection.set(buffCoeff, coefsPos);
+ coefsPos += buffCoeff.length;
+ }
+
+ }
+
+ async function composeAndWritePoints(idSection, groupName, arr, sectionName) {
+ const CHUNK_SIZE= 1<<15;
+ const G = curve[groupName];
+
+ hashU32(arr.length);
+ await binFileUtils.startWriteSection(fdZKey, idSection);
+
+ let opPromises = [];
+
+ let i=0;
+ while (i {
+ if (logger) logger.debug(`Writing points end ${sectionName}: ${_i}/${arr.length}`);
+ return r;
+ }));
+ i += n;
+ t++;
+ }
+
+ const result = await Promise.all(opPromises);
+
+ for (let k=0; k 2<<14) {
+ bBases = new ffjavascript.BigBuffer(acc*sGin);
+ bScalars = new ffjavascript.BigBuffer(acc*curve.Fr.n8);
+ } else {
+ bBases = new Uint8Array(acc*sGin);
+ bScalars = new Uint8Array(acc*curve.Fr.n8);
+ }
+ let pB =0;
+ let pS =0;
+
+ const sBuffs = [
+ sTauG1,
+ sTauG2,
+ sAlphaTauG1,
+ sBetaTauG1
+ ];
+
+ const bOne = new Uint8Array(curve.Fr.n8);
+ curve.Fr.toRprLE(bOne, 0, curve.Fr.e(1));
+
+ let offset = 0;
+ for (let i=0; i=0) {
+ bScalars.set(
+ sR1cs.slice(
+ arr[i][j][2],
+ arr[i][j][2] + curve.Fr.n8
+ ),
+ offset*curve.Fr.n8
+ );
+ } else {
+ bScalars.set(bOne, offset*curve.Fr.n8);
+ }
+ offset ++;
+ }
+ }
+
+ if (arr.length>1) {
+ const task = [];
+ task.push({cmd: "ALLOCSET", var: 0, buff: bBases});
+ task.push({cmd: "ALLOCSET", var: 1, buff: bScalars});
+ task.push({cmd: "ALLOC", var: 2, len: arr.length*sGmid});
+ pB = 0;
+ pS = 0;
+ let pD =0;
+ for (let i=0; i.
+*/
+
+async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, name, logger) {
+
+ const {fd: fdZKeyOld, sections: sectionsZKeyOld} = await binFileUtils__namespace.readBinFile(zkeyNameOld, "zkey", 2);
+ const zkeyHeader = await readHeader$1(fdZKeyOld, sectionsZKeyOld, false);
+ if (zkeyHeader.protocol != "groth16") {
+ throw new Error("zkey file is not groth16");
+ }
+
+ const curve = await getCurveFromQ(zkeyHeader.q);
+ const sG1 = curve.G1.F.n8*2;
+ const sG2 = curve.G2.F.n8*2;
+
+ const oldMPCParams = await readMPCParams(fdZKeyOld, curve, sectionsZKeyOld);
+ const newMPCParams = {};
+
+ const fdMPCParams = await fastFile__namespace.readExisting(mpcparamsName);
+
+ fdMPCParams.pos =
+ sG1*3 + sG2*3 + // vKey
+ 8 + sG1*zkeyHeader.nVars + // IC + C
+ 4 + sG1*(zkeyHeader.domainSize-1) + // H
+ 4 + sG1*zkeyHeader.nVars + // A
+ 4 + sG1*zkeyHeader.nVars + // B1
+ 4 + sG2*zkeyHeader.nVars; // B2
+
+ // csHash
+ newMPCParams.csHash = await fdMPCParams.read(64);
+
+ const nConttributions = await fdMPCParams.readUBE32();
+ newMPCParams.contributions = [];
+ for (let i=0; i newMPCParams.contributions.length) {
+ if (logger) logger.error("The impoerted file does not include new contributions");
+ return false;
+ }
+
+ for (let i=0; i.
+*/
+const sameRatio = sameRatio$2;
+
+
+
+async function phase2verifyFromInit(initFileName, pTauFileName, zkeyFileName, logger) {
+
+ let sr;
+ await Blake2b__default["default"].ready();
+
+ const {fd, sections} = await binFileUtils__namespace.readBinFile(zkeyFileName, "zkey", 2);
+ const zkey = await readHeader$1(fd, sections, false);
+ if (zkey.protocol != "groth16") {
+ throw new Error("zkey file is not groth16");
+ }
+
+ const curve = await getCurveFromQ(zkey.q);
+ const sG1 = curve.G1.F.n8*2;
+
+ const mpcParams = await readMPCParams(fd, curve, sections);
+
+ const accumulatedHasher = Blake2b__default["default"](64);
+ accumulatedHasher.update(mpcParams.csHash);
+ let curDelta = curve.G1.g;
+ for (let i=0; i=0; i--) {
+ const c = mpcParams.contributions[i];
+ if (logger) logger.info("-------------------------");
+ if (logger) logger.info(formatHash(c.contributionHash, `contribution #${i+1} ${c.name ? c.name : ""}:`));
+ if (c.type == 1) {
+ if (logger) logger.info(`Beacon generator: ${byteArray2hex(c.beaconHash)}`);
+ if (logger) logger.info(`Beacon iterations Exp: ${c.numIterationsExp}`);
+ }
+ }
+ if (logger) logger.info("-------------------------");
+
+ if (logger) logger.info("ZKey Ok!");
+
+ return true;
+
+
+ async function sectionHasSameRatio(groupName, fd1, sections1, fd2, sections2, idSection, g2sp, g2spx, sectionName) {
+ const MAX_CHUNK_SIZE = 1<<20;
+ const G = curve[groupName];
+ const sG = G.F.n8*2;
+ await binFileUtils__namespace.startReadUniqueSection(fd1, sections1, idSection);
+ await binFileUtils__namespace.startReadUniqueSection(fd2, sections2, idSection);
+
+ let R1 = G.zero;
+ let R2 = G.zero;
+
+ const nPoints = sections1[idSection][0].size / sG;
+
+ for (let i=0; i.
+*/
+
+async function phase2verifyFromR1cs(r1csFileName, pTauFileName, zkeyFileName, logger) {
+
+ // const initFileName = "~" + zkeyFileName + ".init";
+ const initFileName = {type: "bigMem"};
+ await newZKey(r1csFileName, pTauFileName, initFileName, logger);
+
+ return await phase2verifyFromInit(initFileName, pTauFileName, zkeyFileName, logger);
+}
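+
+/*
+ Example use (a sketch; file names are hypothetical):
+
+     const ok = await phase2verifyFromR1cs("circuit.r1cs", "pot12_final.ptau", "circuit_0001.zkey", console);
+     // true only if every contribution chains back to a fresh setup
+*/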
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+async function phase2contribute(zkeyNameOld, zkeyNameNew, name, entropy, logger) {
+ await Blake2b__default["default"].ready();
+
+ const {fd: fdOld, sections: sections} = await binFileUtils__namespace.readBinFile(zkeyNameOld, "zkey", 2);
+ const zkey = await readHeader$1(fdOld, sections);
+ if (zkey.protocol != "groth16") {
+ throw new Error("zkey file is not groth16");
+ }
+
+ const curve = await getCurveFromQ(zkey.q);
+
+ const mpcParams = await readMPCParams(fdOld, curve, sections);
+
+ const fdNew = await binFileUtils__namespace.createBinFile(zkeyNameNew, "zkey", 1, 10);
+
+
+ const rng = await getRandomRng(entropy);
+
+ const transcriptHasher = Blake2b__default["default"](64);
+ transcriptHasher.update(mpcParams.csHash);
+ for (let i=0; i.
+*/
+
+
+async function beacon(zkeyNameOld, zkeyNameNew, name, beaconHashStr, numIterationsExp, logger) {
+ await Blake2b__default["default"].ready();
+
+ const beaconHash = hex2ByteArray(beaconHashStr);
+ if ( (beaconHash.byteLength == 0)
+ || (beaconHash.byteLength*2 !=beaconHashStr.length))
+ {
+ if (logger) logger.error("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
+ return false;
+ }
+ if (beaconHash.length>=256) {
+ if (logger) logger.error("Maximum lenght of beacon hash is 255 bytes");
+ return false;
+ }
+
+ numIterationsExp = parseInt(numIterationsExp);
+ if ((numIterationsExp<10)||(numIterationsExp>63)) {
+ if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)");
+ return false;
+ }
+
+
+ const {fd: fdOld, sections: sections} = await binFileUtils__namespace.readBinFile(zkeyNameOld, "zkey", 2);
+ const zkey = await readHeader$1(fdOld, sections);
+
+ if (zkey.protocol != "groth16") {
+ throw new Error("zkey file is not groth16");
+ }
+
+
+ const curve = await getCurveFromQ(zkey.q);
+
+ const mpcParams = await readMPCParams(fdOld, curve, sections);
+
+ const fdNew = await binFileUtils__namespace.createBinFile(zkeyNameNew, "zkey", 1, 10);
+
+ const rng = await rngFromBeaconParams(beaconHash, numIterationsExp);
+
+ const transcriptHasher = Blake2b__default["default"](64);
+ transcriptHasher.update(mpcParams.csHash);
+ for (let i=0; i.
+*/
+
+async function bellmanContribute(curve, challengeFilename, responseFileName, entropy, logger) {
+ await Blake2b__default["default"].ready();
+
+ const rng = await getRandomRng(entropy);
+
+ const delta = curve.Fr.fromRng(rng);
+ const invDelta = curve.Fr.inv(delta);
+
+ const sG1 = curve.G1.F.n8*2;
+ const sG2 = curve.G2.F.n8*2;
+
+ const fdFrom = await fastFile__namespace.readExisting(challengeFilename);
+ const fdTo = await fastFile__namespace.createOverride(responseFileName);
+
+
+ await copy(sG1); // alpha1
+ await copy(sG1); // beta1
+ await copy(sG2); // beta2
+ await copy(sG2); // gamma2
+ const oldDelta1 = await readG1();
+ const delta1 = curve.G1.timesFr(oldDelta1, delta);
+ await writeG1(delta1);
+ const oldDelta2 = await readG2();
+ const delta2 = curve.G2.timesFr(oldDelta2, delta);
+ await writeG2(delta2);
+
+ // IC
+ const nIC = await fdFrom.readUBE32();
+ await fdTo.writeUBE32(nIC);
+ await copy(nIC*sG1);
+
+ // H
+ const nH = await fdFrom.readUBE32();
+ await fdTo.writeUBE32(nH);
+ await applyKeyToChallengeSection(fdFrom, fdTo, null, curve, "G1", nH, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "H", logger);
+
+ // L
+ const nL = await fdFrom.readUBE32();
+ await fdTo.writeUBE32(nL);
+ await applyKeyToChallengeSection(fdFrom, fdTo, null, curve, "G1", nL, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "L", logger);
+
+ // A
+ const nA = await fdFrom.readUBE32();
+ await fdTo.writeUBE32(nA);
+ await copy(nA*sG1);
+
+ // B1
+ const nB1 = await fdFrom.readUBE32();
+ await fdTo.writeUBE32(nB1);
+ await copy(nB1*sG1);
+
+ // B2
+ const nB2 = await fdFrom.readUBE32();
+ await fdTo.writeUBE32(nB2);
+ await copy(nB2*sG2);
+
+
+ //////////
+ /// Read contributions
+ //////////
+ const transcriptHasher = Blake2b__default["default"](64);
+
+ const mpcParams = {};
+ // csHash
+ mpcParams.csHash = await fdFrom.read(64);
+ transcriptHasher.update(mpcParams.csHash);
+
+ const nConttributions = await fdFrom.readUBE32();
+ mpcParams.contributions = [];
+ for (let i=0; i.
+*/
+const {stringifyBigInts: stringifyBigInts$1} = ffjavascript.utils;
+
+async function zkeyExportVerificationKey(zkeyName, /* logger */ ) {
+
+ const {fd, sections} = await binFileUtils__namespace.readBinFile(zkeyName, "zkey", 2);
+ const zkey = await readHeader$1(fd, sections);
+
+ let res;
+ if (zkey.protocol == "groth16") {
+ res = await groth16Vk(zkey, fd, sections);
+ } else if (zkey.protocol == "plonk") {
+ res = await plonkVk(zkey);
+ } else {
+ throw new Error("zkey file is not groth16");
+ }
+
+ await fd.close();
+
+ return res;
+}
+
+
+async function groth16Vk(zkey, fd, sections) {
+ const curve = await getCurveFromQ(zkey.q);
+ const sG1 = curve.G1.F.n8*2;
+
+ const alphaBeta = await curve.pairing( zkey.vk_alpha_1 , zkey.vk_beta_2 );
+
+ let vKey = {
+ protocol: zkey.protocol,
+ curve: curve.name,
+ nPublic: zkey.nPublic,
+
+ vk_alpha_1: curve.G1.toObject(zkey.vk_alpha_1),
+
+ vk_beta_2: curve.G2.toObject(zkey.vk_beta_2),
+ vk_gamma_2: curve.G2.toObject(zkey.vk_gamma_2),
+ vk_delta_2: curve.G2.toObject(zkey.vk_delta_2),
+
+ vk_alphabeta_12: curve.Gt.toObject(alphaBeta)
+ };
+
+ // Read IC Section
+ ///////////
+ await binFileUtils__namespace.startReadUniqueSection(fd, sections, 3);
+ vKey.IC = [];
+ for (let i=0; i<= zkey.nPublic; i++) {
+ const buff = await fd.read(sG1);
+ const P = curve.G1.toObject(buff);
+ vKey.IC.push(P);
+ }
+ await binFileUtils__namespace.endReadSection(fd);
+
+ vKey = stringifyBigInts$1(vKey);
+
+ return vKey;
+}
+
+
+async function plonkVk(zkey) {
+ const curve = await getCurveFromQ(zkey.q);
+
+ let vKey = {
+ protocol: zkey.protocol,
+ curve: curve.name,
+ nPublic: zkey.nPublic,
+ power: zkey.power,
+
+ k1: curve.Fr.toObject(zkey.k1),
+ k2: curve.Fr.toObject(zkey.k2),
+
+ Qm: curve.G1.toObject(zkey.Qm),
+ Ql: curve.G1.toObject(zkey.Ql),
+ Qr: curve.G1.toObject(zkey.Qr),
+ Qo: curve.G1.toObject(zkey.Qo),
+ Qc: curve.G1.toObject(zkey.Qc),
+ S1: curve.G1.toObject(zkey.S1),
+ S2: curve.G1.toObject(zkey.S2),
+ S3: curve.G1.toObject(zkey.S3),
+
+ X_2: curve.G2.toObject(zkey.X_2),
+
+ w: curve.Fr.toObject(curve.Fr.w[zkey.power])
+ };
+
+ vKey = stringifyBigInts$1(vKey);
+
+ return vKey;
+}
+
+var ejs = {};
+
+// Not ready yet
+// module.exports.generateVerifier_kimleeoh = generateVerifier_kimleeoh;
+
+
+
+async function exportSolidityVerifier(zKeyName, templates, logger) {
+
+ const verificationKey = await zkeyExportVerificationKey(zKeyName);
+
+ let template = templates[verificationKey.protocol];
+
+ return ejs.render(template , verificationKey);
+}
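+
+/*
+ Example use (a sketch; `groth16Template` is a hypothetical EJS template
+ string, keyed by protocol name as the lookup above expects):
+
+     const solidity = await exportSolidityVerifier("circuit_final.zkey", { groth16: groth16Template });
+*/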
+
+/*
+ Copyright 2018 0KIMS association.
+
+ This file is part of snarkJS.
+
+ snarkJS is a free software: you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ snarkJS is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+var zkey = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ newZKey: newZKey,
+ exportBellman: phase2exportMPCParams,
+ importBellman: phase2importMPCParams,
+ verifyFromR1cs: phase2verifyFromR1cs,
+ verifyFromInit: phase2verifyFromInit,
+ contribute: phase2contribute,
+ beacon: beacon,
+ exportJson: zkeyExportJson,
+ bellmanContribute: bellmanContribute,
+ exportVerificationKey: zkeyExportVerificationKey,
+ exportSolidityVerifier: exportSolidityVerifier
+});
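+
+/*
+ Typical phase2 flow using the functions frozen above (a sketch; file names
+ are hypothetical):
+
+     await newZKey("circuit.r1cs", "pot12_final.ptau", "circuit_0000.zkey", console);
+     await phase2contribute("circuit_0000.zkey", "circuit_0001.zkey", "first", "xxx", console);
+     await phase2verifyFromR1cs("circuit.r1cs", "pot12_final.ptau", "circuit_0001.zkey", console);
+     const vKey = await zkeyExportVerificationKey("circuit_0001.zkey");
+*/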
+
+/*
+ Copyright 2021 0kims association.
+
+ This file is part of snarkjs.
+
+ snarkjs is a free software: you can redistribute it and/or
+ modify it under the terms of the GNU General Public License as published by the
+ Free Software Foundation, either version 3 of the License, or (at your option)
+ any later version.
+
+ snarkjs is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ more details.
+
+ You should have received a copy of the GNU General Public License along with
+ snarkjs. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+
+async function plonkSetup(r1csName, ptauName, zkeyName, logger) {
+
+ await Blake2b__default["default"].ready();
+
+ const {fd: fdPTau, sections: sectionsPTau} = await binFileUtils.readBinFile(ptauName, "ptau", 1, 1<<22, 1<<24);
+ const {curve, power} = await readPTauHeader(fdPTau, sectionsPTau);
+ const {fd: fdR1cs, sections: sectionsR1cs} = await binFileUtils.readBinFile(r1csName, "r1cs", 1, 1<<22, 1<<24);
+ const r1cs = await r1csfile.readR1csHeader(fdR1cs, sectionsR1cs, false);
+
+ const sG1 = curve.G1.F.n8*2;
+ const G1 = curve.G1;
+ const sG2 = curve.G2.F.n8*2;
+ const Fr = curve.Fr;
+ const n8r = curve.Fr.n8;
+
+ if (logger) logger.info("Reading r1cs");
+ let sR1cs = await binFileUtils.readSection(fdR1cs, sectionsR1cs, 2);
+
+ const plonkConstraints = new BigArray();
+ const plonkAdditions = new BigArray();
+ let plonkNVars = r1cs.nVars;
+
+ const nPublic = r1cs.nOutputs + r1cs.nPubInputs;
+
+ await processConstraints();
+
+ const fdZKey = await binFileUtils.createBinFile(zkeyName, "zkey", 1, 14, 1<<22, 1<<24);
+
+
+ if (r1cs.prime != curve.r) {
+ if (logger) logger.error("r1cs curve does not match powers of tau ceremony curve");
+ return -1;
+ }
+
+ let cirPower = log2(plonkConstraints.length -1) +1;
+ if (cirPower < 3) cirPower = 3; // The t polynomial has degree n+5, so we need a domain of at least 2^3 = 8
+ const domainSize = 2 ** cirPower;
+
+ if (logger) logger.info("Plonk constraints: " + plonkConstraints.length);
+ if (cirPower > power) {
+ if (logger) logger.error(`circuit too big for this power of tau ceremony. ${plonkConstraints.length} > 2**${power}`);
+ return -1;
+ }
+
+ if (!sectionsPTau[12]) {
+ if (logger) logger.error("Powers of tau is not prepared.");
+ return -1;
+ }
+
+
+ const LPoints = new ffjavascript.BigBuffer(domainSize*sG1);
+ const o = sectionsPTau[12][0].p + ((2 ** (cirPower)) -1)*sG1;
+ await fdPTau.readToBuffer(LPoints, 0, domainSize*sG1, o);
+
+ const [k1, k2] = getK1K2();
+
+ const vk = {};
+
+
+ await writeAdditions(3, "Additions");
+ await writeWitnessMap(4, 0, "Amap");
+ await writeWitnessMap(5, 1, "Bmap");
+ await writeWitnessMap(6, 2, "Cmap");
+ await writeQMap(7, 3, "Qm");
+ await writeQMap(8, 4, "Ql");
+ await writeQMap(9, 5, "Qr");
+ await writeQMap(10, 6, "Qo");
+ await writeQMap(11, 7, "Qc");
+ await writeSigma(12, "sigma");
+ await writeLs(13, "lagrange polynomials");
+
+ // Write PTau points
+ ////////////
+
+ await binFileUtils.startWriteSection(fdZKey, 14);
+ const buffOut = new ffjavascript.BigBuffer((domainSize+6)*sG1);
+ await fdPTau.readToBuffer(buffOut, 0, (domainSize+6)*sG1, sectionsPTau[2][0].p);
+ await fdZKey.write(buffOut);
+ await binFileUtils.endWriteSection(fdZKey);
+
+
+ await writeHeaders();
+
+ await fdZKey.close();
+ await fdR1cs.close();
+ await fdPTau.close();
+
+ if (logger) logger.info("Setup Finished");
+
+ return ;
+
+ async function processConstraints() {
+
+ let r1csPos = 0;
+
+ function r1cs_readULE32() {
+ const buff = sR1cs.slice(r1csPos, r1csPos+4);
+ r1csPos += 4;
+ const buffV = new DataView(buff.buffer);
+ return buffV.getUint32(0, true);
+ }
+
+ function r1cs_readCoef() {
+ const res = Fr.fromRprLE(sR1cs.slice(r1csPos, r1csPos+curve.Fr.n8));
+ r1csPos += curve.Fr.n8;
+ return res;
+ }
+
+ function r1cs_readCoefs() {
+ const coefs = [];
+ const res = {
+ k: curve.Fr.zero
+ };
+ const nA = r1cs_readULE32();
+ for (let i=0; i> 1);
+ const arr2 = coefs.slice(coefs.length >> 1);
+ const coef1 = reduceCoef(arr1);
+ const coef2 = reduceCoef(arr2);
+
+ const sl = coef1[0];
+ const sr = coef2[0];
+ const so = plonkNVars++;
+ const qm = curve.Fr.zero;
+ const ql = Fr.neg(coef1[1]);
+ const qr = Fr.neg(coef2[1]);
+ const qo = curve.Fr.one;
+ const qc = curve.Fr.zero;
+
+ plonkConstraints.push([sl, sr, so, qm, ql, qr, qo, qc]);
+
+ plonkAdditions.push([sl, sr, coef1[1], coef2[1]]);
+
+ return [so, curve.Fr.one];
+ }
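+
+ // reduceCoef halves a long linear combination recursively: each merge emits
+ // one addition gate  w_so = c1*w_sl + c2*w_sr  (the constraint pushed above,
+ // with ql = -c1, qr = -c2, qo = 1) and records the addition so the witness
+ // builder can compute the fresh wire. An n-term combination therefore costs
+ // n-1 extra gates arranged as a balanced tree.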
+
+ for (let s = 1; s <= nPublic ; s++) {
+ const sl = s;
+ const sr = 0;
+ const so = 0;
+ const qm = curve.Fr.zero;
+ const ql = curve.Fr.one;
+ const qr = curve.Fr.zero;
+ const qo = curve.Fr.zero;
+ const qc = curve.Fr.zero;
+
+ plonkConstraints.push([sl, sr, so, qm, ql, qr, qo, qc]);
+ }
+
+ for (let c=0; c<r1cs.nConstraints; c++) {
+ if ((logger)&&(c%10000 == 0)) logger.debug(`processing constraints: ${c}/${r1cs.nConstraints}`);
+ const A = r1cs_readCoefs();
+ const B = r1cs_readCoefs();
+ const C = r1cs_readCoefs();
+
+ const sl = A.s;
+ const sr = B.s;
+ const so = C.s;
+ const qm = Fr.mul(A.coef, B.coef);
+ const ql = Fr.mul(A.coef, B.k);
+ const qr = Fr.mul(A.k, B.coef);
+ const qo = Fr.neg(C.coef);
+ const qc = Fr.sub(Fr.mul(A.k, B.k), C.k);
+
+ plonkConstraints.push([sl, sr, so, qm, ql, qr, qo, qc]);
+ }
+ }
+}
+
+/*
+ Copyright 2021 0kims association.
+
+ This file is part of snarkjs.
+
+ snarkjs is a free software: you can redistribute it and/or
+ modify it under the terms of the GNU General Public License as published by the
+ Free Software Foundation, either version 3 of the License, or (at your option)
+ any later version.
+
+ snarkjs is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ more details.
+
+ You should have received a copy of the GNU General Public License along with
+ snarkjs. If not, see <https://www.gnu.org/licenses/>.
+*/
+const {stringifyBigInts} = ffjavascript.utils;
+const { keccak256: keccak256$1 } = jsSha3__default["default"];
+
+async function plonk16Prove(zkeyFileName, witnessFileName, logger) {
+ const {fd: fdWtns, sections: sectionsWtns} = await binFileUtils__namespace.readBinFile(witnessFileName, "wtns", 2, 1<<25, 1<<23);
+
+ const wtns = await readHeader(fdWtns, sectionsWtns);
+
+ const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils__namespace.readBinFile(zkeyFileName, "zkey", 2, 1<<25, 1<<23);
+
+ const zkey = await readHeader$1(fdZKey, sectionsZKey);
+ if (zkey.protocol != "plonk") {
+ throw new Error("zkey file is not groth16");
+ }
+
+ if (!ffjavascript.Scalar.eq(zkey.r, wtns.q)) {
+ throw new Error("Curve of the witness does not match the curve of the proving key");
+ }
+
+ if (wtns.nWitness != zkey.nVars -zkey.nAdditions) {
+ throw new Error(`Invalid witness length. Circuit: ${zkey.nVars} vars (${zkey.nAdditions} additions), witness: ${wtns.nWitness}`);
+ }
+
+ const curve = await getCurveFromQ(zkey.q);
+ const Fr = curve.Fr;
+ const G1 = curve.G1;
+ const n8r = curve.Fr.n8;
+
+ if (logger) logger.debug("Reading Wtns");
+ const buffWitness = await binFileUtils__namespace.readSection(fdWtns, sectionsWtns, 2);
+ // The first witness element is not used in plonk and can be any value, as long
+ // as it is always the same. We set it to zero to speed up the exponentiations.
+ buffWitness.set(Fr.zero, 0);
+ const buffInternalWitness = new ffjavascript.BigBuffer(n8r*zkey.nAdditions);
+
+ await calculateAdditions();
+
+ let A,B,C,Z;
+ let A4, B4, C4, Z4;
+ let pol_a,pol_b,pol_c, pol_z, pol_t, pol_r;
+ let proof = {};
+
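+ // Section 12 stores each of the three sigma polynomials as n coefficients
+ // followed by its evaluations over the 4n extended domain, hence the 5n strides
+ // in the reads below.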
+ const sigmaBuff = new ffjavascript.BigBuffer(zkey.domainSize*n8r*4*3);
+ let o = sectionsZKey[12][0].p + zkey.domainSize*n8r;
+ await fdZKey.readToBuffer(sigmaBuff, 0 , zkey.domainSize*n8r*4, o);
+ o += zkey.domainSize*n8r*5;
+ await fdZKey.readToBuffer(sigmaBuff, zkey.domainSize*n8r*4 , zkey.domainSize*n8r*4, o);
+ o += zkey.domainSize*n8r*5;
+ await fdZKey.readToBuffer(sigmaBuff, zkey.domainSize*n8r*8 , zkey.domainSize*n8r*4, o);
+
+ const pol_s1 = new ffjavascript.BigBuffer(zkey.domainSize*n8r);
+ await fdZKey.readToBuffer(pol_s1, 0 , zkey.domainSize*n8r, sectionsZKey[12][0].p);
+
+ const pol_s2 = new ffjavascript.BigBuffer(zkey.domainSize*n8r);
+ await fdZKey.readToBuffer(pol_s2, 0 , zkey.domainSize*n8r, sectionsZKey[12][0].p + 5*zkey.domainSize*n8r);
+
+ const PTau = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 14);
+
+
+ const ch = {};
+
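+ // Standard PLONK prover rounds: commit the wire polynomials A, B, C (round 1),
+ // the grand product Z (round 2) and the quotient chunks T1, T2, T3 (round 3),
+ // then evaluate everything at the challenge xi (round 4) and build the opening
+ // proofs Wxi and Wxiw (round 5).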
+ await round1();
+ await round2();
+ await round3();
+ await round4();
+ await round5();
+
+
+ ///////////////////////
+ // Final adjustments //
+ ///////////////////////
+
+ proof.protocol = "plonk";
+ proof.curve = curve.name;
+
+ await fdZKey.close();
+ await fdWtns.close();
+
+ let publicSignals = [];
+
+ for (let i=1; i<= zkey.nPublic; i++) {
+ const pub = buffWitness.slice(i*Fr.n8, i*Fr.n8+Fr.n8);
+ publicSignals.push(ffjavascript.Scalar.fromRprLE(pub));
+ }
+
+ proof.A = G1.toObject(proof.A);
+ proof.B = G1.toObject(proof.B);
+ proof.C = G1.toObject(proof.C);
+ proof.Z = G1.toObject(proof.Z);
+
+ proof.T1 = G1.toObject(proof.T1);
+ proof.T2 = G1.toObject(proof.T2);
+ proof.T3 = G1.toObject(proof.T3);
+
+ proof.eval_a = Fr.toObject(proof.eval_a);
+ proof.eval_b = Fr.toObject(proof.eval_b);
+ proof.eval_c = Fr.toObject(proof.eval_c);
+ proof.eval_s1 = Fr.toObject(proof.eval_s1);
+ proof.eval_s2 = Fr.toObject(proof.eval_s2);
+ proof.eval_zw = Fr.toObject(proof.eval_zw);
+ proof.eval_t = Fr.toObject(proof.eval_t);
+ proof.eval_r = Fr.toObject(proof.eval_r);
+
+ proof.Wxi = G1.toObject(proof.Wxi);
+ proof.Wxiw = G1.toObject(proof.Wxiw);
+
+ delete proof.eval_t;
+
+ proof = stringifyBigInts(proof);
+ publicSignals = stringifyBigInts(publicSignals);
+
+ return {proof, publicSignals};
+
+ async function calculateAdditions() {
+ const additionsBuff = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 3);
+
+ const sSum = 8+curve.Fr.n8*2;
+
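+ // Each addition record holds two 32-bit wire indices followed by two field
+ // coefficients, matching sSum = 8 + 2*Fr.n8 bytes.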
+ for (let i=0; i<zkey.nAdditions; i++) {
+ const ai = readUInt32(additionsBuff, i*sSum);
+ const bi = readUInt32(additionsBuff, i*sSum+4);
+ const ac = additionsBuff.slice(i*sSum+8, i*sSum+8+n8r);
+ const bc = additionsBuff.slice(i*sSum+8+n8r, i*sSum+8+n8r*2);
+ const aw = getWitness(ai);
+ const bw = getWitness(bi);
+ buffInternalWitness.set(
+ Fr.add(Fr.mul(ac, aw), Fr.mul(bc, bw)),
+ n8r*i
+ );
+ }
+ }
+
+ // degree of p, ignoring trailing zero coefficients
+ function getDegree(p) {
+ let deg = (p.byteLength/n8r) - 1;
+ while ((deg>0)&&(Fr.isZero(p.slice(deg*n8r, deg*n8r+n8r)))) deg--;
+ return deg;
+ }
+
+ function printPol(P) {
+ const n=(P.byteLength/n8r);
+ console.log("[");
+ for (let i=0; i<n; i++) {
+ console.log(Fr.toString(P.slice(i*n8r, i*n8r+n8r)));
+ }
+ console.log("]");
+ }
+
+ async function round3() {
+ if (logger) logger.debug("ifft T");
+ const t = await Fr.ifft(T);
+ for (let i=0; i<zkey.domainSize*4; i++) {
+ const a = t.slice(i*n8r, (i+1)*n8r);
+ if ( i > (zkey.domainSize*3 -4) ) {
+ if (!Fr.isZero(a)) {
+ throw new Error("T Polynomial is not divisible");
+ }
+ }
+ }
+
+ if (logger) logger.debug("ifft Tz");
+ const tz = await Fr.ifft(Tz);
+ for (let i=0; i<zkey.domainSize*4; i++) {
+ const a = tz.slice(i*n8r, (i+1)*n8r);
+ if ( i > (zkey.domainSize*3 +5) ) {
+ if (!Fr.isZero(a)) {
+ throw new Error("Tz Polynomial is not well calculated");
+ }
+ } else {
+ t.set(
+ Fr.add(
+ t.slice(i*n8r, (i+1)*n8r),
+ a
+ ),
+ i*n8r
+ );
+ }
+ }
+
+ pol_t = t.slice(0, (zkey.domainSize*3+6)*n8r);
+
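+ // t has 3n+6 coefficients, so it is committed in three chunks: two of n
+ // coefficients and a final one of n+6 (degree n+5).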
+ proof.T1 = await expTau( t.slice(0, zkey.domainSize*n8r) , "multiexp T1");
+ proof.T2 = await expTau( t.slice(zkey.domainSize*n8r, zkey.domainSize*2*n8r) , "multiexp T2");
+ proof.T3 = await expTau( t.slice(zkey.domainSize*2*n8r, (zkey.domainSize*3+6)*n8r) , "multiexp T3");
+
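+ // mul2/mul4 multiply evaluations of blinded polynomials: r carries the plain
+ // product and rz collects the terms multiplied by powers of Z_H, looked up in
+ // the precomputed Z1/Z2/Z3 tables.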
+ function mul2(a,b, ap, bp, p) {
+ let r, rz;
+
+
+ const a_b = Fr.mul(a,b);
+ const a_bp = Fr.mul(a,bp);
+ const ap_b = Fr.mul(ap,b);
+ const ap_bp = Fr.mul(ap,bp);
+
+ r = a_b;
+
+ let a0 = Fr.add(a_bp, ap_b);
+
+ let a1 = ap_bp;
+
+ rz = a0;
+ if (p) {
+ rz = Fr.add(rz, Fr.mul(Z1[p], a1));
+ }
+
+ return [r, rz];
+ }
+
+ function mul4(a,b,c,d, ap, bp, cp, dp, p) {
+ let r, rz;
+
+
+ const a_b = Fr.mul(a,b);
+ const a_bp = Fr.mul(a,bp);
+ const ap_b = Fr.mul(ap,b);
+ const ap_bp = Fr.mul(ap,bp);
+
+ const c_d = Fr.mul(c,d);
+ const c_dp = Fr.mul(c,dp);
+ const cp_d = Fr.mul(cp,d);
+ const cp_dp = Fr.mul(cp,dp);
+
+ r = Fr.mul(a_b, c_d);
+
+ let a0 = Fr.mul(ap_b, c_d);
+ a0 = Fr.add(a0, Fr.mul(a_bp, c_d));
+ a0 = Fr.add(a0, Fr.mul(a_b, cp_d));
+ a0 = Fr.add(a0, Fr.mul(a_b, c_dp));
+
+ let a1 = Fr.mul(ap_bp, c_d);
+ a1 = Fr.add(a1, Fr.mul(ap_b, cp_d));
+ a1 = Fr.add(a1, Fr.mul(ap_b, c_dp));
+ a1 = Fr.add(a1, Fr.mul(a_bp, cp_d));
+ a1 = Fr.add(a1, Fr.mul(a_bp, c_dp));
+ a1 = Fr.add(a1, Fr.mul(a_b, cp_dp));
+
+ let a2 = Fr.mul(a_bp, cp_dp);
+ a2 = Fr.add(a2, Fr.mul(ap_b, cp_dp));
+ a2 = Fr.add(a2, Fr.mul(ap_bp, c_dp));
+ a2 = Fr.add(a2, Fr.mul(ap_bp, cp_d));
+
+ let a3 = Fr.mul(ap_bp, cp_dp);
+
+ rz = a0;
+ if (p) {
+ rz = Fr.add(rz, Fr.mul(Z1[p], a1));
+ rz = Fr.add(rz, Fr.mul(Z2[p], a2));
+ rz = Fr.add(rz, Fr.mul(Z3[p], a3));
+ }
+
+ return [r, rz];
+ }
+ }
+
+ async function round4() {
+ const pol_qm = new ffjavascript.BigBuffer(zkey.domainSize*n8r);
+ await fdZKey.readToBuffer(pol_qm, 0 , zkey.domainSize*n8r, sectionsZKey[7][0].p);
+
+ const pol_ql = new ffjavascript.BigBuffer(zkey.domainSize*n8r);
+ await fdZKey.readToBuffer(pol_ql, 0 , zkey.domainSize*n8r, sectionsZKey[8][0].p);
+
+ const pol_qr = new ffjavascript.BigBuffer(zkey.domainSize*n8r);
+ await fdZKey.readToBuffer(pol_qr, 0 , zkey.domainSize*n8r, sectionsZKey[9][0].p);
+
+ const pol_qo = new ffjavascript.BigBuffer(zkey.domainSize*n8r);
+ await fdZKey.readToBuffer(pol_qo, 0 , zkey.domainSize*n8r, sectionsZKey[10][0].p);
+
+ const pol_qc = new ffjavascript.BigBuffer(zkey.domainSize*n8r);
+ await fdZKey.readToBuffer(pol_qc, 0 , zkey.domainSize*n8r, sectionsZKey[11][0].p);
+
+ const pol_s3 = new ffjavascript.BigBuffer(zkey.domainSize*n8r);
+ await fdZKey.readToBuffer(pol_s3, 0 , zkey.domainSize*n8r, sectionsZKey[12][0].p + 10*zkey.domainSize*n8r);
+
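+ // Fiat-Shamir: derive the evaluation challenge xi by hashing the uncompressed
+ // T1, T2 and T3 commitments into the transcript.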
+ const transcript4 = new Uint8Array(G1.F.n8*2*3);
+ G1.toRprUncompressed(transcript4, 0, proof.T1);
+ G1.toRprUncompressed(transcript4, G1.F.n8*2, proof.T2);
+ G1.toRprUncompressed(transcript4, G1.F.n8*4, proof.T3);
+ ch.xi = hashToFr(transcript4);
+
+ if (logger) logger.debug("xi: " + Fr.toString(ch.xi));
+
+ proof.eval_a = evalPol(pol_a, ch.xi);
+ proof.eval_b = evalPol(pol_b, ch.xi);
+ proof.eval_c = evalPol(pol_c, ch.xi);
+ proof.eval_s1 = evalPol(pol_s1, ch.xi);
+ proof.eval_s2 = evalPol(pol_s2, ch.xi);
+ proof.eval_t = evalPol(pol_t, ch.xi);
+ proof.eval_zw = evalPol(pol_z, Fr.mul(ch.xi, Fr.w[zkey.power]));
+
+ const coef_ab = Fr.mul(proof.eval_a, proof.eval_b);
+
+ let e2a = proof.eval_a;
+ const betaxi = Fr.mul(ch.beta, ch.xi);
+ e2a = Fr.add( e2a, betaxi);
+ e2a = Fr.add( e2a, ch.gamma);
+
+ let e2b = proof.eval_b;
+ e2b = Fr.add( e2b, Fr.mul(betaxi, zkey.k1));
+ e2b = Fr.add( e2b, ch.gamma);
+
+ let e2c = proof.eval_c;
+ e2c = Fr.add( e2c, Fr.mul(betaxi, zkey.k2));
+ e2c = Fr.add( e2c, ch.gamma);
+
+ const e2 = Fr.mul(Fr.mul(Fr.mul(e2a, e2b), e2c), ch.alpha);
+
+ let e3a = proof.eval_a;
+ e3a = Fr.add( e3a, Fr.mul(ch.beta, proof.eval_s1));
+ e3a = Fr.add( e3a, ch.gamma);
+
+ let e3b = proof.eval_b;
+ e3b = Fr.add( e3b, Fr.mul(ch.beta, proof.eval_s2));
+ e3b = Fr.add( e3b, ch.gamma);
+
+ let e3 = Fr.mul(e3a, e3b);
+ e3 = Fr.mul(e3, ch.beta);
+ e3 = Fr.mul(e3, proof.eval_zw);
+ e3 = Fr.mul(e3, ch.alpha);
+
+ ch.xim= ch.xi;
+ for (let i=0; i<zkey.power; i++) ch.xim = Fr.mul(ch.xim, ch.xim);
+ }
+
+ // evalPol evaluates P at x using Horner's rule, highest coefficient first.
+ function evalPol(P, x) {
+ const n = P.byteLength/n8r;
+ if (n == 0) return Fr.zero;
+ let res = P.slice((n-1)*n8r, n*n8r);
+ for (let i=n-2; i>=0; i--) {
+ res = Fr.add(Fr.mul(res, x), P.slice(i*n8r, (i+1)*n8r));
+ }
+ return res;
+ }
+
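+ // divPol1 divides P(x) by (x - d) using synthetic division and checks that the
+ // remainder is zero, i.e. that P(d) vanishes.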
+ function divPol1(P, d) {
+ const n = P.byteLength/n8r;
+ const res = new ffjavascript.BigBuffer(n*n8r);
+ res.set(Fr.zero, (n-1) *n8r);
+ res.set(P.slice((n-1)*n8r, n*n8r), (n-2)*n8r);
+ for (let i=n-3; i>=0; i--) {
+ res.set(
+ Fr.add(
+ P.slice((i+1)*n8r, (i+2)*n8r),
+ Fr.mul(
+ d,
+ res.slice((i+1)*n8r, (i+2)*n8r)
+ )
+ ),
+ i*n8r
+ );
+ }
+ if (!Fr.eq(
+ P.slice(0, n8r),
+ Fr.mul(
+ Fr.neg(d),
+ res.slice(0, n8r)
+ )
+ )) {
+ throw new Error("Polinomial does not divide");
+ }
+ return res;
+ }
+
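+ // expTau computes a KZG-style commitment: take the coefficients out of
+ // Montgomery form and multi-exponentiate them against the [tau^i]_1 points.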
+ async function expTau(b, name) {
+ const n = b.byteLength/n8r;
+ const PTauN = PTau.slice(0, n*curve.G1.F.n8*2);
+ const bm = await curve.Fr.batchFromMontgomery(b);
+ let res = await curve.G1.multiExpAffine(PTauN, bm, logger, name);
+ res = curve.G1.toAffine(res);
+ return res;
+ }
+
+
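+ // to4T takes a column to coefficient form (ifft), extends it to the 4n domain,
+ // and folds the Z_H-multiplied blinding scalars pz into the coefficients.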
+ async function to4T(A, pz) {
+ pz = pz || [];
+ let a = await Fr.ifft(A);
+ const a4 = new ffjavascript.BigBuffer(n8r*zkey.domainSize*4);
+ a4.set(a, 0);
+
+ const a1 = new ffjavascript.BigBuffer(n8r*(zkey.domainSize + pz.length));
+ a1.set(a, 0);
+ for (let i= 0; i<pz.length; i++) {
+ a1.set(
+ Fr.add(
+ a1.slice((zkey.domainSize+i)*n8r, (zkey.domainSize+i+1)*n8r),
+ pz[i]
+ ),
+ (zkey.domainSize+i)*n8r
+ );
+ a1.set(
+ Fr.sub(
+ a1.slice(i*n8r, (i+1)*n8r),
+ pz[i]
+ ),
+ i*n8r
+ );
+ }
+ const A4 = await Fr.fft(a4);
+ return [a1, A4];
+ }
+}
+
+/*
+ Copyright 2021 0kims association.
+
+ This file is part of snarkjs.
+
+ snarkjs is a free software: you can redistribute it and/or
+ modify it under the terms of the GNU General Public License as published by the
+ Free Software Foundation, either version 3 of the License, or (at your option)
+ any later version.
+
+ snarkjs is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ more details.
+
+ You should have received a copy of the GNU General Public License along with
+ snarkjs. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+async function plonkFullProve(input, wasmFile, zkeyFileName, logger) {
+ const wtns= {
+ type: "mem"
+ };
+ await wtnsCalculate(input, wasmFile, wtns);
+ return await plonk16Prove(zkeyFileName, wtns, logger);
+}
+
+/*
+ Copyright 2021 0kims association.
+
+ This file is part of snarkjs.
+
+ snarkjs is a free software: you can redistribute it and/or
+ modify it under the terms of the GNU General Public License as published by the
+ Free Software Foundation, either version 3 of the License, or (at your option)
+ any later version.
+
+ snarkjs is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ more details.
+
+ You should have received a copy of the GNU General Public License along with
+ snarkjs. If not, see <https://www.gnu.org/licenses/>.
+*/
+const {unstringifyBigInts} = ffjavascript.utils;
+const { keccak256 } = jsSha3__default["default"];
+
+
+async function plonkVerify(vk_verifier, publicSignals, proof, logger) {
+ vk_verifier = unstringifyBigInts(vk_verifier);
+ proof = unstringifyBigInts(proof);
+ publicSignals = unstringifyBigInts(publicSignals);
+
+ const curve = await getCurveFromName(vk_verifier.curve);
+
+ const Fr = curve.Fr;
+ const G1 = curve.G1;
+
+ proof = fromObjectProof(curve,proof);
+ vk_verifier = fromObjectVk(curve, vk_verifier);
+ if (!isWellConstructed(curve, proof)) {
+ logger.error("Proof is not well constructed");
+ return false;
+ }
+ const challanges = calculateChallanges(curve, proof);
+ if (logger) {
+ logger.debug("beta: " + Fr.toString(challanges.beta, 16));
+ logger.debug("gamma: " + Fr.toString(challanges.gamma, 16));
+ logger.debug("alpha: " + Fr.toString(challanges.alpha, 16));
+ logger.debug("xi: " + Fr.toString(challanges.xi, 16));
+ logger.debug("v1: " + Fr.toString(challanges.v[1], 16));
+ logger.debug("v6: " + Fr.toString(challanges.v[6], 16));
+ logger.debug("u: " + Fr.toString(challanges.u, 16));
+ }
+ const L = calculateLagrangeEvaluations(curve, challanges, vk_verifier);
+ if (logger) {
+ logger.debug("Lagrange Evaluations: ");
+ for (let i=1; i<L.length; i++) {
+ logger.debug(`L${i}(xi)=` + Fr.toString(L[i], 16));
+ }
+ }
+}
+
+/*
+ Copyright 2021 0kims association.
+
+ This file is part of snarkjs.
+
+ snarkjs is a free software: you can redistribute it and/or
+ modify it under the terms of the GNU General Public License as published by the
+ Free Software Foundation, either version 3 of the License, or (at your option)
+ any later version.
+
+ snarkjs is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ more details.
+
+ You should have received a copy of the GNU General Public License along with
+ snarkjs. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+function i2hex(i) {
+ return ("0" + i.toString(16)).slice(-2);
+}
+
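+// p256 renders a value as a quoted, 0x-prefixed hex string left-padded to
+// 32 bytes, the format expected in Solidity calldata.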
+function p256(n) {
+ let nstr = n.toString(16);
+ while (nstr.length < 64) nstr = "0"+nstr;
+ nstr = `"0x${nstr}"`;
+ return nstr;
+}
+
+async function plonkExportSolidityCallData(proof, pub) {
+
+ const curve = await getCurveFromName(proof.curve);
+ const G1 = curve.G1;
+ const Fr = curve.Fr;
+
+ let inputs = "";
+ for (let i=0; i<pub.length; i++) {
+ if (inputs != "") inputs = inputs + ",";
+ inputs = inputs + p256(pub[i]);
+ }
+}
+
+/*
+ Copyright 2021 0kims association.
+
+ This file is part of snarkjs.
+
+ snarkjs is a free software: you can redistribute it and/or
+ modify it under the terms of the GNU General Public License as published by the
+ Free Software Foundation, either version 3 of the License, or (at your option)
+ any later version.
+
+ snarkjs is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ more details.
+
+ You should have received a copy of the GNU General Public License along with
+ snarkjs. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+var plonk = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ setup: plonkSetup,
+ fullProve: plonkFullProve,
+ prove: plonk16Prove,
+ verify: plonkVerify,
+ exportSolidityCallData: plonkExportSolidityCallData
+});
+
+exports.groth16 = groth16;
+exports.plonk = plonk;
+exports.powersOfTau = powersoftau;
+exports.r1cs = r1cs;
+exports.wtns = wtns;
+exports.zKey = zkey;
diff --git a/config/rollup.ses.config.js b/config/rollup.ses.config.js
new file mode 100644
index 00000000..e5de4af6
--- /dev/null
+++ b/config/rollup.ses.config.js
@@ -0,0 +1,31 @@
+import config from './rollup.iife.config';
+import fs from "fs";
+import { builtinModules as builtin } from "module";
+import jscc from 'rollup-plugin-jscc';
+import replace from "@rollup/plugin-replace";
+
+const pkg = JSON.parse(fs.readFileSync("./package.json"));
+delete pkg.dependencies["ejs"];
+
+export default {
+ input: "main.js",
+ output: {
+ file: "build/main.ses.cjs",
+ format: "cjs",
+ },
+ external: [
+ ...Object.keys(pkg.dependencies),
+ ...builtin,
+ ],
+ plugins:[
+ ...config.plugins,
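+ // jscc evaluates compile-time conditionals in the sources (e.g. regions
+ // guarded by //#if _SES ... //#endif), letting SES-only code paths be baked
+ // into this bundle.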
+ jscc({
+ values: { _SES: process.env.SES },
+ }),
+ replace({
+ // To silence the warning: the current default is false, but it will change in the next version.
+ preventAssignment: false,
+ "process.ses": !!process.env.SES
+ }),
+ ]
+};
diff --git a/package-lock.json b/package-lock.json
index 62d35b4a..589746ba 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -33,6 +33,7 @@
"eslint": "^8.7.0",
"mocha": "^9.1.4",
"rollup": "^2.36.2",
+ "rollup-plugin-jscc": "^2.0.0",
"rollup-plugin-terser": "^7.0.2",
"rollup-plugin-visualizer": "^4.2.0"
}
@@ -206,6 +207,24 @@
"ffjavascript": "^0.2.48"
}
},
+ "node_modules/@jsbits/escape-regex-str": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/@jsbits/escape-regex-str/-/escape-regex-str-1.0.3.tgz",
+ "integrity": "sha512-0800vYI2fg1nuUq/T9Tqv8DMOLLNiRAltxFbKIbR7szrvW6qTuI2+zGK51hV7NAAmUr4G83Kvpj2R6Yyg07iIw==",
+ "dev": true,
+ "engines": {
+ "node": ">=4.2"
+ }
+ },
+ "node_modules/@jsbits/get-package-version": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/@jsbits/get-package-version/-/get-package-version-1.0.3.tgz",
+ "integrity": "sha512-IJy1jRL01x7p6UEpgKa1lVLstMUx8EiIR8pPoS5sBfsHEoeLkzYiNpAfxPx8zLDUJyS1yBbChJjcWdPqyH285w==",
+ "dev": true,
+ "engines": {
+ "node": ">=4.2"
+ }
+ },
"node_modules/@rollup/plugin-commonjs": {
"version": "17.1.0",
"resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-17.1.0.tgz",
@@ -1514,6 +1533,22 @@
"js-yaml": "bin/js-yaml.js"
}
},
+ "node_modules/jscc": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/jscc/-/jscc-1.1.1.tgz",
+ "integrity": "sha512-anpZkTXwZbxfxLEBMciKxXMHx2xOLK2qhynIhTnoSyC+wGOEPrAoofxnADgblbarn0kijVMt1U71cQGmRF/1Og==",
+ "dev": true,
+ "dependencies": {
+ "@jsbits/escape-regex-str": "^1.0.2",
+ "@jsbits/get-package-version": "^1.0.2",
+ "magic-string": "^0.25.1",
+ "perf-regexes": "^1.0.1",
+ "skip-regex": "^1.0.2"
+ },
+ "engines": {
+ "node": ">=6.0"
+ }
+ },
"node_modules/json-schema-traverse": {
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
@@ -1878,6 +1913,15 @@
"node": "*"
}
},
+ "node_modules/perf-regexes": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/perf-regexes/-/perf-regexes-1.0.1.tgz",
+ "integrity": "sha512-L7MXxUDtqr4PUaLFCDCXBfGV/6KLIuSEccizDI7JxT+c9x1G1v04BQ4+4oag84SHaCdrBgQAIs/Cqn+flwFPng==",
+ "dev": true,
+ "engines": {
+ "node": ">=6.14"
+ }
+ },
"node_modules/picomatch": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
@@ -2022,6 +2066,23 @@
"fsevents": "~2.3.2"
}
},
+ "node_modules/rollup-plugin-jscc": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/rollup-plugin-jscc/-/rollup-plugin-jscc-2.0.0.tgz",
+ "integrity": "sha512-5jG9q79K2u5uRBTKA+GA4gqt1zA7qHQRpcabZMoVs913gr75s428O7K3r58n2vADDzwIhiOKMo7rCMhOyks6dw==",
+ "dev": true,
+ "dependencies": {
+ "@jsbits/get-package-version": "^1.0.3",
+ "jscc": "^1.1.1",
+ "rollup-pluginutils": "^2.8.2"
+ },
+ "engines": {
+ "node": ">=10.12.0"
+ },
+ "peerDependencies": {
+ "rollup": ">=2"
+ }
+ },
"node_modules/rollup-plugin-terser": {
"version": "7.0.2",
"resolved": "https://registry.npmjs.org/rollup-plugin-terser/-/rollup-plugin-terser-7.0.2.tgz",
@@ -2067,6 +2128,21 @@
"rollup": ">=1.20.0"
}
},
+ "node_modules/rollup-pluginutils": {
+ "version": "2.8.2",
+ "resolved": "https://registry.npmjs.org/rollup-pluginutils/-/rollup-pluginutils-2.8.2.tgz",
+ "integrity": "sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==",
+ "dev": true,
+ "dependencies": {
+ "estree-walker": "^0.6.1"
+ }
+ },
+ "node_modules/rollup-pluginutils/node_modules/estree-walker": {
+ "version": "0.6.1",
+ "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-0.6.1.tgz",
+ "integrity": "sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==",
+ "dev": true
+ },
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
@@ -2117,6 +2193,15 @@
"node": ">=8"
}
},
+ "node_modules/skip-regex": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/skip-regex/-/skip-regex-1.0.2.tgz",
+ "integrity": "sha512-pEjMUbwJ5Pl/6Vn6FsamXHXItJXSRftcibixDmNCWbWhic0hzHrwkMZo0IZ7fMRH9KxcWDFSkzhccB4285PutA==",
+ "dev": true,
+ "engines": {
+ "node": ">=4.2"
+ }
+ },
"node_modules/source-map": {
"version": "0.7.3",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz",
@@ -2584,6 +2669,18 @@
"ffjavascript": "^0.2.48"
}
},
+ "@jsbits/escape-regex-str": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/@jsbits/escape-regex-str/-/escape-regex-str-1.0.3.tgz",
+ "integrity": "sha512-0800vYI2fg1nuUq/T9Tqv8DMOLLNiRAltxFbKIbR7szrvW6qTuI2+zGK51hV7NAAmUr4G83Kvpj2R6Yyg07iIw==",
+ "dev": true
+ },
+ "@jsbits/get-package-version": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/@jsbits/get-package-version/-/get-package-version-1.0.3.tgz",
+ "integrity": "sha512-IJy1jRL01x7p6UEpgKa1lVLstMUx8EiIR8pPoS5sBfsHEoeLkzYiNpAfxPx8zLDUJyS1yBbChJjcWdPqyH285w==",
+ "dev": true
+ },
"@rollup/plugin-commonjs": {
"version": "17.1.0",
"resolved": "https://registry.npmjs.org/@rollup/plugin-commonjs/-/plugin-commonjs-17.1.0.tgz",
@@ -3572,6 +3669,19 @@
"argparse": "^2.0.1"
}
},
+ "jscc": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/jscc/-/jscc-1.1.1.tgz",
+ "integrity": "sha512-anpZkTXwZbxfxLEBMciKxXMHx2xOLK2qhynIhTnoSyC+wGOEPrAoofxnADgblbarn0kijVMt1U71cQGmRF/1Og==",
+ "dev": true,
+ "requires": {
+ "@jsbits/escape-regex-str": "^1.0.2",
+ "@jsbits/get-package-version": "^1.0.2",
+ "magic-string": "^0.25.1",
+ "perf-regexes": "^1.0.1",
+ "skip-regex": "^1.0.2"
+ }
+ },
"json-schema-traverse": {
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
@@ -3846,6 +3956,12 @@
"integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==",
"dev": true
},
+ "perf-regexes": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/perf-regexes/-/perf-regexes-1.0.1.tgz",
+ "integrity": "sha512-L7MXxUDtqr4PUaLFCDCXBfGV/6KLIuSEccizDI7JxT+c9x1G1v04BQ4+4oag84SHaCdrBgQAIs/Cqn+flwFPng==",
+ "dev": true
+ },
"picomatch": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
@@ -3945,6 +4061,17 @@
"fsevents": "~2.3.2"
}
},
+ "rollup-plugin-jscc": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/rollup-plugin-jscc/-/rollup-plugin-jscc-2.0.0.tgz",
+ "integrity": "sha512-5jG9q79K2u5uRBTKA+GA4gqt1zA7qHQRpcabZMoVs913gr75s428O7K3r58n2vADDzwIhiOKMo7rCMhOyks6dw==",
+ "dev": true,
+ "requires": {
+ "@jsbits/get-package-version": "^1.0.3",
+ "jscc": "^1.1.1",
+ "rollup-pluginutils": "^2.8.2"
+ }
+ },
"rollup-plugin-terser": {
"version": "7.0.2",
"resolved": "https://registry.npmjs.org/rollup-plugin-terser/-/rollup-plugin-terser-7.0.2.tgz",
@@ -3980,6 +4107,23 @@
"yargs": "^16.2.0"
}
},
+ "rollup-pluginutils": {
+ "version": "2.8.2",
+ "resolved": "https://registry.npmjs.org/rollup-pluginutils/-/rollup-pluginutils-2.8.2.tgz",
+ "integrity": "sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==",
+ "dev": true,
+ "requires": {
+ "estree-walker": "^0.6.1"
+ },
+ "dependencies": {
+ "estree-walker": {
+ "version": "0.6.1",
+ "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-0.6.1.tgz",
+ "integrity": "sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==",
+ "dev": true
+ }
+ }
+ },
"safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
@@ -4010,6 +4154,12 @@
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
"dev": true
},
+ "skip-regex": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/skip-regex/-/skip-regex-1.0.2.tgz",
+ "integrity": "sha512-pEjMUbwJ5Pl/6Vn6FsamXHXItJXSRftcibixDmNCWbWhic0hzHrwkMZo0IZ7fMRH9KxcWDFSkzhccB4285PutA==",
+ "dev": true
+ },
"source-map": {
"version": "0.7.3",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz",
diff --git a/package.json b/package.json
index 63bf6b4c..af748d19 100644
--- a/package.json
+++ b/package.json
@@ -9,12 +9,14 @@
"import": "./main.js",
"require": "./build/main.cjs"
},
+ "browser": "./build/main.ses.cjs",
"scripts": {
"test": "mocha",
"build": "rollup -c config/rollup.cjs.config.js",
"buildcli": "rollup -c config/rollup.cli.config.js",
"buildiife": "BROWSER=true rollup -c config/rollup.iife.config.js",
- "buildiifemin": "BROWSER=true rollup -c config/rollup.iife_min.config.js"
+ "buildiifemin": "BROWSER=true rollup -c config/rollup.iife_min.config.js",
+ "buildses": "BROWSER=true SES=true rollup -c config/rollup.ses.config.js"
},
"bin": {
"snarkjs": "build/cli.cjs"
@@ -59,6 +61,7 @@
"eslint": "^8.7.0",
"mocha": "^9.1.4",
"rollup": "^2.36.2",
+ "rollup-plugin-jscc": "^2.0.0",
"rollup-plugin-terser": "^7.0.2",
"rollup-plugin-visualizer": "^4.2.0"
}
diff --git a/src/curves.js b/src/curves.js
index 156e6bfa..54019530 100644
--- a/src/curves.js
+++ b/src/curves.js
@@ -6,12 +6,14 @@ const bn128r = Scalar.e("2188824287183927522224640574525727508854836440041603434
const bls12381q = Scalar.e("1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab", 16);
const bn128q = Scalar.e("21888242871839275222246405745257275088696311157297823662689037894645226208583");
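+// `process.ses` is replaced statically by @rollup/plugin-replace in the SES
+// build (see config/rollup.ses.config.js); SES environments are expected to
+// run without workers, so the curve builders are forced single-threaded.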
+const singleThread = process.ses;
+
export async function getCurveFromR(r) {
let curve;
if (Scalar.eq(r, bn128r)) {
- curve = await buildBn128();
+ curve = await buildBn128(singleThread);
} else if (Scalar.eq(r, bls12381r)) {
- curve = await buildBls12381();
+ curve = await buildBls12381(singleThread);
} else {
throw new Error(`Curve not supported: ${Scalar.toString(r)}`);
}
@@ -21,9 +23,9 @@ export async function getCurveFromR(r) {
export async function getCurveFromQ(q) {
let curve;
if (Scalar.eq(q, bn128q)) {
- curve = await buildBn128();
+ curve = await buildBn128(singleThread);
} else if (Scalar.eq(q, bls12381q)) {
- curve = await buildBls12381();
+ curve = await buildBls12381(singleThread);
} else {
throw new Error(`Curve not supported: ${Scalar.toString(q)}`);
}
@@ -34,9 +36,9 @@ export async function getCurveFromName(name) {
let curve;
const normName = normalizeName(name);
if (["BN128", "BN254", "ALTBN128"].indexOf(normName) >= 0) {
- curve = await buildBn128();
+ curve = await buildBn128(singleThread);
} else if (["BLS12381"].indexOf(normName) >= 0) {
- curve = await buildBls12381();
+ curve = await buildBls12381(singleThread);
} else {
throw new Error(`Curve not supported: ${name}`);
}