diff --git a/cli.js b/cli.js
index 50fa0f6..1d85ad2 100755
--- a/cli.js
+++ b/cli.js
@@ -122,6 +122,20 @@ const commands = [
         options: "-verbose|v -entropy|e",
         action: powersOfTawContribute
     },
+    {
+        cmd: "powersoftaw import <old_powersoftaw.ptaw> <response> <new_powersoftaw.ptaw>",
+        description: "import a response to a ptaw file",
+        alias: ["pti"],
+        options: "-verbose|v -nopoints -nocheck",
+        action: powersOfTawImport
+    },
+    {
+        cmd: "powersoftaw verify <powersoftaw.ptaw>",
+        description: "verifies a powers of tau file",
+        alias: ["ptv"],
+        options: "-verbose|v",
+        action: powersOfTawVerify
+    },
 ];
@@ -515,6 +529,42 @@ async function powersOfTawContribute(params, options) {
 }
 
+async function powersOfTawImport(params, options) {
+    let oldPtauName;
+    let response;
+    let newPtauName;
+    let importPoints = true;
+    let doCheck = true;
+
+    oldPtauName = params[0];
+    response = params[1];
+    newPtauName = params[2];
+
+    if (options.nopoints) importPoints = false;
+    if (options.nocheck) doCheck = false;
+
+    const res = await powersOfTaw.importResponse(oldPtauName, response, newPtauName, importPoints, options.verbose);
+
+    if (res) return res;
+    if (!doCheck) return;
+
+    // TODO Verify
+}
+
+async function powersOfTawVerify(params, options) {
+    let ptauName;
+
+    ptauName = params[0];
+
+    const res = await powersOfTaw.verify(ptauName, options.verbose);
+    if (res) {
+        console.log("Powers of tau OK!");
+    } else {
+        console.log("=======>INVALID Powers of tau<==========");
+    }
+}
+
+
 function generateVerifier_original(verificationKey) {
     let template = fs.readFileSync(path.join( __dirname, "templates", "verifier_original.sol"), "utf-8");
diff --git a/package-lock.json b/package-lock.json
index 857fbe2..5af892d 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -151,12 +151,23 @@
             "requires": {
                 "blake2b-wasm": "^1.1.0",
                 "nanoassert": "^1.0.0"
+            },
+            "dependencies": {
+                "blake2b-wasm": {
+                    "version": "1.1.7",
+                    "resolved": "https://registry.npmjs.org/blake2b-wasm/-/blake2b-wasm-1.1.7.tgz",
+                    "integrity": "sha512-oFIHvXhlz/DUgF0kq5B1CqxIDjIJwh9iDeUUGQUcvgiGz7Wdw03McEO7CfLBy7QKGdsydcMCgO9jFNBAFCtFcA==",
+                    "requires": {
+                        "nanoassert": "^1.0.0"
+                    }
+                }
             }
         },
         "blake2b-wasm": {
-            "version": "1.1.7",
-            "resolved": "https://registry.npmjs.org/blake2b-wasm/-/blake2b-wasm-1.1.7.tgz",
-            "integrity": "sha512-oFIHvXhlz/DUgF0kq5B1CqxIDjIJwh9iDeUUGQUcvgiGz7Wdw03McEO7CfLBy7QKGdsydcMCgO9jFNBAFCtFcA==",
+            "version": "2.1.0",
+            "resolved": "https://registry.npmjs.org/blake2b-wasm/-/blake2b-wasm-2.1.0.tgz",
+            "integrity": "sha512-8zKXt9nk4cUCBU2jaUcSYcPA+UESwWOmb9Gsi8J35BifVb+tjVmbDhZbvmVmZEk6xZN1y35RNW6VqOwb0mkqsg==",
+            "dev": true,
             "requires": {
                 "nanoassert": "^1.0.0"
             }
diff --git a/package.json b/package.json
index 3ef43d2..3988256 100644
--- a/package.json
+++ b/package.json
@@ -39,6 +39,7 @@
         "yargs": "^12.0.5"
     },
     "devDependencies": {
+        "blake2b-wasm": "^2.1.0",
         "eslint": "^6.8.0",
         "lodash": "^4.17.15",
         "mocha": "^7.1.1"
diff --git a/src/keypair.js b/src/keypair.js
index 3df689b..807a6be 100644
--- a/src/keypair.js
+++ b/src/keypair.js
@@ -40,5 +40,13 @@ function createKeyPair(curve, personalization, challangeHash, rng ) {
     return k;
 }
 
-module.exports.create = createKeyPair;
+function createPTauKey(curve, challangeHash, rng) {
+    const key = {};
+    key.tau = createKeyPair(curve, 0, challangeHash, rng);
+    key.alpha = createKeyPair(curve, 1, challangeHash, rng);
+    key.beta = createKeyPair(curve, 2, challangeHash, rng);
+    return key;
+}
+
+module.exports.createPTauKey = createPTauKey;
 module.exports.getG2sp = getG2sp;
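
createPTauKey, added to src/keypair.js above, derives the three independent keypairs a contributor needs (one per ceremony secret) by calling createKeyPair with a distinct personalization byte: 0 for tau, 1 for alpha, 2 for beta. A minimal usage sketch; the surrounding variables are illustrative and not part of this patch:

    // assuming `curve` (bn128 from ffjavascript), a 64-byte `challangeHash`
    // and a seeded ChaCha `rng`, none of which are defined here
    const keyPair = require("./src/keypair");
    const key = keyPair.createPTauKey(curve, challangeHash, rng);
    // key.tau, key.alpha and key.beta each carry g1_s, g1_sx, g2_sp, g2_spx
    // and the secret scalar prvKey, the fields printed by the contribute command
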
diff --git a/src/partialhash.js b/src/partialhash.js
new file mode 100644
index 0000000..64602f8
--- /dev/null
+++ b/src/partialhash.js
@@ -0,0 +1,31 @@
+
+const blake2wasm = require("blake2b-wasm");
+
+
+
+async function run() {
+    await blake2wasm.ready();
+
+    const hasher1 = blake2wasm(64);
+
+    hasher1.update(Uint8Array.of(1,2,3,4));
+
+    const ph = hasher1.getPartialHash();
+
+    hasher1.update(Uint8Array.of(5,6,7,8));
+
+    console.log(hasher1.digest("hex"));
+
+    const hasher2 = blake2wasm(64);
+
+    hasher2.setPartialHash(ph);
+
+    hasher2.update(Uint8Array.of(5,6,7,8));
+
+    console.log(hasher2.digest("hex"));
+
+}
+
+run().then(() => {
+    process.exit();
+});
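
The demo above is the motivation for the blake2b-wasm 2.x upgrade: getPartialHash() snapshots the hasher's internal midstate so hashing can resume later from exactly that point, which is why both hashers print the same digest. The ceremony code depends on this: each contribution stores a 216-byte partialHash (see readContribution further down), so a verifier can recompute a response hash from the saved midstate plus the public key bytes instead of replaying the whole, potentially very large, response file.
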
diff --git a/src/powersoftau_contribute.js b/src/powersoftau_contribute.js
index 74dcfbc..f859f4d 100644
--- a/src/powersoftau_contribute.js
+++ b/src/powersoftau_contribute.js
@@ -18,11 +18,12 @@
 const fastFile = require("fastfile");
 const assert = require("assert");
-const blake2b = require("blake2b");
+const blake2b = require("blake2b-wasm");
 const readline = require("readline");
 const crypto = require("crypto");
 const ChaCha = require("ffjavascript").ChaCha;
 const fs = require("fs");
+const utils = require("./powersoftau_utils");
 
 const buildTaskManager = require("./taskmanager");
@@ -42,6 +43,8 @@ function askEntropy() {
 
 async function contribute(curve, challangeFilename, responesFileName, entropy, verbose) {
+    await blake2b.ready();
+
     const MAX_CHUNK_SIZE = 1024;
 
     let stats = await fs.promises.stat(challangeFilename);
@@ -75,19 +78,24 @@ async function contribute(curve, challangeFilename, responesFileName, entropy, v
     for (let i=0; i<[...]
[...]
+        ["tau", "alpha", "beta"].forEach((k) => {
+            console.log(k, ".g1_s_x: " + key[k].g1_s[0].toString(16));
+            console.log(k, ".g1_s_y: " + key[k].g1_s[1].toString(16));
+            console.log(k, ".g1_sx_x: " + key[k].g1_sx[0].toString(16));
+            console.log(k, ".g1_sx_y: " + key[k].g1_sx[1].toString(16));
+            console.log(k, ".g2_sp_x_c0: " + key[k].g2_sp[0][0].toString(16));
+            console.log(k, ".g2_sp_x_c1: " + key[k].g2_sp[0][1].toString(16));
+            console.log(k, ".g2_sp_y_c0: " + key[k].g2_sp[1][0].toString(16));
+            console.log(k, ".g2_sp_y_c1: " + key[k].g2_sp[1][1].toString(16));
+            console.log(k, ".g2_spx_x_c0: " + key[k].g2_spx[0][0].toString(16));
+            console.log(k, ".g2_spx_x_c1: " + key[k].g2_spx[0][1].toString(16));
+            console.log(k, ".g2_spx_y_c0: " + key[k].g2_spx[1][0].toString(16));
+            console.log(k, ".g2_spx_y_c1: " + key[k].g2_spx[1][1].toString(16));
+            console.log("");
+        });
     }
@@ -138,14 +145,14 @@ async function contribute(curve, challangeFilename, responesFileName, entropy, v
         await taskManager.addTask({
             cmd: "MULG1",
             first: t,
-            inc: kTau.prvKey.toString(),
+            inc: key.tau.prvKey.toString(),
             buff: buff,
             n: n,
             writePos: writePointer
         }, async function(r) {
             return await fdTo.write(r.buff, r.writePos);
         });
-        t = curve.Fr.mul(t, curve.Fr.pow(kTau.prvKey, n));
+        t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
         writePointer += n*scG1;
     }
@@ -158,19 +165,19 @@ async function contribute(curve, challangeFilename, responesFileName, entropy, v
         await taskManager.addTask({
             cmd: "MULG2",
             first: t,
-            inc: kTau.prvKey.toString(),
+            inc: key.tau.prvKey.toString(),
             buff: buff,
             n: n,
             writePos: writePointer
         }, async function(r) {
             return await fdTo.write(r.buff, r.writePos);
         });
-        t = curve.Fr.mul(t, curve.Fr.pow(kTau.prvKey, n));
+        t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
         writePointer += n*scG2;
     }
 
     // AlphaTauG1
-    t = curve.Fr.e(kAlpha.prvKey);
+    t = curve.Fr.e(key.alpha.prvKey);
     for (let i=0; i<[...]
[...]
diff --git a/src/powersoftau_export.js b/src/powersoftau_export.js
[...]
--- a/src/powersoftau_export.js
+++ b/src/powersoftau_export.js
[...]
-    if (v>1) assert(false, "Version not supported");
-
-    const nSections = await fdFrom.readULE32();
-
-    // Scan sections
-    let sections = [];
-    for (let i=0; i<[...]
[...]
-    if (sections[1].length>1) assert(false, "File has more than one header");
-
-    fdFrom.pos = sections[1][0].p;
-    const n8 = await fdFrom.readULE32();
-    const qBuff = await fdFrom.read(n8);
-    const q = Scalar.fromRprLE(qBuff);
-    let curve;
-    if (Scalar.eq(q, bn128.q)) {
-        curve = bn128;
-    } else {
-        assert(false, "Curve not supported");
-    }
-    assert(curve.F1.n64*8 == n8, "Invalid size");
-
-    const power = await fdFrom.readULE32();
-    const nContributions = await fdFrom.readULE32();
+    const {curve, power} = await utils.readPTauHeader(fdFrom, sections);
+    const contributions = await utils.readContributions(fdFrom, curve, sections);
 
     let challangeHash;
-    if (nContributions == 0) {
-        challangeHash = Buffer.from(blake2b(64).digest());
+    if (contributions.length == 0) {
+        challangeHash = Blake2b(64).digest();
     } else {
-        assert(false, "Not implemented");
+        challangeHash = contributions[contributions.length-1].nextChallange;
     }
 
     const fdTo = await fastFile.createOverride(challangeFilename);
 
-    const toHash = blake2b(64);
+    const toHash = Blake2b(64);
     fdTo.write(challangeHash);
     toHash.update(challangeHash);
 
+    const buffG1 = new ArrayBuffer(curve.F1.n8*2);
+    const buffG1v = new Uint8Array(buffG1);
+    const buffG2 = new ArrayBuffer(curve.F2.n8*2);
+    const buffG2v = new Uint8Array(buffG2);
+
     // Process tauG1
     if (!sections[2])  assert(false, "File has no tauG1 section");
     if (sections[2].length>1) assert(false, "File has more than one tauG1 section");
@@ -136,26 +100,27 @@ async function exportChallange(pTauFilename, challangeFilename, verbose) {
 
     console.log("Challange Hash: " +newChallangeHash);
 
+
     async function readG1() {
-        const pBuff = await fdFrom.read(curve.F1.n64*8*2);
+        const pBuff = await fdFrom.read(curve.F1.n8*2);
         return curve.G1.fromRprLEM( pBuff );
     }
 
     async function readG2() {
-        const pBuff = await fdFrom.read(curve.F1.n64*8*2*2);
+        const pBuff = await fdFrom.read(curve.F2.n8*2);
         return curve.G2.fromRprLEM( pBuff );
     }
 
     async function writeG1(p) {
-        const rpr = curve.G1.toRprBE(p);
-        await fdTo.write(rpr);
-        toHash.update(rpr);
+        curve.G1.toRprBE(buffG1, 0, p);
+        await fdTo.write(buffG1);
+        toHash.update(buffG1v);
     }
 
     async function writeG2(p) {
-        const rpr = curve.G2.toRprBE(p);
-        await fdTo.write(rpr);
-        toHash.update(rpr);
+        curve.G2.toRprBE(buffG2, 0, p);
+        await fdTo.write(buffG2);
+        toHash.update(buffG2v);
     }
 }
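
exportChallange bridges the two on-disk point encodings: a .ptau file stores points in little-endian Montgomery form (fromRprLEM), while challenge/response files use big-endian affine coordinates that are hashed exactly as written. That is why writeG1/writeG2 now serialize into a reusable buffer and feed the very same bytes to both the output file and the hasher. A sketch of the challenge layout this produces; the point counts follow the section sizes used throughout the patch:

    // challenge file, as written by exportChallange:
    //   64 bytes               previous challenge hash
    //   (2^power)*2-1 points   tauG1      (G1, big-endian, uncompressed)
    //   2^power points         tauG2      (G2)
    //   2^power points         alphaTauG1 (G1)
    //   2^power points         betaTauG1  (G1)
    //   1 point                betaG2     (G2)
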
diff --git a/src/powersoftau_import.js b/src/powersoftau_import.js
index 4be0e3a..9a3b7e3 100644
--- a/src/powersoftau_import.js
+++ b/src/powersoftau_import.js
@@ -1,60 +1,22 @@
-
 const assert = require("assert");
 const fastFile = require("fastfile");
-const Scalar = require("Scalar");
-const bn128 = require("ffjavascript").bn128;
-const Blake2 = require("blake2");
+const Blake2b = require("blake2b-wasm");
 const fs = require("fs");
+const utils = require("./powersoftau_utils");
 
+async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, importPoints, verbose) {
 
-async function importResponse(oldPtauFilename, contributionFilename, newPotFilename, verbose) {
-    const fdOld = await fastFile.readExisting(oldPtauFilename);
+    await Blake2b.ready();
 
-    const b = await fdOld.read(4);
-
-    if (b.toString() != "ptau") assert(false, "Old ptau file: invalid format.");
-
-    let v = await fdOld.readULE32();
-
-    if (v>1) assert(false, "Old ptau file: Version not supported");
-
-    const nSections = await fdOld.readULE32();
-
-    // Scan sections
-    let sections = [];
-    for (let i=0; i<[...]
[...]
-    if (sections[1].length>1) assert(false, "Old ptau file: File has more than one header");
-
-    fdOld.pos = sections[1][0].p;
-    const n8 = await fdOld.readULE32();
-    const qBuff = await fdOld.read(n8);
-    const q = Scalar.fromRprLE(qBuff);
-    let curve;
-    if (Scalar.eq(q, bn128.q)) {
-        curve = bn128;
-    } else {
-        assert(false, "Old ptau file: Curve not supported");
-    }
-    assert(curve.F1.n64*8 == n8, "Old ptau file: Invalid size");
-
-    const power = await fdOld.readULE32();
-    const nContributions = await fdOld.readULE32();
-    const sG1 = curve.F1.n64*8*2;
-    const scG1 = curve.F1.n64*8; // Compresed size
-    const sG2 = curve.F2.n64*8*2;
-    const scG2 = curve.F2.n64*8; // Compresed size
+    const {fd: fdOld, sections} = await utils.readBinFile(oldPtauFilename, "ptau", 1);
+    const {curve, power} = await utils.readPTauHeader(fdOld, sections);
+    const contributions = await utils.readContributions(fdOld, curve, sections);
+    const currentContribution = {};
+    const sG1 = curve.F1.n8*2;
+    const scG1 = curve.F1.n8; // Compressed size
+    const sG2 = curve.F2.n8*2;
+    const scG2 = curve.F2.n8; // Compressed size
 
     let stats = await fs.promises.stat(contributionFilename);
     assert.equal(stats.size,
@@ -68,39 +30,83 @@ async function importResponse(oldPtauFilename, contributionFilename, newPotFilen
         "Size of the contribution is invalid"
     );
 
-    const fdNew = await fastFile.createOverride(newPotFilename);
-
-    await fdNew.write(Buffer.from("ptau"), 0); // Magic "r1cs"
-
-    await fd.writeULE32(1); // Version
-    await fd.writeULE32(7); // Number of Sections
-
-    // Write the header
-    ///////////
-    await fd.writeULE32(1); // Header type
-    const pHeaderSize = fd.pos;
-    await fd.writeULE64(0); // Temporally set to 0 length
-
-    const primeQ = curve.q;
-
-    await fd.writeULE32(curve.F1.n64*8);
-    await fd.write(Scalar.toRprLE(primeQ, curve.F1.n64*8));
-    await fd.writeULE32(power); // power
-    await fd.writeULE32(0); // Total number of public contributions
-
-    const headerSize = fd.pos - pHeaderSize - 8;
-
+    let lastChallangeHash;
+    if (contributions.length>0) {
+        lastChallangeHash = contributions[contributions.length-1].nextChallange;
+    } else {
+        lastChallangeHash = utils.calculateFirstChallangeHash(curve, power);
+    }
+
+    const fdNew = await utils.createBinFile(newPTauFilename, "ptau", 1, 7);
+    await utils.writePTauHeader(fdNew, curve, power);
 
     const fdResponse = await fastFile.readExisting(contributionFilename);
-    const hasherResponse = new Blake2(64);
 
     const contributionPreviousHash = await fdResponse.read(64);
-    hasherResponse.update(contributionPreviousHash);
+
+    assert(utils.hashIsEqual(contributionPreviousHash,lastChallangeHash),
+        "Wrong contribution. This contribution is not based on the previous hash");
+
+    const hasherResponse = new Blake2b(64);
+    hasherResponse.update(new Uint8Array(contributionPreviousHash));
+
+    const hasherNewChallange = new Blake2b(64);
+    hasherNewChallange.update(lastChallangeHash);
+
+    await processSection(fdResponse, fdNew, 2, (1 << power) * 2 -1, "G1", "tauG1", 1);
+    await processSection(fdResponse, fdNew, 3, (1 << power)       , "G2", "tauG2", 1);
+    await processSection(fdResponse, fdNew, 4, (1 << power)       , "G1", "alphaG1", 0);
+    await processSection(fdResponse, fdNew, 5, (1 << power)       , "G1", "betaG1", 0);
+    await processSection(fdResponse, fdNew, 6, 1                  , "G2", "betaG2", 0);
+
+    currentContribution.nextChallange = hasherNewChallange.digest();
+    currentContribution.partialHash = hasherResponse.getPartialHash();
+
+    const buffKey = await fdResponse.read(curve.F1.n8*2*6+curve.F2.n8*2*3);
+
+    currentContribution.key = utils.fromPtauPubKeyRpr(buffKey, 0, curve, false);
+
+    hasherResponse.update(new Uint8Array(buffKey));
+    const hashResponse = hasherResponse.digest();
+
+    if (verbose) {
+        console.log("Contribution Response Hash imported: ");
+        console.log(utils.formatHash(hashResponse));
+    }
+
+    contributions.push(currentContribution);
+
+    await utils.writeContributions(fdNew, curve, contributions);
+
+    await fdResponse.close();
+    await fdNew.close();
+
+    async function processSection(fdFrom, fdTo, sectionId, n, G, name, contributionId) {
+
+        const buffU = new ArrayBuffer(curve[G].F.n8*2);
+        const buffUv = new Uint8Array(buffU);
+        const scG = curve[G].F.n8;
+
+        await fdTo.writeULE32(sectionId); // section id
+        const pSection = fdTo.pos;
+        await fdTo.writeULE64(0); // Temporarily set to 0 length
+
+        for (let i=0; i< n; i++) {
+            const buffC = await fdFrom.read(scG);
+            hasherResponse.update(new Uint8Array(buffC));
+
+            const P = curve[G].fromRprCompressed(buffC);
+            if (i==contributionId) currentContribution[name] = P;
+
+            curve[G].toRprBE(buffU, 0, P);
+            hasherNewChallange.update(buffUv);
+
+            curve[G].toRprLEM(buffU, 0, P);
+            await fdTo.write(buffU);
+
+            if ((verbose)&&((i%100000) == 0)&&i) console.log(name +": " + i);
+        }
+
+        const sSize  = fdTo.pos - pSection -8;
+        const lastPos = fdTo.pos;
+        await fdTo.writeULE64(sSize, pSection);
+        fdTo.pos = lastPos;
+    }
 }
 
 module.exports = importResponse;
diff --git a/src/powersoftau_new.js b/src/powersoftau_new.js
index 32d67f1..7bd3b86 100644
--- a/src/powersoftau_new.js
+++ b/src/powersoftau_new.js
@@ -1,67 +1,65 @@
 /*
-Header
+Header(1)
     n8
     prime
     power
-    nContributions
-tauG1
-    [(1<<[...]
[...]
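
The restructured .ptau container is built from numbered sections, which is what lets importResponse stream a section of initially unknown byte length and patch the real size into the section header afterwards (the writeULE64(sSize, pSection) step above). The ids, as used by the processSection calls above and by the utilities that follow:

    // .ptau sections:
    //   1: header (n8, prime, power)   4: alphaTauG1   7: contributions
    //   2: tauG1                       5: betaTauG1
    //   3: tauG2                       6: betaG2
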
diff --git a/src/powersoftau_utils.js b/src/powersoftau_utils.js
new file mode 100644
[...]
--- /dev/null
+++ b/src/powersoftau_utils.js
[...]
+    if (sections[1].length>1) assert(false, fd.fileName +": File has more than one header");
+
+    fd.pos = sections[1][0].p;
+    const n8 = await fd.readULE32();
+    const buff = await fd.read(n8);
+    const q = Scalar.fromRprLE(buff);
+    let curve;
+    if (Scalar.eq(q, bn128.q)) {
+        curve = bn128;
+    } else {
+        assert(false, fd.fileName +": Curve not supported");
+    }
+    assert(curve.F1.n64*8 == n8, fd.fileName +": Invalid size");
+
+    const power = await fd.readULE32();
+
+    assert.equal(fd.pos-sections[1][0].p, sections[1][0].size);
+
+    return {curve, power};
+}
+
+
+async function readPtauPubKey(fd, curve, montgomery) {
+
+    const buff = await fd.read(curve.F1.n8*2*6 + curve.F2.n8*2*3);
+
+    return fromPtauPubKeyRpr(buff, 0, curve, montgomery);
+/*
+    const key = {
+        tau: {},
+        alpha: {},
+        beta: {}
+    };
+
+    key.tau.g1_s = await readG1();
+    key.tau.g1_sx = await readG1();
+    key.alpha.g1_s = await readG1();
+    key.alpha.g1_sx = await readG1();
+    key.beta.g1_s = await readG1();
+    key.beta.g1_sx = await readG1();
+    key.tau.g2_spx = await readG2();
+    key.alpha.g2_spx = await readG2();
+    key.beta.g2_spx = await readG2();
+
+    return key;
+
+    async function readG1() {
+        const pBuff = await fd.read(curve.F1.n8*2);
+        if (montgomery) {
+            return curve.G1.fromRprLEM( pBuff );
+        } else {
+            return curve.G1.fromRprBE( pBuff );
+        }
+    }
+
+    async function readG2() {
+        const pBuff = await fd.read(curve.F2.n8*2);
+        if (montgomery) {
+            return curve.G2.fromRprLEM( pBuff );
+        } else {
+            return curve.G2.fromRprBE( pBuff );
+        }
+    }
+*/
+}
+
+function fromPtauPubKeyRpr(buff, pos, curve, montgomery) {
+
+    const key = {
+        tau: {},
+        alpha: {},
+        beta: {}
+    };
+
+    key.tau.g1_s = readG1();
+    key.tau.g1_sx = readG1();
+    key.alpha.g1_s = readG1();
+    key.alpha.g1_sx = readG1();
+    key.beta.g1_s = readG1();
+    key.beta.g1_sx = readG1();
+    key.tau.g2_spx = readG2();
+    key.alpha.g2_spx = readG2();
+    key.beta.g2_spx = readG2();
+
+    return key;
+
+    function readG1() {
+        let p;
+        if (montgomery) {
+            p = curve.G1.fromRprLEM( buff, pos );
+        } else {
+            p = curve.G1.fromRprBE( buff, pos );
+        }
+        pos += curve.G1.F.n8*2;
+        return p;
+    }
+
+    function readG2() {
+        let p;
+        if (montgomery) {
+            p = curve.G2.fromRprLEM( buff, pos );
+        } else {
+            p = curve.G2.fromRprBE( buff, pos );
+        }
+        pos += curve.G2.F.n8*2;
+        return p;
+    }
+}
+
+function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) {
+
+    writeG1(key.tau.g1_s);
+    writeG1(key.tau.g1_sx);
+    writeG1(key.alpha.g1_s);
+    writeG1(key.alpha.g1_sx);
+    writeG1(key.beta.g1_s);
+    writeG1(key.beta.g1_sx);
+    writeG2(key.tau.g2_spx);
+    writeG2(key.alpha.g2_spx);
+    writeG2(key.beta.g2_spx);
+
+    function writeG1(p) {
+        if (montgomery) {
+            curve.G1.toRprLEM(buff, pos, p);
+        } else {
+            curve.G1.toRprBE(buff, pos, p);
+        }
+        pos += curve.F1.n8*2;
+    }
+
+    function writeG2(p) {
+        if (montgomery) {
+            curve.G2.toRprLEM(buff, pos, p);
+        } else {
+            curve.G2.toRprBE(buff, pos, p);
+        }
+        pos += curve.F2.n8*2;
+    }
+
+    return buff;
+}
+
+async function writePtauPubKey(fd, curve, key, montgomery) {
+    const buff = new ArrayBuffer(curve.F1.n8*2*6 + curve.F2.n8*2*3);
+    toPtauPubKeyRpr(buff, 0, curve, key, montgomery);
+    await fd.write(buff);
+/*
+    const buffG1 = new ArrayBuffer(curve.F1.n8*2);
+    const buffG2 = new ArrayBuffer(curve.F2.n8*2);
+
+    await writeG1(key.tau.g1_s);
+    await writeG1(key.tau.g1_sx);
+    await writeG1(key.alpha.g1_s);
+    await writeG1(key.alpha.g1_sx);
+    await writeG1(key.beta.g1_s);
+    await writeG1(key.beta.g1_sx);
+    await writeG2(key.tau.g2_spx);
+    await writeG2(key.alpha.g2_spx);
+    await writeG2(key.beta.g2_spx);
+
+    async function writeG1(p) {
+        if (montgomery) {
+            curve.G1.toRprLEM(buffG1, 0, p);
+        } else {
+            curve.G1.toRprBE(buffG1, 0, p);
+        }
+        await fd.write(buffG1);
+    }
+
+    async function writeG2(p) {
+        if (montgomery) {
+            curve.G2.toRprLEM(buffG2, 0, p);
+        } else {
+            curve.G2.toRprBE(buffG2, 0, p);
+        }
+        await fd.write(buffG2);
+    }
+*/
+}
+
+async function readContribution(fd, curve) {
+    const c = {};
+
+    c.tauG1 = await readG1();
+    c.tauG2 = await readG2();
+    c.alphaG1 = await readG1();
+    c.betaG1 = await readG1();
+    c.betaG2 = await readG2();
+    c.key = await readPtauPubKey(fd, curve, true);
+    c.partialHash = new Uint8Array(await fd.read(216));
+    c.nextChallange = new Uint8Array(await fd.read(64));
+
+    return c;
+
+    async function readG1() {
+        const pBuff = await fd.read(curve.F1.n8*2);
+        return curve.G1.fromRprLEM( pBuff );
+    }
+
+    async function readG2() {
+        const pBuff = await fd.read(curve.F2.n8*2);
+        return curve.G2.fromRprLEM( pBuff );
+    }
+}
fd.fileName + ": File has no contributions"); + if (sections[7][0].length>1) assert(false, fd.fileName +": File has more than one contributions section"); + + fd.pos = sections[7][0].p; + const nContributions = await fd.readULE32(); + const contributions = []; + for (let i=0; i0) S += "\n"; + S += "\t\t"; + for (let j=0; j<4; j++) { + if (j>0) S += " "; + S += a.getUint32(i*4+j).toString(16).padStart(8, "0"); + } + } + return S; } + +function hashIsEqual(h1, h2) { + if (h1.byteLength != h2.byteLength) return false; + var dv1 = new Int8Array(h1); + var dv2 = new Int8Array(h2); + for (var i = 0 ; i != h1.byteLength ; i++) + { + if (dv1[i] != dv2[i]) return false; + } + return true; +} + +function calculateFirstChallangeHash(curve, power) { + const hasher = new Blake2b(64); + + const buffG1 = new ArrayBuffer(curve.G1.F.n8*2); + const vG1 = new Uint8Array(buffG1); + const buffG2 = new ArrayBuffer(curve.G2.F.n8*2); + const vG2 = new Uint8Array(buffG2); + curve.G1.toRprBE(buffG1, 0, curve.G1.g); + curve.G2.toRprBE(buffG2, 0, curve.G2.g); + + const blankHasher = new Blake2b(64); + hasher.update(blankHasher.digest()); + + let n; + n=(1 << power)*2 -1; + for (let i=0; i1) { + prevContr = contrs[contrs.length-2]; + } else { + prevContr = initialContribution; + } + const curContr = contrs[contrs.length-1]; + if (verbose) console.log("Validating contribution #"+contrs[contrs.length-1].id); + const res = verifyContribution(curve, curContr,prevContr, verbose); + if (!res) return false; + + + const nextContributionHasher = Blake2b(64); + nextContributionHasher.update(prevContr.nextChallange); + const key = curContr.key; + + // Verify powers and compute nextChallangeHash + + // await test(); + + // Verify Section tau*G1 + if (verbose) console.log("Verifying powers in tau*G1 section"); + const rTau1 = await processSection(2, "G1", "tauG1", (1 << power)*2-1, [0, 1]); + if (!sameRatio(curve, rTau1.R1, rTau1.R2, key.tau.g2_sp, key.tau.g2_spx)) { + console.log("tauG1 section. Powers do not match"); + return false; + } + if (!curve.G1.eq(curve.G1.g, rTau1.singularPoints[0])) { + console.log("First element of tau*G1 section must be the generator"); + return false; + } + if (!curve.G1.eq(curContr.tauG1, rTau1.singularPoints[1])) { + console.log("Second element of tau*G1 section does not match the one in the contribution section"); + return false; + } + + // await test(); + + // Verify Section tau*G2 + if (verbose) console.log("Verifying powers in tau*G2 section"); + const rTau2 = await processSection(3, "G2", "tauG2", 1 << power, [0, 1]); + if (!sameRatio(curve, key.tau.g1_s, key.tau.g1_sx, rTau2.R1, rTau2.R2)) { + console.log("tauG2 section. Powers do not match"); + return false; + } + if (!curve.G2.eq(curve.G2.g, rTau2.singularPoints[0])) { + console.log("First element of tau*G2 section must be the generator"); + return false; + } + if (!curve.G2.eq(curContr.tauG2, rTau2.singularPoints[1])) { + console.log("Second element of tau*G2 section does not match the one in the contribution section"); + return false; + } + + // Verify Section alpha*tau*G1 + if (verbose) console.log("Verifying powers in alpha*tau*G1 section"); + const rAlphaTauG1 = await processSection(4, "G1", "alphatauG1", 1 << power, [0]); + if (!sameRatio(curve, rAlphaTauG1.R1, rAlphaTauG1.R2, key.tau.g2_sp, key.tau.g2_spx)) { + console.log("alphaTauG1 section. 
Powers do not match"); + return false; + } + if (!curve.G1.eq(curContr.alphaG1, rAlphaTauG1.singularPoints[0])) { + console.log("First element of alpha*tau*G1 section (alpha*G1) does not match the one in the contribution section"); + return false; + } + + // Verify Section beta*tau*G1 + if (verbose) console.log("Verifying powers in beta*tau*G1 section"); + const rBetaTauG1 = await processSection(5, "G1", "betatauG1", 1 << power, [0]); + if (!sameRatio(curve, rBetaTauG1.R1, rBetaTauG1.R2, key.tau.g2_sp, key.tau.g2_spx)) { + console.log("betaTauG1 section. Powers do not match"); + return false; + } + if (!curve.G1.eq(curContr.betaG1, rBetaTauG1.singularPoints[0])) { + console.log("First element of beta*tau*G1 section (beta*G1) does not match the one in the contribution section"); + return false; + } + + //Verify Beta G2 + const betaG2 = await processSectionBetaG2(); + if (!curve.G2.eq(curContr.betaG2, betaG2)) { + console.log("betaG2 element in betaG2 section does not match the one in the contribution section"); + return false; + } + await fd.close(); + + + const nextContributionHash = nextContributionHasher.digest(); + + // Check the nextChallangeHash + if (!utils.hashIsEqual(nextContributionHash,curContr.nextChallange)) { + console.log("Hash of the values does not math the next challange of the last contributor in the contributions section"); + return false; + } + + if (verbose) { + console.log("Next challange hash: "); + console.log(utils.formatHash(nextContributionHash)); + } + + // Verify Previous contributions + + printContribution(curContr, prevContr); + + for (let i = contrs.length-2; i>=0; i--) { + const curContr = contrs[i]; + const prevContr = (curContr>0) ? contrs[i-1] : initialContribution; + verifyContribution(curve, curContr, prevContr); + printContribution(curContr, prevContr); + } + console.log("-----------------------------------------------------"); + return true; + + function printContribution(curContr, prevContr) { + console.log("-----------------------------------------------------"); + console.log(`Contribution #${curContr.id}:`); + console.log("\tNext Challange"); + console.log(utils.formatHash(curContr.nextChallange)); + + const buff = new ArrayBuffer(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3); + const buffV = new Uint8Array(buff); + utils.toPtauPubKeyRpr(buff, 0, curve, key, false); + + const responseHasher = Blake2b(64); + responseHasher.setPartialHash(curContr.partialHash); + responseHasher.update(buffV); + const responseHash = responseHasher.digest(); + + console.log("\tResponse Hash"); + console.log(utils.formatHash(responseHash)); + + console.log("\tBased on challange"); + console.log(utils.formatHash(prevContr.nextChallange)); + } + + async function processSectionBetaG2() { + const G = curve.G2; + const sG = G.F.n8*2; + const buffU = new ArrayBuffer(sG); + const buffUv = new Uint8Array(buffU); + + if (!sections[6]) assert(false, "File has no BetaG2 section"); + if (sections[6].length>1) assert(false, "File has more than one GetaG2 section"); + fd.pos = sections[6][0].p; + + const buff = await fd.read(sG); + const P = G.fromRprLEM(buff); + + G.toRprBE(buffU, 0, P); + nextContributionHasher.update(buffUv); + + return P; + } + + async function processSection(idSection, gName, sectionName, nPoints, singularPointIds) { + const MAX_CHUNK_SIZE = 1024; + const G = curve[gName]; + const sG = G.F.n8*2; + const buffU = new ArrayBuffer(G.F.n8*2); + const buffUv = new Uint8Array(buffU); + + const singularPoints = []; + + if (!sections[idSection]) assert(false, `File has no 
+
+    async function processSection(idSection, gName, sectionName, nPoints, singularPointIds) {
+        const MAX_CHUNK_SIZE = 1024;
+        const G = curve[gName];
+        const sG = G.F.n8*2;
+        const buffU = new ArrayBuffer(G.F.n8*2);
+        const buffUv = new Uint8Array(buffU);
+
+        const singularPoints = [];
+
+        if (!sections[idSection])  assert(false, `File has no ${sectionName} section`);
+        if (sections[idSection].length>1) assert(false, `File has more than one ${sectionName} section`);
+        fd.pos = sections[idSection][0].p;
+
+        const seed= new Array(8);
+        for (let i=0; i<8; i++) {
+            seed[i] = crypto.randomBytes(4).readUInt32BE(0, true);
+        }
+
+        const taskManager = await buildTaskManager(verifyThread, {
+            ffjavascript: "ffjavascript"
+        },{
+            curve: curve.name,
+            seed: seed
+        });
+
+        let R1 = G.zero;
+        let R2 = G.zero;
+
+        for (let i=0; i<[...]
[...]
+                [...] >=0) singularPoints.push(P);
+            }
+        }
+
+        if (fd.pos != sections[idSection][0].p + sections[idSection][0].size) assert(false, `Invalid ${sectionName} section size`);
+
+        await taskManager.finish();
+
+        return {
+            R1: R1,
+            R2: R2,
+            singularPoints: singularPoints
+        };
+    }
+
+    async function test() {
+        const NN=2;
+
+        fd.pos = sections[3][0].p + curve.G2.F.n8*2*6;
+
+        const buff = await fd.read(curve.G2.F.n8*2*NN);
+
+        const ctx= {
+            modules: {
+                ffjavascript: require("ffjavascript"),
+                assert: require("assert")
+            }
+        };
+        verifyThread(ctx, {cmd: "INIT", curve: "bn128", seed: [0,0,0,0,0,0,0,0]});
+
+        const r = verifyThread(ctx, {
+            cmd: "MUL",
+            G: "G2",
+            n: NN,
+            TotalPoints: NN,
+            buff: buff.slice(),
+            offset: 0
+        });
+
+        if (!sameRatio(curve, key.tau.g1_s, key.tau.g1_sx, r.R1, r.R2)) {
+            console.log("Test does not match");
+        } else {
+            console.log("!!!!!!TEST OK!!!!!!!");
+        }
+    }
+}
+
+
+function verifyThread(ctx, task) {
+    const pow = 16;
+    const NSet = 1<<[...]
[...]
+                [...] > 0) {
+                    const r = ctx.rndPerm(task.offset + i-1);
+                    acc2[r] = G.add(acc2[r], P);
+                }
+            }
+        reduceExp(G, acc1, pow);
+        reduceExp(G, acc2, pow);
+        return {
+            R1: acc1[0],
+            R2: acc2[0]
+        };
+    } else {
+        ctx.assert(false, "Op not implemented");
+    }
+
+    function reduceExp(G, accs, p) {
+        if (p==1) return;
+        const half = 1 << (p-1);
+
+        for (let i=0; i<[...]
[...]
+            const page = n>>4;
+            let idx = pageId.indexOf(page);
+            if (idx < 0) idx = loadPage(page);
+            return pages[page][n & 0xF] % (NSet-1);
+        };
+
+    }
+
+}
+
+
+module.exports = verify;
diff --git a/src/powersoftaw.js b/src/powersoftaw.js
index 6f65e3d..eb54a54 100644
--- a/src/powersoftaw.js
+++ b/src/powersoftaw.js
@@ -2,3 +2,5 @@
 module.exports.newAccumulator = require("./powersoftau_new");
 module.exports.exportChallange = require("./powersoftau_export");
 module.exports.contribute = require("./powersoftau_contribute");
+module.exports.importResponse = require("./powersoftau_import");
+module.exports.verify = require("./powersoftau_verify");
diff --git a/src/taskmanager.js b/src/taskmanager.js
index f01e952..2ae50af 100644
--- a/src/taskmanager.js
+++ b/src/taskmanager.js
@@ -49,8 +49,7 @@ function thread(self, fn, modules) {
 
         if (res) {
             if (res.buff) {
-                res.buff = new Uint8Array(res.buff);
-                self.postMessage(res, [res.buff.buffer]);
+                self.postMessage(res, [res.buff]);
             } else {
                 self.postMessage(res);
             }
@@ -120,10 +119,6 @@ async function buildTaskManager(fn, mods, initTask) {
                 return;
             }
 
-            if (data.buff) {
-                data.buff = Buffer.from(data.buff);
-            }
-
             if (tm.workers[i].asyncCb) {
                 tm.workers[i].asyncCb(data).then(()=> {
                     finishTask();
@@ -140,12 +135,10 @@ async function buildTaskManager(fn, mods, initTask) {
         tm.workers[i].state = "WORKING";
         if (task.buff) {
-            task.buff = new Uint8Array(task.buff);
-            tm.workers[i].worker.postMessage(task, [task.buff.buffer]);
+            tm.workers[i].worker.postMessage(task, [task.buff]);
         } else {
             tm.workers[i].worker.postMessage(task);
         }
-
     }
 
     for (let i=0; i<[...]
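
With the new commands wired into cli.js, the coordinator-side flow can be exercised end to end; the file names below are illustrative:

    # fold a participant's response back into the accumulator (alias: pti)
    node cli.js powersoftaw import pot12.ptau response_0001 pot12_0001.ptau -v

    # re-check every contribution and all power sequences (alias: ptv)
    node cli.js powersoftaw verify pot12_0001.ptau

The taskmanager.js change is a companion optimization: task buffers are now posted to worker threads as transferable ArrayBuffers instead of being copied through Uint8Array/Buffer conversions on every hop, which matters once verification starts shipping chunks of points to the verifyThread workers.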