diff --git a/cli.js b/cli.js
index a7ee5bc..2b6a50f 100755
--- a/cli.js
+++ b/cli.js
@@ -133,7 +133,7 @@ const commands = [
         cmd: "powersoftau import <",
         description: "import a response to a ptau file",
         alias: ["pti"],
-        options: "-verbose|v -nopoints -nocheck -description|d -name|n",
+        options: "-verbose|v -nopoints -nocheck -name|n",
         action: powersOfTawImport
     },
     {
@@ -204,7 +204,7 @@ const commands = [
         cmd: "zkey contribute ",
         description: "creates a zkey file with a new contribution",
         alias: ["zkc"],
-        options: "-verbose|v",
+        options: "-verbose|v -entropy|e -name|n",
         action: zkeyContribute
     },
     {
@@ -772,7 +772,7 @@ async function zkeyNew(params, options) {
         zkeyName = params[2];
     }

-    return phase2.new(r1csName, ptauName, zkeyName, options.verbose);
+    return zkey.new(r1csName, ptauName, zkeyName, options.verbose);
 }

 // zkey export bellman [circuit.zkey] [circuit.mpcparams]
@@ -792,7 +792,7 @@ async function zkeyExportBellman(params, options) {
         mpcparamsName = params[1];
     }

-    return phase2.exportMPCParams(zkeyName, mpcparamsName, options.verbose);
+    return zkey.exportMPCParams(zkeyName, mpcparamsName, options.verbose);
 }

@@ -846,8 +846,7 @@ async function zkeyVerify(params, options) {
 }

-// phase2 contribute
-
+// zkey contribute
 async function zkeyContribute(params, options) {
     let zkeyOldName;
     let zkeyNewName;
@@ -856,5 +855,5 @@ async function zkeyContribute(params, options) {
     zkeyNewName = params[1];

-    return phase2.contribute(zkeyOldName, zkeyNewName, options.verbose);
+    return zkey.contribute(zkeyOldName, zkeyNewName, options.name, options.entropy, options.verbose);
 }
diff --git a/src/keypair.js b/src/keypair.js
index 659c33a..d54553b 100644
--- a/src/keypair.js
+++ b/src/keypair.js
@@ -7,7 +7,7 @@ const blake2b = require("blake2b");
 const ChaCha = require("ffjavascript").ChaCha;

 function hashToG2(hash) {
-    const hashV = new DataView(hash.buffer);
+    const hashV = new DataView(hash.buffer, hash.byteOffset, hash.byteLength);
     const seed = [];
     for (let i=0; i<8; i++) {
         seed[i] = hashV.getUint32(i*4);
@@ -57,6 +57,17 @@ function createPTauKey(curve, challangeHash, rng) {
     return key;
 }

+function createDeltaKey(curve, transcript, rng) {
+    const delta = {};
+    delta.prvKey = curve.Fr.fromRng(rng);
+    delta.g1_s = curve.G1.affine(curve.G1.fromRng(rng));
+    delta.g1_sx = curve.G1.affine(curve.G1.mulScalar(delta.g1_s, delta.prvKey));
+    delta.g2_sp = hashToG2(transcript);
+    delta.g2_spx = curve.G2.affine(curve.G2.mulScalar(delta.g2_sp, delta.prvKey));
+    return delta;
+}
+
 module.exports.createPTauKey = createPTauKey;
 module.exports.getG2sp = getG2sp;
 module.exports.hashToG2 = hashToG2;
+module.exports.createDeltaKey =createDeltaKey;
diff --git a/src/misc.js b/src/misc.js
index cdeccab..cb26c65 100644
--- a/src/misc.js
+++ b/src/misc.js
@@ -1,4 +1,7 @@
 const Blake2b = require("blake2b-wasm");
+const readline = require("readline");
+const ChaCha = require("ffjavascript").ChaCha;
+const crypto = require("crypto");

 const _revTable = [];
 for (let i=0; i<256; i++) {
@@ -74,9 +77,42 @@ async function sameRatio(curve, g1s, g1sx, g2s, g2sx) {
     return res;
 }

+
+
+const rl = readline.createInterface({
+    input: process.stdin,
+    output: process.stdout
+});
+
+function askEntropy() {
+    return new Promise((resolve) => {
+        rl.question("Enter a random text. (Entropy): ", (input) => resolve(input) );
+    });
+}
+
+async function getRandomRng(entropy) {
+    // Generate a random key
+    while (!entropy) {
+        entropy = await askEntropy();
+    }
+    const hasher = Blake2b(64);
+    hasher.update(crypto.randomBytes(64));
+    const enc = new TextEncoder(); // always utf-8
+    hasher.update(enc.encode(entropy));
+    const hash = Buffer.from(hasher.digest());
+
+    const seed = [];
+    for (let i=0;i<8;i++) {
+        seed[i] = hash.readUInt32BE(i*4);
+    }
+    const rng = new ChaCha(seed);
+    return rng;
+}
+
 module.exports.bitReverse = bitReverse;
 module.exports.log2 = log2;
 module.exports.formatHash = formatHash;
 module.exports.hashIsEqual = hashIsEqual;
 module.exports.cloneHasher = cloneHasher;
 module.exports.sameRatio = sameRatio;
+module.exports.getRandomRng = getRandomRng;
diff --git a/src/mpc_applykey.js b/src/mpc_applykey.js
index 859f55d..3fa5065 100644
--- a/src/mpc_applykey.js
+++ b/src/mpc_applykey.js
@@ -1,5 +1,6 @@
 const buildTaskManager = require("./taskmanager");
+const binFileUtils = require("./binfileutils");

 /*
     This function creates a new section in the fdTo file with id idSection.
@@ -8,116 +9,36 @@
     It also updates the newChallangeHasher with the new points
 */
-async function applyKey(params) {
-    const {
-        fdFrom,
-        sections,
-        curve,
-        fdTo,
-        sectionId,
-        NPoints,
-        G:Gs,
-        first,
-        inc,
-        newChallangeHasher,
-        responseHasher,
-        returnPoints,
-        sectionName,
-        verbose
-    } = params;
-    const G = curve[Gs];
-    const MAX_CHUNK_SIZE = 1024;
-
-    let res = [];
+async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, groupName, first, inc, sectionName, verbose) {
+    const MAX_CHUNK_SIZE = 1 << 16;
+    const G = curve[groupName];
     const sG = G.F.n8*2;
-    const buffUv = new Uint8Array(sG);
-    const scG = G.F.n8;
-    const buffCv = new Uint8Array(scG);
+    const nPoints = sections[idSection][0].size / sG;

-    const taskManager = await buildTaskManager(contributeThread, {
-        ffjavascript: "ffjavascript"
-    },{
-        curve: curve.name
-    });
+    await binFileUtils.startReadUniqueSection(fdOld, sections,idSection );
+    await binFileUtils.startWriteSection(fdNew, idSection);

-    fdFrom.pos = sections[sectionId][0].p;
-    await fdTo.writeULE32(sectionId); // tauG1
-    const pSection = fdTo.pos;
-    await fdTo.writeULE64(0); // Temporally set to 0 length
     let t = first;
-    let writePointer = fdTo.pos;
-    let beginWritePointer = fdTo.pos;
-    for (let i=0; i< NPoints; i+=MAX_CHUNK_SIZE) {
-        if ((verbose)&&i) console.log(`${sectionName}: ` + i);
-        const n = Math.min(NPoints - i, MAX_CHUNK_SIZE);
-        const buff = await fdFrom.read(n*sG);
-        await taskManager.addTask({
-            cmd: "MUL",
-            G: Gs,
-            first: t,
-            inc: inc.toString(),
-            buff: buff.slice(),
-            n: n,
-            writePos: writePointer
-        }, async function(r) {
-            return await fdTo.write(r.buff, r.writePos);
-        });
+    for (let i=0; i=0) res[idx] = P;
-    }
+}
+
+async function applyKeyToBinFile(fdOld, fdNew, curve, groupName, nPoints, first, inc, sectionName, verbose) {

-    return res;
 }

-function contributeThread(ctx, task) {
-    if (task.cmd == "INIT") {
-        ctx.assert = ctx.modules.assert;
-        if (task.curve == "bn128") {
-            ctx.curve = ctx.modules.ffjavascript.bn128;
-        } else {
-            ctx.assert(false, "curve not defined");
-        }
-        return {};
-    } else if (task.cmd == "MUL") {
-        const G = ctx.curve[task.G];
-        const sG = G.F.n64*8*2;
-        const buffDest = new Uint8Array(sG*task.n);
-        let t = ctx.curve.Fr.e(task.first);
-        let inc = ctx.curve.Fr.e(task.inc);
-        for (let i=0; i0) {
+        const paramsBuff = new Uint8Array(params);
+        await fd.writeULE32(paramsBuff.byteLength);
+        await fd.write(paramsBuff);
+    } else {
+        await fd.writeULE32(0);
+    }
+}

 async function writeMPCParams(fd, curve, mpcParams) {
@@ -382,9 +434,33 @@
     await binFileUtils.endWriteSection(fd);
 }

+function hashG1(hasher, curve, p) {
+    const buff = new Uint8Array(curve.G1.F.n8*2);
+    curve.G1.toRprUncompressed(buff, 0, p);
+    hasher.update(buff);
+}
+
+function hashG2(hasher,curve, p) {
+    const buff = new Uint8Array(curve.G2.F.n8*2);
+    curve.G2.toRprUncompressed(buff, 0, p);
+    hasher.update(buff);
+}
+
+function hashPubKey(hasher, curve, c) {
+    hashG1(hasher, curve, c.deltaAfter);
+    hashG1(hasher, curve, c.delta.g1_s);
+    hashG1(hasher, curve, c.delta.g1_sx);
+    hashG2(hasher, curve, c.delta.g2_spx);
+    hasher.update(c.transcript);
+}
+
+
 module.exports.readHeader = readHeader;
 module.exports.writeHeader = writeHeader;
 module.exports.read = readZKey;
 module.exports.write = writeZKey;
 module.exports.readMPCParams = readMPCParams;
 module.exports.writeMPCParams = writeMPCParams;
+module.exports.hashG1 = hashG1;
+module.exports.hashG2 = hashG2;
+module.exports.hashPubKey = hashPubKey;
diff --git a/src/zkey_verify.js b/src/zkey_verify.js
index 193b2aa..038d527 100644
--- a/src/zkey_verify.js
+++ b/src/zkey_verify.js
@@ -9,6 +9,7 @@ const sameRatio = misc.sameRatio;
 const crypto = require("crypto");
 const ChaCha = require("ffjavascript").ChaCha;
 const newZKey = require("./zkey_new");
+const {hashG1, hashPubKey} = require("./zkey_utils");

 module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFileName, verbose) {
@@ -34,8 +35,8 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
         const c = mpcParams.contributions[i];
         const ourHasher = misc.cloneHasher(accumulatedHasher);

-        hashG1(ourHasher, c.delta.g1_s);
-        hashG1(ourHasher, c.delta.g1_sx);
+        hashG1(ourHasher, curve, c.delta.g1_s);
+        hashG1(ourHasher, curve, c.delta.g1_sx);

         if (!misc.hashIsEqual(ourHasher.digest(), c.transcript)) {
             console.log(`INVALID(${i}): Inconsistent transcript `);
@@ -56,10 +57,10 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
             return false;
         }

-        hashPubKey(accumulatedHasher, c);
+        hashPubKey(accumulatedHasher, curve, c);

         const contributionHasher = Blake2b(64);
-        hashPubKey(contributionHasher, c);
+        hashPubKey(contributionHasher, curve, c);

         responses.push(contributionHasher.digest());

         curDelta = c.deltaAfter;
@@ -181,27 +182,6 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi

     return true;

-    function hashG1(hasher, p) {
-        const buff = new Uint8Array(sG1);
-        curve.G1.toRprUncompressed(buff, 0, p);
-        hasher.update(buff);
-    }
-
-    function hashG2(hasher, p) {
-        const buff = new Uint8Array(sG2);
-        curve.G2.toRprUncompressed(buff, 0, p);
-        hasher.update(buff);
-    }
-
-    function hashPubKey(hasher, c) {
-        hashG1(hasher, c.deltaAfter);
-        hashG1(hasher, c.delta.g1_s);
-        hashG1(hasher, c.delta.g1_sx);
-        hashG2(hasher, c.delta.g2_spx);
-        hasher.update(c.transcript);
-    }
-
-
     async function sectionHasSameRatio(groupName, fd1, sections1, fd2, sections2, idSection, g2sp, g2spx, sectionName) {
         const MAX_CHUNK_SIZE = 1<<20;
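Note (illustrative, not part of the patch above): a minimal sketch of how the helpers introduced in this diff could be wired together for a phase-2 contribution. The require paths, the curve argument (for example the bn128 object from ffjavascript that the removed worker code referenced), and the placeholder transcript are assumptions for this sketch only.

const Blake2b = require("blake2b-wasm");
const misc = require("./src/misc");
const keypair = require("./src/keypair");

async function exampleDeltaKey(curve, entropy) {
    await Blake2b.ready();                        // blake2b-wasm must be initialised before hashing
    const rng = await misc.getRandomRng(entropy); // ChaCha rng seeded from the entropy text plus crypto.randomBytes
    const transcript = Blake2b(64).digest();      // placeholder 64-byte hash standing in for the real contribution transcript
    // createDeltaKey (added in src/keypair.js) returns {prvKey, g1_s, g1_sx, g2_sp, g2_spx}
    return keypair.createDeltaKey(curve, transcript, rng);
}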