Contribution phase2

Jordi Baylina 2020-06-16 16:45:32 +02:00
parent 2e9ce5efe6
commit 3ba1341404
8 changed files with 241 additions and 135 deletions

cli.js

@@ -133,7 +133,7 @@ const commands = [
         cmd: "powersoftau import <powersoftau_old.ptau> <response> <<powersoftau_new.ptau>",
         description: "import a response to a ptau file",
         alias: ["pti"],
-        options: "-verbose|v -nopoints -nocheck -description|d -name|n",
+        options: "-verbose|v -nopoints -nocheck -name|n",
         action: powersOfTawImport
     },
     {
@@ -204,7 +204,7 @@ const commands = [
         cmd: "zkey contribute <circuit_old.zkey> <circuit_new.zkey>",
         description: "creates a zkey file with a new contribution",
         alias: ["zkc"],
-        options: "-verbose|v",
+        options: "-verbose|v -entropy|e -name|n",
         action: zkeyContribute
     },
     {
@@ -772,7 +772,7 @@ async function zkeyNew(params, options) {
         zkeyName = params[2];
     }
 
-    return phase2.new(r1csName, ptauName, zkeyName, options.verbose);
+    return zkey.new(r1csName, ptauName, zkeyName, options.verbose);
 }
 
 // zkey export bellman [circuit.zkey] [circuit.mpcparams]
@@ -792,7 +792,7 @@ async function zkeyExportBellman(params, options) {
         mpcparamsName = params[1];
     }
 
-    return phase2.exportMPCParams(zkeyName, mpcparamsName, options.verbose);
+    return zkey.exportMPCParams(zkeyName, mpcparamsName, options.verbose);
 }
@@ -846,8 +846,7 @@ async function zkeyVerify(params, options) {
 }
 
-// phase2 contribute <circuit_old.zkey> <circuit_new.zkey>
+// zkey contribute <circuit_old.zkey> <circuit_new.zkey>
 async function zkeyContribute(params, options) {
     let zkeyOldName;
     let zkeyNewName;
@@ -856,5 +855,5 @@ async function zkeyContribute(params, options) {
     zkeyNewName = params[1];
 
-    return phase2.contribute(zkeyOldName, zkeyNewName, options.verbose);
+    return zkey.contribute(zkeyOldName, zkeyNewName, options.name, options.entropy, options.verbose);
 }
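With these changes a phase-2 contribution can be made straight from the command line, along the lines of `snarkjs zkey contribute circuit_0000.zkey circuit_0001.zkey -name="First contribution" -e="some random text"` (hypothetical file names): `-entropy|e` skips the interactive entropy prompt and `-name|n` labels the contribution in the MPC transcript.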

src/keypair.js

@@ -7,7 +7,7 @@ const blake2b = require("blake2b");
 const ChaCha = require("ffjavascript").ChaCha;
 
 function hashToG2(hash) {
-    const hashV = new DataView(hash.buffer);
+    const hashV = new DataView(hash.buffer, hash.byteOffset, hash.byteLength);
     const seed = [];
     for (let i=0; i<8; i++) {
         seed[i] = hashV.getUint32(i*4);
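The `DataView` fix above addresses an aliasing pitfall: `hash.buffer` is the *underlying* `ArrayBuffer`, so when `hash` is a typed-array view that does not start at byte 0 (as buffers returned by file reads or digest slices can be), the old code seeded the RNG from the wrong bytes. A minimal sketch of the difference, with hypothetical values:

```js
const backing = new Uint8Array(16);   // 16-byte backing buffer
const hash = backing.subarray(8, 12); // 4-byte view starting at byte 8
backing[8] = 0xAB;

// Old behaviour: reads byte 0 of the *backing* buffer, not of `hash`.
new DataView(hash.buffer).getUint8(0);                                   // 0x00
// Fixed behaviour: honours the view's offset and length.
new DataView(hash.buffer, hash.byteOffset, hash.byteLength).getUint8(0); // 0xAB
```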
@@ -57,6 +57,17 @@ function createPTauKey(curve, challangeHash, rng) {
     return key;
 }
 
+function createDeltaKey(curve, transcript, rng) {
+    const delta = {};
+    delta.prvKey = curve.Fr.fromRng(rng);
+    delta.g1_s = curve.G1.affine(curve.G1.fromRng(rng));
+    delta.g1_sx = curve.G1.affine(curve.G1.mulScalar(delta.g1_s, delta.prvKey));
+    delta.g2_sp = hashToG2(transcript);
+    delta.g2_spx = curve.G2.affine(curve.G2.mulScalar(delta.g2_sp, delta.prvKey));
+    return delta;
+}
+
 module.exports.createPTauKey = createPTauKey;
 module.exports.getG2sp = getG2sp;
+module.exports.hashToG2 = hashToG2;
+module.exports.createDeltaKey = createDeltaKey;
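`createDeltaKey` builds the standard MPC proof-of-knowledge pair: `g1_s`/`g1_sx` share the secret exponent `prvKey`, and `g2_sp`/`g2_spx` share that same exponent over a transcript-derived G2 point, so anyone can verify knowledge of `prvKey` without learning it. A hedged sketch of that check, reusing `sameRatio` and `getRandomRng` from `./misc` (added later in this commit); `curve` stands in for a loaded ffjavascript curve object:

```js
const misc = require("./misc");
const { createDeltaKey } = require("./keypair");

async function checkDeltaKey(curve, transcript) {
    const rng = await misc.getRandomRng("illustration only");
    const key = createDeltaKey(curve, transcript, rng);
    // Both pairs share the exponent key.prvKey, so the pairing ratios match:
    // e(g1_sx, g2_sp) == e(g1_s, g2_spx).
    return await misc.sameRatio(curve, key.g1_s, key.g1_sx, key.g2_sp, key.g2_spx);
}
```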

src/misc.js

@@ -1,4 +1,7 @@
 const Blake2b = require("blake2b-wasm");
+const readline = require("readline");
+const ChaCha = require("ffjavascript").ChaCha;
+const crypto = require("crypto");
 
 const _revTable = [];
 for (let i=0; i<256; i++) {
@@ -74,9 +77,42 @@ async function sameRatio(curve, g1s, g1sx, g2s, g2sx) {
     return res;
 }
 
+const rl = readline.createInterface({
+    input: process.stdin,
+    output: process.stdout
+});
+
+function askEntropy() {
+    return new Promise((resolve) => {
+        rl.question("Enter a random text. (Entropy): ", (input) => resolve(input));
+    });
+}
+
+async function getRandomRng(entropy) {
+    // Generate a random key
+    while (!entropy) {
+        entropy = await askEntropy();
+    }
+    const hasher = Blake2b(64);
+    hasher.update(crypto.randomBytes(64));
+    const enc = new TextEncoder(); // always utf-8
+    hasher.update(enc.encode(entropy));
+    const hash = Buffer.from(hasher.digest());
+    const seed = [];
+    for (let i=0; i<8; i++) {
+        seed[i] = hash.readUInt32BE(i*4);
+    }
+    const rng = new ChaCha(seed);
+    return rng;
+}
+
 module.exports.bitReverse = bitReverse;
 module.exports.log2 = log2;
 module.exports.formatHash = formatHash;
 module.exports.hashIsEqual = hashIsEqual;
 module.exports.cloneHasher = cloneHasher;
 module.exports.sameRatio = sameRatio;
+module.exports.getRandomRng = getRandomRng;
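`getRandomRng` hashes 64 bytes of OS randomness together with the typed entropy, so the ChaCha stream stays unpredictable even if the typed text is weak; the first 32 bytes of the Blake2b digest become the eight big-endian 32-bit seed words. A small usage sketch (hypothetical entropy string; assumes `Blake2b.ready()` has already resolved, as `zkey_contribute` ensures before calling it):

```js
const misc = require("./misc");

async function sampleToxicValue(curve) {
    // A falsy argument would fall back to the interactive stdin prompt.
    const rng = await misc.getRandomRng("correct horse battery staple");
    return curve.Fr.fromRng(rng); // deterministic draw from the seeded stream
}
```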

src/mpc_applykey.js

@@ -1,5 +1,6 @@
 const buildTaskManager = require("./taskmanager");
+const binFileUtils = require("./binfileutils");
 
 /*
     This function creates a new section in the fdTo file with id idSection.
@@ -8,116 +9,36 @@ const buildTaskManager = require("./taskmanager");
     It also updates the newChallangeHasher with the new points
 */
-async function applyKey(params) {
-    const {
-        fdFrom,
-        sections,
-        curve,
-        fdTo,
-        sectionId,
-        NPoints,
-        G: Gs,
-        first,
-        inc,
-        newChallangeHasher,
-        responseHasher,
-        returnPoints,
-        sectionName,
-        verbose
-    } = params;
-    const G = curve[Gs];
-    const MAX_CHUNK_SIZE = 1024;
-
-    let res = [];
+async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, groupName, first, inc, sectionName, verbose) {
+    const MAX_CHUNK_SIZE = 1 << 16;
+    const G = curve[groupName];
     const sG = G.F.n8*2;
-    const buffUv = new Uint8Array(sG);
-    const scG = G.F.n8;
-    const buffCv = new Uint8Array(scG);
+    const nPoints = sections[idSection][0].size / sG;
 
-    const taskManager = await buildTaskManager(contributeThread, {
-        ffjavascript: "ffjavascript"
-    }, {
-        curve: curve.name
-    });
+    await binFileUtils.startReadUniqueSection(fdOld, sections, idSection);
+    await binFileUtils.startWriteSection(fdNew, idSection);
 
-    fdFrom.pos = sections[sectionId][0].p;
-    await fdTo.writeULE32(sectionId); // tauG1
-    const pSection = fdTo.pos;
-    await fdTo.writeULE64(0); // Temporally set to 0 length
     let t = first;
-    let writePointer = fdTo.pos;
-    let beginWritePointer = fdTo.pos;
-    for (let i=0; i< NPoints; i+=MAX_CHUNK_SIZE) {
-        if ((verbose)&&i) console.log(`${sectionName}: ` + i);
-        const n = Math.min(NPoints - i, MAX_CHUNK_SIZE);
-        const buff = await fdFrom.read(n*sG);
-        await taskManager.addTask({
-            cmd: "MUL",
-            G: Gs,
-            first: t,
-            inc: inc.toString(),
-            buff: buff.slice(),
-            n: n,
-            writePos: writePointer
-        }, async function(r) {
-            return await fdTo.write(r.buff, r.writePos);
-        });
+    for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
+        if (verbose) console.log(`Applying key: ${sectionName}: ${i}/${nPoints}`);
+        const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
+        let buff;
+        buff = await fdOld.read(n*sG);
+        buff = await G.batchApplyKey(buff, t, inc);
+        await fdNew.write(buff);
         t = curve.Fr.mul(t, curve.Fr.pow(inc, n));
-        writePointer += n*sG;
     }
 
-    await taskManager.finish();
+    await binFileUtils.endWriteSection(fdNew);
+    await binFileUtils.endReadSection(fdOld);
-
-    const sSize = fdTo.pos - pSection - 8;
-    const lastPos = fdTo.pos;
-    await fdTo.writeULE64(sSize, pSection);
-    fdTo.pos = lastPos;
-
-    fdTo.pos = beginWritePointer;
-    for (let i=0; i<NPoints; i++) {
-        const buff = await fdTo.read(sG);
-        const P = G.fromRprLEM(buff, 0);
-        G.toRprBE(buffUv, 0, P);
-        newChallangeHasher.update(buffUv);
-        G.toRprCompressed(buffCv, 0, P);
-        responseHasher.update(buffCv);
-        const idx = returnPoints.indexOf(i);
-        if (idx>=0) res[idx] = P;
-    }
-
-    return res;
 }
+
+async function applyKeyToBinFile(fdOld, fdNew, curve, groupName, nPoints, first, inc, sectionName, verbose) {
+
+}
 
-function contributeThread(ctx, task) {
-    if (task.cmd == "INIT") {
-        ctx.assert = ctx.modules.assert;
-        if (task.curve == "bn128") {
-            ctx.curve = ctx.modules.ffjavascript.bn128;
-        } else {
-            ctx.assert(false, "curve not defined");
-        }
-        return {};
-    } else if (task.cmd == "MUL") {
-        const G = ctx.curve[task.G];
-        const sG = G.F.n64*8*2;
-        const buffDest = new Uint8Array(sG*task.n);
-        let t = ctx.curve.Fr.e(task.first);
-        let inc = ctx.curve.Fr.e(task.inc);
-        for (let i=0; i<task.n; i++) {
-            const P = G.fromRprLEM(task.buff, i*sG);
-            const R = G.mulScalar(P, t);
-            G.toRprLEM(buffDest, i*sG, R); // Main thread will convert it to Montgomery
-            t = ctx.curve.Fr.mul(t, inc);
-        }
-        return {
-            buff: buffDest,
-            writePos: task.writePos
-        };
-    } else {
-        ctx.assert(false, "Op not implemented");
-    }
-}
-
-module.exports = applyKey;
+module.exports.applyKeyToBinFile = applyKeyToBinFile;
+module.exports.applyKeyToSection = applyKeyToSection;
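`applyKeyToSection` streams a section in 2^16-point chunks and rescales point *i* by `first * inc^i`; `G.batchApplyKey` performs each chunk in one native call, and the running factor `t` advances by `inc^n` per chunk. A scalar-by-scalar sketch of the same transformation (illustration only; the real code operates on packed point buffers):

```js
// Computes P'_i = [first * inc^i] P_i with a running factor t.
function applyKeyNaive(curve, G, points, first, inc) {
    const out = [];
    let t = first;
    for (const P of points) {
        out.push(G.mulScalar(P, t)); // the scaling batchApplyKey applies in bulk
        t = curve.Fr.mul(t, inc);    // advance to the next power of inc
    }
    return out;
}
```

In this commit `zkey_contribute` calls it with `first = invDelta` and `inc = 1`, so every point in the L and H sections is uniformly divided by the new delta share.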

src/zkey_contribute.js

@@ -1,4 +1,87 @@
-module.exports = async function phase2contribute(zkeyNameOld, zkeyNameNew, verbose) {
+const binFileUtils = require("./binfileutils");
+const zkeyUtils = require("./zkey_utils");
+const getCurve = require("./curves").getCurveFromQ;
+const misc = require("./misc");
+const Blake2b = require("blake2b-wasm");
+const utils = require("./zkey_utils");
+const hashToG2 = require("./keypair").hashToG2;
+const {applyKeyToSection} = require("./mpc_applykey");
+
+module.exports = async function phase2contribute(zkeyNameOld, zkeyNameNew, name, entropy, verbose) {
+    await Blake2b.ready();
+
+    const {fd: fdOld, sections: sections} = await binFileUtils.readBinFile(zkeyNameOld, "zkey", 2);
+    const zkey = await zkeyUtils.readHeader(fdOld, sections, "groth16");
+    const curve = getCurve(zkey.q);
+    await curve.loadEngine();
+
+    const mpcParams = await zkeyUtils.readMPCParams(fdOld, curve, sections);
+
+    const fdNew = await binFileUtils.createBinFile(zkeyNameNew, "zkey", 1, 10);
+
+    const curContribution = {};
+
+    const rng = await misc.getRandomRng(entropy);
+
+    const transcriptHasher = Blake2b(64);
+    for (let i=0; i<mpcParams.contributions.length; i++) {
+        utils.hashPubKey(transcriptHasher, curve, mpcParams.contributions[i]);
+    }
+
+    curContribution.delta = {};
+    curContribution.delta.prvKey = curve.Fr.fromRng(rng);
+    curContribution.delta.g1_s = curve.G1.affine(curve.G1.fromRng(rng));
+    curContribution.delta.g1_sx = curve.G1.affine(curve.G1.mulScalar(curContribution.delta.g1_s, curContribution.delta.prvKey));
+    utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_s);
+    utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_sx);
+    curContribution.transcript = transcriptHasher.digest();
+    curContribution.delta.g2_sp = hashToG2(curContribution.transcript);
+    curContribution.delta.g2_spx = curve.G2.affine(curve.G2.mulScalar(curContribution.delta.g2_sp, curContribution.delta.prvKey));
+
+    zkey.vk_delta_1 = curve.G1.mulScalar(zkey.vk_delta_1, curContribution.delta.prvKey);
+    zkey.vk_delta_2 = curve.G2.mulScalar(zkey.vk_delta_2, curContribution.delta.prvKey);
+    curContribution.deltaAfter = zkey.vk_delta_1;
+    mpcParams.contributions.push(curContribution);
+
+    curContribution.type = 0;
+    if (name) curContribution.name = name;
+
+    await zkeyUtils.writeHeader(fdNew, zkey);
+
+    // IC
+    await binFileUtils.copySection(fdOld, sections, fdNew, 3);
+
+    // Coeffs (Keep original)
+    await binFileUtils.copySection(fdOld, sections, fdNew, 4);
+
+    // A Section
+    await binFileUtils.copySection(fdOld, sections, fdNew, 5);
+
+    // B1 Section
+    await binFileUtils.copySection(fdOld, sections, fdNew, 6);
+
+    // B2 Section
+    await binFileUtils.copySection(fdOld, sections, fdNew, 7);
+
+    const invDelta = curve.Fr.inv(curContribution.delta.prvKey);
+    await applyKeyToSection(fdOld, sections, fdNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", verbose);
+    await applyKeyToSection(fdOld, sections, fdNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", verbose);
+
+    await zkeyUtils.writeMPCParams(fdNew, curve, mpcParams);
+
+    await fdOld.close();
+    await fdNew.close();
+
+    const contributionHasher = Blake2b(64);
+    utils.hashPubKey(contributionHasher, curve, curContribution);
+    const contributionHash = contributionHasher.digest();
+
+    console.log("Contribution Hash: ");
+    console.log(misc.formatHash(contributionHash));
+
+    return true;
 };
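The two `applyKeyToSection` calls are what keep the Groth16 key consistent: the zkey stores the L and H points already divided by δ, so when a contribution multiplies δ by its secret d (`curContribution.delta.prvKey`), those sections must be scaled by d⁻¹ (`invDelta`). In symbols:

```latex
\delta' = d\,\delta, \qquad
\Big[\tfrac{L_i}{\delta'}\Big]_1 = d^{-1}\Big[\tfrac{L_i}{\delta}\Big]_1, \qquad
\Big[\tfrac{H_i}{\delta'}\Big]_1 = d^{-1}\Big[\tfrac{H_i}{\delta}\Big]_1 .
```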

src/zkey_new.js

@@ -395,7 +395,7 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
     async function hashHPoints() {
-        const CHUNK_SIZE = 1<<20;
+        const CHUNK_SIZE = 1<<16;
 
         hashU32(domainSize-1);

src/zkey_utils.js

@@ -346,6 +346,33 @@ async function readContribution(fd, curve) {
     c.delta.g1_sx = await readG1(fd, curve);
     c.delta.g2_spx = await readG2(fd, curve);
     c.transcript = await fd.read(64);
+    c.type = await fd.readULE32();
+
+    const paramLength = await fd.readULE32();
+    const curPos = fd.pos;
+    let lastType = 0;
+    while (fd.pos-curPos < paramLength) {
+        const buffType = await fd.read(1);
+        if (buffType[0] <= lastType) throw new Error("Parameters in the contribution must be sorted");
+        lastType = buffType[0];
+        if (buffType[0]==1) {     // Name
+            const buffLen = await fd.read(1);
+            const buffStr = await fd.read(buffLen[0]);
+            c.name = new TextDecoder().decode(buffStr);
+        } else if (buffType[0]==2) {
+            const buffExp = await fd.read(1);
+            c.numIterationsExp = buffExp[0];
+        } else if (buffType[0]==3) {
+            const buffLen = await fd.read(1);
+            c.beaconHash = await fd.read(buffLen[0]);
+        } else {
+            throw new Error("Parameter not recognized");
+        }
+    }
+    if (fd.pos != curPos + paramLength) {
+        throw new Error("Parameters do not match");
+    }
+
     return c;
 }
@@ -370,6 +397,31 @@ async function writeContribution(fd, curve, c) {
     await writeG1(fd, curve, c.delta.g1_sx);
     await writeG2(fd, curve, c.delta.g2_spx);
     await fd.write(c.transcript);
+    await fd.writeULE32(c.type || 0);
+
+    const params = [];
+    if (c.name) {
+        params.push(1);      // Param Name
+        const nameData = new TextEncoder("utf-8").encode(c.name.substring(0,64));
+        params.push(nameData.byteLength);
+        for (let i=0; i<nameData.byteLength; i++) params.push(nameData[i]);
+    }
+    if (c.type == 1) {
+        params.push(2);      // Param numIterationsExp
+        params.push(c.numIterationsExp);
+        params.push(3);      // Beacon Hash
+        params.push(c.beaconHash.byteLength);
+        for (let i=0; i<c.beaconHash.byteLength; i++) params.push(c.beaconHash[i]);
+    }
+    if (params.length>0) {
+        const paramsBuff = new Uint8Array(params);
+        await fd.writeULE32(paramsBuff.byteLength);
+        await fd.write(paramsBuff);
+    } else {
+        await fd.writeULE32(0);
+    }
 }
 
 async function writeMPCParams(fd, curve, mpcParams) {
@@ -382,9 +434,33 @@ async function writeMPCParams(fd, curve, mpcParams) {
     await binFileUtils.endWriteSection(fd);
 }
 
+function hashG1(hasher, curve, p) {
+    const buff = new Uint8Array(curve.G1.F.n8*2);
+    curve.G1.toRprUncompressed(buff, 0, p);
+    hasher.update(buff);
+}
+
+function hashG2(hasher, curve, p) {
+    const buff = new Uint8Array(curve.G2.F.n8*2);
+    curve.G2.toRprUncompressed(buff, 0, p);
+    hasher.update(buff);
+}
+
+function hashPubKey(hasher, curve, c) {
+    hashG1(hasher, curve, c.deltaAfter);
+    hashG1(hasher, curve, c.delta.g1_s);
+    hashG1(hasher, curve, c.delta.g1_sx);
+    hashG2(hasher, curve, c.delta.g2_spx);
+    hasher.update(c.transcript);
+}
+
 module.exports.readHeader = readHeader;
 module.exports.writeHeader = writeHeader;
 module.exports.read = readZKey;
 module.exports.write = writeZKey;
 module.exports.readMPCParams = readMPCParams;
 module.exports.writeMPCParams = writeMPCParams;
+module.exports.hashG1 = hashG1;
+module.exports.hashG2 = hashG2;
+module.exports.hashPubKey = hashPubKey;
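The read/write pair above gives every contribution record an extensible trailer. A sketch of the layout as inferred from `readContribution` and `writeContribution` (entry types must appear in ascending order):

```js
// After the fixed fields (deltaAfter, g1_s, g1_sx, g2_spx, 64-byte transcript):
//
//   uint32LE  type          // 0 = normal contribution, 1 = random beacon
//   uint32LE  paramLength   // total byte length of the TLV entries below
//   TLV entries, sorted by ascending type byte:
//     0x01  len  name[len]        // contributor name, utf-8, truncated to 64 bytes
//     0x02  numIterationsExp      // beacon only: a single byte
//     0x03  len  beaconHash[len]  // beacon only
```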

src/zkey_verify.js

@@ -9,6 +9,7 @@ const sameRatio = misc.sameRatio;
 const crypto = require("crypto");
 const ChaCha = require("ffjavascript").ChaCha;
 const newZKey = require("./zkey_new");
+const {hashG1, hashPubKey} = require("./zkey_utils");
 
 module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFileName, verbose) {
@@ -34,8 +35,8 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
         const c = mpcParams.contributions[i];
         const ourHasher = misc.cloneHasher(accumulatedHasher);
 
-        hashG1(ourHasher, c.delta.g1_s);
-        hashG1(ourHasher, c.delta.g1_sx);
+        hashG1(ourHasher, curve, c.delta.g1_s);
+        hashG1(ourHasher, curve, c.delta.g1_sx);
 
         if (!misc.hashIsEqual(ourHasher.digest(), c.transcript)) {
             console.log(`INVALID(${i}): Inconsistent transcript `);
@@ -56,10 +57,10 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
             return false;
         }
 
-        hashPubKey(accumulatedHasher, c);
+        hashPubKey(accumulatedHasher, curve, c);
 
         const contributionHasher = Blake2b(64);
-        hashPubKey(contributionHasher, c);
+        hashPubKey(contributionHasher, curve, c);
 
         responses.push(contributionHasher.digest());
 
         curDelta = c.deltaAfter;
@@ -181,27 +182,6 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
     return true;
 
-    function hashG1(hasher, p) {
-        const buff = new Uint8Array(sG1);
-        curve.G1.toRprUncompressed(buff, 0, p);
-        hasher.update(buff);
-    }
-
-    function hashG2(hasher, p) {
-        const buff = new Uint8Array(sG2);
-        curve.G2.toRprUncompressed(buff, 0, p);
-        hasher.update(buff);
-    }
-
-    function hashPubKey(hasher, c) {
-        hashG1(hasher, c.deltaAfter);
-        hashG1(hasher, c.delta.g1_s);
-        hashG1(hasher, c.delta.g1_sx);
-        hashG2(hasher, c.delta.g2_spx);
-        hasher.update(c.transcript);
-    }
-
     async function sectionHasSameRatio(groupName, fd1, sections1, fd2, sections2, idSection, g2sp, g2spx, sectionName) {
         const MAX_CHUNK_SIZE = 1<<20;
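For reference, the shared helpers feed the same per-contribution knowledge check the verifier performs: recompute the transcript hash, derive `g2_sp = hashToG2(transcript)`, and confirm the same-ratio relation. A hedged sketch, assuming a loaded `curve` and a contribution `c` whose transcript has already been validated (this mirrors, but does not reproduce, the full verify flow):

```js
const misc = require("./misc");
const { hashToG2 } = require("./keypair");

async function checkContributionKnowledge(curve, c) {
    const g2_sp = hashToG2(c.transcript);
    // Holds iff g1_sx and g2_spx use the same secret exponent as g1_s/g2_sp.
    return await misc.sameRatio(curve, c.delta.g1_s, c.delta.g1_sx, g2_sp, c.delta.g2_spx);
}
```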