bls12-381 working

Jordi Baylina 2020-06-30 15:45:21 +02:00
parent 8bc56a54a6
commit 6b08298526
No known key found for this signature in database
GPG Key ID: 7480C80C1BE43112
32 changed files with 458 additions and 320 deletions

118
cli.js

@ -33,13 +33,12 @@ const WitnessCalculatorBuilder = require("circom_runtime").WitnessCalculatorBuil
const wtnsFile = require("./src/wtnsfile");
const loadSyms = require("./src/loadsyms");
const printR1cs = require("./src/printr1cs");
const r1cs = require("./src/r1cs");
const clProcessor = require("./src/clprocessor");
const powersOfTaw = require("./src/powersoftau");
const bn128 = require("ffjavascript").bn128;
const solidityGenerator = require("./src/soliditygenerator.js");
const Scalar = require("ffjavascript").Scalar;
@ -64,18 +63,32 @@ const commands = [
action: r1csPrint
},
{
cmd: "witness calculate [circuit.wasm] [input.json] [witness.wtns]",
description: "Caclculate specific witness of a circuit given an input",
alias: ["wc", "calculatewitness -ws|wasm:circuit.wasm -i|input:input.json -wt|witness:witness.wtns"],
action: witnessCalculate
cmd: "r1cs export json [circuit.r1cs] [circuit.json]",
description: "Export r1cs to JSON file",
alias: ["rej"],
action: r1csExportJSON
},
{
cmd: "witness debug [circuit.wasm] [input.json] [witness.wtns] [circuit.sym]",
cmd: "wtns calculate [circuit.wasm] [input.json] [witness.wtns]",
description: "Caclculate specific witness of a circuit given an input",
alias: ["wc", "calculatewitness -ws|wasm:circuit.wasm -i|input:input.json -wt|witness:witness.wtns"],
action: wtnsCalculate
},
{
cmd: "wtns debug [circuit.wasm] [input.json] [witness.wtns] [circuit.sym]",
description: "Calculate the witness with debug info.",
longDescription: "Calculate the witness with debug info. \nOptions:\n-g or --g : Log signal gets\n-s or --s : Log signal sets\n-t or --trigger : Log triggers ",
options: "-get|g -set|s -trigger|t",
alias: ["wd"],
action: witnessDebug
action: wtnsDebug
},
{
cmd: "wtns export json [witness.wtns] [witnes.json]",
description: "Calculate the witness with debug info.",
longDescription: "Calculate the witness with debug info. \nOptions:\n-g or --g : Log signal gets\n-s or --s : Log signal sets\n-t or --trigger : Log triggers ",
options: "-get|g -set|s -trigger|t",
alias: ["wej"],
action: wtnsExportJson
},
{
cmd: "zksnark setup [circuit.r1cs] [circuit.zkey] [verification_key.json]",
@ -110,7 +123,7 @@ const commands = [
action: solidityGenCall
},
{
cmd: "powersoftau new <power> [powersoftau_0000.ptau]",
cmd: "powersoftau new <curve> <power> [powersoftau_0000.ptau]",
description: "Starts a powers of tau ceremony",
alias: ["ptn"],
options: "-verbose|v",
@ -314,13 +327,8 @@ function changeExt(fileName, newExt) {
async function r1csInfo(params, options) {
const r1csName = params[0] || "circuit.r1cs";
const cir = await loadR1cs(r1csName);
await r1cs.info(r1csName);
console.log(`# Wires: ${cir.nVars}`);
console.log(`# Constraints: ${cir.nConstraints}`);
console.log(`# Private Inputs: ${cir.nPrvInputs}`);
console.log(`# Public Inputs: ${cir.nPubInputs}`);
console.log(`# Outputs: ${cir.nOutputs}`);
return 0;
}
@ -328,18 +336,29 @@ async function r1csInfo(params, options) {
// r1cs print [circuit.r1cs] [circuit.sym]
async function r1csPrint(params, options) {
const r1csName = params[0] || "circuit.r1cs";
const symName = params[2] || changeExt(r1csName, "sym");
const symName = params[1] || changeExt(r1csName, "sym");
const cir = await loadR1cs(r1csName, true, true);
const sym = await loadSyms(symName);
printR1cs(cir, sym);
await r1cs.print(cir, sym);
return 0;
}
// witness calculate <circuit.wasm> <input.json> <witness.wtns>
async function witnessCalculate(params, options) {
// r1cs export json [circuit.r1cs] [circuit.json]
async function r1csExportJSON(params, options) {
const r1csName = params[0] || "circuit.r1cs";
const jsonName = params[1] || changeExt(r1csName, "json");
await r1cs.exportJson(r1csName, jsonName);
return 0;
}
// wtns calculate <circuit.wasm> <input.json> <witness.wtns>
async function wtnsCalculate(params, options) {
const wasmName = params[0] || "circuit.wasm";
const inputName = params[1] || "input.json";
const witnessName = params[2] || "witness.wtns";
@ -361,9 +380,9 @@ async function witnessCalculate(params, options) {
}
// witness debug <circuit.wasm> <input.json> <witness.wtns> <circuit.sym>
// wtns debug <circuit.wasm> <input.json> <witness.wtns> <circuit.sym>
// -get|g -set|s -trigger|t
async function witnessDebug(params, options) {
async function wtnsDebug(params, options) {
const wasmName = params[0] || "circuit.wasm";
const inputName = params[1] || "input.json";
const witnessName = params[2] || "witness.wtns";
@ -410,6 +429,21 @@ async function witnessDebug(params, options) {
}
// wtns export json [witness.wtns] [witness.json]
// -get|g -set|s -trigger|t
async function wtnsExportJson(params, options) {
const wtnsName = params[0] || "witness.wtns";
const jsonName = params[1] || "witness.json";
const w = await wtnsFile.read(wtnsName);
await fs.promises.writeFile(jsonName, JSON.stringify(stringifyBigInts(w), null, 1));
return 0;
}
// zksnark setup [circuit.r1cs] [circuit.zkey] [verification_key.json]
async function zksnarkSetup(params, options) {
@ -511,31 +545,7 @@ async function zkeyExportVKey(params) {
const zkeyName = params[0] || "circuit.zkey";
const verificationKeyName = params[2] || "verification_key.json";
const zKey = await zkey.utils.read(zkeyName);
let curve;
if (Scalar.eq(zKey.q, bn128.q)) {
curve = bn128;
} else {
assert(false, " Curve not supported");
}
const vKey = {
protocol: zKey.protocol,
nPublic: zKey.nPublic,
IC: zKey.IC,
vk_alpha_1: zKey.vk_alpha_1,
vk_beta_2: zKey.vk_beta_2,
vk_gamma_2: zKey.vk_gamma_2,
vk_delta_2: zKey.vk_delta_2,
vk_alphabeta_12: await curve.pairing( zKey.vk_alpha_1 , zKey.vk_beta_2 )
};
await fs.promises.writeFile(verificationKeyName, JSON.stringify(stringifyBigInts(vKey), null, 1), "utf-8");
return await zkey.exportVerificationKey(zkeyName, verificationKeyName);
}
// zkey export json [circuit.zkey] [circuit.zkey.json]",
@ -634,22 +644,28 @@ async function solidityGenCall(params, options) {
return 0;
}
// powersoftau new <curve> <power> [powersoftau_0000.ptau]",
async function powersOfTawNew(params, options) {
let curveName;
let power;
let ptauName;
power = parseInt(params[0]);
curveName = params[0];
power = parseInt(params[1]);
if ((power<1) || (power>28)) {
throw new Error("Power must be between 1 and 28");
}
if (params.length < 2) {
if (params.length < 3) {
ptauName = "powersOfTaw" + power + "_0000.ptau";
} else {
ptauName = params[1];
ptauName = params[2];
}
return await powersOfTaw.newAccumulator(bn128, power, ptauName, options.verbose);
const curve = await curves.getCurveFromName(curveName);
return await powersOfTaw.newAccumulator(curve, power, ptauName, options.verbose);
}
async function powersOfTawExportChallange(params, options) {
@ -672,7 +688,7 @@ async function powersOfTawChallangeContribute(params, options) {
let challangeName;
let responseName;
const curve = curves.getCurveFromName(params[0]);
const curve = await curves.getCurveFromName(params[0]);
challangeName = params[1];
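A minimal sketch of what the reworked "powersoftau new <curve> <power>" path now does (curve name, power and output file are illustrative):

const curves = require("./src/curves");
const powersOfTaw = require("./src/powersoftau");

async function newCeremony() {
    const curve = await curves.getCurveFromName("bls12381"); // curve engine is built on demand
    const power = 12;                                        // must be between 1 and 28
    return powersOfTaw.newAccumulator(curve, power, "powersOfTaw" + power + "_0000.ptau", false);
}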


@ -1,21 +1,44 @@
const Scalar = require("ffjavascript").Scalar;
const bn128 = require("ffjavascript").bn128;
const buildBn128 = require("ffjavascript").buildBn128;
const buildBls12381 = require("ffjavascript").buildBls12381;
module.exports.getCurveFromQ = function getCurveFromQ(q) {
const bls12381r = Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
const bn128r = Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");
const bls12381q = Scalar.e("1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab", 16);
const bn128q = Scalar.e("21888242871839275222246405745257275088696311157297823662689037894645226208583");
module.exports.getCurveFromR = async function getCurveFromR(r) {
let curve;
if (Scalar.eq(q, bn128.q)) {
curve = bn128;
if (Scalar.eq(r, bn128r)) {
curve = await buildBn128();
} else if (Scalar.eq(r, bls12381r)) {
curve = await buildBls12381();
} else {
throw new Error(`Curve not supported: ${q.toString()}`);
throw new Error(`Curve not supported: ${Scalar.toString(r)}`);
}
return curve;
};
module.exports.getCurveFromName = function getCurveFromName(name) {
module.exports.getCurveFromQ = async function getCurveFromQ(q) {
let curve;
if (Scalar.eq(q, bn128q)) {
curve = await buildBn128();
} else if (Scalar.eq(q, bls12381q)) {
curve = await buildBls12381();
} else {
throw new Error(`Curve not supported: ${Scalar.toString(q)}`);
}
return curve;
};
module.exports.getCurveFromName = async function getCurveFromName(name) {
let curve;
const normName = normalizeName(name);
if (["BN128", "BN254", "ALTBN128"].indexOf(normName) >= 0) {
curve = bn128;
curve = await buildBn128();
} else if (["BLS12381"].indexOf(normName) >= 0) {
curve = await buildBls12381();
} else {
throw new Error(`Curve not supported: ${name}`);
}
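A minimal usage sketch, assuming the module above is src/curves.js: all three helpers are now async because the wasm engines (buildBn128 / buildBls12381) are constructed on first use.

const Scalar = require("ffjavascript").Scalar;
const curves = require("./src/curves");

async function demo() {
    const byName = await curves.getCurveFromName("bls12381");
    const byR = await curves.getCurveFromR(
        Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16));
    // Both build the bls12-381 engine; an unknown name or prime throws "Curve not supported".
    return [byName, byR];
}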


@ -1,12 +1,9 @@
const bn128 = require("ffjavascript").bn128;
const utils = require("ffjavascript").utils;
const blake2b = require("blake2b-wasm");
const ChaCha = require("ffjavascript").ChaCha;
function hashToG2(hash) {
function hashToG2(curve, hash) {
const hashV = new DataView(hash.buffer, hash.byteOffset, hash.byteLength);
const seed = [];
for (let i=0; i<8; i++) {
@ -15,30 +12,31 @@ function hashToG2(hash) {
const rng = new ChaCha(seed);
const g2_sp = bn128.G2.fromRng(rng);
const g2_sp = curve.G2.fromRng(rng);
return g2_sp;
}
function getG2sp(persinalization, challange, g1s, g1sx) {
function getG2sp(curve, persinalization, challange, g1s, g1sx) {
const h = blake2b(64);
h.update(Buffer.from([persinalization]));
const b1 = new Uint8Array([persinalization]);
h.update(b1);
h.update(challange);
h.update( utils.beInt2Buff(g1s[0],32));
h.update( utils.beInt2Buff(g1s[1],32));
h.update( utils.beInt2Buff(g1sx[0],32));
h.update( utils.beInt2Buff(g1sx[1],32));
const hash = Buffer.from(h.digest());
const b3 = curve.G1.toUncompressed(g1s);
h.update( b3);
const b4 = curve.G1.toUncompressed(g1sx);
h.update( b4);
const hash =h.digest();
return hashToG2(hash);
return hashToG2(curve, hash);
}
function calculatePubKey(k, curve, personalization, challangeHash, rng ) {
k.g1_s = curve.G1.affine(curve.G1.fromRng(rng));
k.g1_sx = curve.G1.affine(curve.G1.mulScalar(k.g1_s, k.prvKey));
k.g2_sp = curve.G2.affine(getG2sp(personalization, challangeHash, k.g1_s, k.g1_sx));
k.g2_spx = curve.G2.affine(curve.G2.mulScalar(k.g2_sp, k.prvKey));
k.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
k.g1_sx = curve.G1.toAffine(curve.G1.timesFr(k.g1_s, k.prvKey));
k.g2_sp = curve.G2.toAffine(getG2sp(curve, personalization, challangeHash, k.g1_s, k.g1_sx));
k.g2_spx = curve.G2.toAffine(curve.G2.timesFr(k.g2_sp, k.prvKey));
return k;
}
@ -60,10 +58,10 @@ function createPTauKey(curve, challangeHash, rng) {
function createDeltaKey(curve, transcript, rng) {
const delta = {};
delta.prvKey = curve.Fr.fromRng(rng);
delta.g1_s = curve.G1.affine(curve.G1.fromRng(rng));
delta.g1_sx = curve.G1.affine(curve.G1.mulScalar(delta.g1_s, delta.prvKey));
delta.g2_sp = hashToG2(transcript);
delta.g2_spx = curve.G2.affine(curve.G2.mulScalar(delta.g2_sp, delta.prvKey));
delta.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
delta.g1_sx = curve.G1.toAffine(curve.G1.timesScalar(delta.g1_s, delta.prvKey));
delta.g2_sp = hashToG2(curve, transcript);
delta.g2_spx = curve.G2.toAffine(curve.G2.timesScalar(delta.g2_sp, delta.prvKey));
return delta;
}
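A rough sketch of the new key-pair flow (paths illustrative; assumes createPTauKey is exported alongside getG2sp as in the rest of the module): the curve is threaded through so getG2sp / hashToG2 hash the uncompressed point encoding of whichever curve is in use.

const keyPair = require("./src/keypair");
const misc = require("./src/misc");
const curves = require("./src/curves");

async function makeContributionKey(challangeHash, entropy) {
    const curve = await curves.getCurveFromName("bls12381");
    const rng = await misc.getRandomRng(entropy);
    return keyPair.createPTauKey(curve, challangeHash, rng); // tau, alpha and beta sub-keys
}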


@ -53,9 +53,9 @@ async function applyKeyToChallangeSection(fdOld, fdNew, responseHasher, curve, g
buffOut = await G.batchLEMtoU(buffOutLEM);
}
if (responseHasher) responseHasher.update(buffOutC);
if (responseHasher) responseHasher.update(buffOut);
await fdNew.write(buffOut);
t = curve.Fr.mul(t, curve.Fr.pow(inc, n));
t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
}
}


@ -141,7 +141,7 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
if (i==0) // Return the 2 first points.
for (let j=0; j<Math.min(2, NPoints); j++)
res.push(G.fromRprLEM(buffOutLEM, j*sG));
t = curve.Fr.mul(t, curve.Fr.pow(inc, n));
t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
}
await binFileUtils.endWriteSection(fdNew);


@ -131,7 +131,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
if (i==0) // Return the 2 first points.
for (let j=0; j<Math.min(2, NPoints); j++)
res.push(G.fromRprLEM(buffOutLEM, j*sG));
t = curve.Fr.mul(t, curve.Fr.pow(inc, n));
t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
}
await binFileUtils.endWriteSection(fdNew);


@ -60,10 +60,8 @@ async function newAccumulator(curve, power, fileName, verbose) {
await ptauUtils.writePTauHeader(fd, curve, power, 0);
const buffG1 = new Uint8Array(curve.G1.F.n8*2);
const buffG2 = new Uint8Array(curve.G2.F.n8*2);
curve.G1.toRprLEM(buffG1, 0, curve.G1.g);
curve.G2.toRprLEM(buffG2, 0, curve.G2.g);
const buffG1 = curve.G1.oneAffine;
const buffG2 = curve.G2.oneAffine;
// Write tauG1
///////////


@ -1,9 +1,9 @@
const assert = require("assert");
const Scalar = require("ffjavascript").Scalar;
const bn128 = require("ffjavascript").bn128;
const Blake2b = require("blake2b-wasm");
const keyPair = require("./keypair");
const misc = require("./misc");
const {getCurveFromQ} = require("./curves");
async function writePTauHeader(fd, curve, power, ceremonyPower) {
// Write the header
@ -39,12 +39,9 @@ async function readPTauHeader(fd, sections) {
const n8 = await fd.readULE32();
const buff = await fd.read(n8);
const q = Scalar.fromRprLE(buff);
let curve;
if (Scalar.eq(q, bn128.q)) {
curve = bn128;
} else {
assert(false, fd.fileName +": Curve not supported");
}
const curve = await getCurveFromQ(q);
assert(curve.F1.n64*8 == n8, fd.fileName +": Invalid size");
const power = await fd.readULE32();
@ -88,7 +85,7 @@ function fromPtauPubKeyRpr(buff, pos, curve, montgomery) {
if (montgomery) {
p = curve.G1.fromRprLEM( buff, pos );
} else {
p = curve.G1.fromRprBE( buff, pos );
p = curve.G1.fromRprUncompressed( buff, pos );
}
pos += curve.G1.F.n8*2;
return p;
@ -99,7 +96,7 @@ function fromPtauPubKeyRpr(buff, pos, curve, montgomery) {
if (montgomery) {
p = curve.G2.fromRprLEM( buff, pos );
} else {
p = curve.G2.fromRprBE( buff, pos );
p = curve.G2.fromRprUncompressed( buff, pos );
}
pos += curve.G2.F.n8*2;
return p;
@ -122,7 +119,7 @@ function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) {
if (montgomery) {
curve.G1.toRprLEM(buff, pos, p);
} else {
curve.G1.toRprBE(buff, pos, p);
curve.G1.toRprUncompressed(buff, pos, p);
}
pos += curve.F1.n8*2;
}
@ -131,7 +128,7 @@ function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) {
if (montgomery) {
curve.G2.toRprLEM(buff, pos, p);
} else {
curve.G2.toRprBE(buff, pos, p);
curve.G2.toRprUncompressed(buff, pos, p);
}
pos += curve.F2.n8*2;
}
@ -194,12 +191,12 @@ async function readContribution(fd, curve) {
return c;
async function readG1() {
const pBuff = await fd.read(curve.F1.n8*2);
const pBuff = await fd.read(curve.G1.F.n8*2);
return curve.G1.fromRprLEM( pBuff );
}
async function readG2() {
const pBuff = await fd.read(curve.F2.n8*2);
const pBuff = await fd.read(curve.G2.F.n8*2);
return curve.G2.fromRprLEM( pBuff );
}
@ -302,8 +299,8 @@ function calculateFirstChallangeHash(curve, power, verbose) {
const vG1 = new Uint8Array(curve.G1.F.n8*2);
const vG2 = new Uint8Array(curve.G2.F.n8*2);
curve.G1.toRprBE(vG1, 0, curve.G1.g);
curve.G2.toRprBE(vG2, 0, curve.G2.g);
curve.G1.toRprUncompressed(vG1, 0, curve.G1.g);
curve.G2.toRprUncompressed(vG2, 0, curve.G2.g);
hasher.update(Blake2b(64).digest());
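Usage sketch (module path and file name illustrative): readPTauHeader now resolves the curve from the prime q stored in the header instead of assuming bn128, so the same reader handles bn128 and bls12-381 ptau files.

const binFileUtils = require("./src/binfileutils");
const ptauUtils = require("./src/powersoftau_utils");

async function inspectPtau(ptauName) {
    const {fd, sections} = await binFileUtils.readBinFile(ptauName, "ptau", 1);
    const {curve, power} = await ptauUtils.readPTauHeader(fd, sections);
    console.log(curve.name, power);
    await fd.close();
}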


@ -53,9 +53,9 @@ async function verifyContribution(curve, cur, prev) {
}
}
cur.key.tau.g2_sp = keyPair.getG2sp(0, prev.nextChallange, cur.key.tau.g1_s, cur.key.tau.g1_sx);
cur.key.alpha.g2_sp = keyPair.getG2sp(1, prev.nextChallange, cur.key.alpha.g1_s, cur.key.alpha.g1_sx);
cur.key.beta.g2_sp = keyPair.getG2sp(2, prev.nextChallange, cur.key.beta.g1_s, cur.key.beta.g1_sx);
cur.key.tau.g2_sp = curve.G2.toAffine(keyPair.getG2sp(curve, 0, prev.nextChallange, cur.key.tau.g1_s, cur.key.tau.g1_sx));
cur.key.alpha.g2_sp = curve.G2.toAffine(keyPair.getG2sp(curve, 1, prev.nextChallange, cur.key.alpha.g1_s, cur.key.alpha.g1_sx));
cur.key.beta.g2_sp = curve.G2.toAffine(keyPair.getG2sp(curve, 2, prev.nextChallange, cur.key.beta.g1_s, cur.key.beta.g1_sx));
sr = await sameRatio(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, cur.key.tau.g2_sp, cur.key.tau.g2_spx);
if (sr !== true) {
@ -308,7 +308,7 @@ async function verify(tauFilename, verbose) {
const buff = await fd.read(sG);
const P = G.fromRprLEM(buff);
G.toRprBE(buffUv, 0, P);
G.toRprUncompressed(buffUv, 0, P);
nextContributionHasher.update(buffUv);
return P;
@ -343,8 +343,8 @@ async function verify(tauFilename, verbose) {
const firstBase = G.fromRprLEM(bases, 0);
const r = crypto.randomBytes(4).readUInt32BE(0, true);
R1 = G.add(R1, G.mulScalar(lastBase, r));
R2 = G.add(R2, G.mulScalar(firstBase, r));
R1 = G.add(R1, G.timesScalar(lastBase, r));
R2 = G.add(R2, G.timesScalar(firstBase, r));
}
const r1 = await G.multiExpAffine(bases.slice(0, (n-1)*sG), scalars);


@ -23,9 +23,11 @@ const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").ZqField;
/*
const PolF = new PolField(new ZqField(bn128.r));
const G1 = bn128.G1;
const G2 = bn128.G2;
*/
module.exports = function genProof(vk_proof, witness, verbose) {
@ -51,12 +53,12 @@ module.exports = function genProof(vk_proof, witness, verbose) {
for (let s= 0; s< vk_proof.nVars; s++) {
// pi_a = pi_a + A[s] * witness[s];
proof.pi_a = G1.add( proof.pi_a, G1.mulScalar( vk_proof.A[s], witness[s]));
proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( vk_proof.A[s], witness[s]));
// pi_b = pi_b + B[s] * witness[s];
proof.pi_b = G2.add( proof.pi_b, G2.mulScalar( vk_proof.B2[s], witness[s]));
proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( vk_proof.B2[s], witness[s]));
pib1 = G1.add( pib1, G1.mulScalar( vk_proof.B1[s], witness[s]));
pib1 = G1.add( pib1, G1.timesScalar( vk_proof.B1[s], witness[s]));
if ((verbose)&&(s%1000 == 1)) console.log("A, B1, B2: ", s);
@ -65,45 +67,45 @@ module.exports = function genProof(vk_proof, witness, verbose) {
for (let s= vk_proof.nPublic+1; s< vk_proof.nVars; s++) {
// pi_a = pi_a + A[s] * witness[s];
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( vk_proof.C[s], witness[s]));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.C[s], witness[s]));
if ((verbose)&&(s%1000 == 1)) console.log("C: ", s);
}
proof.pi_a = G1.add( proof.pi_a, vk_proof.vk_alpha_1 );
proof.pi_a = G1.add( proof.pi_a, G1.mulScalar( vk_proof.vk_delta_1, r ));
proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( vk_proof.vk_delta_1, r ));
proof.pi_b = G2.add( proof.pi_b, vk_proof.vk_beta_2 );
proof.pi_b = G2.add( proof.pi_b, G2.mulScalar( vk_proof.vk_delta_2, s ));
proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( vk_proof.vk_delta_2, s ));
pib1 = G1.add( pib1, vk_proof.vk_beta_1 );
pib1 = G1.add( pib1, G1.mulScalar( vk_proof.vk_delta_1, s ));
pib1 = G1.add( pib1, G1.timesScalar( vk_proof.vk_delta_1, s ));
const h = calculateH(vk_proof, witness);
// proof.pi_c = G1.affine(proof.pi_c);
// proof.pi_c = G1.toAffine(proof.pi_c);
// console.log("pi_onlyc", proof.pi_c);
for (let i = 0; i < h.length; i++) {
// console.log(i + "->" + h[i].toString());
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( vk_proof.hExps[i], h[i]));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.hExps[i], h[i]));
if ((verbose)&&(i%1000 == 1)) console.log("H: ", i);
}
// proof.pi_c = G1.affine(proof.pi_c);
// proof.pi_c = G1.toAffine(proof.pi_c);
// console.log("pi_candh", proof.pi_c);
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( proof.pi_a, s ));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( pib1, r ));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( vk_proof.vk_delta_1, PolF.F.neg(PolF.F.mul(r,s) )));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( proof.pi_a, s ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( pib1, r ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.vk_delta_1, PolF.F.neg(PolF.F.mul(r,s) )));
const publicSignals = witness.slice(1, vk_proof.nPublic+1);
proof.pi_a = G1.affine(proof.pi_a);
proof.pi_b = G2.affine(proof.pi_b);
proof.pi_c = G1.affine(proof.pi_c);
proof.pi_a = G1.toAffine(proof.pi_a);
proof.pi_b = G2.toAffine(proof.pi_b);
proof.pi_c = G1.toAffine(proof.pi_c);
proof.protocol = "groth";
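The pattern repeated across these prover and setup files: the wasm curve objects expose toAffine, timesScalar (plain integer scalar) and timesFr (scalar already given as an Fr element) in place of the old affine / mulScalar helpers. A tiny hedged sketch:

const curves = require("./src/curves");

async function demo() {
    const curve = await curves.getCurveFromName("bn128");
    const P = curve.G1.timesScalar(curve.G1.g, 7);           // 7·G from a plain integer
    const Q = curve.G1.timesFr(curve.G1.g, curve.Fr.e(7));   // 7·G from an Fr element
    return curve.G1.eq(curve.G1.toAffine(P), curve.G1.toAffine(Q)); // should be true
}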


@ -25,11 +25,11 @@ const ZqField = require("ffjavascript").ZqField;
const createKeccakHash = require("keccak");
const utils = require("ffjavascript").utils;
/*
const PolF = new PolField(new ZqField(bn128.r));
const G1 = bn128.G1;
const G2 = bn128.G2;
*/
module.exports = function genProof(vk_proof, witness) {
const proof = {};
@ -58,35 +58,35 @@ module.exports = function genProof(vk_proof, witness) {
for (let s= 0; s< vk_proof.nVars; s++) {
// pi_a = pi_a + A[s] * witness[s];
proof.pi_a = G1.add( proof.pi_a, G1.mulScalar( vk_proof.A[s], witness[s]));
proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( vk_proof.A[s], witness[s]));
// pi_b = pi_b + B[s] * witness[s];
proof.pi_b = G2.add( proof.pi_b, G2.mulScalar( vk_proof.B2[s], witness[s]));
proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( vk_proof.B2[s], witness[s]));
piadelta = G1.add( piadelta, G1.mulScalar( vk_proof.Adelta[s], witness[s]));
pib1 = G1.add( pib1, G1.mulScalar( vk_proof.B1[s], witness[s]));
piadelta = G1.add( piadelta, G1.timesScalar( vk_proof.Adelta[s], witness[s]));
pib1 = G1.add( pib1, G1.timesScalar( vk_proof.B1[s], witness[s]));
}
for (let s= vk_proof.nPublic+1; s< vk_proof.nVars; s++) {
// pi_a = pi_a + A[s] * witness[s];
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( vk_proof.C[s], witness[s]));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.C[s], witness[s]));
}
proof.pi_a = G1.add( proof.pi_a, vk_proof.vk_alpha_1 );
proof.pi_a = G1.add( proof.pi_a, G1.mulScalar( G1.g, r ));
proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( G1.g, r ));
piadelta = G1.add( piadelta, vk_proof.vk_alphadelta_1);
piadelta = G1.add( piadelta, G1.mulScalar( vk_proof.vk_delta_1, r ));
piadelta = G1.add( piadelta, G1.timesScalar( vk_proof.vk_delta_1, r ));
proof.pi_b = G2.add( proof.pi_b, vk_proof.vk_beta_2 );
proof.pi_b = G2.add( proof.pi_b, G2.mulScalar( G2.g, s ));
proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( G2.g, s ));
pib1 = G1.add( pib1, vk_proof.vk_beta_1 );
pib1 = G1.add( pib1, G1.mulScalar( G1.g, s ));
pib1 = G1.add( pib1, G1.timesScalar( G1.g, s ));
proof.pi_a = G1.affine(proof.pi_a);
proof.pi_b = G2.affine(proof.pi_b);
proof.pi_a = G1.toAffine(proof.pi_a);
proof.pi_b = G2.toAffine(proof.pi_b);
const buff = Buffer.concat([
utils.beInt2Buff(proof.pi_a[0],32),
@ -111,28 +111,28 @@ module.exports = function genProof(vk_proof, witness) {
const h = calculateH(vk_proof, witness);
// proof.pi_c = G1.affine(proof.pi_c);
// proof.pi_c = G1.toAffine(proof.pi_c);
// console.log("pi_onlyc", proof.pi_c);
for (let i = 0; i < h.length; i++) {
// console.log(i + "->" + h[i].toString());
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( vk_proof.hExps[i], h[i]));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.hExps[i], h[i]));
}
// proof.pi_c = G1.affine(proof.pi_c);
// proof.pi_c = G1.toAffine(proof.pi_c);
// console.log("pi_candh", proof.pi_c);
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( proof.pi_a, s ));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( pib1, r ));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( G1.g, PolF.F.neg(PolF.F.mul(r,s) )));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( proof.pi_a, s ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( pib1, r ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( G1.g, PolF.F.neg(PolF.F.mul(r,s) )));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( piadelta, h2 ));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( pib1, h1 ));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( vk_proof.vk_delta_1, PolF.F.mul(h1,h2)));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( piadelta, h2 ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( pib1, h1 ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.vk_delta_1, PolF.F.mul(h1,h2)));
const publicSignals = witness.slice(1, vk_proof.nPublic+1);
proof.pi_c = G1.affine(proof.pi_c);
proof.pi_c = G1.toAffine(proof.pi_c);
proof.protocol = "kimleeoh";


@ -20,11 +20,11 @@
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").ZqField;
/*
const PolF = new PolField(new ZqField(bn128.r));
const G1 = bn128.G1;
const G2 = bn128.G2;
*/
module.exports = function genProof(vk_proof, witness) {
const proof = {};
@ -48,41 +48,41 @@ module.exports = function genProof(vk_proof, witness) {
for (let s= vk_proof.nPublic+1; s< vk_proof.nVars; s++) {
// pi_a = pi_a + A[s] * witness[s];
proof.pi_a = G1.add( proof.pi_a, G1.mulScalar( vk_proof.A[s], witness[s]));
proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( vk_proof.A[s], witness[s]));
// pi_ap = pi_ap + Ap[s] * witness[s];
proof.pi_ap = G1.add( proof.pi_ap, G1.mulScalar( vk_proof.Ap[s], witness[s]));
proof.pi_ap = G1.add( proof.pi_ap, G1.timesScalar( vk_proof.Ap[s], witness[s]));
}
for (let s= 0; s< vk_proof.nVars; s++) {
// pi_a = pi_a + A[s] * witness[s];
proof.pi_b = G2.add( proof.pi_b, G2.mulScalar( vk_proof.B[s], witness[s]));
proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( vk_proof.B[s], witness[s]));
// pi_ap = pi_ap + Ap[s] * witness[s];
proof.pi_bp = G1.add( proof.pi_bp, G1.mulScalar( vk_proof.Bp[s], witness[s]));
proof.pi_bp = G1.add( proof.pi_bp, G1.timesScalar( vk_proof.Bp[s], witness[s]));
// pi_a = pi_a + A[s] * witness[s];
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( vk_proof.C[s], witness[s]));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.C[s], witness[s]));
// pi_ap = pi_ap + Ap[s] * witness[s];
proof.pi_cp = G1.add( proof.pi_cp, G1.mulScalar( vk_proof.Cp[s], witness[s]));
proof.pi_cp = G1.add( proof.pi_cp, G1.timesScalar( vk_proof.Cp[s], witness[s]));
// pi_ap = pi_ap + Ap[s] * witness[s];
proof.pi_kp = G1.add( proof.pi_kp, G1.mulScalar( vk_proof.Kp[s], witness[s]));
proof.pi_kp = G1.add( proof.pi_kp, G1.timesScalar( vk_proof.Kp[s], witness[s]));
}
proof.pi_a = G1.add( proof.pi_a, G1.mulScalar( vk_proof.A[vk_proof.nVars], d1));
proof.pi_ap = G1.add( proof.pi_ap, G1.mulScalar( vk_proof.Ap[vk_proof.nVars], d1));
proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( vk_proof.A[vk_proof.nVars], d1));
proof.pi_ap = G1.add( proof.pi_ap, G1.timesScalar( vk_proof.Ap[vk_proof.nVars], d1));
proof.pi_b = G2.add( proof.pi_b, G2.mulScalar( vk_proof.B[vk_proof.nVars], d2));
proof.pi_bp = G1.add( proof.pi_bp, G1.mulScalar( vk_proof.Bp[vk_proof.nVars], d2));
proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( vk_proof.B[vk_proof.nVars], d2));
proof.pi_bp = G1.add( proof.pi_bp, G1.timesScalar( vk_proof.Bp[vk_proof.nVars], d2));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( vk_proof.C[vk_proof.nVars], d3));
proof.pi_cp = G1.add( proof.pi_cp, G1.mulScalar( vk_proof.Cp[vk_proof.nVars], d3));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.C[vk_proof.nVars], d3));
proof.pi_cp = G1.add( proof.pi_cp, G1.timesScalar( vk_proof.Cp[vk_proof.nVars], d3));
proof.pi_kp = G1.add( proof.pi_kp, G1.mulScalar( vk_proof.Kp[vk_proof.nVars ], d1));
proof.pi_kp = G1.add( proof.pi_kp, G1.mulScalar( vk_proof.Kp[vk_proof.nVars+1], d2));
proof.pi_kp = G1.add( proof.pi_kp, G1.mulScalar( vk_proof.Kp[vk_proof.nVars+2], d3));
proof.pi_kp = G1.add( proof.pi_kp, G1.timesScalar( vk_proof.Kp[vk_proof.nVars ], d1));
proof.pi_kp = G1.add( proof.pi_kp, G1.timesScalar( vk_proof.Kp[vk_proof.nVars+1], d2));
proof.pi_kp = G1.add( proof.pi_kp, G1.timesScalar( vk_proof.Kp[vk_proof.nVars+2], d3));
/*
let polA = [];
@ -120,17 +120,17 @@ module.exports = function genProof(vk_proof, witness) {
// console.log(h.length + "/" + vk_proof.hExps.length);
for (let i = 0; i < h.length; i++) {
proof.pi_h = G1.add( proof.pi_h, G1.mulScalar( vk_proof.hExps[i], h[i]));
proof.pi_h = G1.add( proof.pi_h, G1.timesScalar( vk_proof.hExps[i], h[i]));
}
proof.pi_a = G1.affine(proof.pi_a);
proof.pi_b = G2.affine(proof.pi_b);
proof.pi_c = G1.affine(proof.pi_c);
proof.pi_ap = G1.affine(proof.pi_ap);
proof.pi_bp = G1.affine(proof.pi_bp);
proof.pi_cp = G1.affine(proof.pi_cp);
proof.pi_kp = G1.affine(proof.pi_kp);
proof.pi_h = G1.affine(proof.pi_h);
proof.pi_a = G1.toAffine(proof.pi_a);
proof.pi_b = G2.toAffine(proof.pi_b);
proof.pi_c = G1.toAffine(proof.pi_c);
proof.pi_ap = G1.toAffine(proof.pi_ap);
proof.pi_bp = G1.toAffine(proof.pi_bp);
proof.pi_cp = G1.toAffine(proof.pi_cp);
proof.pi_kp = G1.toAffine(proof.pi_kp);
proof.pi_h = G1.toAffine(proof.pi_h);
// proof.h=h;

3
src/r1cs.js Normal file

@ -0,0 +1,3 @@
module.exports.print = require("./r1cs_print");
module.exports.info = require("./r1cs_info");
module.exports.exportJson = require("./r1cs_export_json");

14
src/r1cs_export_json.js Normal file

@ -0,0 +1,14 @@
const {stringifyBigInts} = require("ffjavascript").utils;
const fs = require("fs");
const readZKey = require("./zkey_utils").read;
const loadR1cs = require("r1csfile").load;
module.exports = r1csExportJson;
async function r1csExportJson(r1csFileName, jsonFileName, verbose) {
const cir = await loadR1cs(r1csFileName, true, true);
const S = JSON.stringify(stringifyBigInts(cir), null, 1);
await fs.promises.writeFile(jsonFileName, S);
}

25
src/r1cs_info.js Normal file

@ -0,0 +1,25 @@
const Scalar = require("ffjavascript").Scalar;
const loadR1cs = require("r1csfile").load;
module.exports = r1csInfo;
const bls12381r = Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
const bn128r = Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617", 16);
async function r1csInfo(r1csName) {
const cir = await loadR1cs(r1csName);
if (Scalar.eq(cir.prime, bn128r)) {
console.log("# Curve: bn-128");
} else if (Scalar.eq(cir.prime, bls12381r)) {
console.log("# Curve: bls12-381");
} else {
console.log(`# Unknown Curve. Prime: ${Scalar.toString(cir.r)}`);
}
console.log(`# Wires: ${cir.nVars}`);
console.log(`# Constraints: ${cir.nConstraints}`);
console.log(`# Private Inputs: ${cir.nPrvInputs}`);
console.log(`# Public Inputs: ${cir.nPubInputs}`);
console.log(`# Outputs: ${cir.nOutputs}`);
}
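A quick usage sketch of the new src/r1cs facade wired into the CLI above (file names illustrative):

const r1cs = require("./src/r1cs");

async function main() {
    await r1cs.info("circuit.r1cs");                        // prints curve, wires, constraints, inputs, outputs
    await r1cs.exportJson("circuit.r1cs", "circuit.json");  // dumps the parsed constraint system as JSON
}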


@ -1,12 +1,13 @@
module.exports = function printR1cs(r1cs, syms) {
module.exports = function r1csPrint(r1cs, syms) {
for (let i=0; i<r1cs.constraints.length; i++) {
printCostraint(r1cs.constraints[i]);
}
function printCostraint(c) {
const lc2str = (lc) => {
let S = "";
for (let k in lc) {
const keys = Object.keys(lc);
keys.forEach( (k) => {
let name = syms.varIdx2Name[k];
if (name == "one") name = "";
@ -16,7 +17,7 @@ module.exports = function printR1cs(r1cs, syms) {
if ((S!="")&&(vs[0]!="-")) vs = "+"+vs;
if (S!="") vs = " "+vs;
S= S + vs + name;
}
});
return S;
};
const S = `[ ${lc2str(c[0])} ] * [ ${lc2str(c[1])} ] - [ ${lc2str(c[2])} ] = 0`;


@ -22,12 +22,12 @@
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").ZqField;
/*
const G1 = bn128.G1;
const G2 = bn128.G2;
const PolF = new PolField(new ZqField(bn128.r));
const F = new ZqField(bn128.r);
*/
module.exports = function setup(circuit, verbose) {
const setup = {
vk_proof : {
@ -165,26 +165,26 @@ function calculateEncriptedValuesAtT(setup, circuit, verbose) {
let invDelta = F.inv(setup.toxic.kdelta);
let invGamma = F.inv(setup.toxic.kgamma);
setup.vk_proof.vk_alpha_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kalpha));
setup.vk_proof.vk_beta_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kbeta));
setup.vk_proof.vk_delta_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kdelta));
setup.vk_proof.vk_alpha_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kalpha));
setup.vk_proof.vk_beta_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kbeta));
setup.vk_proof.vk_delta_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kdelta));
setup.vk_proof.vk_beta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kbeta));
setup.vk_proof.vk_delta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kdelta));
setup.vk_proof.vk_gamma_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kgamma));
setup.vk_proof.vk_beta_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kbeta));
setup.vk_proof.vk_delta_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kdelta));
setup.vk_proof.vk_gamma_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kgamma));
for (let s=0; s<circuit.nVars; s++) {
const A = G1.mulScalar(G1.g, v.a_t[s]);
const A = G1.timesScalar(G1.g, v.a_t[s]);
setup.vk_proof.A[s] = A;
const B1 = G1.mulScalar(G1.g, v.b_t[s]);
const B1 = G1.timesScalar(G1.g, v.b_t[s]);
setup.vk_proof.B1[s] = B1;
const B2 = G2.mulScalar(G2.g, v.b_t[s]);
const B2 = G2.timesScalar(G2.g, v.b_t[s]);
setup.vk_proof.B2[s] = B2;
@ -202,7 +202,7 @@ function calculateEncriptedValuesAtT(setup, circuit, verbose) {
F.mul(v.b_t[s], setup.toxic.kalpha)),
v.c_t[s]));
const IC = G1.mulScalar(G1.g, ps);
const IC = G1.timesScalar(G1.g, ps);
setup.vk_proof.IC[s]=IC;
}
@ -215,7 +215,7 @@ function calculateEncriptedValuesAtT(setup, circuit, verbose) {
F.mul(v.a_t[s], setup.toxic.kbeta),
F.mul(v.b_t[s], setup.toxic.kalpha)),
v.c_t[s]));
const C = G1.mulScalar(G1.g, ps);
const C = G1.timesScalar(G1.g, ps);
setup.vk_proof.C[s]=C;
if ((verbose)&&(s%1000 == 1)) console.log("C: ", s);
@ -230,10 +230,10 @@ function calculateEncriptedValuesAtT(setup, circuit, verbose) {
const zod = F.mul(invDelta, v.z_t);
setup.vk_proof.hExps[0] = G1.affine(G1.mulScalar(G1.g, zod));
setup.vk_proof.hExps[0] = G1.toAffine(G1.timesScalar(G1.g, zod));
let eT = setup.toxic.t;
for (let i=1; i<maxH; i++) {
setup.vk_proof.hExps[i] = G1.mulScalar(G1.g, F.mul(eT, zod));
setup.vk_proof.hExps[i] = G1.timesScalar(G1.g, F.mul(eT, zod));
eT = F.mul(eT, setup.toxic.t);
if ((verbose)&&(i%1000 == 1)) console.log("Tau: ", i);


@ -24,12 +24,12 @@ const bigInt = require("big-integer");
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").ZqField;
/*
const G1 = bn128.G1;
const G2 = bn128.G2;
const PolF = new PolField(new ZqField(bn128.r));
const F = new ZqField(bn128.r);
*/
module.exports = function setup(circuit) {
const setup = {
vk_proof : {
@ -135,34 +135,34 @@ function calculateEncriptedValuesAtT(setup, circuit) {
const gammaSquare = F.mul(setup.toxic.kgamma, setup.toxic.kgamma);
setup.vk_proof.vk_alpha_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kalpha));
setup.vk_proof.vk_beta_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kbeta));
setup.vk_proof.vk_delta_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kdelta));
setup.vk_proof.vk_alphadelta_1 = G1.affine(G1.mulScalar( G1.g, F.mul(setup.toxic.kalpha, setup.toxic.kdelta)));
setup.vk_proof.vk_alpha_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kalpha));
setup.vk_proof.vk_beta_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kbeta));
setup.vk_proof.vk_delta_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kdelta));
setup.vk_proof.vk_alphadelta_1 = G1.toAffine(G1.timesScalar( G1.g, F.mul(setup.toxic.kalpha, setup.toxic.kdelta)));
setup.vk_proof.vk_beta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kbeta));
setup.vk_proof.vk_beta_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kbeta));
setup.vk_verifier.vk_alpha_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kalpha));
setup.vk_verifier.vk_alpha_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kalpha));
setup.vk_verifier.vk_beta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kbeta));
setup.vk_verifier.vk_gamma_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kgamma));
setup.vk_verifier.vk_delta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kdelta));
setup.vk_verifier.vk_beta_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kbeta));
setup.vk_verifier.vk_gamma_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kgamma));
setup.vk_verifier.vk_delta_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kdelta));
setup.vk_verifier.vk_alphabeta_12 = bn128.pairing( setup.vk_verifier.vk_alpha_1 , setup.vk_verifier.vk_beta_2 );
for (let s=0; s<circuit.nVars; s++) {
const A = G1.affine(G1.mulScalar(G1.g, F.mul(setup.toxic.kgamma, v.a_t[s])));
const A = G1.toAffine(G1.timesScalar(G1.g, F.mul(setup.toxic.kgamma, v.a_t[s])));
setup.vk_proof.A[s] = A;
setup.vk_proof.Adelta[s] = G1.affine(G1.mulScalar(A, setup.toxic.kdelta));
setup.vk_proof.Adelta[s] = G1.toAffine(G1.timesScalar(A, setup.toxic.kdelta));
const B1 = G1.affine(G1.mulScalar(G1.g, F.mul(setup.toxic.kgamma, v.b_t[s])));
const B1 = G1.toAffine(G1.timesScalar(G1.g, F.mul(setup.toxic.kgamma, v.b_t[s])));
setup.vk_proof.B1[s] = B1;
const B2 = G2.affine(G2.mulScalar(G2.g, F.mul(setup.toxic.kgamma, v.b_t[s])));
const B2 = G2.toAffine(G2.timesScalar(G2.g, F.mul(setup.toxic.kgamma, v.b_t[s])));
setup.vk_proof.B2[s] = B2;
}
@ -187,7 +187,7 @@ function calculateEncriptedValuesAtT(setup, circuit) {
)
);
const IC = G1.affine(G1.mulScalar(G1.g, ps));
const IC = G1.toAffine(G1.timesScalar(G1.g, ps));
setup.vk_verifier.IC[s]=IC;
}
@ -210,7 +210,7 @@ function calculateEncriptedValuesAtT(setup, circuit) {
)
);
const C = G1.affine(G1.mulScalar(G1.g, ps));
const C = G1.toAffine(G1.timesScalar(G1.g, ps));
setup.vk_proof.C[s]=C;
}
@ -222,10 +222,10 @@ function calculateEncriptedValuesAtT(setup, circuit) {
const zod = F.mul(gammaSquare, v.z_t);
setup.vk_proof.hExps[0] = G1.affine(G1.mulScalar(G1.g, zod));
setup.vk_proof.hExps[0] = G1.toAffine(G1.timesScalar(G1.g, zod));
let eT = setup.toxic.t;
for (let i=1; i<maxH; i++) {
setup.vk_proof.hExps[i] = G1.affine(G1.mulScalar(G1.g, F.mul(eT, zod)));
setup.vk_proof.hExps[i] = G1.toAffine(G1.timesScalar(G1.g, F.mul(eT, zod)));
eT = F.mul(eT, setup.toxic.t);
}
}


@ -20,12 +20,12 @@
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").F1Field;
/*
const G1 = bn128.G1;
const G2 = bn128.G2;
const PolF = new PolField(new ZqField(bn128.r));
const F = new ZqField(bn128.r);
*/
module.exports = function setup(circuit) {
const setup = {
vk_proof : {
@ -139,18 +139,18 @@ function calculateEncriptedValuesAtT(setup, circuit) {
const gb = F.mul(setup.toxic.kbeta, setup.toxic.kgamma);
setup.vk_verifier.vk_a = G2.affine(G2.mulScalar( G2.g, setup.toxic.ka));
setup.vk_verifier.vk_b = G1.affine(G1.mulScalar( G1.g, setup.toxic.kb));
setup.vk_verifier.vk_c = G2.affine(G2.mulScalar( G2.g, setup.toxic.kc));
setup.vk_verifier.vk_gb_1 = G1.affine(G1.mulScalar( G1.g, gb));
setup.vk_verifier.vk_gb_2 = G2.affine(G2.mulScalar( G2.g, gb));
setup.vk_verifier.vk_g = G2.affine(G2.mulScalar( G2.g, setup.toxic.kgamma));
setup.vk_verifier.vk_a = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.ka));
setup.vk_verifier.vk_b = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kb));
setup.vk_verifier.vk_c = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kc));
setup.vk_verifier.vk_gb_1 = G1.toAffine(G1.timesScalar( G1.g, gb));
setup.vk_verifier.vk_gb_2 = G2.toAffine(G2.timesScalar( G2.g, gb));
setup.vk_verifier.vk_g = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kgamma));
for (let s=0; s<circuit.nVars; s++) {
// A[i] = G1 * polA(t)
const raat = F.mul(setup.toxic.ra, v.a_t[s]);
const A = G1.affine(G1.mulScalar(G1.g, raat));
const A = G1.toAffine(G1.timesScalar(G1.g, raat));
setup.vk_proof.A[s] = A;
@ -161,26 +161,26 @@ function calculateEncriptedValuesAtT(setup, circuit) {
// B1[i] = G1 * polB(t)
const rbbt = F.mul(setup.toxic.rb, v.b_t[s]);
const B1 = G1.affine(G1.mulScalar(G1.g, rbbt));
const B1 = G1.toAffine(G1.timesScalar(G1.g, rbbt));
// B2[i] = G2 * polB(t)
const B2 = G2.affine(G2.mulScalar(G2.g, rbbt));
const B2 = G2.toAffine(G2.timesScalar(G2.g, rbbt));
setup.vk_proof.B[s]=B2;
// C[i] = G1 * polC(t)
const rcct = F.mul(setup.toxic.rc, v.c_t[s]);
const C = G1.affine(G1.mulScalar( G1.g, rcct));
const C = G1.toAffine(G1.timesScalar( G1.g, rcct));
setup.vk_proof.C[s] =C;
// K = G1 * (A+B+C)
const kt = F.add(F.add(raat, rbbt), rcct);
const K = G1.affine(G1.mulScalar( G1.g, kt));
const K = G1.toAffine(G1.timesScalar( G1.g, kt));
/*
// Comment this lines to improve the process
const Ktest = G1.affine(G1.add(G1.add(A, B1), C));
const Ktest = G1.toAffine(G1.add(G1.add(A, B1), C));
if (!G1.equals(K, Ktest)) {
console.log ("=====FAIL======");
@ -188,35 +188,35 @@ function calculateEncriptedValuesAtT(setup, circuit) {
*/
if (s > setup.vk_proof.nPublic) {
setup.vk_proof.Ap[s] = G1.affine(G1.mulScalar(A, setup.toxic.ka));
setup.vk_proof.Ap[s] = G1.toAffine(G1.timesScalar(A, setup.toxic.ka));
}
setup.vk_proof.Bp[s] = G1.affine(G1.mulScalar(B1, setup.toxic.kb));
setup.vk_proof.Cp[s] = G1.affine(G1.mulScalar(C, setup.toxic.kc));
setup.vk_proof.Kp[s] = G1.affine(G1.mulScalar(K, setup.toxic.kbeta));
setup.vk_proof.Bp[s] = G1.toAffine(G1.timesScalar(B1, setup.toxic.kb));
setup.vk_proof.Cp[s] = G1.toAffine(G1.timesScalar(C, setup.toxic.kc));
setup.vk_proof.Kp[s] = G1.toAffine(G1.timesScalar(K, setup.toxic.kbeta));
}
// Extra coeficients
const A = G1.mulScalar( G1.g, F.mul(setup.toxic.ra, v.z_t));
setup.vk_proof.A[circuit.nVars] = G1.affine(A);
setup.vk_proof.Ap[circuit.nVars] = G1.affine(G1.mulScalar(A, setup.toxic.ka));
const A = G1.timesScalar( G1.g, F.mul(setup.toxic.ra, v.z_t));
setup.vk_proof.A[circuit.nVars] = G1.toAffine(A);
setup.vk_proof.Ap[circuit.nVars] = G1.toAffine(G1.timesScalar(A, setup.toxic.ka));
const B1 = G1.mulScalar( G1.g, F.mul(setup.toxic.rb, v.z_t));
const B2 = G2.mulScalar( G2.g, F.mul(setup.toxic.rb, v.z_t));
setup.vk_proof.B[circuit.nVars] = G2.affine(B2);
setup.vk_proof.Bp[circuit.nVars] = G1.affine(G1.mulScalar(B1, setup.toxic.kb));
const B1 = G1.timesScalar( G1.g, F.mul(setup.toxic.rb, v.z_t));
const B2 = G2.timesScalar( G2.g, F.mul(setup.toxic.rb, v.z_t));
setup.vk_proof.B[circuit.nVars] = G2.toAffine(B2);
setup.vk_proof.Bp[circuit.nVars] = G1.toAffine(G1.timesScalar(B1, setup.toxic.kb));
const C = G1.mulScalar( G1.g, F.mul(setup.toxic.rc, v.z_t));
setup.vk_proof.C[circuit.nVars] = G1.affine(C);
setup.vk_proof.Cp[circuit.nVars] = G1.affine(G1.mulScalar(C, setup.toxic.kc));
const C = G1.timesScalar( G1.g, F.mul(setup.toxic.rc, v.z_t));
setup.vk_proof.C[circuit.nVars] = G1.toAffine(C);
setup.vk_proof.Cp[circuit.nVars] = G1.toAffine(G1.timesScalar(C, setup.toxic.kc));
setup.vk_proof.Kp[circuit.nVars ] = G1.affine(G1.mulScalar(A, setup.toxic.kbeta));
setup.vk_proof.Kp[circuit.nVars+1] = G1.affine(G1.mulScalar(B1, setup.toxic.kbeta));
setup.vk_proof.Kp[circuit.nVars+2] = G1.affine(G1.mulScalar(C, setup.toxic.kbeta));
setup.vk_proof.Kp[circuit.nVars ] = G1.toAffine(G1.timesScalar(A, setup.toxic.kbeta));
setup.vk_proof.Kp[circuit.nVars+1] = G1.toAffine(G1.timesScalar(B1, setup.toxic.kbeta));
setup.vk_proof.Kp[circuit.nVars+2] = G1.toAffine(G1.timesScalar(C, setup.toxic.kbeta));
// setup.vk_verifier.A[0] = G1.affine(G1.add(setup.vk_verifier.A[0], setup.vk_proof.A[circuit.nVars]));
// setup.vk_verifier.A[0] = G1.toAffine(G1.add(setup.vk_verifier.A[0], setup.vk_proof.A[circuit.nVars]));
// vk_z
setup.vk_verifier.vk_z = G2.affine(G2.mulScalar(
setup.vk_verifier.vk_z = G2.toAffine(G2.timesScalar(
G2.g,
F.mul(setup.toxic.rc, v.z_t)));
}
@ -229,7 +229,7 @@ function calculateHexps(setup) {
setup.vk_proof.hExps[0] = G1.g;
let eT = setup.toxic.t;
for (let i=1; i<maxH; i++) {
setup.vk_proof.hExps[i] = G1.affine(G1.mulScalar(G1.g, eT));
setup.vk_proof.hExps[i] = G1.toAffine(G1.timesScalar(G1.g, eT));
eT = F.mul(eT, setup.toxic.t);
}
}


@ -21,14 +21,14 @@
const bn128 = require("ffjavascript").bn128;
/*
const G1 = bn128.G1;
*/
module.exports = function isValid(vk_verifier, proof, publicSignals) {
let cpub = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
cpub = G1.add( cpub, G1.mulScalar( vk_verifier.IC[s+1], publicSignals[s]));
cpub = G1.add( cpub, G1.timesScalar( vk_verifier.IC[s+1], publicSignals[s]));
}
if (! bn128.F12.eq(


@ -24,14 +24,15 @@ const bn128 = require("ffjavascript").bn128;
const createKeccakHash = require("keccak");
const utils = require("ffjavascript").utils;
/*
const G1 = bn128.G1;
const G2 = bn128.G2;
*/
module.exports = function isValid(vk_verifier, proof, publicSignals) {
let cpub = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
cpub = G1.add( cpub, G1.mulScalar( vk_verifier.IC[s+1], publicSignals[s]));
cpub = G1.add( cpub, G1.timesScalar( vk_verifier.IC[s+1], publicSignals[s]));
}
const buff = Buffer.concat([
@ -59,8 +60,8 @@ module.exports = function isValid(vk_verifier, proof, publicSignals) {
if (! bn128.F12.eq(
bn128.pairing(
G1.add(proof.pi_a, G1.mulScalar(G1.g, h1)),
G2.add(proof.pi_b, G2.mulScalar(vk_verifier.vk_delta_2, h2))
G1.add(proof.pi_a, G1.timesScalar(G1.g, h1)),
G2.add(proof.pi_b, G2.timesScalar(vk_verifier.vk_delta_2, h2))
),
bn128.F12.mul(
vk_verifier.vk_alphabeta_12,


@ -18,15 +18,15 @@
*/
const bn128 = require("ffjavascript").bn128;
/*
const G1 = bn128.G1;
const G2 = bn128.G2;
*/
module.exports = function isValid(vk_verifier, proof, publicSignals) {
let full_pi_a = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
full_pi_a = G1.add( full_pi_a, G1.mulScalar( vk_verifier.IC[s+1], publicSignals[s]));
full_pi_a = G1.add( full_pi_a, G1.timesScalar( vk_verifier.IC[s+1], publicSignals[s]));
}
full_pi_a = G1.add( full_pi_a, proof.pi_a);


@ -1,5 +1,4 @@
module.exports.new = require("./zkey_new.js");
module.exports.exportBellman = require("./zkey_export_bellman.js");
module.exports.importBellman = require("./zkey_import_bellman.js");
@ -9,3 +8,4 @@ module.exports.beacon = require("./zkey_beacon.js");
module.exports.exportJson = require("./zkey_export_json.js");
module.exports.utils = require("./zkey_utils.js");
module.exports.challangeContribute = require("./zkey_challangecontribute.js");
module.exports.exportVerificationKey = require("./zkey_export_verificationkey.js");


@ -51,16 +51,16 @@ module.exports = async function beacon(zkeyNameOld, zkeyNameNew, name, numIterat
const curContribution = {};
curContribution.delta = {};
curContribution.delta.prvKey = curve.Fr.fromRng(rng);
curContribution.delta.g1_s = curve.G1.affine(curve.G1.fromRng(rng));
curContribution.delta.g1_sx = curve.G1.affine(curve.G1.mulScalar(curContribution.delta.g1_s, curContribution.delta.prvKey));
curContribution.delta.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
curContribution.delta.g1_sx = curve.G1.toAffine(curve.G1.timesScalar(curContribution.delta.g1_s, curContribution.delta.prvKey));
utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_s);
utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_sx);
curContribution.transcript = transcriptHasher.digest();
curContribution.delta.g2_sp = hashToG2(curContribution.transcript);
curContribution.delta.g2_spx = curve.G2.affine(curve.G2.mulScalar(curContribution.delta.g2_sp, curContribution.delta.prvKey));
curContribution.delta.g2_sp = hashToG2(curve, curContribution.transcript);
curContribution.delta.g2_spx = curve.G2.toAffine(curve.G2.timesScalar(curContribution.delta.g2_sp, curContribution.delta.prvKey));
zkey.vk_delta_1 = curve.G1.mulScalar(zkey.vk_delta_1, curContribution.delta.prvKey);
zkey.vk_delta_2 = curve.G2.mulScalar(zkey.vk_delta_2, curContribution.delta.prvKey);
zkey.vk_delta_1 = curve.G1.timesScalar(zkey.vk_delta_1, curContribution.delta.prvKey);
zkey.vk_delta_2 = curve.G2.timesScalar(zkey.vk_delta_2, curContribution.delta.prvKey);
curContribution.deltaAfter = zkey.vk_delta_1;


@ -44,10 +44,10 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
await copy(sG2); // beta2
await copy(sG2); // gamma2
const oldDelta1 = await readG1();
const delta1 = curve.G1.mulScalar(oldDelta1, delta);
const delta1 = curve.G1.timesScalar(oldDelta1, delta);
await writeG1(delta1);
const oldDelta2 = await readG2();
const delta2 = curve.G2.mulScalar(oldDelta2, delta);
const delta2 = curve.G2.timesScalar(oldDelta2, delta);
await writeG2(delta2);
// IC
@ -107,13 +107,13 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
const curContribution = {};
curContribution.delta = {};
curContribution.delta.prvKey = delta;
curContribution.delta.g1_s = curve.G1.affine(curve.G1.fromRng(rng));
curContribution.delta.g1_sx = curve.G1.affine(curve.G1.mulScalar(curContribution.delta.g1_s, delta));
curContribution.delta.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
curContribution.delta.g1_sx = curve.G1.toAffine(curve.G1.timesScalar(curContribution.delta.g1_s, delta));
utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_s);
utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_sx);
curContribution.transcript = transcriptHasher.digest();
curContribution.delta.g2_sp = hashToG2(curContribution.transcript);
curContribution.delta.g2_spx = curve.G2.affine(curve.G2.mulScalar(curContribution.delta.g2_sp, delta));
curContribution.delta.g2_sp = hashToG2(curve, curContribution.transcript);
curContribution.delta.g2_spx = curve.G2.toAffine(curve.G2.timesScalar(curContribution.delta.g2_sp, delta));
curContribution.deltaAfter = delta1;
curContribution.type = 0;
mpcParams.contributions.push(curContribution);


@ -25,6 +25,7 @@ module.exports = async function phase2contribute(zkeyNameOld, zkeyNameNew, name
const rng = await misc.getRandomRng(entropy);
const transcriptHasher = Blake2b(64);
transcriptHasher.update(mpcParams.csHash);
for (let i=0; i<mpcParams.contributions.length; i++) {
utils.hashPubKey(transcriptHasher, curve, mpcParams.contributions[i]);
}
@ -32,16 +33,16 @@ module.exports = async function phase2contribute(zkeyNameOld, zkeyNameNew, name
const curContribution = {};
curContribution.delta = {};
curContribution.delta.prvKey = curve.Fr.fromRng(rng);
curContribution.delta.g1_s = curve.G1.affine(curve.G1.fromRng(rng));
curContribution.delta.g1_sx = curve.G1.affine(curve.G1.mulScalar(curContribution.delta.g1_s, curContribution.delta.prvKey));
curContribution.delta.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
curContribution.delta.g1_sx = curve.G1.toAffine(curve.G1.timesScalar(curContribution.delta.g1_s, curContribution.delta.prvKey));
utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_s);
utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_sx);
curContribution.transcript = transcriptHasher.digest();
curContribution.delta.g2_sp = hashToG2(curContribution.transcript);
curContribution.delta.g2_spx = curve.G2.affine(curve.G2.mulScalar(curContribution.delta.g2_sp, curContribution.delta.prvKey));
curContribution.delta.g2_sp = hashToG2(curve, curContribution.transcript);
curContribution.delta.g2_spx = curve.G2.toAffine(curve.G2.timesScalar(curContribution.delta.g2_sp, curContribution.delta.prvKey));
zkey.vk_delta_1 = curve.G1.mulScalar(zkey.vk_delta_1, curContribution.delta.prvKey);
zkey.vk_delta_2 = curve.G2.mulScalar(zkey.vk_delta_2, curContribution.delta.prvKey);
zkey.vk_delta_1 = curve.G1.timesScalar(zkey.vk_delta_1, curContribution.delta.prvKey);
zkey.vk_delta_2 = curve.G2.timesScalar(zkey.vk_delta_2, curContribution.delta.prvKey);
curContribution.deltaAfter = zkey.vk_delta_1;


@ -0,0 +1,43 @@
const binFileUtils = require("./binfileutils");
const zkeyUtils = require("./zkey_utils");
const getCurve = require("./curves").getCurveFromQ;
const {stringifyBigInts} = require("ffjavascript").utils;
const fs = require("fs");
module.exports = async function zkeyExportVerificationKey(zkeyName, verificationKeyName) {
const {fd, sections} = await binFileUtils.readBinFile(zkeyName, "zkey", 2);
const zkey = await zkeyUtils.readHeader(fd, sections, "groth16");
const curve = await getCurve(zkey.q);
const sG1 = curve.G1.F.n8*2;
const alphaBeta = await curve.pairing( zkey.vk_alpha_1 , zkey.vk_beta_2 );
const vKey = {
protocol: zkey.protocol,
curve: curve.name,
nPublic: zkey.nPublic,
vk_alpha_1: curve.G1.toObject(zkey.vk_alpha_1),
vk_beta_2: curve.G2.toObject(zkey.vk_beta_2),
vk_gamma_2: curve.G2.toObject(zkey.vk_gamma_2),
vk_delta_2: curve.G2.toObject(zkey.vk_delta_2),
vk_alphabeta_12: curve.Gt.toObject(alphaBeta)
};
// Read IC Section
///////////
await binFileUtils.startReadUniqueSection(fd, sections, 3);
vKey.IC = [];
for (let i=0; i<= zkey.nPublic; i++) {
const buff = await fd.read(sG1);
const P = curve.G1.toObject(buff);
vKey.IC.push(P);
}
await binFileUtils.endReadSection(fd);
await fs.promises.writeFile(verificationKeyName, JSON.stringify(stringifyBigInts(vKey), null, 1), "utf-8");
};
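Usage sketch (file names illustrative): the export now lives in its own module and is re-exported from src/zkey, which is how the slimmed-down zkeyExportVKey handler in cli.js calls it.

const zkey = require("./src/zkey");

async function main() {
    await zkey.exportVerificationKey("circuit.zkey", "verification_key.json");
}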


@ -19,8 +19,6 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
const {fd: fdPTau, sections: sectionsPTau} = await binFileUtils.readBinFile(ptauName, "ptau", 1);
const {curve, power} = await utils.readPTauHeader(fdPTau, sectionsPTau);
await curve.loadEngine();
const fdZKey = await binFileUtils.createBinFile(zkeyName, "zkey", 1, 10);
const sG1 = curve.G1.F.n8*2;
@ -62,7 +60,7 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
const primeR = curve.r;
const n8r = (Math.floor( (Scalar.bitLength(primeR) - 1) / 64) +1)*8;
const Rr = Scalar.mod(Scalar.shl(1, n8r*8), primeR);
const R2r = Scalar.mod(Scalar.mul(Rr,Rr), primeR);
const R2r = curve.Fr.e(Scalar.mod(Scalar.mul(Rr,Rr), primeR));
await fdZKey.writeULE32(n8q);
await binFileUtils.writeBigInt(fdZKey, primeQ, n8q);
@ -95,9 +93,9 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
const bg2 = new Uint8Array(sG2);
curve.G2.toRprLEM(bg2, 0, curve.G2.g);
const bg1U = new Uint8Array(sG1);
curve.G1.toRprBE(bg1U, 0, curve.G1.g);
curve.G1.toRprUncompressed(bg1U, 0, curve.G1.g);
const bg2U = new Uint8Array(sG2);
curve.G2.toRprBE(bg2U, 0, curve.G2.g);
curve.G2.toRprUncompressed(bg2U, 0, curve.G2.g);
await fdZKey.write(bg2); // gamma2
await fdZKey.write(bg1); // delta1
@ -285,7 +283,7 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
}
async function composeAndWritePointsChunk(groupName, arr) {
const concurrency= curve.engine.concurrency;
const concurrency= curve.tm.concurrency;
const nElementsPerThread = Math.floor(arr.length / concurrency);
const opPromises = [];
const G = curve[groupName];
@ -388,7 +386,7 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
]});
task.push({cmd: "GET", out: 0, var: 2, len: arr.length*sGout});
const res = await curve.engine.queueAction(task);
const res = await curve.tm.queueAction(task);
return res;
}
@ -409,7 +407,7 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
async function hashHPointsChunk(offset, nPoints) {
const buff1 = await fdPTau.read(nPoints *sG1, sectionsPTau[2][0].p + (offset + domainSize)*sG1);
const buff2 = await fdPTau.read(nPoints *sG1, sectionsPTau[2][0].p + offset*sG1);
const concurrency= curve.engine.concurrency;
const concurrency= curve.tm.concurrency;
const nPointsPerThread = Math.floor(nPoints / concurrency);
const opPromises = [];
for (let i=0; i<concurrency; i++) {
@ -464,7 +462,7 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
]});
task.push({cmd: "GET", out: 0, var: 2, len: nPoints*sG1});
const res = await curve.engine.queueAction(task);
const res = await curve.tm.queueAction(task);
return res;
}


@ -211,7 +211,7 @@ async function readHeader(fd, sections, protocol) {
zkey.n8r = n8r;
zkey.r = await binFileUtils.readBigInt(fd, n8r);
let curve = getCurve(zkey.q);
let curve = await getCurve(zkey.q);
zkey.nVars = await fd.readULE32();
zkey.nPublic = await fd.readULE32();


@ -42,7 +42,7 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
return false;
}
const delta_g2_sp = hashToG2(c.transcript);
const delta_g2_sp = hashToG2(curve, c.transcript);
sr = await sameRatio(curve, c.delta.g1_s, c.delta.g1_sx, delta_g2_sp, c.delta.g2_spx);
if (sr !== true) {
@ -59,8 +59,8 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
if (c.type == 1) {
const rng = misc.rngFromBeaconParams(c.beaconHash, c.numIterationsExp);
const expected_prvKey = curve.Fr.fromRng(rng);
const expected_g1_s = curve.G1.affine(curve.G1.fromRng(rng));
const expected_g1_sx = curve.G1.affine(curve.G1.mulScalar(expected_g1_s, expected_prvKey));
const expected_g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
const expected_g1_sx = curve.G1.toAffine(curve.G1.timesScalar(expected_g1_s, expected_prvKey));
if (curve.G1.eq(expected_g1_s, c.delta.g1_s) !== true) {
console.log(`INVALID(${i}): Key of the beacon does not match. g1_s `);
return false;


@ -22,8 +22,7 @@ async function groth16Prover(zkeyFileName, witnessFileName, verbose) {
throw new Error(`Invalid witness length. Circuit: ${zkey.nVars}, witness: ${wtns.nWitness}`);
}
const curve = getCurve(zkey.q);
await curve.loadEngine();
const curve = await getCurve(zkey.q);
const Fr = curve.Fr;
const G1 = curve.G1;
const G2 = curve.G2;
@ -41,15 +40,15 @@ async function groth16Prover(zkeyFileName, witnessFileName, verbose) {
const [buffA_T, buffB_T, buffC_T] = await buldABC(curve, zkey, buffWitness, buffCoeffs);
const buffA = await Fr.ifft(buffA_T);
const buffAodd = await Fr.batchApplyKey(buffA, Fr.e(1), curve.PFr.w[power+1]);
const buffAodd = await Fr.batchApplyKey(buffA, Fr.e(1), curve.Fr.w[power+1]);
const buffAodd_T = await Fr.fft(buffAodd);
const buffB = await Fr.ifft(buffB_T);
const buffBodd = await Fr.batchApplyKey(buffB, Fr.e(1), curve.PFr.w[power+1]);
const buffBodd = await Fr.batchApplyKey(buffB, Fr.e(1), curve.Fr.w[power+1]);
const buffBodd_T = await Fr.fft(buffBodd);
const buffC = await Fr.ifft(buffC_T);
const buffCodd = await Fr.batchApplyKey(buffC, Fr.e(1), curve.PFr.w[power+1]);
const buffCodd = await Fr.batchApplyKey(buffC, Fr.e(1), curve.Fr.w[power+1]);
const buffCodd_T = await Fr.fft(buffCodd);
const buffPodd_T = await joinABC(curve, zkey, buffAodd_T, buffBodd_T, buffCodd_T);
@ -65,35 +64,35 @@ async function groth16Prover(zkeyFileName, witnessFileName, verbose) {
const r = curve.Fr.random();
const s = curve.Fr.random();
proof.pi_a = G1.add( proof.pi_a, zkey.vk_alpha_1 );
proof.pi_a = G1.add( proof.pi_a, G1.mulScalar( zkey.vk_delta_1, r ));
proof.pi_a = G1.add( proof.pi_a, G1.timesFr( zkey.vk_delta_1, r ));
proof.pi_b = G2.add( proof.pi_b, zkey.vk_beta_2 );
proof.pi_b = G2.add( proof.pi_b, G2.mulScalar( zkey.vk_delta_2, s ));
proof.pi_b = G2.add( proof.pi_b, G2.timesFr( zkey.vk_delta_2, s ));
pib1 = G1.add( pib1, zkey.vk_beta_1 );
pib1 = G1.add( pib1, G1.mulScalar( zkey.vk_delta_1, s ));
pib1 = G1.add( pib1, G1.timesFr( zkey.vk_delta_1, s ));
proof.pi_c = G1.add(proof.pi_c, resH);
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( proof.pi_a, s ));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( pib1, r ));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( zkey.vk_delta_1, Fr.neg(Fr.mul(r,s) )));
proof.pi_c = G1.add( proof.pi_c, G1.timesFr( proof.pi_a, s ));
proof.pi_c = G1.add( proof.pi_c, G1.timesFr( pib1, r ));
proof.pi_c = G1.add( proof.pi_c, G1.timesFr( zkey.vk_delta_1, Fr.neg(Fr.mul(r,s) )));
const publicSignals = [];
for (let i=1; i<= zkey.nPublic; i++) {
publicSignals.push(Fr.fromRprLE(buffWitness, i*Fr.n8));
const b = buffWitness.slice(i*Fr.n8, i*Fr.n8+Fr.n8);
publicSignals.push(Scalar.fromRprLE(b));
}
proof.pi_a = G1.affine(proof.pi_a);
proof.pi_b = G2.affine(proof.pi_b);
proof.pi_c = G1.affine(proof.pi_c);
proof.pi_a = G1.toObject(G1.toAffine(proof.pi_a));
proof.pi_b = G2.toObject(G2.toAffine(proof.pi_b));
proof.pi_c = G1.toObject(G1.toAffine(proof.pi_c));
proof.protocol = "groth";
proof.protocol = "groth16";
await fdZKey.close();
await fdWtns.close();
@ -103,7 +102,7 @@ async function groth16Prover(zkeyFileName, witnessFileName, verbose) {
async function buldABC(curve, zkey, witness, coeffs) {
const concurrency = curve.engine.concurrency;
const concurrency = curve.tm.concurrency;
const sCoef = 4*3 + zkey.n8r;
const elementsPerChunk = Math.floor(zkey.domainSize/concurrency);
@ -145,7 +144,7 @@ async function buldABC(curve, zkey, witness, coeffs) {
task.push({cmd: "GET", out: 0, var: 2, len: n*curve.Fr.n8});
task.push({cmd: "GET", out: 1, var: 3, len: n*curve.Fr.n8});
task.push({cmd: "GET", out: 2, var: 4, len: n*curve.Fr.n8});
promises.push(curve.engine.queueAction(task));
promises.push(curve.tm.queueAction(task));
}
const result = await Promise.all(promises);
@ -183,7 +182,7 @@ async function buldABC(curve, zkey, witness, coeffs) {
async function joinABC(curve, zkey, a, b, c) {
const concurrency = curve.engine.concurrency;
const concurrency = curve.tm.concurrency;
const n8 = curve.Fr.n8;
const nElements = Math.floor(a.byteLength / curve.Fr.n8);
@ -223,7 +222,7 @@ async function joinABC(curve, zkey, a, b, c) {
{var: 3}
]});
task.push({cmd: "GET", out: 0, var: 3, len: n*n8});
promises.push(curve.engine.queueAction(task));
promises.push(curve.tm.queueAction(task));
}
const result = await Promise.all(promises);
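A small restatement of the public-signal extraction above, as a standalone sketch (function name is illustrative): each signal is an Fr-sized little-endian slice of the witness buffer, decoded with Scalar so the resulting values are plain integers rather than Montgomery-form field elements.

const Scalar = require("ffjavascript").Scalar;

function readPublicSignals(buffWitness, nPublic, n8) {
    const signals = [];
    for (let i = 1; i <= nPublic; i++) {
        const b = buffWitness.slice(i*n8, i*n8 + n8);
        signals.push(Scalar.fromRprLE(b));
    }
    return signals;
}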


@ -19,28 +19,47 @@
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const bn128 = require("ffjavascript").bn128;
const G1 = bn128.G1;
const Scalar = require("ffjavascript").Scalar;
const curves = require("./curves");
module.exports = async function isValid(vk_verifier, proof, publicSignals) {
/*
let cpub = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
cpub = G1.add( cpub, G1.mulScalar( vk_verifier.IC[s+1], publicSignals[s]));
cpub = G1.add( cpub, G1.timesScalar( vk_verifier.IC[s+1], publicSignals[s]));
}
*/
let cpub = await G1.multiExp(vk_verifier.IC.slice(1), publicSignals);
cpub = G1.add(cpub, vk_verifier.IC[0]);
const curve = await curves.getCurveFromName(vk_verifier.curve);
const res = await bn128.pairingEq(
bn128.G1.neg(proof.pi_a) , proof.pi_b,
cpub , vk_verifier.vk_gamma_2,
proof.pi_c , vk_verifier.vk_delta_2,
const IC0 = curve.G1.fromObject(vk_verifier.IC[0]);
const IC = new Uint8Array(curve.G1.F.n8*2 * publicSignals.length);
const w = new Uint8Array(curve.Fr.n8 * publicSignals.length);
vk_verifier.vk_alpha_1, vk_verifier.vk_beta_2
for (let i=0; i<publicSignals.length; i++) {
const buffP = curve.G1.fromObject(vk_verifier.IC[i+1]);
IC.set(buffP, i*curve.G1.F.n8*2);
Scalar.toRprLE(w, curve.Fr.n8*i, publicSignals[i], curve.Fr.n8);
}
let cpub = await curve.G1.multiExpAffine(IC, w);
cpub = curve.G1.add(cpub, IC0);
const pi_a = curve.G1.fromObject(proof.pi_a);
const pi_b = curve.G2.fromObject(proof.pi_b);
const pi_c = curve.G1.fromObject(proof.pi_c);
const vk_gamma_2 = curve.G2.fromObject(vk_verifier.vk_gamma_2);
const vk_delta_2 = curve.G2.fromObject(vk_verifier.vk_delta_2);
const vk_alpha_1 = curve.G1.fromObject(vk_verifier.vk_alpha_1);
const vk_beta_2 = curve.G2.fromObject(vk_verifier.vk_beta_2);
const res = await curve.pairingEq(
curve.G1.neg(pi_a) , pi_b,
cpub , vk_gamma_2,
pi_c , vk_delta_2,
vk_alpha_1, vk_beta_2
);
if (! res) return false;