powersoftaw new, export and contribute

This commit is contained in:
Jordi Baylina 2020-05-09 15:05:45 +02:00
parent 65b722b9af
commit 6598f9df4a
No known key found for this signature in database
GPG Key ID: 7480C80C1BE43112
16 changed files with 2377 additions and 438 deletions

cli.js

@@ -30,271 +30,161 @@ const {stringifyBigInts, unstringifyBigInts} = require("ffjavascript").utils;
const loadR1cs = require("r1csfile").load;
const WitnessCalculatorBuilder = require("circom_runtime").WitnessCalculatorBuilder;
const version = require("./package").version;
const zkeyFile = require("./src/zkeyfile");
const wtnsFile = require("./src/wtnsfile");
const loadSyms = require("./src/loadsyms");
const printR1cs = require("./src/printr1cs");
const argv = require("yargs")
.version(version)
.usage(`snarkjs <command> <options>
const clProcessor = require("./src/clprocessor");
setup command
const powersOfTaw = require("./src/powersoftaw");
const bn128 = require("ffjavascript").bn128;
const commands = [
{
cmd: "r1cs info [circuit.r1cs]",
description: "Print statistiscs of a circuit",
alias: ["ri", "info -r|r1cs:circuit.r1cs"],
action: r1csInfo
},
{
cmd: "r1cs print [circuit.r1cs] [circuit.sym]",
description: "Print the constraints of a circuit",
alias: ["rp", "print -r|r1cs:circuit.r1cs -s|sym"],
action: r1csPrint
},
{
cmd: "witness calculate [circuit.wasm] [input.json] [witness.wtns]",
description: "Caclculate specific witness of a circuit given an input",
alias: ["wc", "calculatewitness -ws|wasm:circuit.wasm -i|input:input.json -wt|witness:witness.wtns"],
action: witnessCalculate
},
{
cmd: "witness debug [circuit.wasm] [input.json] [witness.wtns] [circuit.sym]",
description: "Calculate the witness with debug info.",
longDescription: "Calculate the witness with debug info. \nOptions:\n-g or --g : Log signal gets\n-s or --s : Log signal sets\n-t or --trigger : Log triggers ",
options: "-get|g -set|s -trigger|t",
alias: ["wd"],
action: witnessDebug
},
{
cmd: "zksnark setup [circuit.r1cs] [circuit.zkey] [verification_key.json]",
description: "Run a simple setup for a circuit generating the proving key.",
alias: ["zs", "setup -r1cs|r -provingkey|pk -verificationkey|vk"],
options: "-verbose|v -protocol",
action: zksnarkSetup
},
{
cmd: "zksnark prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]",
description: "Generates a zk Proof",
alias: ["zp", "zksnark proof", "proof -pk|provingkey -wt|witness -p|proof -pub|public"],
options: "-verbose|v -protocol",
action: zksnarkProve
},
{
cmd: "zksnark verify [verification_key.json] [public.json] [proof.json]",
description: "Verify a zk Proof",
alias: ["zv", "verify -vk|verificationkey -pub|public -p|proof"],
action: zksnarkVerify
},
{
cmd: "solidity genverifier <verificationKey.json> <verifier.sol>",
description: "Creates a verifier in solidity",
alias: ["ks", "generateverifier -vk|verificationkey -v|verifier"],
action: solidityGenVerifier
},
{
cmd: "solidity gencall <public.json> <proof.json>",
description: "Generates call parameters ready to be called.",
alias: ["pc", "generatecall -pub|public -p|proof"],
action: solidityGenCall
},
{
cmd: "powersoftaw new <power> [powersoftaw_0000.ptaw]",
description: "Starts a powers of taw ceremony",
alias: ["ptn"],
options: "-verbose|v",
action: powersOfTawNew
},
{
cmd: "powersoftaw export challange <powersoftaw_0000.ptaw> [challange]",
description: "Creates a challange",
alias: ["pte"],
options: "-verbose|v",
action: powersOfTawExportChallange
},
{
cmd: "powersoftaw contribute <challange> [response]",
description: "Contribute to a challange",
alias: ["ptc"],
options: "-verbose|v -entropy|e",
action: powersOfTawContribute
},
];
clProcessor(commands).then( (res) => {
process.exit(res);
}, (err) => {
console.log(err.stack);
console.log("ERROR: " + err);
process.exit(1);
});
/*
TODO COMMANDS
=============
snarkjs setup <option>
{
cmd: "r1cs export circomJSON [circuit.r1cs] [circuit.json]",
description: "Exports a R1CS to JSON file.",
alias: ["rj"],
action: r1csExportCircomJSON
},
{
cmd: "witness export json <witness.wtns> <witness.json>",
description: "Export witness file to json",
alias: ["wj"],
action: witnessExportJson
},
{
cmd: "zkey export vkey <circuit.zkey> <verification_key.json>",
description: "Exports a verification key to JSON",
alias: ["kv"],
action: zKeySolidity
},
{
cmd: "witness verify <circuit.r1cs> <witness.wtns>",
description: "Verify a witness agains a r1cs",
alias: ["wv"],
action: witnessVerify
},
ptau new Starts a ceremony with a new challenge for the powers of Tau ceremony
ptau contribute Contribute in the ceremony of powers of tau
ptau beacon Apply a beacon random to the ceremony
ptau verify Verify the powers of tau ceremony
ptau preparePhase2 Prepare Powers of Tau for a phase 2
phase2 new Starts a second phase ceremony for a given circuit with a first challenge and a reference Hash.
phase2 contribute Contribute in the second phase ceremony
phase2 beacon Contribute in the second phase ceremony with a Powers of Tau
phase2 verify Verify the Powers of tau
zksnark setup s Run a simple setup for a circuit generating the proving key.
zksnark prove p Generates a zk Proof
zksnark verify v Verify a zk Proof
zkey export pkJSON pkjson Exports a proving key to JSON
zkey export vkJSON vkjson Exports a verification key to JSON
zkey export vkSolidity vksol Creates a verifier in solidity
proof callParameters cp Generates call parameters ready to be called.
*/
Runs a setup for a circuit generating the proving and the verification key.
-r or --r1cs <r1csFile>
Filename of the compiled circuit file generated by circom.
Default: circuit.r1cs
--pk or --provingkey <provingKeyFile>
Output filename where the proving key will be stored.
Default: proving_key.json
--vk or --verificationkey <verificationKeyFile>
Output filename where the verification key will be stored.
Default: verification_key.json
--protocol [original|groth|kimleeoh]
Defines which variant of the zk-SNARK protocol you want to use.
Default: groth
--verbose
Print verbose to screen
calculate witness command
=========================
snarkjs calculatewitness <options>
Calculate the witness of a circuit given an input.
--ws --wasm <wasmFile>
Filename of the compiled circuit file generated by circom.
Default: circuit.r1cs
-i or --input <inputFile>
JSON file with the inputs of the circuit.
Default: input.json
Example of a circuit with two inputs a and b:
{"a": "22", "b": "33"}
--wt --witness
Output filename with the generated witness.
Default: witness.json
--lg or --logget
Output GET access to the signals.
--ls or --logset
Output SET access to the signal.
--lt or --logtrigger
Output when a subcomponent is triggered and when finished.
--s or --sanitycheck
-s or --sym <symFile>
Filename of the debugging symbols file generated by circom.
Default: circuit.sym
generate a proof command
========================
snarkjs proof <options>
--wt or --witness
Input filename used to calculate the proof.
Default: witness.json
--pk or --provingkey <provingKeyFile>
Input filename with the proving key (generated during the setup).
Default: proving_key.json
-p or --proof
Output filename with the zero-knowledge proof.
Default: proof.json
--pub or --public <publicFilename>
Output filename with the value of the public wires/signals.
This info will be needed to verify the proof.
Default: public.json
--verbose
Print verbose to screen
verify command
==============
snarkjs verify <options>
The command returns "OK" if the proof is valid
and "INVALID" in case it is not a valid proof.
--vk or --verificationkey <verificationKeyFile>
Input filename with the verification key (generated during the setup).
Default: verification_key.json
-p or --proof
Input filename with the zero-knowledge proof you want to verify.
Default: proof.json
--pub or --public <publicFilename>
Input filename with the public wires/signals.
Default: public.json
generate solidity verifier command
==================================
snarkjs generateverifier <options>
Generates a solidity smart contract that verifies the zero-knowledge proof.
--vk or --verificationkey <verificationKeyFile>
Input filename with the verification key (generated during the setup).
Default: verification_key.json
-v or --verifier
Output file with a solidity smart contract that verifies a zero-knowledge proof.
Default: verifier.sol
generate call parameters
========================
snarkjs generatecall <options>
Outputs into the console the raw parameters to be used in 'verifyProof'
method of the solidity verifier function.
-p or --proof
Input filename with the zero-knowledge proof you want to use.
Default: proof.json
--pub or --public <publicFilename>
Input filename with the public wires/signals.
Default: public.json
circuit info
============
snarkjs info <options>
Print statistics of a circuit.
-r or --r1cs <r1csFile>
Filename of the compiled circuit file generated by circom.
Default: circuit.r1cs
print constraints
=================
snarkjs printconstraints <options>
Print all the constraints of a given circuit.
-r or --r1cs <r1csFile>
Filename of the compiled circuit file generated by circom.
Default: circuit.r1cs
-s or --sym <symFile>
Filename of the debugging symbols file generated by circom.
Default: circuit.sym
`)
.alias("r", "r1cs")
.alias("s", "sym")
.alias("pk", "provingkey")
.alias("vk", "verificationkey")
.alias("wt", "witness")
.alias("ws", "wasm")
.alias("p", "proof")
.alias("i", "input")
.alias("pub", "public")
.alias("v", "verifier")
.alias("lo", "logoutput")
.alias("lg", "logget")
.alias("ls", "logset")
.alias("lt", "logtrigger")
.help("h")
.alias("h", "help")
.epilogue(`Copyright (C) 2018 0kims association
This program comes with ABSOLUTELY NO WARRANTY;
This is free software, and you are welcome to redistribute it
under certain conditions; see the COPYING file in the official
repo directory at https://github.com/iden3/circom `)
.argv;
const r1csName = (argv.r1cs) ? argv.r1cs : "circuit.r1cs";
const symName = (argv.sym) ? argv.sym : "circuit.sym";
const provingKeyName = (argv.provingkey) ? argv.provingkey : "proving_key.json";
const verificationKeyName = (argv.verificationkey) ? argv.verificationkey : "verification_key.json";
const inputName = (argv.input) ? argv.input : "input.json";
const wasmName = (argv.wasm) ? argv.wasm : "circuit.wasm";
const witnessName = (argv.witness) ? argv.witness : "witness.json";
const proofName = (argv.proof) ? argv.proof : "proof.json";
const publicName = (argv.public) ? argv.public : "public.json";
const verifierName = (argv.verifier) ? argv.verifier : "verifier.sol";
const protocol = (argv.protocol) ? argv.protocol : "groth";
run().then(() => {
process.exit();
});
function p256(n) {
let nstr = n.toString(16);
@@ -303,159 +193,327 @@ function p256(n) {
return nstr;
}
async function run() {
try {
if (argv._[0].toUpperCase() == "INFO") {
const cir = await loadR1cs(r1csName);
console.log(`# Wires: ${cir.nVars}`);
console.log(`# Constraints: ${cir.nConstraints}`);
console.log(`# Private Inputs: ${cir.nPrvInputs}`);
console.log(`# Public Inputs: ${cir.nPubInputs}`);
console.log(`# Outputs: ${cir.nOutputs}`);
} else if (argv._[0].toUpperCase() == "PRINTCONSTRAINTS") {
const cir = await loadR1cs(r1csName, true, true);
const sym = await loadSyms(symName);
printR1cs(cir, sym);
} else if (argv._[0].toUpperCase() == "SETUP") {
const cir = await loadR1cs(r1csName, true);
if (!zkSnark[protocol]) throw new Error("Invalid protocol");
const setup = zkSnark[protocol].setup(cir, argv.verbose);
await fs.promises.writeFile(provingKeyName, JSON.stringify(stringifyBigInts(setup.vk_proof), null, 1), "utf-8");
await fs.promises.writeFile(verificationKeyName, JSON.stringify(stringifyBigInts(setup.vk_verifier), null, 1), "utf-8");
} else if (argv._[0].toUpperCase() == "CALCULATEWITNESS") {
const wasm = await fs.promises.readFile(wasmName);
const input = unstringifyBigInts(JSON.parse(await fs.promises.readFile(inputName, "utf8")));
let options;
let sym;
if (argv.logset || argv.logget || argv.logtrigger || argv.sanitycheck) {
options = {
sanityCheck: true
};
if (argv.logset) {
if (!sym) sym = await loadSyms(symName);
options.logSetSignal= function(labelIdx, value) {
console.log("SET " + sym.labelIdx2Name[labelIdx] + " <-- " + value.toString());
};
}
if (argv.logget) {
if (!sym) sym = await loadSyms(symName);
options.logGetSignal= function(varIdx, value) {
console.log("GET " + sym.labelIdx2Name[varIdx] + " --> " + value.toString());
};
}
if (argv.logtrigger) {
if (!sym) sym = await loadSyms(symName);
options.logStartComponent= function(cIdx) {
console.log("START: " + sym.componentIdx2Name[cIdx]);
};
options.logFinishComponent= function(cIdx) {
console.log("FINISH: " + sym.componentIdx2Name[cIdx]);
};
}
}
const wc = await WitnessCalculatorBuilder(wasm, options);
const w = await wc.calculateWitness(input);
await fs.promises.writeFile(witnessName, JSON.stringify(stringifyBigInts(w), null, 1));
} else if (argv._[0].toUpperCase() == "PROOF") {
const witness = unstringifyBigInts(JSON.parse(fs.readFileSync(witnessName, "utf8")));
const provingKey = unstringifyBigInts(JSON.parse(fs.readFileSync(provingKeyName, "utf8")));
const protocol = provingKey.protocol;
if (!zkSnark[protocol]) throw new Error("Invalid protocol");
const {proof, publicSignals} = zkSnark[protocol].genProof(provingKey, witness, argv.verbose);
await fs.promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8");
await fs.promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8");
} else if (argv._[0].toUpperCase() == "VERIFY") {
const public = unstringifyBigInts(JSON.parse(fs.readFileSync(publicName, "utf8")));
const verificationKey = unstringifyBigInts(JSON.parse(fs.readFileSync(verificationKeyName, "utf8")));
const proof = unstringifyBigInts(JSON.parse(fs.readFileSync(proofName, "utf8")));
const protocol = verificationKey.protocol;
if (!zkSnark[protocol]) throw new Error("Invalid protocol");
const isValid = zkSnark[protocol].isValid(verificationKey, proof, public);
if (isValid) {
console.log("OK");
process.exit(0);
} else {
console.log("INVALID");
process.exit(1);
}
} else if (argv._[0].toUpperCase() == "GENERATEVERIFIER") {
const verificationKey = unstringifyBigInts(JSON.parse(fs.readFileSync(verificationKeyName, "utf8")));
let verifierCode;
if (verificationKey.protocol == "original") {
verifierCode = generateVerifier_original(verificationKey);
} else if (verificationKey.protocol == "groth") {
verifierCode = generateVerifier_groth(verificationKey);
} else if (verificationKey.protocol == "kimleeoh") {
verifierCode = generateVerifier_kimleeoh(verificationKey);
} else {
throw new Error("InvalidProof");
}
fs.writeFileSync(verifierName, verifierCode, "utf-8");
process.exit(0);
} else if (argv._[0].toUpperCase() == "GENERATECALL") {
const public = unstringifyBigInts(JSON.parse(fs.readFileSync(publicName, "utf8")));
const proof = unstringifyBigInts(JSON.parse(fs.readFileSync(proofName, "utf8")));
let inputs = "";
for (let i=0; i<public.length; i++) {
if (inputs != "") inputs = inputs + ",";
inputs = inputs + p256(public[i]);
}
let S;
if ((typeof proof.protocol === "undefined") || (proof.protocol == "original")) {
S=`[${p256(proof.pi_a[0])}, ${p256(proof.pi_a[1])}],` +
`[${p256(proof.pi_ap[0])}, ${p256(proof.pi_ap[1])}],` +
`[[${p256(proof.pi_b[0][1])}, ${p256(proof.pi_b[0][0])}],[${p256(proof.pi_b[1][1])}, ${p256(proof.pi_b[1][0])}]],` +
`[${p256(proof.pi_bp[0])}, ${p256(proof.pi_bp[1])}],` +
`[${p256(proof.pi_c[0])}, ${p256(proof.pi_c[1])}],` +
`[${p256(proof.pi_cp[0])}, ${p256(proof.pi_cp[1])}],` +
`[${p256(proof.pi_h[0])}, ${p256(proof.pi_h[1])}],` +
`[${p256(proof.pi_kp[0])}, ${p256(proof.pi_kp[1])}],` +
`[${inputs}]`;
} else if ((proof.protocol == "groth")||(proof.protocol == "kimleeoh")) {
S=`[${p256(proof.pi_a[0])}, ${p256(proof.pi_a[1])}],` +
`[[${p256(proof.pi_b[0][1])}, ${p256(proof.pi_b[0][0])}],[${p256(proof.pi_b[1][1])}, ${p256(proof.pi_b[1][0])}]],` +
`[${p256(proof.pi_c[0])}, ${p256(proof.pi_c[1])}],` +
`[${inputs}]`;
} else {
throw new Error("InvalidProof");
}
console.log(S);
process.exit(0);
} else {
throw new Error("Invalid Command");
}
} catch(err) {
console.log(err.stack);
console.log("ERROR: " + err);
process.exit(1);
function changeExt(fileName, newExt) {
let S = fileName;
while ((S.length>0) && (S[S.length-1] != ".")) S = S.slice(0, S.length-1);
if (S.length>0) {
return S + newExt;
} else {
return fileName+"."+newExt;
}
}
// r1cs export circomJSON [circuit.r1cs] [circuit.json]
async function r1csInfo(params, options) {
const r1csName = params[0] || "circuit.r1cs";
const cir = await loadR1cs(r1csName);
console.log(`# Wires: ${cir.nVars}`);
console.log(`# Constraints: ${cir.nConstraints}`);
console.log(`# Private Inputs: ${cir.nPrvInputs}`);
console.log(`# Public Inputs: ${cir.nPubInputs}`);
console.log(`# Outputs: ${cir.nOutputs}`);
return 0;
}
// r1cs print [circuit.r1cs] [circuit.sym]
async function r1csPrint(params, options) {
const r1csName = params[0] || "circuit.r1cs";
const symName = params[1] || changeExt(r1csName, "sym");
const cir = await loadR1cs(r1csName, true, true);
const sym = await loadSyms(symName);
printR1cs(cir, sym);
return 0;
}
// witness calculate <circuit.wasm> <input.json> <witness.wtns>
async function witnessCalculate(params, options) {
const wasmName = params[0] || "circuit.wasm";
const inputName = params[1] || "input.json";
const witnessName = params[2] || "witness.wtns";
const wasm = await fs.promises.readFile(wasmName);
const input = unstringifyBigInts(JSON.parse(await fs.promises.readFile(inputName, "utf8")));
const wc = await WitnessCalculatorBuilder(wasm, options);
const w = await wc.calculateBinWitness(input);
await wtnsFile.writeBin(witnessName, w, wc.prime);
/*
const w = await wc.calculateWitness(input);
await wtnsFile.write(witnessName, w, wc.prime);
*/
// fs.promises.writeFile(witnessName, JSON.stringify(stringifyBigInts(w), null, 1));
return 0;
}
// witness debug <circuit.wasm> <input.json> <witness.wtns> <circuit.sym>
// -get|g -set|s -trigger|t
async function witnessDebug(params, options) {
const wasmName = params[0] || "circuit.wasm";
const inputName = params[1] || "input.json";
const witnessName = params[2] || "witness.wtns";
const symName = params[3] || changeExt(wasmName, "sym");
const wasm = await fs.promises.readFile(wasmName);
const input = unstringifyBigInts(JSON.parse(await fs.promises.readFile(inputName, "utf8")));
let wcOps = {
sanityCheck: true
};
let sym = await loadSyms(symName);
if (options.set) {
if (!sym) sym = await loadSyms(symName);
wcOps.logSetSignal= function(labelIdx, value) {
console.log("SET " + sym.labelIdx2Name[labelIdx] + " <-- " + value.toString());
};
}
if (options.get) {
if (!sym) sym = await loadSyms(symName);
wcOps.logGetSignal= function(varIdx, value) {
console.log("GET " + sym.labelIdx2Name[varIdx] + " --> " + value.toString());
};
}
if (options.trigger) {
if (!sym) sym = await loadSyms(symName);
wcOps.logStartComponent= function(cIdx) {
console.log("START: " + sym.componentIdx2Name[cIdx]);
};
wcOps.logFinishComponent= function(cIdx) {
console.log("FINISH: " + sym.componentIdx2Name[cIdx]);
};
}
const wc = await WitnessCalculatorBuilder(wasm, wcOps);
const w = await wc.calculateWitness(input);
await wtnsFile.write(witnessName, w);
// await fs.promises.writeFile(witnessName, JSON.stringify(stringifyBigInts(w), null, 1));
return 0;
}
// zksnark setup [circuit.r1cs] [circuit.zkey] [verification_key.json]
async function zksnarkSetup(params, options) {
const r1csName = params[0] || "circuit.r1cs";
const zkeyName = params[1] || changeExt(r1csName, "zkey");
const verificationKeyName = params[2] || "verification_key.json";
const protocol = options.protocol || "groth16";
const cir = await loadR1cs(r1csName, true);
if (!zkSnark[protocol]) throw new Error("Invalid protocol");
const setup = zkSnark[protocol].setup(cir, options.verbose);
await zkeyFile.write(zkeyName, setup.vk_proof);
// await fs.promises.writeFile(provingKeyName, JSON.stringify(stringifyBigInts(setup.vk_proof), null, 1), "utf-8");
await fs.promises.writeFile(verificationKeyName, JSON.stringify(stringifyBigInts(setup.vk_verifier), null, 1), "utf-8");
return 0;
}
// zksnark prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]
async function zksnarkProve(params, options) {
const zkeyName = params[0] || "circuit.zkey";
const witnessName = params[1] || "witness.wtns";
const proofName = params[2] || "proof.json";
const publicName = params[3] || "public.json";
const witness = await wtnsFile.read(witnessName);
// const witness = unstringifyBigInts(JSON.parse(fs.readFileSync(witnessName, "utf8")));
const provingKey = await zkeyFile.read(zkeyName);
// const provingKey = unstringifyBigInts(JSON.parse(fs.readFileSync(provingKeyName, "utf8")));
const protocol = provingKey.protocol;
if (!zkSnark[protocol]) throw new Error("Invalid protocol");
const {proof, publicSignals} = zkSnark[protocol].genProof(provingKey, witness, options.verbose);
await fs.promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8");
await fs.promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8");
return 0;
}
// zksnark verify [verification_key.json] [public.json] [proof.json]
async function zksnarkVerify(params, options) {
const verificationKeyName = params[0] || "verification_key.json";
const publicName = params[1] || "public.json";
const proofName = params[2] || "proof.json";
const verificationKey = unstringifyBigInts(JSON.parse(fs.readFileSync(verificationKeyName, "utf8")));
const pub = unstringifyBigInts(JSON.parse(fs.readFileSync(publicName, "utf8")));
const proof = unstringifyBigInts(JSON.parse(fs.readFileSync(proofName, "utf8")));
const protocol = verificationKey.protocol;
if (!zkSnark[protocol]) throw new Error("Invalid protocol");
const isValid = zkSnark[protocol].isValid(verificationKey, proof, pub);
if (isValid) {
console.log("OK");
return 0;
} else {
console.log("INVALID");
return 1;
}
}
// solidity genverifier <verificationKey.json> <verifier.sol>
async function solidityGenVerifier(params, options) {
let verificationKeyName;
let verifierName;
if (params.length < 1) {
verificationKeyName = "verification_key.json";
} else {
verificationKeyName = params[0];
}
if (params.length < 2) {
verifierName = "verifier.sol";
} else {
verifierName = params[1];
}
const verificationKey = unstringifyBigInts(JSON.parse(fs.readFileSync(verificationKeyName, "utf8")));
let verifierCode;
if (verificationKey.protocol == "original") {
verifierCode = generateVerifier_original(verificationKey);
} else if (verificationKey.protocol == "groth16") {
verifierCode = generateVerifier_groth16(verificationKey);
} else if (verificationKey.protocol == "kimleeoh") {
verifierCode = generateVerifier_kimleeoh(verificationKey);
} else {
throw new Error("InvalidProof");
}
fs.writeFileSync(verifierName, verifierCode, "utf-8");
return 0;
}
// solidity gencall <public.json> <proof.json>
async function solidityGenCall(params, options) {
let publicName;
let proofName;
if (params.length < 1) {
publicName = "public.json";
} else {
publicName = params[0];
}
if (params.length < 2) {
proofName = "proof.json";
} else {
proofName = params[1];
}
const public = unstringifyBigInts(JSON.parse(fs.readFileSync(publicName, "utf8")));
const proof = unstringifyBigInts(JSON.parse(fs.readFileSync(proofName, "utf8")));
let inputs = "";
for (let i=0; i<public.length; i++) {
if (inputs != "") inputs = inputs + ",";
inputs = inputs + p256(public[i]);
}
let S;
if ((typeof proof.protocol === "undefined") || (proof.protocol == "original")) {
S=`[${p256(proof.pi_a[0])}, ${p256(proof.pi_a[1])}],` +
`[${p256(proof.pi_ap[0])}, ${p256(proof.pi_ap[1])}],` +
`[[${p256(proof.pi_b[0][1])}, ${p256(proof.pi_b[0][0])}],[${p256(proof.pi_b[1][1])}, ${p256(proof.pi_b[1][0])}]],` +
`[${p256(proof.pi_bp[0])}, ${p256(proof.pi_bp[1])}],` +
`[${p256(proof.pi_c[0])}, ${p256(proof.pi_c[1])}],` +
`[${p256(proof.pi_cp[0])}, ${p256(proof.pi_cp[1])}],` +
`[${p256(proof.pi_h[0])}, ${p256(proof.pi_h[1])}],` +
`[${p256(proof.pi_kp[0])}, ${p256(proof.pi_kp[1])}],` +
`[${inputs}]`;
} else if ((proof.protocol == "groth16")||(proof.protocol == "kimleeoh")) {
S=`[${p256(proof.pi_a[0])}, ${p256(proof.pi_a[1])}],` +
`[[${p256(proof.pi_b[0][1])}, ${p256(proof.pi_b[0][0])}],[${p256(proof.pi_b[1][1])}, ${p256(proof.pi_b[1][0])}]],` +
`[${p256(proof.pi_c[0])}, ${p256(proof.pi_c[1])}],` +
`[${inputs}]`;
} else {
throw new Error("InvalidProof");
}
console.log(S);
return 0;
}
async function powersOfTawNew(params, options) {
let power;
let ptawName;
power = parseInt(params[0]);
if ((power<1) || (power>27)) {
throw new Error("Power must be between 1 and 27");
}
if (params.length < 2) {
ptawName = "powersOfTaw" + power + "_0000.ptaw";
} else {
ptawName = params[1];
}
return await powersOfTaw.newAccumulator(bn128, power, ptawName, options.verbose);
}
async function powersOfTawExportChallange(params, options) {
let ptawName;
let challangeName;
ptawName = params[0];
if (params.length < 2) {
challangeName = "challange";
} else {
challangeName = params[1];
}
return await powersOfTaw.exportChallange(ptawName, challangeName, options.verbose);
}
async function powersOfTawContribute(params, options) {
let challangeName;
let responseName;
challangeName = params[0];
if (params.length < 2) {
responseName = changeExt(challangeName, "response");
} else {
responseName = params[1];
}
return await powersOfTaw.contribute(bn128, challangeName, responseName, options.entropy, options.verbose);
}
function generateVerifier_original(verificationKey) {
let template = fs.readFileSync(path.join( __dirname, "templates", "verifier_original.sol"), "utf-8");
@@ -514,8 +572,8 @@ function generateVerifier_original(verificationKey) {
}
function generateVerifier_groth(verificationKey) {
let template = fs.readFileSync(path.join( __dirname, "templates", "verifier_groth.sol"), "utf-8");
function generateVerifier_groth16(verificationKey) {
let template = fs.readFileSync(path.join( __dirname, "templates", "verifier_groth16.sol"), "utf-8");
const vkalfa1_str = `${verificationKey.vk_alfa_1[0].toString()},`+
@@ -556,7 +614,9 @@ function generateVerifier_groth(verificationKey) {
}
function generateVerifier_kimleeoh(verificationKey) {
let template = fs.readFileSync(path.join( __dirname, "templates", "verifier_groth.sol"), "utf-8");
assert(false); // Not implemented yet because it requires G2 exponentiation onchain.
let template = fs.readFileSync(path.join( __dirname, "templates", "verifier_groth16.sol"), "utf-8");
const vkalfa1_str = `${verificationKey.vk_alfa_1[0].toString()},`+
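The three new commands delegate to src/powersoftaw.js below. A minimal sketch of driving that module directly, using the function signatures added in this commit (the file names and the power value here are illustrative):

    const bn128 = require("ffjavascript").bn128;
    const powersOfTaw = require("./src/powersoftaw");

    async function demoCeremony() {
        // 1. Start a fresh accumulator for a domain of size 2^10.
        await powersOfTaw.newAccumulator(bn128, 10, "powersOfTaw10_0000.ptaw", false);
        // 2. Export the binary challange file the first contributor works on.
        await powersOfTaw.exportChallange("powersOfTaw10_0000.ptaw", "challange", false);
        // 3. Apply a contribution, writing a response file derived from the entropy.
        await powersOfTaw.contribute(bn128, "challange", "response", "some entropy", false);
    }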


@@ -22,10 +22,10 @@ exports.original = {
genProof: require("./src/prover_original.js"),
isValid: require("./src/verifier_original.js")
};
exports.groth = {
setup: require("./src/setup_groth.js"),
genProof: require("./src/prover_groth.js"),
isValid: require("./src/verifier_groth.js")
exports.groth16 = {
setup: require("./src/setup_groth16.js"),
genProof: require("./src/prover_groth16.js"),
isValid: require("./src/verifier_groth16.js")
};
exports.kimleeoh = {
setup: require("./src/setup_kimleeoh.js"),

package-lock.json (generated)

@@ -144,6 +144,23 @@
"integrity": "sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow==",
"dev": true
},
"blake2b": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/blake2b/-/blake2b-2.1.3.tgz",
"integrity": "sha512-pkDss4xFVbMb4270aCyGD3qLv92314Et+FsKzilCLxDz5DuZ2/1g3w4nmBbu6nKApPspnjG7JcwTjGZnduB1yg==",
"requires": {
"blake2b-wasm": "^1.1.0",
"nanoassert": "^1.0.0"
}
},
"blake2b-wasm": {
"version": "1.1.7",
"resolved": "https://registry.npmjs.org/blake2b-wasm/-/blake2b-wasm-1.1.7.tgz",
"integrity": "sha512-oFIHvXhlz/DUgF0kq5B1CqxIDjIJwh9iDeUUGQUcvgiGz7Wdw03McEO7CfLBy7QKGdsydcMCgO9jFNBAFCtFcA==",
"requires": {
"nanoassert": "^1.0.0"
}
},
"brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
@@ -285,6 +302,11 @@
"integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=",
"dev": true
},
"commander": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz",
"integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg=="
},
"concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
@@ -1345,6 +1367,11 @@
"integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==",
"dev": true
},
"nanoassert": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/nanoassert/-/nanoassert-1.1.0.tgz",
"integrity": "sha1-TzFS4JVA/eKMdvRLGbvNHVpCR40="
},
"natural-compare": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",


@@ -28,8 +28,10 @@
"url": "https://github.com/iden3/snarkjs.git"
},
"dependencies": {
"blake2b": "^2.1.3",
"chai": "^4.2.0",
"circom_runtime": "0.0.6",
"commander": "^5.1.0",
"escape-string-regexp": "^1.0.5",
"ffjavascript": "0.1.0",
"keccak": "^3.0.0",

src/clprocessor.js (new file)

@@ -0,0 +1,237 @@
#!/usr/bin/env node
const version = require("../package").version;
let selectedCommand = null;
module.exports = async function clProcessor(commands) {
const cl = [];
const argv = {};
for (let i=2; i<process.argv.length; i++) {
if (process.argv[i][0] == "-") {
let S = process.argv[i];
while (S[0] == "-") S = S.slice(1);
const arr = S.split("=");
if (arr.length > 1) {
argv[arr[0]] = arr.slice(1).join("=");
} else {
argv[arr[0]] = true;
}
} else {
cl.push(process.argv[i]);
}
}
for (let i=0; i<commands.length; i++) {
const cmd = commands[i];
const m = calculateMatch(commands[i], cl);
if (m) {
if ((argv.h) || (argv.help)) {
helpCmd(cmd);
return;
}
if (areParamsValid(cmd.cmd, m)) {
if (cmd.options) {
const options = getOptions(cmd.options);
await cmd.action(m, options);
} else {
await cmd.action(m);
}
} else {
if (m.length>0) console.log("Invalid number of parameters");
helpCmd(cmd);
}
return;
}
}
if (cl.length>0) console.log("Invalid command");
helpAll();
function calculateMatch(cmd, cl) {
const alias = [];
alias.push(parseLine(cmd.cmd));
if (cmd.alias) {
if (Array.isArray(cmd.alias)) {
for (let i=0; i<cmd.alias.length; i++) {
alias.push(parseLine(cmd.alias[i]));
}
} else {
alias.push(parseLine(cmd.alias));
}
}
for (let i=0; i<cl.length; i++) {
for (let j=0; j<alias.length; j++) {
const w = alias[j].cmd.shift();
if (cl[i].toUpperCase() == w.toUpperCase()) {
if (alias[j].cmd.length == 0) {
return buildRemaining(alias[j].params, cl.slice(i+1));
}
} else {
alias.splice(j, 1);
j--;
}
}
}
return null;
function buildRemaining(defParams, cl) {
const res = [];
let p=0;
for (let i=0; i<defParams.length; i++) {
if (defParams[i][0]=="-") {
res.push(getOption(defParams[i]).val);
} else {
if (p<cl.length) {
res.push(cl[p++]);
} else {
res.push(null);
}
}
}
while (p<cl.length) {
res.push(cl[p++]);
}
return res;
}
}
function parseLine(l) {
const words = l.match(/(\S+)/g);
for (let i=0; i<words.length; i++) {
if ( (words[i][0] == "<")
|| (words[i][0] == "[")
|| (words[i][0] == "-"))
{
return {
cmd: words.slice(0,i),
params: words.slice(i)
};
}
}
return {
cmd: words,
params: []
};
}
function getOption(o) {
const arr1 = o.slice(1).split(":");
const arr2 = arr1[0].split("|");
for (let i = 0; i<arr2.length; i++) {
if (argv[arr2[i]]) return {
key: arr2[0],
val: argv[arr2[i]]
};
}
return {
key: arr2[0],
val: (arr1.length >1) ? arr1[1] : null
};
}
function areParamsValid(cmd, params) {
const pl = parseLine(cmd);
if (params.length > pl.params.length) return false;
let minParams = pl.params.length;
while ((minParams>0)&&(pl.params[minParams-1][0] == "[")) minParams --;
if (params.length < minParams) return false;
for (let i=0; (i< pl.params.length)&&(pl.params[i][0]=="<"); i++) {
if (typeof params[i] == "undefined") return false;
}
return true;
}
function getOptions(options) {
const res = {};
const opts = options.match(/(\S+)/g);
for (let i=0; i<opts.length; i++) {
const o = getOption(opts[i]);
res[o.key] = o.val;
}
return res;
}
function printVersion() {
console.log("snarkjs@"+version);
}
function epilog() {
console.log(` Copyright (C) 2018 0kims association
This program comes with ABSOLUTELY NO WARRANTY;
This is free software, and you are welcome to redistribute it
under certain conditions; see the COPYING file in the official
repo directory at https://github.com/iden3/snarkjs `);
}
function helpAll() {
printVersion();
epilog();
console.log("");
console.log("Usage:");
console.log(" snarkjs <full command> ... <options>");
console.log(" or snarkjs <shorcut> ... <options>");
console.log("");
console.log("Type snarkjs <command> --help to get more information for that command");
console.log("");
console.log("Full Command Description");
console.log("============ =================");
for (let i=0; i<commands.length; i++) {
const cmd = commands[i];
let S = "";
const pl = parseLine(cmd.cmd);
S += pl.cmd.join(" ");
while (S.length<30) S = S+" ";
S += cmd.description;
console.log(S);
S = " Usage: snarkjs ";
if (cmd.alias) {
if (Array.isArray(cmd.alias)) {
S += cmd.alias[0];
} else {
S += cmd.alias;
}
} else {
S += pl.cmd.join(" ");
}
S += " " + pl.params.join(" ");
console.log(S);
// console.log("");
}
}
function helpCmd(cmd) {
if (typeof cmd == "undefined") cmd = selectedCommand;
if (typeof cmd == "undefined") return helpAll();
printVersion();
epilog();
console.log("");
if (cmd.longDescription) {
console.log(cmd.longDescription);
} else {
console.log(cmd.description);
}
console.log("Usage: ");
console.log(" snarkjs "+ cmd.cmd);
const pl = parseLine(cmd.cmd);
let S = " or snarkjs ";
if (cmd.alias) {
if (Array.isArray(cmd.alias)) {
S += cmd.alias[0];
} else {
S += cmd.alias;
}
} else {
S += pl.cmd.join(" ");
}
S += " " + pl.params.join(" ");
console.log(S);
console.log("");
}
}
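clProcessor matches the command line against each descriptor's cmd and alias strings, where <param> is required, [param] is optional, and options follow the -long|short:default pattern handled by getOption. A hedged, self-contained example of the descriptor shape (the greet command is invented for illustration):

    const clProcessor = require("./src/clprocessor");

    const commands = [
        {
            cmd: "greet hello <name> [outFile]",
            description: "Print a greeting",
            alias: ["gh"],
            options: "-verbose|v -lang|l:en",
            action: async function (params, options) {
                console.log("Hello " + params[0] + " (" + options.lang + ")");
                return 0;
            }
        }
    ];

    // "node cli.js greet hello world -v" runs
    // action(["world", null], {verbose: true, lang: "en"})
    clProcessor(commands).then((code) => process.exit(code));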

src/keypair.js (new file)

@@ -0,0 +1,44 @@
const bn128 = require("ffjavascript").bn128;
const utils = require("ffjavascript").utils;
const blake2b = require("blake2b");
const ChaCha = require("ffjavascript").ChaCha;
function getG2sp(personalization, challange, g1s, g1sx) {
const h = blake2b(64);
h.update(Buffer.from([personalization]));
h.update(challange);
h.update( utils.beInt2Buff(g1s[0],32));
h.update( utils.beInt2Buff(g1s[1],32));
h.update( utils.beInt2Buff(g1sx[0],32));
h.update( utils.beInt2Buff(g1sx[1],32));
const hash = Buffer.from(h.digest());
const seed = [];
for (let i=0; i<8; i++) {
seed[i] = hash.readUInt32BE(i*4);
}
const rng = new ChaCha(seed);
const g2_sp = bn128.G2.fromRng(rng);
return g2_sp;
}
function createKeyPair(curve, personalization, challangeHash, rng ) {
const k = {};
k.prvKey= curve.Fr.fromRng(rng);
k.g1_s = curve.G1.affine(curve.G1.fromRng(rng));
k.g1_sx = curve.G1.affine(curve.G1.mulScalar(k.g1_s, k.prvKey));
k.g2_sp = curve.G2.affine(getG2sp(personalization, challangeHash, k.g1_s, k.g1_sx));
k.g2_spx = curve.G2.affine(curve.G2.mulScalar(k.g2_sp, k.prvKey));
return k;
}
module.exports.create = createKeyPair;
module.exports.getG2sp = getG2sp;
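The pair returned by create is the usual MPC proof of knowledge: g1_sx and g2_spx are g1_s and g2_sp raised to the same secret prvKey, so a verifier can confirm the two discrete logs match with one pairing equation, without learning the key. A sketch of that check, assuming ffjavascript's bn128.pairing and F12 field (the check itself is not part of this commit):

    const bn128 = require("ffjavascript").bn128;
    const ChaCha = require("ffjavascript").ChaCha;
    const keyPair = require("./src/keypair");

    const rng = new ChaCha([1, 2, 3, 4, 5, 6, 7, 8]);      // toy seed, illustration only
    const k = keyPair.create(bn128, 0, Buffer.alloc(64), rng);

    const left = bn128.pairing(k.g1_sx, k.g2_sp);           // e(s*x, sp)
    const right = bn128.pairing(k.g1_s, k.g2_spx);          // e(s, sp*x)
    console.log(bn128.F12.eq(left, right));                 // expected: true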

src/powersoftaw.js (new file)

@@ -0,0 +1,573 @@
/*
Header
n8
prime
power
nContributions
tauG1
[(1<<power)*2-1] G1
tauG2
[1<<power] G2
alfaTauG1
[1<<power] G1
betaTauG1
[1<<power] G1
betaG2
[1] G2
contributions
[NContributions]
tauG1
tauG2
alphaTauG1
betaTauG1
betaG2
partialHash
state
tau_g1s
tau_g1sx
tau_g2spx
alfa_g1s
alfa_g1sx
alfa_g1spx
beta_g1s
beta_g1sx
beta_g1spx
*/
const fastFile = require("fastfile");
const Scalar = require("ffjavascript").Scalar;
const assert = require("assert");
const bn128 = require("ffjavascript").bn128;
const blake2b = require("blake2b");
const readline = require("readline");
const crypto = require("crypto");
const ChaCha = require("ffjavascript").ChaCha;
const fs = require("fs");
const buildTaskManager = require("./taskmanager");
const keyPair = require("./keypair");
async function newAccumulator(curve, power, fileName, verbose) {
const fd = await fastFile.createOverride(fileName);
await fd.write(Buffer.from("ptau"), 0); // Magic "r1cs"
await fd.writeULE32(1); // Version
await fd.writeULE32(7); // Number of Sections
// Write the header
///////////
await fd.writeULE32(1); // Header type
const pHeaderSize = fd.pos;
await fd.writeULE64(0); // Temporarily set to 0 length
const primeQ = curve.q;
await fd.writeULE32(curve.F1.n64*8);
await fd.write(Scalar.toRprLE(primeQ, curve.F1.n64*8));
await fd.writeULE32(power); // power
await fd.writeULE32(0); // Total number of public contributions
const headerSize = fd.pos - pHeaderSize - 8;
// Write tauG1
///////////
await fd.writeULE32(2); // tauG1
const pTauG1 = fd.pos;
await fd.writeULE64(0); // Temporarily set to 0 length
const nTauG1 = (1 << power) * 2 -1;
for (let i=0; i< nTauG1; i++) {
await fd.write(curve.G1.toRprLEM(curve.G1.g));
if ((verbose)&&((i%100000) == 0)&&i) console.log("tauG1: " + i);
}
const tauG1Size = fd.pos - pTauG1 -8;
// Write tauG2
///////////
await fd.writeULE32(3); // tauG2
const pTauG2 = fd.pos;
await fd.writeULE64(0); // Temporarily set to 0 length
const nTauG2 = (1 << power);
for (let i=0; i< nTauG2; i++) {
await fd.write(curve.G2.toRprLEM(curve.G2.g));
if ((verbose)&&((i%100000) == 0)&&i) console.log("tauG2: " + i);
}
const tauG2Size = fd.pos - pTauG2 -8;
// Write alfaTauG1
///////////
await fd.writeULE32(4); // alfaTauG1
const pAlfaTauG1 = fd.pos;
await fd.writeULE64(0); // Temporarily set to 0 length
const nAlfaTauG1 = (1 << power);
for (let i=0; i< nAlfaTauG1; i++) {
await fd.write(curve.G1.toRprLEM(curve.G1.g));
if ((verbose)&&((i%100000) == 0)&&i) console.log("alfaTauG1: " + i);
}
const alfaTauG1Size = fd.pos - pAlfaTauG1 -8;
// Write betaTauG1
///////////
await fd.writeULE32(5); // betaTauG1
const pBetaTauG1 = fd.pos;
await fd.writeULE64(0); // Temporarily set to 0 length
const nBetaTauG1 = (1 << power);
for (let i=0; i< nBetaTauG1; i++) {
await fd.write(curve.G1.toRprLEM(curve.G1.g));
if ((verbose)&&((i%100000) == 0)&&i) console.log("betaTauG1: " + i);
}
const betaTauG1Size = fd.pos - pBetaTauG1 -8;
// Write betaG2
///////////
await fd.writeULE32(6); // betaG2
const pBetaG2 = fd.pos;
await fd.writeULE64(0); // Temporarily set to 0 length
await fd.write(curve.G2.toRprLEM(curve.G2.g));
const betaG2Size = fd.pos - pBetaG2 -8;
// Contributions
///////////
await fd.writeULE32(7); // contributions
const pContributions = fd.pos;
await fd.writeULE64(0); // Temporarily set to 0 length
const contributionsSize = fd.pos - pContributions -8;
// Write sizes
await fd.writeULE64(headerSize, pHeaderSize);
await fd.writeULE64(tauG1Size, pTauG1);
await fd.writeULE64(tauG2Size, pTauG2);
await fd.writeULE64(alfaTauG1Size, pAlfaTauG1);
await fd.writeULE64(betaTauG1Size, pBetaTauG1);
await fd.writeULE64(betaG2Size, pBetaG2);
await fd.writeULE64(contributionsSize, pContributions);
await fd.close();
}
async function exportChallange(pTauFilename, challangeFilename, verbose) {
const fdFrom = await fastFile.readExisting(pTauFilename);
const b = await fdFrom.read(4);
if (b.toString() != "ptau") assert(false, "Invalid File format");
let v = await fdFrom.readULE32();
if (v>1) assert(false, "Version not supported");
const nSections = await fdFrom.readULE32();
// Scan sections
let sections = [];
for (let i=0; i<nSections; i++) {
let ht = await fdFrom.readULE32();
let hl = await fdFrom.readULE64();
if (typeof sections[ht] == "undefined") sections[ht] = [];
sections[ht].push({
p: fdFrom.pos,
size: hl
});
fdFrom.pos += hl;
}
if (!sections[1]) assert(false, "File has no header");
if (sections[1].length>1) assert(false, "File has more than one header");
fdFrom.pos = sections[1][0].p;
const n8 = await fdFrom.readULE32();
const qBuff = await fdFrom.read(n8);
const q = Scalar.fromRprLE(qBuff);
let curve;
if (Scalar.eq(q, bn128.q)) {
curve = bn128;
} else {
assert(false, "Curve not supported");
}
assert(curve.F1.n64*8 == n8, "Invalid size");
const power = await fdFrom.readULE32();
const nContributions = await fdFrom.readULE32();
let challangeHash;
if (nContributions == 0) {
challangeHash = Buffer.from(blake2b(64).digest());
} else {
assert(false, "Not implemented");
}
const fdTo = await fastFile.createOverride(challangeFilename);
const toHash = blake2b(64);
await fdTo.write(challangeHash);
toHash.update(challangeHash);
// Process tauG1
if (!sections[2]) assert(false, "File has no tauG1 section");
if (sections[2].length>1) assert(false, "File has more than one tauG1 section");
fdFrom.pos = sections[2][0].p;
const nTauG1 = (1 << power) * 2 -1;
for (let i=0; i< nTauG1; i++) {
const p = await readG1();
await writeG1(p);
if ((verbose)&&((i%100000) == 0)&&i) console.log("tauG1: " + i);
}
if (fdFrom.pos != sections[2][0].p + sections[2][0].size) assert(false, "Invalid tauG1 section size");
// Process tauG2
if (!sections[3]) assert(false, "File has no tauG2 section");
if (sections[3].length>1) assert(false, "File has more than one tauG2 section");
fdFrom.pos = sections[3][0].p;
const nTauG2 = 1 << power ;
for (let i=0; i< nTauG2; i++) {
const p = await readG2();
await writeG2(p);
if ((verbose)&&((i%100000) == 0)&&i) console.log("tauG2: " + i);
}
if (fdFrom.pos != sections[3][0].p + sections[3][0].size) assert(false, "Invalid tauG2 section size");
// Process alphaTauG1
if (!sections[4]) assert(false, "File has no alphaTauG1 section");
if (sections[4].length>1) assert(false, "File has more than one alphaTauG1 section");
fdFrom.pos = sections[4][0].p;
const nAlphaTauG1 = 1 << power ;
for (let i=0; i< nAlphaTauG1; i++) {
const p = await readG1();
await writeG1(p);
if ((verbose)&&((i%100000) == 0)&&i) console.log("alphaTauG1: " + i);
}
if (fdFrom.pos != sections[4][0].p + sections[4][0].size) assert(false, "Invalid alphaTauG1 section size");
// Process betaTauG1
if (!sections[5]) assert(false, "File has no betaTauG1 section");
if (sections[5].length>1) assert(false, "File has more than one betaTauG1 section");
fdFrom.pos = sections[5][0].p;
const nBetaTauG1 = 1 << power ;
for (let i=0; i< nBetaTauG1; i++) {
const p = await readG1();
await writeG1(p);
if ((verbose)&&((i%100000) == 0)&&i) console.log("betaTauG1: " + i);
}
if (fdFrom.pos != sections[5][0].p + sections[5][0].size) assert(false, "Invalid betaTauG1 section size");
// Process betaG2
if (!sections[6]) assert(false, "File has no betaG2 section");
if (sections[6].length>1) assert(false, "File has more than one betaG2 section");
fdFrom.pos = sections[6][0].p;
const betaG2 = await readG2();
await writeG2(betaG2);
if (fdFrom.pos != sections[6][0].p + sections[6][0].size) assert(false, "Invalid betaG2 section size");
await fdFrom.close();
await fdTo.close();
const newChallangeHash = toHash.digest("hex");
console.log("Challange Hash: " +newChallangeHash);
async function readG1() {
const pBuff = await fdFrom.read(curve.F1.n64*8*2);
return curve.G1.fromRprLEM( pBuff );
}
async function readG2() {
const pBuff = await fdFrom.read(curve.F1.n64*8*2*2);
return curve.G2.fromRprLEM( pBuff );
}
async function writeG1(p) {
const rpr = curve.G1.toRprBE(p);
await fdTo.write(rpr);
toHash.update(rpr);
}
async function writeG2(p) {
const rpr = curve.G2.toRprBE(p);
await fdTo.write(rpr);
toHash.update(rpr);
}
}
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
function askEntropy() {
return new Promise((resolve, reject) => {
rl.question("Enter a random text. (Entropy): ", (input) => resolve(input) );
});
}
async function contribute(curve, challangeFilename, responseFileName, entropy, verbose) {
const MAX_CHUNK_SIZE = 1024;
let stats = await fs.promises.stat(challangeFilename);
const sG1 = curve.F1.n64*8*2;
const scG1 = curve.F1.n64*8; // Compressed size
const sG2 = curve.F2.n64*8*2;
const scG2 = curve.F2.n64*8; // Compressed size
const domainSize = (stats.size + sG1 - 64 - sG2) / (4*sG1 + sG2);
let e = domainSize;
let power = 0;
while (e>1) {
e = e /2;
power += 1;
}
assert(1<<power == domainSize, "Invalid file size");
const fdFrom = await fastFile.readExisting(challangeFilename);
const fdTo = await fastFile.createOverride(responseFileName);
let writePointer = 0;
while (!entropy) {
entropy = await askEntropy();
}
// Calculate the hash
console.log("Hashing challange");
const challangeHasher = blake2b(64);
for (let i=0; i<stats.size; i+= fdFrom.pageSize) {
const s = Math.min(stats.size - i, fdFrom.pageSize);
const buff = await fdFrom.read(s);
challangeHasher.update(buff);
}
const challangeHash = Buffer.from(challangeHasher.digest());
console.log("Challange Hash: " + challangeHash.toString("hex"));
const claimedHash = await fdFrom.read(64, 0);
console.log("Claimed Hash: " + claimedHash.toString("hex"));
const hasher = blake2b(64);
hasher.update(crypto.randomBytes(64));
hasher.update(Buffer.from(entropy));
const hash = Buffer.from(hasher.digest());
const seed = [];
for (let i=0;i<8;i++) {
seed[i] = hash.readUInt32BE(i*4);
}
const rng = new ChaCha(seed);
const kTau = keyPair.create(curve, 0, challangeHash, rng);
const kAlpha = keyPair.create(curve, 1, challangeHash, rng);
const kBeta = keyPair.create(curve, 2, challangeHash, rng);
if (verbose) {
console.log("kTau.g1_s_x: " + kTau.g1_s[0].toString(16));
console.log("kTau.g1_s_y: " + kTau.g1_s[1].toString(16));
console.log("kTau.g1_sx_x: " + kTau.g1_sx[0].toString(16));
console.log("kTau.g1_sx_y: " + kTau.g1_sx[1].toString(16));
console.log("kTau.g2_sp_x_c0: " + kTau.g2_sp[0][0].toString(16));
console.log("kTau.g2_sp_x_c1: " + kTau.g2_sp[0][1].toString(16));
console.log("kTau.g2_sp_y_c0: " + kTau.g2_sp[1][0].toString(16));
console.log("kTau.g2_sp_y_c1: " + kTau.g2_sp[1][1].toString(16));
console.log("kTau.g2_spx_x_c0: " + kTau.g2_spx[0][0].toString(16));
console.log("kTau.g2_spx_x_c1: " + kTau.g2_spx[0][1].toString(16));
console.log("kTau.g2_spx_y_c0: " + kTau.g2_spx[1][0].toString(16));
console.log("kTau.g2_spx_y_c1: " + kTau.g2_spx[1][1].toString(16));
}
await fdTo.write(challangeHash);
writePointer += 64;
const taskManager = await buildTaskManager(contributeThread, {
ffjavascript: "ffjavascript"
},{
curve: curve.name
});
// TauG1
let t = curve.Fr.e(1);
for (let i=0; i<domainSize*2-1; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log("TauG1: " + i);
const n = Math.min(domainSize*2-1 - i, MAX_CHUNK_SIZE);
const buff = await fdFrom.read(n*sG1);
await taskManager.addTask({
cmd: "MULG1",
first: t,
inc: kTau.prvKey.toString(),
buff: buff,
n: n,
writePos: writePointer
}, async function(r) {
return await fdTo.write(r.buff, r.writePos);
});
t = curve.Fr.mul(t, curve.Fr.pow(kTau.prvKey, n));
writePointer += n*scG1;
}
// TauG2
t = curve.Fr.e(1);
for (let i=0; i<domainSize; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log("TauG2: " + i);
const n = Math.min(domainSize - i, MAX_CHUNK_SIZE);
const buff = await fdFrom.read(n*sG2);
await taskManager.addTask({
cmd: "MULG2",
first: t,
inc: kTau.prvKey.toString(),
buff: buff,
n: n,
writePos: writePointer
}, async function(r) {
return await fdTo.write(r.buff, r.writePos);
});
t = curve.Fr.mul(t, curve.Fr.pow(kTau.prvKey, n));
writePointer += n*scG2;
}
// AlphaTauG1
t = curve.Fr.e(kAlpha.prvKey);
for (let i=0; i<domainSize; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log("AlfaTauG1: " + i);
const n = Math.min(domainSize - i, MAX_CHUNK_SIZE);
const buff = await fdFrom.read(n*sG1);
await taskManager.addTask({
cmd: "MULG1",
first: t,
inc: kTau.prvKey.toString(),
buff: buff,
n: n,
writePos: writePointer
}, async function(r) {
return await fdTo.write(r.buff, r.writePos);
});
t = curve.Fr.mul(t, curve.Fr.pow(kTau.prvKey, n));
writePointer += n*scG1;
}
// BetaTauG1
t = curve.Fr.e(kBeta.prvKey);
for (let i=0; i<domainSize; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log("BetaTauG1: " + i);
const n = Math.min(domainSize - i, MAX_CHUNK_SIZE);
const buff = await fdFrom.read(n*sG1);
await taskManager.addTask({
cmd: "MULG1",
first: t,
inc: kTau.prvKey.toString(),
buff: buff,
n: n,
writePos: writePointer
}, async function(r) {
return await fdTo.write(r.buff, r.writePos);
});
t = curve.Fr.mul(t, curve.Fr.pow(kTau.prvKey, n));
writePointer += n*scG1;
}
// BetaG2
const buffOldBeta = await fdFrom.read(sG2);
const oldBeta = curve.G2.fromRprBE(buffOldBeta);
const newBeta = curve.G2.mulScalar(oldBeta, kBeta.prvKey);
const buffNewBeta = curve.G2.toRprCompressed(newBeta);
await fdTo.write(buffNewBeta, writePointer);
writePointer += scG2;
//Write Key
await fdTo.write(curve.G1.toRprBE(kTau.g1_s), writePointer);
writePointer += sG1;
await fdTo.write(curve.G1.toRprBE(kTau.g1_sx), writePointer);
writePointer += sG1;
await fdTo.write(curve.G1.toRprBE(kAlpha.g1_s), writePointer);
writePointer += sG1;
await fdTo.write(curve.G1.toRprBE(kAlpha.g1_sx), writePointer);
writePointer += sG1;
await fdTo.write(curve.G1.toRprBE(kBeta.g1_s), writePointer);
writePointer += sG1;
await fdTo.write(curve.G1.toRprBE(kBeta.g1_sx), writePointer);
writePointer += sG1;
await fdTo.write(curve.G2.toRprBE(kTau.g2_spx), writePointer);
writePointer += sG2;
await fdTo.write(curve.G2.toRprBE(kAlpha.g2_spx), writePointer);
writePointer += sG2;
await fdTo.write(curve.G2.toRprBE(kBeta.g2_spx), writePointer);
writePointer += sG2;
await taskManager.finish();
await fdTo.close();
await fdFrom.close();
}
function contributeThread(ctx, task) {
if (task.cmd == "INIT") {
ctx.assert = ctx.modules.assert;
if (task.curve == "bn128") {
ctx.curve = ctx.modules.ffjavascript.bn128;
} else {
ctx.assert(false, "curve not defined");
}
return {};
} else if (task.cmd == "MULG1") {
// console.log("StartMULG1 "+ ctx.processId);
const sG1 = ctx.curve.F1.n64*8*2;
const scG1 = ctx.curve.F1.n64*8; // Compressed size
const buffDest = Buffer.allocUnsafe(scG1*task.n);
let t = ctx.curve.Fr.e(task.first);
let inc = ctx.curve.Fr.e(task.inc);
for (let i=0; i<task.n; i++) {
const slice = task.buff.slice(i*sG1, (i+1)*sG1);
const b = Buffer.from(slice);
const P = ctx.curve.G1.fromRprBE(b);
const R = ctx.curve.G1.mulScalar(P, t);
const bR = ctx.curve.G1.toRprCompressed(R);
bR.copy(buffDest, i*scG1);
t = ctx.curve.Fr.mul(t, inc);
}
// console.log("EndMulG1 "+ ctx.processId);
return {
buff: buffDest,
writePos: task.writePos
};
} else if (task.cmd == "MULG2") {
// console.log("StartMULG2 "+ ctx.processId);
const sG2 = ctx.curve.F2.n64*8*2;
const scG2 = ctx.curve.F2.n64*8; // Compressed size
const buffDest = Buffer.allocUnsafe(scG2*task.n);
let t = ctx.curve.Fr.e(task.first);
let inc = ctx.curve.Fr.e(task.inc);
for (let i=0; i<task.n; i++) {
const slice = task.buff.slice(i*sG2, (i+1)*sG2);
const b = Buffer.from(slice);
const P = ctx.curve.G2.fromRprBE(b);
const R = ctx.curve.G2.mulScalar(P, t);
const bR = ctx.curve.G2.toRprCompressed(R);
bR.copy(buffDest, i*scG2);
t = ctx.curve.Fr.mul(t, inc);
}
// console.log("EndMulG2 "+ ctx.processId);
return {
buff: buffDest,
writePos: task.writePos
};
} else {
ctx.assert(false, "Op not implemented");
}
}
module.exports.newAccumulator = newAccumulator;
module.exports.exportChallange = exportChallange;
module.exports.contribute = contribute;
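Because newAccumulator writes the header as the first section, the file's parameters can be recovered without scanning the point data. A hypothetical header reader mirroring the reads in exportChallange (assuming the header is indeed the first section, as written above):

    const fastFile = require("fastfile");
    const Scalar = require("ffjavascript").Scalar;

    async function readPtawHeader(fileName) {
        const fd = await fastFile.readExisting(fileName);
        if ((await fd.read(4)).toString() != "ptau") throw new Error("Invalid file format");
        const version = await fd.readULE32();
        const nSections = await fd.readULE32();
        await fd.readULE32();                       // section type, 1 = header
        await fd.readULE64();                       // section size
        const n8 = await fd.readULE32();            // bytes per field element
        const prime = Scalar.fromRprLE(await fd.read(n8));
        const power = await fd.readULE32();
        const nContributions = await fd.readULE32();
        await fd.close();
        return { version, nSections, n8, prime, power, nContributions };
    }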


@@ -158,6 +158,16 @@ function calculateH(vk_proof, witness) {
const polA_T = new Array(m).fill(PolF.F.zero);
const polB_T = new Array(m).fill(PolF.F.zero);
for (let i=0; i<vk_proof.ccoefs.length; i++) {
const coef = vk_proof.ccoefs[i];
if (coef.matrix == 0) {
polA_T[coef.constraint] = F.add( polA_T[coef.constraint], F.mul(witness[ coef.signal ], coef.value) );
} else if (coef.matrix == 1) {
polB_T[coef.constraint] = F.add( polB_T[coef.constraint], F.mul(witness[ coef.signal ], coef.value) );
}
}
/*
for (let s=0; s<vk_proof.nVars; s++) {
for (let c in vk_proof.polsA[s]) {
polA_T[c] = F.add(polA_T[c], F.mul(witness[s], vk_proof.polsA[s][c]));
@@ -166,6 +176,8 @@ function calculateH(vk_proof, witness) {
polB_T[c] = F.add(polB_T[c], F.mul(witness[s], vk_proof.polsB[s][c]));
}
}
*/
const polA_S = PolF.ifft(polA_T);
const polB_S = PolF.ifft(polB_T);


@@ -19,8 +19,6 @@
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const bigInt = require("big-integer");
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").ZqField;
@@ -33,18 +31,15 @@ const F = new ZqField(bn128.r);
module.exports = function setup(circuit, verbose) {
const setup = {
vk_proof : {
protocol: "groth",
protocol: "groth16",
nVars: circuit.nVars,
nPublic: circuit.nPubInputs + circuit.nOutputs
},
vk_verifier: {
protocol: "groth",
nPublic: circuit.nPubInputs + circuit.nOutputs
},
toxic: {}
};
setup.vk_proof.q = bn128.q;
setup.vk_proof.r = bn128.r;
setup.vk_proof.domainBits = PolF.log2(circuit.nConstraints + circuit.nPubInputs + circuit.nOutputs +1 -1) +1;
setup.vk_proof.domainSize = 1 << setup.vk_proof.domainBits;
@@ -52,6 +47,21 @@ module.exports = function setup(circuit, verbose) {
setup.toxic.t = F.random();
calculateEncriptedValuesAtT(setup, circuit, verbose);
setup.vk_verifier = {
protocol: setup.vk_proof.protocol,
nPublic: setup.vk_proof.nPublic,
IC: setup.vk_proof.IC,
vk_alfa_1: setup.vk_proof.vk_alfa_1,
vk_beta_2: setup.vk_proof.vk_beta_2,
vk_gamma_2: setup.vk_proof.vk_gamma_2,
vk_delta_2: setup.vk_proof.vk_delta_2,
vk_alfabeta_12: bn128.pairing( setup.vk_proof.vk_alfa_1 , setup.vk_proof.vk_beta_2 )
};
return setup;
};
@@ -66,8 +76,22 @@ function calculatePolinomials(setup, circuit) {
setup.vk_proof.polsB[i] = {};
setup.vk_proof.polsC[i] = {};
}
for (let c=0; c<circuit.nConstraints; c++) {
setup.vk_proof.ccoefs = [];
for (let m=0; m<2; m++) {
for (let c=0; c<circuit.nConstraints; c++) {
for (let s in circuit.constraints[c][m]) {
setup.vk_proof.ccoefs.push({
matrix: m,
constraint: c,
signal: s,
value: circuit.constraints[c][m][s]
});
}
}
}
for (let c=0; c<circuit.nConstraints; c++) {
for (let s in circuit.constraints[c][0]) {
setup.vk_proof.polsA[s][c] = circuit.constraints[c][0][s];
}
@@ -87,6 +111,12 @@
for (let i = 0; i < circuit.nPubInputs + circuit.nOutputs + 1; ++i)
{
setup.vk_proof.polsA[i][circuit.nConstraints + i] = F.one;
setup.vk_proof.ccoefs.push({
matrix: 0,
constraint: circuit.nConstraints + i,
signal: i,
value: F.one
});
}
}
@@ -125,7 +155,7 @@ function calculateEncriptedValuesAtT(setup, circuit, verbose) {
setup.vk_proof.B1 = new Array(circuit.nVars);
setup.vk_proof.B2 = new Array(circuit.nVars);
setup.vk_proof.C = new Array(circuit.nVars);
setup.vk_verifier.IC = new Array(circuit.nPubInputs + circuit.nOutputs + 1);
setup.vk_proof.IC = new Array(circuit.nPubInputs + circuit.nOutputs + 1);
setup.toxic.kalfa = F.random();
setup.toxic.kbeta = F.random();
@@ -141,16 +171,9 @@ function calculateEncriptedValuesAtT(setup, circuit, verbose) {
setup.vk_proof.vk_beta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kbeta));
setup.vk_proof.vk_delta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kdelta));
setup.vk_proof.vk_gamma_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kgamma));
setup.vk_verifier.vk_alfa_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kalfa));
setup.vk_verifier.vk_beta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kbeta));
setup.vk_verifier.vk_gamma_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kgamma));
setup.vk_verifier.vk_delta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kdelta));
setup.vk_verifier.vk_alfabeta_12 = bn128.pairing( setup.vk_verifier.vk_alfa_1 , setup.vk_verifier.vk_beta_2 );
for (let s=0; s<circuit.nVars; s++) {
const A = G1.mulScalar(G1.g, v.a_t[s]);
@@ -180,7 +203,7 @@ function calculateEncriptedValuesAtT(setup, circuit, verbose) {
v.c_t[s]));
const IC = G1.mulScalar(G1.g, ps);
setup.vk_verifier.IC[s]=IC;
setup.vk_proof.IC[s]=IC;
}
for (let s=setup.vk_proof.nPublic+1; s<circuit.nVars; s++) {
@@ -222,8 +245,7 @@ function calculateEncriptedValuesAtT(setup, circuit, verbose) {
G2.multiAffine(setup.vk_proof.B2);
G1.multiAffine(setup.vk_proof.C);
G1.multiAffine(setup.vk_proof.hExps);
G1.multiAffine(setup.vk_verifier.IC);
G1.multiAffine(setup.vk_proof.IC);
}
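The new ccoefs array is a sparse triplet encoding of the A (matrix 0) and B (matrix 1) constraint matrices, which lets the prover accumulate polA_T and polB_T in a single pass instead of walking the per-signal polsA/polsB maps. Illustrative shape only, with toy numbers standing in for real field elements:

    // For a constraint (2*w3) * (w1 + 3*w2) = ..., ccoefs would contain:
    const ccoefs = [
        { matrix: 0, constraint: 0, signal: 3, value: 2 },  // A[0][w3] = 2
        { matrix: 1, constraint: 0, signal: 1, value: 1 },  // B[0][w1] = 1
        { matrix: 1, constraint: 0, signal: 2, value: 3 }   // B[0][w2] = 3
    ];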


@@ -17,11 +17,9 @@
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const bigInt = require("big-integer");
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").ZqField;
const ZqField = require("ffjavascript").F1Field;
const G1 = bn128.G1;
const G2 = bn128.G2;

src/taskmanager.js (new file)

@@ -0,0 +1,220 @@
const assert = require("assert");
const inBrowser = (typeof window !== "undefined");
let NodeWorker;
if (!inBrowser) {
NodeWorker = require("worker_threads").Worker;
}
class Deferred {
constructor() {
this.promise = new Promise((resolve, reject)=> {
this.reject = reject;
this.resolve = resolve;
});
}
}
function thread(self, fn, modules) {
const ctx = {
modules: modules
};
self.onmessage = function(e) {
let data;
if (e.data) {
data = e.data;
} else {
data = e;
}
if (data.cmd == "INIT") {
ctx.processId = data.processId;
}
if (data.cmd == "TERMINATE") {
self.postMessage({cmd: "TERMINATE"});
process.exit();
return;
}
let res = fn(ctx, data);
res = res || {};
res.cmd = data.cmd;
if (res.buff) {
res.buff = new Uint8Array(res.buff);
self.postMessage(res, [res.buff.buffer]);
} else {
self.postMessage(res);
}
};
}
async function buildTaskManager(fn, mods, initTask) {
let concurrency;
if ((typeof(navigator) === "object") && navigator.hardwareConcurrency) {
concurrency = navigator.hardwareConcurrency;
} else {
const os = require("os");
concurrency = os.cpus().length;
}
const tm = {
workers: []
};
let S = "{";
const keys = Object.keys(mods);
for (let i=0; i<keys.length; i++) {
const key= keys[i];
S += `${key}: require('${mods[key]}'), `;
}
S += "}";
function getOnMsg(i) {
return function(e) {
function finishTask() {
// There can not be a waiting task while the manager is terminating
assert( !(tm.waitingTask && tm.terminateDeferred));
if (tm.terminateDeferred) {
tm.workers[i].worker.postMessage({cmd: "TERMINATE"});
return;
}
tm.workers[i].state = "READY";
if (tm.waitingTask) {
processTask(i, tm.waitingTask.task, tm.waitingTask.asyncCb);
const d = tm.waitingTask.deferral;
tm.waitingTask = null;
d.resolve();
}
}
let data;
if ((e)&&(e.data)) {
data = e.data;
} else {
data = e;
}
if (data.cmd == "TERMINATE") {
tm.workers[i].state = "TERMINATED";
tm.tryTerminate();
return;
}
if (data.buff) {
data.buff = Buffer.from(data.buff);
}
if (tm.workers[i].asyncCb) {
tm.workers[i].asyncCb(data).then(()=> {
finishTask();
});
} else {
finishTask();
}
};
}
function processTask(i, task, asyncCb) {
assert(tm.workers[i].state == "READY");
tm.workers[i].asyncCb = asyncCb;
tm.workers[i].state = "WORKING";
if (task.buff) {
task.buff = new Uint8Array(task.buff);
tm.workers[i].worker.postMessage(task, [task.buff.buffer]);
} else {
tm.workers[i].worker.postMessage(task);
}
}
for (let i=0; i<concurrency; i++) {
const worker = new NodeWorker(`(${thread.toString()})(require('worker_threads').parentPort, ${fn.toString()},${S});`, {eval: true});
worker.on("message", getOnMsg(i));
tm.workers[i] = {
state: "READY",
worker: worker,
taskPromise: null
};
}
for (let i=0; i<concurrency; i++) {
initTask.cmd = "INIT";
initTask.processId = i;
processTask(i, initTask);
}
tm.finish = function() {
const self = this;
assert(self.terminateDeferred == null);
self.terminateDeferred = new Deferred();
for (let i=0; i<concurrency; i++) {
if (self.workers[i].state == "READY") {
self.workers[i].worker.postMessage({cmd: "TERMINATE"});
}
}
return self.terminateDeferred.promise;
};
tm.addTask = function (task, asyncCb) {
const self = this;
assert (!self.waitingTask);
assert(!self.terminateDeferred);
const deferral = new Deferred();
let i;
for (i=0; i<tm.workers.length; i++) {
if (self.workers[i].state == "READY") break;
}
if (i<tm.workers.length) {
processTask(i, task, asyncCb);
deferral.resolve();
} else {
self.waitingTask = {
task: task,
deferral: deferral,
asyncCb: asyncCb
};
}
return deferral.promise;
};
tm.tryTerminate = function() {
const self = this;
if (!self.terminateDeferred) return;
for (let i=0; i<concurrency; i++) {
if (self.workers[i].state != "TERMINATED") return;
}
self.terminateDeferred.resolve();
};
return tm;
}
module.exports = buildTaskManager;
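
For orientation, a minimal, hypothetical driver for this task manager (Node.js only; the SQUARE command, the empty module map and the callback are illustrative assumptions, not part of this commit):

// Hypothetical usage sketch for buildTaskManager (assumes Node.js,
// where worker_threads is available).
const buildTaskManager = require("./src/taskmanager");

async function main() {
    // fn is stringified and re-evaluated inside each worker, so it must
    // be self-contained: it only sees ctx and the posted task.
    const fn = function(ctx, task) {
        if (task.cmd == "SQUARE") {
            return { result: task.value * task.value };
        }
    };
    const tm = await buildTaskManager(fn, {}, {});
    // addTask resolves when the task is dispatched; the async callback
    // receives the worker's response.
    await tm.addTask({cmd: "SQUARE", value: 7}, async (res) => {
        console.log(res.result); // 49
    });
    await tm.finish(); // resolves once every worker has terminated
}

main();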

155
src/wtnsfile.js Normal file

@@ -0,0 +1,155 @@
const Scalar = require("ffjavascript").Scalar;
const fastFile = require("fastfile");
const assert = require("assert");
module.exports.write = async function writeWtns(fileName, witness, prime) {
const fd = await fastFile.createOverride(fileName);
await fd.write(Buffer.from("wtns"), 0); // Magic "r1cs"
let p = 4;
await writeU32(1); // Version
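// Size of a field element in bytes, rounded up to a whole number of
// 64-bit words.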
const n8 = (Math.floor( (Scalar.bitLength(prime) - 1) / 64) +1)*8;
await writeU32(n8);
await writeBigInt(prime);
await writeU32(witness.length);
for (let i=0; i<witness.length; i++) {
await writeBigInt(witness[i]);
}
await fd.close();
async function writeU32(v, pos) {
let o = (typeof pos == "undefined") ? p : pos;
const b = Buffer.allocUnsafe(4);
b.writeInt32LE(v);
await fd.write(b, o);
if (typeof(pos) == "undefined") p += 4;
}
async function writeBigInt(n, pos) {
let o = (typeof pos == "undefined") ? p : pos;
const s = n.toString(16);
const b = Buffer.from(s.padStart(n8*2, "0"), "hex");
const buff = Buffer.allocUnsafe(b.length);
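// Reverse the big-endian hex bytes into the little-endian layout
// used on disk.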
for (let i=0; i<b.length; i++) buff[i] = b[b.length-1-i];
await fd.write(buff, o);
if (typeof(pos) == "undefined") p += n8;
}
};
module.exports.writeBin = async function writeWtnsBin(fileName, witnessBin, prime) {
witnessBin = Buffer.from(witnessBin);
const fd = await fastFile.createOverride(fileName);
await fd.write(Buffer.from("wtns"), 0); // Magic "r1cs"
let p = 4;
await writeU32(1); // Version
const n8 = (Math.floor( (Scalar.bitLength(prime) - 1) / 64) +1)*8;
await writeU32(n8);
await writeBigInt(prime);
assert(witnessBin.length % n8 == 0);
await writeU32(witnessBin.length / n8);
await fd.write(witnessBin, p);
await fd.close();
async function writeU32(v, pos) {
let o = (typeof pos == "undefined") ? p : pos;
const b = Buffer.allocUnsafe(4);
b.writeInt32LE(v);
await fd.write(b, o);
if (typeof(pos) == "undefined") p += 4;
}
async function writeBigInt(n, pos) {
let o = (typeof pos == "undefined") ? p : pos;
const s = n.toString(16);
const b = Buffer.from(s.padStart(n8*2, "0"), "hex");
const buff = Buffer.allocUnsafe(b.length);
for (let i=0; i<b.length; i++) buff[i] = b[b.length-1-i];
await fd.write(buff, o);
if (typeof(pos) == "undefined") p += n8;
}
};
module.exports.read = async function readWtns(fileName) {
const res = [];
const fd = await fastFile.readExisting(fileName);
const b = await fd.read(0, 4);
if (b.toString() != "wtns") assert(false, "Invalid File format");
let p=4;
let v = await readU32();
if (v>1) assert(false, "Version not supported");
const n8 = await readU32();
await readBigInt();
const nWitness = await readU32();
for (let i=0; i<nWitness; i++) {
const v = await readBigInt();
res.push(v);
}
return res;
async function readU32() {
const b = await fd.read(p, 4);
p+=4;
return b.readUInt32LE(0);
}
async function readBigInt() {
const buff = await fd.read(p, n8);
assert(buff.length == n8);
const buffR = Buffer.allocUnsafe(n8);
for (let i=0; i<n8; i++) buffR[i] = buff[n8-1-i];
p += n8;
return Scalar.fromString(buffR.toString("hex"), 16);
}
};
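
A plausible round trip through this module, assuming the BN128 group order that snarkjs works over (the file name and witness values are illustrative):

// Hypothetical round-trip sketch for src/wtnsfile.js.
const wtnsFile = require("./src/wtnsfile");
const Scalar = require("ffjavascript").Scalar;

// Group order r of BN128, the curve snarkjs works over.
const r = Scalar.fromString(
    "21888242871839275222246405745257275088548364400416034343698204186575808495617");

async function main() {
    const witness = [Scalar.e(1), Scalar.e(33), Scalar.e(34)];
    await wtnsFile.write("witness.wtns", witness, r);
    const back = await wtnsFile.read("witness.wtns");
    console.log(back.map((v) => v.toString())); // [ '1', '33', '34' ]
}

main();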

484
src/zkeyfile.js Normal file

@@ -0,0 +1,484 @@
// Format
// ======
// Header
// Prover Type 1 Groth
// HeaderGroth
// n8q
// q
// n8r
// r
// NVars
// NPub
// DomainSize (multiple of 2)
// alfa1
// beta1
// delta1
// beta2
// gamma2
// delta2
// IC
// PolA
// PolB
// PointsA
// PointsB1
// PointsB2
// PointsC
// PointsH
// Contributions
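// Every section is serialized as: sectionType (U32) | sectionSize (U64) | payload.
// The size is first written as 0 and patched once the payload length is known.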
const Scalar = require("ffjavascript").Scalar;
const F1Field = require("ffjavascript").F1Field;
const fastFile = require("fastfile");
const assert = require("assert");
module.exports.write = async function writeZKey(fileName, zkey) {
const fd = await fastFile.createOverride(fileName);
await fd.write(Buffer.from("zkey"), 0); // Magic "r1cs"
let p = 4;
await writeU32(1); // Version
await writeU32(6); // Number of Sections
// Write the header
///////////
await writeU32(1); // Header type
const pHeaderSize = p;
await writeU64(0); // Temporarily set to 0 length
await writeU32(1); // Groth
const headerSize = p - pHeaderSize - 8;
// Write the Groth header section
///////////
const primeQ = zkey.q;
const Fq = new F1Field(zkey.q);
const n8q = (Math.floor( (Scalar.bitLength(primeQ) - 1) / 64) +1)*8;
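// Montgomery radix R = 2^(n8q*8) mod q.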
const Rq = Scalar.mod(Scalar.shl(1, n8q*8), primeQ);
const primeR = zkey.r;
const Fr = new F1Field(zkey.r);
const n8r = (Math.floor( (Scalar.bitLength(primeR) - 1) / 64) +1)*8;
const Rr = Scalar.mod(Scalar.shl(1, n8r*8), primeR);
const R2r = Scalar.mod(Scalar.mul(Rr,Rr), primeR);
// Field Def
await writeU32(2); // Groth header type
const pGrothHeader = p;
await writeU64(0); // Temporarily set to 0 length
await writeU32(n8q);
await writeBigIntQ(primeQ);
await writeU32(n8r);
await writeBigIntR(primeR);
await writeU32(zkey.nVars); // Total number of vars
await writeU32(zkey.nPublic); // Total number of public vars (not including ONE)
await writeU32(zkey.domainSize); // domainSize
await writePointG1(zkey.vk_alfa_1);
await writePointG1(zkey.vk_beta_1);
await writePointG1(zkey.vk_delta_1);
await writePointG2(zkey.vk_beta_2);
await writePointG2(zkey.vk_gamma_2);
await writePointG2(zkey.vk_delta_2);
const grothHeaderSize = p - pGrothHeader - 8;
// Write IC Section
///////////
await writeU32(3); // IC
const pIc = p;
await writeU64(0); // Temporarily set to 0 length
for (let i=0; i<= zkey.nPublic; i++) {
await writePointG1(zkey.IC[i] );
}
const icSize = p - pIc -8;
// Write Coefs section (nonzero A and B matrix coefficients)
///////////
await writeU32(4); // Coefs
const pCoefs = p;
await writeU64(0); // Temporarily set to 0 length
await writeU32(zkey.ccoefs.length);
for (let i=0; i<zkey.ccoefs.length; i++) {
const coef = zkey.ccoefs[i];
await writeU32(coef.matrix);
await writeU32(coef.constraint);
await writeU32(coef.signal);
await writeFr2(coef.value);
}
const coefsSize = p - pCoefs -8;
// Write A B1 B2 C points
///////////
await writeU32(5); // A B1 B2 C points
const pPointsAB1B2C = p;
await writeU64(0); // Temporarily set to 0 length
for (let i=0; i<zkey.nVars; i++) {
await writePointG1(zkey.A[i]);
await writePointG1(zkey.B1[i]);
await writePointG2(zkey.B2[i]);
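// For public signals the prover never uses C (their contribution is
// carried by the IC section), so a zero point is written instead.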
if (i<=zkey.nPublic) {
await writePointG1_zero();
} else {
await writePointG1(zkey.C[i]);
}
}
const pointsAB1B2CSize = p - pPointsAB1B2C - 8;
// Write H points
///////////
await writeU32(6); // H Points
const pPointsH = p;
await writeU64(0); // Temporarily set to 0 length
for (let i=0; i<zkey.domainSize; i++) {
await writePointG1(zkey.hExps[i]);
}
const pointsHsize = p - pPointsH -8;
// Write sizes
await writeU64(headerSize, pHeaderSize);
await writeU64(grothHeaderSize, pGrothHeader);
await writeU64(icSize, pIc);
await writeU64(coefsSize, pCoefs);
await writeU64(pointsAB1B2CSize, pPointsAB1B2C);
await writeU64(pointsHsize, pPointsH);
await fd.close();
async function writeU32(v, pos) {
let o = (typeof pos == "undefined") ? p : pos;
const b = Buffer.allocUnsafe(4);
b.writeInt32LE(v);
await fd.write(b, o);
if (typeof(pos) == "undefined") p += 4;
}
async function writeU64(v, pos) {
let o = (typeof pos == "undefined") ? p : pos;
const b = Buffer.allocUnsafe(8);
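// JavaScript bitwise operators truncate to 32 bits, so the high word
// is obtained by division instead of a shift.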
const LSB = v & 0xFFFFFFFF;
const MSB = Math.floor(v / 0x100000000);
b.writeInt32LE(LSB, 0);
b.writeInt32LE(MSB, 4);
await fd.write(b, o);
if (typeof(pos) == "undefined") p += 8;
}
async function writeBigIntQ(n, pos) {
let o = (typeof pos == "undefined") ? p : pos;
const s = n.toString(16);
const b = Buffer.from(s.padStart(n8q*2, "0"), "hex");
const buff = Buffer.allocUnsafe(b.length);
for (let i=0; i<b.length; i++) buff[i] = b[b.length-1-i];
await fd.write(buff, o);
if (typeof(pos) == "undefined") p += n8q;
}
async function writeBigIntR(n, pos) {
let o = (typeof pos == "undefined") ? p : pos;
const s = n.toString(16);
const b = Buffer.from(s.padStart(n8r*2, "0"), "hex");
const buff = Buffer.allocUnsafe(b.length);
for (let i=0; i<b.length; i++) buff[i] = b[b.length-1-i];
await fd.write(buff, o);
if (typeof(pos) == "undefined") p += n8r;
}
async function writeFr2(n) {
// Convert to montgomery
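// The value is written as n*R^2 mod r; readFr2 recovers n by
// multiplying by R^-2.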
n = Scalar.mod( Scalar.mul(n, R2r), primeR);
await writeBigIntR(n);
}
async function writeFq(n) {
// Convert to montgomery
n = Scalar.mod( Scalar.mul(n, Rq), primeQ);
await writeBigIntQ(n);
}
async function writePointG1(pnt) {
if (Fq.isZero(pnt[2])) {
await writeFq(0);
await writeFq(0);
} else {
await writeFq(pnt[0]);
await writeFq(pnt[1]);
}
}
async function writePointG1_zero() {
await writeFq(0);
await writeFq(0);
}
async function writePointG2(pnt) {
if (Fq.isZero(pnt[2][0]) && Fq.isZero(pnt[2][1])) {
await writeFq(Fq.e(0));
await writeFq(Fq.e(0));
await writeFq(Fq.e(0));
await writeFq(Fq.e(0));
} else {
await writeFq(pnt[0][0]);
await writeFq(pnt[0][1]);
await writeFq(pnt[1][0]);
await writeFq(pnt[1][1]);
}
}
};
module.exports.read = async function readZKey(fileName) {
const zkey = {};
const fd = await fastFile.readExisting(fileName);
const b = await fd.read(0, 4);
if (b.toString() != "zkey") assert(false, "Invalid File format");
let p=4;
let v = await readU32();
if (v>1) assert(false, "Version not supported");
const nSections = await readU32();
// Scan sections
let sections = [];
for (let i=0; i<nSections; i++) {
let ht = await readU32();
let hl = await readU64();
if (typeof sections[ht] == "undefined") sections[ht] = [];
sections[ht].push({
p: p,
size: hl
});
p += hl;
}
// Read Header
/////////////////////
if (sections[1].length==0) assert(false, "File has no header");
if (sections[1].length>1) assert(false, "File has more than one header");
p = sections[1][0].p;
const protocol = await readU32();
if (protocol != 1) assert("File is not groth");
if (p != sections[1][0].p + sections[1][0].size) assert(false, "Invalid header section size");
// Read Groth Header
/////////////////////
if (sections[2].length==0) assert(false, "File has no groth header");
if (sections[2].length>1) assert(false, "File has more than one groth header");
zkey.protocol = "groth16";
p = sections[2][0].p;
const n8q = await readU32();
zkey.q = await readBigIntQ();
const Fq = new F1Field(zkey.q);
const Rq = Scalar.mod(Scalar.shl(1, n8q*8), zkey.q);
const Rqi = Fq.inv(Rq);
const n8r = await readU32();
zkey.r = await readBigIntR();
const Fr = new F1Field(zkey.r);
const Rr = Scalar.mod(Scalar.shl(1, n8r*8), zkey.r);
const Rri = Fr.inv(Rr);
const Rri2 = Fr.mul(Rri, Rri);
zkey.nVars = await readU32();
zkey.nPublic = await readU32();
zkey.domainSize = await readU32();
zkey.vk_alfa_1 = await readG1();
zkey.vk_beta_1 = await readG1();
zkey.vk_delta_1 = await readG1();
zkey.vk_beta_2 = await readG2();
zkey.vk_gamma_2 = await readG2();
zkey.vk_delta_2 = await readG2();
if (p != sections[2][0].p + sections[2][0].size) assert(false, "Invalid groth header section size");
// Read IC Section
///////////
if (sections[3].length==0) assert(false, "File has no IC section");
if (sections[3].length>1) assert(false, "File has more than one IC section");
p = sections[3][0].p;
zkey.IC = [];
for (let i=0; i<= zkey.nPublic; i++) {
const P = await readG1();
zkey.IC.push(P);
}
if (p != sections[3][0].p + sections[3][0].size) assert(false, "Invalid IC section size");
// Read Coefs
///////////
if (sections[4].length==0) assert(false, "File has no Coefs section");
if (sections[4].length>1) assert(false, "File has more than one Coefs section");
p = sections[4][0].p;
const nCCoefs = await readU32();
zkey.ccoefs = [];
for (let i=0; i<nCCoefs; i++) {
const m = await readU32();
const c = await readU32();
const s = await readU32();
const v = await readFr2();
zkey.ccoefs.push({
matrix: m,
constraint: c,
signal: s,
value: v
});
}
if (p != sections[4][0].p + sections[4][0].size) assert(false, "Invalid Coefs section size");
// Read A B1 B2 C points
///////////
if (sections[5].length==0) assert(false, "File has no AB1B2C section");
if (sections[5].length>1) assert(false, "File has more than one AB1B2C section");
p = sections[5][0].p;
zkey.A = [];
zkey.B1 = [];
zkey.B2 = [];
zkey.C = [];
for (let i=0; i<zkey.nVars; i++) {
const A = await readG1();
const B1 = await readG1();
const B2 = await readG2();
const C = await readG1();
zkey.A.push(A);
zkey.B1.push(B1);
zkey.B2.push(B2);
zkey.C.push(C);
if (i<= zkey.nPublic) {
assert(Fq.isZero(C[2]), "C value for public is not zero");
}
}
if (p != sections[5][0].p + sections[5][0].size) assert(false, "Invalid AB1B2C section size");
// Read H points
///////////
if (sections[6].length==0) assert(false, "File has no H section");
if (sections[6].length>1) assert(false, "File has more than one H section");
p = sections[6][0].p;
zkey.hExps = [];
for (let i=0; i<zkey.domainSize; i++) {
const H = await readG1();
zkey.hExps.push(H);
}
if (p != sections[6][0].p + sections[6][0].size) assert(false, "Invalid H section size");
await fd.close();
return zkey;
async function readU32() {
const b = await fd.read(p, 4);
p+=4;
return b.readUInt32LE(0);
}
async function readU64() {
const b = await fd.read(p, 8);
p+=8;
const LS = b.readUInt32LE(0);
const MS = b.readUInt32LE(4);
return MS * 0x100000000 + LS;
}
async function readBigIntQ() {
const buff = await fd.read(p, n8q);
assert(buff.length == n8q);
const buffR = Buffer.allocUnsafe(n8q);
for (let i=0; i<n8q; i++) buffR[i] = buff[n8q-1-i];
p += n8q;
return Scalar.fromString(buffR.toString("hex"), 16);
}
async function readBigIntR() {
const buff = await fd.read(p, n8r);
assert(buff.length == n8r);
const buffR = Buffer.allocUnsafe(n8r);
for (let i=0; i<n8r; i++) buffR[i] = buff[n8r-1-i];
p += n8r;
return Scalar.fromString(buffR.toString("hex"), 16);
}
async function readFq() {
const n = await readBigIntQ();
return Fq.mul(n, Rqi);
}
async function readFr2() {
const n = await readBigIntR();
return Fr.mul(n, Rri2);
}
async function readG1() {
const x = await readFq();
const y = await readFq();
if (Fq.isZero(x) && Fq.isZero(y)) {
return [Fq.e(0), Fq.e(1), Fq.e(0)];
} else {
return [x , y, Fq.e(1)];
}
}
async function readG2() {
const xa = await readFq();
const xb = await readFq();
const ya = await readFq();
const yb = await readFq();
if (Fq.isZero(xa) && Fq.isZero(xb) && Fq.isZero(ya) && Fq.isZero(yb)) {
return [[Fq.e(0),Fq.e(0)],[Fq.e(1),Fq.e(0)], [Fq.e(0),Fq.e(0)]];
} else {
return [[xa, xb],[ya, yb], [Fq.e(1),Fq.e(0)]];
}
}
};
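
As a quick smoke test of the reader, a hypothetical inspection script (the .zkey file name is a placeholder):

// Hypothetical sketch: print the header fields populated by readZKey.
const zkeyFile = require("./src/zkeyfile");

async function inspect(fileName) {
    const zkey = await zkeyFile.read(fileName);
    console.log("protocol:  ", zkey.protocol);      // "groth16"
    console.log("nVars:     ", zkey.nVars);
    console.log("nPublic:   ", zkey.nPublic);
    console.log("domainSize:", zkey.domainSize);
    console.log("IC points: ", zkey.IC.length);     // nPublic + 1
}

inspect("circuit.zkey");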

105
test/keypar_test.js Normal file

@@ -0,0 +1,105 @@
const assert = require("assert");
const bn128 = require("ffjavascript").bn128;
const Fq = bn128.F1;
const getG2sp = require("../src/keypair").getG2sp;
const pubKey = {
tau_g1_s: [
Fq.e("0x1403cf4fed293e66a8cd522be9f938524111f6f08762371bff53ee387a39cf13"),
Fq.e("0x2accbda355c222301a1bd802db7454d86a4ec2ee89ae895ca21f147d6b705740"),
Fq.e("1")
],
tau_g1_sx: [
Fq.e("0x12996cf89d854246f1ab002e446436b77a64349117ec1fb2aa57a304890e81ef"),
Fq.e("0x0c17fd067df52c480a1db3c6890821f975932d89d0d53c6c60777cc56f1dd712"),
Fq.e("1")
],
alpha_g1_s:[
Fq.e("0x12a64bbe8af7fcb19052e25e188c1fcdac454928142f8e89f58e03249e18b223"),
Fq.e("0x22be31a388d0ec551530e1b1581b671b4340e88990de805a7bfed8bdb9c1accd"),
Fq.e("1")
],
alpha_g1_sx: [
Fq.e("0x262ff8dd594374c6ed5e892ba31315f6e47c500784a12ea8d2c573730888a392"),
Fq.e("0x0b3a94f2b61178f2974e039cfd671e7405ec43eb2c09dc8f43a34f450917a62f"),
Fq.e("1")
],
beta_g1_s: [
Fq.e("0x0d9b3088b69daf6746c6bba4f9b359234abbfd3306bce14b198e7a5556c777e6"),
Fq.e("0x066d1acac914883df6a9dc57dc2037a481ba4b8646efe13e2584b9258bd52d0c"),
Fq.e("1")
],
beta_g1_sx: [
Fq.e("0x248232878c359dbe632c387dc0d955520e8d3363f1cd9621ec9fd4a05460c754"),
Fq.e("0x12074f06ef232a472cb36c328e760c4acfb4bedad4ca3ee09971578a0fe185ab"),
Fq.e("1")
],
tau_g2_spx: [
[
Fq.e("0x0fe02fcc3aee51c1f3a37f3f152ebe5476ae659468f2ee81cdeb19d0dad366c5"),
Fq.e("0x01aeb4db892bcb273aada80f5eab10e2e50ae59a5c274b0d7303f5c5a52ee88b"),
],[
Fq.e("0x2d00022d840d493fb93c68a63b29e2692c0cd3caf354fe60eae1ebacefc2c948"),
Fq.e("0x204065ff10344153a08cfe4ae543c47fba883ef8a54530fa6a52c87e5c28ef2b"),
],[
Fq.e("1"),
Fq.e("0")
]
],
alpha_g2_spx: [
[
Fq.e("0x2e649d01a58a7795762df8f0634c273ebce6950a9a2ba3d4459458620d3164a0"),
Fq.e("0x1b58044d3e205a918124fea3983583199b4f99fd0abb39ede2c684b0810bdc1e"),
],[
Fq.e("0x021d41558cea5fa32c9f3de5834cb2ee45ce4cdf471353395d019dfe0c9c2509"),
Fq.e("0x1c04148bac3f17b219c2655cd63ad2596ea63293103487be488a1d5a9054ddbf"),
],[
Fq.e("1"),
Fq.e("0")
]
],
beta_g2_spx: [
[
Fq.e("0x029251aed5163109667300035ce200b7195fc6e261581ba38776d87d7f0b1a7d"),
Fq.e("0x09d6847f1b945ccdc00418a807f4b0af67ec5c0030c4f203581eff9d4af4347f"),
],[
Fq.e("0x04b62ecdc94bf94fcefdf93f06ca4f63026a47a0d4138941b8ee45b9f7177e5c"),
Fq.e("0x1f0a6bff3945f207f407ff1c813b66a28b495f55a3788c3e200c74817e86f7ce"),
],[
Fq.e("1"),
Fq.e("0")
]
]
};
const challange = Buffer.from(
"bc0bde7980381fa642b2097591dd83f1"+
"ed15b003e15c35520af32c95eb519149"+
"2a6f3175215635cfc10e6098e2c612d0"+
"ca84f1a9f90b5333560c8af59b9209f4", "hex");
describe("keypair", () => {
it("It should calculate the right g2_s for the test vectors", async () => {
const tau_g2_sp = getG2sp(0, challange, pubKey.tau_g1_s, pubKey.tau_g1_sx);
assert(bn128.F12.eq(
bn128.pairing(pubKey.tau_g1_sx,tau_g2_sp),
bn128.pairing(pubKey.tau_g1_s, pubKey.tau_g2_spx)));
const alpha_g2_sp = getG2sp(1, challange, pubKey.alpha_g1_s, pubKey.alpha_g1_sx);
assert(bn128.F12.eq(
bn128.pairing(pubKey.alpha_g1_sx, alpha_g2_sp),
bn128.pairing(pubKey.alpha_g1_s , pubKey.alpha_g2_spx)));
const beta_g2_sp = getG2sp(2, challange, pubKey.beta_g1_s, pubKey.beta_g1_sx);
assert(bn128.F12.eq(
bn128.pairing(pubKey.beta_g1_sx, beta_g2_sp),
bn128.pairing(pubKey.beta_g1_s , pubKey.beta_g2_spx)));
});
});