api and tutorial start

Jordi Baylina 2020-07-11 10:31:52 +02:00
parent 1d3ad357c1
commit 9b418568f6
No known key found for this signature in database
GPG Key ID: 7480C80C1BE43112
83 changed files with 38236 additions and 3461 deletions


@@ -5,7 +5,8 @@ module.exports = {
         "mocha": true
     },
     "parserOptions": {
-        "ecmaVersion": 2017
+        "ecmaVersion": 2020,
+        "sourceType": "module"
     },
     "extends": "eslint:recommended",
     "rules": {

TUTORIAL.md (new file, 212 lines)

@@ -0,0 +1,212 @@
### Install snarkjs and circom
```sh
npm install -g circom
npm install -g snarkjs
```
### Help
```sh
snarkjs
```
For commands that take a long time, you can add the -v or --verbose option to see the progress.
### Start a new ceremony
```sh
snarkjs powersoftau new bn128 12 pot12_0000.ptau
```
Here bn128 is the curve and 12 is the power of the ceremony: the resulting parameters will support circuits with up to 2^12 constraints.
### Contribute to the ceremony
```sh
snarkjs powersoftau contribute pot12_0000.ptau pot12_0001.ptau --name="Example Name" -v
```
### Do a second contribution
```sh
snarkjs powersoftau contribute pot12_0001.ptau pot12_0002.ptau --name="Second contribution Name" -v
```
### Verify the file
```sh
snarkjs powersoftau verify pot12_0002.ptau
```
### Contribute using third-party software
```sh
snarkjs powersoftau export challange pot12_0002.ptau challange_0003
snarkjs powersoftau challange contribute bn128 challange_0003 response_0003
snarkjs powersoftau import response pot12_0002.ptau response_0003 pot12_0003.ptau -n="Third contribution name"
```
### Add a beacon
```sh
snarkjs powersoftau beacon pot12_0003.ptau pot12_beacon.ptau 0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f 10 -n="Final Beacon"
```
The two arguments after the file names are the beacon hash (in hex) and the number of iterations, expressed as a power of two (here 2^10).
### Prepare phase2
```sh
snarkjs powersoftau prepare phase2 pot12_beacon.ptau pot12_final.ptau -v
```
### Verify the last file
```sh
snarkjs powersoftau verify pot12_final.ptau
```
### Create a circuit
```sh
mkdir mycircuit
cd mycircuit
cat <<EOT > circuit.circom

// Proves knowledge of private inputs a and b such that the public
// output c is the result of applying x -> x*x + b to a, n times.
template Multiplier(n) {
    signal private input a;
    signal private input b;
    signal output c;

    signal int[n];    // intermediate signals

    int[0] <== a*a + b;
    for (var i=1; i<n; i++) {
        int[i] <== int[i-1]*int[i-1] + b;
    }

    c <== int[n-1];
}

component main = Multiplier(1000);
EOT
```
### Compile the circuit
```sh
circom circuit.circom -r -w -s -v
```
This generates circuit.r1cs (-r), circuit.wasm (-w) and circuit.sym (-s).
### Print info about the circuit
```sh
snarkjs r1cs info circuit.r1cs
```
### Print the constraints
```sh
snarkjs r1cs print circuit.r1cs
```
### Export the r1cs to JSON
```sh
snarkjs r1cs export json circuit.r1cs circuit.r1cs.json
```
### Generate the reference zKey without contributions from the circuit
```sh
snarkjs zkey new circuit.r1cs pot12_final.ptau circuit_0000.zkey
```
### Contribute to the phase2 ceremony
```sh
snarkjs zkey contribute circuit_0000.zkey circuit_0001.zkey --name="1st Contributor Name" -v
```
### Do a second phase2 contribution
```sh
snarkjs zkey contribute circuit_0001.zkey circuit_0002.zkey --name="Second contribution Name" -v
```
### Verify the zkey file
```sh
snarkjs zkey verify circuit.r1cs pot12_final.ptau circuit_0002.zkey
```
### Contribute using third-party software
```sh
snarkjs zkey export bellman circuit_0002.zkey challange_phase2_0003
snarkjs zkey bellman contribute bn128 challange_phase2_0003 response_phase2_0003
snarkjs zkey import bellman circuit_0002.zkey response_phase2_0003 circuit_0003.zkey -n="Third contribution name"
```
### Add a beacon
```sh
snarkjs zkey beacon circuit_0003.zkey circuit_final.zkey 0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f 10 -n="Final Beacon phase2"
```
### Verify the final file
```sh
snarkjs zkey verify circuit.r1cs pot12_final.ptau circuit_final.zkey
```
### Export the verification key
```sh
snarkjs zkey export verificationkey circuit_final.zkey verification_key.json
```
### Calculate the witness
```sh
cat <<EOT > input.json
{"a": 3, "b": 11}
EOT
snarkjs wtns calculate circuit.wasm input.json witness.wtns
```
### Debug witness calculation
In general, when you are developing a new circuit you will want to check for errors in the witness calculation process.
You can do so as follows:
```sh
snarkjs wtns debug circuit.wasm input.json witness.wtns circuit.sym --trigger --get --set
```
This will log every time a component starts or finishes (--trigger), when a signal is set (--set), and when a signal is read (--get).
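For reference, the log line formats come from the witness calculator callbacks that are visible in the cli.js diff below (this commit moves them into wtns.debug). Component and signal names depend on your circuit, so the following is only an illustrative sketch:
```
START: main
SET main.a <-- 3
GET main.a --> 3
FINISH: main
```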
### Proof calculation
```sh
snarkjs groth16 prove circuit_final.zkey witness.wtns proof.json public.json
```
It is also possible to calculate the witness and generate the proof in a single command:
```sh
snarkjs groth16 fullprove input.json circuit.wasm circuit_final.zkey proof.json public.json
```
### Verify
```sh
snarkjs groth16 verify verification_key.json public.json proof.json
```
### Export Solidity Verifier
```sh
snarkjs zkey export solidityverifier circuit_final.zkey verifier.sol
```
You can deploy the "Verifier" smart contract using Remix, for example.
In order to simulate a verification call, you can do:
```sh
snarkjs zkey export soliditycalldata public.json proof.json
```
Then cut and paste the result directly into the "verifyProof" field of the deployed smart contract.
This call will return true if the proof and the public data are valid.
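The printed parameters are the proof points a, b and c followed by the array of public signals. The shape is roughly the following (hex values elided; one public signal, as in this tutorial's circuit):
```
["0x...", "0x..."],
[["0x...", "0x..."], ["0x...", "0x..."]],
["0x...", "0x..."],
["0x..."]
```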

build/cli.cjs (executable file, 6501 lines): file diff suppressed because it is too large

build/main.js (new file, 14985 lines): file diff suppressed because one or more lines are too long

build/snarkjs.js (new file, 14972 lines): file diff suppressed because one or more lines are too long

cli.js (477 lines changed)

@@ -1,5 +1,3 @@
-#!/usr/bin/env node
-
 /*
     Copyright 2018 0KIMS association.
@@ -21,35 +19,95 @@
 /* eslint-disable no-console */
-const fs = require("fs");
-const path = require("path");
-const zkSnark = require("./index.js");
-const {stringifyBigInts, unstringifyBigInts} = require("ffjavascript").utils;
-const loadR1cs = require("r1csfile").load;
-const WitnessCalculatorBuilder = require("circom_runtime").WitnessCalculatorBuilder;
-const wtnsFile = require("./src/wtnsfile");
-const loadSyms = require("./src/loadsyms");
-const r1cs = require("./src/r1cs");
-const clProcessor = require("./src/clprocessor");
-const powersOfTaw = require("./src/powersoftau");
-const solidityGenerator = require("./src/soliditygenerator.js");
-const Scalar = require("ffjavascript").Scalar;
-const assert = require("assert");
-const zkey = require("./src/zkey");
-const zksnark = require("./src/zksnark");
-const curves = require("./src/curves");
+import fs from "fs";
+
+import {load as loadR1cs} from "r1csfile";
+
+import loadSyms from "./src/loadsyms.js";
+import * as r1cs from "./src/r1cs.js";
+
+import clProcessor from "./src/clprocessor.js";
+import * as powersOfTaw from "./src/powersoftau.js";
+
+import { utils } from "ffjavascript";
+const {stringifyBigInts, unstringifyBigInts} = utils;
+
+import * as zkey from "./src/zkey.js";
+import * as groth16 from "./src/groth16.js";
+import * as wtns from "./src/wtns.js";
+import * as curves from "./src/curves.js";
+import path from "path";
+
+import Logger from "logplease";
+const logger = Logger.create("snarkJS", {showTimestamp:false});
+Logger.setLogLevel("INFO");
 
 const commands = [
+    {
+        cmd: "powersoftau new <curve> <power> [powersoftau_0000.ptau]",
+        description: "Starts a powers of tau ceremony",
+        alias: ["ptn"],
+        options: "-verbose|v",
+        action: powersOfTawNew
+    },
+    {
+        cmd: "powersoftau contribute <powersoftau.ptau> <new_powersoftau.ptau>",
+        description: "creates a ptau file with a new contribution",
+        alias: ["ptc"],
+        options: "-verbose|v -name|n -entropy|e",
+        action: powersOfTawContribute
+    },
+    {
+        cmd: "powersoftau export challange <powersoftau_0000.ptau> [challange]",
+        description: "Creates a challange",
+        alias: ["ptec"],
+        options: "-verbose|v",
+        action: powersOfTawExportChallange
+    },
+    {
+        cmd: "powersoftau challange contribute <curve> <challange> [response]",
+        description: "Contribute to a challange",
+        alias: ["ptcc"],
+        options: "-verbose|v -entropy|e",
+        action: powersOfTawChallangeContribute
+    },
+    {
+        cmd: "powersoftau import response <powersoftau_old.ptau> <response> <<powersoftau_new.ptau>",
+        description: "import a response to a ptau file",
+        alias: ["ptir"],
+        options: "-verbose|v -nopoints -nocheck -name|n",
+        action: powersOfTawImport
+    },
+    {
+        cmd: "powersoftau verify <powersoftau.ptau>",
+        description: "verifies a powers of tau file",
+        alias: ["ptv"],
+        options: "-verbose|v",
+        action: powersOfTawVerify
+    },
+    {
+        cmd: "powersoftau beacon <old_powersoftau.ptau> <new_powersoftau.ptau> <beaconHash(Hex)> <numIterationsExp>",
+        description: "adds a beacon",
+        alias: ["ptb"],
+        options: "-verbose|v -name|n",
+        action: powersOfTawBeacon
+    },
+    {
+        cmd: "powersoftau prepare phase2 <powersoftau.ptau> <new_powersoftau.ptau>",
+        description: "Prepares phase 2. ",
+        longDescription: " This process calculates the evaluation of the Lagrange polinomials at tau for alpha*tau and beta tau",
+        alias: ["pt2"],
+        options: "-verbose|v",
+        action: powersOfTawPreparePhase2
+    },
+    {
+        cmd: "powersoftau export json <powersoftau_0000.ptau> <powersoftau_0000.json>",
+        description: "Exports a power of tau file to a JSON",
+        alias: ["ptej"],
+        options: "-verbose|v",
+        action: powersOfTawExportJson
+    },
     {
         cmd: "r1cs info [circuit.r1cs]",
         description: "Print statistiscs of a circuit",
@@ -86,10 +144,11 @@ const commands = [
         cmd: "wtns export json [witness.wtns] [witnes.json]",
         description: "Calculate the witness with debug info.",
         longDescription: "Calculate the witness with debug info. \nOptions:\n-g or --g : Log signal gets\n-s or --s : Log signal sets\n-t or --trigger : Log triggers ",
-        options: "-get|g -set|s -trigger|t",
+        options: "-verbose|v",
         alias: ["wej"],
        action: wtnsExportJson
    },
+/*
    {
        cmd: "zksnark setup [circuit.r1cs] [circuit.zkey] [verification_key.json]",
        description: "Run a simple setup for a circuit generating the proving key.",
@@ -97,94 +156,38 @@ const commands = [
        options: "-verbose|v -protocol",
        action: zksnarkSetup
    },
+*/
    {
-        cmd: "zksnark prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]",
-        description: "Generates a zk Proof",
-        alias: ["zp", "zksnark proof", "proof -pk|provingkey -wt|witness -p|proof -pub|public"],
+        cmd: "groth16 prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]",
+        description: "Generates a zk Proof from witness",
+        alias: ["g16p", "zpw", "zksnark proof", "proof -pk|provingkey -wt|witness -p|proof -pub|public"],
        options: "-verbose|v -protocol",
        action: zksnarkProve
    },
    {
-        cmd: "zksnark verify [verification_key.json] [public.json] [proof.json]",
+        cmd: "groth16 fullprove [input.json] [circuit.wasm] [circuit.zkey] [proof.json] [public.json]",
+        description: "Generates a zk Proof from input",
+        alias: ["g16f", "g16i"],
+        options: "-verbose|v -protocol",
+        action: zksnarkFullProve
+    },
+    {
+        cmd: "groth16 verify [verification_key.json] [public.json] [proof.json]",
        description: "Verify a zk Proof",
-        alias: ["zv", "verify -vk|verificationkey -pub|public -p|proof"],
+        alias: ["g16v", "verify -vk|verificationkey -pub|public -p|proof"],
        action: zksnarkVerify
    },
    {
-        cmd: "solidity genverifier <verificationKey.json> <verifier.sol>",
+        cmd: "zkey export solidityverifier [circuit.zkey] [verifier.sol]",
        description: "Creates a verifier in solidity",
-        alias: ["ks", "generateverifier -vk|verificationkey -v|verifier"],
-        action: solidityGenVerifier
+        alias: ["zkesv", "generateverifier -vk|verificationkey -v|verifier"],
+        action: zkeyExportSolidityVerifier
    },
    {
-        cmd: "solidity gencall <public.json> <proof.json>",
+        cmd: "zkey export soliditycalldata <public.json> <proof.json>",
        description: "Generates call parameters ready to be called.",
-        alias: ["pc", "generatecall -pub|public -p|proof"],
-        action: solidityGenCall
+        alias: ["zkesc", "generatecall -pub|public -p|proof"],
+        action: zkeyExportSolidityCalldata
    },
-    {
-        cmd: "powersoftau new <curve> <power> [powersoftau_0000.ptau]",
-        description: "Starts a powers of tau ceremony",
-        alias: ["ptn"],
-        options: "-verbose|v",
-        action: powersOfTawNew
-    },
-    {
-        cmd: "powersoftau export challange <powersoftau_0000.ptau> [challange]",
-        description: "Creates a challange",
-        alias: ["ptec"],
-        options: "-verbose|v",
-        action: powersOfTawExportChallange
-    },
-    {
-        cmd: "powersoftau challange contribute <curve> <challange> [response]",
-        description: "Contribute to a challange",
-        alias: ["ptcc"],
-        options: "-verbose|v -entropy|e",
-        action: powersOfTawChallangeContribute
-    },
-    {
-        cmd: "powersoftau import <powersoftau_old.ptau> <response> <<powersoftau_new.ptau>",
-        description: "import a response to a ptau file",
-        alias: ["pti"],
-        options: "-verbose|v -nopoints -nocheck -name|n",
-        action: powersOfTawImport
-    },
-    {
-        cmd: "powersoftau verify <powersoftau.ptau>",
-        description: "verifies a powers of tau file",
-        alias: ["ptv"],
-        options: "-verbose|v",
-        action: powersOfTawVerify
-    },
-    {
-        cmd: "powersoftau beacon <old_powersoftau.ptau> <new_powersoftau.ptau> <beaconHash(Hex)> <numIterationsExp>",
-        description: "adds a beacon",
-        alias: ["ptb"],
-        options: "-verbose|v -name|n",
-        action: powersOfTawBeacon
-    },
-    {
-        cmd: "powersoftau contribute <powersoftau.ptau> <new_powersoftau.ptau>",
-        description: "creates a ptau file with a new contribution",
-        alias: ["ptc"],
-        options: "-verbose|v -name|n -entropy|e",
-        action: powersOfTawContribute
-    },
-    {
-        cmd: "powersoftau prepare phase2 <powersoftau.ptau> <new_powersoftau.ptau>",
-        description: "Prepares phase 2. ",
-        longDescription: " This process calculates the evaluation of the Lagrange polinomials at tau for alpha*tau and beta tau",
-        alias: ["pt2"],
-        options: "-verbose|v",
-        action: powersOfTawPreparePhase2
-    },
-    {
-        cmd: "powersoftau export json <powersoftau_0000.ptau> <powersoftau_0000.json>",
-        description: "Exports a power of tau file to a JSON",
-        alias: ["ptej"],
-        options: "-verbose|v",
-        action: powersOfTawExportJson
-    },
    {
        cmd: "zkey new [circuit.r1cs] [powersoftau.ptau] [circuit.zkey]",
@@ -204,7 +207,7 @@ const commands = [
        cmd: "zkey import bellman <circuit_old.zkey> <circuit.mpcparams> <circuit_new.zkey>",
        description: "Export a zKey to a MPCParameters file compatible with kobi/phase2 (Bellman) ",
        alias: ["zkib"],
-        options: "-verbose|v",
+        options: "-verbose|v -name|n",
        action: zkeyImportBellman
    },
    {
@@ -229,14 +232,14 @@ const commands = [
        action: zkeyBeacon
    },
    {
-        cmd: "zkey challange contribute <curve> <challange> [response]",
+        cmd: "zkey bellman contribute <curve> <circuit.mpcparams> <circuit_response.mpcparams>",
        description: "contributes to a llallange file in bellman format",
-        alias: ["zkcc"],
+        alias: ["zkbc"],
        options: "-verbose|v -entropy|e",
-        action: zkeyChallangeContribute
+        action: zkeyBellmanContribute
    },
    {
-        cmd: "zkey export vkey [circuit.zkey] [verification_key.json]",
+        cmd: "zkey export verificationkey [circuit.zkey] [verification_key.json]",
        description: "Exports a verification key",
        alias: ["zkev"],
        action: zkeyExportVKey
@@ -256,8 +259,7 @@ const commands = [
 clProcessor(commands).then( (res) => {
     process.exit(res);
 }, (err) => {
-    console.log(err.stack);
-    console.log("ERROR: " + err);
+    logger.error(err);
     process.exit(1);
 });
@@ -327,7 +329,9 @@ function changeExt(fileName, newExt) {
 async function r1csInfo(params, options) {
     const r1csName = params[0] || "circuit.r1cs";
 
-    await r1cs.info(r1csName);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    await r1cs.info(r1csName, logger);
 
     return 0;
@@ -337,11 +341,14 @@ async function r1csInfo(params, options) {
 async function r1csPrint(params, options) {
     const r1csName = params[0] || "circuit.r1cs";
     const symName = params[1] || changeExt(r1csName, "sym");
+
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
     const cir = await loadR1cs(r1csName, true, true);
 
     const sym = await loadSyms(symName);
 
-    await r1cs.print(cir, sym);
+    await r1cs.print(cir, sym, logger);
 
     return 0;
 }
@@ -352,7 +359,12 @@ async function r1csExportJSON(params, options) {
     const r1csName = params[0] || "circuit.r1cs";
     const jsonName = params[1] || changeExt(r1csName, "json");
 
-    await r1cs.exportJson(r1csName, jsonName);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    const r1csObj = await r1cs.exportJson(r1csName, logger);
+
+    const S = JSON.stringify(utils.stringifyBigInts(r1csObj), null, 1);
+    await fs.promises.writeFile(jsonName, S);
 
     return 0;
 }
@@ -363,18 +375,11 @@ async function wtnsCalculate(params, options) {
     const inputName = params[1] || "input.json";
     const witnessName = params[2] || "witness.wtns";
 
-    const wasm = await fs.promises.readFile(wasmName);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
     const input = unstringifyBigInts(JSON.parse(await fs.promises.readFile(inputName, "utf8")));
 
-    const wc = await WitnessCalculatorBuilder(wasm, options);
-    const w = await wc.calculateBinWitness(input);
-    await wtnsFile.writeBin(witnessName, w, wc.prime);
-/*
-    const w = await wc.calculateWitness(input);
-    await wtnsFile.write(witnessName, w, wc.prime);
-*/
-    // fs.promises.writeFile(witnessName, JSON.stringify(stringifyBigInts(w), null, 1));
+    await wtns.calculate(input, wasmName, witnessName, {});
 
     return 0;
 }
@@ -388,42 +393,11 @@ async function wtnsDebug(params, options) {
     const witnessName = params[2] || "witness.wtns";
     const symName = params[3] || changeExt(wasmName, "sym");
 
-    const wasm = await fs.promises.readFile(wasmName);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
     const input = unstringifyBigInts(JSON.parse(await fs.promises.readFile(inputName, "utf8")));
 
-    let wcOps = {
-        sanityCheck: true
-    };
-    let sym = await loadSyms(symName);
-    if (options.set) {
-        if (!sym) sym = await loadSyms(symName);
-        wcOps.logSetSignal= function(labelIdx, value) {
-            console.log("SET " + sym.labelIdx2Name[labelIdx] + " <-- " + value.toString());
-        };
-    }
-    if (options.get) {
-        if (!sym) sym = await loadSyms(symName);
-        wcOps.logGetSignal= function(varIdx, value) {
-            console.log("GET " + sym.labelIdx2Name[varIdx] + " --> " + value.toString());
-        };
-    }
-    if (options.trigger) {
-        if (!sym) sym = await loadSyms(symName);
-        wcOps.logStartComponent= function(cIdx) {
-            console.log("START: " + sym.componentIdx2Name[cIdx]);
-        };
-        wcOps.logFinishComponent= function(cIdx) {
-            console.log("FINISH: " + sym.componentIdx2Name[cIdx]);
-        };
-    }
-
-    const wc = await WitnessCalculatorBuilder(wasm, wcOps);
-    const w = await wc.calculateWitness(input);
-    await wtnsFile.write(witnessName, w);
-
-    // await fs.promises.writeFile(witnessName, JSON.stringify(stringifyBigInts(w), null, 1));
+    await wtns.debug(input, wasmName, witnessName, symName, options, logger);
 
     return 0;
 }
@@ -435,7 +409,9 @@ async function wtnsExportJson(params, options) {
     const wtnsName = params[0] || "witness.wtns";
     const jsonName = params[1] || "witness.json";
 
-    const w = await wtnsFile.read(wtnsName);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    const w = await wtns.exportJson(wtnsName);
 
     await fs.promises.writeFile(jsonName, JSON.stringify(stringifyBigInts(w), null, 1));
@@ -443,7 +419,7 @@ async function wtnsExportJson(params, options) {
 }
 
-
+/*
 // zksnark setup [circuit.r1cs] [circuit.zkey] [verification_key.json]
 async function zksnarkSetup(params, options) {
@@ -465,36 +441,9 @@ async function zksnarkSetup(params, options) {
     return 0;
 }
 
-/*
-// zksnark prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]
-async function zksnarkProve(params, options) {
-
-    const zkeyName = params[0] || "circuit.zkey";
-    const witnessName = params[1] || "witness.wtns";
-    const proofName = params[2] || "proof.json";
-    const publicName = params[3] || "public.json";
-
-    const witness = await wtnsFile.read(witnessName);
-    // const witness = unstringifyBigInts(JSON.parse(fs.readFileSync(witnessName, "utf8")));
-
-    const provingKey = await zkeyFile.read(zkeyName);
-    // const provingKey = unstringifyBigInts(JSON.parse(fs.readFileSync(provingKeyName, "utf8")));
-
-    const protocol = provingKey.protocol;
-    if (!zkSnark[protocol]) throw new Error("Invalid protocol");
-    const {proof, publicSignals} = zkSnark[protocol].genProof(provingKey, witness, options.verbose);
-
-    await fs.promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8");
-    await fs.promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8");
-
-    return 0;
-}
 */
 
-// zksnark prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]
+// groth16 prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]
 async function zksnarkProve(params, options) {
 
     const zkeyName = params[0] || "circuit.zkey";
@@ -502,8 +451,9 @@ async function zksnarkProve(params, options) {
     const proofName = params[2] || "proof.json";
     const publicName = params[3] || "public.json";
 
+    if (options.verbose) Logger.setLogLevel("DEBUG");
 
-    const {proof, publicSignals} = await zksnark.groth16.prover(zkeyName, witnessName, options.verbose);
+    const {proof, publicSignals} = await groth16.prove(zkeyName, witnessName, logger);
 
     await fs.promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8");
     await fs.promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8");
@@ -511,41 +461,60 @@ async function zksnarkProve(params, options) {
     return 0;
 }
 
-// zksnark verify [verification_key.json] [public.json] [proof.json]
+// groth16 fullprove [input.json] [circuit.wasm] [circuit.zkey] [proof.json] [public.json]
+async function zksnarkFullProve(params, options) {
+
+    const inputName = params[0] || "input.json";
+    const wasmName = params[1] || "circuit.wasm";
+    const zkeyName = params[2] || "circuit.zkey";
+    const proofName = params[3] || "proof.json";
+    const publicName = params[4] || "public.json";
+
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    const input = unstringifyBigInts(JSON.parse(await fs.promises.readFile(inputName, "utf8")));
+
+    const {proof, publicSignals} = await groth16.fullProve(input, wasmName, zkeyName, logger);
+
+    await fs.promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8");
+    await fs.promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8");
+
+    return 0;
+}
+
+// groth16 verify [verification_key.json] [public.json] [proof.json]
 async function zksnarkVerify(params, options) {
 
     const verificationKeyName = params[0] || "verification_key.json";
-    const publicName = params[0] || "public.json";
-    const proofName = params[0] || "proof.json";
+    const publicName = params[1] || "public.json";
+    const proofName = params[2] || "proof.json";
 
     const verificationKey = unstringifyBigInts(JSON.parse(fs.readFileSync(verificationKeyName, "utf8")));
     const pub = unstringifyBigInts(JSON.parse(fs.readFileSync(publicName, "utf8")));
     const proof = unstringifyBigInts(JSON.parse(fs.readFileSync(proofName, "utf8")));
 
-/*
-    const protocol = verificationKey.protocol;
-    if (!zkSnark[protocol]) throw new Error("Invalid protocol");
-    const isValid = zkSnark[protocol].isValid(verificationKey, proof, pub);
-*/
-    const isValid = await zksnark.groth16.verifier(verificationKey, proof, pub);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    const isValid = await groth16.validate(verificationKey, pub, proof, logger);
 
     if (isValid) {
-        console.log("OK");
         return 0;
     } else {
-        console.log("INVALID");
        return 1;
    }
 }
 
 // zkey export vkey [circuit.zkey] [verification_key.json]",
-async function zkeyExportVKey(params) {
+async function zkeyExportVKey(params, options) {
 
     const zkeyName = params[0] || "circuit.zkey";
     const verificationKeyName = params[2] || "verification_key.json";
 
-    return await zkey.exportVerificationKey(zkeyName, verificationKeyName);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    const vKey = await zkey.exportVerificationKey(zkeyName);
+
+    const S = JSON.stringify(utils.stringifyBigInts(vKey), null, 1);
+    await fs.promises.writeFile(verificationKeyName, S);
 }
 
 // zkey export json [circuit.zkey] [circuit.zkey.json]",
@@ -553,18 +522,23 @@ async function zkeyExportJson(params, options) {
     const zkeyName = params[0] || "circuit.zkey";
     const zkeyJsonName = params[1] || "circuit.zkey.json";
 
-    return await zkey.exportJson(zkeyName, zkeyJsonName, options.verbose);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    const zKey = await zkey.exportJson(zkeyName, logger);
+
+    const S = JSON.stringify(utils.stringifyBigInts(zKey), null, 1);
+    await fs.promises.writeFile(zkeyJsonName, S);
 }
 
-// solidity genverifier <verificationKey.json> <verifier.sol>
-async function solidityGenVerifier(params, options) {
+// solidity genverifier [circuit.zkey] [verifier.sol]
+async function zkeyExportSolidityVerifier(params, options) {
 
-    let verificationKeyName;
+    let zkeyName;
     let verifierName;
 
     if (params.length < 1) {
-        verificationKeyName = "verification_key.json";
+        zkeyName = "circuit.zkey";
     } else {
-        verificationKeyName = params[0];
+        zkeyName = params[0];
     }
 
     if (params.length < 2) {
@@ -573,18 +547,11 @@ async function solidityGenVerifier(params, options) {
         verifierName = params[1];
     }
 
-    const verificationKey = unstringifyBigInts(JSON.parse(fs.readFileSync(verificationKeyName, "utf8")));
+    if (options.verbose) Logger.setLogLevel("DEBUG");
 
-    let verifierCode;
-    if (verificationKey.protocol == "original") {
-        verifierCode = solidityGenerator.generateVerifier_original(verificationKey);
-    } else if (verificationKey.protocol == "groth16") {
-        verifierCode = solidityGenerator.generateVerifier_groth16(verificationKey);
-    } else if (verificationKey.protocol == "kimleeoh") {
-        verifierCode = solidityGenerator.generateVerifier_kimleeoh(verificationKey);
-    } else {
-        throw new Error("InvalidProof");
-    }
+    const templateName = path.join( __dirname, "templates", "verifier_groth16.sol");
+
+    const verifierCode = await zkey.exportSolidityVerifier(zkeyName, templateName, logger);
 
     fs.writeFileSync(verifierName, verifierCode, "utf-8");
@@ -593,7 +560,7 @@ async function solidityGenVerifier(params, options) {
 
 // solidity gencall <public.json> <proof.json>
-async function solidityGenCall(params, options) {
+async function zkeyExportSolidityCalldata(params, options) {
 
     let publicName;
     let proofName;
@@ -609,14 +576,15 @@ async function solidityGenCall(params, options) {
         proofName = params[1];
     }
 
+    if (options.verbose) Logger.setLogLevel("DEBUG");
 
-    const public = unstringifyBigInts(JSON.parse(fs.readFileSync(publicName, "utf8")));
+    const pub = unstringifyBigInts(JSON.parse(fs.readFileSync(publicName, "utf8")));
     const proof = unstringifyBigInts(JSON.parse(fs.readFileSync(proofName, "utf8")));
 
     let inputs = "";
-    for (let i=0; i<public.length; i++) {
+    for (let i=0; i<pub.length; i++) {
         if (inputs != "") inputs = inputs + ",";
-        inputs = inputs + p256(public[i]);
+        inputs = inputs + p256(pub[i]);
     }
 
     let S;
@@ -665,7 +633,9 @@ async function powersOfTawNew(params, options) {
     const curve = await curves.getCurveFromName(curveName);
 
-    return await powersOfTaw.newAccumulator(curve, power, ptauName, options.verbose);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    return await powersOfTaw.newAccumulator(curve, power, ptauName, logger);
 }
 
 async function powersOfTawExportChallange(params, options) {
@@ -680,7 +650,9 @@ async function powersOfTawExportChallange(params, options) {
         challangeName = params[1];
     }
 
-    return await powersOfTaw.exportChallange(ptauName, challangeName, options.verbose);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    return await powersOfTaw.exportChallange(ptauName, challangeName, logger);
 }
 
 // powersoftau challange contribute <curve> <challange> [response]
@@ -698,7 +670,9 @@ async function powersOfTawChallangeContribute(params, options) {
         responseName = params[2];
     }
 
-    return await powersOfTaw.challangeContribute(curve, challangeName, responseName, options.entropy, options.verbose);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    return await powersOfTaw.challangeContribute(curve, challangeName, responseName, options.entropy, logger);
 }
 
@@ -716,7 +690,9 @@ async function powersOfTawImport(params, options) {
     if (options.nopoints) importPoints = false;
     if (options.nocheck) doCheck = false;
 
-    const res = await powersOfTaw.importResponse(oldPtauName, response, newPtauName, options.name, importPoints, options.verbose);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    const res = await powersOfTaw.importResponse(oldPtauName, response, newPtauName, options.name, importPoints, logger);
 
     if (res) return res;
     if (!doCheck) return;
@@ -729,12 +705,12 @@ async function powersOfTawVerify(params, options) {
     ptauName = params[0];
 
-    const res = await powersOfTaw.verify(ptauName, options.verbose);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    const res = await powersOfTaw.verify(ptauName, logger);
     if (res === true) {
-        console.log("Powers of tau OK!");
         return 0;
     } else {
-        console.log("=======>INVALID Powers of tau<==========");
         return 1;
     }
 }
@@ -750,7 +726,9 @@ async function powersOfTawBeacon(params, options) {
     beaconHashStr = params[2];
     numIterationsExp = params[3];
 
-    return await powersOfTaw.beacon(oldPtauName, newPtauName, options.name ,numIterationsExp, beaconHashStr, options.verbose);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    return await powersOfTaw.beacon(oldPtauName, newPtauName, options.name ,beaconHashStr, numIterationsExp, logger);
 }
 
 async function powersOfTawContribute(params, options) {
@@ -760,7 +738,9 @@ async function powersOfTawContribute(params, options) {
     oldPtauName = params[0];
     newPtauName = params[1];
 
-    return await powersOfTaw.contribute(oldPtauName, newPtauName, options.name , options.entropy, options.verbose);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    return await powersOfTaw.contribute(oldPtauName, newPtauName, options.name , options.entropy, logger);
 }
 
 async function powersOfTawPreparePhase2(params, options) {
@@ -770,7 +750,9 @@ async function powersOfTawPreparePhase2(params, options) {
     oldPtauName = params[0];
     newPtauName = params[1];
 
-    return await powersOfTaw.preparePhase2(oldPtauName, newPtauName, options.verbose);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    return await powersOfTaw.preparePhase2(oldPtauName, newPtauName, logger);
 }
 
 // powersoftau export json <powersoftau_0000.ptau> <powersoftau_0000.json>",
@@ -781,7 +763,13 @@ async function powersOfTawExportJson(params, options) {
     ptauName = params[0];
     jsonName = params[1];
 
-    return await powersOfTaw.exportJson(ptauName, jsonName, options.verbose);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    const pTau = await powersOfTaw.exportJson(ptauName, logger);
+
+    const S = JSON.stringify(stringifyBigInts(pTau), null, 1);
+
+    await fs.promises.writeFile(jsonName, S);
 }
 
@@ -809,7 +797,9 @@ async function zkeyNew(params, options) {
         zkeyName = params[2];
     }
 
-    return zkey.new(r1csName, ptauName, zkeyName, options.verbose);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    return zkey.newZKey(r1csName, ptauName, zkeyName, logger);
 }
 
 // zkey export bellman [circuit.zkey] [circuit.mpcparams]
@@ -829,7 +819,9 @@ async function zkeyExportBellman(params, options) {
         mpcparamsName = params[1];
     }
 
-    return zkey.exportBellman(zkeyName, mpcparamsName, options.verbose);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    return zkey.exportBellman(zkeyName, mpcparamsName, logger);
 }
 
@@ -844,7 +836,9 @@ async function zkeyImportBellman(params, options) {
     mpcParamsName = params[1];
     zkeyNameNew = params[2];
 
-    return zkey.importBellman(zkeyNameOld, mpcParamsName, zkeyNameNew, options.verbose);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    return zkey.importBellman(zkeyNameOld, mpcParamsName, zkeyNameNew, options.name, logger);
 }
 
 // phase2 verify [circuit.r1cs] [powersoftau.ptau] [circuit.zkey]
@@ -871,12 +865,12 @@ async function zkeyVerify(params, options) {
         zkeyName = params[2];
     }
 
-    const res = await zkey.verify(r1csName, ptauName, zkeyName, options.verbose);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    const res = await zkey.verify(r1csName, ptauName, zkeyName, logger);
     if (res === true) {
-        console.log("zKey OK!");
         return 0;
     } else {
-        console.log("=======>INVALID zKey<==========");
         return 1;
     }
@@ -891,8 +885,9 @@ async function zkeyContribute(params, options) {
     zkeyOldName = params[0];
     zkeyNewName = params[1];
 
+    if (options.verbose) Logger.setLogLevel("DEBUG");
 
-    return zkey.contribute(zkeyOldName, zkeyNewName, options.name, options.entropy, options.verbose);
+    return zkey.contribute(zkeyOldName, zkeyNewName, options.name, options.entropy, logger);
 }
 
 // zkey beacon <circuit_old.zkey> <circuit_new.zkey> <beaconHash(Hex)> <numIterationsExp>
@@ -907,12 +902,14 @@ async function zkeyBeacon(params, options) {
     beaconHashStr = params[2];
     numIterationsExp = params[3];
 
-    return await zkey.beacon(zkeyOldName, zkeyNewName, options.name ,numIterationsExp, beaconHashStr, options.verbose);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    return await zkey.beacon(zkeyOldName, zkeyNewName, options.name ,beaconHashStr, numIterationsExp, logger);
 }
 
 // zkey challange contribute <curve> <challange> [response]",
-async function zkeyChallangeContribute(params, options) {
+async function zkeyBellmanContribute(params, options) {
 
     let challangeName;
     let responseName;
@@ -926,6 +923,8 @@ async function zkeyChallangeContribute(params, options) {
         responseName = params[2];
     }
 
-    return zkey.challangeContribute(curve, challangeName, responseName, options.entropy, options.verbose);
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    return zkey.bellmanContribute(curve, challangeName, responseName, options.entropy, logger);
 }

rollup config for build/main.js (new file)

@@ -0,0 +1,17 @@
import resolve from "rollup-plugin-node-resolve";
import commonJS from "rollup-plugin-commonjs";
export default {
input: "main.js",
output: {
file: "build/main.js",
format: "cjs",
},
external: ["fs", "os", "worker_threads", "readline", "crypto", "path"],
plugins: [
resolve({ preferBuiltins: true }),
commonJS({
preserveSymlinks: true
}),
]
};

rollup config for build/cli.cjs (new file)

@@ -0,0 +1,41 @@
import resolve from "rollup-plugin-node-resolve";
import commonJS from "rollup-plugin-commonjs";
import json from "rollup-plugin-json";
export default {
input: "cli.js",
output: {
file: "build/cli.cjs",
format: "cjs",
banner: "#! /usr/bin/env node\n",
},
external: [
"fs",
"os",
"worker_threads",
"readline",
"crypto",
"path",
"big-integer",
"wasmsnark",
"circom_runtime",
"blake2b-wasm",
"ffjavascript",
"keccak",
"yargs",
"logplease"
],
plugins: [
resolve({
preferBuiltins: true,
}),
commonJS({
preserveSymlinks: true,
include: "node_modules/**",
exclude: "node_modules/big-integer/**"
}),
json()
]
};

rollup config for build/snarkjs.js (new file)

@@ -0,0 +1,23 @@
import resolve from "rollup-plugin-node-resolve";
import commonJS from "rollup-plugin-commonjs";
import ignore from "rollup-plugin-ignore";
import replace from "rollup-plugin-replace";
export default {
input: "main.js",
output: {
file: "build/snarkjs.js",
format: "iife",
sourcemap: "inline",
globals: {
os: "null"
},
name: "snarkjs"
},
plugins: [
ignore(["fs", "os", "crypto", "readline", "worker_threads"]),
resolve(),
commonJS(),
replace({ "process.browser": !!process.env.BROWSER }),
]
};

old CommonJS entry point (file deleted)

@@ -1,35 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
exports.original = {
setup: require("./src/setup_original.js"),
genProof: require("./src/prover_original.js"),
isValid: require("./src/verifier_original.js")
};
exports.groth16 = {
setup: require("./src/setup_groth16.js"),
genProof: require("./src/prover_groth16.js"),
isValid: require("./src/verifier_groth16.js")
};
exports.kimleeoh = {
setup: require("./src/setup_kimleeoh.js"),
genProof: require("./src/prover_kimleeoh.js"),
isValid: require("./src/verifier_kimleeoh.js")
};

main.js (new file, 7 lines)

@@ -0,0 +1,7 @@
export * as groth16 from "./src/groth16.js";
export * as powersOfTau from "./src/powersoftau.js";
export * as r1cs from "./src/r1cs.js";
export * as wtns from "./src/wtns.js";
export * as zKey from "./src/zkey.js";
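The commit message calls this the start of the API, and main.js is its entry point. Below is a minimal sketch of driving it from Node, mirroring the groth16.fullProve and groth16.validate calls that cli.js makes above (file names taken from the tutorial; this is an assumption about intended usage, not documented API):

```js
import * as snarkjs from "./main.js";
import { utils } from "ffjavascript";
import fs from "fs";

async function run() {
    // Witness calculation + proof generation in one step,
    // as cli.js does for `snarkjs groth16 fullprove`.
    const input = utils.unstringifyBigInts(
        JSON.parse(await fs.promises.readFile("input.json", "utf8")));
    const {proof, publicSignals} = await snarkjs.groth16.fullProve(
        input, "circuit.wasm", "circuit_final.zkey");

    // Verification, as cli.js does for `snarkjs groth16 verify`.
    const vKey = utils.unstringifyBigInts(
        JSON.parse(await fs.promises.readFile("verification_key.json", "utf8")));
    const isValid = await snarkjs.groth16.validate(vKey, publicSignals, proof);
    console.log(isValid ? "OK" : "INVALID");
}

run().then(() => process.exit(0), (err) => { console.error(err); process.exit(1); });
```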

package-lock.json (generated, 253 lines changed)

@@ -36,6 +36,27 @@
       "integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==",
       "dev": true
     },
+    "@types/estree": {
+      "version": "0.0.45",
+      "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.45.tgz",
+      "integrity": "sha512-jnqIUKDUqJbDIUxm0Uj7bnlMnRm1T/eZ9N+AVMqhPgzrba2GhGG5o/jCTwmdPK709nEZsGoMzXEDUjcXHa3W0g==",
+      "dev": true
+    },
+    "@types/node": {
+      "version": "14.0.18",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-14.0.18.tgz",
+      "integrity": "sha512-0Z3nS5acM0cIV4JPzrj9g/GH0Et5vmADWtip3YOXOp1NpOLU8V3KoZDc8ny9c1pe/YSYYzQkAWob6dyV/EWg4g==",
+      "dev": true
+    },
+    "@types/resolve": {
+      "version": "0.0.8",
+      "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-0.0.8.tgz",
+      "integrity": "sha512-auApPaJf3NPfe18hSoJkp8EbZzer2ISk7o8mCC3M9he/a04+gbMF97NkpD2S8riMGvm4BMRI59/SZQSaLTKpsQ==",
+      "dev": true,
+      "requires": {
+        "@types/node": "*"
+      }
+    },
     "acorn": {
       "version": "7.1.1",
       "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.1.tgz",
@@ -119,7 +140,8 @@
     "assertion-error": {
       "version": "1.1.0",
       "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz",
-      "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw=="
+      "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==",
+      "dev": true
     },
     "astral-regex": {
       "version": "1.0.0",
@@ -144,33 +166,17 @@
       "integrity": "sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow==",
       "dev": true
     },
-    "blake2b": {
-      "version": "2.1.3",
-      "resolved": "https://registry.npmjs.org/blake2b/-/blake2b-2.1.3.tgz",
-      "integrity": "sha512-pkDss4xFVbMb4270aCyGD3qLv92314Et+FsKzilCLxDz5DuZ2/1g3w4nmBbu6nKApPspnjG7JcwTjGZnduB1yg==",
-      "requires": {
-        "blake2b-wasm": "^1.1.0",
-        "nanoassert": "^1.0.0"
-      },
-      "dependencies": {
-        "blake2b-wasm": {
-          "version": "1.1.7",
-          "resolved": "https://registry.npmjs.org/blake2b-wasm/-/blake2b-wasm-1.1.7.tgz",
-          "integrity": "sha512-oFIHvXhlz/DUgF0kq5B1CqxIDjIJwh9iDeUUGQUcvgiGz7Wdw03McEO7CfLBy7QKGdsydcMCgO9jFNBAFCtFcA==",
-          "requires": {
-            "nanoassert": "^1.0.0"
-          }
-        }
-      }
-    },
-    "blake2b-wasm": {
-      "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/blake2b-wasm/-/blake2b-wasm-2.1.0.tgz",
-      "integrity": "sha512-8zKXt9nk4cUCBU2jaUcSYcPA+UESwWOmb9Gsi8J35BifVb+tjVmbDhZbvmVmZEk6xZN1y35RNW6VqOwb0mkqsg==",
-      "dev": true,
-      "requires": {
-        "nanoassert": "^1.0.0"
-      }
-    },
+    "blake2b-wasm": {
+      "version": "git+https://github.com/jbaylina/blake2b-wasm.git#0d5f024b212429c7f50a7f533aa3a2406b5b42b3",
+      "from": "git+https://github.com/jbaylina/blake2b-wasm.git",
+      "requires": {
+        "nanoassert": "^1.0.0"
+      }
+    },
+    "blakejs": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/blakejs/-/blakejs-1.1.0.tgz",
+      "integrity": "sha1-ad+S75U6qIylGjLfarHFShVfx6U="
+    },
     "brace-expansion": {
       "version": "1.1.11",
@@ -197,6 +203,12 @@
       "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==",
       "dev": true
     },
+    "builtin-modules": {
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz",
+      "integrity": "sha512-k0KL0aWZuBt2lrxrcASWDfwOLMnodeQjodT/1SxEQAXsHANgo6ZC/VEaSEHCXt7aSTZ4/4H5LKa+tBXmW7Vtvw==",
+      "dev": true
+    },
     "callsites": {
       "version": "3.1.0",
       "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
@@ -212,6 +224,7 @@
       "version": "4.2.0",
       "resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz",
       "integrity": "sha512-XQU3bhBukrOsQCuwZndwGcCVQHyZi53fQ6Ys1Fym7E4olpIqqZZhhoFJoaKVvV17lWQoXYwgWN2nF5crA8J2jw==",
+      "dev": true,
       "requires": {
         "assertion-error": "^1.1.0",
         "check-error": "^1.0.2",
@@ -241,7 +254,8 @@
     "check-error": {
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz",
-      "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII="
+      "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=",
+      "dev": true
     },
     "chokidar": {
       "version": "3.3.0",
@@ -260,11 +274,11 @@
       }
     },
     "circom_runtime": {
-      "version": "0.0.6",
-      "resolved": "https://registry.npmjs.org/circom_runtime/-/circom_runtime-0.0.6.tgz",
-      "integrity": "sha512-o0T5MuWzxnxinWG3+CygS/kZouoP+z5ZrufUwqKJy3gsVFJhkbqMpfKmcBGjhExB3uatA7cKyOiRAOLOz5+t5w==",
+      "version": "0.0.8",
+      "resolved": "https://registry.npmjs.org/circom_runtime/-/circom_runtime-0.0.8.tgz",
+      "integrity": "sha512-4ddyXq5doq0Mj3QXUWy3owmiE+gI4EMYwn7UjFeQKUb9ieXK6ZKvz3RVP+fktcuPQvdc69q5X310trnX7d7Xcw==",
       "requires": {
-        "ffjavascript": "0.1.0",
+        "ffjavascript": "0.2.2",
         "fnv-plus": "^1.3.1"
       }
     },
@@ -313,11 +327,6 @@
       "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=",
       "dev": true
     },
-    "commander": {
-      "version": "5.1.0",
-      "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz",
-      "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg=="
-    },
     "concat-map": {
       "version": "0.0.1",
       "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
@@ -354,6 +363,7 @@
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz",
       "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==",
+      "dev": true,
       "requires": {
         "type-detect": "^4.0.0"
       }
@@ -435,7 +445,8 @@
     "escape-string-regexp": {
       "version": "1.0.5",
       "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
-      "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ="
+      "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=",
+      "dev": true
     },
     "eslint": {
       "version": "6.8.0",
@@ -579,6 +590,12 @@
       "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==",
       "dev": true
     },
+    "estree-walker": {
+      "version": "0.6.1",
+      "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-0.6.1.tgz",
+      "integrity": "sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==",
+      "dev": true
+    },
     "esutils": {
       "version": "2.0.3",
       "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
@@ -629,16 +646,18 @@
       "dev": true
     },
     "fastfile": {
-      "version": "0.0.1",
-      "resolved": "https://registry.npmjs.org/fastfile/-/fastfile-0.0.1.tgz",
-      "integrity": "sha512-Fk8PWafGWGEUw7oPq/dJen92ASxknCEy4ZC8n4VEvSwCp/jcReyEmVoWsRIWTf+IvAp2MzvFi54vOPeK2LQZtQ=="
+      "version": "0.0.5",
+      "resolved": "https://registry.npmjs.org/fastfile/-/fastfile-0.0.5.tgz",
+      "integrity": "sha512-h6YDy9iI1gITf900quL91qnBl25JtqU5KD82NzhW0B35YFjGhXwWSkUA8g+nyz1th95RWEhtonz7O2AiSL+lQg=="
     },
     "ffjavascript": {
-      "version": "0.1.0",
-      "resolved": "https://registry.npmjs.org/ffjavascript/-/ffjavascript-0.1.0.tgz",
-      "integrity": "sha512-dmKlUasSfvUcxBm8nCSKl2x7EFJsXA7OVP8XLFA03T2+6mAc3IiVLC2ambEVOcMOhyhl0vJfVZjM9f9d38D1rw==",
+      "version": "0.2.2",
+      "resolved": "https://registry.npmjs.org/ffjavascript/-/ffjavascript-0.2.2.tgz",
+      "integrity": "sha512-Fp3qbKCuk3ZuOgMhpV6E5fiv/ZfMV5iuiGCk2YBb2rdFkgHza5Vc/AZTDDIwetoq1kh/ZX3Ky8k214jzDHVFDw==",
       "requires": {
-        "big-integer": "^1.6.48"
+        "big-integer": "^1.6.48",
+        "wasmcurves": "0.0.4",
+        "worker-threads": "^1.0.0"
       }
     },
     "figures": {
@@ -740,7 +759,8 @@
     "get-func-name": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz",
-      "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE="
+      "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=",
+      "dev": true
     },
     "get-stream": {
       "version": "4.1.0",
@@ -1019,6 +1039,12 @@
         "is-extglob": "^2.1.1"
       }
     },
+    "is-module": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz",
+      "integrity": "sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE=",
+      "dev": true
+    },
     "is-number": {
       "version": "7.0.0",
       "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
@@ -1031,6 +1057,15 @@
       "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=",
       "dev": true
     },
+    "is-reference": {
+      "version": "1.2.1",
+      "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-1.2.1.tgz",
+      "integrity": "sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==",
+      "dev": true,
+      "requires": {
+        "@types/estree": "*"
+      }
+    },
     "is-regex": {
       "version": "1.0.5",
       "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz",
@@ -1138,6 +1173,20 @@
         "chalk": "^2.4.2"
       }
     },
+    "logplease": {
+      "version": "1.2.15",
+      "resolved": "https://registry.npmjs.org/logplease/-/logplease-1.2.15.tgz",
+      "integrity": "sha512-jLlHnlsPSJjpwUfcNyUxXCl33AYg2cHhIf9QhGL2T4iPT0XPB+xP1LRKFPgIg1M/sg9kAJvy94w9CzBNrfnstA=="
+    },
+    "magic-string": {
+      "version": "0.25.7",
+      "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.7.tgz",
+      "integrity": "sha512-4CrMT5DOHTDk4HYDlzmwu4FVCcIYI8gauveasrdCu2IKIFOJ3f0v/8MDGJCDL9oD2ppz/Av1b0Nj345H9M+XIA==",
+      "dev": true,
+      "requires": {
+        "sourcemap-codec": "^1.4.4"
+      }
+    },
     "map-age-cleaner": {
       "version": "0.1.3",
       "resolved": "https://registry.npmjs.org/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz",
@@ -1575,10 +1624,17 @@
       "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz",
       "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A="
     },
+    "path-parse": {
+      "version": "1.0.6",
+      "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz",
+      "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==",
+      "dev": true
+    },
     "pathval": {
       "version": "1.1.0",
       "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz",
-      "integrity": "sha1-uULm1L3mUwBe9rcTYd74cn0GReA="
+      "integrity": "sha1-uULm1L3mUwBe9rcTYd74cn0GReA=",
+      "dev": true
     },
     "picomatch": {
       "version": "2.2.2",
@@ -1614,12 +1670,12 @@
       "dev": true
     },
     "r1csfile": {
-      "version": "0.0.5",
-      "resolved": "https://registry.npmjs.org/r1csfile/-/r1csfile-0.0.5.tgz",
-      "integrity": "sha512-B+BdKPb/WUTp4N/3X4d1Spgx9Ojx5tFVejGZRJxpTtzq34mC8Vi/czWfiPj85V8kud31lCfYcZ16z7+czvM0Sw==",
+      "version": "0.0.9",
+      "resolved": "https://registry.npmjs.org/r1csfile/-/r1csfile-0.0.9.tgz",
+      "integrity": "sha512-VEp8K+Y3z+rRepjVgnnHI0fMgkTts6jYGr6R2WYWTJzW/g08rChWKErjwJRp4VRmqBGHNDV73GImLCxmf3+/7w==",
       "requires": {
-        "fastfile": "0.0.1",
-        "ffjavascript": "0.1.0"
+        "fastfile": "0.0.5",
+        "ffjavascript": "0.2.2"
       }
     },
     "readdirp": {
@@ -1647,6 +1703,15 @@
       "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz",
       "integrity": "sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE="
     },
+    "resolve": {
+      "version": "1.17.0",
+      "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.17.0.tgz",
+      "integrity": "sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w==",
+      "dev": true,
+      "requires": {
+        "path-parse": "^1.0.6"
+      }
+    },
     "resolve-from": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
@@ -1672,6 +1737,75 @@
         "glob": "^7.1.3"
       }
     },
+    "rollup": {
+      "version": "2.20.0",
+      "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.20.0.tgz",
+      "integrity": "sha512-hkbp//ne1om8+PQRpd81zk0KDvbJxkLZdZJh1ZNxjd1EkI0H1TmYuHqqXx88yciS+5YnMom3geubQjTeeUnNNw==",
+      "dev": true,
+      "requires": {
+        "fsevents": "~2.1.2"
+      }
+    },
+    "rollup-plugin-commonjs": {
+      "version": "10.1.0",
+      "resolved": "https://registry.npmjs.org/rollup-plugin-commonjs/-/rollup-plugin-commonjs-10.1.0.tgz",
+      "integrity": "sha512-jlXbjZSQg8EIeAAvepNwhJj++qJWNJw1Cl0YnOqKtP5Djx+fFGkp3WRh+W0ASCaFG5w1jhmzDxgu3SJuVxPF4Q==",
+      "dev": true,
+      "requires": {
+        "estree-walker": "^0.6.1",
+        "is-reference": "^1.1.2",
+        "magic-string": "^0.25.2",
+        "resolve": "^1.11.0",
+        "rollup-pluginutils": "^2.8.1"
+      }
+    },
+    "rollup-plugin-ignore": {
+      "version": "1.0.6",
+      "resolved": "https://registry.npmjs.org/rollup-plugin-ignore/-/rollup-plugin-ignore-1.0.6.tgz",
+      "integrity": "sha512-OC9h/VMWcOJBwtHxLCaYeuGhqlOWTaE0/S1u5BZqxR8KB+0SjvCRPoHNMdCZQ3c3yVPWFOB2GM49atg2RvGicQ==",
+      "dev": true
+    },
+    "rollup-plugin-json": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/rollup-plugin-json/-/rollup-plugin-json-4.0.0.tgz",
+      "integrity": "sha512-hgb8N7Cgfw5SZAkb3jf0QXii6QX/FOkiIq2M7BAQIEydjHvTyxXHQiIzZaTFgx1GK0cRCHOCBHIyEkkLdWKxow==",
+      "dev": true,
+      "requires": {
+        "rollup-pluginutils": "^2.5.0"
+      }
+    },
+    "rollup-plugin-node-resolve": {
+      "version": "5.2.0",
+      "resolved": "https://registry.npmjs.org/rollup-plugin-node-resolve/-/rollup-plugin-node-resolve-5.2.0.tgz",
+      "integrity": "sha512-jUlyaDXts7TW2CqQ4GaO5VJ4PwwaV8VUGA7+km3n6k6xtOEacf61u0VXwN80phY/evMcaS+9eIeJ9MOyDxt5Zw==",
+      "dev": true,
+      "requires": {
+        "@types/resolve": "0.0.8",
+        "builtin-modules": "^3.1.0",
+        "is-module": "^1.0.0",
+        "resolve": "^1.11.1",
+        "rollup-pluginutils": "^2.8.1"
+      }
+    },
+    "rollup-plugin-replace": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/rollup-plugin-replace/-/rollup-plugin-replace-2.2.0.tgz",
+      "integrity": "sha512-/5bxtUPkDHyBJAKketb4NfaeZjL5yLZdeUihSfbF2PQMz+rSTEb8ARKoOl3UBT4m7/X+QOXJo3sLTcq+yMMYTA==",
+      "dev": true,
+      "requires": {
+        "magic-string": "^0.25.2",
+        "rollup-pluginutils": "^2.6.0"
+      }
+    },
+    "rollup-pluginutils": {
+      "version": "2.8.2",
+      "resolved": "https://registry.npmjs.org/rollup-pluginutils/-/rollup-pluginutils-2.8.2.tgz",
+      "integrity": "sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==",
+      "dev": true,
+      "requires": {
+        "estree-walker": "^0.6.1"
+      }
+    },
     "run-async": {
       "version": "2.4.0",
       "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.0.tgz",
@@ -1735,6 +1869,12 @@
         "is-fullwidth-code-point": "^2.0.0"
       }
     },
+    "sourcemap-codec": {
+      "version": "1.4.8",
+      "resolved": "https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz",
+      "integrity": "sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==",
+      "dev": true
+    },
     "sprintf-js": {
       "version": "1.0.3",
       "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
@@ -1892,7 +2032,8 @@
     "type-detect": {
       "version": "4.0.8",
       "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz",
-      "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g=="
+      "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==",
+      "dev": true
     },
     "type-fest": {
       "version": "0.8.1",
@@ -1915,6 +2056,15 @@
       "integrity": "sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g==",
       "dev": true
     },
+    "wasmcurves": {
+      "version": "0.0.4",
+      "resolved": "https://registry.npmjs.org/wasmcurves/-/wasmcurves-0.0.4.tgz",
+      "integrity": "sha512-c/Tob+F/7jJhep1b2qtj54r4nkGaRifNbQ1OJx8cBBFH1RlHbWIbISHWONClOxiVwy/JZOpbN4SgvSX/4lF80A==",
+      "requires": {
+        "big-integer": "^1.6.42",
+        "blakejs": "^1.1.0"
+      }
+    },
     "which": {
       "version": "1.3.1",
       "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz",
@@ -1943,6 +2093,11 @@
       "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==",
       "dev": true
     },
+    "worker-threads": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/worker-threads/-/worker-threads-1.0.0.tgz",
+      "integrity": "sha512-vK6Hhvph8oLxocEJIlc3YfGAZhm210uGzjZsXSu+JYLAQ/s/w4Tqgl60JrdH58hW8NSGP4m3bp8a92qPXgX05w=="
+    },
     "wrap-ansi": {
       "version": "2.1.0",
       "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz",

View File

@ -1,13 +1,22 @@
{ {
"name": "snarkjs", "name": "snarkjs",
"type": "module",
"version": "0.1.31", "version": "0.1.31",
"description": "zkSNARKs implementation in JavaScript", "description": "zkSNARKs implementation in JavaScript",
"main": "index.js", "main": "./build/main.cjs",
"module": "./main.js",
"exports": {
"import": "./main.js",
"require": "./build/main.cjs"
},
"scripts": { "scripts": {
"test": "mocha" "test": "mocha",
"build": "rollup -c config/rollup.cjs.config.js",
"buildcli": "rollup -c config/rollup.cli.config.js",
"buildiife": "BROWSER=true rollup -c config/rollup.iife.config.js"
}, },
"bin": { "bin": {
"snarkjs": "cli.js" "snarkjs": "build/cli.js"
}, },
"directories": { "directories": {
"templates": "templates" "templates": "templates"
@ -29,16 +38,24 @@
}, },
"dependencies": { "dependencies": {
"blake2b-wasm": "https://github.com/jbaylina/blake2b-wasm.git", "blake2b-wasm": "https://github.com/jbaylina/blake2b-wasm.git",
"circom_runtime": "0.0.6", "circom_runtime": "0.0.8",
"ffjavascript": "0.1.0", "fastfile": "0.0.5",
"ffjavascript": "0.2.2",
"keccak": "^3.0.0", "keccak": "^3.0.0",
"r1csfile": "0.0.5", "logplease": "^1.2.15",
"r1csfile": "0.0.9",
"yargs": "^12.0.5" "yargs": "^12.0.5"
}, },
"devDependencies": { "devDependencies": {
"chai": "^4.2.0", "chai": "^4.2.0",
"eslint": "^6.8.0", "eslint": "^6.8.0",
"lodash": "^4.17.15", "lodash": "^4.17.15",
"mocha": "^7.1.1" "mocha": "^7.1.1",
"rollup": "^2.20.0",
"rollup-plugin-commonjs": "^10.1.0",
"rollup-plugin-ignore": "^1.0.6",
"rollup-plugin-json": "^4.0.0",
"rollup-plugin-node-resolve": "^5.2.0",
"rollup-plugin-replace": "^2.2.0"
} }
} }
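The `exports` map added here makes the package dual-mode: the `import` condition resolves to the ESM entry `./main.js`, while `require` gets the Rollup bundle `./build/main.cjs`. A minimal consumption sketch, assuming the package is installed and `npm run build` has produced the bundle:

```js
// esm-consumer.mjs — Node resolves the "import" condition to ./main.js
import * as snarkjs from "snarkjs";
console.log(Object.keys(snarkjs));

// A CommonJS consumer hits the "require" condition instead and loads
// the Rollup bundle ./build/main.cjs:
//   const snarkjs = require("snarkjs");
```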

View File

@ -1,8 +1,8 @@
const Scalar = require("ffjavascript").Scalar;
const fastFile = require("fastfile");
const assert = require("assert");
async function readBinFile(fileName, type, maxVersion) { import { Scalar } from "ffjavascript";
import * as fastFile from "fastfile";
export async function readBinFile(fileName, type, maxVersion) {
const fd = await fastFile.readExisting(fileName); const fd = await fastFile.readExisting(fileName);
@ -10,11 +10,11 @@ async function readBinFile(fileName, type, maxVersion) {
let readedType = ""; let readedType = "";
for (let i=0; i<4; i++) readedType += String.fromCharCode(b[i]); for (let i=0; i<4; i++) readedType += String.fromCharCode(b[i]);
if (readedType != type) assert(false, fileName + ": Invalid File format"); if (readedType != type) throw new Error(fileName + ": Invalid File format");
let v = await fd.readULE32(); let v = await fd.readULE32();
if (v>maxVersion) assert(false, "Version not supported"); if (v>maxVersion) throw new Error("Version not supported");
const nSections = await fd.readULE32(); const nSections = await fd.readULE32();
@ -34,7 +34,7 @@ async function readBinFile(fileName, type, maxVersion) {
return {fd, sections}; return {fd, sections};
} }
async function createBinFile(fileName, type, version, nSections) { export async function createBinFile(fileName, type, version, nSections) {
const fd = await fastFile.createOverride(fileName); const fd = await fastFile.createOverride(fileName);
@ -48,8 +48,8 @@ async function createBinFile(fileName, type, version, nSections) {
return fd; return fd;
} }
async function startWriteSection(fd, idSection) { export async function startWriteSection(fd, idSection) {
assert(typeof fd.writingSection === "undefined", "Already writing a section"); if (typeof fd.writingSection !== "undefined") throw new Error("Already writing a section");
await fd.writeULE32(idSection); // Header type await fd.writeULE32(idSection); // Header type
fd.writingSection = { fd.writingSection = {
pSectionSize: fd.pos pSectionSize: fd.pos
@ -57,8 +57,8 @@ async function startWriteSection(fd, idSection) {
await fd.writeULE64(0); // Temporally set to 0 length await fd.writeULE64(0); // Temporally set to 0 length
} }
async function endWriteSection(fd) { export async function endWriteSection(fd) {
assert(typeof fd.writingSection != "undefined", "Not writing a section"); if (typeof fd.writingSection === "undefined") throw new Error("Not writing a section");
const sectionSize = fd.pos - fd.writingSection.pSectionSize - 8; const sectionSize = fd.pos - fd.writingSection.pSectionSize - 8;
const oldPos = fd.pos; const oldPos = fd.pos;
@ -68,36 +68,36 @@ async function endWriteSection(fd) {
delete fd.writingSection; delete fd.writingSection;
} }
async function startReadUniqueSection(fd, sections, idSection) { export async function startReadUniqueSection(fd, sections, idSection) {
assert(typeof fd.readingSection === "undefined", "Already reading a section"); if (typeof fd.readingSection !== "undefined") throw new Error("Already reading a section");
if (!sections[idSection]) assert(false, fd.fileName + ": Missing section "+ idSection ); if (!sections[idSection]) throw new Error(fd.fileName + ": Missing section "+ idSection );
if (sections[idSection].length>1) assert(false, fd.fileName +": Section Duplicated " +idSection); if (sections[idSection].length>1) throw new Error(fd.fileName +": Section Duplicated " +idSection);
fd.pos = sections[idSection][0].p; fd.pos = sections[idSection][0].p;
fd.readingSection = sections[idSection][0]; fd.readingSection = sections[idSection][0];
} }
async function endReadSection(fd, noCheck) { export async function endReadSection(fd, noCheck) {
assert(typeof fd.readingSection != "undefined", "Not reading a section"); if (typeof fd.readingSection === "undefined") throw new Error("Not reading a section");
if (!noCheck) { if (!noCheck) {
assert.equal(fd.pos-fd.readingSection.p, fd.readingSection.size); if (fd.pos-fd.readingSection.p != fd.readingSection.size) throw new Error("Invalid section size reading");
} }
delete fd.readingSection; delete fd.readingSection;
} }
async function writeBigInt(fd, n, n8, pos) { export async function writeBigInt(fd, n, n8, pos) {
const buff = new Uint8Array(n8); const buff = new Uint8Array(n8);
Scalar.toRprLE(buff, 0, n, n8); Scalar.toRprLE(buff, 0, n, n8);
await fd.write(buff, pos); await fd.write(buff, pos);
} }
async function readBigInt(fd, n8, pos) { export async function readBigInt(fd, n8, pos) {
const buff = await fd.read(n8, pos); const buff = await fd.read(n8, pos);
return Scalar.fromRprLE(buff, 0, n8); return Scalar.fromRprLE(buff, 0, n8);
} }
async function copySection(fdFrom, sections, fdTo, sectionId) { export async function copySection(fdFrom, sections, fdTo, sectionId) {
const chunkSize = fdFrom.pageSize; const chunkSize = fdFrom.pageSize;
await startReadUniqueSection(fdFrom, sections, sectionId); await startReadUniqueSection(fdFrom, sections, sectionId);
await startWriteSection(fdTo, sectionId); await startWriteSection(fdTo, sectionId);
@ -111,14 +111,14 @@ async function copySection(fdFrom, sections, fdTo, sectionId) {
} }
async function readFullSection(fd, sections, idSection) { export async function readFullSection(fd, sections, idSection) {
await startReadUniqueSection(fd, sections, idSection); await startReadUniqueSection(fd, sections, idSection);
const res = await fd.read(fd.readingSection.size); const res = await fd.read(fd.readingSection.size);
await endReadSection(fd); await endReadSection(fd);
return res; return res;
} }
async function sectionIsEqual(fd1, sections1, fd2, sections2, idSection) { export async function sectionIsEqual(fd1, sections1, fd2, sections2, idSection) {
const MAX_BUFF_SIZE = fd1.pageSize * 16; const MAX_BUFF_SIZE = fd1.pageSize * 16;
await startReadUniqueSection(fd1, sections1, idSection); await startReadUniqueSection(fd1, sections1, idSection);
await startReadUniqueSection(fd2, sections2, idSection); await startReadUniqueSection(fd2, sections2, idSection);
@ -134,16 +134,3 @@ async function sectionIsEqual(fd1, sections1, fd2, sections2, idSection) {
await endReadSection(fd2); await endReadSection(fd2);
return true; return true;
} }
module.exports.readBinFile = readBinFile;
module.exports.createBinFile = createBinFile;
module.exports.writeBigInt = writeBigInt;
module.exports.readBigInt = readBigInt;
module.exports.startWriteSection = startWriteSection;
module.exports.endWriteSection = endWriteSection;
module.exports.startReadUniqueSection = startReadUniqueSection;
module.exports.endReadSection = endReadSection;
module.exports.copySection = copySection;
module.exports.readFullSection = readFullSection;
module.exports.sectionIsEqual = sectionIsEqual;
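Every helper in this file is now a named ESM export, so the section API composes directly. A minimal round-trip sketch, assuming Node ESM, the module path `./binfileutils.js`, and that `fastfile` reads/writes at the current position when `pos` is omitted:

```js
import * as binFileUtils from "./binfileutils.js";

async function roundTrip() {
    // "demo" container type (4 chars), version 1, one section
    const fdW = await binFileUtils.createBinFile("demo.bin", "demo", 1, 1);
    await binFileUtils.startWriteSection(fdW, 1);
    await binFileUtils.writeBigInt(fdW, 12345n, 32);  // 32-byte little-endian scalar
    await binFileUtils.endWriteSection(fdW);          // back-patches the section length
    await fdW.close();

    const { fd: fdR, sections } = await binFileUtils.readBinFile("demo.bin", "demo", 1);
    await binFileUtils.startReadUniqueSection(fdR, sections, 1);
    const n = await binFileUtils.readBigInt(fdR, 32);
    await binFileUtils.endReadSection(fdR);           // throws unless exactly 32 bytes were consumed
    await fdR.close();
    return n;                                         // 12345n
}

roundTrip().then(console.log);
```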

View File

@ -1,17 +1,15 @@
#!/usr/bin/env node import pkg from "../package.json";
const version = pkg.version;
const version = require("../package").version;
let selectedCommand = null; let selectedCommand = null;
module.exports = async function clProcessor(commands) { export default async function clProcessor(commands) {
const cl = []; const cl = [];
const argv = {}; const argv = {};
for (let i=2; i<process.argv.length; i++) { for (let i=2; i<process.argv.length; i++) {
if (process.argv[i][0] == "-") { if (process.argv[i][0] == "-") {
let S = process.argv[i]; let S = process.argv[i];
while (S[0] == "-") S = S.slice(1); while (S[0] == "-") S = S.slice(1);
const arr = S.split("=") const arr = S.split("=");
if (arr.length > 1) { if (arr.length > 1) {
argv[arr[0]] = arr.slice(1).join("="); argv[arr[0]] = arr.slice(1).join("=");
} else { } else {
@ -34,7 +32,7 @@ module.exports = async function clProcessor(commands) {
const options = getOptions(cmd.options); const options = getOptions(cmd.options);
await cmd.action(m, options); await cmd.action(m, options);
} else { } else {
await cmd.action(m); await cmd.action(m, {});
} }
} else { } else {
if (m.length>0) console.log("Invalid number of parameters"); if (m.length>0) console.log("Invalid number of parameters");
@ -198,7 +196,6 @@ module.exports = async function clProcessor(commands) {
} }
S += " " + pl.params.join(" "); S += " " + pl.params.join(" ");
console.log(S); console.log(S);
// console.log("");
} }
} }
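The processor is now the default export and always passes an options object to `action`. A hypothetical command table wired into it; the `cmd` and `description` field names are assumptions inferred from the processor's use of `cmd.action`, `cmd.options` and `pl.params`, not confirmed by this hunk:

```js
import clProcessor from "./clprocessor.js";

const commands = [
    {
        cmd: "greet <name>",        // assumed field: the pattern matched against argv
        description: "Says hello",  // assumed field: shown by the generated help
        action: async (params, options) => {
            console.log("Hello, " + params[0]);
        }
    }
];

clProcessor(commands).then(() => process.exit(0));
```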

View File

@ -1,6 +1,4 @@
const Scalar = require("ffjavascript").Scalar; import { Scalar, buildBn128, buildBls12381} from "ffjavascript";
const buildBn128 = require("ffjavascript").buildBn128;
const buildBls12381 = require("ffjavascript").buildBls12381;
const bls12381r = Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16); const bls12381r = Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
const bn128r = Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617"); const bn128r = Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");
@ -8,7 +6,7 @@ const bn128r = Scalar.e("2188824287183927522224640574525727508854836440041603434
const bls12381q = Scalar.e("1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab", 16); const bls12381q = Scalar.e("1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab", 16);
const bn128q = Scalar.e("21888242871839275222246405745257275088696311157297823662689037894645226208583"); const bn128q = Scalar.e("21888242871839275222246405745257275088696311157297823662689037894645226208583");
module.exports.getCurveFromR = async function getCurveFromR(r) { export async function getCurveFromR(r) {
let curve; let curve;
if (Scalar.eq(r, bn128r)) { if (Scalar.eq(r, bn128r)) {
curve = await buildBn128(); curve = await buildBn128();
@ -20,7 +18,7 @@ module.exports.getCurveFromR = async function getCurveFromR(r) {
return curve; return curve;
}; };
module.exports.getCurveFromQ = async function getCurveFromQ(q) { export async function getCurveFromQ(q) {
let curve; let curve;
if (Scalar.eq(q, bn128q)) { if (Scalar.eq(q, bn128q)) {
curve = await buildBn128(); curve = await buildBn128();
@ -32,7 +30,7 @@ module.exports.getCurveFromQ = async function getCurveFromQ(q) {
return curve; return curve;
}; };
module.exports.getCurveFromName = async function getCurveFromName(name) { export async function getCurveFromName(name) {
let curve; let curve;
const normName = normalizeName(name); const normName = normalizeName(name);
if (["BN128", "BN254", "ALTBN128"].indexOf(normName) >= 0) { if (["BN128", "BN254", "ALTBN128"].indexOf(normName) >= 0) {

3
src/groth16.js Normal file
View File

@ -0,0 +1,3 @@
export {default as fullProve} from "./groth16_fullprove.js";
export {default as prove} from "./groth16_prove.js";
export {default as validate} from "./groth16_verify.js";

10
src/groth16_fullprove.js Normal file
View File

@ -0,0 +1,10 @@
import groth16_prove from "./groth16_prove.js";
import wtns_calculate from "./wtns_calculate.js";
export default async function groth16ProofFromInput(input, wasmFile, zkeyFileName, logger) {
const wtns= {
type: "mem"
};
await wtns_calculate(input, wasmFile, wtns);
return await groth16_prove(zkeyFileName, wtns, logger);
}
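Together with the barrel file above this gives a one-call proving flow, and the intermediate witness never touches disk: `wtns_calculate` fills an in-memory `{type: "mem"}` buffer that feeds straight into the prover. A sketch, assuming a compiled `circuit.wasm` and a finalized `circuit_final.zkey`, with inputs matching the circuit's signals:

```js
import * as groth16 from "./groth16.js";

const input = { a: 3, b: 11 };  // assumed input signals of the example circuit
const { proof, publicSignals } = await groth16.fullProve(
    input, "circuit.wasm", "circuit_final.zkey", console);
console.log(publicSignals);
```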

View File

@ -1,14 +1,15 @@
const binFileUtils = require("./binfileutils"); import * as binFileUtils from "./binfileutils.js";
const zkeyUtils = require("./zkey").utils; import * as zkeyUtils from "./zkey_utils.js";
const wtnsFile = require("./wtnsfile"); import * as wtnsUtils from "./wtns_utils.js";
const getCurve = require("./curves").getCurveFromQ; import { getCurveFromQ as getCurve } from "./curves.js";
const {log2} = require("./misc"); import { log2 } from "./misc.js";
const Scalar = require("ffjavascript").Scalar; import { Scalar, utils } from "ffjavascript";
const {stringifyBigInts} = utils;
async function groth16Prover(zkeyFileName, witnessFileName, verbose) { export default async function groth16ProofFromInput(zkeyFileName, witnessFileName, logger) {
const {fd: fdWtns, sections: sectionsWtns} = await binFileUtils.readBinFile(witnessFileName, "wtns", 2); const {fd: fdWtns, sections: sectionsWtns} = await binFileUtils.readBinFile(witnessFileName, "wtns", 2);
const wtns = await wtnsFile.readHeader(fdWtns, sectionsWtns); const wtns = await wtnsUtils.readHeader(fdWtns, sectionsWtns);
const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils.readBinFile(zkeyFileName, "zkey", 2); const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils.readBinFile(zkeyFileName, "zkey", 2);
@ -53,7 +54,7 @@ async function groth16Prover(zkeyFileName, witnessFileName, verbose) {
const buffPodd_T = await joinABC(curve, zkey, buffAodd_T, buffBodd_T, buffCodd_T); const buffPodd_T = await joinABC(curve, zkey, buffAodd_T, buffBodd_T, buffCodd_T);
const proof = {}; let proof = {};
proof.pi_a = await curve.G1.multiExpAffine(buffBasesA, buffWitness); proof.pi_a = await curve.G1.multiExpAffine(buffBasesA, buffWitness);
let pib1 = await curve.G1.multiExpAffine(buffBasesB1, buffWitness); let pib1 = await curve.G1.multiExpAffine(buffBasesB1, buffWitness);
@ -81,7 +82,7 @@ async function groth16Prover(zkeyFileName, witnessFileName, verbose) {
proof.pi_c = G1.add( proof.pi_c, G1.timesFr( zkey.vk_delta_1, Fr.neg(Fr.mul(r,s) ))); proof.pi_c = G1.add( proof.pi_c, G1.timesFr( zkey.vk_delta_1, Fr.neg(Fr.mul(r,s) )));
const publicSignals = []; let publicSignals = [];
for (let i=1; i<= zkey.nPublic; i++) { for (let i=1; i<= zkey.nPublic; i++) {
const b = buffWitness.slice(i*Fr.n8, i*Fr.n8+Fr.n8); const b = buffWitness.slice(i*Fr.n8, i*Fr.n8+Fr.n8);
@ -97,6 +98,10 @@ async function groth16Prover(zkeyFileName, witnessFileName, verbose) {
await fdZKey.close(); await fdZKey.close();
await fdWtns.close(); await fdWtns.close();
proof = stringifyBigInts(proof);
publicSignals = stringifyBigInts(publicSignals);
return {proof, publicSignals}; return {proof, publicSignals};
} }
@ -237,4 +242,3 @@ async function joinABC(curve, zkey, a, b, c) {
return outBuff; return outBuff;
} }
module.exports = groth16Prover;
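The proof and public signals are now passed through `stringifyBigInts` before being returned, so they are JSON-ready. A minimal caller, assuming a witness file produced beforehand:

```js
import prove from "./groth16_prove.js";

const { proof, publicSignals } = await prove("circuit_final.zkey", "witness.wtns", console);
// pi_a / pi_b / pi_c arrive as decimal strings, so this serializes cleanly:
console.log(JSON.stringify(proof, null, 1));
```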

View File

@ -18,11 +18,12 @@
*/ */
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */ /* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
import { Scalar } from "ffjavascript";
import * as curves from "./curves.js";
import { utils } from "ffjavascript";
const {unstringifyBigInts} = utils;
const Scalar = require("ffjavascript").Scalar; export default async function isValid(vk_verifier, publicSignals, proof, logger) {
const curves = require("./curves");
module.exports = async function isValid(vk_verifier, proof, publicSignals) {
/* /*
let cpub = vk_verifier.IC[0]; let cpub = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) { for (let s= 0; s< vk_verifier.nPublic; s++) {
@ -30,6 +31,10 @@ module.exports = async function isValid(vk_verifier, proof, publicSignals) {
} }
*/ */
vk_verifier = unstringifyBigInts(vk_verifier);
proof = unstringifyBigInts(proof);
publicSignals = unstringifyBigInts(publicSignals);
const curve = await curves.getCurveFromName(vk_verifier.curve); const curve = await curves.getCurveFromName(vk_verifier.curve);
const IC0 = curve.G1.fromObject(vk_verifier.IC[0]); const IC0 = curve.G1.fromObject(vk_verifier.IC[0]);
@ -62,6 +67,11 @@ module.exports = async function isValid(vk_verifier, proof, publicSignals) {
vk_alpha_1, vk_beta_2 vk_alpha_1, vk_beta_2
); );
if (! res) return false; if (! res) {
if (logger) logger.error("Invalid proof");
return false;
}
if (logger) logger.info("OK!");
return true; return true;
}; };
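Because the verifier now runs `unstringifyBigInts` over all three arguments, it accepts the exported JSON artifacts directly; note the argument order is `(vk, publicSignals, proof)`. A sketch, assuming `verification_key.json`, `proof.json` and `public.json` were exported beforehand:

```js
import { readFileSync } from "fs";
import { validate } from "./groth16.js";

const vKey = JSON.parse(readFileSync("verification_key.json", "utf8"));
const proof = JSON.parse(readFileSync("proof.json", "utf8"));
const publicSignals = JSON.parse(readFileSync("public.json", "utf8"));

const ok = await validate(vKey, publicSignals, proof, console);  // logs "OK!" or "Invalid proof"
console.log(ok);
```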

View File

@ -1,9 +1,9 @@
const blake2b = require("blake2b-wasm"); import blake2b from "blake2b-wasm";
const ChaCha = require("ffjavascript").ChaCha; import { ChaCha } from "ffjavascript";
function hashToG2(curve, hash) { export function hashToG2(curve, hash) {
const hashV = new DataView(hash.buffer, hash.byteOffset, hash.byteLength); const hashV = new DataView(hash.buffer, hash.byteOffset, hash.byteLength);
const seed = []; const seed = [];
for (let i=0; i<8; i++) { for (let i=0; i<8; i++) {
@ -17,7 +17,7 @@ function hashToG2(curve, hash) {
return g2_sp; return g2_sp;
} }
function getG2sp(curve, persinalization, challange, g1s, g1sx) { export function getG2sp(curve, persinalization, challange, g1s, g1sx) {
const h = blake2b(64); const h = blake2b(64);
const b1 = new Uint8Array([persinalization]); const b1 = new Uint8Array([persinalization]);
@ -40,7 +40,7 @@ function calculatePubKey(k, curve, personalization, challangeHash, rng ) {
return k; return k;
} }
function createPTauKey(curve, challangeHash, rng) { export function createPTauKey(curve, challangeHash, rng) {
const key = { const key = {
tau: {}, tau: {},
alpha: {}, alpha: {},
@ -55,7 +55,7 @@ function createPTauKey(curve, challangeHash, rng) {
return key; return key;
} }
function createDeltaKey(curve, transcript, rng) { export function createDeltaKey(curve, transcript, rng) {
const delta = {}; const delta = {};
delta.prvKey = curve.Fr.fromRng(rng); delta.prvKey = curve.Fr.fromRng(rng);
delta.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng)); delta.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
@ -64,8 +64,3 @@ function createDeltaKey(curve, transcript, rng) {
delta.g2_spx = curve.G2.toAffine(curve.G2.timesScalar(delta.g2_sp, delta.prvKey)); delta.g2_spx = curve.G2.toAffine(curve.G2.timesScalar(delta.g2_sp, delta.prvKey));
return delta; return delta;
} }
module.exports.createPTauKey = createPTauKey;
module.exports.getG2sp = getG2sp;
module.exports.hashToG2 = hashToG2;
module.exports.createDeltaKey =createDeltaKey;
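The exported key helpers compose with `curves.js` and `misc.js`; a sketch using a 64-byte all-zero placeholder where a real contribution would use the previous challange hash:

```js
import { getCurveFromName } from "./curves.js";
import { createPTauKey } from "./keypair.js";
import { getRandomRng } from "./misc.js";

const curve = await getCurveFromName("bn128");
const rng = await getRandomRng("demo entropy");        // supplying entropy skips the interactive prompt
const challangeHash = new Uint8Array(64);              // placeholder; normally the last challange hash

const key = createPTauKey(curve, challangeHash, rng);  // tau / alpha / beta key pairs
console.log(curve.G1.toString(key.tau.g1_s, 16));
```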

View File

@ -1,12 +1,14 @@
const fs = require("fs"); import * as fastFile from "fastFile";
module.exports = async function loadSymbols(symFileName) { export default async function loadSymbols(symFileName) {
const sym = { const sym = {
labelIdx2Name: [ "one" ], labelIdx2Name: [ "one" ],
varIdx2Name: [ "one" ], varIdx2Name: [ "one" ],
componentIdx2Name: [] componentIdx2Name: []
}; };
const symsStr = await fs.promises.readFile(symFileName, "utf8"); const fd = await fastFile.readExisting(symFileName);
const buff = await fd.read(fd.totalSize);
const symsStr = new TextDecoder("utf-8").decode(buff);
const lines = symsStr.split("\n"); const lines = symsStr.split("\n");
for (let i=0; i<lines.length; i++) { for (let i=0; i<lines.length; i++) {
const arr = lines[i].split(","); const arr = lines[i].split(",");
@ -22,6 +24,8 @@ module.exports = async function loadSymbols(symFileName) {
} }
} }
await fd.close();
return sym; return sym;
function extractComponent(name) { function extractComponent(name) {
@ -29,4 +33,4 @@ module.exports = async function loadSymbols(symFileName) {
arr.pop(); // Remove the last element arr.pop(); // Remove the last element
return arr.join("."); return arr.join(".");
} }
}; }
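The loader now reads through `fastfile` rather than `fs`, matching the file abstraction used everywhere else. Typical use, assuming a `circuit.sym` emitted by circom and that the module file is `loadsyms.js`:

```js
import loadSymbols from "./loadsyms.js";

const sym = await loadSymbols("circuit.sym");
console.log(sym.varIdx2Name[1]);        // e.g. "main.c" — depends on the circuit
console.log(sym.componentIdx2Name[0]);  // e.g. "main"
```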

View File

@ -1,7 +1,8 @@
const Blake2b = require("blake2b-wasm"); /* global window */
const readline = require("readline"); import Blake2b from "blake2b-wasm";
const ChaCha = require("ffjavascript").ChaCha; import readline from "readline";
const crypto = require("crypto"); import { ChaCha } from "ffjavascript";
import crypto from "crypto";
const _revTable = []; const _revTable = [];
for (let i=0; i<256; i++) { for (let i=0; i<256; i++) {
@ -19,7 +20,7 @@ function _revSlow(idx, bits) {
return res; return res;
} }
function bitReverse(idx, bits) { export function bitReverse(idx, bits) {
return ( return (
_revTable[idx >>> 24] | _revTable[idx >>> 24] |
(_revTable[(idx >>> 16) & 0xFF] << 8) | (_revTable[(idx >>> 16) & 0xFF] << 8) |
@ -29,13 +30,13 @@ function bitReverse(idx, bits) {
} }
function log2( V ) export function log2( V )
{ {
return( ( ( V & 0xFFFF0000 ) !== 0 ? ( V &= 0xFFFF0000, 16 ) : 0 ) | ( ( V & 0xFF00FF00 ) !== 0 ? ( V &= 0xFF00FF00, 8 ) : 0 ) | ( ( V & 0xF0F0F0F0 ) !== 0 ? ( V &= 0xF0F0F0F0, 4 ) : 0 ) | ( ( V & 0xCCCCCCCC ) !== 0 ? ( V &= 0xCCCCCCCC, 2 ) : 0 ) | ( ( V & 0xAAAAAAAA ) !== 0 ) ); return( ( ( V & 0xFFFF0000 ) !== 0 ? ( V &= 0xFFFF0000, 16 ) : 0 ) | ( ( V & 0xFF00FF00 ) !== 0 ? ( V &= 0xFF00FF00, 8 ) : 0 ) | ( ( V & 0xF0F0F0F0 ) !== 0 ? ( V &= 0xF0F0F0F0, 4 ) : 0 ) | ( ( V & 0xCCCCCCCC ) !== 0 ? ( V &= 0xCCCCCCCC, 2 ) : 0 ) | ( ( V & 0xAAAAAAAA ) !== 0 ) );
} }
function formatHash(b) { export function formatHash(b, title) {
const a = new DataView(b.buffer, b.byteOffset, b.byteLength); const a = new DataView(b.buffer, b.byteOffset, b.byteLength);
let S = ""; let S = "";
for (let i=0; i<4; i++) { for (let i=0; i<4; i++) {
@ -46,10 +47,11 @@ function formatHash(b) {
S += a.getUint32(i*16+j*4).toString(16).padStart(8, "0"); S += a.getUint32(i*16+j*4).toString(16).padStart(8, "0");
} }
} }
if (title) S = title + "\n" + S;
return S; return S;
} }
function hashIsEqual(h1, h2) { export function hashIsEqual(h1, h2) {
if (h1.byteLength != h2.byteLength) return false; if (h1.byteLength != h2.byteLength) return false;
var dv1 = new Int8Array(h1); var dv1 = new Int8Array(h1);
var dv2 = new Int8Array(h2); var dv2 = new Int8Array(h2);
@ -60,14 +62,14 @@ function hashIsEqual(h1, h2) {
return true; return true;
} }
function cloneHasher(h) { export function cloneHasher(h) {
const ph = h.getPartialHash(); const ph = h.getPartialHash();
const res = Blake2b(64); const res = Blake2b(64);
res.setPartialHash(ph); res.setPartialHash(ph);
return res; return res;
} }
async function sameRatio(curve, g1s, g1sx, g2s, g2sx) { export async function sameRatio(curve, g1s, g1sx, g2s, g2sx) {
if (curve.G1.isZero(g1s)) return false; if (curve.G1.isZero(g1s)) return false;
if (curve.G1.isZero(g1sx)) return false; if (curve.G1.isZero(g1sx)) return false;
if (curve.G2.isZero(g2s)) return false; if (curve.G2.isZero(g2s)) return false;
@ -78,20 +80,23 @@ async function sameRatio(curve, g1s, g1sx, g2s, g2sx) {
} }
export function askEntropy() {
const rl = readline.createInterface({ if (process.browser) {
return window.prompt("Enter a random text. (Entropy): ", "");
} else {
const rl = readline.createInterface({
input: process.stdin, input: process.stdin,
output: process.stdout output: process.stdout
}); });
function askEntropy() {
return new Promise((resolve) => { return new Promise((resolve) => {
rl.question("Enter a random text. (Entropy): ", (input) => resolve(input) ); rl.question("Enter a random text. (Entropy): ", (input) => resolve(input) );
}); });
}
} }
async function getRandomRng(entropy) { export async function getRandomRng(entropy) {
// Generate a random key // Generate a random Rng
while (!entropy) { while (!entropy) {
entropy = await askEntropy(); entropy = await askEntropy();
} }
@ -109,7 +114,7 @@ async function getRandomRng(entropy) {
return rng; return rng;
} }
function rngFromBeaconParams(beaconHash, numIterationsExp) { export function rngFromBeaconParams(beaconHash, numIterationsExp) {
let nIterationsInner; let nIterationsInner;
let nIterationsOuter; let nIterationsOuter;
if (numIterationsExp<32) { if (numIterationsExp<32) {
@ -138,25 +143,17 @@ function rngFromBeaconParams(beaconHash, numIterationsExp) {
return rng; return rng;
} }
function hex2ByteArray(s) { export function hex2ByteArray(s) {
if (s instanceof Uint8Array) return s;
if (s.slice(0,2) == "0x") s= s.slice(2);
return new Uint8Array(s.match(/[\da-f]{2}/gi).map(function (h) { return new Uint8Array(s.match(/[\da-f]{2}/gi).map(function (h) {
return parseInt(h, 16); return parseInt(h, 16);
})); }));
} }
function byteArray2hex(byteArray) { export function byteArray2hex(byteArray) {
return Array.prototype.map.call(byteArray, function(byte) { return Array.prototype.map.call(byteArray, function(byte) {
return ("0" + (byte & 0xFF).toString(16)).slice(-2); return ("0" + (byte & 0xFF).toString(16)).slice(-2);
}).join(""); }).join("");
} }
module.exports.bitReverse = bitReverse;
module.exports.log2 = log2;
module.exports.formatHash = formatHash;
module.exports.hashIsEqual = hashIsEqual;
module.exports.cloneHasher = cloneHasher;
module.exports.sameRatio = sameRatio;
module.exports.getRandomRng = getRandomRng;
module.exports.rngFromBeaconParams = rngFromBeaconParams;
module.exports.hex2ByteArray = hex2ByteArray;
module.exports.byteArray2hex = byteArray2hex;
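A few of these helpers are useful on their own; the `0x` prefix stripping and the `Uint8Array` pass-through in `hex2ByteArray` are new in this commit:

```js
import { hex2ByteArray, byteArray2hex, log2 } from "./misc.js";

const buff = hex2ByteArray("0xdeadbeef");   // "0x" prefix is stripped
console.log(byteArray2hex(buff));           // "deadbeef"
console.log(hex2ByteArray(buff) === buff);  // true — Uint8Array inputs pass straight through
console.log(log2(1024));                    // 10
```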

View File

@ -1,6 +1,5 @@
const buildTaskManager = require("./taskmanager"); import * as binFileUtils from "./binfileutils.js";
const binFileUtils = require("./binfileutils");
/* /*
This function creates a new section in the fdTo file with id idSection. This function creates a new section in the fdTo file with id idSection.
@ -9,7 +8,7 @@ const binFileUtils = require("./binfileutils");
It also updates the newChallangeHasher with the new points It also updates the newChallangeHasher with the new points
*/ */
async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, groupName, first, inc, sectionName, verbose) { export async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, groupName, first, inc, sectionName, logger) {
const MAX_CHUNK_SIZE = 1 << 16; const MAX_CHUNK_SIZE = 1 << 16;
const G = curve[groupName]; const G = curve[groupName];
const sG = G.F.n8*2; const sG = G.F.n8*2;
@ -20,7 +19,7 @@ async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, group
let t = first; let t = first;
for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) { for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
if (verbose) console.log(`Applying key: ${sectionName}: ${i}/${nPoints}`); if (logger) logger.debug(`Applying key: ${sectionName}: ${i}/${nPoints}`);
const n= Math.min(nPoints - i, MAX_CHUNK_SIZE); const n= Math.min(nPoints - i, MAX_CHUNK_SIZE);
let buff; let buff;
buff = await fdOld.read(n*sG); buff = await fdOld.read(n*sG);
@ -35,13 +34,13 @@ async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, group
async function applyKeyToChallangeSection(fdOld, fdNew, responseHasher, curve, groupName, nPoints, first, inc, formatOut, sectionName, verbose) { export async function applyKeyToChallangeSection(fdOld, fdNew, responseHasher, curve, groupName, nPoints, first, inc, formatOut, sectionName, logger) {
const G = curve[groupName]; const G = curve[groupName];
const sG = G.F.n8*2; const sG = G.F.n8*2;
const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks
let t = first; let t = first;
for (let i=0 ; i<nPoints ; i+= chunkSize) { for (let i=0 ; i<nPoints ; i+= chunkSize) {
if ((verbose)&&i) console.log(`${sectionName}: ` + i); if (logger) logger.debug(`Applying key ${sectionName}: ${i}/${nPoints}`);
const n= Math.min(nPoints-i, chunkSize ); const n= Math.min(nPoints-i, chunkSize );
const buffInU = await fdOld.read(n * sG); const buffInU = await fdOld.read(n * sG);
const buffInLEM = await G.batchUtoLEM(buffInU); const buffInLEM = await G.batchUtoLEM(buffInU);
@ -59,6 +58,3 @@ async function applyKeyToChallangeSection(fdOld, fdNew, responseHasher, curve, g
} }
} }
module.exports.applyKeyToChallangeSection = applyKeyToChallangeSection;
module.exports.applyKeyToSection = applyKeyToSection;
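Both helpers apply the same geometric key schedule: point `i` of the section is multiplied by `first * inc^i`, with `t` advanced chunk by chunk. A plain-scalar sketch of that schedule (the real code applies it to G1/G2 points via `batchApplyKey`):

```js
// Scalar stand-in for the per-point factors used by applyKeyToSection.
function keySchedule(nPoints, first, inc) {
    const factors = [];
    let t = first;
    for (let i = 0; i < nPoints; i++) {
        factors.push(t);  // point i is multiplied by first * inc^i
        t *= inc;
    }
    return factors;
}

console.log(keySchedule(4, 1n, 5n));  // [ 1n, 5n, 25n, 125n ]
```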

View File

@ -1,31 +0,0 @@
const blake2wasm = require("blake2b-wasm");
async function run() {
await blake2wasm.ready();
const hasher1 = blake2wasm(64);
hasher1.update(Uint8Array.of(1,2,3,4));
const ph = hasher1.getPartialHash();
hasher1.update(Uint8Array.of(5,6,7,8));
console.log(hasher1.digest("hex"));
const hasher2 = blake2wasm(64);
hasher2.setPartialHash(ph);
hasher2.update(Uint8Array.of(5,6,7,8));
console.log(hasher2.digest("hex"));
}
run().then(() => {
process.exit();
});

View File

@ -1,11 +1,10 @@
module.exports.newAccumulator = require("./powersoftau_new"); export {default as newAccumulator} from "./powersoftau_new.js";
module.exports.exportChallange = require("./powersoftau_exportchallange"); export {default as exportChallange} from "./powersoftau_export_challange.js";
module.exports.challangeContribute = require("./powersoftau_challangecontribute"); export {default as importResponse} from "./powersoftau_import.js";
module.exports.importResponse = require("./powersoftau_import"); export {default as verify} from "./powersoftau_verify.js";
module.exports.verify = require("./powersoftau_verify"); export {default as challangeContribute} from "./powersoftau_challange_contribute.js";
module.exports.challangeContribute = require("./powersoftau_challangecontribute"); export {default as beacon} from "./powersoftau_beacon.js";
module.exports.beacon = require("./powersoftau_beacon"); export {default as contribute} from "./powersoftau_contribute.js";
module.exports.contribute = require("./powersoftau_contribute"); export {default as preparePhase2} from "./powersoftau_preparephase2.js";
module.exports.preparePhase2 = require("./powersoftau_preparephase2"); export {default as exportJson} from "./powersoftau_export_json.js";
module.exports.exportJson = require("./powersoftau_exportjson");
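The barrel now re-exports each module's default under a stable name, so the whole phase-1 API is one import away. For example, a programmatic contribution (signature per `powersoftau_contribute.js` below, which now resolves to the response hash):

```js
import * as powersoftau from "./powersoftau.js";

// Programmatic equivalent of the `powersoftau contribute` CLI command.
const responseHash = await powersoftau.contribute(
    "old.ptau", "new.ptau", "my contribution", "some entropy", console);
```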

View File

@ -1,26 +1,26 @@
const Blake2b = require("blake2b-wasm"); import Blake2b from "blake2b-wasm";
const utils = require("./powersoftau_utils"); import * as utils from "./powersoftau_utils.js";
const misc = require("./misc"); import * as misc from "./misc.js";
const binFileUtils = require("./binfileutils"); import * as binFileUtils from "./binfileutils.js";
async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp, beaconHashStr, verbose) { export default async function beacon(oldPtauFilename, newPTauFilename, name, beaconHashStr,numIterationsExp, logger) {
const beaconHash = misc.hex2ByteArray(beaconHashStr); const beaconHash = misc.hex2ByteArray(beaconHashStr);
if ( (beaconHash.byteLength == 0) if ( (beaconHash.byteLength == 0)
|| (beaconHash.byteLength*2 !=beaconHashStr.length)) || (beaconHash.byteLength*2 !=beaconHashStr.length))
{ {
console.log("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)"); if (logger) logger.error("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
return false; return false;
} }
if (beaconHash.length>=256) { if (beaconHash.length>=256) {
console.log("Maximum lenght of beacon hash is 255 bytes"); if (logger) logger.error("Maximum lenght of beacon hash is 255 bytes");
return false; return false;
} }
numIterationsExp = parseInt(numIterationsExp); numIterationsExp = parseInt(numIterationsExp);
if ((numIterationsExp<10)||(numIterationsExp>63)) { if ((numIterationsExp<10)||(numIterationsExp>63)) {
console.log("Invalid numIterationsExp. (Must be between 10 and 63)"); if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)");
return false; return false;
} }
@ -30,10 +30,11 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1); const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power, ceremonyPower} = await utils.readPTauHeader(fdOld, sections); const {curve, power, ceremonyPower} = await utils.readPTauHeader(fdOld, sections);
if (power != ceremonyPower) { if (power != ceremonyPower) {
throw new Error("This file has been reduced. You cannot contribute into a reduced file."); if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
return false;
} }
if (sections[12]) { if (sections[12]) {
console.log("WARNING: Contributing into a file that has phase2 calculated. You will have to prepare phase2 again."); if (logger) logger.warn("Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
} }
const contributions = await utils.readContributions(fdOld, curve, sections); const contributions = await utils.readContributions(fdOld, curve, sections);
const curContribution = { const curContribution = {
@ -48,7 +49,7 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
if (contributions.length>0) { if (contributions.length>0) {
lastChallangeHash = contributions[contributions.length-1].nextChallange; lastChallangeHash = contributions[contributions.length-1].nextChallange;
} else { } else {
lastChallangeHash = utils.calculateFirstChallangeHash(curve, power, verbose); lastChallangeHash = utils.calculateFirstChallangeHash(curve, power, logger);
} }
curContribution.key = utils.keyFromBeacon(curve, lastChallangeHash, beaconHash, numIterationsExp); curContribution.key = utils.keyFromBeacon(curve, lastChallangeHash, beaconHash, numIterationsExp);
@ -62,15 +63,15 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
const startSections = []; const startSections = [];
let firstPoints; let firstPoints;
firstPoints = await processSection(2, "G1", (1<<power) * 2 -1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1" ); firstPoints = await processSection(2, "G1", (1<<power) * 2 -1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1", logger );
curContribution.tauG1 = firstPoints[1]; curContribution.tauG1 = firstPoints[1];
firstPoints = await processSection(3, "G2", (1<<power) , curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2" ); firstPoints = await processSection(3, "G2", (1<<power) , curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2", logger );
curContribution.tauG2 = firstPoints[1]; curContribution.tauG2 = firstPoints[1];
firstPoints = await processSection(4, "G1", (1<<power) , curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1" ); firstPoints = await processSection(4, "G1", (1<<power) , curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1", logger );
curContribution.alphaG1 = firstPoints[0]; curContribution.alphaG1 = firstPoints[0];
firstPoints = await processSection(5, "G1", (1<<power) , curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1" ); firstPoints = await processSection(5, "G1", (1<<power) , curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1", logger );
curContribution.betaG1 = firstPoints[0]; curContribution.betaG1 = firstPoints[0];
firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2" ); firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2", logger );
curContribution.betaG2 = firstPoints[0]; curContribution.betaG2 = firstPoints[0];
curContribution.partialHash = responseHasher.getPartialHash(); curContribution.partialHash = responseHasher.getPartialHash();
@ -82,22 +83,20 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
responseHasher.update(new Uint8Array(buffKey)); responseHasher.update(new Uint8Array(buffKey));
const hashResponse = responseHasher.digest(); const hashResponse = responseHasher.digest();
console.log("Contribution Response Hash imported: "); if (logger) logger.info(misc.formatHash(hashResponse, "Contribution Response Hash imported: "));
console.log(misc.formatHash(hashResponse));
const nextChallangeHasher = new Blake2b(64); const nextChallangeHasher = new Blake2b(64);
nextChallangeHasher.update(hashResponse); nextChallangeHasher.update(hashResponse);
await hashSection(fdNew, "G1", 2, (1 << power) * 2 -1, "tauG1"); await hashSection(fdNew, "G1", 2, (1 << power) * 2 -1, "tauG1", logger);
await hashSection(fdNew, "G2", 3, (1 << power) , "tauG2"); await hashSection(fdNew, "G2", 3, (1 << power) , "tauG2", logger);
await hashSection(fdNew, "G1", 4, (1 << power) , "alphaTauG1"); await hashSection(fdNew, "G1", 4, (1 << power) , "alphaTauG1", logger);
await hashSection(fdNew, "G1", 5, (1 << power) , "betaTauG1"); await hashSection(fdNew, "G1", 5, (1 << power) , "betaTauG1", logger);
await hashSection(fdNew, "G2", 6, 1 , "betaG2"); await hashSection(fdNew, "G2", 6, 1 , "betaG2", logger);
curContribution.nextChallange = nextChallangeHasher.digest(); curContribution.nextChallange = nextChallangeHasher.digest();
console.log("Next Challange Hash: "); if (logger) logger.info(misc.formatHash(curContribution.nextChallange, "Next Challange Hash: "));
console.log(misc.formatHash(curContribution.nextChallange));
contributions.push(curContribution); contributions.push(curContribution);
@ -106,9 +105,9 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
await fdOld.close(); await fdOld.close();
await fdNew.close(); await fdNew.close();
return; return hashResponse;
async function processSection(sectionId, groupName, NPoints, first, inc, sectionName) { async function processSection(sectionId, groupName, NPoints, first, inc, sectionName, logger) {
const res = []; const res = [];
fdOld.pos = sections[sectionId][0].p; fdOld.pos = sections[sectionId][0].p;
@ -121,7 +120,7 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks
let t = first; let t = first;
for (let i=0 ; i<NPoints ; i+= chunkSize) { for (let i=0 ; i<NPoints ; i+= chunkSize) {
if ((verbose)&&i) console.log(`${sectionName}: ` + i); if (logger) logger.debug(`applying key${sectionName}: ${i}/${NPoints}`);
const n= Math.min(NPoints-i, chunkSize ); const n= Math.min(NPoints-i, chunkSize );
const buffIn = await fdOld.read(n * sG); const buffIn = await fdOld.read(n * sG);
const buffOutLEM = await G.batchApplyKey(buffIn, t, inc); const buffOutLEM = await G.batchApplyKey(buffIn, t, inc);
@ -150,7 +149,7 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
} }
async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName) { async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName, logger) {
const G = curve[groupName]; const G = curve[groupName];
const sG = G.F.n8*2; const sG = G.F.n8*2;
@ -160,7 +159,7 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
fdTo.pos = startSections[sectionId]; fdTo.pos = startSections[sectionId];
for (let i=0; i< nPoints; i += nPointsChunk) { for (let i=0; i< nPoints; i += nPointsChunk) {
if ((verbose)&&i) console.log(`Hashing ${sectionName}: ` + i); if (logger) logger.debug(`Hashing ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints-i, nPointsChunk); const n = Math.min(nPoints-i, nPointsChunk);
const buffLEM = await fdTo.read(n * sG); const buffLEM = await fdTo.read(n * sG);
@ -174,4 +173,3 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
} }
} }
module.exports = beacon;
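Note the reordered signature (`beaconHashStr` now precedes `numIterationsExp`) and that the function resolves to the response hash instead of `undefined`. A call sketch with an illustrative hex string and iteration count:

```js
import beacon from "./powersoftau_beacon.js";

const hashResponse = await beacon(
    "old.ptau", "beacon.ptau", "beacon round",
    "0102030405060708090a0b0c0d0e0f10",  // any even-length hex string under 255 bytes
    10,                                  // numIterationsExp must be in [10, 63]
    console);
```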

View File

@ -16,23 +16,22 @@
// G2*tp*alpha (compressed) // G2*tp*alpha (compressed)
// G2*up*beta (compressed) // G2*up*beta (compressed)
const fastFile = require("fastfile"); import * as fastFile from "fastfile";
const assert = require("assert"); import Blake2b from "blake2b-wasm";
const Blake2b = require("blake2b-wasm"); import * as utils from "./powersoftau_utils.js";
const fs = require("fs"); import * as misc from "./misc.js";
const utils = require("./powersoftau_utils"); import { applyKeyToChallangeSection } from "./mpc_applykey.js";
const misc = require("./misc"); import * as keyPair from "./keypair.js";
const { applyKeyToChallangeSection } = require("./mpc_applykey");
const keyPair = require("./keypair");
async function challangeContribute(curve, challangeFilename, responesFileName, entropy, verbose) { export default async function challangeContribute(curve, challangeFilename, responesFileName, entropy, logger) {
await Blake2b.ready(); await Blake2b.ready();
let stats = await fs.promises.stat(challangeFilename); const fdFrom = await fastFile.readExisting(challangeFilename);
const sG1 = curve.F1.n64*8*2; const sG1 = curve.F1.n64*8*2;
const sG2 = curve.F2.n64*8*2; const sG2 = curve.F2.n64*8*2;
const domainSize = (stats.size + sG1 - 64 - sG2) / (4*sG1 + sG2); const domainSize = (fdFrom.totalSize + sG1 - 64 - sG2) / (4*sG1 + sG2);
let e = domainSize; let e = domainSize;
let power = 0; let power = 0;
while (e>1) { while (e>1) {
@ -40,49 +39,37 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
power += 1; power += 1;
} }
assert(1<<power == domainSize, "Invalid file size"); if (1<<power != domainSize) throw new Error("Invalid file size");
console.log("Power to tau size: "+power); if (logger) logger.debug("Power to tau size: "+power);
const fdFrom = await fastFile.readExisting(challangeFilename); const rng = await misc.getRandomRng(entropy);
const fdTo = await fastFile.createOverride(responesFileName); const fdTo = await fastFile.createOverride(responesFileName);
// Calculate the hash // Calculate the hash
console.log("Hashing challange"); if (logger) logger.debug("Hashing challange");
const challangeHasher = Blake2b(64); const challangeHasher = Blake2b(64);
for (let i=0; i<stats.size; i+= fdFrom.pageSize) { for (let i=0; i<fdFrom.totalSize; i+= fdFrom.pageSize) {
const s = Math.min(stats.size - i, fdFrom.pageSize); const s = Math.min(fdFrom.totalSize - i, fdFrom.pageSize);
const buff = await fdFrom.read(s); const buff = await fdFrom.read(s);
challangeHasher.update(buff); challangeHasher.update(buff);
} }
const claimedHash = await fdFrom.read(64, 0); const claimedHash = await fdFrom.read(64, 0);
console.log("Claimed Previus Challange Hash: "); if (logger) logger.info(misc.formatHash(claimedHash, "Claimed Previus Response Hash: "));
console.log(misc.formatHash(claimedHash));
const challangeHash = challangeHasher.digest(); const challangeHash = challangeHasher.digest();
console.log("Current Challange Hash: "); if (logger) logger.info(misc.formatHash(challangeHash, "Current Challange Hash: "));
console.log(misc.formatHash(challangeHash));
const rng = await misc.getRandomRng(entropy);
const key = keyPair.createPTauKey(curve, challangeHash, rng); const key = keyPair.createPTauKey(curve, challangeHash, rng);
if (verbose) { if (logger) {
["tau", "alpha", "beta"].forEach( (k) => { ["tau", "alpha", "beta"].forEach( (k) => {
console.log(k, ".g1_s_x: " + key[k].g1_s[0].toString(16)); logger.debug(k + ".g1_s: " + curve.G1.toString(key[k].g1_s, 16));
console.log(k, ".g1_s_y: " + key[k].g1_s[1].toString(16)); logger.debug(k + ".g1_sx: " + curve.G1.toString(key[k].g1_sx, 16));
console.log(k, ".g1_sx_x: " + key[k].g1_sx[0].toString(16)); logger.debug(k + ".g2_sp: " + curve.G2.toString(key[k].g2_sp, 16));
console.log(k, ".g1_sx_y: " + key[k].g1_sx[1].toString(16)); logger.debug(k + ".g2_spx: " + curve.G2.toString(key[k].g2_spx, 16));
console.log(k, ".g2_sp_x_c0: " + key[k].g2_sp[0][0].toString(16)); logger.debug("");
console.log(k, ".g2_sp_x_c1: " + key[k].g2_sp[0][1].toString(16));
console.log(k, ".g2_sp_y_c0: " + key[k].g2_sp[1][0].toString(16));
console.log(k, ".g2_sp_y_c1: " + key[k].g2_sp[1][1].toString(16));
console.log(k, ".g2_spx_x_c0: " + key[k].g2_spx[0][0].toString(16));
console.log(k, ".g2_spx_x_c1: " + key[k].g2_spx[0][1].toString(16));
console.log(k, ".g2_spx_y_c0: " + key[k].g2_spx[1][0].toString(16));
console.log(k, ".g2_spx_y_c1: " + key[k].g2_spx[1][1].toString(16));
console.log("");
}); });
} }
@ -91,11 +78,11 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
await fdTo.write(challangeHash); await fdTo.write(challangeHash);
responseHasher.update(challangeHash); responseHasher.update(challangeHash);
await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G1", (1<<power)*2-1, curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG1" , verbose ); await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G1", (1<<power)*2-1, curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG1" , logger );
await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G2", (1<<power) , curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG2" , verbose ); await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G2", (1<<power) , curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG2" , logger );
await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G1", (1<<power) , key.alpha.prvKey, key.tau.prvKey, "COMPRESSED", "alphaTauG1", verbose ); await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G1", (1<<power) , key.alpha.prvKey, key.tau.prvKey, "COMPRESSED", "alphaTauG1", logger );
await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G1", (1<<power) , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG1" , verbose ); await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G1", (1<<power) , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG1" , logger );
await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G2", 1 , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG2" , verbose ); await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G2", 1 , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG2" , logger );
// Write and hash key // Write and hash key
const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3); const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3);
@ -103,11 +90,9 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
await fdTo.write(buffKey); await fdTo.write(buffKey);
responseHasher.update(buffKey); responseHasher.update(buffKey);
const responseHash = responseHasher.digest(); const responseHash = responseHasher.digest();
console.log("Contribution Response Hash: "); if (logger) logger.info(misc.formatHash(responseHash, "Contribution Response Hash: "));
console.log(misc.formatHash(responseHash));
await fdTo.close(); await fdTo.close();
await fdFrom.close(); await fdFrom.close();
} }
module.exports = challangeContribute;
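With the size taken from `fdFrom.totalSize` instead of `fs.stat`, this routine no longer touches `fs` at all, which matters for the browser bundle. A call sketch with illustrative file names (module path per the barrel import in `powersoftau.js`):

```js
import { getCurveFromName } from "./curves.js";
import challangeContribute from "./powersoftau_challange_contribute.js";

const curve = await getCurveFromName("bn128");
await challangeContribute(curve, "challange.bin", "response.bin", "more entropy", console);
```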

View File

@ -5,22 +5,23 @@
// 2^N AlphaTauG1 Points (uncompressed) // 2^N AlphaTauG1 Points (uncompressed)
// 2^N BetaTauG1 Points (uncompressed) // 2^N BetaTauG1 Points (uncompressed)
const Blake2b = require("blake2b-wasm"); import Blake2b from "blake2b-wasm";
const utils = require("./powersoftau_utils"); import * as utils from "./powersoftau_utils.js";
const keyPair = require("./keypair"); import * as keyPair from "./keypair.js";
const binFileUtils = require("./binfileutils"); import * as binFileUtils from "./binfileutils.js";
const misc = require("./misc"); import * as misc from "./misc.js";
async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbose) { export default async function contribute(oldPtauFilename, newPTauFilename, name, entropy, logger) {
await Blake2b.ready(); await Blake2b.ready();
const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1); const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power, ceremonyPower} = await utils.readPTauHeader(fdOld, sections); const {curve, power, ceremonyPower} = await utils.readPTauHeader(fdOld, sections);
if (power != ceremonyPower) { if (power != ceremonyPower) {
if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
throw new Error("This file has been reduced. You cannot contribute into a reduced file."); throw new Error("This file has been reduced. You cannot contribute into a reduced file.");
} }
if (sections[12]) { if (sections[12]) {
console.log("WARNING: Contributing into a file that has phase2 calculated. You will have to prepare phase2 again."); if (logger) logger.warn("WARNING: Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
} }
const contributions = await utils.readContributions(fdOld, curve, sections); const contributions = await utils.readContributions(fdOld, curve, sections);
const curContribution = { const curContribution = {
@ -30,15 +31,16 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
let lastChallangeHash; let lastChallangeHash;
const rng = await misc.getRandomRng(entropy);
if (contributions.length>0) { if (contributions.length>0) {
lastChallangeHash = contributions[contributions.length-1].nextChallange; lastChallangeHash = contributions[contributions.length-1].nextChallange;
} else { } else {
lastChallangeHash = utils.calculateFirstChallangeHash(curve, power, verbose); lastChallangeHash = utils.calculateFirstChallangeHash(curve, power, logger);
} }
// Generate a random key // Generate a random key
const rng = await misc.getRandomRng(entropy);
curContribution.key = keyPair.createPTauKey(curve, lastChallangeHash, rng); curContribution.key = keyPair.createPTauKey(curve, lastChallangeHash, rng);
@ -72,8 +74,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
responseHasher.update(new Uint8Array(buffKey)); responseHasher.update(new Uint8Array(buffKey));
const hashResponse = responseHasher.digest(); const hashResponse = responseHasher.digest();
console.log("Contribution Response Hash imported: "); if (logger) logger.info(misc.formatHash(hashResponse, "Contribution Response Hash imported: "));
console.log(misc.formatHash(hashResponse));
const nextChallangeHasher = new Blake2b(64); const nextChallangeHasher = new Blake2b(64);
nextChallangeHasher.update(hashResponse); nextChallangeHasher.update(hashResponse);
@ -86,8 +87,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
curContribution.nextChallange = nextChallangeHasher.digest(); curContribution.nextChallange = nextChallangeHasher.digest();
console.log("Next Challange Hash: "); if (logger) logger.info(misc.formatHash(curContribution.nextChallange, "Next Challange Hash: "));
console.log(misc.formatHash(curContribution.nextChallange));
contributions.push(curContribution); contributions.push(curContribution);
@ -96,7 +96,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
await fdOld.close(); await fdOld.close();
await fdNew.close(); await fdNew.close();
return; return hashResponse;
async function processSection(sectionId, groupName, NPoints, first, inc, sectionName) { async function processSection(sectionId, groupName, NPoints, first, inc, sectionName) {
const res = []; const res = [];
@ -111,7 +111,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks
let t = first; let t = first;
for (let i=0 ; i<NPoints ; i+= chunkSize) { for (let i=0 ; i<NPoints ; i+= chunkSize) {
if ((verbose)&&i) console.log(`${sectionName}: ` + i); if (logger) logger.debug(`processing: ${sectionName}: ${i}/${NPoints}`);
const n= Math.min(NPoints-i, chunkSize ); const n= Math.min(NPoints-i, chunkSize );
const buffIn = await fdOld.read(n * sG); const buffIn = await fdOld.read(n * sG);
const buffOutLEM = await G.batchApplyKey(buffIn, t, inc); const buffOutLEM = await G.batchApplyKey(buffIn, t, inc);
@ -150,7 +150,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
fdTo.pos = startSections[sectionId]; fdTo.pos = startSections[sectionId];
for (let i=0; i< nPoints; i += nPointsChunk) { for (let i=0; i< nPoints; i += nPointsChunk) {
if ((verbose)&&i) console.log(`Hashing ${sectionName}: ` + i); if ((logger)&&i) logger.debug(`Hashing ${sectionName}: ` + i);
const n = Math.min(nPoints-i, nPointsChunk); const n = Math.min(nPoints-i, nPointsChunk);
const buffLEM = await fdTo.read(n * sG); const buffLEM = await fdTo.read(n * sG);
@ -166,4 +166,3 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
} }
module.exports = contribute;

View File

@@ -6,13 +6,13 @@
 // 2^N BetaTauG1 Points (uncompressed)
 // BetaG2 (uncompressed)
-const fastFile = require("fastfile");
-const Blake2b = require("blake2b-wasm");
-const utils = require("./powersoftau_utils");
-const binFileUtils = require("./binfileutils");
-const misc = require("./misc");
-async function exportChallange(pTauFilename, challangeFilename, verbose) {
+import * as fastFile from "fastfile";
+import Blake2b from "blake2b-wasm";
+import * as utils from "./powersoftau_utils.js";
+import * as binFileUtils from "./binfileutils.js";
+import * as misc from "./misc.js";
+export default async function exportChallange(pTauFilename, challangeFilename, logger) {
     await Blake2b.ready();
     const {fd: fdFrom, sections} = await binFileUtils.readBinFile(pTauFilename, "ptau", 1);
@@ -28,11 +28,9 @@ async function exportChallange(pTauFilename, challangeFilename, verbose) {
         curChallangeHash = contributions[contributions.length-1].nextChallange;
     }
-    console.log("Last Response Hash: ");
-    console.log(misc.formatHash(lastResponseHash));
-    console.log("New Challange Hash: ");
-    console.log(misc.formatHash(curChallangeHash));
+    if (logger) logger.info(misc.formatHash(lastResponseHash, "Last Response Hash: "));
+    if (logger) logger.info(misc.formatHash(curChallangeHash, "New Challange Hash: "));
     const fdTo = await fastFile.createOverride(challangeFilename);
@@ -53,12 +51,14 @@ async function exportChallange(pTauFilename, challangeFilename, verbose) {
     const calcCurChallangeHash = toHash.digest();
     if (!misc.hashIsEqual (curChallangeHash, calcCurChallangeHash)) {
-        console.log("Calc Curret Challange Hash: ");
-        console.log(misc.formatHash(calcCurChallangeHash));
+        if (logger) logger.info(misc.formatHash(calcCurChallangeHash, "Calc Curret Challange Hash: "));
+        if (logger) logger.error("PTau file is corrupted. Calculated new challange hash does not match with the eclared one");
         throw new Error("PTau file is corrupted. Calculated new challange hash does not match with the eclared one");
     }
+    return curChallangeHash;
     async function exportSection(sectionId, groupName, nPoints, sectionName) {
         const G = curve[groupName];
         const sG = G.F.n8*2;
@@ -66,7 +66,7 @@ async function exportChallange(pTauFilename, challangeFilename, verbose) {
         await binFileUtils.startReadUniqueSection(fdFrom, sections, sectionId);
         for (let i=0; i< nPoints; i+= nPointsChunk) {
-            if ((verbose)&&i) console.log(`${sectionName}: ` + i);
+            if (logger) logger.debug(`Exporting ${sectionName}: ${i}/${nPoints}`);
             const n = Math.min(nPoints-i, nPointsChunk);
             let buff;
             buff = await fdFrom.read(n*sG);
@@ -80,4 +80,3 @@ async function exportChallange(pTauFilename, challangeFilename, verbose) {
 }
-module.exports = exportChallange;
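`exportChallange` likewise now returns the challenge hash it wrote, so a caller can record it without parsing log output. A sketch under the same assumptions (module path illustrative):

```js
import exportChallange from "./src/powersoftau_export.js"; // path assumed

const curChallangeHash = await exportChallange(
    "pot12_0002.ptau",  // ptau file to export from
    "challange_0003",   // challenge file to write
    console             // optional logger
);
```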

View File

@@ -1,9 +1,7 @@
-const utils = require("./powersoftau_utils");
-const binFileUtils = require("./binfileutils");
-const {stringifyBigInts} = require("ffjavascript").utils;
-const fs = require("fs");
-async function exportJson(pTauFilename, jsonFileName, verbose) {
+import * as utils from "./powersoftau_utils.js";
+import * as binFileUtils from "./binfileutils.js";
+export default async function exportJson(pTauFilename, verbose) {
     const {fd, sections} = await binFileUtils.readBinFile(pTauFilename, "ptau", 1);
     const {curve, power} = await utils.readPTauHeader(fd, sections);
@@ -26,8 +24,8 @@ async function exportJson(pTauFilename, jsonFileName, verbose) {
     await fd.close();
-    const S = JSON.stringify(stringifyBigInts(pTau), null, 1);
-    await fs.promises.writeFile(jsonFileName, S);
+    return pTau;
     async function exportSection(sectionId, groupName, nPoints, sectionName) {
@@ -69,5 +67,4 @@ async function exportJson(pTauFilename, jsonFileName, verbose) {
 }
-module.exports = exportJson;
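Because `exportJson` now returns the `pTau` object instead of writing a file, the serialization the old code did inline moves to the caller. A sketch reproducing the old behaviour on top of the new API, assuming ffjavascript exposes `utils.stringifyBigInts` the same way under ESM (the output file name is illustrative):

```js
import fs from "fs";
import { utils } from "ffjavascript";
import exportJson from "./src/powersoftau_export_json.js"; // path assumed

const pTau = await exportJson("pot12_final.ptau", true);
// Same stringifyBigInts + writeFile pair the removed code used.
const S = JSON.stringify(utils.stringifyBigInts(pTau), null, 1);
await fs.promises.writeFile("pot12_final.json", S);
```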

View File

@@ -1,12 +1,10 @@
-const assert = require("assert");
-const fastFile = require("fastfile");
-const Blake2b = require("blake2b-wasm");
-const fs = require("fs");
-const utils = require("./powersoftau_utils");
-const binFileUtils = require("./binfileutils");
-const misc = require("./misc");
-async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, verbose) {
+import * as fastFile from "fastfile";
+import Blake2b from "blake2b-wasm";
+import * as utils from "./powersoftau_utils.js";
+import * as binFileUtils from "./binfileutils.js";
+import * as misc from "./misc.js";
+export default async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, logger) {
     await Blake2b.ready();
@@ -15,39 +13,40 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, verbose) {
     const contributions = await utils.readContributions(fdOld, curve, sections);
     const currentContribution = {};
+    if (name) currentContribution.name = name;
     const sG1 = curve.F1.n8*2;
     const scG1 = curve.F1.n8; // Compresed size
     const sG2 = curve.F2.n8*2;
     const scG2 = curve.F2.n8; // Compresed size
-    let stats = await fs.promises.stat(contributionFilename);
-    assert.equal(stats.size,
+    const fdResponse = await fastFile.readExisting(contributionFilename);
+    if (fdResponse.totalSize !=
         64 +                            // Old Hash
         ((1<<power)*2-1)*scG1 +
         (1<<power)*scG2 +
         (1<<power)*scG1 +
         (1<<power)*scG1 +
         scG2 +
-        sG1*6 + sG2*3,
-        "Size of the contribution is invalid"
-    );
+        sG1*6 + sG2*3)
+        throw new Error("Size of the contribution is invalid");
     let lastChallangeHash;
     if (contributions.length>0) {
         lastChallangeHash = contributions[contributions.length-1].nextChallange;
     } else {
-        lastChallangeHash = utils.calculateFirstChallangeHash(curve, power, verbose);
+        lastChallangeHash = utils.calculateFirstChallangeHash(curve, power, logger);
     }
     const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 7);
     await utils.writePTauHeader(fdNew, curve, power);
-    const fdResponse = await fastFile.readExisting(contributionFilename);
     const contributionPreviousHash = await fdResponse.read(64);
-    assert(misc.hashIsEqual(contributionPreviousHash,lastChallangeHash),
-        "Wrong contribution. this contribution is not based on the previus hash");
+    if(!misc.hashIsEqual(contributionPreviousHash,lastChallangeHash))
+        throw new Error("Wrong contribution. this contribution is not based on the previus hash");
     const hasherResponse = new Blake2b(64);
     hasherResponse.update(contributionPreviousHash);
@@ -75,22 +74,20 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, verbose) {
     hasherResponse.update(new Uint8Array(buffKey));
     const hashResponse = hasherResponse.digest();
-    console.log("Contribution Response Hash imported: ");
-    console.log(misc.formatHash(hashResponse));
+    if (logger) logger.info(misc.formatHash(hashResponse, "Contribution Response Hash imported: "));
     const nextChallangeHasher = new Blake2b(64);
     nextChallangeHasher.update(hashResponse);
-    await hashSection(fdNew, "G1", 2, (1 << power) * 2 -1, "tauG1");
-    await hashSection(fdNew, "G2", 3, (1 << power)       , "tauG2");
-    await hashSection(fdNew, "G1", 4, (1 << power)       , "alphaTauG1");
-    await hashSection(fdNew, "G1", 5, (1 << power)       , "betaTauG1");
-    await hashSection(fdNew, "G2", 6, 1                  , "betaG2");
+    await hashSection(fdNew, "G1", 2, (1 << power) * 2 -1, "tauG1", logger);
+    await hashSection(fdNew, "G2", 3, (1 << power)       , "tauG2", logger);
+    await hashSection(fdNew, "G1", 4, (1 << power)       , "alphaTauG1", logger);
+    await hashSection(fdNew, "G1", 5, (1 << power)       , "betaTauG1", logger);
+    await hashSection(fdNew, "G2", 6, 1                  , "betaG2", logger);
     currentContribution.nextChallange = nextChallangeHasher.digest();
-    console.log("Next Challange Hash: ");
-    console.log(misc.formatHash(currentContribution.nextChallange));
+    if (logger) logger.info(misc.formatHash(currentContribution.nextChallange, "Next Challange Hash: "));
     contributions.push(currentContribution);
@@ -100,6 +97,8 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, verbose) {
     await fdNew.close();
     await fdOld.close();
+    return currentContribution.nextChallange;
     async function processSection(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) {
         const G = curve[groupName];
@@ -114,7 +113,7 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, verbose) {
         startSections[sectionId] = fdTo.pos;
         for (let i=0; i< nPoints; i += nPointsChunk) {
-            if ((verbose)&&i) console.log(`Importing ${sectionName}: ` + i);
+            if (logger) logger.debug(`Importing ${sectionName}: ${i}/${nPoints}`);
             const n = Math.min(nPoints-i, nPointsChunk);
             const buffC = await fdFrom.read(n * scG);
@@ -138,7 +137,7 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, verbose) {
     }
-    async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName) {
+    async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName, logger) {
         const G = curve[groupName];
         const sG = G.F.n8*2;
@@ -148,7 +147,7 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, verbose) {
         fdTo.pos = startSections[sectionId];
         for (let i=0; i< nPoints; i += nPointsChunk) {
-            if ((verbose)&&i) console.log(`Hashing ${sectionName}: ` + i);
+            if (logger) logger.debug(`Hashing ${sectionName}: ${i}/${nPoints}`);
             const n = Math.min(nPoints-i, nPointsChunk);
             const buffLEM = await fdTo.read(n * sG);
@@ -163,4 +162,3 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, verbose) {
 }
-module.exports = importResponse;
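`importResponse` now checks the response size through `fdResponse.totalSize` instead of `fs.stat`, throws plain `Error`s instead of asserting, and returns the next challenge hash. A usage sketch matching the third-party flow (module path illustrative; passing `true` for `importPoints` is an assumption based on how the CLI flow imports the full point set):

```js
import importResponse from "./src/powersoftau_import.js"; // path assumed

const nextChallangeHash = await importResponse(
    "pot12_0002.ptau",          // accumulator the challenge was exported from
    "response_0003",            // response produced by the external software
    "pot12_0003.ptau",          // new accumulator to write
    "Third contribution name",  // stored in the contribution record
    true,                       // importPoints (assumed boolean)
    console                     // optional logger
);
```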

View File

@@ -46,13 +46,12 @@ contributions(7)
     ]
  */
-const ptauUtils = require("./powersoftau_utils");
-const binFileUtils = require("./binfileutils");
-const utils = require("./powersoftau_utils");
-const Blake2b = require("blake2b-wasm");
-const misc = require("./misc");
-async function newAccumulator(curve, power, fileName, verbose) {
+import * as ptauUtils from "./powersoftau_utils.js";
+import * as binFileUtils from "./binfileutils.js";
+import Blake2b from "blake2b-wasm";
+import * as misc from "./misc.js";
+export default async function newAccumulator(curve, power, fileName, logger) {
     await Blake2b.ready();
@@ -69,7 +68,7 @@ async function newAccumulator(curve, power, fileName, verbose) {
     const nTauG1 = (1 << power) * 2 -1;
     for (let i=0; i< nTauG1; i++) {
         await fd.write(buffG1);
-        if ((verbose)&&((i%100000) == 0)&&i) console.log("tauG1: " + i);
+        if ((logger)&&((i%100000) == 0)&&i) logger.info("tauG1: " + i);
     }
     await binFileUtils.endWriteSection(fd);
@@ -79,7 +78,7 @@ async function newAccumulator(curve, power, fileName, verbose) {
     const nTauG2 = (1 << power);
     for (let i=0; i< nTauG2; i++) {
         await fd.write(buffG2);
-        if ((verbose)&&((i%100000) == 0)&&i) console.log("tauG2: " + i);
+        if ((logger)&&((i%100000) == 0)&&i) logger.log("tauG2: " + i);
     }
     await binFileUtils.endWriteSection(fd);
@@ -89,7 +88,7 @@ async function newAccumulator(curve, power, fileName, verbose) {
     const nAlfaTauG1 = (1 << power);
     for (let i=0; i< nAlfaTauG1; i++) {
         await fd.write(buffG1);
-        if ((verbose)&&((i%100000) == 0)&&i) console.log("alphaTauG1: " + i);
+        if ((logger)&&((i%100000) == 0)&&i) logger.log("alphaTauG1: " + i);
     }
     await binFileUtils.endWriteSection(fd);
@@ -99,7 +98,7 @@ async function newAccumulator(curve, power, fileName, verbose) {
     const nBetaTauG1 = (1 << power);
     for (let i=0; i< nBetaTauG1; i++) {
         await fd.write(buffG1);
-        if ((verbose)&&((i%100000) == 0)&&i) console.log("betaTauG1: " + i);
+        if ((logger)&&((i%100000) == 0)&&i) logger.log("betaTauG1: " + i);
     }
     await binFileUtils.endWriteSection(fd);
@@ -117,14 +116,12 @@ async function newAccumulator(curve, power, fileName, verbose) {
     await fd.close();
-    const firstChallangeHash = utils.calculateFirstChallangeHash(curve, power, verbose);
-    console.log("Blank Contribution Hash:");
-    console.log(misc.formatHash(Blake2b(64).digest()));
-    console.log("First Contribution Hash:");
-    console.log(misc.formatHash(firstChallangeHash));
+    const firstChallangeHash = ptauUtils.calculateFirstChallangeHash(curve, power, logger);
+    if (logger) logger.debug(misc.formatHash(Blake2b(64).digest(), "Blank Contribution Hash:"));
+    if (logger) logger.info(misc.formatHash(firstChallangeHash, "First Contribution Hash:"));
+    return firstChallangeHash;
 }
-module.exports = newAccumulator;
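`newAccumulator` takes an already-instantiated `curve` object rather than a curve name, and now returns the first challenge hash. A sketch, assuming a `getCurveFromName` helper exists alongside the `getCurveFromQ` import seen in powersoftau_utils (both the helper and the paths are assumptions):

```js
import newAccumulator from "./src/powersoftau_new.js"; // path assumed
import { getCurveFromName } from "./src/curves.js";    // assumed helper

const curve = await getCurveFromName("bn128");
const firstChallangeHash = await newAccumulator(curve, 12, "pot12_0000.ptau", console);
```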

View File

@@ -1,10 +1,9 @@
-const binFileUtils = require("./binfileutils");
-const utils = require("./powersoftau_utils");
-const fastFile = require("fastfile");
-const {bitReverse} = require("./misc");
-const fs = require("fs");
-async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
+import * as binFileUtils from "./binfileutils.js";
+import * as utils from "./powersoftau_utils.js";
+import * as fastFile from "fastfile";
+import { bitReverse } from "./misc.js";
+export default async function preparePhase2(oldPtauFilename, newPTauFilename, logger) {
     const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
     const {curve, power} = await utils.readPTauHeader(fdOld, sections);
@@ -12,7 +11,8 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
     const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 11);
     await utils.writePTauHeader(fdNew, curve, power);
-    const fdTmp = await fastFile.createOverride(newPTauFilename+ ".tmp");
+    // const fdTmp = await fastFile.createOverride(newPTauFilename+ ".tmp");
+    const fdTmp = await fastFile.createOverride({type: "mem"});
     await binFileUtils.copySection(fdOld, sections, fdNew, 2);
     await binFileUtils.copySection(fdOld, sections, fdNew, 3);
@@ -30,13 +30,13 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
     await fdNew.close();
     await fdTmp.close();
-    await fs.promises.unlink(newPTauFilename+ ".tmp");
+    // await fs.promises.unlink(newPTauFilename+ ".tmp");
     return;
     async function processSection(oldSectionId, newSectionId, Gstr, sectionName) {
         const CHUNKPOW = 16;
-        if (verbose) console.log("Starting section: "+sectionName);
+        if (logger) logger.debug("Starting section: "+sectionName);
         await binFileUtils.startWriteSection(fdNew, newSectionId);
@@ -63,7 +63,7 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
         fdTmp.pos =0;
         for (let i=0; i<nChunks; i++) {
             let buff;
-            if (verbose) console.log(`${sectionName} Prepare ${i+1}/${nChunks}`);
+            if (logger) logger.debug(`${sectionName} Prepare ${i+1}/${nChunks}`);
             buff = await fdOld.read(pointsPerChunk*sGin);
             buff = await G.batchToJacobian(buff);
             for (let j=0; j<pointsPerChunk; j++) {
@@ -74,7 +74,7 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
         await binFileUtils.endReadSection(fdOld, true);
         for (let j=0; j<nChunks; j++) {
-            if (verbose) console.log(`${sectionName} ${p} FFTMix ${j+1}/${nChunks}`);
+            if (logger) logger.debug(`${sectionName} ${p} FFTMix ${j+1}/${nChunks}`);
             let buff;
             fdTmp.pos = (j*pointsPerChunk)*sGmid;
             buff = await fdTmp.read(pointsPerChunk*sGmid);
@@ -87,7 +87,7 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
             const nChunksPerGroup = nChunks / nGroups;
             for (let j=0; j<nGroups; j++) {
                 for (let k=0; k <nChunksPerGroup/2; k++) {
-                    if (verbose) console.log(`${sectionName} ${i}/${p} FFTJoin ${j+1}/${nGroups} ${k}/${nChunksPerGroup/2}`);
+                    if (logger) logger.debug(`${sectionName} ${i}/${p} FFTJoin ${j+1}/${nGroups} ${k}/${nChunksPerGroup/2}`);
                     const first = Fr.pow( PFr.w[i], k*pointsPerChunk);
                     const inc = PFr.w[i];
                     const o1 = j*nChunksPerGroup + k;
@@ -125,7 +125,7 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
         fdTmp.pos = 0;
         const factor = Fr.inv( Fr.e( 1<< p));
         for (let i=0; i<nChunks; i++) {
-            if (verbose) console.log(`${sectionName} ${p} FFTFinal ${i+1}/${nChunks}`);
+            if (logger) logger.debug(`${sectionName} ${p} FFTFinal ${i+1}/${nChunks}`);
             let buff;
             buff = await fdTmp.read(pointsPerChunk * sGmid);
             buff = await G.fftFinal(buff, factor);
@@ -145,4 +145,3 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
     }
 }
-module.exports = preparePhase2;
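Note the design change above: the FFT scratch space moves from a `.tmp` file on disk to an in-memory `fastFile` (`{type: "mem"}`), which drops the `fs` dependency and the unlink step at the cost of holding the intermediate Jacobian points in RAM. Calling it stays a one-liner (module path illustrative):

```js
import preparePhase2 from "./src/powersoftau_preparephase2.js"; // path assumed

await preparePhase2("pot12_beacon.ptau", "pot12_final.ptau", console);
```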

View File

@@ -1,48 +0,0 @@
const binFileUtils = require("./binfileutils");
const utils = require("./powersoftau_utils");
async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power} = await utils.readPTauHeader(fdOld, sections);
const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 11);
await utils.writePTauHeader(fdNew, curve, power);
await binFileUtils.copySection(fdOld, sections, fdNew, 2);
await binFileUtils.copySection(fdOld, sections, fdNew, 3);
await binFileUtils.copySection(fdOld, sections, fdNew, 4);
await binFileUtils.copySection(fdOld, sections, fdNew, 5);
await binFileUtils.copySection(fdOld, sections, fdNew, 6);
await binFileUtils.copySection(fdOld, sections, fdNew, 7);
await processSection(2, 12, "G1", (1<<power) , "tauG1" );
await processSection(3, 13, "G2", (1<<power) , "tauG2" );
await processSection(4, 14, "G1", (1<<power) , "alphaTauG1" );
await processSection(5, 15, "G1", (1<<power) , "betaTauG1" );
await fdOld.close();
await fdNew.close();
return;
async function processSection(oldSectionId, newSectionId, Gstr, NPoints, sectionName) {
if (verbose) console.log("Starting section: "+sectionName);
const G = curve[Gstr];
const sG = G.F.n8*2;
let buff;
await binFileUtils.startReadUniqueSection(fdOld, sections, oldSectionId);
buff = await fdOld.read(sG*NPoints);
await binFileUtils.endReadSection(fdOld, true);
buff = await G.ifft(buff, verbose ? console.log : null);
await binFileUtils.startWriteSection(fdNew, newSectionId);
await fdNew.write(buff);
await binFileUtils.endWriteSection(fdNew);
}
}
module.exports = preparePhase2;

View File

@@ -1,11 +1,10 @@
-const assert = require("assert");
-const Scalar = require("ffjavascript").Scalar;
-const Blake2b = require("blake2b-wasm");
-const keyPair = require("./keypair");
-const misc = require("./misc");
-const {getCurveFromQ} = require("./curves");
+import { Scalar } from "ffjavascript";
+import Blake2b from "blake2b-wasm";
+import * as keyPair from "./keypair.js";
+import * as misc from "./misc.js";
+import { getCurveFromQ } from "./curves.js";
-async function writePTauHeader(fd, curve, power, ceremonyPower) {
+export async function writePTauHeader(fd, curve, power, ceremonyPower) {
     // Write the header
     ///////////
@@ -31,9 +30,9 @@ async function writePTauHeader(fd, curve, power, ceremonyPower) {
     fd.pos = oldPos;
 }
-async function readPTauHeader(fd, sections) {
-    if (!sections[1])  assert(false, fd.fileName + ": File has no header");
-    if (sections[1].length>1) assert(false, fd.fileName +": File has more than one header");
+export async function readPTauHeader(fd, sections) {
+    if (!sections[1])  throw new Error(fd.fileName + ": File has no header");
+    if (sections[1].length>1) throw new Error(fd.fileName +": File has more than one header");
     fd.pos = sections[1][0].p;
     const n8 = await fd.readULE32();
@@ -42,25 +41,25 @@ async function readPTauHeader(fd, sections) {
     const curve = await getCurveFromQ(q);
-    assert(curve.F1.n64*8 == n8, fd.fileName +": Invalid size");
+    if (curve.F1.n64*8 != n8) throw new Error(fd.fileName +": Invalid size");
     const power = await fd.readULE32();
     const ceremonyPower = await fd.readULE32();
-    assert.equal(fd.pos-sections[1][0].p, sections[1][0].size);
+    if (fd.pos-sections[1][0].p != sections[1][0].size) throw new Error("Invalid PTau header size");
     return {curve, power, ceremonyPower};
 }
-async function readPtauPubKey(fd, curve, montgomery) {
+export async function readPtauPubKey(fd, curve, montgomery) {
     const buff = await fd.read(curve.F1.n8*2*6 + curve.F2.n8*2*3);
     return fromPtauPubKeyRpr(buff, 0, curve, montgomery);
 }
-function fromPtauPubKeyRpr(buff, pos, curve, montgomery) {
+export function fromPtauPubKeyRpr(buff, pos, curve, montgomery) {
     const key = {
         tau: {},
@@ -103,7 +102,7 @@ function fromPtauPubKeyRpr(buff, pos, curve, montgomery) {
     }
 }
-function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) {
+export function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) {
     writeG1(key.tau.g1_s);
     writeG1(key.tau.g1_sx);
@@ -136,7 +135,7 @@ function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) {
     return buff;
 }
-async function writePtauPubKey(fd, curve, key, montgomery) {
+export async function writePtauPubKey(fd, curve, key, montgomery) {
     const buff = new Uint8Array(curve.F1.n8*2*6 + curve.F2.n8*2*3);
     toPtauPubKeyRpr(buff, 0, curve, key, montgomery);
     await fd.write(buff);
@@ -206,9 +205,9 @@ async function readContribution(fd, curve) {
     }
 }
-async function readContributions(fd, curve, sections) {
-    if (!sections[7])  assert(false, fd.fileName + ": File has no contributions");
-    if (sections[7][0].length>1) assert(false, fd.fileName +": File has more than one contributions section");
+export async function readContributions(fd, curve, sections) {
+    if (!sections[7])  throw new Error(fd.fileName + ": File has no contributions");
+    if (sections[7][0].length>1) throw new Error(fd.fileName +": File has more than one contributions section");
     fd.pos = sections[7][0].p;
     const nContributions = await fd.readULE32();
@@ -219,7 +218,7 @@ async function readContributions(fd, curve, sections) {
         contributions.push(c);
     }
-    assert.equal(fd.pos-sections[7][0].p, sections[7][0].size);
+    if (fd.pos-sections[7][0].p != sections[7][0].size) throw new Error("Invalid contribution section size");
     return contributions;
 }
@@ -274,7 +273,7 @@ async function writeContribution(fd, curve, contribution) {
 }
-async function writeContributions(fd, curve, contributions) {
+export async function writeContributions(fd, curve, contributions) {
     await fd.writeULE32(7); // Header type
     const pContributionsSize = fd.pos;
@@ -292,8 +291,8 @@ async function writeContributions(fd, curve, contributions) {
     fd.pos = oldPos;
 }
-function calculateFirstChallangeHash(curve, power, verbose) {
-    if (verbose) console.log("Calculating First Challange Hash");
+export function calculateFirstChallangeHash(curve, power, logger) {
+    if (logger) logger.debug("Calculating First Challange Hash");
     const hasher = new Blake2b(64);
@@ -307,14 +306,14 @@ function calculateFirstChallangeHash(curve, power, verbose) {
     let n;
     n=(1 << power)*2 -1;
-    if (verbose) console.log("tauG1");
+    if (logger) logger.debug("Calculate Initial Hash: tauG1");
     hashBlock(vG1, n);
     n= 1 << power;
-    if (verbose) console.log("tauG2");
+    if (logger) logger.debug("Calculate Initial Hash: tauG2");
     hashBlock(vG2, n);
-    if (verbose) console.log("alphaTauG1");
+    if (logger) logger.debug("Calculate Initial Hash: alphaTauG1");
     hashBlock(vG1, n);
-    if (verbose) console.log("betaTauG1");
+    if (logger) logger.debug("Calculate Initial Hash: betaTauG1");
     hashBlock(vG1, n);
     hasher.update(vG2);
@@ -330,7 +329,7 @@ function calculateFirstChallangeHash(curve, power, verbose) {
         }
         for (let i=0; i<nBlocks; i++) {
             hasher.update(bigBuff);
-            if (verbose) console.log(i*blockSize);
+            if (logger) logger.debug("Initial hash: " +i*blockSize);
         }
         for (let i=0; i<rem; i++) {
             hasher.update(buff);
@@ -339,7 +338,7 @@ function calculateFirstChallangeHash(curve, power, verbose) {
 }
-function keyFromBeacon(curve, challangeHash, beaconHash, numIterationsExp) {
+export function keyFromBeacon(curve, challangeHash, beaconHash, numIterationsExp) {
     const rng = misc.rngFromBeaconParams(beaconHash, numIterationsExp);
@@ -348,14 +347,3 @@ function keyFromBeacon(curve, challangeHash, beaconHash, numIterationsExp) {
     return key;
 }
-module.exports.readPTauHeader = readPTauHeader;
-module.exports.writePTauHeader = writePTauHeader;
-module.exports.readPtauPubKey = readPtauPubKey;
-module.exports.writePtauPubKey = writePtauPubKey;
-module.exports.readContributions = readContributions;
-module.exports.writeContributions = writeContributions;
-module.exports.calculateFirstChallangeHash = calculateFirstChallangeHash;
-module.exports.toPtauPubKeyRpr = toPtauPubKeyRpr;
-module.exports.fromPtauPubKeyRpr = fromPtauPubKeyRpr;
-module.exports.keyFromBeacon = keyFromBeacon;
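Every module in this commit now threads an optional `logger` where a `verbose` flag used to be. Any object with the right method names satisfies it; a minimal sketch covering exactly the methods these diffs call:

```js
// Minimal logger covering every method these modules exercise.
const logger = {
    debug: (m) => console.log("[DEBUG]", m),   // chunk/progress messages
    info:  (m) => console.log("[INFO] ", m),   // hashes and summaries
    warn:  (m) => console.log("[WARN] ", m),   // e.g. missing phase2 sections
    error: (m) => console.error("[ERROR]", m), // verification failures
    log:   (m) => console.log(m)               // powersoftau_new still calls logger.log
};
```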

View File

@@ -1,54 +1,53 @@
-const Blake2b = require("blake2b-wasm");
-const utils = require("./powersoftau_utils");
-const keyPair = require("./keypair");
-const assert = require("assert");
-const crypto = require("crypto");
-const binFileUtils = require("./binfileutils");
-const ChaCha = require("ffjavascript").ChaCha;
-const misc = require("./misc");
+import Blake2b from "blake2b-wasm";
+import * as utils from "./powersoftau_utils.js";
+import * as keyPair from "./keypair.js";
+import crypto from "crypto";
+import * as binFileUtils from "./binfileutils.js";
+import { ChaCha } from "ffjavascript";
+import * as misc from "./misc.js";
 const sameRatio = misc.sameRatio;
-async function verifyContribution(curve, cur, prev) {
+async function verifyContribution(curve, cur, prev, logger) {
     let sr;
     if (cur.type == 1) {    // Verify the beacon.
         const beaconKey = utils.keyFromBeacon(curve, prev.nextChallange, cur.beaconHash, cur.numIterationsExp);
         if (!curve.G1.eq(cur.key.tau.g1_s, beaconKey.tau.g1_s)) {
-            console.log(`BEACON key (tauG1_s) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
+            if (logger) logger.error(`BEACON key (tauG1_s) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
             return false;
         }
         if (!curve.G1.eq(cur.key.tau.g1_sx, beaconKey.tau.g1_sx)) {
-            console.log(`BEACON key (tauG1_sx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
+            if (logger) logger.error(`BEACON key (tauG1_sx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
            return false;
         }
         if (!curve.G2.eq(cur.key.tau.g2_spx, beaconKey.tau.g2_spx)) {
-            console.log(`BEACON key (tauG2_spx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
+            if (logger) logger.error(`BEACON key (tauG2_spx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
             return false;
         }
         if (!curve.G1.eq(cur.key.alpha.g1_s, beaconKey.alpha.g1_s)) {
-            console.log(`BEACON key (alphaG1_s) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
+            if (logger) logger.error(`BEACON key (alphaG1_s) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
             return false;
         }
         if (!curve.G1.eq(cur.key.alpha.g1_sx, beaconKey.alpha.g1_sx)) {
-            console.log(`BEACON key (alphaG1_sx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
+            if (logger) logger.error(`BEACON key (alphaG1_sx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
             return false;
         }
         if (!curve.G2.eq(cur.key.alpha.g2_spx, beaconKey.alpha.g2_spx)) {
-            console.log(`BEACON key (alphaG2_spx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
+            if (logger) logger.error(`BEACON key (alphaG2_spx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
             return false;
         }
         if (!curve.G1.eq(cur.key.beta.g1_s, beaconKey.beta.g1_s)) {
-            console.log(`BEACON key (betaG1_s) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
+            if (logger) logger.error(`BEACON key (betaG1_s) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
             return false;
         }
         if (!curve.G1.eq(cur.key.beta.g1_sx, beaconKey.beta.g1_sx)) {
-            console.log(`BEACON key (betaG1_sx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
+            if (logger) logger.error(`BEACON key (betaG1_sx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
             return false;
         }
         if (!curve.G2.eq(cur.key.beta.g2_spx, beaconKey.beta.g2_spx)) {
-            console.log(`BEACON key (betaG2_spx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
+            if (logger) logger.error(`BEACON key (betaG2_spx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
             return false;
         }
     }
@@ -59,56 +58,57 @@ async function verifyContribution(curve, cur, prev) {
     sr = await sameRatio(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, cur.key.tau.g2_sp, cur.key.tau.g2_spx);
     if (sr !== true) {
-        console.log("INVALID key (tau) in challange #"+cur.id);
+        if (logger) logger.error("INVALID key (tau) in challange #"+cur.id);
         return false;
     }
     sr = await sameRatio(curve, cur.key.alpha.g1_s, cur.key.alpha.g1_sx, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx);
     if (sr !== true) {
-        console.log("INVALID key (alpha) in challange #"+cur.id);
+        if (logger) logger.error("INVALID key (alpha) in challange #"+cur.id);
         return false;
     }
     sr = await sameRatio(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, cur.key.beta.g2_sp, cur.key.beta.g2_spx);
     if (sr !== true) {
-        console.log("INVALID key (beta) in challange #"+cur.id);
+        if (logger) logger.error("INVALID key (beta) in challange #"+cur.id);
         return false;
     }
     sr = await sameRatio(curve, prev.tauG1, cur.tauG1, cur.key.tau.g2_sp, cur.key.tau.g2_spx);
     if (sr !== true) {
-        console.log("INVALID tau*G1. challange #"+cur.id+" It does not follow the previous contribution");
+        if (logger) logger.error("INVALID tau*G1. challange #"+cur.id+" It does not follow the previous contribution");
         return false;
     }
     sr = await sameRatio(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, prev.tauG2, cur.tauG2);
     if (sr !== true) {
-        console.log("INVALID tau*G2. challange #"+cur.id+" It does not follow the previous contribution");
+        if (logger) logger.error("INVALID tau*G2. challange #"+cur.id+" It does not follow the previous contribution");
         return false;
     }
     sr = await sameRatio(curve, prev.alphaG1, cur.alphaG1, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx);
     if (sr !== true) {
-        console.log("INVALID alpha*G1. challange #"+cur.id+" It does not follow the previous contribution");
+        if (logger) logger.error("INVALID alpha*G1. challange #"+cur.id+" It does not follow the previous contribution");
         return false;
     }
     sr = await sameRatio(curve, prev.betaG1, cur.betaG1, cur.key.beta.g2_sp, cur.key.beta.g2_spx);
     if (sr !== true) {
-        console.log("INVALID beta*G1. challange #"+cur.id+" It does not follow the previous contribution");
+        if (logger) logger.error("INVALID beta*G1. challange #"+cur.id+" It does not follow the previous contribution");
         return false;
     }
     sr = await sameRatio(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, prev.betaG2, cur.betaG2);
     if (sr !== true) {
-        console.log("INVALID beta*G2. challange #"+cur.id+"It does not follow the previous contribution");
+        if (logger) logger.error("INVALID beta*G2. challange #"+cur.id+"It does not follow the previous contribution");
         return false;
     }
+    if (logger) logger.info("Powers Of tau file OK!");
     return true;
 }
-async function verify(tauFilename, verbose) {
+export default async function verify(tauFilename, logger) {
     let sr;
     await Blake2b.ready();
@@ -116,22 +116,22 @@ async function verify(tauFilename, verbose) {
     const {curve, power, ceremonyPower} = await utils.readPTauHeader(fd, sections);
     const contrs = await utils.readContributions(fd, curve, sections);
-    if (verbose) console.log("power: 2**" + power);
+    if (logger) logger.debug("power: 2**" + power);
     // Verify Last contribution
-    if (verbose) console.log("Computing initial contribution hash");
+    if (logger) logger.debug("Computing initial contribution hash");
     const initialContribution = {
         tauG1: curve.G1.g,
         tauG2: curve.G2.g,
         alphaG1: curve.G1.g,
         betaG1: curve.G1.g,
         betaG2: curve.G2.g,
-        nextChallange: utils.calculateFirstChallangeHash(curve, ceremonyPower, verbose),
+        nextChallange: utils.calculateFirstChallangeHash(curve, ceremonyPower, logger),
         responseHash: Blake2b(64).digest()
     };
     if (contrs.length == 0) {
-        console.log("This file has no contribution! It cannot be used in production");
+        if (logger) logger.error("This file has no contribution! It cannot be used in production");
         return false;
     }
@@ -142,8 +142,8 @@ async function verify(tauFilename, verbose) {
         prevContr = initialContribution;
     }
     const curContr = contrs[contrs.length-1];
-    if (verbose) console.log("Validating contribution #"+contrs[contrs.length-1].id);
-    const res = await verifyContribution(curve, curContr,prevContr, verbose);
+    if (logger) logger.debug("Validating contribution #"+contrs[contrs.length-1].id);
+    const res = await verifyContribution(curve, curContr, prevContr, logger);
     if (!res) return false;
@@ -155,71 +155,71 @@ async function verify(tauFilename, verbose) {
     // await test();
     // Verify Section tau*G1
-    if (verbose) console.log("Verifying powers in tau*G1 section");
-    const rTau1 = await processSection(2, "G1", "tauG1", (1 << power)*2-1, [0, 1]);
+    if (logger) logger.debug("Verifying powers in tau*G1 section");
+    const rTau1 = await processSection(2, "G1", "tauG1", (1 << power)*2-1, [0, 1], logger);
     sr = await sameRatio(curve, rTau1.R1, rTau1.R2, curve.G2.g, curContr.tauG2);
     if (sr !== true) {
-        console.log("tauG1 section. Powers do not match");
+        if (logger) logger.error("tauG1 section. Powers do not match");
         return false;
     }
     if (!curve.G1.eq(curve.G1.g, rTau1.singularPoints[0])) {
-        console.log("First element of tau*G1 section must be the generator");
+        if (logger) logger.error("First element of tau*G1 section must be the generator");
        return false;
     }
     if (!curve.G1.eq(curContr.tauG1, rTau1.singularPoints[1])) {
-        console.log("Second element of tau*G1 section does not match the one in the contribution section");
+        if (logger) logger.error("Second element of tau*G1 section does not match the one in the contribution section");
         return false;
     }
     // await test();
     // Verify Section tau*G2
-    if (verbose) console.log("Verifying powers in tau*G2 section");
-    const rTau2 = await processSection(3, "G2", "tauG2", 1 << power, [0, 1]);
+    if (logger) logger.debug("Verifying powers in tau*G2 section");
+    const rTau2 = await processSection(3, "G2", "tauG2", 1 << power, [0, 1], logger);
     sr = await sameRatio(curve, curve.G1.g, curContr.tauG1, rTau2.R1, rTau2.R2);
     if (sr !== true) {
-        console.log("tauG2 section. Powers do not match");
+        if (logger) logger.error("tauG2 section. Powers do not match");
         return false;
     }
     if (!curve.G2.eq(curve.G2.g, rTau2.singularPoints[0])) {
-        console.log("First element of tau*G2 section must be the generator");
+        if (logger) logger.error("First element of tau*G2 section must be the generator");
         return false;
     }
     if (!curve.G2.eq(curContr.tauG2, rTau2.singularPoints[1])) {
-        console.log("Second element of tau*G2 section does not match the one in the contribution section");
+        if (logger) logger.error("Second element of tau*G2 section does not match the one in the contribution section");
         return false;
     }
     // Verify Section alpha*tau*G1
-    if (verbose) console.log("Verifying powers in alpha*tau*G1 section");
-    const rAlphaTauG1 = await processSection(4, "G1", "alphatauG1", 1 << power, [0]);
+    if (logger) logger.debug("Verifying powers in alpha*tau*G1 section");
+    const rAlphaTauG1 = await processSection(4, "G1", "alphatauG1", 1 << power, [0], logger);
     sr = await sameRatio(curve, rAlphaTauG1.R1, rAlphaTauG1.R2, curve.G2.g, curContr.tauG2);
     if (sr !== true) {
-        console.log("alphaTauG1 section. Powers do not match");
+        if (logger) logger.error("alphaTauG1 section. Powers do not match");
         return false;
     }
     if (!curve.G1.eq(curContr.alphaG1, rAlphaTauG1.singularPoints[0])) {
-        console.log("First element of alpha*tau*G1 section (alpha*G1) does not match the one in the contribution section");
+        if (logger) logger.error("First element of alpha*tau*G1 section (alpha*G1) does not match the one in the contribution section");
         return false;
     }
     // Verify Section beta*tau*G1
-    if (verbose) console.log("Verifying powers in beta*tau*G1 section");
-    const rBetaTauG1 = await processSection(5, "G1", "betatauG1", 1 << power, [0]);
+    if (logger) logger.debug("Verifying powers in beta*tau*G1 section");
+    const rBetaTauG1 = await processSection(5, "G1", "betatauG1", 1 << power, [0], logger);
     sr = await sameRatio(curve, rBetaTauG1.R1, rBetaTauG1.R2, curve.G2.g, curContr.tauG2);
     if (sr !== true) {
-        console.log("betaTauG1 section. Powers do not match");
+        if (logger) logger.error("betaTauG1 section. Powers do not match");
         return false;
     }
     if (!curve.G1.eq(curContr.betaG1, rBetaTauG1.singularPoints[0])) {
-        console.log("First element of beta*tau*G1 section (beta*G1) does not match the one in the contribution section");
+        if (logger) logger.error("First element of beta*tau*G1 section (beta*G1) does not match the one in the contribution section");
         return false;
     }
     //Verify Beta G2
-    const betaG2 = await processSectionBetaG2();
+    const betaG2 = await processSectionBetaG2(logger);
     if (!curve.G2.eq(curContr.betaG2, betaG2)) {
-        console.log("betaG2 element in betaG2 section does not match the one in the contribution section");
+        if (logger) logger.error("betaG2 element in betaG2 section does not match the one in the contribution section");
         return false;
     }
@@ -228,14 +228,11 @@ async function verify(tauFilename, verbose) {
     // Check the nextChallangeHash
     if (!misc.hashIsEqual(nextContributionHash,curContr.nextChallange)) {
-        console.log("Hash of the values does not match the next challange of the last contributor in the contributions section");
+        if (logger) logger.error("Hash of the values does not match the next challange of the last contributor in the contributions section");
         return false;
     }
-    if (verbose) {
-        console.log("Next challange hash: ");
-        console.log(misc.formatHash(nextContributionHash));
-    }
+    if (logger) logger.info(misc.formatHash(nextContributionHash, "Next challange hash: "));
     // Verify Previous contributions
@@ -243,24 +240,26 @@ async function verify(tauFilename, verbose) {
     for (let i = contrs.length-2; i>=0; i--) {
         const curContr = contrs[i];
         const prevContr =  (i>0) ? contrs[i-1] : initialContribution;
-        const res = await verifyContribution(curve, curContr, prevContr);
+        const res = await verifyContribution(curve, curContr, prevContr, logger);
         if (!res) return false;
-        printContribution(curContr, prevContr);
+        printContribution(curContr, prevContr, logger);
     }
-    console.log("-----------------------------------------------------");
+    if (logger) logger.info("-----------------------------------------------------");
     if ((!sections[12]) || (!sections[13]) || (!sections[14]) || (!sections[15])) {
-        console.log("this file does not contain phase2 precalculated values. Please run: ");
-        console.log("   snarkjs \"powersoftau preparephase2\" to prepare this file to be used in the phase2 ceremony." );
+        if (logger) logger.warn(
+            "this file does not contain phase2 precalculated values. Please run: \n" +
+            "   snarkjs \"powersoftau preparephase2\" to prepare this file to be used in the phase2 ceremony."
+        );
     } else {
         let res;
-        res = await verifyLagrangeEvaluations("G1", 2, 12, "tauG1");
+        res = await verifyLagrangeEvaluations("G1", 2, 12, "tauG1", logger);
         if (!res) return false;
-        res = await verifyLagrangeEvaluations("G2", 3, 13, "tauG2");
+        res = await verifyLagrangeEvaluations("G2", 3, 13, "tauG2", logger);
         if (!res) return false;
-        res = await verifyLagrangeEvaluations("G1", 4, 14, "alphaTauG1");
+        res = await verifyLagrangeEvaluations("G1", 4, 14, "alphaTauG1", logger);
        if (!res) return false;
-        res = await verifyLagrangeEvaluations("G1", 5, 15, "betaTauG1");
+        res = await verifyLagrangeEvaluations("G1", 5, 15, "betaTauG1", logger);
         if (!res) return false;
     }
@@ -269,11 +268,11 @@ async function verify(tauFilename, verbose) {
     return true;
     function printContribution(curContr, prevContr) {
-        console.log("-----------------------------------------------------");
-        console.log(`Contribution #${curContr.id}: ${curContr.name ||""}`);
+        if (!logger) return;
+        logger.info("-----------------------------------------------------");
+        logger.info(`Contribution #${curContr.id}: ${curContr.name ||""}`);
-        console.log("\tNext Challange");
-        console.log(misc.formatHash(curContr.nextChallange));
+        logger.info(misc.formatHash(curContr.nextChallange, "Next Challange: "));
         const buffV  = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3);
         utils.toPtauPubKeyRpr(buffV, 0, curve, curContr.key, false);
@@ -283,26 +282,30 @@ async function verify(tauFilename, verbose) {
         responseHasher.update(buffV);
         const responseHash = responseHasher.digest();
-        console.log("\tResponse Hash");
-        console.log(misc.formatHash(responseHash));
+        logger.info(misc.formatHash(responseHash, "Response Hash:"));
-        console.log("\tBased on challange");
-        console.log(misc.formatHash(prevContr.nextChallange));
+        logger.info(misc.formatHash(prevContr.nextChallange, "Response Hash:"));
        if (curContr.type == 1) {
-            console.log(`Beacon generator: ${misc.byteArray2hex(curContr.beaconHash)}`);
-            console.log(`Beacon iterations Exp: ${curContr.numIterationsExp}`);
+            logger.info(`Beacon generator: ${misc.byteArray2hex(curContr.beaconHash)}`);
+            logger.info(`Beacon iterations Exp: ${curContr.numIterationsExp}`);
         }
     }
-    async function processSectionBetaG2() {
+    async function processSectionBetaG2(logger) {
         const G = curve.G2;
         const sG = G.F.n8*2;
         const buffUv = new Uint8Array(sG);
-        if (!sections[6])  assert(false, "File has no BetaG2 section");
-        if (sections[6].length>1) assert(false, "File has more than one GetaG2 section");
+        if (!sections[6])  {
+            logger.error("File has no BetaG2 section");
+            throw new Error("File has no BetaG2 section");
+        }
+        if (sections[6].length>1) {
+            logger.error("File has no BetaG2 section");
+            throw new Error("File has more than one GetaG2 section");
+        }
         fd.pos = sections[6][0].p;
         const buff = await fd.read(sG);
@@ -314,7 +317,7 @@ async function verify(tauFilename, verbose) {
         return P;
     }
-    async function processSection(idSection, groupName, sectionName, nPoints, singularPointIndexes) {
+    async function processSection(idSection, groupName, sectionName, nPoints, singularPointIndexes, logger) {
         const MAX_CHUNK_SIZE = 1<<16;
         const G = curve[groupName];
         const sG = G.F.n8*2;
@@ -328,7 +331,7 @@ async function verify(tauFilename, verbose) {
         let lastBase = G.zero;
         for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
-            if ((verbose)&&i) console.log(`${sectionName}: ` + i);
+            if (logger) logger.debug(`points relations: ${sectionName}: ${i}/${nPoints} `);
             const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
             const bases = await fd.read(n*sG);
@@ -374,9 +377,9 @@ async function verify(tauFilename, verbose) {
     }
-    async function verifyLagrangeEvaluations(gName, tauSection, lagrangeSection, sectionName) {
-        if (verbose) console.log(`Verifying phase2 calculated values ${sectionName}...`);
+    async function verifyLagrangeEvaluations(gName, tauSection, lagrangeSection, sectionName, logger) {
+        if (logger) logger.debug(`Verifying phase2 calculated values ${sectionName}...`);
         const G = curve[gName];
         const sG = G.F.n8*2;
@@ -396,7 +399,7 @@ async function verify(tauFilename, verbose) {
         return true;
         async function verifyPower(p) {
-            if (verbose) console.log(`Power ${p}...`);
+            if (logger) logger.debug(`Power ${p}...`);
             const n8r = curve.Fr.n8;
             const nPoints = 1<<p;
             let buff_r = new Uint8Array(nPoints * n8r);
@@ -425,7 +428,7 @@ async function verify(tauFilename, verbose) {
             const resLagrange = await G.multiExpAffine(buffG, buff_r);
             if (!G.eq(resTau, resLagrange)) {
-                console.log("Phase2 caclutation does not match with powers of tau");
+                if (logger) logger.error("Phase2 caclutation does not match with powers of tau");
                 return false;
             }
@@ -433,5 +436,3 @@ async function verify(tauFilename, verbose) {
         }
    }
 }
-module.exports = verify;
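`verify` keeps its boolean contract while routing all diagnostics through the logger, so programmatic callers can branch on the result (module path illustrative):

```js
import verify from "./src/powersoftau_verify.js"; // path assumed

const ok = await verify("pot12_final.ptau", console);
if (!ok) process.exit(1); // file failed verification
```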

View File

@@ -1,211 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").ZqField;
/*
const PolF = new PolField(new ZqField(bn128.r));
const G1 = bn128.G1;
const G2 = bn128.G2;
*/
module.exports = function genProof(vk_proof, witness, verbose) {
const proof = {};
const r = PolF.F.random();
const s = PolF.F.random();
/* Uncomment to generate a deterministic proof to debug
const r = PolF.F.zero;
const s = PolF.F.zero;
*/
proof.pi_a = G1.zero;
proof.pi_b = G2.zero;
proof.pi_c = G1.zero;
let pib1 = G1.zero;
// Skip public entries and the "1" signal that are forced by the verifier
for (let s= 0; s< vk_proof.nVars; s++) {
// pi_a = pi_a + A[s] * witness[s];
proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( vk_proof.A[s], witness[s]));
// pi_b = pi_b + B[s] * witness[s];
proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( vk_proof.B2[s], witness[s]));
pib1 = G1.add( pib1, G1.timesScalar( vk_proof.B1[s], witness[s]));
if ((verbose)&&(s%1000 == 1)) console.log("A, B1, B2: ", s);
}
for (let s= vk_proof.nPublic+1; s< vk_proof.nVars; s++) {
// pi_a = pi_a + A[s] * witness[s];
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.C[s], witness[s]));
if ((verbose)&&(s%1000 == 1)) console.log("C: ", s);
}
proof.pi_a = G1.add( proof.pi_a, vk_proof.vk_alpha_1 );
proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( vk_proof.vk_delta_1, r ));
proof.pi_b = G2.add( proof.pi_b, vk_proof.vk_beta_2 );
proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( vk_proof.vk_delta_2, s ));
pib1 = G1.add( pib1, vk_proof.vk_beta_1 );
pib1 = G1.add( pib1, G1.timesScalar( vk_proof.vk_delta_1, s ));
const h = calculateH(vk_proof, witness);
// proof.pi_c = G1.toAffine(proof.pi_c);
// console.log("pi_onlyc", proof.pi_c);
for (let i = 0; i < h.length; i++) {
// console.log(i + "->" + h[i].toString());
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.hExps[i], h[i]));
if ((verbose)&&(i%1000 == 1)) console.log("H: ", i);
}
// proof.pi_c = G1.toAffine(proof.pi_c);
// console.log("pi_candh", proof.pi_c);
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( proof.pi_a, s ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( pib1, r ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.vk_delta_1, PolF.F.neg(PolF.F.mul(r,s) )));
const publicSignals = witness.slice(1, vk_proof.nPublic+1);
proof.pi_a = G1.toAffine(proof.pi_a);
proof.pi_b = G2.toAffine(proof.pi_b);
proof.pi_c = G1.toAffine(proof.pi_c);
proof.protocol = "groth";
return {proof, publicSignals};
};
/*
// Old Method. (It's clear for academic understanding)
function calculateH(vk_proof, witness) {
const F = PolF.F;
const m = vk_proof.domainSize;
const polA_T = new Array(m).fill(PolF.F.zero);
const polB_T = new Array(m).fill(PolF.F.zero);
const polC_T = new Array(m).fill(PolF.F.zero);
for (let s=0; s<vk_proof.nVars; s++) {
for (let c in vk_proof.polsA[s]) {
polA_T[c] = F.add(polA_T[c], F.mul(witness[s], vk_proof.polsA[s][c]));
}
for (let c in vk_proof.polsB[s]) {
polB_T[c] = F.add(polB_T[c], F.mul(witness[s], vk_proof.polsB[s][c]));
}
for (let c in vk_proof.polsC[s]) {
polC_T[c] = F.add(polC_T[c], F.mul(witness[s], vk_proof.polsC[s][c]));
}
}
const polA_S = PolF.ifft(polA_T);
const polB_S = PolF.ifft(polB_T);
const polAB_S = PolF.mul(polA_S, polB_S);
const polC_S = PolF.ifft(polC_T);
const polABC_S = PolF.sub(polAB_S, polC_S);
const H_S = polABC_S.slice(m);
return H_S;
}
*/
function calculateH(vk_proof, witness) {
const F = PolF.F;
const m = vk_proof.domainSize;
const polA_T = new Array(m).fill(PolF.F.zero);
const polB_T = new Array(m).fill(PolF.F.zero);
for (let i=0; i<vk_proof.ccoefs.length; i++) {
const coef = vk_proof.ccoefs[i];
if (coef.matrix == 0) {
polA_T[coef.constraint] = F.add( polA_T[coef.constraint], F.mul(witness[ coef.signal ], coef.value) );
} else if (coef.matrix == 1) {
polB_T[coef.constraint] = F.add( polB_T[coef.constraint], F.mul(witness[ coef.signal ], coef.value) );
}
}
/*
for (let s=0; s<vk_proof.nVars; s++) {
for (let c in vk_proof.polsA[s]) {
polA_T[c] = F.add(polA_T[c], F.mul(witness[s], vk_proof.polsA[s][c]));
}
for (let c in vk_proof.polsB[s]) {
polB_T[c] = F.add(polB_T[c], F.mul(witness[s], vk_proof.polsB[s][c]));
}
}
*/
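    // For a satisfying witness the constraints hold on the evaluation domain, so
    // C can be recovered pointwise as A*B instead of being accumulated from a
    // third matrix.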
const polC_T = new Array(polA_T.length);
for (let i=0; i<polA_T.length; i++) {
polC_T[i] = PolF.F.mul(polA_T[i], polB_T[i]);
}
const polA_S = PolF.ifft(polA_T);
const polB_S = PolF.ifft(polB_T);
const polC_S = PolF.ifft(polC_T);
    // Scaling the i-th coefficient by w^i evaluates the polynomial at w*x: a shift
    // in one domain is a per-element scaling by [1, w, w^2, ..., w^(m-1)] in the other.
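    // (If p(x) = sum_i a_i*x^i, then p(w*x) = sum_i (a_i*w^i)*x^i. With w a primitive
    // 2m-th root of unity, the FFTs below therefore evaluate A, B and C on the "odd"
    // coset w*<domain>, where the vanishing polynomial Z(x) = x^m - 1 never vanishes.)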
const r = PolF.log2(m)+1;
PolF._setRoots(r);
for (let i=0; i<polA_S.length; i++) {
polA_S[i] = PolF.F.mul( polA_S[i], PolF.roots[r][i]);
polB_S[i] = PolF.F.mul( polB_S[i], PolF.roots[r][i]);
polC_S[i] = PolF.F.mul( polC_S[i], PolF.roots[r][i]);
}
const polA_Todd = PolF.fft(polA_S);
const polB_Todd = PolF.fft(polB_S);
const polC_Todd = PolF.fft(polC_S);
const polABC_Todd = new Array(polA_S.length);
for (let i=0; i<polA_S.length; i++) {
polABC_Todd[i] = PolF.F.sub(PolF.F.mul( polA_Todd[i], polB_Todd[i]), polC_Todd[i]);
}
return polABC_Todd;
}


@ -1,176 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").ZqField;
const createKeccakHash = require("keccak");
const utils = require("ffjavascript").utils;
const PolF = new PolField(new ZqField(bn128.r));
const G1 = bn128.G1;
const G2 = bn128.G2;
module.exports = function genProof(vk_proof, witness) {
const proof = {};
const r = PolF.F.random();
const s = PolF.F.random();
/* Uncomment to generate a deterministic proof to debug
const r = PolF.F.zero;
const s = PolF.F.zero;
*/
proof.pi_a = G1.zero;
proof.pi_b = G2.zero;
proof.pi_c = G1.zero;
let pib1 = G1.zero;
let piadelta = G1.zero;
    // A, B2, B1 and Adelta accumulate over all signals. The loop variable is named i
    // so it does not shadow the blinding scalar s used after the loops.
    for (let i = 0; i < vk_proof.nVars; i++) {
        // pi_a = pi_a + A[i] * witness[i];
        proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( vk_proof.A[i], witness[i]));
        // pi_b = pi_b + B2[i] * witness[i];
        proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( vk_proof.B2[i], witness[i]));
        piadelta = G1.add( piadelta, G1.timesScalar( vk_proof.Adelta[i], witness[i]));
        pib1 = G1.add( pib1, G1.timesScalar( vk_proof.B1[i], witness[i]));
    }
    // C skips the public entries and the "1" signal, which are forced by the verifier
    for (let i = vk_proof.nPublic+1; i < vk_proof.nVars; i++) {
        // pi_c = pi_c + C[i] * witness[i];
        proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.C[i], witness[i]));
    }
proof.pi_a = G1.add( proof.pi_a, vk_proof.vk_alpha_1 );
proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( G1.g, r ));
piadelta = G1.add( piadelta, vk_proof.vk_alphadelta_1);
piadelta = G1.add( piadelta, G1.timesScalar( vk_proof.vk_delta_1, r ));
proof.pi_b = G2.add( proof.pi_b, vk_proof.vk_beta_2 );
proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( G2.g, s ));
pib1 = G1.add( pib1, vk_proof.vk_beta_1 );
pib1 = G1.add( pib1, G1.timesScalar( G1.g, s ));
proof.pi_a = G1.toAffine(proof.pi_a);
proof.pi_b = G2.toAffine(proof.pi_b);
const buff = Buffer.concat([
utils.beInt2Buff(proof.pi_a[0],32),
utils.beInt2Buff(proof.pi_a[1],32),
utils.beInt2Buff(proof.pi_b[0][0],32),
utils.beInt2Buff(proof.pi_b[0][1],32),
utils.beInt2Buff(proof.pi_b[1][0],32),
utils.beInt2Buff(proof.pi_b[1][1],32)
]);
const h1buff = createKeccakHash("keccak256").update(buff).digest();
const h2buff = createKeccakHash("keccak256").update(h1buff).digest();
const h1 = utils.beBuff2int(h1buff);
const h2 = utils.beBuff2int(h2buff);
// const h1 = PolF.F.zero;
// const h2 = PolF.F.zero;
// console.log(h1.toString());
// console.log(h2.toString());
const h = calculateH(vk_proof, witness);
// proof.pi_c = G1.toAffine(proof.pi_c);
// console.log("pi_onlyc", proof.pi_c);
for (let i = 0; i < h.length; i++) {
// console.log(i + "->" + h[i].toString());
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.hExps[i], h[i]));
}
// proof.pi_c = G1.toAffine(proof.pi_c);
// console.log("pi_candh", proof.pi_c);
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( proof.pi_a, s ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( pib1, r ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( G1.g, PolF.F.neg(PolF.F.mul(r,s) )));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( piadelta, h2 ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( pib1, h1 ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.vk_delta_1, PolF.F.mul(h1,h2)));
const publicSignals = witness.slice(1, vk_proof.nPublic+1);
proof.pi_c = G1.toAffine(proof.pi_c);
proof.protocol = "kimleeoh";
return {proof, publicSignals};
};
function calculateH(vk_proof, witness) {
const F = PolF.F;
const m = vk_proof.domainSize;
const polA_T = new Array(m).fill(PolF.F.zero);
const polB_T = new Array(m).fill(PolF.F.zero);
const polC_T = new Array(m).fill(PolF.F.zero);
for (let s=0; s<vk_proof.nVars; s++) {
for (let c in vk_proof.polsA[s]) {
polA_T[c] = F.add(polA_T[c], F.mul(witness[s], vk_proof.polsA[s][c]));
}
for (let c in vk_proof.polsB[s]) {
polB_T[c] = F.add(polB_T[c], F.mul(witness[s], vk_proof.polsB[s][c]));
}
for (let c in vk_proof.polsC[s]) {
polC_T[c] = F.add(polC_T[c], F.mul(witness[s], vk_proof.polsC[s][c]));
}
}
const polA_S = PolF.ifft(polA_T);
const polB_S = PolF.ifft(polB_T);
const polAB_S = PolF.mul(polA_S, polB_S);
const polC_S = PolF.ifft(polC_T);
const polABC_S = PolF.sub(polAB_S, polC_S);
const H_S = polABC_S.slice(m);
return H_S;
}


@ -1,209 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").ZqField;
const PolF = new PolField(new ZqField(bn128.r));
const G1 = bn128.G1;
const G2 = bn128.G2;
module.exports = function genProof(vk_proof, witness) {
const proof = {};
const d1 = PolF.F.random();
const d2 = PolF.F.random();
const d3 = PolF.F.random();
proof.pi_a = G1.zero;
proof.pi_ap = G1.zero;
proof.pi_b = G2.zero;
proof.pi_bp = G1.zero;
proof.pi_c = G1.zero;
proof.pi_cp = G1.zero;
proof.pi_kp = G1.zero;
proof.pi_h = G1.zero;
    // A and Ap skip the public entries and the "1" signal, which are forced by the verifier
    for (let s = vk_proof.nPublic+1; s < vk_proof.nVars; s++) {
        // pi_a = pi_a + A[s] * witness[s];
        proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( vk_proof.A[s], witness[s]));
        // pi_ap = pi_ap + Ap[s] * witness[s];
        proof.pi_ap = G1.add( proof.pi_ap, G1.timesScalar( vk_proof.Ap[s], witness[s]));
    }
    // B, Bp, C, Cp and Kp accumulate over all signals
    for (let s = 0; s < vk_proof.nVars; s++) {
        // pi_b = pi_b + B[s] * witness[s];
        proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( vk_proof.B[s], witness[s]));
        // pi_bp = pi_bp + Bp[s] * witness[s];
        proof.pi_bp = G1.add( proof.pi_bp, G1.timesScalar( vk_proof.Bp[s], witness[s]));
        // pi_c = pi_c + C[s] * witness[s];
        proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.C[s], witness[s]));
        // pi_cp = pi_cp + Cp[s] * witness[s];
        proof.pi_cp = G1.add( proof.pi_cp, G1.timesScalar( vk_proof.Cp[s], witness[s]));
        // pi_kp = pi_kp + Kp[s] * witness[s];
        proof.pi_kp = G1.add( proof.pi_kp, G1.timesScalar( vk_proof.Kp[s], witness[s]));
    }
proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( vk_proof.A[vk_proof.nVars], d1));
proof.pi_ap = G1.add( proof.pi_ap, G1.timesScalar( vk_proof.Ap[vk_proof.nVars], d1));
proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( vk_proof.B[vk_proof.nVars], d2));
proof.pi_bp = G1.add( proof.pi_bp, G1.timesScalar( vk_proof.Bp[vk_proof.nVars], d2));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.C[vk_proof.nVars], d3));
proof.pi_cp = G1.add( proof.pi_cp, G1.timesScalar( vk_proof.Cp[vk_proof.nVars], d3));
proof.pi_kp = G1.add( proof.pi_kp, G1.timesScalar( vk_proof.Kp[vk_proof.nVars ], d1));
proof.pi_kp = G1.add( proof.pi_kp, G1.timesScalar( vk_proof.Kp[vk_proof.nVars+1], d2));
proof.pi_kp = G1.add( proof.pi_kp, G1.timesScalar( vk_proof.Kp[vk_proof.nVars+2], d3));
/*
let polA = [];
let polB = [];
let polC = [];
for (let s= 0; s< vk_proof.nVars; s++) {
polA = PolF.add(
polA,
PolF.mul(
vk_proof.polsA[s],
[witness[s]] ));
polB = PolF.add(
polB,
PolF.mul(
vk_proof.polsB[s],
[witness[s]] ));
polC = PolF.add(
polC,
PolF.mul(
vk_proof.polsC[s],
[witness[s]] ));
}
let polFull = PolF.sub(PolF.mul( polA, polB), polC);
const h = PolF.div(polFull, vk_proof.polZ );
*/
const h = calculateH(vk_proof, witness, d1, d2, d3);
// console.log(h.length + "/" + vk_proof.hExps.length);
for (let i = 0; i < h.length; i++) {
proof.pi_h = G1.add( proof.pi_h, G1.timesScalar( vk_proof.hExps[i], h[i]));
}
proof.pi_a = G1.toAffine(proof.pi_a);
proof.pi_b = G2.toAffine(proof.pi_b);
proof.pi_c = G1.toAffine(proof.pi_c);
proof.pi_ap = G1.toAffine(proof.pi_ap);
proof.pi_bp = G1.toAffine(proof.pi_bp);
proof.pi_cp = G1.toAffine(proof.pi_cp);
proof.pi_kp = G1.toAffine(proof.pi_kp);
proof.pi_h = G1.toAffine(proof.pi_h);
// proof.h=h;
proof.protocol = "original";
const publicSignals = witness.slice(1, vk_proof.nPublic+1);
return {proof, publicSignals};
};
function calculateH(vk_proof, witness, d1, d2, d3) {
const F = PolF.F;
const m = vk_proof.domainSize;
const polA_T = new Array(m).fill(PolF.F.zero);
const polB_T = new Array(m).fill(PolF.F.zero);
const polC_T = new Array(m).fill(PolF.F.zero);
for (let s=0; s<vk_proof.nVars; s++) {
for (let c in vk_proof.polsA[s]) {
polA_T[c] = F.add(polA_T[c], F.mul(witness[s], vk_proof.polsA[s][c]));
}
for (let c in vk_proof.polsB[s]) {
polB_T[c] = F.add(polB_T[c], F.mul(witness[s], vk_proof.polsB[s][c]));
}
for (let c in vk_proof.polsC[s]) {
polC_T[c] = F.add(polC_T[c], F.mul(witness[s], vk_proof.polsC[s][c]));
}
}
const polA_S = PolF.ifft(polA_T);
const polB_S = PolF.ifft(polB_T);
const polAB_S = PolF.mul(polA_S, polB_S);
const polC_S = PolF.ifft(polC_T);
const polABC_S = PolF.sub(polAB_S, polC_S);
const polZ_S = new Array(m+1).fill(F.zero);
polZ_S[m] = F.one;
polZ_S[0] = F.neg(F.one);
let H_S = PolF.div(polABC_S, polZ_S);
/*
const H2S = PolF.mul(H_S, polZ_S);
if (PolF.equals(H2S, polABC_S)) {
console.log("Is Divisible!");
} else {
console.log("ERROR: Not divisible!");
}
*/
/* add coefficients of the polynomial (d2*A + d1*B - d3) + d1*d2*Z */
H_S = PolF.extend(H_S, m+1);
for (let i=0; i<m; i++) {
const d2A = PolF.F.mul(d2, polA_S[i]);
const d1B = PolF.F.mul(d1, polB_S[i]);
H_S[i] = PolF.F.add(H_S[i], PolF.F.add(d2A, d1B));
}
H_S[0] = PolF.F.sub(H_S[0], d3);
// Z = x^m -1
const d1d2 = PolF.F.mul(d1, d2);
H_S[m] = PolF.F.add(H_S[m], d1d2);
H_S[0] = PolF.F.sub(H_S[0], d1d2);
H_S = PolF.reduce(H_S);
return H_S;
}


@ -1,3 +1,4 @@
-module.exports.print = require("./r1cs_print");
-module.exports.info = require("./r1cs_info");
-module.exports.exportJson = require("./r1cs_export_json");
+export {default as print} from "./r1cs_print.js";
+export {default as info} from "./r1cs_info.js";
+export {default as exportJson} from "./r1cs_export_json.js";


@ -1,14 +1,8 @@
-const {stringifyBigInts} = require("ffjavascript").utils;
-const fs = require("fs");
-const readZKey = require("./zkey_utils").read;
-const loadR1cs = require("r1csfile").load;
+import {load as loadR1cs} from "r1csfile";

-module.exports = r1csExportJson;
-
-async function r1csExportJson(r1csFileName, jsonFileName, verbose) {
+export default async function r1csExportJson(r1csFileName, logger) {
     const cir = await loadR1cs(r1csFileName, true, true);
-    const S = JSON.stringify(stringifyBigInts(cir), null, 1);
-    await fs.promises.writeFile(jsonFileName, S);
+    return cir;
 }


@ -1,25 +1,25 @@
-const Scalar = require("ffjavascript").Scalar;
-const loadR1cs = require("r1csfile").load;
-
-module.exports = r1csInfo;
+import { Scalar } from "ffjavascript";
+import {load as loadR1cs} from "r1csfile";

 const bls12381r = Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
-const bn128r = Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617", 16);
+const bn128r = Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");

-async function r1csInfo(r1csName) {
+export default async function r1csInfo(r1csName, logger) {
     const cir = await loadR1cs(r1csName);
     if (Scalar.eq(cir.prime, bn128r)) {
-        console.log("# Curve: bn-128");
+        if (logger) logger.info("Curve: bn-128");
     } else if (Scalar.eq(cir.prime, bls12381r)) {
-        console.log("# Curve: bls12-381");
+        if (logger) logger.info("Curve: bls12-381");
     } else {
-        console.log(`# Unknown Curve. Prime: ${Scalar.toString(cir.r)}`);
+        if (logger) logger.info(`Unknown Curve. Prime: ${Scalar.toString(cir.prime)}`);
     }
-    console.log(`# Wires: ${cir.nVars}`);
-    console.log(`# Constraints: ${cir.nConstraints}`);
-    console.log(`# Private Inputs: ${cir.nPrvInputs}`);
-    console.log(`# Public Inputs: ${cir.nPubInputs}`);
-    console.log(`# Outputs: ${cir.nOutputs}`);
+    if (logger) logger.info(`# of Wires: ${cir.nVars}`);
+    if (logger) logger.info(`# of Constraints: ${cir.nConstraints}`);
+    if (logger) logger.info(`# of Private Inputs: ${cir.nPrvInputs}`);
+    if (logger) logger.info(`# of Public Inputs: ${cir.nPubInputs}`);
+    if (logger) logger.info(`# of Outputs: ${cir.nOutputs}`);
+    return cir;
 }
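The refactored r1csInfo returns the parsed header and reports through an optional logger instead of the console. A minimal usage sketch (treating console as the logger is an assumption; any object with info/error methods works):

```js
import r1csInfo from "./r1cs_info.js";

async function main() {
    // console provides .info() and .error(), so it can stand in for a logger here
    const cir = await r1csInfo("circuit.r1cs", console);
    console.log(`constraints: ${cir.nConstraints}`);
}
main();
```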


@ -1,5 +1,5 @@
-module.exports = function r1csPrint(r1cs, syms) {
+export default function r1csPrint(r1cs, syms, logger) {
     for (let i=0; i<r1cs.constraints.length; i++) {
         printCostraint(r1cs.constraints[i]);
     }
@ -21,7 +21,7 @@ module.exports = function r1csPrint(r1cs, syms) {
         return S;
     };
     const S = `[ ${lc2str(c[0])} ] * [ ${lc2str(c[1])} ] - [ ${lc2str(c[2])} ] = 0`;
-    console.log(S);
+    if (logger) logger.info(S);
     }
-};
+}


@ -1,251 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").ZqField;
const G1 = bn128.G1;
const G2 = bn128.G2;
const PolF = new PolField(new ZqField(bn128.r));
const F = new ZqField(bn128.r);
module.exports = function setup(circuit, verbose) {
const setup = {
vk_proof : {
protocol: "groth16",
nVars: circuit.nVars,
nPublic: circuit.nPubInputs + circuit.nOutputs
},
toxic: {}
};
setup.vk_proof.q = bn128.q;
setup.vk_proof.r = bn128.r;
setup.vk_proof.domainBits = PolF.log2(circuit.nConstraints + circuit.nPubInputs + circuit.nOutputs +1 -1) +1;
setup.vk_proof.domainSize = 1 << setup.vk_proof.domainBits;
calculatePolinomials(setup, circuit);
setup.toxic.t = F.random();
calculateEncriptedValuesAtT(setup, circuit, verbose);
setup.vk_verifier = {
protocol: setup.vk_proof.protocol,
nPublic: setup.vk_proof.nPublic,
IC: setup.vk_proof.IC,
vk_alpha_1: setup.vk_proof.vk_alpha_1,
vk_beta_2: setup.vk_proof.vk_beta_2,
vk_gamma_2: setup.vk_proof.vk_gamma_2,
vk_delta_2: setup.vk_proof.vk_delta_2,
vk_alphabeta_12: bn128.pairing( setup.vk_proof.vk_alpha_1 , setup.vk_proof.vk_beta_2 )
};
return setup;
};
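/*
  Usage sketch (the require path is an assumption; circuit is the parsed r1cs with
  nVars, nPubInputs, nOutputs, nConstraints and constraints):

      const setup = require("./setup_groth16");
      const keys = setup(circuit, true);
      // keys.vk_proof    -> proving key material
      // keys.vk_verifier -> verification key
      // keys.toxic       -> trapdoor values (t, kalpha, kbeta, kgamma, kdelta);
      //                     these must be discarded or soundness is lost
*/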
function calculatePolinomials(setup, circuit) {
setup.vk_proof.polsA = new Array(circuit.nVars);
setup.vk_proof.polsB = new Array(circuit.nVars);
setup.vk_proof.polsC = new Array(circuit.nVars);
for (let i=0; i<circuit.nVars; i++) {
setup.vk_proof.polsA[i] = {};
setup.vk_proof.polsB[i] = {};
setup.vk_proof.polsC[i] = {};
}
setup.vk_proof.ccoefs = [];
for (let m=0; m<2; m++) {
for (let c=0; c<circuit.nConstraints; c++) {
for (let s in circuit.constraints[c][m]) {
setup.vk_proof.ccoefs.push({
matrix: m,
constraint: c,
signal: s,
value: circuit.constraints[c][m][s]
});
}
}
}
for (let c=0; c<circuit.nConstraints; c++) {
for (let s in circuit.constraints[c][0]) {
setup.vk_proof.polsA[s][c] = circuit.constraints[c][0][s];
}
for (let s in circuit.constraints[c][1]) {
setup.vk_proof.polsB[s][c] = circuit.constraints[c][1][s];
}
for (let s in circuit.constraints[c][2]) {
setup.vk_proof.polsC[s][c] = circuit.constraints[c][2][s];
}
}
/**
* add and process the constraints
* input_i * 0 = 0
* to ensure soundness of input consistency
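 * (if a public input appeared in no constraint, its QAP polynomials would be
 * identically zero and a prover could claim any value for it; these dummy
 * constraints bind every public signal into the A matrix)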
*/
for (let i = 0; i < circuit.nPubInputs + circuit.nOutputs + 1; ++i)
{
setup.vk_proof.polsA[i][circuit.nConstraints + i] = F.one;
setup.vk_proof.ccoefs.push({
matrix: 0,
constraint: circuit.nConstraints + i,
signal: i,
value: F.one
});
}
}
function calculateValuesAtT(setup, circuit) {
const z_t = PolF.computeVanishingPolinomial(setup.vk_proof.domainBits, setup.toxic.t);
const u = PolF.evaluateLagrangePolynomials(setup.vk_proof.domainBits, setup.toxic.t);
const a_t = new Array(circuit.nVars).fill(F.zero);
const b_t = new Array(circuit.nVars).fill(F.zero);
const c_t = new Array(circuit.nVars).fill(F.zero);
    // TODO: substitute setup.polsA with coefficients
for (let s=0; s<circuit.nVars; s++) {
for (let c in setup.vk_proof.polsA[s]) {
a_t[s] = F.add(a_t[s], F.mul(u[c], setup.vk_proof.polsA[s][c]));
}
for (let c in setup.vk_proof.polsB[s]) {
b_t[s] = F.add(b_t[s], F.mul(u[c], setup.vk_proof.polsB[s][c]));
}
for (let c in setup.vk_proof.polsC[s]) {
c_t[s] = F.add(c_t[s], F.mul(u[c], setup.vk_proof.polsC[s][c]));
}
}
return {a_t, b_t, c_t, z_t};
}
function calculateEncriptedValuesAtT(setup, circuit, verbose) {
const v = calculateValuesAtT(setup, circuit);
setup.vk_proof.A = new Array(circuit.nVars);
setup.vk_proof.B1 = new Array(circuit.nVars);
setup.vk_proof.B2 = new Array(circuit.nVars);
setup.vk_proof.C = new Array(circuit.nVars);
setup.vk_proof.IC = new Array(circuit.nPubInputs + circuit.nOutputs + 1);
setup.toxic.kalpha = F.random();
setup.toxic.kbeta = F.random();
setup.toxic.kgamma = F.random();
setup.toxic.kdelta = F.random();
let invDelta = F.inv(setup.toxic.kdelta);
let invGamma = F.inv(setup.toxic.kgamma);
setup.vk_proof.vk_alpha_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kalpha));
setup.vk_proof.vk_beta_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kbeta));
setup.vk_proof.vk_delta_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kdelta));
setup.vk_proof.vk_beta_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kbeta));
setup.vk_proof.vk_delta_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kdelta));
setup.vk_proof.vk_gamma_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kgamma));
for (let s=0; s<circuit.nVars; s++) {
const A = G1.timesScalar(G1.g, v.a_t[s]);
setup.vk_proof.A[s] = A;
const B1 = G1.timesScalar(G1.g, v.b_t[s]);
setup.vk_proof.B1[s] = B1;
const B2 = G2.timesScalar(G2.g, v.b_t[s]);
setup.vk_proof.B2[s] = B2;
if ((verbose)&&(s%1000 == 1)) console.log("A, B1, B2: ", s);
}
for (let s=0; s<=setup.vk_proof.nPublic; s++) {
let ps =
F.mul(
invGamma,
F.add(
F.add(
F.mul(v.a_t[s], setup.toxic.kbeta),
F.mul(v.b_t[s], setup.toxic.kalpha)),
v.c_t[s]));
const IC = G1.timesScalar(G1.g, ps);
setup.vk_proof.IC[s]=IC;
}
for (let s=setup.vk_proof.nPublic+1; s<circuit.nVars; s++) {
let ps =
F.mul(
invDelta,
F.add(
F.add(
F.mul(v.a_t[s], setup.toxic.kbeta),
F.mul(v.b_t[s], setup.toxic.kalpha)),
v.c_t[s]));
const C = G1.timesScalar(G1.g, ps);
setup.vk_proof.C[s]=C;
if ((verbose)&&(s%1000 == 1)) console.log("C: ", s);
}
// Calculate HExps
const maxH = setup.vk_proof.domainSize+1;
setup.vk_proof.hExps = new Array(maxH);
const zod = F.mul(invDelta, v.z_t);
setup.vk_proof.hExps[0] = G1.toAffine(G1.timesScalar(G1.g, zod));
let eT = setup.toxic.t;
for (let i=1; i<maxH; i++) {
setup.vk_proof.hExps[i] = G1.timesScalar(G1.g, F.mul(eT, zod));
eT = F.mul(eT, setup.toxic.t);
if ((verbose)&&(i%1000 == 1)) console.log("Tau: ", i);
}
G1.multiAffine(setup.vk_proof.A);
G1.multiAffine(setup.vk_proof.B1);
G2.multiAffine(setup.vk_proof.B2);
G1.multiAffine(setup.vk_proof.C);
G1.multiAffine(setup.vk_proof.hExps);
G1.multiAffine(setup.vk_proof.IC);
}

View File

@ -1,232 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const bigInt = require("big-integer");
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").ZqField;
const G1 = bn128.G1;
const G2 = bn128.G2;
const PolF = new PolField(new ZqField(bn128.r));
const F = new ZqField(bn128.r);
module.exports = function setup(circuit) {
const setup = {
vk_proof : {
protocol: "kimleeoh",
nVars: circuit.nVars,
nPublic: circuit.nPubInputs + circuit.nOutputs
},
vk_verifier: {
protocol: "kimleeoh",
nPublic: circuit.nPubInputs + circuit.nOutputs
},
toxic: {}
};
setup.vk_proof.domainBits = PolF.log2(circuit.nConstraints + circuit.nPubInputs + circuit.nOutputs +1 -1) +1;
setup.vk_proof.domainSize = 1 << setup.vk_proof.domainBits;
calculatePolinomials(setup, circuit);
setup.toxic.t = F.random();
calculateEncriptedValuesAtT(setup, circuit);
return setup;
};
function calculatePolinomials(setup, circuit) {
setup.vk_proof.polsA = new Array(circuit.nVars);
setup.vk_proof.polsB = new Array(circuit.nVars);
setup.vk_proof.polsC = new Array(circuit.nVars);
for (let i=0; i<circuit.nVars; i++) {
setup.vk_proof.polsA[i] = {};
setup.vk_proof.polsB[i] = {};
setup.vk_proof.polsC[i] = {};
}
for (let c=0; c<circuit.nConstraints; c++) {
for (let s in circuit.constraints[c][0]) {
setup.vk_proof.polsA[s][c] = circuit.constraints[c][0][s];
}
for (let s in circuit.constraints[c][1]) {
setup.vk_proof.polsB[s][c] = circuit.constraints[c][1][s];
}
for (let s in circuit.constraints[c][2]) {
setup.vk_proof.polsC[s][c] = circuit.constraints[c][2][s];
}
}
/**
* add and process the constraints
* input_i * 0 = 0
* to ensure soundness of input consistency
*/
for (let i = 0; i < circuit.nPubInputs + circuit.nOutputs + 1; ++i)
{
setup.vk_proof.polsA[i][circuit.nConstraints + i] = F.one;
}
}
function calculateValuesAtT(setup, circuit) {
const z_t = PolF.computeVanishingPolinomial(setup.vk_proof.domainBits, setup.toxic.t);
const u = PolF.evaluateLagrangePolynomials(setup.vk_proof.domainBits, setup.toxic.t);
const a_t = new Array(circuit.nVars).fill(F.zero);
const b_t = new Array(circuit.nVars).fill(F.zero);
const c_t = new Array(circuit.nVars).fill(F.zero);
    // TODO: substitute setup.polsA with coefficients
for (let s=0; s<circuit.nVars; s++) {
for (let c in setup.vk_proof.polsA[s]) {
a_t[s] = F.add(a_t[s], F.mul(u[c], setup.vk_proof.polsA[s][c]));
}
for (let c in setup.vk_proof.polsB[s]) {
b_t[s] = F.add(b_t[s], F.mul(u[c], setup.vk_proof.polsB[s][c]));
}
for (let c in setup.vk_proof.polsC[s]) {
c_t[s] = F.add(c_t[s], F.mul(u[c], setup.vk_proof.polsC[s][c]));
}
}
return {a_t, b_t, c_t, z_t};
}
function calculateEncriptedValuesAtT(setup, circuit) {
const v = calculateValuesAtT(setup, circuit);
setup.vk_proof.A = new Array(circuit.nVars);
setup.vk_proof.Adelta = new Array(circuit.nVars);
setup.vk_proof.B1 = new Array(circuit.nVars);
setup.vk_proof.B2 = new Array(circuit.nVars);
setup.vk_proof.C = new Array(circuit.nVars);
setup.vk_verifier.IC = new Array(circuit.nPubInputs + circuit.nOutputs + 1);
setup.toxic.kalpha = F.random();
setup.toxic.kbeta = F.random();
setup.toxic.kgamma = F.random();
setup.toxic.kdelta = F.random();
const gammaSquare = F.mul(setup.toxic.kgamma, setup.toxic.kgamma);
setup.vk_proof.vk_alpha_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kalpha));
setup.vk_proof.vk_beta_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kbeta));
setup.vk_proof.vk_delta_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kdelta));
setup.vk_proof.vk_alphadelta_1 = G1.toAffine(G1.timesScalar( G1.g, F.mul(setup.toxic.kalpha, setup.toxic.kdelta)));
setup.vk_proof.vk_beta_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kbeta));
setup.vk_verifier.vk_alpha_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kalpha));
setup.vk_verifier.vk_beta_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kbeta));
setup.vk_verifier.vk_gamma_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kgamma));
setup.vk_verifier.vk_delta_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kdelta));
setup.vk_verifier.vk_alphabeta_12 = bn128.pairing( setup.vk_verifier.vk_alpha_1 , setup.vk_verifier.vk_beta_2 );
for (let s=0; s<circuit.nVars; s++) {
const A = G1.toAffine(G1.timesScalar(G1.g, F.mul(setup.toxic.kgamma, v.a_t[s])));
setup.vk_proof.A[s] = A;
setup.vk_proof.Adelta[s] = G1.toAffine(G1.timesScalar(A, setup.toxic.kdelta));
const B1 = G1.toAffine(G1.timesScalar(G1.g, F.mul(setup.toxic.kgamma, v.b_t[s])));
setup.vk_proof.B1[s] = B1;
const B2 = G2.toAffine(G2.timesScalar(G2.g, F.mul(setup.toxic.kgamma, v.b_t[s])));
setup.vk_proof.B2[s] = B2;
}
for (let s=0; s<=setup.vk_proof.nPublic; s++) {
let ps =
F.add(
F.mul(
setup.toxic.kgamma,
v.c_t[s]
),
F.add(
F.mul(
setup.toxic.kbeta,
v.a_t[s]
),
F.mul(
setup.toxic.kalpha,
v.b_t[s]
)
)
);
const IC = G1.toAffine(G1.timesScalar(G1.g, ps));
setup.vk_verifier.IC[s]=IC;
}
for (let s=setup.vk_proof.nPublic+1; s<circuit.nVars; s++) {
let ps =
F.add(
F.mul(
gammaSquare,
v.c_t[s]
),
F.add(
F.mul(
F.mul(setup.toxic.kbeta, setup.toxic.kgamma),
v.a_t[s]
),
F.mul(
F.mul(setup.toxic.kalpha, setup.toxic.kgamma),
v.b_t[s]
)
)
);
const C = G1.toAffine(G1.timesScalar(G1.g, ps));
setup.vk_proof.C[s]=C;
}
// Calculate HExps
const maxH = setup.vk_proof.domainSize+1;
setup.vk_proof.hExps = new Array(maxH);
const zod = F.mul(gammaSquare, v.z_t);
setup.vk_proof.hExps[0] = G1.toAffine(G1.timesScalar(G1.g, zod));
let eT = setup.toxic.t;
for (let i=1; i<maxH; i++) {
setup.vk_proof.hExps[i] = G1.toAffine(G1.timesScalar(G1.g, F.mul(eT, zod)));
eT = F.mul(eT, setup.toxic.t);
}
}

View File

@ -1,236 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").F1Field;
const G1 = bn128.G1;
const G2 = bn128.G2;
const PolF = new PolField(new ZqField(bn128.r));
const F = new ZqField(bn128.r);
module.exports = function setup(circuit) {
const setup = {
vk_proof : {
protocol: "original",
nVars: circuit.nVars,
nPublic: circuit.nPubInputs + circuit.nOutputs
},
vk_verifier: {
protocol: "original",
nPublic: circuit.nPubInputs + circuit.nOutputs
},
toxic: {}
};
setup.vk_proof.domainBits = PolF.log2(circuit.nConstraints + circuit.nPubInputs + circuit.nOutputs +1 -1) +1;
setup.vk_proof.domainSize = 1 << setup.vk_proof.domainBits;
calculatePolinomials(setup, circuit);
setup.toxic.t = F.random();
calculateEncriptedValuesAtT(setup, circuit);
calculateHexps(setup, circuit);
return setup;
};
function calculatePolinomials(setup, circuit) {
setup.vk_proof.polsA = new Array(circuit.nVars);
setup.vk_proof.polsB = new Array(circuit.nVars);
setup.vk_proof.polsC = new Array(circuit.nVars);
for (let i=0; i<circuit.nVars; i++) {
setup.vk_proof.polsA[i] = {};
setup.vk_proof.polsB[i] = {};
setup.vk_proof.polsC[i] = {};
}
for (let c=0; c<circuit.nConstraints; c++) {
for (let s in circuit.constraints[c][0]) {
setup.vk_proof.polsA[s][c] = circuit.constraints[c][0][s];
}
for (let s in circuit.constraints[c][1]) {
setup.vk_proof.polsB[s][c] = circuit.constraints[c][1][s];
}
for (let s in circuit.constraints[c][2]) {
setup.vk_proof.polsC[s][c] = circuit.constraints[c][2][s];
}
}
/**
* add and process the constraints
* input_i * 0 = 0
* to ensure soundness of input consistency
*/
for (let i = 0; i < circuit.nPubInputs + circuit.nOutputs + 1; ++i)
{
setup.vk_proof.polsA[i][circuit.nConstraints + i] = F.one;
}
}
function calculateValuesAtT(setup, circuit) {
const z_t = PolF.computeVanishingPolinomial(setup.vk_proof.domainBits, setup.toxic.t);
const u = PolF.evaluateLagrangePolynomials(setup.vk_proof.domainBits, setup.toxic.t);
const a_t = new Array(circuit.nVars).fill(F.zero);
const b_t = new Array(circuit.nVars).fill(F.zero);
const c_t = new Array(circuit.nVars).fill(F.zero);
    // TODO: substitute setup.polsA with coefficients
for (let s=0; s<circuit.nVars; s++) {
for (let c in setup.vk_proof.polsA[s]) {
a_t[s] = F.add(a_t[s], F.mul(u[c], setup.vk_proof.polsA[s][c]));
}
for (let c in setup.vk_proof.polsB[s]) {
b_t[s] = F.add(b_t[s], F.mul(u[c], setup.vk_proof.polsB[s][c]));
}
for (let c in setup.vk_proof.polsC[s]) {
c_t[s] = F.add(c_t[s], F.mul(u[c], setup.vk_proof.polsC[s][c]));
}
}
return {a_t, b_t, c_t, z_t};
}
function calculateEncriptedValuesAtT(setup, circuit) {
const v = calculateValuesAtT(setup, circuit);
setup.vk_proof.A = new Array(circuit.nVars+1);
setup.vk_proof.B = new Array(circuit.nVars+1);
setup.vk_proof.C = new Array(circuit.nVars+1);
setup.vk_proof.Ap = new Array(circuit.nVars+1);
setup.vk_proof.Bp = new Array(circuit.nVars+1);
setup.vk_proof.Cp = new Array(circuit.nVars+1);
setup.vk_proof.Kp = new Array(circuit.nVars+3);
    setup.vk_verifier.IC = new Array(circuit.nPubInputs + circuit.nOutputs + 1);
setup.toxic.ka = F.random();
setup.toxic.kb = F.random();
setup.toxic.kc = F.random();
setup.toxic.ra = F.random();
setup.toxic.rb = F.random();
setup.toxic.rc = F.mul(setup.toxic.ra, setup.toxic.rb);
setup.toxic.kbeta = F.random();
setup.toxic.kgamma = F.random();
const gb = F.mul(setup.toxic.kbeta, setup.toxic.kgamma);
setup.vk_verifier.vk_a = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.ka));
setup.vk_verifier.vk_b = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kb));
setup.vk_verifier.vk_c = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kc));
setup.vk_verifier.vk_gb_1 = G1.toAffine(G1.timesScalar( G1.g, gb));
setup.vk_verifier.vk_gb_2 = G2.toAffine(G2.timesScalar( G2.g, gb));
setup.vk_verifier.vk_g = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kgamma));
for (let s=0; s<circuit.nVars; s++) {
// A[i] = G1 * polA(t)
const raat = F.mul(setup.toxic.ra, v.a_t[s]);
const A = G1.toAffine(G1.timesScalar(G1.g, raat));
setup.vk_proof.A[s] = A;
if (s <= setup.vk_proof.nPublic) {
setup.vk_verifier.IC[s]=A;
}
// B1[i] = G1 * polB(t)
const rbbt = F.mul(setup.toxic.rb, v.b_t[s]);
const B1 = G1.toAffine(G1.timesScalar(G1.g, rbbt));
// B2[i] = G2 * polB(t)
const B2 = G2.toAffine(G2.timesScalar(G2.g, rbbt));
setup.vk_proof.B[s]=B2;
// C[i] = G1 * polC(t)
const rcct = F.mul(setup.toxic.rc, v.c_t[s]);
const C = G1.toAffine(G1.timesScalar( G1.g, rcct));
setup.vk_proof.C[s] =C;
// K = G1 * (A+B+C)
const kt = F.add(F.add(raat, rbbt), rcct);
const K = G1.toAffine(G1.timesScalar( G1.g, kt));
/*
// Comment out these lines to speed up the process
const Ktest = G1.toAffine(G1.add(G1.add(A, B1), C));
if (!G1.equals(K, Ktest)) {
console.log ("=====FAIL======");
}
*/
if (s > setup.vk_proof.nPublic) {
setup.vk_proof.Ap[s] = G1.toAffine(G1.timesScalar(A, setup.toxic.ka));
}
setup.vk_proof.Bp[s] = G1.toAffine(G1.timesScalar(B1, setup.toxic.kb));
setup.vk_proof.Cp[s] = G1.toAffine(G1.timesScalar(C, setup.toxic.kc));
setup.vk_proof.Kp[s] = G1.toAffine(G1.timesScalar(K, setup.toxic.kbeta));
}
    // Extra coefficients
const A = G1.timesScalar( G1.g, F.mul(setup.toxic.ra, v.z_t));
setup.vk_proof.A[circuit.nVars] = G1.toAffine(A);
setup.vk_proof.Ap[circuit.nVars] = G1.toAffine(G1.timesScalar(A, setup.toxic.ka));
const B1 = G1.timesScalar( G1.g, F.mul(setup.toxic.rb, v.z_t));
const B2 = G2.timesScalar( G2.g, F.mul(setup.toxic.rb, v.z_t));
setup.vk_proof.B[circuit.nVars] = G2.toAffine(B2);
setup.vk_proof.Bp[circuit.nVars] = G1.toAffine(G1.timesScalar(B1, setup.toxic.kb));
const C = G1.timesScalar( G1.g, F.mul(setup.toxic.rc, v.z_t));
setup.vk_proof.C[circuit.nVars] = G1.toAffine(C);
setup.vk_proof.Cp[circuit.nVars] = G1.toAffine(G1.timesScalar(C, setup.toxic.kc));
setup.vk_proof.Kp[circuit.nVars ] = G1.toAffine(G1.timesScalar(A, setup.toxic.kbeta));
setup.vk_proof.Kp[circuit.nVars+1] = G1.toAffine(G1.timesScalar(B1, setup.toxic.kbeta));
setup.vk_proof.Kp[circuit.nVars+2] = G1.toAffine(G1.timesScalar(C, setup.toxic.kbeta));
// setup.vk_verifier.A[0] = G1.toAffine(G1.add(setup.vk_verifier.A[0], setup.vk_proof.A[circuit.nVars]));
// vk_z
setup.vk_verifier.vk_z = G2.toAffine(G2.timesScalar(
G2.g,
F.mul(setup.toxic.rc, v.z_t)));
}
function calculateHexps(setup) {
const maxH = setup.vk_proof.domainSize+1;
setup.vk_proof.hExps = new Array(maxH);
setup.vk_proof.hExps[0] = G1.g;
let eT = setup.toxic.t;
for (let i=1; i<maxH; i++) {
setup.vk_proof.hExps[i] = G1.toAffine(G1.timesScalar(G1.g, eT));
eT = F.mul(eT, setup.toxic.t);
}
}

src/solidity.js Normal file

@ -0,0 +1,3 @@
export {default as genGroth16Verifier} from "./solidity_gengroth16verifier.js";
export {default as prove} from "./groth16_prove.js";
export {default as validate} from "./groth16_verify.js";


@ -1,150 +0,0 @@
const path = require("path");
const fs = require("fs");
module.exports.generateVerifier_original = generateVerifier_original;
module.exports.generateVerifier_groth16 = generateVerifier_groth16;
// Not ready yet
// module.exports.generateVerifier_kimleeoh = generateVerifier_kimleeoh;
function generateVerifier_original(verificationKey) {
let template = fs.readFileSync(path.join( __dirname, "..", "templates", "verifier_original.sol"), "utf-8");
const vka_str = `[${verificationKey.vk_a[0][1].toString()},`+
`${verificationKey.vk_a[0][0].toString()}], `+
`[${verificationKey.vk_a[1][1].toString()},` +
`${verificationKey.vk_a[1][0].toString()}]`;
template = template.replace("<%vk_a%>", vka_str);
const vkb_str = `${verificationKey.vk_b[0].toString()},`+
`${verificationKey.vk_b[1].toString()}`;
template = template.replace("<%vk_b%>", vkb_str);
const vkc_str = `[${verificationKey.vk_c[0][1].toString()},`+
`${verificationKey.vk_c[0][0].toString()}], `+
`[${verificationKey.vk_c[1][1].toString()},` +
`${verificationKey.vk_c[1][0].toString()}]`;
template = template.replace("<%vk_c%>", vkc_str);
const vkg_str = `[${verificationKey.vk_g[0][1].toString()},`+
`${verificationKey.vk_g[0][0].toString()}], `+
`[${verificationKey.vk_g[1][1].toString()},` +
`${verificationKey.vk_g[1][0].toString()}]`;
template = template.replace("<%vk_g%>", vkg_str);
const vkgb1_str = `${verificationKey.vk_gb_1[0].toString()},`+
`${verificationKey.vk_gb_1[1].toString()}`;
template = template.replace("<%vk_gb1%>", vkgb1_str);
const vkgb2_str = `[${verificationKey.vk_gb_2[0][1].toString()},`+
`${verificationKey.vk_gb_2[0][0].toString()}], `+
`[${verificationKey.vk_gb_2[1][1].toString()},` +
`${verificationKey.vk_gb_2[1][0].toString()}]`;
template = template.replace("<%vk_gb2%>", vkgb2_str);
const vkz_str = `[${verificationKey.vk_z[0][1].toString()},`+
`${verificationKey.vk_z[0][0].toString()}], `+
`[${verificationKey.vk_z[1][1].toString()},` +
`${verificationKey.vk_z[1][0].toString()}]`;
template = template.replace("<%vk_z%>", vkz_str);
// The points
template = template.replace("<%vk_input_length%>", (verificationKey.IC.length-1).toString());
template = template.replace("<%vk_ic_length%>", verificationKey.IC.length.toString());
let vi = "";
for (let i=0; i<verificationKey.IC.length; i++) {
if (vi != "") vi = vi + " ";
vi = vi + `vk.IC[${i}] = Pairing.G1Point(${verificationKey.IC[i][0].toString()},`+
`${verificationKey.IC[i][1].toString()});\n`;
}
template = template.replace("<%vk_ic_pts%>", vi);
return template;
}
function generateVerifier_groth16(verificationKey) {
let template = fs.readFileSync(path.join( __dirname, "..", "templates", "verifier_groth16.sol"), "utf-8");
const vkalpha1_str = `${verificationKey.vk_alpha_1[0].toString()},`+
`${verificationKey.vk_alpha_1[1].toString()}`;
template = template.replace("<%vk_alpha1%>", vkalpha1_str);
const vkbeta2_str = `[${verificationKey.vk_beta_2[0][1].toString()},`+
`${verificationKey.vk_beta_2[0][0].toString()}], `+
`[${verificationKey.vk_beta_2[1][1].toString()},` +
`${verificationKey.vk_beta_2[1][0].toString()}]`;
template = template.replace("<%vk_beta2%>", vkbeta2_str);
const vkgamma2_str = `[${verificationKey.vk_gamma_2[0][1].toString()},`+
`${verificationKey.vk_gamma_2[0][0].toString()}], `+
`[${verificationKey.vk_gamma_2[1][1].toString()},` +
`${verificationKey.vk_gamma_2[1][0].toString()}]`;
template = template.replace("<%vk_gamma2%>", vkgamma2_str);
const vkdelta2_str = `[${verificationKey.vk_delta_2[0][1].toString()},`+
`${verificationKey.vk_delta_2[0][0].toString()}], `+
`[${verificationKey.vk_delta_2[1][1].toString()},` +
`${verificationKey.vk_delta_2[1][0].toString()}]`;
template = template.replace("<%vk_delta2%>", vkdelta2_str);
// The points
template = template.replace("<%vk_input_length%>", (verificationKey.IC.length-1).toString());
template = template.replace("<%vk_ic_length%>", verificationKey.IC.length.toString());
let vi = "";
for (let i=0; i<verificationKey.IC.length; i++) {
if (vi != "") vi = vi + " ";
vi = vi + `vk.IC[${i}] = Pairing.G1Point(${verificationKey.IC[i][0].toString()},`+
`${verificationKey.IC[i][1].toString()});\n`;
}
template = template.replace("<%vk_ic_pts%>", vi);
return template;
}
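/*
  Usage sketch (the require path and the verification key file name are
  assumptions, not part of this file):

      const {generateVerifier_groth16} = require("./verifier_to_solidity");
      const vk = JSON.parse(fs.readFileSync("verification_key.json", "utf-8"));
      fs.writeFileSync("verifier.sol", generateVerifier_groth16(vk), "utf-8");
*/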
function generateVerifier_kimleeoh(verificationKey) {
    throw new Error("Not implemented yet because it requires G2 exponentiation onchain.");
let template = fs.readFileSync(path.join( __dirname, "..", "templates", "verifier_groth16.sol"), "utf-8");
const vkalpha1_str = `${verificationKey.vk_alpha_1[0].toString()},`+
`${verificationKey.vk_alpha_1[1].toString()}`;
template = template.replace("<%vk_alpha1%>", vkalpha1_str);
const vkbeta2_str = `[${verificationKey.vk_beta_2[0][1].toString()},`+
`${verificationKey.vk_beta_2[0][0].toString()}], `+
`[${verificationKey.vk_beta_2[1][1].toString()},` +
`${verificationKey.vk_beta_2[1][0].toString()}]`;
template = template.replace("<%vk_beta2%>", vkbeta2_str);
const vkgamma2_str = `[${verificationKey.vk_gamma_2[0][1].toString()},`+
`${verificationKey.vk_gamma_2[0][0].toString()}], `+
`[${verificationKey.vk_gamma_2[1][1].toString()},` +
`${verificationKey.vk_gamma_2[1][0].toString()}]`;
template = template.replace("<%vk_gamma2%>", vkgamma2_str);
const vkdelta2_str = `[${verificationKey.vk_delta_2[0][1].toString()},`+
`${verificationKey.vk_delta_2[0][0].toString()}], `+
`[${verificationKey.vk_delta_2[1][1].toString()},` +
`${verificationKey.vk_delta_2[1][0].toString()}]`;
template = template.replace("<%vk_delta2%>", vkdelta2_str);
// The points
template = template.replace("<%vk_input_length%>", (verificationKey.IC.length-1).toString());
template = template.replace("<%vk_ic_length%>", verificationKey.IC.length.toString());
let vi = "";
for (let i=0; i<verificationKey.IC.length; i++) {
if (vi != "") vi = vi + " ";
vi = vi + `vk.IC[${i}] = Pairing.G1Point(${verificationKey.IC[i][0].toString()},`+
`${verificationKey.IC[i][1].toString()});\n`;
}
template = template.replace("<%vk_ic_pts%>", vi);
return template;
}


@ -1,5 +1,3 @@
-const assert = require("assert");
 const inBrowser = (typeof window !== "undefined");
 let NodeWorker;
 if (!inBrowser) {
@ -87,8 +85,8 @@ async function buildTaskManager(fn, mods, initTask) {
     return function(e) {
         function finishTask() {
-            // It can not be a waiting task and it's terminating
-            assert( !(tm.waitingTask && tm.terminateDeferred));
+            if ( (tm.waitingTask && tm.terminateDeferred))
+                throw new Error("It can not be a waiting task and it's terminating");
             if (tm.terminateDeferred) {
                 tm.workers[i].worker.postMessage({cmd: "TERMINATE"});
@ -130,7 +128,8 @@ async function buildTaskManager(fn, mods, initTask) {
     }
     function processTask(i, task, asyncCb) {
-        assert(tm.workers[i].state == "READY");
+        if (tm.workers[i].state != "READY")
+            throw new Error("Worker is not ready");
         tm.workers[i].asyncCb = asyncCb;
         tm.workers[i].state = "WORKING";
@ -163,7 +162,8 @@ async function buildTaskManager(fn, mods, initTask) {
     tm.finish = function() {
         const self = this;
-        assert (self.terminatePromise == null);
+        if (self.terminatePromise != null)
+            throw new Error("Task manager already terminated");
         self.terminateDeferred = new Deferred();
@ -178,8 +178,8 @@ async function buildTaskManager(fn, mods, initTask) {
     tm.addTask = function (task, asyncCb) {
         const self = this;
-        assert (!self.waitingTask);
-        assert(!self.terminateDeferred);
+        if (self.waitingTask) throw new Error("Waiting task pending");
+        if (self.terminateDeferred) throw new Error("New task after task manager terminated");
         const deferral = new Deferred();
         let i;
         for (i=0; i<tm.workers.length; i++) {
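Replacing assert with explicit throws drops the Node-only assert dependency, presumably so the task manager keeps working in the browser path guarded by inBrowser above.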


@ -1,45 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const bn128 = require("ffjavascript").bn128;
const G1 = bn128.G1;
module.exports = function isValid(vk_verifier, proof, publicSignals) {
let cpub = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
cpub = G1.add( cpub, G1.timesScalar( vk_verifier.IC[s+1], publicSignals[s]));
}
if (! bn128.F12.eq(
bn128.pairing( proof.pi_a , proof.pi_b ),
bn128.F12.mul(
vk_verifier.vk_alphabeta_12,
bn128.F12.mul(
bn128.pairing( cpub , vk_verifier.vk_gamma_2 ),
bn128.pairing( proof.pi_c , vk_verifier.vk_delta_2 )
))))
return false;
return true;
};
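/*
  The check above is the Groth16 verification equation
      e(pi_a, pi_b) == e(alpha_1, beta_2) * e(cpub, gamma_2) * e(pi_c, delta_2)
  where e(alpha_1, beta_2) is precomputed in the key as vk_alphabeta_12 and cpub
  is the linear combination of the IC points with the public signals.
*/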


@ -1,75 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const bn128 = require("ffjavascript").bn128;
const createKeccakHash = require("keccak");
const utils = require("ffjavascript").utils;
const G1 = bn128.G1;
const G2 = bn128.G2;
module.exports = function isValid(vk_verifier, proof, publicSignals) {
let cpub = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
cpub = G1.add( cpub, G1.timesScalar( vk_verifier.IC[s+1], publicSignals[s]));
}
const buff = Buffer.concat([
utils.beInt2Buff(proof.pi_a[0], 32),
utils.beInt2Buff(proof.pi_a[1], 32),
utils.beInt2Buff(proof.pi_b[0][0], 32),
utils.beInt2Buff(proof.pi_b[0][1], 32),
utils.beInt2Buff(proof.pi_b[1][0], 32),
utils.beInt2Buff(proof.pi_b[1][1], 32),
]);
const h1buff = createKeccakHash("keccak256").update(buff).digest();
const h2buff = createKeccakHash("keccak256").update(h1buff).digest();
const h1 = utils.beBuff2int(h1buff);
const h2 = utils.beBuff2int(h2buff);
// const h1 = bn128.Fr.zero;
// const h2 = bn128.Fr.zero;
// console.log(h1.toString());
// console.log(h2.toString());
if (! bn128.F12.eq(
bn128.pairing(
G1.add(proof.pi_a, G1.timesScalar(G1.g, h1)),
G2.add(proof.pi_b, G2.timesScalar(vk_verifier.vk_delta_2, h2))
),
bn128.F12.mul(
vk_verifier.vk_alphabeta_12,
bn128.F12.mul(
bn128.pairing( cpub , vk_verifier.vk_gamma_2 ),
bn128.pairing( proof.pi_c , G2.g )
))))
return false;
return true;
};


@ -1,66 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const bn128 = require("ffjavascript").bn128;
const G1 = bn128.G1;
const G2 = bn128.G2;
module.exports = function isValid(vk_verifier, proof, publicSignals) {
let full_pi_a = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
full_pi_a = G1.add( full_pi_a, G1.timesScalar( vk_verifier.IC[s+1], publicSignals[s]));
}
full_pi_a = G1.add( full_pi_a, proof.pi_a);
if (! bn128.F12.eq(
bn128.pairing( proof.pi_a , vk_verifier.vk_a ),
bn128.pairing( proof.pi_ap , G2.g )))
return false;
if (! bn128.F12.eq(
bn128.pairing( vk_verifier.vk_b, proof.pi_b ),
bn128.pairing( proof.pi_bp , G2.g )))
return false;
if (! bn128.F12.eq(
bn128.pairing( proof.pi_c , vk_verifier.vk_c ),
bn128.pairing( proof.pi_cp , G2.g )))
return false;
if (! bn128.F12.eq(
bn128.F12.mul(
bn128.pairing( G1.add(full_pi_a, proof.pi_c) , vk_verifier.vk_gb_2 ),
bn128.pairing( vk_verifier.vk_gb_1 , proof.pi_b )
),
bn128.pairing( proof.pi_kp , vk_verifier.vk_g )))
return false;
if (! bn128.F12.eq(
bn128.pairing( full_pi_a , proof.pi_b ),
bn128.F12.mul(
bn128.pairing( proof.pi_h , vk_verifier.vk_z ),
bn128.pairing( proof.pi_c , G2.g )
)))
return false;
return true;
};

src/wtns.js Normal file

@ -0,0 +1,3 @@
export {default as calculate} from "./wtns_calculate.js";
export {default as debug} from "./wtns_debug.js";
export {default as exportJson} from "./wtns_export_json.js";

src/wtns_calculate.js Normal file

@ -0,0 +1,22 @@
import * as fastFile from "fastfile";
import circomRuntime from "circom_runtime";
import * as wtnsUtils from "./wtns_utils.js";
import * as binFileUtils from "./binfileutils.js";
const { WitnessCalculatorBuilder } = circomRuntime;
export default async function wtnsCalculate(input, wasmFileName, wtnsFileName, options) {
const fdWasm = await fastFile.readExisting(wasmFileName);
const wasm = await fdWasm.read(fdWasm.totalSize);
await fdWasm.close();
const wc = await WitnessCalculatorBuilder(wasm);
const w = await wc.calculateBinWitness(input);
const fdWtns = await binFileUtils.createBinFile(wtnsFileName, "wtns", 2, 2);
await wtnsUtils.writeBin(fdWtns, w, wc.prime);
await fdWtns.close();
}
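A minimal invocation sketch for the new witness calculator wrapper (the file names and input signal values are placeholders):

```js
import wtnsCalculate from "./wtns_calculate.js";

// input maps the circuit's input signal names to values
const input = { a: 3, b: 11 };
await wtnsCalculate(input, "circuit.wasm", "witness.wtns");
```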

src/wtns_debug.js Normal file

@ -0,0 +1,51 @@
import * as fastFile from "fastfile";
import circomRuntime from "circom_runtime";
import * as wtnsUtils from "./wtns_utils.js";
import * as binFileUtils from "./binfileutils.js";
import loadSyms from "./loadsyms.js";
const { WitnessCalculatorBuilder } = circomRuntime;
export default async function wtnsDebug(input, wasmFileName, wtnsFileName, symName, options, logger) {
const fdWasm = await fastFile.readExisting(wasmFileName);
const wasm = await fdWasm.read(fdWasm.totalSize);
await fdWasm.close();
let wcOps = {
sanityCheck: true
};
let sym = await loadSyms(symName);
if (options.set) {
if (!sym) sym = await loadSyms(symName);
wcOps.logSetSignal= function(labelIdx, value) {
if (logger) logger.info("SET " + sym.labelIdx2Name[labelIdx] + " <-- " + value.toString());
};
}
if (options.get) {
if (!sym) sym = await loadSyms(symName);
wcOps.logGetSignal= function(varIdx, value) {
if (logger) logger.info("GET " + sym.labelIdx2Name[varIdx] + " --> " + value.toString());
};
}
if (options.trigger) {
if (!sym) sym = await loadSyms(symName);
wcOps.logStartComponent= function(cIdx) {
if (logger) logger.info("START: " + sym.componentIdx2Name[cIdx]);
};
wcOps.logFinishComponent= function(cIdx) {
if (logger) logger.info("FINISH: " + sym.componentIdx2Name[cIdx]);
};
}
const wc = await WitnessCalculatorBuilder(wasm, wcOps);
const w = await wc.calculateWitness(input);
const fdWtns = await binFileUtils.createBinFile(wtnsFileName, "wtns", 2, 2);
await wtnsUtils.write(fdWtns, w, wc.prime);
await fdWtns.close();
}
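The same calculation can be traced signal by signal with wtnsDebug; the option flags shown and the use of console as the logger are illustrative:

```js
import wtnsDebug from "./wtns_debug.js";

// Logs SET/GET for every signal and START/FINISH for every component
await wtnsDebug({ a: 3, b: 11 }, "circuit.wasm", "witness.wtns", "circuit.sym",
    { set: true, get: true, trigger: true }, console);
```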

src/wtns_export_json.js Normal file

@ -0,0 +1,8 @@
import {read} from "./wtns_utils.js";
export default async function wtnsExportJson(wtnsFileName) {
const w = await read(wtnsFileName);
return w;
}


@ -1,11 +1,9 @@
-const Scalar = require("ffjavascript").Scalar;
-const assert = require("assert");
-const binFileUtils = require("./binfileutils");
+import { Scalar } from "ffjavascript";
+import * as binFileUtils from "./binfileutils.js";

-async function writeWtns(fileName, witness, prime) {
-    const fd = await binFileUtils.createOverride(fileName,"wtns", 2, 2);
+export async function write(fd, witness, prime) {
     await binFileUtils.startWriteSection(fd, 1);
     const n8 = (Math.floor( (Scalar.bitLength(prime) - 1) / 64) +1)*8;
@ -20,21 +18,18 @@ async function writeWtns(fileName, witness, prime) {
     }
     await binFileUtils.endWriteSection(fd, 2);
-    await fd.close();
 }

-async function writeWtnsBin(fileName, witnessBin, prime) {
-    witnessBin = Buffer.from(witnessBin);
-    const fd = await binFileUtils.createBinFile(fileName, "wtns", 2, 2);
+export async function writeBin(fd, witnessBin, prime) {
     await binFileUtils.startWriteSection(fd, 1);
     const n8 = (Math.floor( (Scalar.bitLength(prime) - 1) / 64) +1)*8;
     await fd.writeULE32(n8);
     await binFileUtils.writeBigInt(fd, prime, n8);
-    assert(witnessBin.length % n8 == 0);
+    if (witnessBin.byteLength % n8 != 0) {
+        throw new Error("Invalid witness length");
+    }
     await fd.writeULE32(witnessBin.byteLength / n8);
     await binFileUtils.endWriteSection(fd);
@ -43,10 +38,9 @@ async function writeWtnsBin(fileName, witnessBin, prime) {
     await fd.write(witnessBin);
     await binFileUtils.endWriteSection(fd);
-    await fd.close();
 }

-async function readWtnsHeader(fd, sections) {
+export async function readHeader(fd, sections) {
     await binFileUtils.startReadUniqueSection(fd, sections, 1);
     const n8 = await fd.readULE32();
@ -58,11 +52,11 @@ async function readWtnsHeader(fd, sections) {
 }

-async function readWtns(fileName) {
+export async function read(fileName) {
     const {fd, sections} = await binFileUtils.readBinFile(fileName, "wtns", 2);
-    const {n8, nWitness} = await readWtnsHeader(fd, sections);
+    const {n8, nWitness} = await readHeader(fd, sections);
     await binFileUtils.startReadUniqueSection(fd, sections, 2);
     const res = [];
@ -77,7 +71,3 @@ async function readWtns(fileName) {
     return res;
 }
-
-module.exports.read = readWtns;
-module.exports.readHeader = readWtnsHeader;
-module.exports.writeBin = writeWtnsBin;
-module.exports.write = writeWtns;


@ -1,11 +1,10 @@
export {default as newZKey} from "./zkey_new.js";
module.exports.new = require("./zkey_new.js"); export {default as exportBellman} from "./zkey_export_bellman.js";
module.exports.exportBellman = require("./zkey_export_bellman.js"); export {default as importBellman} from "./zkey_import_bellman.js";
module.exports.importBellman = require("./zkey_import_bellman.js"); export {default as verify} from "./zkey_verify.js";
module.exports.verify = require("./zkey_verify.js"); export {default as contribute} from "./zkey_contribute.js";
module.exports.contribute = require("./zkey_contribute.js"); export {default as beacon} from "./zkey_beacon.js";
module.exports.beacon = require("./zkey_beacon.js"); export {default as exportJson} from "./zkey_export_json.js";
module.exports.exportJson = require("./zkey_export_json.js"); export {default as bellmanContribute} from "./zkey_bellman_contribute.js";
module.exports.utils = require("./zkey_utils.js"); export {default as exportVerificationKey} from "./zkey_export_verificationkey.js";
module.exports.challangeContribute = require("./zkey_challangecontribute.js"); export {default as exportSolidityVerifier} from "./zkey_export_solidityverifier.js";
module.exports.exportVerificationKey = require("./zkey_export_verificationkey.js");
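With the CommonJS assignments gone, consumers pull named functions from the barrel instead of reading properties off a `module.exports` object. A sketch, assuming the barrel is `./zkey.js`:

```js
// Namespace import of the new barrel; every phase2 command is a named export.
import * as zkey from "./zkey.js";

// e.g. exporting MPC params for third-party (bellman) contributions:
await zkey.exportBellman("circuit_0002.zkey", "challange_phase2_0003", console);
```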

View File

@@ -1,31 +1,31 @@
const binFileUtils = require("./binfileutils"); import * as binFileUtils from "./binfileutils.js";
const zkeyUtils = require("./zkey_utils"); import * as zkeyUtils from "./zkey_utils.js";
const getCurve = require("./curves").getCurveFromQ; import { getCurveFromQ as getCurve } from "./curves.js";
const misc = require("./misc"); import * as misc from "./misc.js";
const Blake2b = require("blake2b-wasm"); import Blake2b from "blake2b-wasm";
const utils = require("./zkey_utils"); import * as utils from "./zkey_utils.js";
const hashToG2 = require("./keypair").hashToG2; import { hashToG2 } from "./keypair.js";
const {applyKeyToSection} = require("./mpc_applykey"); import { applyKeyToSection } from "./mpc_applykey.js";
module.exports = async function beacon(zkeyNameOld, zkeyNameNew, name, numIterationsExp, beaconHashStr, verbose) { export default async function beacon(zkeyNameOld, zkeyNameNew, name, beaconHashStr, numIterationsExp, logger) {
await Blake2b.ready(); await Blake2b.ready();
const beaconHash = misc.hex2ByteArray(beaconHashStr); const beaconHash = misc.hex2ByteArray(beaconHashStr);
if ( (beaconHash.byteLength == 0) if ( (beaconHash.byteLength == 0)
|| (beaconHash.byteLength*2 !=beaconHashStr.length)) || (beaconHash.byteLength*2 !=beaconHashStr.length))
{ {
console.log("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)"); if (logger) logger.error("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
return false; return false;
} }
if (beaconHash.length>=256) { if (beaconHash.length>=256) {
console.log("Maximum lenght of beacon hash is 255 bytes"); if (logger) logger.error("Maximum lenght of beacon hash is 255 bytes");
return false; return false;
} }
numIterationsExp = parseInt(numIterationsExp); numIterationsExp = parseInt(numIterationsExp);
if ((numIterationsExp<10)||(numIterationsExp>63)) { if ((numIterationsExp<10)||(numIterationsExp>63)) {
console.log("Invalid numIterationsExp. (Must be between 10 and 63)"); if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)");
return false; return false;
} }
@@ -89,8 +89,8 @@ module.exports = async function beacon(zkeyNameOld, zkeyNameNew, name, numIterat
await binFileUtils.copySection(fdOld, sections, fdNew, 7); await binFileUtils.copySection(fdOld, sections, fdNew, 7);
const invDelta = curve.Fr.inv(curContribution.delta.prvKey); const invDelta = curve.Fr.inv(curContribution.delta.prvKey);
await applyKeyToSection(fdOld, sections, fdNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", verbose); await applyKeyToSection(fdOld, sections, fdNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", logger);
await applyKeyToSection(fdOld, sections, fdNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", verbose); await applyKeyToSection(fdOld, sections, fdNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", logger);
await zkeyUtils.writeMPCParams(fdNew, curve, mpcParams); await zkeyUtils.writeMPCParams(fdNew, curve, mpcParams);
@@ -102,8 +102,7 @@ module.exports = async function beacon(zkeyNameOld, zkeyNameNew, name, numIterat
const contribuionHash = contributionHasher.digest(); const contribuionHash = contributionHasher.digest();
console.log("Contribution Hash: "); if (logger) logger.info(misc.formatHash(contribuionHash, "Contribution Hash: "));
console.log(misc.formatHash(contribuionHash));
return true; return contribuionHash;
}; }
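Two call-site changes are easy to miss here: the argument order now puts `beaconHashStr` before `numIterationsExp`, and the promise resolves to the contribution hash rather than `true`. A hedged sketch (file names and beacon value hypothetical):

```js
import beacon from "./zkey_beacon.js";

// Note the swapped order: hash string first, then the iteration exponent.
const contributionHash = await beacon(
    "circuit_0003.zkey", "circuit_final.zkey", "Final Beacon phase2",
    "0123456789abcdef0123456789abcdef", 10, console);
if (contributionHash === false) throw new Error("beacon failed");
```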

View File

@@ -16,15 +16,15 @@
// G2*tp*alpha (compressed) // G2*tp*alpha (compressed)
// G2*up*beta (compressed) // G2*up*beta (compressed)
const fastFile = require("fastfile"); import * as fastFile from "fastfile";
const Blake2b = require("blake2b-wasm"); import Blake2b from "blake2b-wasm";
const utils = require("./zkey_utils"); import * as utils from "./zkey_utils.js";
const misc = require("./misc"); import * as misc from "./misc.js";
const { applyKeyToChallangeSection } = require("./mpc_applykey"); import { applyKeyToChallangeSection } from "./mpc_applykey.js";
const {hashPubKey} = require("./zkey_utils"); import { hashPubKey } from "./zkey_utils.js";
const hashToG2 = require("./keypair").hashToG2; import { hashToG2 } from "./keypair.js";
async function challangeContribute(curve, challangeFilename, responesFileName, entropy, verbose) { export default async function bellmanContribute(curve, challangeFilename, responesFileName, entropy, logger) {
await Blake2b.ready(); await Blake2b.ready();
const rng = await misc.getRandomRng(entropy); const rng = await misc.getRandomRng(entropy);
@@ -58,12 +58,12 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
// H // H
const nH = await fdFrom.readUBE32(); const nH = await fdFrom.readUBE32();
await fdTo.writeUBE32(nH); await fdTo.writeUBE32(nH);
await applyKeyToChallangeSection(fdFrom, fdTo, null, curve, "G1", nH, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "H", verbose); await applyKeyToChallangeSection(fdFrom, fdTo, null, curve, "G1", nH, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "H", logger);
// L // L
const nL = await fdFrom.readUBE32(); const nL = await fdFrom.readUBE32();
await fdTo.writeUBE32(nL); await fdTo.writeUBE32(nL);
await applyKeyToChallangeSection(fdFrom, fdTo, null, curve, "G1", nL, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "L", verbose); await applyKeyToChallangeSection(fdFrom, fdTo, null, curve, "G1", nL, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "L", logger);
// A // A
const nA = await fdFrom.readUBE32(); const nA = await fdFrom.readUBE32();
@@ -138,12 +138,15 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
const contributionHasher = Blake2b(64); const contributionHasher = Blake2b(64);
hashPubKey(contributionHasher, curve, curContribution); hashPubKey(contributionHasher, curve, curContribution);
console.log("Contribution Hash: "); const contributionHash = contributionHasher.digest();
console.log(misc.formatHash(contributionHasher.digest()));
if (logger) logger.info(misc.formatHash(contributionHash, "Contribution Hash: "));
await fdTo.close(); await fdTo.close();
await fdFrom.close(); await fdFrom.close();
return contributionHash;
async function copy(nBytes) { async function copy(nBytes) {
const CHUNK_SIZE = fdFrom.pageSize*2; const CHUNK_SIZE = fdFrom.pageSize*2;
for (let i=0; i<nBytes; i+= CHUNK_SIZE) { for (let i=0; i<nBytes; i+= CHUNK_SIZE) {
@@ -177,5 +180,3 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
} }
module.exports = challangeContribute;
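The function is renamed to `bellmanContribute` and becomes the default export; like the other refactored commands it now resolves to the contribution hash. A sketch, with the `curve` object assumed to come from the curves helper used elsewhere in this commit:

```js
import bellmanContribute from "./zkey_bellman_contribute.js";

// Hypothetical wrapper; file names follow the challange/response convention.
async function thirdPartyContribution(curve) {
    return await bellmanContribute(curve,
        "challange_phase2_0003", "response_phase2_0003",
        "some entropy", console);
}
```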

View File

@@ -1,14 +1,14 @@
const binFileUtils = require("./binfileutils"); import * as binFileUtils from "./binfileutils.js";
const zkeyUtils = require("./zkey_utils"); import * as zkeyUtils from "./zkey_utils.js";
const getCurve = require("./curves").getCurveFromQ; import { getCurveFromQ as getCurve } from "./curves.js";
const misc = require("./misc"); import * as misc from "./misc.js";
const Blake2b = require("blake2b-wasm"); import Blake2b from "blake2b-wasm";
const utils = require("./zkey_utils"); import * as utils from "./zkey_utils.js";
const hashToG2 = require("./keypair").hashToG2; import { hashToG2 } from "./keypair.js";
const {applyKeyToSection} = require("./mpc_applykey"); import { applyKeyToSection } from "./mpc_applykey.js";
module.exports = async function phase2contribute(zkeyNameOld, zkeyNameNew, name, entropy, verbose) { export default async function phase2contribute(zkeyNameOld, zkeyNameNew, name, entropy, logger) {
await Blake2b.ready(); await Blake2b.ready();
const {fd: fdOld, sections: sections} = await binFileUtils.readBinFile(zkeyNameOld, "zkey", 2); const {fd: fdOld, sections: sections} = await binFileUtils.readBinFile(zkeyNameOld, "zkey", 2);
@@ -68,8 +68,8 @@ module.exports = async function phase2contribute(zkeyNameOld, zkeyNameNew, name
await binFileUtils.copySection(fdOld, sections, fdNew, 7); await binFileUtils.copySection(fdOld, sections, fdNew, 7);
const invDelta = curve.Fr.inv(curContribution.delta.prvKey); const invDelta = curve.Fr.inv(curContribution.delta.prvKey);
await applyKeyToSection(fdOld, sections, fdNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", verbose); await applyKeyToSection(fdOld, sections, fdNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", logger);
await applyKeyToSection(fdOld, sections, fdNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", verbose); await applyKeyToSection(fdOld, sections, fdNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", logger);
await zkeyUtils.writeMPCParams(fdNew, curve, mpcParams); await zkeyUtils.writeMPCParams(fdNew, curve, mpcParams);
@@ -81,8 +81,7 @@ module.exports = async function phase2contribute(zkeyNameOld, zkeyNameNew, name
const contribuionHash = contributionHasher.digest(); const contribuionHash = contributionHasher.digest();
console.log("Contribution Hash: "); if (logger) logger.info(misc.formatHash(contribuionHash, "Contribution Hash: "));
console.log(misc.formatHash(contribuionHash));
return true; return contribuionHash;
}; }
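Every refactored command now takes an optional `logger` in place of the old `verbose` flag. Anything with `debug`, `info`, and `error` methods works, and `undefined` silences output entirely; a minimal sketch:

```js
import phase2contribute from "./zkey_contribute.js";

// A bare-bones logger; any console-like object is all these functions need.
const logger = {
    debug: (m) => console.log("DEBUG:", m),
    info:  (m) => console.log("INFO: ", m),
    error: (m) => console.error("ERROR:", m),
};

const hash = await phase2contribute("circuit_0001.zkey", "circuit_0002.zkey",
    "Second contribution Name", "more entropy", logger);
```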

View File

@@ -1,10 +1,10 @@
const binFileUtils = require("./binfileutils"); import * as binFileUtils from "./binfileutils.js";
const zkeyUtils = require("./zkey_utils"); import * as zkeyUtils from "./zkey_utils.js";
const fastFile = require("fastfile"); import * as fastFile from "fastfile";
const getCurve = require("./curves").getCurveFromQ; import { getCurveFromQ as getCurve } from "./curves.js";
module.exports = async function phase2exportMPCParams(zkeyName, mpcparamsName, verbose) { export default async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils.readBinFile(zkeyName, "zkey", 2); const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils.readBinFile(zkeyName, "zkey", 2);
const zkey = await zkeyUtils.readHeader(fdZKey, sectionsZKey, "groth16"); const zkey = await zkeyUtils.readHeader(fdZKey, sectionsZKey, "groth16");
@@ -40,8 +40,8 @@ module.exports = async function phase2exportMPCParams(zkeyName, mpcparamsName,
const buffBasesH_Lodd = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 9); const buffBasesH_Lodd = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 9);
let buffBasesH_Tau; let buffBasesH_Tau;
buffBasesH_Tau = await curve.G1.fft(buffBasesH_Lodd, "affine", "jacobian", verbose ? console.log : undefined); buffBasesH_Tau = await curve.G1.fft(buffBasesH_Lodd, "affine", "jacobian", logger);
buffBasesH_Tau = await curve.G1.batchApplyKey(buffBasesH_Tau, curve.Fr.neg(curve.Fr.e(2)), curve.Fr.w[zkey.power+1], "jacobian", "affine", verbose ? console.log : undefined); buffBasesH_Tau = await curve.G1.batchApplyKey(buffBasesH_Tau, curve.Fr.neg(curve.Fr.e(2)), curve.Fr.w[zkey.power+1], "jacobian", "affine", logger);
// Remove last element. (The degree of H will be always m-2) // Remove last element. (The degree of H will be always m-2)
buffBasesH_Tau = buffBasesH_Tau.slice(0, buffBasesH_Tau.byteLength - sG1); buffBasesH_Tau = buffBasesH_Tau.slice(0, buffBasesH_Tau.byteLength - sG1);

View File

@@ -1,12 +1,8 @@
const {stringifyBigInts} = require("ffjavascript").utils; import { readZKey as readZKey } from "./zkey_utils.js";
const fs = require("fs");
const readZKey = require("./zkey_utils").read;
module.exports = zkeyExportJson;
async function zkeyExportJson(zkeyFileName, jsonFileName, verbose) { export default async function zkeyExportJson(zkeyFileName, verbose) {
const zKey = await readZKey(zkeyFileName); const zKey = await readZKey(zkeyFileName);
const S = JSON.stringify(stringifyBigInts(zKey), null, 1); return zKey;
await fs.promises.writeFile(jsonFileName, S);
} }
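Serialization moves out of `zkeyExportJson`: it now resolves to the zkey object and the caller decides how to persist it. A sketch roughly recreating the removed `fs`/`stringifyBigInts` code at a hypothetical call site:

```js
import fs from "fs";
import { utils } from "ffjavascript";
import zkeyExportJson from "./zkey_export_json.js";
const { stringifyBigInts } = utils;

const zKey = await zkeyExportJson("circuit_final.zkey");
await fs.promises.writeFile("circuit_final.zkey.json",
    JSON.stringify(stringifyBigInts(zKey), null, 1));
```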

View File

@@ -0,0 +1,56 @@
import path from "path";
const moduleURL = new URL(import.meta.url);
const __dirname = path.dirname(moduleURL.pathname);
import * as fastFile from "fastfile";
import exportVerificationKey from "./zkey_export_verificationkey.js";
// Not ready yet
// module.exports.generateVerifier_kimleeoh = generateVerifier_kimleeoh;
export default async function exportSolidityVerifier(zKeyName, templateName, logger) {
const verificationKey = await exportVerificationKey(zKeyName, logger);
const fd = await fastFile.readExisting(templateName);
const buff = await fd.read(fd.totalSize);
let template = new TextDecoder("utf-8").decode(buff);
const vkalpha1_str = `${verificationKey.vk_alpha_1[0].toString()},`+
`${verificationKey.vk_alpha_1[1].toString()}`;
template = template.replace("<%vk_alpha1%>", vkalpha1_str);
const vkbeta2_str = `[${verificationKey.vk_beta_2[0][1].toString()},`+
`${verificationKey.vk_beta_2[0][0].toString()}], `+
`[${verificationKey.vk_beta_2[1][1].toString()},` +
`${verificationKey.vk_beta_2[1][0].toString()}]`;
template = template.replace("<%vk_beta2%>", vkbeta2_str);
const vkgamma2_str = `[${verificationKey.vk_gamma_2[0][1].toString()},`+
`${verificationKey.vk_gamma_2[0][0].toString()}], `+
`[${verificationKey.vk_gamma_2[1][1].toString()},` +
`${verificationKey.vk_gamma_2[1][0].toString()}]`;
template = template.replace("<%vk_gamma2%>", vkgamma2_str);
const vkdelta2_str = `[${verificationKey.vk_delta_2[0][1].toString()},`+
`${verificationKey.vk_delta_2[0][0].toString()}], `+
`[${verificationKey.vk_delta_2[1][1].toString()},` +
`${verificationKey.vk_delta_2[1][0].toString()}]`;
template = template.replace("<%vk_delta2%>", vkdelta2_str);
// The points
template = template.replace("<%vk_input_length%>", (verificationKey.IC.length-1).toString());
template = template.replace("<%vk_ic_length%>", verificationKey.IC.length.toString());
let vi = "";
for (let i=0; i<verificationKey.IC.length; i++) {
if (vi != "") vi = vi + " ";
vi = vi + `vk.IC[${i}] = Pairing.G1Point(${verificationKey.IC[i][0].toString()},`+
`${verificationKey.IC[i][1].toString()});\n`;
}
template = template.replace("<%vk_ic_pts%>", vi);
return template;
}
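The new module returns the rendered Solidity source instead of writing it anywhere. A sketch of a hypothetical caller that persists it (template file name assumed):

```js
import fs from "fs";
import exportSolidityVerifier from "./zkey_export_solidityverifier.js";

const solidityCode = await exportSolidityVerifier(
    "circuit_final.zkey", "verifier_groth16.sol.ejs", console);
await fs.promises.writeFile("verifier.sol", solidityCode, "utf-8");
```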

View File

@@ -1,10 +1,11 @@
const binFileUtils = require("./binfileutils"); import * as binFileUtils from "./binfileutils.js";
const zkeyUtils = require("./zkey_utils"); import * as zkeyUtils from "./zkey_utils.js";
const getCurve = require("./curves").getCurveFromQ; import { getCurveFromQ as getCurve } from "./curves.js";
const {stringifyBigInts} = require("ffjavascript").utils; import { utils } from "ffjavascript";
const fs = require("fs"); const {stringifyBigInts} = utils;
module.exports = async function zkeyExportVerificationKey(zkeyName, verificationKeyName) {
export default async function zkeyExportVerificationKey(zkeyName, logger) {
const {fd, sections} = await binFileUtils.readBinFile(zkeyName, "zkey", 2); const {fd, sections} = await binFileUtils.readBinFile(zkeyName, "zkey", 2);
const zkey = await zkeyUtils.readHeader(fd, sections, "groth16"); const zkey = await zkeyUtils.readHeader(fd, sections, "groth16");
@@ -14,7 +15,7 @@ module.exports = async function zkeyExportVerificationKey(zkeyName, verificatio
const alphaBeta = await curve.pairing( zkey.vk_alpha_1 , zkey.vk_beta_2 ); const alphaBeta = await curve.pairing( zkey.vk_alpha_1 , zkey.vk_beta_2 );
const vKey = { let vKey = {
protocol: zkey.protocol, protocol: zkey.protocol,
curve: curve.name, curve: curve.name,
nPublic: zkey.nPublic, nPublic: zkey.nPublic,
@@ -39,5 +40,9 @@ module.exports = async function zkeyExportVerificationKey(zkeyName, verificatio
} }
await binFileUtils.endReadSection(fd); await binFileUtils.endReadSection(fd);
await fs.promises.writeFile(verificationKeyName, JSON.stringify(stringifyBigInts(vKey), null, 1), "utf-8"); vKey = stringifyBigInts(vKey);
};
await fd.close();
return vKey;
}
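Here too the file write moves to the caller, but note that `stringifyBigInts` is applied before returning, so the result can be fed to `JSON.stringify` directly. A sketch (output path hypothetical):

```js
import fs from "fs";
import zkeyExportVerificationKey from "./zkey_export_verificationkey.js";

const vKey = await zkeyExportVerificationKey("circuit_final.zkey");
// Big ints are already strings at this point.
await fs.promises.writeFile("verification_key.json",
    JSON.stringify(vKey, null, 1), "utf-8");
```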

View File

@@ -1,10 +1,10 @@
const zkeyUtils = require("./zkey_utils"); import * as zkeyUtils from "./zkey_utils.js";
const binFileUtils = require("./binfileutils"); import * as binFileUtils from "./binfileutils.js";
const fastFile = require("fastfile"); import * as fastFile from "fastfile";
const getCurve = require("./curves").getCurveFromQ; import { getCurveFromQ as getCurve } from "./curves.js";
const misc = require("./misc"); import * as misc from "./misc.js";
module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, verbose) { export default async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, name, logger) {
const {fd: fdZKeyOld, sections: sectionsZKeyOld} = await binFileUtils.readBinFile(zkeyNameOld, "zkey", 2); const {fd: fdZKeyOld, sections: sectionsZKeyOld} = await binFileUtils.readBinFile(zkeyNameOld, "zkey", 2);
const zkeyHeader = await zkeyUtils.readHeader(fdZKeyOld, sectionsZKeyOld, "groth16"); const zkeyHeader = await zkeyUtils.readHeader(fdZKeyOld, sectionsZKeyOld, "groth16");
@@ -52,23 +52,30 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
} }
if (!misc.hashIsEqual(newMPCParams.csHash, oldMPCParams.csHash)) { if (!misc.hashIsEqual(newMPCParams.csHash, oldMPCParams.csHash)) {
console.log("Hash of the original circuit does not match with the MPC one"); if (logger) logger.error("Hash of the original circuit does not match with the MPC one");
return false; return false;
} }
if (oldMPCParams.contributions.length > newMPCParams.contributions.length) { if (oldMPCParams.contributions.length > newMPCParams.contributions.length) {
console.log("The impoerted file does not include new contributions"); if (logger) logger.error("The impoerted file does not include new contributions");
return false; return false;
} }
for (let i=0; i<oldMPCParams.contributions.length; i++) { for (let i=0; i<oldMPCParams.contributions.length; i++) {
if (!contributionIsEqual(oldMPCParams.contributions[i], newMPCParams.contributions[i])) { if (!contributionIsEqual(oldMPCParams.contributions[i], newMPCParams.contributions[i])) {
console.log(`Previous contribution ${i} does not match`); if (logger) logger.error(`Previous contribution ${i} does not match`);
return false; return false;
} }
} }
// Set the same name for all new contributions
if (name) {
for (let i=oldMPCParams.contributions.length; i<newMPCParams.contributions.length; i++) {
newMPCParams.contributions[i].name = name;
}
}
const fdZKeyNew = await binFileUtils.createBinFile(zkeyNameNew, "zkey", 1, 10); const fdZKeyNew = await binFileUtils.createBinFile(zkeyNameNew, "zkey", 1, 10);
fdMPCParams.pos = 0; fdMPCParams.pos = 0;
@@ -84,7 +91,7 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
// IC (Keep original) // IC (Keep original)
const nIC = await fdMPCParams.readUBE32(); const nIC = await fdMPCParams.readUBE32();
if (nIC != zkeyHeader.nPublic +1) { if (nIC != zkeyHeader.nPublic +1) {
console.log("Invalid number of points in IC"); if (logger) logger.error("Invalid number of points in IC");
await fdZKeyNew.discard(); await fdZKeyNew.discard();
return false; return false;
} }
@@ -97,7 +104,7 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
// H Section // H Section
const nH = await fdMPCParams.readUBE32(); const nH = await fdMPCParams.readUBE32();
if (nH != zkeyHeader.domainSize-1) { if (nH != zkeyHeader.domainSize-1) {
console.log("Invalid number of points in H"); if (logger) logger.error("Invalid number of points in H");
await fdZKeyNew.discard(); await fdZKeyNew.discard();
return false; return false;
} }
@@ -108,8 +115,8 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
buffH.set(buffTauLEM); // Leave the last element at zero. buffH.set(buffTauLEM); // Leave the last element at zero.
const n2Inv = curve.Fr.neg(curve.Fr.inv(curve.Fr.e(2))); const n2Inv = curve.Fr.neg(curve.Fr.inv(curve.Fr.e(2)));
const wInv = curve.Fr.inv(curve.Fr.w[zkeyHeader.power+1]); const wInv = curve.Fr.inv(curve.Fr.w[zkeyHeader.power+1]);
buffH = await curve.G1.batchApplyKey(buffH, n2Inv, wInv, "affine", "jacobian", verbose ? console.log : undefined); buffH = await curve.G1.batchApplyKey(buffH, n2Inv, wInv, "affine", "jacobian", logger);
buffH = await curve.G1.ifft(buffH, "jacobian", "affine", verbose ? console.log : undefined); buffH = await curve.G1.ifft(buffH, "jacobian", "affine", logger);
await binFileUtils.startWriteSection(fdZKeyNew, 9); await binFileUtils.startWriteSection(fdZKeyNew, 9);
await fdZKeyNew.write(buffH); await fdZKeyNew.write(buffH);
await binFileUtils.endWriteSection(fdZKeyNew); await binFileUtils.endWriteSection(fdZKeyNew);
@@ -117,7 +124,7 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
// C Section (L section) // C Section (L section)
const nL = await fdMPCParams.readUBE32(); const nL = await fdMPCParams.readUBE32();
if (nL != (zkeyHeader.nVars-zkeyHeader.nPublic-1)) { if (nL != (zkeyHeader.nVars-zkeyHeader.nPublic-1)) {
console.log("Invalid number of points in L"); if (logger) logger.error("Invalid number of points in L");
await fdZKeyNew.discard(); await fdZKeyNew.discard();
return false; return false;
} }
@@ -131,7 +138,7 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
// A Section // A Section
const nA = await fdMPCParams.readUBE32(); const nA = await fdMPCParams.readUBE32();
if (nA != zkeyHeader.nVars) { if (nA != zkeyHeader.nVars) {
console.log("Invalid number of points in A"); if (logger) logger.error("Invalid number of points in A");
await fdZKeyNew.discard(); await fdZKeyNew.discard();
return false; return false;
} }
@@ -141,7 +148,7 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
// B1 Section // B1 Section
const nB1 = await fdMPCParams.readUBE32(); const nB1 = await fdMPCParams.readUBE32();
if (nB1 != zkeyHeader.nVars) { if (nB1 != zkeyHeader.nVars) {
console.log("Invalid number of points in B1"); if (logger) logger.error("Invalid number of points in B1");
await fdZKeyNew.discard(); await fdZKeyNew.discard();
return false; return false;
} }
@@ -151,7 +158,7 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
// B2 Section // B2 Section
const nB2 = await fdMPCParams.readUBE32(); const nB2 = await fdMPCParams.readUBE32();
if (nB2 != zkeyHeader.nVars) { if (nB2 != zkeyHeader.nVars) {
console.log("Invalid number of points in B2"); if (logger) logger.error("Invalid number of points in B2");
await fdZKeyNew.discard(); await fdZKeyNew.discard();
return false; return false;
} }
@@ -187,5 +194,5 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
} }
}; }
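The import gains a `name` argument that is stamped onto every contribution added by the response file. A sketch (file names hypothetical):

```js
import phase2importMPCParams from "./zkey_import_bellman.js";

const ok = await phase2importMPCParams(
    "circuit_0002.zkey", "response_phase2_0003", "circuit_0003.zkey",
    "Third contribution name", console);
if (!ok) throw new Error("MPC params import failed");
```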

View File

@@ -1,20 +1,18 @@
const r1csFile = require("r1csfile"); import {loadHeader as loadR1csHeader} from "r1csfile";
const utils = require("./powersoftau_utils"); import * as utils from "./powersoftau_utils.js";
const binFileUtils = require("./binfileutils"); import * as binFileUtils from "./binfileutils.js";
const assert = require("assert"); import { log2, formatHash } from "./misc.js";
const {log2} = require("./misc"); import { Scalar } from "ffjavascript";
const Scalar = require("ffjavascript").Scalar; import Blake2b from "blake2b-wasm";
const Blake2b = require("blake2b-wasm");
const misc = require("./misc");
module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose) { export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
await Blake2b.ready(); await Blake2b.ready();
const csHasher = Blake2b(64); const csHasher = Blake2b(64);
const {fd: fdR1cs, sections: sectionsR1cs} = await binFileUtils.readBinFile(r1csName, "r1cs", 1); const {fd: fdR1cs, sections: sectionsR1cs} = await binFileUtils.readBinFile(r1csName, "r1cs", 1);
const r1cs = await r1csFile.loadHeader(fdR1cs, sectionsR1cs); const r1cs = await loadR1csHeader(fdR1cs, sectionsR1cs);
const {fd: fdPTau, sections: sectionsPTau} = await binFileUtils.readBinFile(ptauName, "ptau", 1); const {fd: fdPTau, sections: sectionsPTau} = await binFileUtils.readBinFile(ptauName, "ptau", 1);
const {curve, power} = await utils.readPTauHeader(fdPTau, sectionsPTau); const {curve, power} = await utils.readPTauHeader(fdPTau, sectionsPTau);
@@ -25,19 +23,19 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
const sG2 = curve.G2.F.n8*2; const sG2 = curve.G2.F.n8*2;
if (r1cs.prime != curve.r) { if (r1cs.prime != curve.r) {
console.log("r1cs curve does not match powers of tau ceremony curve"); if (logger) logger.error("r1cs curve does not match powers of tau ceremony curve");
return -1; return -1;
} }
const cirPower = log2(r1cs.nConstraints + r1cs.nPubInputs + r1cs.nOutputs +1 -1) +1; const cirPower = log2(r1cs.nConstraints + r1cs.nPubInputs + r1cs.nOutputs +1 -1) +1;
if (cirPower > power) { if (cirPower > power) {
console.log(`circuit too big for this power of tau ceremony. ${r1cs.nConstraints} > 2**${power}`); if (logger) logger.error(`circuit too big for this power of tau ceremony. ${r1cs.nConstraints} > 2**${power}`);
return -1; return -1;
} }
if (!sectionsPTau[12]) { if (!sectionsPTau[12]) {
console.log("Powers of tau is not prepared."); if (logger) logger.error("Powers of tau is not prepared.");
return -1; return -1;
} }
@@ -124,7 +122,7 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
let nCoefs = 0; let nCoefs = 0;
fdZKey.pos += 4; fdZKey.pos += 4;
for (let c=0; c<r1cs.nConstraints; c++) { for (let c=0; c<r1cs.nConstraints; c++) {
if (verbose && (c%1000 == 0) && (c >0)) console.log(`${c}/${r1cs.nConstraints}`); if ((logger) && (c%10000 == 0)) logger.debug(`processing constraints: ${c}/${r1cs.nConstraints}`);
const nA = await fdR1cs.readULE32(); const nA = await fdR1cs.readULE32();
for (let i=0; i<nA; i++) { for (let i=0; i<nA; i++) {
const s = await fdR1cs.readULE32(); const s = await fdR1cs.readULE32();
@@ -248,15 +246,14 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
await fdZKey.writeULE32(0); await fdZKey.writeULE32(0);
await binFileUtils.endWriteSection(fdZKey); await binFileUtils.endWriteSection(fdZKey);
console.log("Circuit hash: "); if (logger) logger.info(formatHash(csHash, "Circuit hash: "));
console.log(misc.formatHash(csHash));
await fdZKey.close(); await fdZKey.close();
await fdPTau.close(); await fdPTau.close();
await fdR1cs.close(); await fdR1cs.close();
return 0; return csHash;
async function writeFr2(buff) { async function writeFr2(buff) {
const n = curve.Fr.fromRprLE(buff, 0); const n = curve.Fr.fromRprLE(buff, 0);
@@ -274,7 +271,7 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
await binFileUtils.startWriteSection(fdZKey, idSection); await binFileUtils.startWriteSection(fdZKey, idSection);
for (let i=0; i<arr.length; i+= CHUNK_SIZE) { for (let i=0; i<arr.length; i+= CHUNK_SIZE) {
if (verbose) console.log(`${sectionName}: ${i}/${arr.length}`); if (logger) logger.debug(`Writing points ${sectionName}: ${i}/${arr.length}`);
const n = Math.min(arr.length -i, CHUNK_SIZE); const n = Math.min(arr.length -i, CHUNK_SIZE);
const subArr = arr.slice(i, i + n); const subArr = arr.slice(i, i + n);
await composeAndWritePointsChunk(groupName, subArr); await composeAndWritePointsChunk(groupName, subArr);
@@ -326,7 +323,7 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
fnBatchToAffine = "g2m_batchToAffine"; fnBatchToAffine = "g2m_batchToAffine";
fnZero = "g2m_zero"; fnZero = "g2m_zero";
} else { } else {
assert(false); throw new Error("Invalid group");
} }
let acc =0; let acc =0;
for (let i=0; i<arr.length; i++) acc += arr[i] ? arr[i].length : 0; for (let i=0; i<arr.length; i++) acc += arr[i] ? arr[i].length : 0;
@@ -393,14 +390,14 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
async function hashHPoints() { async function hashHPoints() {
const CHUNK_SIZE = 1<<16; const CHUNK_SIZE = 1<<14;
hashU32(domainSize-1); hashU32(domainSize-1);
for (let i=0; i<domainSize-1; i+= CHUNK_SIZE) { for (let i=0; i<domainSize-1; i+= CHUNK_SIZE) {
if (verbose) console.log(`HashingHPoints: ${i}/${domainSize}`); if (logger) logger.debug(`HashingHPoints: ${i}/${domainSize}`);
const n = Math.min(domainSize-1, CHUNK_SIZE); const n = Math.min(domainSize-1, CHUNK_SIZE);
await hashHPointsChunk(i*CHUNK_SIZE, n); await hashHPointsChunk(i, n);
} }
} }
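`newZKey` now resolves to the circuit hash on success while the error paths above still return -1, so a caller can both detect failure and keep the hash. A sketch using the `formatHash` helper imported above:

```js
import newZKey from "./zkey_new.js";
import { formatHash } from "./misc.js";

const csHash = await newZKey("circuit.r1cs", "pot12_final.ptau",
    "circuit_0000.zkey", console);
if (csHash === -1) throw new Error("zkey generation failed");
console.log(formatHash(csHash, "Circuit hash: "));
```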

View File

@@ -26,16 +26,13 @@
// PointsH(9) // PointsH(9)
// Contributions(10) // Contributions(10)
import { Scalar, F1Field } from "ffjavascript";
import * as binFileUtils from "./binfileutils.js";
const Scalar = require("ffjavascript").Scalar; import { getCurveFromQ as getCurve } from "./curves.js";
const F1Field = require("ffjavascript").F1Field; import { log2 } from "./misc.js";
const assert = require("assert");
const binFileUtils = require("./binfileutils");
const getCurve = require("./curves").getCurveFromQ; export async function writeHeader(fd, zkey) {
const {log2} = require("./misc");
async function writeHeader(fd, zkey) {
// Write the header // Write the header
/////////// ///////////
@@ -74,7 +71,7 @@ async function writeHeader(fd, zkey) {
} }
async function writeZKey(fileName, zkey) { export async function writeZKey(fileName, zkey) {
let curve = getCurve(zkey.q); let curve = getCurve(zkey.q);
@@ -187,7 +184,7 @@ async function readG2(fd, curve) {
async function readHeader(fd, sections, protocol) { export async function readHeader(fd, sections, protocol) {
if (protocol != "groth16") throw new Error("Protocol not supported: "+protocol); if (protocol != "groth16") throw new Error("Protocol not supported: "+protocol);
const zkey = {}; const zkey = {};
@@ -196,7 +193,7 @@ async function readHeader(fd, sections, protocol) {
///////////////////// /////////////////////
await binFileUtils.startReadUniqueSection(fd, sections, 1); await binFileUtils.startReadUniqueSection(fd, sections, 1);
const protocolId = await fd.readULE32(); const protocolId = await fd.readULE32();
if (protocolId != 1) assert("File is not groth"); if (protocolId != 1) throw new Error("File is not groth");
zkey.protocol = "groth16"; zkey.protocol = "groth16";
await binFileUtils.endReadSection(fd); await binFileUtils.endReadSection(fd);
@@ -229,7 +226,7 @@ async function readHeader(fd, sections, protocol) {
} }
async function readZKey(fileName) { export async function readZKey(fileName) {
const {fd, sections} = await binFileUtils.readBinFile(fileName, "zkey", 1); const {fd, sections} = await binFileUtils.readBinFile(fileName, "zkey", 1);
const zkey = await readHeader(fd, sections, "groth16"); const zkey = await readHeader(fd, sections, "groth16");
@@ -377,7 +374,7 @@ async function readContribution(fd, curve) {
} }
async function readMPCParams(fd, curve, sections) { export async function readMPCParams(fd, curve, sections) {
await binFileUtils.startReadUniqueSection(fd, sections, 10); await binFileUtils.startReadUniqueSection(fd, sections, 10);
const res = { contributions: []}; const res = { contributions: []};
res.csHash = await fd.read(64); res.csHash = await fd.read(64);
@@ -424,7 +421,7 @@ async function writeContribution(fd, curve, c) {
} }
async function writeMPCParams(fd, curve, mpcParams) { export async function writeMPCParams(fd, curve, mpcParams) {
await binFileUtils.startWriteSection(fd, 10); await binFileUtils.startWriteSection(fd, 10);
await fd.write(mpcParams.csHash); await fd.write(mpcParams.csHash);
await fd.writeULE32(mpcParams.contributions.length); await fd.writeULE32(mpcParams.contributions.length);
@@ -434,19 +431,19 @@ async function writeMPCParams(fd, curve, mpcParams) {
await binFileUtils.endWriteSection(fd); await binFileUtils.endWriteSection(fd);
} }
function hashG1(hasher, curve, p) { export function hashG1(hasher, curve, p) {
const buff = new Uint8Array(curve.G1.F.n8*2); const buff = new Uint8Array(curve.G1.F.n8*2);
curve.G1.toRprUncompressed(buff, 0, p); curve.G1.toRprUncompressed(buff, 0, p);
hasher.update(buff); hasher.update(buff);
} }
function hashG2(hasher,curve, p) { export function hashG2(hasher,curve, p) {
const buff = new Uint8Array(curve.G2.F.n8*2); const buff = new Uint8Array(curve.G2.F.n8*2);
curve.G2.toRprUncompressed(buff, 0, p); curve.G2.toRprUncompressed(buff, 0, p);
hasher.update(buff); hasher.update(buff);
} }
function hashPubKey(hasher, curve, c) { export function hashPubKey(hasher, curve, c) {
hashG1(hasher, curve, c.deltaAfter); hashG1(hasher, curve, c.deltaAfter);
hashG1(hasher, curve, c.delta.g1_s); hashG1(hasher, curve, c.delta.g1_s);
hashG1(hasher, curve, c.delta.g1_sx); hashG1(hasher, curve, c.delta.g1_sx);
@@ -454,13 +451,3 @@ function hashPubKey(hasher, curve, c) {
hasher.update(c.transcript); hasher.update(c.transcript);
} }
module.exports.readHeader = readHeader;
module.exports.writeHeader = writeHeader;
module.exports.read = readZKey;
module.exports.write = writeZKey;
module.exports.readMPCParams = readMPCParams;
module.exports.writeMPCParams = writeMPCParams;
module.exports.hashG1 = hashG1;
module.exports.hashG2 = hashG2;
module.exports.hashPubKey = hashPubKey;

View File

@@ -1,18 +1,18 @@
const binFileUtils = require("./binfileutils"); import * as binFileUtils from "./binfileutils.js";
const zkeyUtils = require("./zkey_utils"); import * as zkeyUtils from "./zkey_utils.js";
const getCurve = require("./curves").getCurveFromQ; import { getCurveFromQ as getCurve } from "./curves.js";
const Blake2b = require("blake2b-wasm"); import Blake2b from "blake2b-wasm";
const misc = require("./misc"); import * as misc from "./misc.js";
const Scalar = require("ffjavascript").Scalar; import { hashToG2 } from "./keypair.js";
const hashToG2 = require("./keypair").hashToG2;
const sameRatio = misc.sameRatio; const sameRatio = misc.sameRatio;
const crypto = require("crypto"); import crypto from "crypto";
const ChaCha = require("ffjavascript").ChaCha; import newZKey from "./zkey_new.js";
const newZKey = require("./zkey_new"); import {hashG1, hashPubKey} from "./zkey_utils.js";
const {hashG1, hashPubKey} = require("./zkey_utils"); import { Scalar, ChaCha } from "ffjavascript";
module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFileName, verbose) {
export default async function phase2verify(r1csFileName, pTauFileName, zkeyFileName, logger) {
let sr; let sr;
await Blake2b.ready(); await Blake2b.ready();
@@ -93,7 +93,7 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
||(zkeyInit.n8q != zkey.n8q) ||(zkeyInit.n8q != zkey.n8q)
||(zkeyInit.n8r != zkey.n8r)) ||(zkeyInit.n8r != zkey.n8r))
{ {
console.log("INVALID: Different curves"); if (logger) logger.error("INVALID: Different curves");
return false; return false;
} }
@@ -101,110 +101,115 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
||(zkeyInit.nPublic != zkey.nPublic) ||(zkeyInit.nPublic != zkey.nPublic)
||(zkeyInit.domainSize != zkey.domainSize)) ||(zkeyInit.domainSize != zkey.domainSize))
{ {
console.log("INVALID: Different circuit parameters"); if (logger) logger.error("INVALID: Different circuit parameters");
return false; return false;
} }
if (!curve.G1.eq(zkey.vk_alpha_1, zkeyInit.vk_alpha_1)) { if (!curve.G1.eq(zkey.vk_alpha_1, zkeyInit.vk_alpha_1)) {
console.log("INVALID: Invalid alpha1"); if (logger) logger.error("INVALID: Invalid alpha1");
return false; return false;
} }
if (!curve.G1.eq(zkey.vk_beta_1, zkeyInit.vk_beta_1)) { if (!curve.G1.eq(zkey.vk_beta_1, zkeyInit.vk_beta_1)) {
console.log("INVALID: Invalid beta1"); if (logger) logger.error("INVALID: Invalid beta1");
return false; return false;
} }
if (!curve.G2.eq(zkey.vk_beta_2, zkeyInit.vk_beta_2)) { if (!curve.G2.eq(zkey.vk_beta_2, zkeyInit.vk_beta_2)) {
console.log("INVALID: Invalid beta2"); if (logger) logger.error("INVALID: Invalid beta2");
return false; return false;
} }
if (!curve.G2.eq(zkey.vk_gamma_2, zkeyInit.vk_gamma_2)) { if (!curve.G2.eq(zkey.vk_gamma_2, zkeyInit.vk_gamma_2)) {
console.log("INVALID: Invalid gamma2"); if (logger) logger.error("INVALID: Invalid gamma2");
return false; return false;
} }
if (!curve.G1.eq(zkey.vk_delta_1, curDelta)) { if (!curve.G1.eq(zkey.vk_delta_1, curDelta)) {
console.log("INVALID: Invalud delta1"); if (logger) logger.error("INVALID: Invalud delta1");
return false; return false;
} }
sr = await sameRatio(curve, curve.G1.g, curDelta, curve.G2.g, zkey.vk_delta_2); sr = await sameRatio(curve, curve.G1.g, curDelta, curve.G2.g, zkey.vk_delta_2);
if (sr !== true) { if (sr !== true) {
console.log("INVALID: Invalud delta2"); if (logger) logger.error("INVALID: Invalud delta2");
return false; return false;
} }
const mpcParamsInit = await zkeyUtils.readMPCParams(fdInit, curve, sectionsInit); const mpcParamsInit = await zkeyUtils.readMPCParams(fdInit, curve, sectionsInit);
if (!misc.hashIsEqual(mpcParams.csHash, mpcParamsInit.csHash)) { if (!misc.hashIsEqual(mpcParams.csHash, mpcParamsInit.csHash)) {
console.log("INVALID: Circuit does not match"); if (logger) logger.error("INVALID: Circuit does not match");
return false; return false;
} }
// Check sizes of sections // Check sizes of sections
if (sections[8][0].size != sG1*(zkey.nVars-zkey.nPublic-1)) { if (sections[8][0].size != sG1*(zkey.nVars-zkey.nPublic-1)) {
console.log("INVALID: Invalid L section size"); if (logger) logger.error("INVALID: Invalid L section size");
return false; return false;
} }
if (sections[9][0].size != sG1*(zkey.domainSize)) { if (sections[9][0].size != sG1*(zkey.domainSize)) {
console.log("INVALID: Invalid H section size"); if (logger) logger.error("INVALID: Invalid H section size");
return false; return false;
} }
let ss; let ss;
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 3); ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 3);
if (!ss) { if (!ss) {
console.log("INVALID: IC section is not identical"); if (logger) logger.error("INVALID: IC section is not identical");
return false; return false;
} }
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 4); ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 4);
if (!ss) { if (!ss) {
console.log("Coeffs section is not identical"); if (logger) logger.error("Coeffs section is not identical");
return false; return false;
} }
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 5); ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 5);
if (!ss) { if (!ss) {
console.log("A section is not identical"); if (logger) logger.error("A section is not identical");
return false; return false;
} }
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 6); ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 6);
if (!ss) { if (!ss) {
console.log("B1 section is not identical"); if (logger) logger.error("B1 section is not identical");
return false; return false;
} }
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 7); ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 7);
if (!ss) { if (!ss) {
console.log("B2 section is not identical"); if (logger) logger.error("B2 section is not identical");
return false; return false;
} }
// Check L // Check L
sr = await sectionHasSameRatio("G1", fdInit, sectionsInit, fd, sections, 8, zkey.vk_delta_2, zkeyInit.vk_delta_2, "L section"); sr = await sectionHasSameRatio("G1", fdInit, sectionsInit, fd, sections, 8, zkey.vk_delta_2, zkeyInit.vk_delta_2, "L section");
if (sr!==true) { if (sr!==true) {
console.log("L section does not match"); if (logger) logger.error("L section does not match");
return false; return false;
} }
// Check H // Check H
sr = await sameRatioH(); sr = await sameRatioH();
if (sr!==true) { if (sr!==true) {
console.log("H section does not match"); if (logger) logger.error("H section does not match");
return false; return false;
} }
if (logger) logger.info(misc.formatHash(mpcParams.csHash, "Circuit Hash: "));
await fd.close();
await fdInit.close();
for (let i=mpcParams.contributions.length-1; i>=0; i--) { for (let i=mpcParams.contributions.length-1; i>=0; i--) {
const c = mpcParams.contributions[i]; const c = mpcParams.contributions[i];
console.log("-------------------------"); if (logger) logger.info("-------------------------");
console.log(`contribution #${i+1}${c.name ? c.name : ""}:`); if (logger) logger.info(misc.formatHash(c.contributionHash, `contribution #${i+1} ${c.name ? c.name : ""}:`));
console.log(misc.formatHash(c.contributionHash));
if (c.type == 1) { if (c.type == 1) {
console.log(`Beacon generator: ${misc.byteArray2hex(c.beaconHash)}`); if (logger) logger.info(`Beacon generator: ${misc.byteArray2hex(c.beaconHash)}`);
console.log(`Beacon iterations Exp: ${c.numIterationsExp}`); if (logger) logger.info(`Beacon iterations Exp: ${c.numIterationsExp}`);
} }
} }
console.log("-------------------------"); if (logger) logger.info("-------------------------");
if (logger) logger.info("ZKey Ok!");
return true; return true;
@@ -222,7 +227,7 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
const nPoints = sections1[idSection][0].size / sG; const nPoints = sections1[idSection][0].size / sG;
for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) { for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log(`${sectionName}: ` + i); if (logger) logger.debug(`Same ratio check ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints - i, MAX_CHUNK_SIZE); const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
const bases1 = await fd1.read(n*sG); const bases1 = await fd1.read(n*sG);
const bases2 = await fd2.read(n*sG); const bases2 = await fd2.read(n*sG);
@@ -267,7 +272,7 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
let R1 = G.zero; let R1 = G.zero;
for (let i=0; i<zkey.domainSize; i += MAX_CHUNK_SIZE) { for (let i=0; i<zkey.domainSize; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log(`H Verification(tau): ${i}/${zkey.domainSize}`); if (logger) logger.debug(`H Verification(tau): ${i}/${zkey.domainSize}`);
const n = Math.min(zkey.domainSize - i, MAX_CHUNK_SIZE); const n = Math.min(zkey.domainSize - i, MAX_CHUNK_SIZE);
const buff1 = await fdPTau.read(sG*n, sectionsPTau[2][0].p + zkey.domainSize*sG + i*MAX_CHUNK_SIZE*sG); const buff1 = await fdPTau.read(sG*n, sectionsPTau[2][0].p + zkey.domainSize*sG + i*MAX_CHUNK_SIZE*sG);
@@ -295,7 +300,7 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
await binFileUtils.startReadUniqueSection(fd, sections, 9); await binFileUtils.startReadUniqueSection(fd, sections, 9);
let R2 = G.zero; let R2 = G.zero;
for (let i=0; i<zkey.domainSize; i += MAX_CHUNK_SIZE) { for (let i=0; i<zkey.domainSize; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log(`H Verification(lagrange): ${i}/${zkey.domainSize}`); if (logger) logger.debug(`H Verification(lagrange): ${i}/${zkey.domainSize}`);
const n = Math.min(zkey.domainSize - i, MAX_CHUNK_SIZE); const n = Math.min(zkey.domainSize - i, MAX_CHUNK_SIZE);
const buff = await fd.read(sG*n); const buff = await fd.read(sG*n);
@@ -309,6 +314,7 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
sr = await sameRatio(curve, R1, R2, zkey.vk_delta_2, zkeyInit.vk_delta_2); sr = await sameRatio(curve, R1, R2, zkey.vk_delta_2, zkeyInit.vk_delta_2);
if (sr !== true) return false; if (sr !== true) return false;
return true; return true;
} }
@@ -378,5 +384,5 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
return res; return res;
} }
}; }
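`phase2verify` still resolves to a boolean; the per-contribution report now goes through the logger instead of `console.log`. A sketch:

```js
import phase2verify from "./zkey_verify.js";

const ok = await phase2verify("circuit.r1cs", "pot12_final.ptau",
    "circuit_final.zkey", console);
if (!ok) throw new Error("zkey verification failed");
```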

View File

@@ -1,8 +0,0 @@
module.exports = {
groth16: {
prover: module.require("./zksnark_groth16_prover"),
verifier: module.require("./zksnark_groth16_verifier")
}
};

View File

@@ -5,11 +5,13 @@
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// //
// 2019 OKIMS // 2019 OKIMS
// ported to solidity 0.5 // ported to solidity 0.6
// fixed linter warnings // fixed linter warnings
// added require error messages // added require error messages
// //
pragma solidity ^0.5.0; //
// SPDX-License-Identifier: GPL-3.0
pragma solidity ^0.6.11;
library Pairing { library Pairing {
struct G1Point { struct G1Point {
uint X; uint X;
@@ -44,15 +46,15 @@ library Pairing {
); );
*/ */
} }
/// @return the negation of p, i.e. p.addition(p.negate()) should be zero. /// @return r the negation of p, i.e. p.addition(p.negate()) should be zero.
function negate(G1Point memory p) internal pure returns (G1Point memory) { function negate(G1Point memory p) internal pure returns (G1Point memory r) {
// The prime q in the base field F_q for G1 // The prime q in the base field F_q for G1
uint q = 21888242871839275222246405745257275088696311157297823662689037894645226208583; uint q = 21888242871839275222246405745257275088696311157297823662689037894645226208583;
if (p.X == 0 && p.Y == 0) if (p.X == 0 && p.Y == 0)
return G1Point(0, 0); return G1Point(0, 0);
return G1Point(p.X, q - (p.Y % q)); return G1Point(p.X, q - (p.Y % q));
} }
/// @return the sum of two points of G1 /// @return r the sum of two points of G1
function addition(G1Point memory p1, G1Point memory p2) internal view returns (G1Point memory r) { function addition(G1Point memory p1, G1Point memory p2) internal view returns (G1Point memory r) {
uint[4] memory input; uint[4] memory input;
input[0] = p1.X; input[0] = p1.X;
@@ -62,13 +64,13 @@ library Pairing {
bool success; bool success;
// solium-disable-next-line security/no-inline-assembly // solium-disable-next-line security/no-inline-assembly
assembly { assembly {
success := staticcall(sub(gas, 2000), 6, input, 0xc0, r, 0x60) success := staticcall(sub(gas(), 2000), 6, input, 0xc0, r, 0x60)
// Use "invalid" to make gas estimation work // Use "invalid" to make gas estimation work
switch success case 0 { invalid() } switch success case 0 { invalid() }
} }
require(success,"pairing-add-failed"); require(success,"pairing-add-failed");
} }
/// @return the product of a point on G1 and a scalar, i.e. /// @return r the product of a point on G1 and a scalar, i.e.
/// p == p.scalar_mul(1) and p.addition(p) == p.scalar_mul(2) for all points p. /// p == p.scalar_mul(1) and p.addition(p) == p.scalar_mul(2) for all points p.
function scalar_mul(G1Point memory p, uint s) internal view returns (G1Point memory r) { function scalar_mul(G1Point memory p, uint s) internal view returns (G1Point memory r) {
uint[3] memory input; uint[3] memory input;
@@ -78,7 +80,7 @@ library Pairing {
bool success; bool success;
// solium-disable-next-line security/no-inline-assembly // solium-disable-next-line security/no-inline-assembly
assembly { assembly {
success := staticcall(sub(gas, 2000), 7, input, 0x80, r, 0x60) success := staticcall(sub(gas(), 2000), 7, input, 0x80, r, 0x60)
// Use "invalid" to make gas estimation work // Use "invalid" to make gas estimation work
switch success case 0 { invalid() } switch success case 0 { invalid() }
} }
@@ -106,7 +108,7 @@ library Pairing {
bool success; bool success;
// solium-disable-next-line security/no-inline-assembly // solium-disable-next-line security/no-inline-assembly
assembly { assembly {
success := staticcall(sub(gas, 2000), 8, add(input, 0x20), mul(inputSize, 0x20), out, 0x20) success := staticcall(sub(gas(), 2000), 8, add(input, 0x20), mul(inputSize, 0x20), out, 0x20)
// Use "invalid" to make gas estimation work // Use "invalid" to make gas estimation work
switch success case 0 { invalid() } switch success case 0 { invalid() }
} }
@@ -174,7 +176,7 @@ contract Verifier {
Pairing.G1Point C; Pairing.G1Point C;
} }
function verifyingKey() internal pure returns (VerifyingKey memory vk) { function verifyingKey() internal pure returns (VerifyingKey memory vk) {
vk.alfa1 = Pairing.G1Point(<%vk_alfa1%>); vk.alfa1 = Pairing.G1Point(<%vk_alpha1%>);
vk.beta2 = Pairing.G2Point(<%vk_beta2%>); vk.beta2 = Pairing.G2Point(<%vk_beta2%>);
vk.gamma2 = Pairing.G2Point(<%vk_gamma2%>); vk.gamma2 = Pairing.G2Point(<%vk_gamma2%>);
vk.delta2 = Pairing.G2Point(<%vk_delta2%>); vk.delta2 = Pairing.G2Point(<%vk_delta2%>);
@@ -200,6 +202,7 @@ contract Verifier {
)) return 1; )) return 1;
return 0; return 0;
} }
/// @return r bool true if proof is valid
function verifyProof( function verifyProof(
uint[2] memory a, uint[2] memory a,
uint[2][2] memory b, uint[2][2] memory b,

View File

@@ -1,214 +0,0 @@
//
// Copyright 2017 Christian Reitwiessner
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
pragma solidity ^0.4.17;
library Pairing {
struct G1Point {
uint X;
uint Y;
}
// Encoding of field elements is: X[0] * z + X[1]
struct G2Point {
uint[2] X;
uint[2] Y;
}
/// @return the generator of G1
function P1() pure internal returns (G1Point) {
return G1Point(1, 2);
}
/// @return the generator of G2
function P2() pure internal returns (G2Point) {
// Original code point
return G2Point(
[11559732032986387107991004021392285783925812861821192530917403151452391805634,
10857046999023057135944570762232829481370756359578518086990519993285655852781],
[4082367875863433681332203403145435568316851327593401208105741076214120093531,
8495653923123431417604973247489272438418190587263600148770280649306958101930]
);
/*
// Changed by Jordi point
return G2Point(
[10857046999023057135944570762232829481370756359578518086990519993285655852781,
11559732032986387107991004021392285783925812861821192530917403151452391805634],
[8495653923123431417604973247489272438418190587263600148770280649306958101930,
4082367875863433681332203403145435568316851327593401208105741076214120093531]
);
*/
}
/// @return the negation of p, i.e. p.addition(p.negate()) should be zero.
function negate(G1Point p) pure internal returns (G1Point) {
// The prime q in the base field F_q for G1
uint q = 21888242871839275222246405745257275088696311157297823662689037894645226208583;
if (p.X == 0 && p.Y == 0)
return G1Point(0, 0);
return G1Point(p.X, q - (p.Y % q));
}
/// @return the sum of two points of G1
function addition(G1Point p1, G1Point p2) view internal returns (G1Point r) {
uint[4] memory input;
input[0] = p1.X;
input[1] = p1.Y;
input[2] = p2.X;
input[3] = p2.Y;
bool success;
assembly {
success := staticcall(sub(gas, 2000), 6, input, 0xc0, r, 0x60)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require(success);
}
/// @return the product of a point on G1 and a scalar, i.e.
/// p == p.scalar_mul(1) and p.addition(p) == p.scalar_mul(2) for all points p.
function scalar_mul(G1Point p, uint s) view internal returns (G1Point r) {
uint[3] memory input;
input[0] = p.X;
input[1] = p.Y;
input[2] = s;
bool success;
assembly {
success := staticcall(sub(gas, 2000), 7, input, 0x80, r, 0x60)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require (success);
}
/// @return the result of computing the pairing check
/// e(p1[0], p2[0]) * .... * e(p1[n], p2[n]) == 1
/// For example pairing([P1(), P1().negate()], [P2(), P2()]) should
/// return true.
function pairing(G1Point[] p1, G2Point[] p2) view internal returns (bool) {
require(p1.length == p2.length);
uint elements = p1.length;
uint inputSize = elements * 6;
uint[] memory input = new uint[](inputSize);
for (uint i = 0; i < elements; i++)
{
input[i * 6 + 0] = p1[i].X;
input[i * 6 + 1] = p1[i].Y;
input[i * 6 + 2] = p2[i].X[0];
input[i * 6 + 3] = p2[i].X[1];
input[i * 6 + 4] = p2[i].Y[0];
input[i * 6 + 5] = p2[i].Y[1];
}
uint[1] memory out;
bool success;
assembly {
success := staticcall(sub(gas, 2000), 8, add(input, 0x20), mul(inputSize, 0x20), out, 0x20)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require(success);
return out[0] != 0;
}
/// Convenience method for a pairing check for two pairs.
function pairingProd2(G1Point a1, G2Point a2, G1Point b1, G2Point b2) view internal returns (bool) {
G1Point[] memory p1 = new G1Point[](2);
G2Point[] memory p2 = new G2Point[](2);
p1[0] = a1;
p1[1] = b1;
p2[0] = a2;
p2[1] = b2;
return pairing(p1, p2);
}
/// Convenience method for a pairing check for three pairs.
function pairingProd3(
G1Point a1, G2Point a2,
G1Point b1, G2Point b2,
G1Point c1, G2Point c2
) view internal returns (bool) {
G1Point[] memory p1 = new G1Point[](3);
G2Point[] memory p2 = new G2Point[](3);
p1[0] = a1;
p1[1] = b1;
p1[2] = c1;
p2[0] = a2;
p2[1] = b2;
p2[2] = c2;
return pairing(p1, p2);
}
/// Convenience method for a pairing check for four pairs.
function pairingProd4(
G1Point a1, G2Point a2,
G1Point b1, G2Point b2,
G1Point c1, G2Point c2,
G1Point d1, G2Point d2
) view internal returns (bool) {
G1Point[] memory p1 = new G1Point[](4);
G2Point[] memory p2 = new G2Point[](4);
p1[0] = a1;
p1[1] = b1;
p1[2] = c1;
p1[3] = d1;
p2[0] = a2;
p2[1] = b2;
p2[2] = c2;
p2[3] = d2;
return pairing(p1, p2);
}
}
contract Verifier {
using Pairing for *;
struct VerifyingKey {
Pairing.G1Point alfa1;
Pairing.G2Point beta2;
Pairing.G2Point gamma2;
Pairing.G2Point delta2;
Pairing.G1Point[] IC;
}
struct Proof {
Pairing.G1Point A;
Pairing.G2Point B;
Pairing.G1Point C;
}
function verifyingKey() pure internal returns (VerifyingKey vk) {
vk.alfa1 = Pairing.G1Point(<%vk_alfa1%>);
vk.beta2 = Pairing.G2Point(<%vk_beta2%>);
vk.gamma2 = Pairing.G2Point(<%vk_gamma2%>);
vk.delta2 = Pairing.G2Point(<%vk_delta2%>);
vk.IC = new Pairing.G1Point[](<%vk_ic_length%>);
<%vk_ic_pts%>
}
function verify(uint[] input, Proof proof) view internal returns (uint) {
uint256 snark_scalar_field = 21888242871839275222246405745257275088548364400416034343698204186575808495617;
VerifyingKey memory vk = verifyingKey();
require(input.length + 1 == vk.IC.length);
// Compute the linear combination vk_x
Pairing.G1Point memory vk_x = Pairing.G1Point(0, 0);
for (uint i = 0; i < input.length; i++) {
require(input[i] < snark_scalar_field,"verifier-gte-snark-scalar-field");
vk_x = Pairing.addition(vk_x, Pairing.scalar_mul(vk.IC[i + 1], input[i]));
}
vk_x = Pairing.addition(vk_x, vk.IC[0]);
if (!Pairing.pairingProd4(
Pairing.negate(proof.A), proof.B,
vk.alfa1, vk.beta2,
vk_x, vk.gamma2,
proof.C, vk.delta2
)) return 1;
return 0;
}
function verifyProof(
uint[2] a,
uint[2][2] b,
uint[2] c,
uint[<%vk_input_length%>] input
) view public returns (bool r) {
Proof memory proof;
proof.A = Pairing.G1Point(a[0], a[1]);
proof.B = Pairing.G2Point([b[0][0], b[0][1]], [b[1][0], b[1][1]]);
proof.C = Pairing.G1Point(c[0], c[1]);
uint[] memory inputValues = new uint[](input.length);
for(uint i = 0; i < input.length; i++){
inputValues[i] = input[i];
}
if (verify(inputValues, proof) == 0) {
return true;
} else {
return false;
}
}
}

View File

@@ -1,245 +0,0 @@
//
// Copyright 2017 Christian Reitwiessner
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
// 2019 OKIMS
// ported to solidity 0.5
// fixed linter warnings
// added require error messages
//
pragma solidity ^0.5.0;
library Pairing {
struct G1Point {
uint X;
uint Y;
}
// Encoding of field elements is: X[0] * z + X[1]
struct G2Point {
uint[2] X;
uint[2] Y;
}
/// @return the generator of G1
function P1() internal pure returns (G1Point memory) {
return G1Point(1, 2);
}
/// @return the generator of G2
function P2() internal pure returns (G2Point memory) {
// Original code point
return G2Point(
[11559732032986387107991004021392285783925812861821192530917403151452391805634,
10857046999023057135944570762232829481370756359578518086990519993285655852781],
[4082367875863433681332203403145435568316851327593401208105741076214120093531,
8495653923123431417604973247489272438418190587263600148770280649306958101930]
);
}
/// @return the negation of p, i.e. p.addition(p.negate()) should be zero.
function negate(G1Point memory p) internal pure returns (G1Point memory) {
// The prime q in the base field F_q for G1
uint q = 21888242871839275222246405745257275088696311157297823662689037894645226208583;
if (p.X == 0 && p.Y == 0)
return G1Point(0, 0);
return G1Point(p.X, q - (p.Y % q));
}
/// @return the sum of two points of G1
function addition(G1Point memory p1, G1Point memory p2) internal view returns (G1Point memory r) {
uint[4] memory input;
input[0] = p1.X;
input[1] = p1.Y;
input[2] = p2.X;
input[3] = p2.Y;
bool success;
// solium-disable-next-line security/no-inline-assembly
assembly {
success := staticcall(sub(gas, 2000), 6, input, 0xc0, r, 0x60)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require(success,"pairing-add-failed");
}
/// @return the product of a point on G1 and a scalar, i.e.
/// p == p.scalar_mul(1) and p.addition(p) == p.scalar_mul(2) for all points p.
function scalar_mul(G1Point memory p, uint s) internal view returns (G1Point memory r) {
uint[3] memory input;
input[0] = p.X;
input[1] = p.Y;
input[2] = s;
bool success;
// solium-disable-next-line security/no-inline-assembly
assembly {
success := staticcall(sub(gas, 2000), 7, input, 0x80, r, 0x60)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require (success,"pairing-mul-failed");
}
/// @return the result of computing the pairing check
/// e(p1[0], p2[0]) * .... * e(p1[n], p2[n]) == 1
/// For example pairing([P1(), P1().negate()], [P2(), P2()]) should
/// return true.
function pairing(G1Point[] memory p1, G2Point[] memory p2) internal view returns (bool) {
require(p1.length == p2.length,"pairing-lengths-failed");
uint elements = p1.length;
uint inputSize = elements * 6;
uint[] memory input = new uint[](inputSize);
for (uint i = 0; i < elements; i++)
{
input[i * 6 + 0] = p1[i].X;
input[i * 6 + 1] = p1[i].Y;
input[i * 6 + 2] = p2[i].X[0];
input[i * 6 + 3] = p2[i].X[1];
input[i * 6 + 4] = p2[i].Y[0];
input[i * 6 + 5] = p2[i].Y[1];
}
uint[1] memory out;
bool success;
// solium-disable-next-line security/no-inline-assembly
assembly {
success := staticcall(sub(gas, 2000), 8, add(input, 0x20), mul(inputSize, 0x20), out, 0x20)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require(success,"pairing-opcode-failed");
return out[0] != 0;
}
/// Convenience method for a pairing check for two pairs.
function pairingProd2(G1Point memory a1, G2Point memory a2, G1Point memory b1, G2Point memory b2) internal view returns (bool) {
G1Point[] memory p1 = new G1Point[](2);
G2Point[] memory p2 = new G2Point[](2);
p1[0] = a1;
p1[1] = b1;
p2[0] = a2;
p2[1] = b2;
return pairing(p1, p2);
}
/// Convenience method for a pairing check for three pairs.
function pairingProd3(
G1Point memory a1, G2Point memory a2,
G1Point memory b1, G2Point memory b2,
G1Point memory c1, G2Point memory c2
) internal view returns (bool) {
G1Point[] memory p1 = new G1Point[](3);
G2Point[] memory p2 = new G2Point[](3);
p1[0] = a1;
p1[1] = b1;
p1[2] = c1;
p2[0] = a2;
p2[1] = b2;
p2[2] = c2;
return pairing(p1, p2);
}
/// Convenience method for a pairing check for four pairs.
function pairingProd4(
G1Point memory a1, G2Point memory a2,
G1Point memory b1, G2Point memory b2,
G1Point memory c1, G2Point memory c2,
G1Point memory d1, G2Point memory d2
) internal view returns (bool) {
G1Point[] memory p1 = new G1Point[](4);
G2Point[] memory p2 = new G2Point[](4);
p1[0] = a1;
p1[1] = b1;
p1[2] = c1;
p1[3] = d1;
p2[0] = a2;
p2[1] = b2;
p2[2] = c2;
p2[3] = d2;
return pairing(p1, p2);
}
}
contract Verifier {
using Pairing for *;
struct VerifyingKey {
Pairing.G2Point A;
Pairing.G1Point B;
Pairing.G2Point C;
Pairing.G2Point gamma;
Pairing.G1Point gammaBeta1;
Pairing.G2Point gammaBeta2;
Pairing.G2Point Z;
Pairing.G1Point[] IC;
}
struct Proof {
Pairing.G1Point A;
Pairing.G1Point A_p;
Pairing.G2Point B;
Pairing.G1Point B_p;
Pairing.G1Point C;
Pairing.G1Point C_p;
Pairing.G1Point K;
Pairing.G1Point H;
}
function verifyingKey() internal pure returns (VerifyingKey memory vk) {
vk.A = Pairing.G2Point(<%vk_a%>);
vk.B = Pairing.G1Point(<%vk_b%>);
vk.C = Pairing.G2Point(<%vk_c%>);
vk.gamma = Pairing.G2Point(<%vk_g%>);
vk.gammaBeta1 = Pairing.G1Point(<%vk_gb1%>);
vk.gammaBeta2 = Pairing.G2Point(<%vk_gb2%>);
vk.Z = Pairing.G2Point(<%vk_z%>);
vk.IC = new Pairing.G1Point[](<%vk_ic_length%>);
<%vk_ic_pts%>
}
function verify(uint[] memory input, Proof memory proof) internal view returns (uint) {
uint256 snark_scalar_field = 21888242871839275222246405745257275088548364400416034343698204186575808495617;
VerifyingKey memory vk = verifyingKey();
require(input.length + 1 == vk.IC.length,"verifier-bad-input");
// Compute the linear combination vk_x
Pairing.G1Point memory vk_x = Pairing.G1Point(0, 0);
for (uint i = 0; i < input.length; i++) {
require(input[i] < snark_scalar_field,"verifier-gte-snark-scalar-field");
vk_x = Pairing.addition(vk_x, Pairing.scalar_mul(vk.IC[i + 1], input[i]));
}
vk_x = Pairing.addition(vk_x, vk.IC[0]);
if (!Pairing.pairingProd2(proof.A, vk.A, Pairing.negate(proof.A_p), Pairing.P2())) return 1;
if (!Pairing.pairingProd2(vk.B, proof.B, Pairing.negate(proof.B_p), Pairing.P2())) return 2;
if (!Pairing.pairingProd2(proof.C, vk.C, Pairing.negate(proof.C_p), Pairing.P2())) return 3;
if (!Pairing.pairingProd3(
proof.K, vk.gamma,
Pairing.negate(Pairing.addition(vk_x, Pairing.addition(proof.A, proof.C))), vk.gammaBeta2,
Pairing.negate(vk.gammaBeta1), proof.B
)) return 4;
if (!Pairing.pairingProd3(
Pairing.addition(vk_x, proof.A), proof.B,
Pairing.negate(proof.H), vk.Z,
Pairing.negate(proof.C), Pairing.P2()
)) return 5;
return 0;
}
function verifyProof(
uint[2] memory a,
uint[2] memory a_p,
uint[2][2] memory b,
uint[2] memory b_p,
uint[2] memory c,
uint[2] memory c_p,
uint[2] memory h,
uint[2] memory k,
uint[<%vk_input_length%>] memory input
) view public returns (bool r) {
Proof memory proof;
proof.A = Pairing.G1Point(a[0], a[1]);
proof.A_p = Pairing.G1Point(a_p[0], a_p[1]);
proof.B = Pairing.G2Point([b[0][0], b[0][1]], [b[1][0], b[1][1]]);
proof.B_p = Pairing.G1Point(b_p[0], b_p[1]);
proof.C = Pairing.G1Point(c[0], c[1]);
proof.C_p = Pairing.G1Point(c_p[0], c_p[1]);
proof.H = Pairing.G1Point(h[0], h[1]);
proof.K = Pairing.G1Point(k[0], k[1]);
uint[] memory inputValues = new uint[](input.length);
for(uint i = 0; i < input.length; i++){
inputValues[i] = input[i];
}
if (verify(inputValues, proof) == 0) {
return true;
} else {
return false;
}
}
}

View File

@ -13,4 +13,4 @@ template Multiplier(n) {
 c <== int[n-1];
 }
-component main = Multiplier(100);
+component main = Multiplier(1000);

Binary file not shown.

Binary file not shown.

116
test/fullprocess.js Normal file
View File

@ -0,0 +1,116 @@
import * as snarkjs from "../main.js";
import { getCurveFromName } from "../src/curves.js";
import assert from "assert";
import path from "path";
describe("Full process", function () {
this.timeout(100000);
let curve;
const ptau_0 = {type: "mem"};
const ptau_1 = {type: "mem"};
const ptau_2 = {type: "mem"};
const ptau_beacon = {type: "mem"};
const ptau_final = {type: "mem"};
const ptau_challange2 = {type: "mem"};
const ptau_response2 = {type: "mem"};
const zkey_0 = {type: "mem"};
const zkey_1 = {type: "mem"};
const zkey_2 = {type: "mem"};
const zkey_final = {type: "mem"};
const bellman_1 = {type: "mem"};
const bellman_2 = {type: "mem"};
let vKey;
const wtns = {type: "mem"};
let proof;
let publicSignals;
before( async () => {
curve = await getCurveFromName("bn128");
});
after( async () => {
await curve.terminate();
});
it ("powersoftau new", async () => {
await snarkjs.powersOfTau.newAccumulator(curve, 12, ptau_0);
});
it ("powersoftau contribute ", async () => {
await snarkjs.powersOfTau.contribute(ptau_0, ptau_1, "C1", "Entropy1");
});
it ("powersoftau export challange", async () => {
await snarkjs.powersOfTau.exportChallange(ptau_1, ptau_challange2);
});
it ("powersoftau challange contribute", async () => {
await snarkjs.powersOfTau.challangeContribute(curve, ptau_challange2, ptau_response2, "Entropy2");
});
it ("powersoftau import response", async () => {
await snarkjs.powersOfTau.importResponse(ptau_1, ptau_response2, ptau_2, "C2");
});
it ("powersoftau beacon", async () => {
await snarkjs.powersOfTau.beacon(ptau_2, ptau_beacon, "B3", "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20", 10);
});
it ("powersoftau prepare phase2", async () => {
await snarkjs.powersOfTau.preparePhase2(ptau_beacon, ptau_final);
});
it ("powersoftau verify", async () => {
const res = await snarkjs.powersOfTau.verify(ptau_final);
assert(res);
});
it ("zkey new", async () => {
await snarkjs.zKey.newZKey(path.join("test", "circuit", "circuit.r1cs"), ptau_final, zkey_0);
});
it ("zkey contribute ", async () => {
await snarkjs.zKey.contribute(zkey_0, zkey_1, "p2_C1", "pa_Entropy1");
});
it ("zkey export bellman", async () => {
await snarkjs.zKey.exportBellman(zkey_1, bellman_1);
});
it ("zkey bellman contribute", async () => {
await snarkjs.zKey.bellmanContribute(curve, bellman_1, bellman_2, "pa_Entropy2");
});
it ("zkey import bellman", async () => {
await snarkjs.zKey.importBellman(zkey_1, bellman_2, zkey_2, "C2");
});
it ("zkey beacon", async () => {
await snarkjs.zKey.beacon(zkey_2, zkey_final, "B3", "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20", 10);
});
it ("zkey verify", async () => {
const res = await snarkjs.zKey.verify(path.join("test", "circuit", "circuit.r1cs"), ptau_final, zkey_final);
assert(res);
});
it ("zkey export verificationkey", async () => {
vKey = await snarkjs.zKey.exportVerificationKey(zkey_final);
});
it ("witness calculate", async () => {
await snarkjs.wtns.calculate({a: 11, b:2}, path.join("test", "circuit", "circuit.wasm"), wtns);
});
it ("groth16 proof", async () => {
const res = await snarkjs.groth16.prove(zkey_final, wtns);
proof = res.proof;
publicSignals = res.publicSignals;
});
it ("groth16 verify", async () => {
const res = await snarkjs.groth16.validate(vKey, publicSignals, proof);
assert(res == true);
});
});
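
This test drives the same ceremony as the CLI tutorial, but through the JavaScript API with in-memory artifacts (`{type: "mem"}`). As a hedged sketch (assuming the same calls also accept file-path strings where the test passes mem objects, which is how the CLI wrappers drive them), the first steps can be scripted against files instead:

```js
// Sketch under the assumption that file paths are accepted wherever the
// test above passes {type: "mem"} objects.
import * as snarkjs from "../main.js";
import { getCurveFromName } from "../src/curves.js";

const curve = await getCurveFromName("bn128");
// File-backed equivalents of the in-memory steps in the test above.
await snarkjs.powersOfTau.newAccumulator(curve, 12, "pot12_0000.ptau");
await snarkjs.powersOfTau.contribute("pot12_0000.ptau", "pot12_0001.ptau", "C1", "Entropy1");
await snarkjs.powersOfTau.beacon("pot12_0001.ptau", "pot12_beacon.ptau", "B3",
    "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20", 10);
await snarkjs.powersOfTau.preparePhase2("pot12_beacon.ptau", "pot12_final.ptau");
await curve.terminate();
```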

View File

@ -1,46 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of zksnark JavaScript library.
zksnark JavaScript library is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
zksnark JavaScript library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
zksnark JavaScript library. If not, see <https://www.gnu.org/licenses/>.
*/
const chai = require("chai");
const fs = require("fs");
const path = require("path");
const loadR1cs = require("r1csfile").load;
const zkSnark = require("../index.js");
const WitnessCalculatorBuilder = require("circom_runtime").WitnessCalculatorBuilder;
const assert = chai.assert;
describe("zkSnark Groth", () => {
it("Load a circuit, create trusted setup, create a proof and validate it", async () => {
const cir = await loadR1cs(path.join(__dirname, "circuit", "circuit.r1cs"), true);
const setup = zkSnark.groth.setup(cir);
const wasm = await fs.promises.readFile(path.join(__dirname, "circuit", "circuit.wasm"));
const wc = await WitnessCalculatorBuilder(wasm, {sanityCheck: true});
const witness = await wc.calculateWitness({"a": "33", "b": "34"});
const {proof, publicSignals} = zkSnark.groth.genProof(setup.vk_proof, witness);
assert( zkSnark.groth.isValid(setup.vk_verifier, proof, publicSignals));
}).timeout(10000000);
});

View File

@ -1,105 +1,119 @@
import assert from "assert";
import { getCurveFromName } from "../src/curves.js";
import { hex2ByteArray } from "../src/misc.js";
import { Scalar } from "ffjavascript";
import { getG2sp } from "../src/keypair.js";
describe("keypair", () => {
let curve;
before( async () => {
curve = await getCurveFromName("bn128");
});
after( async () => {
await curve.terminate();
});
it("It should calculate the right g2_s for the test vectors", async () => {
const challange = hex2ByteArray(
"bc0bde7980381fa642b2097591dd83f1"+
"ed15b003e15c35520af32c95eb519149"+
"2a6f3175215635cfc10e6098e2c612d0"+
"ca84f1a9f90b5333560c8af59b9209f4"
);
const tau_g1_s = curve.G1.fromObject([
Scalar.e("0x1403cf4fed293e66a8cd522be9f938524111f6f08762371bff53ee387a39cf13"),
Scalar.e("0x2accbda355c222301a1bd802db7454d86a4ec2ee89ae895ca21f147d6b705740"),
Scalar.e("1")
]);
const tau_g1_sx = curve.G1.fromObject([
Scalar.e("0x12996cf89d854246f1ab002e446436b77a64349117ec1fb2aa57a304890e81ef"),
Scalar.e("0x0c17fd067df52c480a1db3c6890821f975932d89d0d53c6c60777cc56f1dd712"),
Scalar.e("1")
]);
const tau_g2_sp = getG2sp(curve, 0, challange, tau_g1_s, tau_g1_sx);
const tau_g2_spx = curve.G2.fromObject([
[
Scalar.e("0x0fe02fcc3aee51c1f3a37f3f152ebe5476ae659468f2ee81cdeb19d0dad366c5"),
Scalar.e("0x01aeb4db892bcb273aada80f5eab10e2e50ae59a5c274b0d7303f5c5a52ee88b"),
],[
Scalar.e("0x2d00022d840d493fb93c68a63b29e2692c0cd3caf354fe60eae1ebacefc2c948"),
Scalar.e("0x204065ff10344153a08cfe4ae543c47fba883ef8a54530fa6a52c87e5c28ef2b"),
],[
Scalar.e("1"),
Scalar.e("0")
]
]);
assert(curve.F12.eq(
curve.pairing(tau_g1_sx, tau_g2_sp),
curve.pairing(tau_g1_s, tau_g2_spx)));
const alpha_g1_s = curve.G1.fromObject([
Scalar.e("0x12a64bbe8af7fcb19052e25e188c1fcdac454928142f8e89f58e03249e18b223"),
Scalar.e("0x22be31a388d0ec551530e1b1581b671b4340e88990de805a7bfed8bdb9c1accd"),
Scalar.e("1")
]);
const alpha_g1_sx = curve.G1.fromObject([
Scalar.e("0x262ff8dd594374c6ed5e892ba31315f6e47c500784a12ea8d2c573730888a392"),
Scalar.e("0x0b3a94f2b61178f2974e039cfd671e7405ec43eb2c09dc8f43a34f450917a62f"),
Scalar.e("1")
]);
const alpha_g2_sp = getG2sp(curve, 1, challange, alpha_g1_s, alpha_g1_sx);
const alpha_g2_spx = curve.G2.fromObject([
[
Scalar.e("0x2e649d01a58a7795762df8f0634c273ebce6950a9a2ba3d4459458620d3164a0"),
Scalar.e("0x1b58044d3e205a918124fea3983583199b4f99fd0abb39ede2c684b0810bdc1e"),
],[
Scalar.e("0x021d41558cea5fa32c9f3de5834cb2ee45ce4cdf471353395d019dfe0c9c2509"),
Scalar.e("0x1c04148bac3f17b219c2655cd63ad2596ea63293103487be488a1d5a9054ddbf"),
],[
Scalar.e("1"),
Scalar.e("0")
]
]);
assert(curve.F12.eq(
curve.pairing(alpha_g1_sx, alpha_g2_sp),
curve.pairing(alpha_g1_s, alpha_g2_spx)));
const beta_g1_s = curve.G1.fromObject([
Scalar.e("0x0d9b3088b69daf6746c6bba4f9b359234abbfd3306bce14b198e7a5556c777e6"),
Scalar.e("0x066d1acac914883df6a9dc57dc2037a481ba4b8646efe13e2584b9258bd52d0c"),
Scalar.e("1")
]);
const beta_g1_sx = curve.G1.fromObject([
Scalar.e("0x248232878c359dbe632c387dc0d955520e8d3363f1cd9621ec9fd4a05460c754"),
Scalar.e("0x12074f06ef232a472cb36c328e760c4acfb4bedad4ca3ee09971578a0fe185ab"),
Scalar.e("1")
]);
const beta_g2_sp = getG2sp(curve, 2, challange, beta_g1_s, beta_g1_sx);
const beta_g2_spx = curve.G2.fromObject([
[
Scalar.e("0x029251aed5163109667300035ce200b7195fc6e261581ba38776d87d7f0b1a7d"),
Scalar.e("0x09d6847f1b945ccdc00418a807f4b0af67ec5c0030c4f203581eff9d4af4347f"),
],[
Scalar.e("0x04b62ecdc94bf94fcefdf93f06ca4f63026a47a0d4138941b8ee45b9f7177e5c"),
Scalar.e("0x1f0a6bff3945f207f407ff1c813b66a28b495f55a3788c3e200c74817e86f7ce"),
],[
Scalar.e("1"),
Scalar.e("0")
]
]);
assert(curve.F12.eq(
curve.pairing(beta_g1_sx, beta_g2_sp),
curve.pairing(beta_g1_s, beta_g2_spx)));
});
});
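
Reading the test: each contribution publishes g1_s, g1_sx = x·g1_s and g2_spx = x·g2_sp, where g2_sp is a G2 point derived by hashing the challenge together with the two G1 points (getG2sp). By bilinearity of the pairing, e(g1_sx, g2_sp) = e(g1_s, g2_sp)^x = e(g1_s, g2_spx), so equality of the two pairings shows the same secret x was applied on both curves without revealing it.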

View File

@ -1,48 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of zksnark JavaScript library.
zksnark JavaScript library is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
zksnark JavaScript library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
zksnark JavaScript library. If not, see <https://www.gnu.org/licenses/>.
*/
const chai = require("chai");
const fs = require("fs");
const path = require("path");
const loadR1cs = require("r1csfile").load;
const zkSnark = require("../index.js");
const WitnessCalculatorBuilder = require("circom_runtime").WitnessCalculatorBuilder;
const assert = chai.assert;
describe("zkSnark kimleeoh", () => {
it("Load a circuit, create trusted setup, create a proof and validate it", async () => {
const cir = await loadR1cs(path.join(__dirname, "circuit", "circuit.r1cs"), true);
const setup = zkSnark.kimleeoh.setup(cir);
const wasm = await fs.promises.readFile(path.join(__dirname, "circuit", "circuit.wasm"));
const wc = await WitnessCalculatorBuilder(wasm, {sanityCheck: true});
const witness = await wc.calculateWitness({"a": "33", "b": "34"});
const {proof, publicSignals} = zkSnark.kimleeoh.genProof(setup.vk_proof, witness);
assert( zkSnark.kimleeoh.isValid(setup.vk_verifier, proof, publicSignals));
}).timeout(10000000);
});

View File

@ -1,47 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of zksnark JavaScript library.
zksnark JavaScript library is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
zksnark JavaScript library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
zksnark JavaScript library. If not, see <https://www.gnu.org/licenses/>.
*/
const chai = require("chai");
const fs = require("fs");
const path = require("path");
const loadR1cs = require("r1csfile").load;
const zkSnark = require("../index.js");
const WitnessCalculatorBuilder = require("circom_runtime").WitnessCalculatorBuilder;
const assert = chai.assert;
describe("zkSnark Original", () => {
it("Load a circuit, create trusted setup, create a proof and validate it", async () => {
const cir = await loadR1cs(path.join(__dirname, "circuit", "circuit.r1cs"), true);
const setup = zkSnark.original.setup(cir);
const wasm = await fs.promises.readFile(path.join(__dirname, "circuit", "circuit.wasm"));
const wc = await WitnessCalculatorBuilder(wasm, {sanityCheck: true});
const witness = await wc.calculateWitness({"a": "33", "b": "34"});
const {proof, publicSignals} = zkSnark.original.genProof(setup.vk_proof, witness);
assert( zkSnark.original.isValid(setup.vk_verifier, proof, publicSignals));
}).timeout(10000000);
});

BIN
~[object Object].init Normal file

Binary file not shown.