api and tutorial start

This commit is contained in:
Jordi Baylina 2020-07-11 10:31:52 +02:00
parent 1d3ad357c1
commit 9b418568f6
No known key found for this signature in database
GPG Key ID: 7480C80C1BE43112
83 changed files with 38236 additions and 3461 deletions


@ -5,7 +5,8 @@ module.exports = {
"mocha": true
},
"parserOptions": {
"ecmaVersion": 2017
"ecmaVersion": 2020,
"sourceType": "module"
},
"extends": "eslint:recommended",
"rules": {

212
TUTORIAL.md Normal file

@ -0,0 +1,212 @@
### Install snarkjs and circom
```sh
npm install -g circom
npm install -g snarkjs
```
### Help
```sh
snarkjs
```
For commands that take a long time, you can add the -v or --verbose option to see the progress.
### Start a new ceremony
```sh
snarkjs powersoftau new bn128 12 pot12_0000.ptau
```
### Contribute to the ceremony
```sh
snarkjs powersoftau contribute pot12_0000.ptau pot12_0001.ptau --name="Example Name" -v
```
### Do a second contribution
```sh
snarkjs powersoftau contribute pot12_0001.ptau pot12_0002.ptau --name="Second contribution Name" -v
```
### Verify the file
```sh
snarkjs powersoftau verify pot12_0002.ptau
```
### Contribute using third-party software
```sh
snarkjs powersoftau export challange pot12_0002.ptau challange_0003
snarkjs powersoftau challange contribute bn128 challange_0003 response_0003
snarkjs powersoftau import response pot12_0002.ptau response_0003 pot12_0003.ptau -n="Third contribution name"
```
### Add a beacon
```sh
snarkjs powersoftau beacon pot12_0003.ptau pot12_beacon.ptau 0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f 10 -n="Final Beacon"
```
### Prepare phase2
```sh
snarkjs powersoftau prepare phase2 pot12_beacon.ptau pot12_final.ptau -v
```
### Verify the last file
```sh
snarkjs powersoftau verify pot12_final.ptau
```
### Create a circuit
```sh
mkdir mycircuit
cd mycircuit
cat <<EOT > circuit.circom
template Multiplier(n) {
    signal private input a;
    signal private input b;
    signal output c;

    signal int[n];

    int[0] <== a*a + b;
    for (var i=1; i<n; i++) {
        int[i] <== int[i-1]*int[i-1] + b;
    }

    c <== int[n-1];
}

component main = Multiplier(1000);
EOT
```
### Compile the circuit
```sh
circom circuit.circom -r -w -s -v
```
### Info of a circuit
```sh
snarkjs r1cs info circuit.r1cs
```
### Print the constraints
```sh
snarkjs r1cs print circuit.r1cs
```
### Export r1cs to JSON
```sh
snarkjs r1cs export json circuit.r1cs circuit.r1cs.json
```
### Generate the reference zKey (without contributions) from the circuit
```sh
snarkjs zkey new circuit.r1cs pot12_final.ptau circuit_0000.zkey
```
### Contribute to the phase 2 ceremony
```sh
snarkjs zkey contribute circuit_0000.zkey circuit_0001.zkey --name="1st Contributor Name" -v
```
### Do a second phase2 contribution
```sh
snarkjs zkey contribute circuit_0001.zkey circuit_0002.zkey --name="Second contribution Name" -v
```
### Verify the zkey file
```sh
snarkjs zkey verify circuit.r1cs pot12_final.ptau circuit_0002.zkey
```
### Contribute using third-party software
```sh
snarkjs zkey export bellman circuit_0002.zkey challange_phase2_0003
snarkjs zkey bellman contribute bn128 challange_phase2_0003 response_phase2_0003
snarkjs zkey import bellman circuit_0002.zkey response_phase2_0003 circuit_0003.zkey -n="Third contribution name"
```
### Add a beacon
```sh
snarkjs zkey beacon circuit_0003.zkey circuit_final.zkey 0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f 10 -n="Final Beacon phase2"
```
### Verify the final file
```sh
snarkjs zkey verify circuit.r1cs pot12_final.ptau circuit_final.zkey
```
### Export the verification key
```sh
snarkjs zkey export verificationkey circuit_final.zkey verification_key.json
```
### Calculate the witness
```sh
cat <<EOT > input.json
{"a": 3, "b": 11}
EOT
snarkjs wtns calculate circuit.wasm input.json witness.wtns
```
### Debug witness calculation
In general, when you are developing a new circuit, you will want to check for errors in the witness calculation process.
You can do this with:
```sh
snarkjs wtns debug circuit.wasm input.json witness.wtns circuit.sym --trigger --get --set
```
This will log every time a new component is started or finished (--trigger), when a signal is set (--set), and when a signal is read (--get).
### Proof calculation
```sh
snarkjs groth16 prove circuit_final.zkey witness.wtns proof.json public.json
```
It is also possible to calculate the witness and generate the proof in a single command:
```sh
snarkjs groth16 fullprove input.json circuit.wasm circuit_final.zkey proof.json public.json
```
### Verify
```sh
snarkjs groth16 verify verification_key.json public.json proof.json
```
### Export Solidity Verifier
```sh
snarkjs zkey export solidityverifier circuit_final.zkey verifier.sol
```
You can deploy the "Verifier" smart contract using Remix, for example.
In order to simulate a verification call, you can run:
```sh
snarkjs zkey export soliditycalldata public.json proof.json
```
Then cut and paste the result directly into the "verifyProof" field of the deployed smart contract.
This call will return true if the proof and the public data are valid.
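### Use the API from JavaScript
The same flow is also exposed programmatically by the snarkjs API that this commit introduces. Below is a minimal sketch for Node (it assumes ES modules and the `circuit.wasm` and `circuit_final.zkey` files generated above; the calls mirror how `cli.js` uses `groth16.fullProve`, `zkey.exportVerificationKey` and `groth16.validate`):
```js
import { groth16, zKey } from "snarkjs";

async function run() {
    // Calculate the witness and generate the proof in one step.
    const { proof, publicSignals } = await groth16.fullProve(
        { a: 3, b: 11 }, "circuit.wasm", "circuit_final.zkey"
    );

    // Export the verification key from the zkey and verify the proof.
    const vKey = await zKey.exportVerificationKey("circuit_final.zkey");
    const ok = await groth16.validate(vKey, publicSignals, proof);
    console.log(ok ? "OK" : "INVALID");
}

run();
```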

6501
build/cli.cjs Executable file

File diff suppressed because it is too large

14985
build/main.js Normal file

File diff suppressed because one or more lines are too long

14972
build/snarkjs.js Normal file

File diff suppressed because one or more lines are too long

477
cli.js

@ -1,5 +1,3 @@
#!/usr/bin/env node
/*
Copyright 2018 0KIMS association.
@ -21,35 +19,95 @@
/* eslint-disable no-console */
const fs = require("fs");
const path = require("path");
import fs from "fs";
const zkSnark = require("./index.js");
const {stringifyBigInts, unstringifyBigInts} = require("ffjavascript").utils;
import {load as loadR1cs} from "r1csfile";
const loadR1cs = require("r1csfile").load;
const WitnessCalculatorBuilder = require("circom_runtime").WitnessCalculatorBuilder;
import loadSyms from "./src/loadsyms.js";
import * as r1cs from "./src/r1cs.js";
const wtnsFile = require("./src/wtnsfile");
import clProcessor from "./src/clprocessor.js";
const loadSyms = require("./src/loadsyms");
const r1cs = require("./src/r1cs");
import * as powersOfTaw from "./src/powersoftau.js";
const clProcessor = require("./src/clprocessor");
import { utils } from "ffjavascript";
const {stringifyBigInts, unstringifyBigInts} = utils;
const powersOfTaw = require("./src/powersoftau");
import * as zkey from "./src/zkey.js";
import * as groth16 from "./src/groth16.js";
import * as wtns from "./src/wtns.js";
import * as curves from "./src/curves.js";
import path from "path";
const solidityGenerator = require("./src/soliditygenerator.js");
const Scalar = require("ffjavascript").Scalar;
const assert = require("assert");
const zkey = require("./src/zkey");
const zksnark = require("./src/zksnark");
const curves = require("./src/curves");
import Logger from "logplease";
const logger = Logger.create("snarkJS", {showTimestamp:false});
Logger.setLogLevel("INFO");
const commands = [
{
cmd: "powersoftau new <curve> <power> [powersoftau_0000.ptau]",
description: "Starts a powers of tau ceremony",
alias: ["ptn"],
options: "-verbose|v",
action: powersOfTawNew
},
{
cmd: "powersoftau contribute <powersoftau.ptau> <new_powersoftau.ptau>",
description: "creates a ptau file with a new contribution",
alias: ["ptc"],
options: "-verbose|v -name|n -entropy|e",
action: powersOfTawContribute
},
{
cmd: "powersoftau export challange <powersoftau_0000.ptau> [challange]",
description: "Creates a challange",
alias: ["ptec"],
options: "-verbose|v",
action: powersOfTawExportChallange
},
{
cmd: "powersoftau challange contribute <curve> <challange> [response]",
description: "Contribute to a challange",
alias: ["ptcc"],
options: "-verbose|v -entropy|e",
action: powersOfTawChallangeContribute
},
{
cmd: "powersoftau import response <powersoftau_old.ptau> <response> <<powersoftau_new.ptau>",
description: "import a response to a ptau file",
alias: ["ptir"],
options: "-verbose|v -nopoints -nocheck -name|n",
action: powersOfTawImport
},
{
cmd: "powersoftau verify <powersoftau.ptau>",
description: "verifies a powers of tau file",
alias: ["ptv"],
options: "-verbose|v",
action: powersOfTawVerify
},
{
cmd: "powersoftau beacon <old_powersoftau.ptau> <new_powersoftau.ptau> <beaconHash(Hex)> <numIterationsExp>",
description: "adds a beacon",
alias: ["ptb"],
options: "-verbose|v -name|n",
action: powersOfTawBeacon
},
{
cmd: "powersoftau prepare phase2 <powersoftau.ptau> <new_powersoftau.ptau>",
description: "Prepares phase 2. ",
longDescription: " This process calculates the evaluation of the Lagrange polinomials at tau for alpha*tau and beta tau",
alias: ["pt2"],
options: "-verbose|v",
action: powersOfTawPreparePhase2
},
{
cmd: "powersoftau export json <powersoftau_0000.ptau> <powersoftau_0000.json>",
description: "Exports a power of tau file to a JSON",
alias: ["ptej"],
options: "-verbose|v",
action: powersOfTawExportJson
},
{
cmd: "r1cs info [circuit.r1cs]",
description: "Print statistiscs of a circuit",
@ -86,10 +144,11 @@ const commands = [
cmd: "wtns export json [witness.wtns] [witnes.json]",
description: "Calculate the witness with debug info.",
longDescription: "Calculate the witness with debug info. \nOptions:\n-g or --g : Log signal gets\n-s or --s : Log signal sets\n-t or --trigger : Log triggers ",
options: "-get|g -set|s -trigger|t",
options: "-verbose|v",
alias: ["wej"],
action: wtnsExportJson
},
/*
{
cmd: "zksnark setup [circuit.r1cs] [circuit.zkey] [verification_key.json]",
description: "Run a simple setup for a circuit generating the proving key.",
@ -97,94 +156,38 @@ const commands = [
options: "-verbose|v -protocol",
action: zksnarkSetup
},
*/
{
cmd: "zksnark prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]",
description: "Generates a zk Proof",
alias: ["zp", "zksnark proof", "proof -pk|provingkey -wt|witness -p|proof -pub|public"],
cmd: "groth16 prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]",
description: "Generates a zk Proof from witness",
alias: ["g16p", "zpw", "zksnark proof", "proof -pk|provingkey -wt|witness -p|proof -pub|public"],
options: "-verbose|v -protocol",
action: zksnarkProve
},
{
cmd: "zksnark verify [verification_key.json] [public.json] [proof.json]",
cmd: "groth16 fullprove [input.json] [circuit.wasm] [circuit.zkey] [proof.json] [public.json]",
description: "Generates a zk Proof from input",
alias: ["g16f", "g16i"],
options: "-verbose|v -protocol",
action: zksnarkFullProve
},
{
cmd: "groth16 verify [verification_key.json] [public.json] [proof.json]",
description: "Verify a zk Proof",
alias: ["zv", "verify -vk|verificationkey -pub|public -p|proof"],
alias: ["g16v", "verify -vk|verificationkey -pub|public -p|proof"],
action: zksnarkVerify
},
{
cmd: "solidity genverifier <verificationKey.json> <verifier.sol>",
cmd: "zkey export solidityverifier [circuit.zkey] [verifier.sol]",
description: "Creates a verifier in solidity",
alias: ["ks", "generateverifier -vk|verificationkey -v|verifier"],
action: solidityGenVerifier
alias: ["zkesv", "generateverifier -vk|verificationkey -v|verifier"],
action: zkeyExportSolidityVerifier
},
{
cmd: "solidity gencall <public.json> <proof.json>",
cmd: "zkey export soliditycalldata <public.json> <proof.json>",
description: "Generates call parameters ready to be called.",
alias: ["pc", "generatecall -pub|public -p|proof"],
action: solidityGenCall
},
{
cmd: "powersoftau new <curve> <power> [powersoftau_0000.ptau]",
description: "Starts a powers of tau ceremony",
alias: ["ptn"],
options: "-verbose|v",
action: powersOfTawNew
},
{
cmd: "powersoftau export challange <powersoftau_0000.ptau> [challange]",
description: "Creates a challange",
alias: ["ptec"],
options: "-verbose|v",
action: powersOfTawExportChallange
},
{
cmd: "powersoftau challange contribute <curve> <challange> [response]",
description: "Contribute to a challange",
alias: ["ptcc"],
options: "-verbose|v -entropy|e",
action: powersOfTawChallangeContribute
},
{
cmd: "powersoftau import <powersoftau_old.ptau> <response> <<powersoftau_new.ptau>",
description: "import a response to a ptau file",
alias: ["pti"],
options: "-verbose|v -nopoints -nocheck -name|n",
action: powersOfTawImport
},
{
cmd: "powersoftau verify <powersoftau.ptau>",
description: "verifies a powers of tau file",
alias: ["ptv"],
options: "-verbose|v",
action: powersOfTawVerify
},
{
cmd: "powersoftau beacon <old_powersoftau.ptau> <new_powersoftau.ptau> <beaconHash(Hex)> <numIterationsExp>",
description: "adds a beacon",
alias: ["ptb"],
options: "-verbose|v -name|n",
action: powersOfTawBeacon
},
{
cmd: "powersoftau contribute <powersoftau.ptau> <new_powersoftau.ptau>",
description: "creates a ptau file with a new contribution",
alias: ["ptc"],
options: "-verbose|v -name|n -entropy|e",
action: powersOfTawContribute
},
{
cmd: "powersoftau prepare phase2 <powersoftau.ptau> <new_powersoftau.ptau>",
description: "Prepares phase 2. ",
longDescription: " This process calculates the evaluation of the Lagrange polinomials at tau for alpha*tau and beta tau",
alias: ["pt2"],
options: "-verbose|v",
action: powersOfTawPreparePhase2
},
{
cmd: "powersoftau export json <powersoftau_0000.ptau> <powersoftau_0000.json>",
description: "Exports a power of tau file to a JSON",
alias: ["ptej"],
options: "-verbose|v",
action: powersOfTawExportJson
alias: ["zkesc", "generatecall -pub|public -p|proof"],
action: zkeyExportSolidityCalldata
},
{
cmd: "zkey new [circuit.r1cs] [powersoftau.ptau] [circuit.zkey]",
@ -204,7 +207,7 @@ const commands = [
cmd: "zkey import bellman <circuit_old.zkey> <circuit.mpcparams> <circuit_new.zkey>",
description: "Export a zKey to a MPCParameters file compatible with kobi/phase2 (Bellman) ",
alias: ["zkib"],
options: "-verbose|v",
options: "-verbose|v -name|n",
action: zkeyImportBellman
},
{
@ -229,14 +232,14 @@ const commands = [
action: zkeyBeacon
},
{
cmd: "zkey challange contribute <curve> <challange> [response]",
cmd: "zkey bellman contribute <curve> <circuit.mpcparams> <circuit_response.mpcparams>",
description: "contributes to a llallange file in bellman format",
alias: ["zkcc"],
alias: ["zkbc"],
options: "-verbose|v -entropy|e",
action: zkeyChallangeContribute
action: zkeyBellmanContribute
},
{
cmd: "zkey export vkey [circuit.zkey] [verification_key.json]",
cmd: "zkey export verificationkey [circuit.zkey] [verification_key.json]",
description: "Exports a verification key",
alias: ["zkev"],
action: zkeyExportVKey
@ -256,8 +259,7 @@ const commands = [
clProcessor(commands).then( (res) => {
process.exit(res);
}, (err) => {
console.log(err.stack);
console.log("ERROR: " + err);
logger.error(err);
process.exit(1);
});
@ -327,7 +329,9 @@ function changeExt(fileName, newExt) {
async function r1csInfo(params, options) {
const r1csName = params[0] || "circuit.r1cs";
await r1cs.info(r1csName);
if (options.verbose) Logger.setLogLevel("DEBUG");
await r1cs.info(r1csName, logger);
return 0;
@ -337,11 +341,14 @@ async function r1csInfo(params, options) {
async function r1csPrint(params, options) {
const r1csName = params[0] || "circuit.r1cs";
const symName = params[1] || changeExt(r1csName, "sym");
if (options.verbose) Logger.setLogLevel("DEBUG");
const cir = await loadR1cs(r1csName, true, true);
const sym = await loadSyms(symName);
await r1cs.print(cir, sym);
await r1cs.print(cir, sym, logger);
return 0;
}
@ -352,7 +359,12 @@ async function r1csExportJSON(params, options) {
const r1csName = params[0] || "circuit.r1cs";
const jsonName = params[1] || changeExt(r1csName, "json");
await r1cs.exportJson(r1csName, jsonName);
if (options.verbose) Logger.setLogLevel("DEBUG");
const r1csObj = await r1cs.exportJson(r1csName, logger);
const S = JSON.stringify(utils.stringifyBigInts(r1csObj), null, 1);
await fs.promises.writeFile(jsonName, S);
return 0;
}
@ -363,18 +375,11 @@ async function wtnsCalculate(params, options) {
const inputName = params[1] || "input.json";
const witnessName = params[2] || "witness.wtns";
const wasm = await fs.promises.readFile(wasmName);
if (options.verbose) Logger.setLogLevel("DEBUG");
const input = unstringifyBigInts(JSON.parse(await fs.promises.readFile(inputName, "utf8")));
const wc = await WitnessCalculatorBuilder(wasm, options);
const w = await wc.calculateBinWitness(input);
await wtnsFile.writeBin(witnessName, w, wc.prime);
/*
const w = await wc.calculateWitness(input);
await wtnsFile.write(witnessName, w, wc.prime);
*/
// fs.promises.writeFile(witnessName, JSON.stringify(stringifyBigInts(w), null, 1));
await wtns.calculate(input, wasmName, witnessName, {});
return 0;
}
@ -388,42 +393,11 @@ async function wtnsDebug(params, options) {
const witnessName = params[2] || "witness.wtns";
const symName = params[3] || changeExt(wasmName, "sym");
const wasm = await fs.promises.readFile(wasmName);
if (options.verbose) Logger.setLogLevel("DEBUG");
const input = unstringifyBigInts(JSON.parse(await fs.promises.readFile(inputName, "utf8")));
let wcOps = {
sanityCheck: true
};
let sym = await loadSyms(symName);
if (options.set) {
if (!sym) sym = await loadSyms(symName);
wcOps.logSetSignal= function(labelIdx, value) {
console.log("SET " + sym.labelIdx2Name[labelIdx] + " <-- " + value.toString());
};
}
if (options.get) {
if (!sym) sym = await loadSyms(symName);
wcOps.logGetSignal= function(varIdx, value) {
console.log("GET " + sym.labelIdx2Name[varIdx] + " --> " + value.toString());
};
}
if (options.trigger) {
if (!sym) sym = await loadSyms(symName);
wcOps.logStartComponent= function(cIdx) {
console.log("START: " + sym.componentIdx2Name[cIdx]);
};
wcOps.logFinishComponent= function(cIdx) {
console.log("FINISH: " + sym.componentIdx2Name[cIdx]);
};
}
const wc = await WitnessCalculatorBuilder(wasm, wcOps);
const w = await wc.calculateWitness(input);
await wtnsFile.write(witnessName, w);
// await fs.promises.writeFile(witnessName, JSON.stringify(stringifyBigInts(w), null, 1));
await wtns.debug(input, wasmName, witnessName, symName, options, logger);
return 0;
}
@ -435,7 +409,9 @@ async function wtnsExportJson(params, options) {
const wtnsName = params[0] || "witness.wtns";
const jsonName = params[1] || "witness.json";
const w = await wtnsFile.read(wtnsName);
if (options.verbose) Logger.setLogLevel("DEBUG");
const w = await wtns.exportJson(wtnsName);
await fs.promises.writeFile(jsonName, JSON.stringify(stringifyBigInts(w), null, 1));
@ -443,7 +419,7 @@ async function wtnsExportJson(params, options) {
}
/*
// zksnark setup [circuit.r1cs] [circuit.zkey] [verification_key.json]
async function zksnarkSetup(params, options) {
@ -465,36 +441,9 @@ async function zksnarkSetup(params, options) {
return 0;
}
/*
// zksnark prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]
async function zksnarkProve(params, options) {
const zkeyName = params[0] || "circuit.zkey";
const witnessName = params[1] || "witness.wtns";
const proofName = params[2] || "proof.json";
const publicName = params[3] || "public.json";
const witness = await wtnsFile.read(witnessName);
// const witness = unstringifyBigInts(JSON.parse(fs.readFileSync(witnessName, "utf8")));
const provingKey = await zkeyFile.read(zkeyName);
// const provingKey = unstringifyBigInts(JSON.parse(fs.readFileSync(provingKeyName, "utf8")));
const protocol = provingKey.protocol;
if (!zkSnark[protocol]) throw new Error("Invalid protocol");
const {proof, publicSignals} = zkSnark[protocol].genProof(provingKey, witness, options.verbose);
await fs.promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8");
await fs.promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8");
return 0;
}
*/
// zksnark prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]
// groth16 prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]
async function zksnarkProve(params, options) {
const zkeyName = params[0] || "circuit.zkey";
@ -502,8 +451,9 @@ async function zksnarkProve(params, options) {
const proofName = params[2] || "proof.json";
const publicName = params[3] || "public.json";
if (options.verbose) Logger.setLogLevel("DEBUG");
const {proof, publicSignals} = await zksnark.groth16.prover(zkeyName, witnessName, options.verbose);
const {proof, publicSignals} = await groth16.prove(zkeyName, witnessName, logger);
await fs.promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8");
await fs.promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8");
@ -511,41 +461,60 @@ async function zksnarkProve(params, options) {
return 0;
}
// zksnark verify [verification_key.json] [public.json] [proof.json]
// groth16 fullprove [input.json] [circuit.wasm] [circuit.zkey] [proof.json] [public.json]
async function zksnarkFullProve(params, options) {
const inputName = params[0] || "input.json";
const wasmName = params[1] || "circuit.wasm";
const zkeyName = params[2] || "circuit.zkey";
const proofName = params[3] || "proof.json";
const publicName = params[4] || "public.json";
if (options.verbose) Logger.setLogLevel("DEBUG");
const input = unstringifyBigInts(JSON.parse(await fs.promises.readFile(inputName, "utf8")));
const {proof, publicSignals} = await groth16.fullProve(input, wasmName, zkeyName, logger);
await fs.promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8");
await fs.promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8");
return 0;
}
// groth16 verify [verification_key.json] [public.json] [proof.json]
async function zksnarkVerify(params, options) {
const verificationKeyName = params[0] || "verification_key.json";
const publicName = params[0] || "public.json";
const proofName = params[0] || "proof.json";
const publicName = params[1] || "public.json";
const proofName = params[2] || "proof.json";
const verificationKey = unstringifyBigInts(JSON.parse(fs.readFileSync(verificationKeyName, "utf8")));
const pub = unstringifyBigInts(JSON.parse(fs.readFileSync(publicName, "utf8")));
const proof = unstringifyBigInts(JSON.parse(fs.readFileSync(proofName, "utf8")));
/*
const protocol = verificationKey.protocol;
if (!zkSnark[protocol]) throw new Error("Invalid protocol");
if (options.verbose) Logger.setLogLevel("DEBUG");
const isValid = zkSnark[protocol].isValid(verificationKey, proof, pub);
*/
const isValid = await zksnark.groth16.verifier(verificationKey, proof, pub);
const isValid = await groth16.validate(verificationKey, pub, proof, logger);
if (isValid) {
console.log("OK");
return 0;
} else {
console.log("INVALID");
return 1;
}
}
// zkey export vkey [circuit.zkey] [verification_key.json]",
async function zkeyExportVKey(params) {
async function zkeyExportVKey(params, options) {
const zkeyName = params[0] || "circuit.zkey";
const verificationKeyName = params[2] || "verification_key.json";
return await zkey.exportVerificationKey(zkeyName, verificationKeyName);
if (options.verbose) Logger.setLogLevel("DEBUG");
const vKey = await zkey.exportVerificationKey(zkeyName);
const S = JSON.stringify(utils.stringifyBigInts(vKey), null, 1);
await fs.promises.writeFile(verificationKeyName, S);
}
// zkey export json [circuit.zkey] [circuit.zkey.json]",
@ -553,18 +522,23 @@ async function zkeyExportJson(params, options) {
const zkeyName = params[0] || "circuit.zkey";
const zkeyJsonName = params[1] || "circuit.zkey.json";
return await zkey.exportJson(zkeyName, zkeyJsonName, options.verbose);
if (options.verbose) Logger.setLogLevel("DEBUG");
const zKey = await zkey.exportJson(zkeyName, logger);
const S = JSON.stringify(utils.stringifyBigInts(zKey), null, 1);
await fs.promises.writeFile(zkeyJsonName, S);
}
// solidity genverifier <verificationKey.json> <verifier.sol>
async function solidityGenVerifier(params, options) {
let verificationKeyName;
// solidity genverifier [circuit.zkey] [verifier.sol]
async function zkeyExportSolidityVerifier(params, options) {
let zkeyName;
let verifierName;
if (params.length < 1) {
verificationKeyName = "verification_key.json";
zkeyName = "circuit.zkey";
} else {
verificationKeyName = params[0];
zkeyName = params[0];
}
if (params.length < 2) {
@ -573,18 +547,11 @@ async function solidityGenVerifier(params, options) {
verifierName = params[1];
}
const verificationKey = unstringifyBigInts(JSON.parse(fs.readFileSync(verificationKeyName, "utf8")));
if (options.verbose) Logger.setLogLevel("DEBUG");
let verifierCode;
if (verificationKey.protocol == "original") {
verifierCode = solidityGenerator.generateVerifier_original(verificationKey);
} else if (verificationKey.protocol == "groth16") {
verifierCode = solidityGenerator.generateVerifier_groth16(verificationKey);
} else if (verificationKey.protocol == "kimleeoh") {
verifierCode = solidityGenerator.generateVerifier_kimleeoh(verificationKey);
} else {
throw new Error("InvalidProof");
}
const templateName = path.join( __dirname, "templates", "verifier_groth16.sol");
const verifierCode = await zkey.exportSolidityVerifier(zkeyName, templateName, logger);
fs.writeFileSync(verifierName, verifierCode, "utf-8");
@ -593,7 +560,7 @@ async function solidityGenVerifier(params, options) {
// solidity gencall <public.json> <proof.json>
async function solidityGenCall(params, options) {
async function zkeyExportSolidityCalldata(params, options) {
let publicName;
let proofName;
@ -609,14 +576,15 @@ async function solidityGenCall(params, options) {
proofName = params[1];
}
if (options.verbose) Logger.setLogLevel("DEBUG");
const public = unstringifyBigInts(JSON.parse(fs.readFileSync(publicName, "utf8")));
const pub = unstringifyBigInts(JSON.parse(fs.readFileSync(publicName, "utf8")));
const proof = unstringifyBigInts(JSON.parse(fs.readFileSync(proofName, "utf8")));
let inputs = "";
for (let i=0; i<public.length; i++) {
for (let i=0; i<pub.length; i++) {
if (inputs != "") inputs = inputs + ",";
inputs = inputs + p256(public[i]);
inputs = inputs + p256(pub[i]);
}
let S;
@ -665,7 +633,9 @@ async function powersOfTawNew(params, options) {
const curve = await curves.getCurveFromName(curveName);
return await powersOfTaw.newAccumulator(curve, power, ptauName, options.verbose);
if (options.verbose) Logger.setLogLevel("DEBUG");
return await powersOfTaw.newAccumulator(curve, power, ptauName, logger);
}
async function powersOfTawExportChallange(params, options) {
@ -680,7 +650,9 @@ async function powersOfTawExportChallange(params, options) {
challangeName = params[1];
}
return await powersOfTaw.exportChallange(ptauName, challangeName, options.verbose);
if (options.verbose) Logger.setLogLevel("DEBUG");
return await powersOfTaw.exportChallange(ptauName, challangeName, logger);
}
// powersoftau challange contribute <curve> <challange> [response]
@ -698,7 +670,9 @@ async function powersOfTawChallangeContribute(params, options) {
responseName = params[2];
}
return await powersOfTaw.challangeContribute(curve, challangeName, responseName, options.entropy, options.verbose);
if (options.verbose) Logger.setLogLevel("DEBUG");
return await powersOfTaw.challangeContribute(curve, challangeName, responseName, options.entropy, logger);
}
@ -716,7 +690,9 @@ async function powersOfTawImport(params, options) {
if (options.nopoints) importPoints = false;
if (options.nocheck) doCheck = false;
const res = await powersOfTaw.importResponse(oldPtauName, response, newPtauName, options.name, importPoints, options.verbose);
if (options.verbose) Logger.setLogLevel("DEBUG");
const res = await powersOfTaw.importResponse(oldPtauName, response, newPtauName, options.name, importPoints, logger);
if (res) return res;
if (!doCheck) return;
@ -729,12 +705,12 @@ async function powersOfTawVerify(params, options) {
ptauName = params[0];
const res = await powersOfTaw.verify(ptauName, options.verbose);
if (options.verbose) Logger.setLogLevel("DEBUG");
const res = await powersOfTaw.verify(ptauName, logger);
if (res === true) {
console.log("Powers of tau OK!");
return 0;
} else {
console.log("=======>INVALID Powers of tau<==========");
return 1;
}
}
@ -750,7 +726,9 @@ async function powersOfTawBeacon(params, options) {
beaconHashStr = params[2];
numIterationsExp = params[3];
return await powersOfTaw.beacon(oldPtauName, newPtauName, options.name ,numIterationsExp, beaconHashStr, options.verbose);
if (options.verbose) Logger.setLogLevel("DEBUG");
return await powersOfTaw.beacon(oldPtauName, newPtauName, options.name ,beaconHashStr, numIterationsExp, logger);
}
async function powersOfTawContribute(params, options) {
@ -760,7 +738,9 @@ async function powersOfTawContribute(params, options) {
oldPtauName = params[0];
newPtauName = params[1];
return await powersOfTaw.contribute(oldPtauName, newPtauName, options.name , options.entropy, options.verbose);
if (options.verbose) Logger.setLogLevel("DEBUG");
return await powersOfTaw.contribute(oldPtauName, newPtauName, options.name , options.entropy, logger);
}
async function powersOfTawPreparePhase2(params, options) {
@ -770,7 +750,9 @@ async function powersOfTawPreparePhase2(params, options) {
oldPtauName = params[0];
newPtauName = params[1];
return await powersOfTaw.preparePhase2(oldPtauName, newPtauName, options.verbose);
if (options.verbose) Logger.setLogLevel("DEBUG");
return await powersOfTaw.preparePhase2(oldPtauName, newPtauName, logger);
}
// powersoftau export json <powersoftau_0000.ptau> <powersoftau_0000.json>",
@ -781,7 +763,13 @@ async function powersOfTawExportJson(params, options) {
ptauName = params[0];
jsonName = params[1];
return await powersOfTaw.exportJson(ptauName, jsonName, options.verbose);
if (options.verbose) Logger.setLogLevel("DEBUG");
const pTau = await powersOfTaw.exportJson(ptauName, logger);
const S = JSON.stringify(stringifyBigInts(pTau), null, 1);
await fs.promises.writeFile(jsonName, S);
}
@ -809,7 +797,9 @@ async function zkeyNew(params, options) {
zkeyName = params[2];
}
return zkey.new(r1csName, ptauName, zkeyName, options.verbose);
if (options.verbose) Logger.setLogLevel("DEBUG");
return zkey.newZKey(r1csName, ptauName, zkeyName, logger);
}
// zkey export bellman [circuit.zkey] [circuit.mpcparams]
@ -829,7 +819,9 @@ async function zkeyExportBellman(params, options) {
mpcparamsName = params[1];
}
return zkey.exportBellman(zkeyName, mpcparamsName, options.verbose);
if (options.verbose) Logger.setLogLevel("DEBUG");
return zkey.exportBellman(zkeyName, mpcparamsName, logger);
}
@ -844,7 +836,9 @@ async function zkeyImportBellman(params, options) {
mpcParamsName = params[1];
zkeyNameNew = params[2];
return zkey.importBellman(zkeyNameOld, mpcParamsName, zkeyNameNew, options.verbose);
if (options.verbose) Logger.setLogLevel("DEBUG");
return zkey.importBellman(zkeyNameOld, mpcParamsName, zkeyNameNew, options.name, logger);
}
// phase2 verify [circuit.r1cs] [powersoftau.ptau] [circuit.zkey]
@ -871,12 +865,12 @@ async function zkeyVerify(params, options) {
zkeyName = params[2];
}
const res = await zkey.verify(r1csName, ptauName, zkeyName, options.verbose);
if (options.verbose) Logger.setLogLevel("DEBUG");
const res = await zkey.verify(r1csName, ptauName, zkeyName, logger);
if (res === true) {
console.log("zKey OK!");
return 0;
} else {
console.log("=======>INVALID zKey<==========");
return 1;
}
@ -891,8 +885,9 @@ async function zkeyContribute(params, options) {
zkeyOldName = params[0];
zkeyNewName = params[1];
if (options.verbose) Logger.setLogLevel("DEBUG");
return zkey.contribute(zkeyOldName, zkeyNewName, options.name, options.entropy, options.verbose);
return zkey.contribute(zkeyOldName, zkeyNewName, options.name, options.entropy, logger);
}
// zkey beacon <circuit_old.zkey> <circuit_new.zkey> <beaconHash(Hex)> <numIterationsExp>
@ -907,12 +902,14 @@ async function zkeyBeacon(params, options) {
beaconHashStr = params[2];
numIterationsExp = params[3];
return await zkey.beacon(zkeyOldName, zkeyNewName, options.name ,numIterationsExp, beaconHashStr, options.verbose);
if (options.verbose) Logger.setLogLevel("DEBUG");
return await zkey.beacon(zkeyOldName, zkeyNewName, options.name ,beaconHashStr, numIterationsExp, logger);
}
// zkey challange contribute <curve> <challange> [response]",
async function zkeyChallangeContribute(params, options) {
async function zkeyBellmanContribute(params, options) {
let challangeName;
let responseName;
@ -926,6 +923,8 @@ async function zkeyChallangeContribute(params, options) {
responseName = params[2];
}
return zkey.challangeContribute(curve, challangeName, responseName, options.entropy, options.verbose);
if (options.verbose) Logger.setLogLevel("DEBUG");
return zkey.bellmanContribute(curve, challangeName, responseName, options.entropy, logger);
}


@ -0,0 +1,17 @@
import resolve from "rollup-plugin-node-resolve";
import commonJS from "rollup-plugin-commonjs";
export default {
input: "main.js",
output: {
file: "build/main.js",
format: "cjs",
},
external: ["fs", "os", "worker_threads", "readline", "crypto", "path"],
plugins: [
resolve({ preferBuiltins: true }),
commonJS({
preserveSymlinks: true
}),
]
};


@ -0,0 +1,41 @@
import resolve from "rollup-plugin-node-resolve";
import commonJS from "rollup-plugin-commonjs";
import json from "rollup-plugin-json";
export default {
input: "cli.js",
output: {
file: "build/cli.cjs",
format: "cjs",
banner: "#! /usr/bin/env node\n",
},
external: [
"fs",
"os",
"worker_threads",
"readline",
"crypto",
"path",
"big-integer",
"wasmsnark",
"circom_runtime",
"blake2b-wasm",
"ffjavascript",
"keccak",
"yargs",
"logplease"
],
plugins: [
resolve({
preferBuiltins: true,
}),
commonJS({
preserveSymlinks: true,
include: "node_modules/**",
exclude: "node_modules/big-integer/**"
}),
json()
]
};


@ -0,0 +1,23 @@
import resolve from "rollup-plugin-node-resolve";
import commonJS from "rollup-plugin-commonjs";
import ignore from "rollup-plugin-ignore";
import replace from "rollup-plugin-replace";
export default {
input: "main.js",
output: {
file: "build/snarkjs.js",
format: "iife",
sourcemap: "inline",
globals: {
os: "null"
},
name: "snarkjs"
},
plugins: [
ignore(["fs", "os", "crypto", "readline", "worker_threads"]),
resolve(),
commonJS(),
replace({ "process.browser": !!process.env.BROWSER }),
]
};
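Since this iife build sets `name: "snarkjs"`, the browser bundle exposes the whole API from `main.js` on a single global. A small sketch (assuming `build/snarkjs.js` has been loaded with a plain `<script>` tag):
```js
// The namespaces exported by main.js hang off the global created above.
console.log(Object.keys(snarkjs)); // e.g. ["groth16", "powersOfTau", "r1cs", "wtns", "zKey"]
```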


@ -1,35 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
exports.original = {
setup: require("./src/setup_original.js"),
genProof: require("./src/prover_original.js"),
isValid: require("./src/verifier_original.js")
};
exports.groth16 = {
setup: require("./src/setup_groth16.js"),
genProof: require("./src/prover_groth16.js"),
isValid: require("./src/verifier_groth16.js")
};
exports.kimleeoh = {
setup: require("./src/setup_kimleeoh.js"),
genProof: require("./src/prover_kimleeoh.js"),
isValid: require("./src/verifier_kimleeoh.js")
};

7
main.js Normal file

@ -0,0 +1,7 @@
export * as groth16 from "./src/groth16.js";
export * as powersOfTau from "./src/powersoftau.js";
export * as r1cs from "./src/r1cs.js";
export * as wtns from "./src/wtns.js";
export * as zKey from "./src/zkey.js";
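These five namespaces are the same modules the CLI commands delegate to, so anything the tutorial does from the shell can also be done from JavaScript. A sketch of the programmatic equivalents of `snarkjs r1cs info` and `snarkjs wtns calculate` (the signatures below are copied from how `cli.js` invokes them; the optional logger argument is omitted):
```js
import { r1cs, wtns } from "./main.js";

async function run() {
    // Print the circuit statistics (same output as `snarkjs r1cs info`).
    await r1cs.info("circuit.r1cs");

    // Compute the witness file from the inputs and the compiled circuit.
    await wtns.calculate({ a: 3, b: 11 }, "circuit.wasm", "witness.wtns", {});
}

run();
```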

255
package-lock.json generated

@ -36,6 +36,27 @@
"integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==",
"dev": true
},
"@types/estree": {
"version": "0.0.45",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.45.tgz",
"integrity": "sha512-jnqIUKDUqJbDIUxm0Uj7bnlMnRm1T/eZ9N+AVMqhPgzrba2GhGG5o/jCTwmdPK709nEZsGoMzXEDUjcXHa3W0g==",
"dev": true
},
"@types/node": {
"version": "14.0.18",
"resolved": "https://registry.npmjs.org/@types/node/-/node-14.0.18.tgz",
"integrity": "sha512-0Z3nS5acM0cIV4JPzrj9g/GH0Et5vmADWtip3YOXOp1NpOLU8V3KoZDc8ny9c1pe/YSYYzQkAWob6dyV/EWg4g==",
"dev": true
},
"@types/resolve": {
"version": "0.0.8",
"resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-0.0.8.tgz",
"integrity": "sha512-auApPaJf3NPfe18hSoJkp8EbZzer2ISk7o8mCC3M9he/a04+gbMF97NkpD2S8riMGvm4BMRI59/SZQSaLTKpsQ==",
"dev": true,
"requires": {
"@types/node": "*"
}
},
"acorn": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.1.tgz",
@ -119,7 +140,8 @@
"assertion-error": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz",
"integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw=="
"integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==",
"dev": true
},
"astral-regex": {
"version": "1.0.0",
@ -144,33 +166,17 @@
"integrity": "sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow==",
"dev": true
},
"blake2b": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/blake2b/-/blake2b-2.1.3.tgz",
"integrity": "sha512-pkDss4xFVbMb4270aCyGD3qLv92314Et+FsKzilCLxDz5DuZ2/1g3w4nmBbu6nKApPspnjG7JcwTjGZnduB1yg==",
"blake2b-wasm": {
"version": "git+https://github.com/jbaylina/blake2b-wasm.git#0d5f024b212429c7f50a7f533aa3a2406b5b42b3",
"from": "git+https://github.com/jbaylina/blake2b-wasm.git",
"requires": {
"blake2b-wasm": "^1.1.0",
"nanoassert": "^1.0.0"
},
"dependencies": {
"blake2b-wasm": {
"version": "1.1.7",
"resolved": "https://registry.npmjs.org/blake2b-wasm/-/blake2b-wasm-1.1.7.tgz",
"integrity": "sha512-oFIHvXhlz/DUgF0kq5B1CqxIDjIJwh9iDeUUGQUcvgiGz7Wdw03McEO7CfLBy7QKGdsydcMCgO9jFNBAFCtFcA==",
"requires": {
"nanoassert": "^1.0.0"
}
}
}
},
"blake2b-wasm": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/blake2b-wasm/-/blake2b-wasm-2.1.0.tgz",
"integrity": "sha512-8zKXt9nk4cUCBU2jaUcSYcPA+UESwWOmb9Gsi8J35BifVb+tjVmbDhZbvmVmZEk6xZN1y35RNW6VqOwb0mkqsg==",
"dev": true,
"requires": {
"nanoassert": "^1.0.0"
}
"blakejs": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/blakejs/-/blakejs-1.1.0.tgz",
"integrity": "sha1-ad+S75U6qIylGjLfarHFShVfx6U="
},
"brace-expansion": {
"version": "1.1.11",
@ -197,6 +203,12 @@
"integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==",
"dev": true
},
"builtin-modules": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.1.0.tgz",
"integrity": "sha512-k0KL0aWZuBt2lrxrcASWDfwOLMnodeQjodT/1SxEQAXsHANgo6ZC/VEaSEHCXt7aSTZ4/4H5LKa+tBXmW7Vtvw==",
"dev": true
},
"callsites": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
@ -212,6 +224,7 @@
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz",
"integrity": "sha512-XQU3bhBukrOsQCuwZndwGcCVQHyZi53fQ6Ys1Fym7E4olpIqqZZhhoFJoaKVvV17lWQoXYwgWN2nF5crA8J2jw==",
"dev": true,
"requires": {
"assertion-error": "^1.1.0",
"check-error": "^1.0.2",
@ -241,7 +254,8 @@
"check-error": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz",
"integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII="
"integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=",
"dev": true
},
"chokidar": {
"version": "3.3.0",
@ -260,11 +274,11 @@
}
},
"circom_runtime": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/circom_runtime/-/circom_runtime-0.0.6.tgz",
"integrity": "sha512-o0T5MuWzxnxinWG3+CygS/kZouoP+z5ZrufUwqKJy3gsVFJhkbqMpfKmcBGjhExB3uatA7cKyOiRAOLOz5+t5w==",
"version": "0.0.8",
"resolved": "https://registry.npmjs.org/circom_runtime/-/circom_runtime-0.0.8.tgz",
"integrity": "sha512-4ddyXq5doq0Mj3QXUWy3owmiE+gI4EMYwn7UjFeQKUb9ieXK6ZKvz3RVP+fktcuPQvdc69q5X310trnX7d7Xcw==",
"requires": {
"ffjavascript": "0.1.0",
"ffjavascript": "0.2.2",
"fnv-plus": "^1.3.1"
}
},
@ -313,11 +327,6 @@
"integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=",
"dev": true
},
"commander": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz",
"integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg=="
},
"concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
@ -354,6 +363,7 @@
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz",
"integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==",
"dev": true,
"requires": {
"type-detect": "^4.0.0"
}
@ -435,7 +445,8 @@
"escape-string-regexp": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
"integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ="
"integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=",
"dev": true
},
"eslint": {
"version": "6.8.0",
@ -579,6 +590,12 @@
"integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==",
"dev": true
},
"estree-walker": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-0.6.1.tgz",
"integrity": "sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==",
"dev": true
},
"esutils": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
@ -629,16 +646,18 @@
"dev": true
},
"fastfile": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/fastfile/-/fastfile-0.0.1.tgz",
"integrity": "sha512-Fk8PWafGWGEUw7oPq/dJen92ASxknCEy4ZC8n4VEvSwCp/jcReyEmVoWsRIWTf+IvAp2MzvFi54vOPeK2LQZtQ=="
"version": "0.0.5",
"resolved": "https://registry.npmjs.org/fastfile/-/fastfile-0.0.5.tgz",
"integrity": "sha512-h6YDy9iI1gITf900quL91qnBl25JtqU5KD82NzhW0B35YFjGhXwWSkUA8g+nyz1th95RWEhtonz7O2AiSL+lQg=="
},
"ffjavascript": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/ffjavascript/-/ffjavascript-0.1.0.tgz",
"integrity": "sha512-dmKlUasSfvUcxBm8nCSKl2x7EFJsXA7OVP8XLFA03T2+6mAc3IiVLC2ambEVOcMOhyhl0vJfVZjM9f9d38D1rw==",
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/ffjavascript/-/ffjavascript-0.2.2.tgz",
"integrity": "sha512-Fp3qbKCuk3ZuOgMhpV6E5fiv/ZfMV5iuiGCk2YBb2rdFkgHza5Vc/AZTDDIwetoq1kh/ZX3Ky8k214jzDHVFDw==",
"requires": {
"big-integer": "^1.6.48"
"big-integer": "^1.6.48",
"wasmcurves": "0.0.4",
"worker-threads": "^1.0.0"
}
},
"figures": {
@ -740,7 +759,8 @@
"get-func-name": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz",
"integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE="
"integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=",
"dev": true
},
"get-stream": {
"version": "4.1.0",
@ -1019,6 +1039,12 @@
"is-extglob": "^2.1.1"
}
},
"is-module": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz",
"integrity": "sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE=",
"dev": true
},
"is-number": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
@ -1031,6 +1057,15 @@
"integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=",
"dev": true
},
"is-reference": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/is-reference/-/is-reference-1.2.1.tgz",
"integrity": "sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==",
"dev": true,
"requires": {
"@types/estree": "*"
}
},
"is-regex": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz",
@ -1138,6 +1173,20 @@
"chalk": "^2.4.2"
}
},
"logplease": {
"version": "1.2.15",
"resolved": "https://registry.npmjs.org/logplease/-/logplease-1.2.15.tgz",
"integrity": "sha512-jLlHnlsPSJjpwUfcNyUxXCl33AYg2cHhIf9QhGL2T4iPT0XPB+xP1LRKFPgIg1M/sg9kAJvy94w9CzBNrfnstA=="
},
"magic-string": {
"version": "0.25.7",
"resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.7.tgz",
"integrity": "sha512-4CrMT5DOHTDk4HYDlzmwu4FVCcIYI8gauveasrdCu2IKIFOJ3f0v/8MDGJCDL9oD2ppz/Av1b0Nj345H9M+XIA==",
"dev": true,
"requires": {
"sourcemap-codec": "^1.4.4"
}
},
"map-age-cleaner": {
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz",
@ -1575,10 +1624,17 @@
"resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz",
"integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A="
},
"path-parse": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz",
"integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==",
"dev": true
},
"pathval": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz",
"integrity": "sha1-uULm1L3mUwBe9rcTYd74cn0GReA="
"integrity": "sha1-uULm1L3mUwBe9rcTYd74cn0GReA=",
"dev": true
},
"picomatch": {
"version": "2.2.2",
@ -1614,12 +1670,12 @@
"dev": true
},
"r1csfile": {
"version": "0.0.5",
"resolved": "https://registry.npmjs.org/r1csfile/-/r1csfile-0.0.5.tgz",
"integrity": "sha512-B+BdKPb/WUTp4N/3X4d1Spgx9Ojx5tFVejGZRJxpTtzq34mC8Vi/czWfiPj85V8kud31lCfYcZ16z7+czvM0Sw==",
"version": "0.0.9",
"resolved": "https://registry.npmjs.org/r1csfile/-/r1csfile-0.0.9.tgz",
"integrity": "sha512-VEp8K+Y3z+rRepjVgnnHI0fMgkTts6jYGr6R2WYWTJzW/g08rChWKErjwJRp4VRmqBGHNDV73GImLCxmf3+/7w==",
"requires": {
"fastfile": "0.0.1",
"ffjavascript": "0.1.0"
"fastfile": "0.0.5",
"ffjavascript": "0.2.2"
}
},
"readdirp": {
@ -1647,6 +1703,15 @@
"resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz",
"integrity": "sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE="
},
"resolve": {
"version": "1.17.0",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.17.0.tgz",
"integrity": "sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w==",
"dev": true,
"requires": {
"path-parse": "^1.0.6"
}
},
"resolve-from": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
@ -1672,6 +1737,75 @@
"glob": "^7.1.3"
}
},
"rollup": {
"version": "2.20.0",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-2.20.0.tgz",
"integrity": "sha512-hkbp//ne1om8+PQRpd81zk0KDvbJxkLZdZJh1ZNxjd1EkI0H1TmYuHqqXx88yciS+5YnMom3geubQjTeeUnNNw==",
"dev": true,
"requires": {
"fsevents": "~2.1.2"
}
},
"rollup-plugin-commonjs": {
"version": "10.1.0",
"resolved": "https://registry.npmjs.org/rollup-plugin-commonjs/-/rollup-plugin-commonjs-10.1.0.tgz",
"integrity": "sha512-jlXbjZSQg8EIeAAvepNwhJj++qJWNJw1Cl0YnOqKtP5Djx+fFGkp3WRh+W0ASCaFG5w1jhmzDxgu3SJuVxPF4Q==",
"dev": true,
"requires": {
"estree-walker": "^0.6.1",
"is-reference": "^1.1.2",
"magic-string": "^0.25.2",
"resolve": "^1.11.0",
"rollup-pluginutils": "^2.8.1"
}
},
"rollup-plugin-ignore": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/rollup-plugin-ignore/-/rollup-plugin-ignore-1.0.6.tgz",
"integrity": "sha512-OC9h/VMWcOJBwtHxLCaYeuGhqlOWTaE0/S1u5BZqxR8KB+0SjvCRPoHNMdCZQ3c3yVPWFOB2GM49atg2RvGicQ==",
"dev": true
},
"rollup-plugin-json": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/rollup-plugin-json/-/rollup-plugin-json-4.0.0.tgz",
"integrity": "sha512-hgb8N7Cgfw5SZAkb3jf0QXii6QX/FOkiIq2M7BAQIEydjHvTyxXHQiIzZaTFgx1GK0cRCHOCBHIyEkkLdWKxow==",
"dev": true,
"requires": {
"rollup-pluginutils": "^2.5.0"
}
},
"rollup-plugin-node-resolve": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/rollup-plugin-node-resolve/-/rollup-plugin-node-resolve-5.2.0.tgz",
"integrity": "sha512-jUlyaDXts7TW2CqQ4GaO5VJ4PwwaV8VUGA7+km3n6k6xtOEacf61u0VXwN80phY/evMcaS+9eIeJ9MOyDxt5Zw==",
"dev": true,
"requires": {
"@types/resolve": "0.0.8",
"builtin-modules": "^3.1.0",
"is-module": "^1.0.0",
"resolve": "^1.11.1",
"rollup-pluginutils": "^2.8.1"
}
},
"rollup-plugin-replace": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/rollup-plugin-replace/-/rollup-plugin-replace-2.2.0.tgz",
"integrity": "sha512-/5bxtUPkDHyBJAKketb4NfaeZjL5yLZdeUihSfbF2PQMz+rSTEb8ARKoOl3UBT4m7/X+QOXJo3sLTcq+yMMYTA==",
"dev": true,
"requires": {
"magic-string": "^0.25.2",
"rollup-pluginutils": "^2.6.0"
}
},
"rollup-pluginutils": {
"version": "2.8.2",
"resolved": "https://registry.npmjs.org/rollup-pluginutils/-/rollup-pluginutils-2.8.2.tgz",
"integrity": "sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==",
"dev": true,
"requires": {
"estree-walker": "^0.6.1"
}
},
"run-async": {
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.0.tgz",
@ -1735,6 +1869,12 @@
"is-fullwidth-code-point": "^2.0.0"
}
},
"sourcemap-codec": {
"version": "1.4.8",
"resolved": "https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz",
"integrity": "sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==",
"dev": true
},
"sprintf-js": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
@ -1892,7 +2032,8 @@
"type-detect": {
"version": "4.0.8",
"resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz",
"integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g=="
"integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==",
"dev": true
},
"type-fest": {
"version": "0.8.1",
@ -1915,6 +2056,15 @@
"integrity": "sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g==",
"dev": true
},
"wasmcurves": {
"version": "0.0.4",
"resolved": "https://registry.npmjs.org/wasmcurves/-/wasmcurves-0.0.4.tgz",
"integrity": "sha512-c/Tob+F/7jJhep1b2qtj54r4nkGaRifNbQ1OJx8cBBFH1RlHbWIbISHWONClOxiVwy/JZOpbN4SgvSX/4lF80A==",
"requires": {
"big-integer": "^1.6.42",
"blakejs": "^1.1.0"
}
},
"which": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz",
@ -1943,6 +2093,11 @@
"integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==",
"dev": true
},
"worker-threads": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/worker-threads/-/worker-threads-1.0.0.tgz",
"integrity": "sha512-vK6Hhvph8oLxocEJIlc3YfGAZhm210uGzjZsXSu+JYLAQ/s/w4Tqgl60JrdH58hW8NSGP4m3bp8a92qPXgX05w=="
},
"wrap-ansi": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz",


@ -1,13 +1,22 @@
{
"name": "snarkjs",
"type": "module",
"version": "0.1.31",
"description": "zkSNARKs implementation in JavaScript",
"main": "index.js",
"main": "./build/main.cjs",
"module": "./main.js",
"exports": {
"import": "./main.js",
"require": "./build/main.cjs"
},
"scripts": {
"test": "mocha"
"test": "mocha",
"build": "rollup -c config/rollup.cjs.config.js",
"buildcli": "rollup -c config/rollup.cli.config.js",
"buildiife": "BROWSER=true rollup -c config/rollup.iife.config.js"
},
"bin": {
"snarkjs": "cli.js"
"snarkjs": "build/cli.js"
},
"directories": {
"templates": "templates"
@ -29,16 +38,24 @@
},
"dependencies": {
"blake2b-wasm": "https://github.com/jbaylina/blake2b-wasm.git",
"circom_runtime": "0.0.6",
"ffjavascript": "0.1.0",
"circom_runtime": "0.0.8",
"fastfile": "0.0.5",
"ffjavascript": "0.2.2",
"keccak": "^3.0.0",
"r1csfile": "0.0.5",
"logplease": "^1.2.15",
"r1csfile": "0.0.9",
"yargs": "^12.0.5"
},
"devDependencies": {
"chai": "^4.2.0",
"eslint": "^6.8.0",
"lodash": "^4.17.15",
"mocha": "^7.1.1"
"mocha": "^7.1.1",
"rollup": "^2.20.0",
"rollup-plugin-commonjs": "^10.1.0",
"rollup-plugin-ignore": "^1.0.6",
"rollup-plugin-json": "^4.0.0",
"rollup-plugin-node-resolve": "^5.2.0",
"rollup-plugin-replace": "^2.2.0"
}
}
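The new `"exports"` map makes the package dual-mode: ES-module consumers get `main.js` directly, while CommonJS consumers get the rollup bundle. A sketch of both entry points (assuming the package is installed and built):
```js
// ESM: resolved through "exports".import / "module" to main.js.
import { groth16, zKey } from "snarkjs";

// CJS alternative: resolved through "exports".require / "main" to the bundle.
// const { groth16, zKey } = require("snarkjs");
```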


@ -1,8 +1,8 @@
const Scalar = require("ffjavascript").Scalar;
const fastFile = require("fastfile");
const assert = require("assert");
async function readBinFile(fileName, type, maxVersion) {
import { Scalar } from "ffjavascript";
import * as fastFile from "fastfile";
export async function readBinFile(fileName, type, maxVersion) {
const fd = await fastFile.readExisting(fileName);
@ -10,11 +10,11 @@ async function readBinFile(fileName, type, maxVersion) {
let readedType = "";
for (let i=0; i<4; i++) readedType += String.fromCharCode(b[i]);
if (readedType != type) assert(false, fileName + ": Invalid File format");
if (readedType != type) throw new Error(fileName + ": Invalid File format");
let v = await fd.readULE32();
if (v>maxVersion) assert(false, "Version not supported");
if (v>maxVersion) throw new Error("Version not supported");
const nSections = await fd.readULE32();
@ -34,7 +34,7 @@ async function readBinFile(fileName, type, maxVersion) {
return {fd, sections};
}
async function createBinFile(fileName, type, version, nSections) {
export async function createBinFile(fileName, type, version, nSections) {
const fd = await fastFile.createOverride(fileName);
@ -48,8 +48,8 @@ async function createBinFile(fileName, type, version, nSections) {
return fd;
}
async function startWriteSection(fd, idSection) {
assert(typeof fd.writingSection === "undefined", "Already writing a section");
export async function startWriteSection(fd, idSection) {
if (typeof fd.writingSection !== "undefined") throw new Error("Already writing a section");
await fd.writeULE32(idSection); // Header type
fd.writingSection = {
pSectionSize: fd.pos
@ -57,8 +57,8 @@ async function startWriteSection(fd, idSection) {
await fd.writeULE64(0); // Temporally set to 0 length
}
async function endWriteSection(fd) {
assert(typeof fd.writingSection != "undefined", "Not writing a section");
export async function endWriteSection(fd) {
if (typeof fd.writingSection === "undefined") throw new Error("Not writing a section");
const sectionSize = fd.pos - fd.writingSection.pSectionSize - 8;
const oldPos = fd.pos;
@ -68,36 +68,36 @@ async function endWriteSection(fd) {
delete fd.writingSection;
}
async function startReadUniqueSection(fd, sections, idSection) {
assert(typeof fd.readingSection === "undefined", "Already reading a section");
if (!sections[idSection]) assert(false, fd.fileName + ": Missing section "+ idSection );
if (sections[idSection].length>1) assert(false, fd.fileName +": Section Duplicated " +idSection);
export async function startReadUniqueSection(fd, sections, idSection) {
if (typeof fd.readingSection !== "undefined") throw new Error("Already reading a section");
if (!sections[idSection]) throw new Error(fd.fileName + ": Missing section "+ idSection );
if (sections[idSection].length>1) throw new Error(fd.fileName +": Section Duplicated " +idSection);
fd.pos = sections[idSection][0].p;
fd.readingSection = sections[idSection][0];
}
async function endReadSection(fd, noCheck) {
assert(typeof fd.readingSection != "undefined", "Not reading a section");
export async function endReadSection(fd, noCheck) {
if (typeof fd.readingSection === "undefined") throw new Error("Not reading a section");
if (!noCheck) {
assert.equal(fd.pos-fd.readingSection.p, fd.readingSection.size);
if (fd.pos-fd.readingSection.p != fd.readingSection.size) throw new Error("Invalid section size reading");
}
delete fd.readingSection;
}
async function writeBigInt(fd, n, n8, pos) {
export async function writeBigInt(fd, n, n8, pos) {
const buff = new Uint8Array(n8);
Scalar.toRprLE(buff, 0, n, n8);
await fd.write(buff, pos);
}
async function readBigInt(fd, n8, pos) {
export async function readBigInt(fd, n8, pos) {
const buff = await fd.read(n8, pos);
return Scalar.fromRprLE(buff, 0, n8);
}
async function copySection(fdFrom, sections, fdTo, sectionId) {
export async function copySection(fdFrom, sections, fdTo, sectionId) {
const chunkSize = fdFrom.pageSize;
await startReadUniqueSection(fdFrom, sections, sectionId);
await startWriteSection(fdTo, sectionId);
@ -111,14 +111,14 @@ async function copySection(fdFrom, sections, fdTo, sectionId) {
}
async function readFullSection(fd, sections, idSection) {
export async function readFullSection(fd, sections, idSection) {
await startReadUniqueSection(fd, sections, idSection);
const res = await fd.read(fd.readingSection.size);
await endReadSection(fd);
return res;
}
async function sectionIsEqual(fd1, sections1, fd2, sections2, idSection) {
export async function sectionIsEqual(fd1, sections1, fd2, sections2, idSection) {
const MAX_BUFF_SIZE = fd1.pageSize * 16;
await startReadUniqueSection(fd1, sections1, idSection);
await startReadUniqueSection(fd2, sections2, idSection);
@ -134,16 +134,3 @@ async function sectionIsEqual(fd1, sections1, fd2, sections2, idSection) {
await endReadSection(fd2);
return true;
}
module.exports.readBinFile = readBinFile;
module.exports.createBinFile = createBinFile;
module.exports.writeBigInt = writeBigInt;
module.exports.readBigInt = readBigInt;
module.exports.startWriteSection = startWriteSection;
module.exports.endWriteSection = endWriteSection;
module.exports.startReadUniqueSection = startReadUniqueSection;
module.exports.endReadSection = endReadSection;
module.exports.copySection = copySection;
module.exports.readFullSection = readFullSection;
module.exports.sectionIsEqual = sectionIsEqual;
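The exported pairs compose: every write sits between `startWriteSection`/`endWriteSection`, and every read between `startReadUniqueSection`/`endReadSection`. A minimal round-trip sketch, assuming a hypothetical `"demo"` file type and a 32-byte field width (both placeholders):

```js
import * as binFileUtils from "./binfileutils.js";

async function roundTrip(fileName) {
    // Write one section holding a single 32-byte little-endian big integer.
    const fdW = await binFileUtils.createBinFile(fileName, "demo", 1, 1);
    await binFileUtils.startWriteSection(fdW, 1);
    await binFileUtils.writeBigInt(fdW, 12345n, 32);
    await binFileUtils.endWriteSection(fdW);
    await fdW.close();

    // Read it back; endReadSection verifies the declared section size was consumed.
    const {fd, sections} = await binFileUtils.readBinFile(fileName, "demo", 1);
    await binFileUtils.startReadUniqueSection(fd, sections, 1);
    const n = await binFileUtils.readBigInt(fd, 32);
    await binFileUtils.endReadSection(fd);
    await fd.close();
    return n; // 12345n
}
```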


@ -1,17 +1,15 @@
#!/usr/bin/env node
const version = require("../package").version;
import pkg from "../package.json";
const version = pkg.version;
let selectedCommand = null;
module.exports = async function clProcessor(commands) {
export default async function clProcessor(commands) {
const cl = [];
const argv = {};
for (let i=2; i<process.argv.length; i++) {
if (process.argv[i][0] == "-") {
let S = process.argv[i];
while (S[0] == "-") S = S.slice(1);
const arr = S.split("=")
const arr = S.split("=");
if (arr.length > 1) {
argv[arr[0]] = arr.slice(1).join("=");
} else {
@ -34,7 +32,7 @@ module.exports = async function clProcessor(commands) {
const options = getOptions(cmd.options);
await cmd.action(m, options);
} else {
await cmd.action(m);
await cmd.action(m, {});
}
} else {
if (m.length>0) console.log("Invalid number of parameters");
@ -198,7 +196,6 @@ module.exports = async function clProcessor(commands) {
}
S += " " + pl.params.join(" ");
console.log(S);
// console.log("");
}
}
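A hedged trace of the option parsing above. Dashes are stripped, the option is split on `"="`, and everything after the first `"="` is re-joined, so values may themselves contain `"="`; the no-value branch and the positional-argument handling are outside the shown hunk, so those results are assumptions:

```js
// node cli.js prove --name=My=Name -v out.json
//
// argv = { name: "My=Name", v: true }  // v: true assumed from the elided else branch
// cl   = ["prove", "out.json"]         // positionals, assumed collected in cl
```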


@ -1,6 +1,4 @@
const Scalar = require("ffjavascript").Scalar;
const buildBn128 = require("ffjavascript").buildBn128;
const buildBls12381 = require("ffjavascript").buildBls12381;
import { Scalar, buildBn128, buildBls12381} from "ffjavascript";
const bls12381r = Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
const bn128r = Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");
@ -8,7 +6,7 @@ const bn128r = Scalar.e("2188824287183927522224640574525727508854836440041603434
const bls12381q = Scalar.e("1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab", 16);
const bn128q = Scalar.e("21888242871839275222246405745257275088696311157297823662689037894645226208583");
module.exports.getCurveFromR = async function getCurveFromR(r) {
export async function getCurveFromR(r) {
let curve;
if (Scalar.eq(r, bn128r)) {
curve = await buildBn128();
@ -20,7 +18,7 @@ module.exports.getCurveFromR = async function getCurveFromR(r) {
return curve;
};
module.exports.getCurveFromQ = async function getCurveFromQ(q) {
export async function getCurveFromQ(q) {
let curve;
if (Scalar.eq(q, bn128q)) {
curve = await buildBn128();
@ -32,7 +30,7 @@ module.exports.getCurveFromQ = async function getCurveFromQ(q) {
return curve;
};
module.exports.getCurveFromName = async function getCurveFromName(name) {
export async function getCurveFromName(name) {
let curve;
const normName = normalizeName(name);
if (["BN128", "BN254", "ALTBN128"].indexOf(normName) >= 0) {

src/groth16.js Normal file

@ -0,0 +1,3 @@
export {default as fullProve} from "./groth16_fullprove.js";
export {default as prove} from "./groth16_prove.js";
export {default as validate} from "./groth16_verify.js";
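This new facade gathers the three Groth16 entry points into one module. A hypothetical caller (file names are placeholders, and `vKey` stands for a parsed verification key object produced elsewhere):

```js
import * as groth16 from "./groth16.js";

async function proveAndCheck(input, vKey) {
    const {proof, publicSignals} =
        await groth16.fullProve(input, "circuit.wasm", "circuit_final.zkey");
    return await groth16.validate(vKey, publicSignals, proof);
}
```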

src/groth16_fullprove.js Normal file

@ -0,0 +1,10 @@
import groth16_prove from "./groth16_prove.js";
import wtns_calculate from "./wtns_calculate.js";
export default async function groth16ProofFromInput(input, wasmFile, zkeyFileName, logger) {
const wtns= {
type: "mem"
};
await wtns_calculate(input, wasmFile, wtns);
return await groth16_prove(zkeyFileName, wtns, logger);
}


@ -1,14 +1,15 @@
const binFileUtils = require("./binfileutils");
const zkeyUtils = require("./zkey").utils;
const wtnsFile = require("./wtnsfile");
const getCurve = require("./curves").getCurveFromQ;
const {log2} = require("./misc");
const Scalar = require("ffjavascript").Scalar;
import * as binFileUtils from "./binfileutils.js";
import * as zkeyUtils from "./zkey_utils.js";
import * as wtnsUtils from "./wtns_utils.js";
import { getCurveFromQ as getCurve } from "./curves.js";
import { log2 } from "./misc.js";
import { Scalar, utils } from "ffjavascript";
const {stringifyBigInts} = utils;
async function groth16Prover(zkeyFileName, witnessFileName, verbose) {
export default async function groth16ProofFromInput(zkeyFileName, witnessFileName, logger) {
const {fd: fdWtns, sections: sectionsWtns} = await binFileUtils.readBinFile(witnessFileName, "wtns", 2);
const wtns = await wtnsFile.readHeader(fdWtns, sectionsWtns);
const wtns = await wtnsUtils.readHeader(fdWtns, sectionsWtns);
const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils.readBinFile(zkeyFileName, "zkey", 2);
@ -53,7 +54,7 @@ async function groth16Prover(zkeyFileName, witnessFileName, verbose) {
const buffPodd_T = await joinABC(curve, zkey, buffAodd_T, buffBodd_T, buffCodd_T);
const proof = {};
let proof = {};
proof.pi_a = await curve.G1.multiExpAffine(buffBasesA, buffWitness);
let pib1 = await curve.G1.multiExpAffine(buffBasesB1, buffWitness);
@ -81,7 +82,7 @@ async function groth16Prover(zkeyFileName, witnessFileName, verbose) {
proof.pi_c = G1.add( proof.pi_c, G1.timesFr( zkey.vk_delta_1, Fr.neg(Fr.mul(r,s) )));
const publicSignals = [];
let publicSignals = [];
for (let i=1; i<= zkey.nPublic; i++) {
const b = buffWitness.slice(i*Fr.n8, i*Fr.n8+Fr.n8);
@ -97,6 +98,10 @@ async function groth16Prover(zkeyFileName, witnessFileName, verbose) {
await fdZKey.close();
await fdWtns.close();
proof = stringifyBigInts(proof);
publicSignals = stringifyBigInts(publicSignals);
return {proof, publicSignals};
}
@ -237,4 +242,3 @@ async function joinABC(curve, zkey, a, b, c) {
return outBuff;
}
module.exports = groth16Prover;
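Since the prover now pipes its results through `stringifyBigInts`, the returned values are JSON-safe. A sketch with placeholder file names:

```js
import groth16Prove from "./groth16_prove.js";

async function proveFromFiles() {
    const {proof, publicSignals} = await groth16Prove("circuit_final.zkey", "witness.wtns");
    // Every big integer is already a decimal string, so this serializes cleanly:
    return JSON.stringify({proof, publicSignals}, null, 1);
}
```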


@ -18,11 +18,12 @@
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
import { Scalar } from "ffjavascript";
import * as curves from "./curves.js";
import { utils } from "ffjavascript";
const {unstringifyBigInts} = utils;
const Scalar = require("ffjavascript").Scalar;
const curves = require("./curves");
module.exports = async function isValid(vk_verifier, proof, publicSignals) {
export default async function isValid(vk_verifier, publicSignals, proof, logger) {
/*
let cpub = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
@ -30,6 +31,10 @@ module.exports = async function isValid(vk_verifier, proof, publicSignals) {
}
*/
vk_verifier = unstringifyBigInts(vk_verifier);
proof = unstringifyBigInts(proof);
publicSignals = unstringifyBigInts(publicSignals);
const curve = await curves.getCurveFromName(vk_verifier.curve);
const IC0 = curve.G1.fromObject(vk_verifier.IC[0]);
@ -62,6 +67,11 @@ module.exports = async function isValid(vk_verifier, proof, publicSignals) {
vk_alpha_1, vk_beta_2
);
if (! res) return false;
if (! res) {
if (logger) logger.error("Invalid proof");
return false;
}
if (logger) logger.info("OK!");
return true;
};
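The verifier now applies `unstringifyBigInts` itself, so it accepts the stringified forms the prover emits; note the reordered signature (`publicSignals` before `proof`) and the optional logger. A sketch:

```js
import groth16Verify from "./groth16_verify.js";

async function check(vKey, publicSignals, proof) {
    // console satisfies the logger shape used here (info/error).
    return await groth16Verify(vKey, publicSignals, proof, console);
}
```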


@ -1,9 +1,9 @@
const blake2b = require("blake2b-wasm");
import blake2b from "blake2b-wasm";
const ChaCha = require("ffjavascript").ChaCha;
import { ChaCha } from "ffjavascript";
function hashToG2(curve, hash) {
export function hashToG2(curve, hash) {
const hashV = new DataView(hash.buffer, hash.byteOffset, hash.byteLength);
const seed = [];
for (let i=0; i<8; i++) {
@ -17,7 +17,7 @@ function hashToG2(curve, hash) {
return g2_sp;
}
function getG2sp(curve, persinalization, challange, g1s, g1sx) {
export function getG2sp(curve, persinalization, challange, g1s, g1sx) {
const h = blake2b(64);
const b1 = new Uint8Array([persinalization]);
@ -40,7 +40,7 @@ function calculatePubKey(k, curve, personalization, challangeHash, rng ) {
return k;
}
function createPTauKey(curve, challangeHash, rng) {
export function createPTauKey(curve, challangeHash, rng) {
const key = {
tau: {},
alpha: {},
@ -55,7 +55,7 @@ function createPTauKey(curve, challangeHash, rng) {
return key;
}
function createDeltaKey(curve, transcript, rng) {
export function createDeltaKey(curve, transcript, rng) {
const delta = {};
delta.prvKey = curve.Fr.fromRng(rng);
delta.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
@ -64,8 +64,3 @@ function createDeltaKey(curve, transcript, rng) {
delta.g2_spx = curve.G2.toAffine(curve.G2.timesScalar(delta.g2_sp, delta.prvKey));
return delta;
}
module.exports.createPTauKey = createPTauKey;
module.exports.getG2sp = getG2sp;
module.exports.hashToG2 = hashToG2;
module.exports.createDeltaKey =createDeltaKey;
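A sketch of building a ceremony key from the exported helpers; the entropy string is a placeholder (any non-empty value makes `misc.getRandomRng` skip its interactive prompt):

```js
import * as keyPair from "./keypair.js";
import * as misc from "./misc.js";

async function makePTauKey(curve, challangeHash) {
    const rng = await misc.getRandomRng("some entropy here");
    // key.tau / key.alpha / key.beta each carry g1_s, g1_sx, g2_sp, g2_spx and prvKey.
    return keyPair.createPTauKey(curve, challangeHash, rng);
}
```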


@ -1,12 +1,14 @@
const fs = require("fs");
import * as fastFile from "fastFile";
module.exports = async function loadSymbols(symFileName) {
export default async function loadSymbols(symFileName) {
const sym = {
labelIdx2Name: [ "one" ],
varIdx2Name: [ "one" ],
componentIdx2Name: []
};
const symsStr = await fs.promises.readFile(symFileName, "utf8");
const fd = await fastFile.readExisting(symFileName);
const buff = await fd.read(fd.totalSize);
const symsStr = new TextDecoder("utf-8").decode(buff);
const lines = symsStr.split("\n");
for (let i=0; i<lines.length; i++) {
const arr = lines[i].split(",");
@ -22,6 +24,8 @@ module.exports = async function loadSymbols(symFileName) {
}
}
await fd.close();
return sym;
function extractComponent(name) {
@ -29,4 +33,4 @@ module.exports = async function loadSymbols(symFileName) {
arr.pop(); // Remove the last element
return arr.join(".");
}
};
}
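Callers are unaffected by the switch from `fs` to `fastFile`. A usage sketch, assuming a `circuit.sym` file as produced by circom's `-s` flag:

```js
import loadSymbols from "./loadsyms.js";

async function printSignal() {
    const sym = await loadSymbols("circuit.sym");
    console.log(sym.varIdx2Name[1]);       // name(s) mapped to witness variable 1
    console.log(sym.componentIdx2Name[0]); // owning component, if index 0 is present
}
```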


@ -1,7 +1,8 @@
const Blake2b = require("blake2b-wasm");
const readline = require("readline");
const ChaCha = require("ffjavascript").ChaCha;
const crypto = require("crypto");
/* global window */
import Blake2b from "blake2b-wasm";
import readline from "readline";
import { ChaCha } from "ffjavascript";
import crypto from "crypto";
const _revTable = [];
for (let i=0; i<256; i++) {
@ -19,7 +20,7 @@ function _revSlow(idx, bits) {
return res;
}
function bitReverse(idx, bits) {
export function bitReverse(idx, bits) {
return (
_revTable[idx >>> 24] |
(_revTable[(idx >>> 16) & 0xFF] << 8) |
@ -29,13 +30,13 @@ function bitReverse(idx, bits) {
}
function log2( V )
export function log2( V )
{
return( ( ( V & 0xFFFF0000 ) !== 0 ? ( V &= 0xFFFF0000, 16 ) : 0 ) | ( ( V & 0xFF00FF00 ) !== 0 ? ( V &= 0xFF00FF00, 8 ) : 0 ) | ( ( V & 0xF0F0F0F0 ) !== 0 ? ( V &= 0xF0F0F0F0, 4 ) : 0 ) | ( ( V & 0xCCCCCCCC ) !== 0 ? ( V &= 0xCCCCCCCC, 2 ) : 0 ) | ( ( V & 0xAAAAAAAA ) !== 0 ) );
}
function formatHash(b) {
export function formatHash(b, title) {
const a = new DataView(b.buffer, b.byteOffset, b.byteLength);
let S = "";
for (let i=0; i<4; i++) {
@ -46,10 +47,11 @@ function formatHash(b) {
S += a.getUint32(i*16+j*4).toString(16).padStart(8, "0");
}
}
if (title) S = title + "\n" + S;
return S;
}
function hashIsEqual(h1, h2) {
export function hashIsEqual(h1, h2) {
if (h1.byteLength != h2.byteLength) return false;
var dv1 = new Int8Array(h1);
var dv2 = new Int8Array(h2);
@ -60,14 +62,14 @@ function hashIsEqual(h1, h2) {
return true;
}
function cloneHasher(h) {
export function cloneHasher(h) {
const ph = h.getPartialHash();
const res = Blake2b(64);
res.setPartialHash(ph);
return res;
}
async function sameRatio(curve, g1s, g1sx, g2s, g2sx) {
export async function sameRatio(curve, g1s, g1sx, g2s, g2sx) {
if (curve.G1.isZero(g1s)) return false;
if (curve.G1.isZero(g1sx)) return false;
if (curve.G2.isZero(g2s)) return false;
@ -78,20 +80,23 @@ async function sameRatio(curve, g1s, g1sx, g2s, g2sx) {
}
export function askEntropy() {
if (process.browser) {
return window.prompt("Enter a random text. (Entropy): ", "");
} else {
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
function askEntropy() {
return new Promise((resolve) => {
rl.question("Enter a random text. (Entropy): ", (input) => resolve(input) );
});
return new Promise((resolve) => {
rl.question("Enter a random text. (Entropy): ", (input) => resolve(input) );
});
}
}
async function getRandomRng(entropy) {
// Generate a random key
export async function getRandomRng(entropy) {
// Generate a random RNG
while (!entropy) {
entropy = await askEntropy();
}
@ -109,7 +114,7 @@ async function getRandomRng(entropy) {
return rng;
}
function rngFromBeaconParams(beaconHash, numIterationsExp) {
export function rngFromBeaconParams(beaconHash, numIterationsExp) {
let nIterationsInner;
let nIterationsOuter;
if (numIterationsExp<32) {
@ -138,25 +143,17 @@ function rngFromBeaconParams(beaconHash, numIterationsExp) {
return rng;
}
function hex2ByteArray(s) {
export function hex2ByteArray(s) {
if (s instanceof Uint8Array) return s;
if (s.slice(0,2) == "0x") s= s.slice(2);
return new Uint8Array(s.match(/[\da-f]{2}/gi).map(function (h) {
return parseInt(h, 16);
}));
}
function byteArray2hex(byteArray) {
export function byteArray2hex(byteArray) {
return Array.prototype.map.call(byteArray, function(byte) {
return ("0" + (byte & 0xFF).toString(16)).slice(-2);
}).join("");
}
module.exports.bitReverse = bitReverse;
module.exports.log2 = log2;
module.exports.formatHash = formatHash;
module.exports.hashIsEqual = hashIsEqual;
module.exports.cloneHasher = cloneHasher;
module.exports.sameRatio = sameRatio;
module.exports.getRandomRng = getRandomRng;
module.exports.rngFromBeaconParams = rngFromBeaconParams;
module.exports.hex2ByteArray = hex2ByteArray;
module.exports.byteArray2hex = byteArray2hex;
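Two caller-visible notes from this hunk: `formatHash` now takes an optional title that is prepended to the hex dump, and `askEntropy` builds its readline interface lazily (falling back to `window.prompt` in browsers). A few quick checks of the pure helpers, with results worked out from the definitions above:

```js
import * as misc from "./misc.js";

const buff = misc.hex2ByteArray("0x0102ff");
console.log(misc.byteArray2hex(buff)); // "0102ff" (the "0x" prefix is stripped on input)
console.log(misc.log2(4096));          // 12
console.log(misc.bitReverse(1, 4));    // 8: the low 4 bits of 1 (0001) reversed (1000)
```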


@ -1,6 +1,5 @@
const buildTaskManager = require("./taskmanager");
const binFileUtils = require("./binfileutils");
import * as binFileUtils from "./binfileutils.js";
/*
This function creates a new section in the fdTo file with id idSection.
@ -9,7 +8,7 @@ const binFileUtils = require("./binfileutils");
It also updates the newChallangeHasher with the new points
*/
async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, groupName, first, inc, sectionName, verbose) {
export async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, groupName, first, inc, sectionName, logger) {
const MAX_CHUNK_SIZE = 1 << 16;
const G = curve[groupName];
const sG = G.F.n8*2;
@ -20,7 +19,7 @@ async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, group
let t = first;
for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
if (verbose) console.log(`Applying key: ${sectionName}: ${i}/${nPoints}`);
if (logger) logger.debug(`Applying key: ${sectionName}: ${i}/${nPoints}`);
const n= Math.min(nPoints - i, MAX_CHUNK_SIZE);
let buff;
buff = await fdOld.read(n*sG);
@ -35,13 +34,13 @@ async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, group
async function applyKeyToChallangeSection(fdOld, fdNew, responseHasher, curve, groupName, nPoints, first, inc, formatOut, sectionName, verbose) {
export async function applyKeyToChallangeSection(fdOld, fdNew, responseHasher, curve, groupName, nPoints, first, inc, formatOut, sectionName, logger) {
const G = curve[groupName];
const sG = G.F.n8*2;
const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks
let t = first;
for (let i=0 ; i<nPoints ; i+= chunkSize) {
if ((verbose)&&i) console.log(`${sectionName}: ` + i);
if (logger) logger.debug(`Applying key ${sectionName}: ${i}/${nPoints}`);
const n= Math.min(nPoints-i, chunkSize );
const buffInU = await fdOld.read(n * sG);
const buffInLEM = await G.batchUtoLEM(buffInU);
@ -59,6 +58,3 @@ async function applyKeyToChallangeSection(fdOld, fdNew, responseHasher, curve, g
}
}
module.exports.applyKeyToChallangeSection = applyKeyToChallangeSection;
module.exports.applyKeyToSection = applyKeyToSection;
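Per the comment above, `applyKeyToSection` streams a section from `fdOld` to `fdNew` while multiplying its points by `first, first*inc, first*inc^2, ...`. A hypothetical invocation (the section id 2, the `"G1"` group, and the scalar choices mirror the tauG1 processing elsewhere in this commit):

```js
import { applyKeyToSection } from "./mpc_applykey.js";

async function rekeyTauG1(fdOld, sections, fdNew, curve, key) {
    await applyKeyToSection(fdOld, sections, fdNew, 2, curve, "G1",
        curve.Fr.e(1),  // first
        key.tau.prvKey, // inc
        "tauG1", console);
}
```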


@ -1,31 +0,0 @@
const blake2wasm = require("blake2b-wasm");
async function run() {
await blake2wasm.ready();
const hasher1 = blake2wasm(64);
hasher1.update(Uint8Array.of(1,2,3,4));
const ph = hasher1.getPartialHash();
hasher1.update(Uint8Array.of(5,6,7,8));
console.log(hasher1.digest("hex"));
const hasher2 = blake2wasm(64);
hasher2.setPartialHash(ph);
hasher2.update(Uint8Array.of(5,6,7,8));
console.log(hasher2.digest("hex"));
}
run().then(() => {
process.exit();
});


@ -1,11 +1,10 @@
module.exports.newAccumulator = require("./powersoftau_new");
module.exports.exportChallange = require("./powersoftau_exportchallange");
module.exports.challangeContribute = require("./powersoftau_challangecontribute");
module.exports.importResponse = require("./powersoftau_import");
module.exports.verify = require("./powersoftau_verify");
module.exports.challangeContribute = require("./powersoftau_challangecontribute");
module.exports.beacon = require("./powersoftau_beacon");
module.exports.contribute = require("./powersoftau_contribute");
module.exports.preparePhase2 = require("./powersoftau_preparephase2");
module.exports.exportJson = require("./powersoftau_exportjson");
export {default as newAccumulator} from "./powersoftau_new.js";
export {default as exportChallange} from "./powersoftau_export_challange.js";
export {default as importResponse} from "./powersoftau_import.js";
export {default as verify} from "./powersoftau_verify.js";
export {default as challangeContribute} from "./powersoftau_challange_contribute.js";
export {default as beacon} from "./powersoftau_beacon.js";
export {default as contribute} from "./powersoftau_contribute.js";
export {default as preparePhase2} from "./powersoftau_preparephase2.js";
export {default as exportJson} from "./powersoftau_export_json.js";
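The rewritten index gives the powers-of-tau commands a single ES-module entry point. A sketch that starts a ceremony through it (the power and file name are placeholders):

```js
import * as powersoftau from "./powersoftau.js";
import { getCurveFromName } from "./curves.js";

async function startCeremony() {
    const curve = await getCurveFromName("bn128");
    // Resolves to the first challange hash, per powersoftau_new.js below.
    return await powersoftau.newAccumulator(curve, 12, "pot12_0000.ptau", console);
}
```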


@ -1,26 +1,26 @@
const Blake2b = require("blake2b-wasm");
const utils = require("./powersoftau_utils");
const misc = require("./misc");
const binFileUtils = require("./binfileutils");
import Blake2b from "blake2b-wasm";
import * as utils from "./powersoftau_utils.js";
import * as misc from "./misc.js";
import * as binFileUtils from "./binfileutils.js";
async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp, beaconHashStr, verbose) {
export default async function beacon(oldPtauFilename, newPTauFilename, name, beaconHashStr,numIterationsExp, logger) {
const beaconHash = misc.hex2ByteArray(beaconHashStr);
if ( (beaconHash.byteLength == 0)
|| (beaconHash.byteLength*2 !=beaconHashStr.length))
{
console.log("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
if (logger) logger.error("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
return false;
}
if (beaconHash.length>=256) {
console.log("Maximum lenght of beacon hash is 255 bytes");
if (logger) logger.error("Maximum lenght of beacon hash is 255 bytes");
return false;
}
numIterationsExp = parseInt(numIterationsExp);
if ((numIterationsExp<10)||(numIterationsExp>63)) {
console.log("Invalid numIterationsExp. (Must be between 10 and 63)");
if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)");
return false;
}
@ -30,10 +30,11 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power, ceremonyPower} = await utils.readPTauHeader(fdOld, sections);
if (power != ceremonyPower) {
throw new Error("This file has been reduced. You cannot contribute into a reduced file.");
if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
return false;
}
if (sections[12]) {
console.log("WARNING: Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
if (logger) logger.warn("Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
}
const contributions = await utils.readContributions(fdOld, curve, sections);
const curContribution = {
@ -48,7 +49,7 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
if (contributions.length>0) {
lastChallangeHash = contributions[contributions.length-1].nextChallange;
} else {
lastChallangeHash = utils.calculateFirstChallangeHash(curve, power, verbose);
lastChallangeHash = utils.calculateFirstChallangeHash(curve, power, logger);
}
curContribution.key = utils.keyFromBeacon(curve, lastChallangeHash, beaconHash, numIterationsExp);
@ -62,15 +63,15 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
const startSections = [];
let firstPoints;
firstPoints = await processSection(2, "G1", (1<<power) * 2 -1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1" );
firstPoints = await processSection(2, "G1", (1<<power) * 2 -1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1", logger );
curContribution.tauG1 = firstPoints[1];
firstPoints = await processSection(3, "G2", (1<<power) , curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2" );
firstPoints = await processSection(3, "G2", (1<<power) , curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2", logger );
curContribution.tauG2 = firstPoints[1];
firstPoints = await processSection(4, "G1", (1<<power) , curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1" );
firstPoints = await processSection(4, "G1", (1<<power) , curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1", logger );
curContribution.alphaG1 = firstPoints[0];
firstPoints = await processSection(5, "G1", (1<<power) , curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1" );
firstPoints = await processSection(5, "G1", (1<<power) , curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1", logger );
curContribution.betaG1 = firstPoints[0];
firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2" );
firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2", logger );
curContribution.betaG2 = firstPoints[0];
curContribution.partialHash = responseHasher.getPartialHash();
@ -82,22 +83,20 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
responseHasher.update(new Uint8Array(buffKey));
const hashResponse = responseHasher.digest();
console.log("Contribution Response Hash imported: ");
console.log(misc.formatHash(hashResponse));
if (logger) logger.info(misc.formatHash(hashResponse, "Contribution Response Hash imported: "));
const nextChallangeHasher = new Blake2b(64);
nextChallangeHasher.update(hashResponse);
await hashSection(fdNew, "G1", 2, (1 << power) * 2 -1, "tauG1");
await hashSection(fdNew, "G2", 3, (1 << power) , "tauG2");
await hashSection(fdNew, "G1", 4, (1 << power) , "alphaTauG1");
await hashSection(fdNew, "G1", 5, (1 << power) , "betaTauG1");
await hashSection(fdNew, "G2", 6, 1 , "betaG2");
await hashSection(fdNew, "G1", 2, (1 << power) * 2 -1, "tauG1", logger);
await hashSection(fdNew, "G2", 3, (1 << power) , "tauG2", logger);
await hashSection(fdNew, "G1", 4, (1 << power) , "alphaTauG1", logger);
await hashSection(fdNew, "G1", 5, (1 << power) , "betaTauG1", logger);
await hashSection(fdNew, "G2", 6, 1 , "betaG2", logger);
curContribution.nextChallange = nextChallangeHasher.digest();
console.log("Next Challange Hash: ");
console.log(misc.formatHash(curContribution.nextChallange));
if (logger) logger.info(misc.formatHash(curContribution.nextChallange, "Next Challange Hash: "));
contributions.push(curContribution);
@ -106,9 +105,9 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
await fdOld.close();
await fdNew.close();
return;
return hashResponse;
async function processSection(sectionId, groupName, NPoints, first, inc, sectionName) {
async function processSection(sectionId, groupName, NPoints, first, inc, sectionName, logger) {
const res = [];
fdOld.pos = sections[sectionId][0].p;
@ -121,7 +120,7 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks
let t = first;
for (let i=0 ; i<NPoints ; i+= chunkSize) {
if ((verbose)&&i) console.log(`${sectionName}: ` + i);
if (logger) logger.debug(`Applying key ${sectionName}: ${i}/${NPoints}`);
const n= Math.min(NPoints-i, chunkSize );
const buffIn = await fdOld.read(n * sG);
const buffOutLEM = await G.batchApplyKey(buffIn, t, inc);
@ -150,7 +149,7 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
}
async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName) {
async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName, logger) {
const G = curve[groupName];
const sG = G.F.n8*2;
@ -160,7 +159,7 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
fdTo.pos = startSections[sectionId];
for (let i=0; i< nPoints; i += nPointsChunk) {
if ((verbose)&&i) console.log(`Hashing ${sectionName}: ` + i);
if (logger) logger.debug(`Hashing ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints-i, nPointsChunk);
const buffLEM = await fdTo.read(n * sG);
@ -174,4 +173,3 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
}
}
module.exports = beacon;
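Two caller-visible changes here: the argument order is now `(oldPtau, newPtau, name, beaconHashStr, numIterationsExp, logger)`, and the function resolves to the response hash instead of `undefined`. A call sketch with a placeholder beacon hash:

```js
import beacon from "./powersoftau_beacon.js";

async function applyBeacon() {
    return await beacon(
        "pot12_0003.ptau", "pot12_beacon.ptau",
        "Final Beacon", // name now precedes the beacon hash
        "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f",
        10,             // numIterationsExp, must be between 10 and 63
        console);
}
```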


@ -16,23 +16,22 @@
// G2*tp*alpha (compressed)
// G2*up*beta (compressed)
const fastFile = require("fastfile");
const assert = require("assert");
const Blake2b = require("blake2b-wasm");
const fs = require("fs");
const utils = require("./powersoftau_utils");
const misc = require("./misc");
const { applyKeyToChallangeSection } = require("./mpc_applykey");
const keyPair = require("./keypair");
import * as fastFile from "fastfile";
import Blake2b from "blake2b-wasm";
import * as utils from "./powersoftau_utils.js";
import * as misc from "./misc.js";
import { applyKeyToChallangeSection } from "./mpc_applykey.js";
import * as keyPair from "./keypair.js";
async function challangeContribute(curve, challangeFilename, responesFileName, entropy, verbose) {
export default async function challangeContribute(curve, challangeFilename, responesFileName, entropy, logger) {
await Blake2b.ready();
let stats = await fs.promises.stat(challangeFilename);
const fdFrom = await fastFile.readExisting(challangeFilename);
const sG1 = curve.F1.n64*8*2;
const sG2 = curve.F2.n64*8*2;
const domainSize = (stats.size + sG1 - 64 - sG2) / (4*sG1 + sG2);
const domainSize = (fdFrom.totalSize + sG1 - 64 - sG2) / (4*sG1 + sG2);
let e = domainSize;
let power = 0;
while (e>1) {
@ -40,49 +39,37 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
power += 1;
}
assert(1<<power == domainSize, "Invalid file size");
console.log("Power to tau size: "+power);
if (1<<power != domainSize) throw new Error("Invalid file size");
if (logger) logger.debug("Power to tau size: "+power);
const fdFrom = await fastFile.readExisting(challangeFilename);
const rng = await misc.getRandomRng(entropy);
const fdTo = await fastFile.createOverride(responesFileName);
// Calculate the hash
console.log("Hashing challange");
if (logger) logger.debug("Hashing challange");
const challangeHasher = Blake2b(64);
for (let i=0; i<stats.size; i+= fdFrom.pageSize) {
const s = Math.min(stats.size - i, fdFrom.pageSize);
for (let i=0; i<fdFrom.totalSize; i+= fdFrom.pageSize) {
const s = Math.min(fdFrom.totalSize - i, fdFrom.pageSize);
const buff = await fdFrom.read(s);
challangeHasher.update(buff);
}
const claimedHash = await fdFrom.read(64, 0);
console.log("Claimed Previus Challange Hash: ");
console.log(misc.formatHash(claimedHash));
if (logger) logger.info(misc.formatHash(claimedHash, "Claimed Previous Response Hash: "));
const challangeHash = challangeHasher.digest();
console.log("Current Challange Hash: ");
console.log(misc.formatHash(challangeHash));
const rng = await misc.getRandomRng(entropy);
if (logger) logger.info(misc.formatHash(challangeHash, "Current Challange Hash: "));
const key = keyPair.createPTauKey(curve, challangeHash, rng);
if (verbose) {
if (logger) {
["tau", "alpha", "beta"].forEach( (k) => {
console.log(k, ".g1_s_x: " + key[k].g1_s[0].toString(16));
console.log(k, ".g1_s_y: " + key[k].g1_s[1].toString(16));
console.log(k, ".g1_sx_x: " + key[k].g1_sx[0].toString(16));
console.log(k, ".g1_sx_y: " + key[k].g1_sx[1].toString(16));
console.log(k, ".g2_sp_x_c0: " + key[k].g2_sp[0][0].toString(16));
console.log(k, ".g2_sp_x_c1: " + key[k].g2_sp[0][1].toString(16));
console.log(k, ".g2_sp_y_c0: " + key[k].g2_sp[1][0].toString(16));
console.log(k, ".g2_sp_y_c1: " + key[k].g2_sp[1][1].toString(16));
console.log(k, ".g2_spx_x_c0: " + key[k].g2_spx[0][0].toString(16));
console.log(k, ".g2_spx_x_c1: " + key[k].g2_spx[0][1].toString(16));
console.log(k, ".g2_spx_y_c0: " + key[k].g2_spx[1][0].toString(16));
console.log(k, ".g2_spx_y_c1: " + key[k].g2_spx[1][1].toString(16));
console.log("");
logger.debug(k + ".g1_s: " + curve.G1.toString(key[k].g1_s, 16));
logger.debug(k + ".g1_sx: " + curve.G1.toString(key[k].g1_sx, 16));
logger.debug(k + ".g2_sp: " + curve.G2.toString(key[k].g2_sp, 16));
logger.debug(k + ".g2_spx: " + curve.G2.toString(key[k].g2_spx, 16));
logger.debug("");
});
}
@ -91,11 +78,11 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
await fdTo.write(challangeHash);
responseHasher.update(challangeHash);
await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G1", (1<<power)*2-1, curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG1" , verbose );
await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G2", (1<<power) , curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG2" , verbose );
await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G1", (1<<power) , key.alpha.prvKey, key.tau.prvKey, "COMPRESSED", "alphaTauG1", verbose );
await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G1", (1<<power) , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG1" , verbose );
await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G2", 1 , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG2" , verbose );
await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G1", (1<<power)*2-1, curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG1" , logger );
await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G2", (1<<power) , curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG2" , logger );
await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G1", (1<<power) , key.alpha.prvKey, key.tau.prvKey, "COMPRESSED", "alphaTauG1", logger );
await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G1", (1<<power) , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG1" , logger );
await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G2", 1 , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG2" , logger );
// Write and hash key
const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3);
@ -103,11 +90,9 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
await fdTo.write(buffKey);
responseHasher.update(buffKey);
const responseHash = responseHasher.digest();
console.log("Contribution Response Hash: ");
console.log(misc.formatHash(responseHash));
if (logger) logger.info(misc.formatHash(responseHash, "Contribution Response Hash: "));
await fdTo.close();
await fdFrom.close();
}
module.exports = challangeContribute;


@ -5,22 +5,23 @@
// 2^N AlphaTauG1 Points (uncompressed)
// 2^N BetaTauG1 Points (uncompressed)
const Blake2b = require("blake2b-wasm");
const utils = require("./powersoftau_utils");
const keyPair = require("./keypair");
const binFileUtils = require("./binfileutils");
const misc = require("./misc");
import Blake2b from "blake2b-wasm";
import * as utils from "./powersoftau_utils.js";
import * as keyPair from "./keypair.js";
import * as binFileUtils from "./binfileutils.js";
import * as misc from "./misc.js";
async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbose) {
export default async function contribute(oldPtauFilename, newPTauFilename, name, entropy, logger) {
await Blake2b.ready();
const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power, ceremonyPower} = await utils.readPTauHeader(fdOld, sections);
if (power != ceremonyPower) {
if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
throw new Error("This file has been reduced. You cannot contribute into a reduced file.");
}
if (sections[12]) {
console.log("WARNING: Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
if (logger) logger.warn("WARNING: Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
}
const contributions = await utils.readContributions(fdOld, curve, sections);
const curContribution = {
@ -30,15 +31,16 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
let lastChallangeHash;
const rng = await misc.getRandomRng(entropy);
if (contributions.length>0) {
lastChallangeHash = contributions[contributions.length-1].nextChallange;
} else {
lastChallangeHash = utils.calculateFirstChallangeHash(curve, power, verbose);
lastChallangeHash = utils.calculateFirstChallangeHash(curve, power, logger);
}
// Generate a random key
const rng = await misc.getRandomRng(entropy);
curContribution.key = keyPair.createPTauKey(curve, lastChallangeHash, rng);
@ -72,8 +74,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
responseHasher.update(new Uint8Array(buffKey));
const hashResponse = responseHasher.digest();
console.log("Contribution Response Hash imported: ");
console.log(misc.formatHash(hashResponse));
if (logger) logger.info(misc.formatHash(hashResponse, "Contribution Response Hash imported: "));
const nextChallangeHasher = new Blake2b(64);
nextChallangeHasher.update(hashResponse);
@ -86,8 +87,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
curContribution.nextChallange = nextChallangeHasher.digest();
console.log("Next Challange Hash: ");
console.log(misc.formatHash(curContribution.nextChallange));
if (logger) logger.info(misc.formatHash(curContribution.nextChallange, "Next Challange Hash: "));
contributions.push(curContribution);
@ -96,7 +96,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
await fdOld.close();
await fdNew.close();
return;
return hashResponse;
async function processSection(sectionId, groupName, NPoints, first, inc, sectionName) {
const res = [];
@ -111,7 +111,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks
let t = first;
for (let i=0 ; i<NPoints ; i+= chunkSize) {
if ((verbose)&&i) console.log(`${sectionName}: ` + i);
if (logger) logger.debug(`processing: ${sectionName}: ${i}/${NPoints}`);
const n= Math.min(NPoints-i, chunkSize );
const buffIn = await fdOld.read(n * sG);
const buffOutLEM = await G.batchApplyKey(buffIn, t, inc);
@ -150,7 +150,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
fdTo.pos = startSections[sectionId];
for (let i=0; i< nPoints; i += nPointsChunk) {
if ((verbose)&&i) console.log(`Hashing ${sectionName}: ` + i);
if ((logger)&&i) logger.debug(`Hashing ${sectionName}: ` + i);
const n = Math.min(nPoints-i, nPointsChunk);
const buffLEM = await fdTo.read(n * sG);
@ -166,4 +166,3 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
}
module.exports = contribute;
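`contribute` likewise resolves to the response hash now. A call sketch (file names, contribution name, and entropy are placeholders):

```js
import contribute from "./powersoftau_contribute.js";

async function nextContribution() {
    return await contribute(
        "pot12_0001.ptau", "pot12_0002.ptau",
        "Some contribution name", "more random entropy", console);
}
```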


@ -6,13 +6,13 @@
// 2^N BetaTauG1 Points (uncompressed)
// BetaG2 (uncompressed)
const fastFile = require("fastfile");
const Blake2b = require("blake2b-wasm");
const utils = require("./powersoftau_utils");
const binFileUtils = require("./binfileutils");
const misc = require("./misc");
import * as fastFile from "fastfile";
import Blake2b from "blake2b-wasm";
import * as utils from "./powersoftau_utils.js";
import * as binFileUtils from "./binfileutils.js";
import * as misc from "./misc.js";
async function exportChallange(pTauFilename, challangeFilename, verbose) {
export default async function exportChallange(pTauFilename, challangeFilename, logger) {
await Blake2b.ready();
const {fd: fdFrom, sections} = await binFileUtils.readBinFile(pTauFilename, "ptau", 1);
@ -28,11 +28,9 @@ async function exportChallange(pTauFilename, challangeFilename, verbose) {
curChallangeHash = contributions[contributions.length-1].nextChallange;
}
console.log("Last Response Hash: ");
console.log(misc.formatHash(lastResponseHash));
if (logger) logger.info(misc.formatHash(lastResponseHash, "Last Response Hash: "));
console.log("New Challange Hash: ");
console.log(misc.formatHash(curChallangeHash));
if (logger) logger.info(misc.formatHash(curChallangeHash, "New Challange Hash: "));
const fdTo = await fastFile.createOverride(challangeFilename);
@ -53,12 +51,14 @@ async function exportChallange(pTauFilename, challangeFilename, verbose) {
const calcCurChallangeHash = toHash.digest();
if (!misc.hashIsEqual (curChallangeHash, calcCurChallangeHash)) {
console.log("Calc Curret Challange Hash: ");
console.log(misc.formatHash(calcCurChallangeHash));
if (logger) logger.info(misc.formatHash(calcCurChallangeHash, "Calculated Current Challange Hash: "));
if (logger) logger.error("PTau file is corrupted. Calculated new challange hash does not match the declared one");
throw new Error("PTau file is corrupted. Calculated new challange hash does not match the declared one");
}
return curChallangeHash;
async function exportSection(sectionId, groupName, nPoints, sectionName) {
const G = curve[groupName];
const sG = G.F.n8*2;
@ -66,7 +66,7 @@ async function exportChallange(pTauFilename, challangeFilename, verbose) {
await binFileUtils.startReadUniqueSection(fdFrom, sections, sectionId);
for (let i=0; i< nPoints; i+= nPointsChunk) {
if ((verbose)&&i) console.log(`${sectionName}: ` + i);
if (logger) logger.debug(`Exporting ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints-i, nPointsChunk);
let buff;
buff = await fdFrom.read(n*sG);
@ -80,4 +80,3 @@ async function exportChallange(pTauFilename, challangeFilename, verbose) {
}
module.exports = exportChallange;


@ -1,9 +1,7 @@
const utils = require("./powersoftau_utils");
const binFileUtils = require("./binfileutils");
const {stringifyBigInts} = require("ffjavascript").utils;
const fs = require("fs");
import * as utils from "./powersoftau_utils.js";
import * as binFileUtils from "./binfileutils.js";
async function exportJson(pTauFilename, jsonFileName, verbose) {
export default async function exportJson(pTauFilename, verbose) {
const {fd, sections} = await binFileUtils.readBinFile(pTauFilename, "ptau", 1);
const {curve, power} = await utils.readPTauHeader(fd, sections);
@ -26,8 +24,8 @@ async function exportJson(pTauFilename, jsonFileName, verbose) {
await fd.close();
const S = JSON.stringify(stringifyBigInts(pTau), null, 1);
await fs.promises.writeFile(jsonFileName, S);
return pTau;
async function exportSection(sectionId, groupName, nPoints, sectionName) {
@ -69,5 +67,4 @@ async function exportJson(pTauFilename, jsonFileName, verbose) {
}
module.exports = exportJson;
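`exportJson` no longer writes the JSON file itself; it resolves to the `pTau` object and leaves serialization to the caller, mirroring the removed lines. A caller-side sketch:

```js
import exportJson from "./powersoftau_export_json.js";
import { utils } from "ffjavascript";
import fs from "fs";

async function writePtauJson() {
    const pTau = await exportJson("pot12_final.ptau");
    const S = JSON.stringify(utils.stringifyBigInts(pTau), null, 1);
    await fs.promises.writeFile("pot12_final.json", S);
}
```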


@ -1,12 +1,10 @@
const assert = require("assert");
const fastFile = require("fastfile");
const Blake2b = require("blake2b-wasm");
const fs = require("fs");
const utils = require("./powersoftau_utils");
const binFileUtils = require("./binfileutils");
const misc = require("./misc");
import * as fastFile from "fastfile";
import Blake2b from "blake2b-wasm";
import * as utils from "./powersoftau_utils.js";
import * as binFileUtils from "./binfileutils.js";
import * as misc from "./misc.js";
async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, verbose) {
export default async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, logger) {
await Blake2b.ready();
@ -15,39 +13,40 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFile
const contributions = await utils.readContributions(fdOld, curve, sections);
const currentContribution = {};
if (name) currentContribution.name = name;
const sG1 = curve.F1.n8*2;
const scG1 = curve.F1.n8; // Compressed size
const sG2 = curve.F2.n8*2;
const scG2 = curve.F2.n8; // Compressed size
let stats = await fs.promises.stat(contributionFilename);
assert.equal(stats.size,
const fdResponse = await fastFile.readExisting(contributionFilename);
if (fdResponse.totalSize !=
64 + // Old Hash
((1<<power)*2-1)*scG1 +
(1<<power)*scG2 +
(1<<power)*scG1 +
(1<<power)*scG1 +
scG2 +
sG1*6 + sG2*3,
"Size of the contribution is invalid"
);
sG1*6 + sG2*3)
throw new Error("Size of the contribution is invalid");
let lastChallangeHash;
if (contributions.length>0) {
lastChallangeHash = contributions[contributions.length-1].nextChallange;
} else {
lastChallangeHash = utils.calculateFirstChallangeHash(curve, power, verbose);
lastChallangeHash = utils.calculateFirstChallangeHash(curve, power, logger);
}
const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 7);
await utils.writePTauHeader(fdNew, curve, power);
const fdResponse = await fastFile.readExisting(contributionFilename);
const contributionPreviousHash = await fdResponse.read(64);
assert(misc.hashIsEqual(contributionPreviousHash,lastChallangeHash),
"Wrong contribution. this contribution is not based on the previus hash");
if(!misc.hashIsEqual(contributionPreviousHash,lastChallangeHash))
throw new Error("Wrong contribution. this contribution is not based on the previus hash");
const hasherResponse = new Blake2b(64);
hasherResponse.update(contributionPreviousHash);
@ -75,22 +74,20 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFile
hasherResponse.update(new Uint8Array(buffKey));
const hashResponse = hasherResponse.digest();
console.log("Contribution Response Hash imported: ");
console.log(misc.formatHash(hashResponse));
if (logger) logger.info(misc.formatHash(hashResponse, "Contribution Response Hash imported: "));
const nextChallangeHasher = new Blake2b(64);
nextChallangeHasher.update(hashResponse);
await hashSection(fdNew, "G1", 2, (1 << power) * 2 -1, "tauG1");
await hashSection(fdNew, "G2", 3, (1 << power) , "tauG2");
await hashSection(fdNew, "G1", 4, (1 << power) , "alphaTauG1");
await hashSection(fdNew, "G1", 5, (1 << power) , "betaTauG1");
await hashSection(fdNew, "G2", 6, 1 , "betaG2");
await hashSection(fdNew, "G1", 2, (1 << power) * 2 -1, "tauG1", logger);
await hashSection(fdNew, "G2", 3, (1 << power) , "tauG2", logger);
await hashSection(fdNew, "G1", 4, (1 << power) , "alphaTauG1", logger);
await hashSection(fdNew, "G1", 5, (1 << power) , "betaTauG1", logger);
await hashSection(fdNew, "G2", 6, 1 , "betaG2", logger);
currentContribution.nextChallange = nextChallangeHasher.digest();
console.log("Next Challange Hash: ");
console.log(misc.formatHash(currentContribution.nextChallange));
if (logger) logger.info(misc.formatHash(currentContribution.nextChallange, "Next Challange Hash: "));
contributions.push(currentContribution);
@ -100,6 +97,8 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFile
await fdNew.close();
await fdOld.close();
return currentContribution.nextChallange;
async function processSection(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) {
const G = curve[groupName];
@ -114,7 +113,7 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFile
startSections[sectionId] = fdTo.pos;
for (let i=0; i< nPoints; i += nPointsChunk) {
if ((verbose)&&i) console.log(`Importing ${sectionName}: ` + i);
if (logger) logger.debug(`Importing ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints-i, nPointsChunk);
const buffC = await fdFrom.read(n * scG);
@ -138,7 +137,7 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFile
}
async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName) {
async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName, logger) {
const G = curve[groupName];
const sG = G.F.n8*2;
@ -148,7 +147,7 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFile
fdTo.pos = startSections[sectionId];
for (let i=0; i< nPoints; i += nPointsChunk) {
if ((verbose)&&i) console.log(`Hashing ${sectionName}: ` + i);
if (logger) logger.debug(`Hashing ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints-i, nPointsChunk);
const buffLEM = await fdTo.read(n * sG);
@ -163,4 +162,3 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFile
}
module.exports = importResponse;


@ -46,13 +46,12 @@ contributions(7)
]
*/
const ptauUtils = require("./powersoftau_utils");
const binFileUtils = require("./binfileutils");
const utils = require("./powersoftau_utils");
const Blake2b = require("blake2b-wasm");
const misc = require("./misc");
import * as ptauUtils from "./powersoftau_utils.js";
import * as binFileUtils from "./binfileutils.js";
import Blake2b from "blake2b-wasm";
import * as misc from "./misc.js";
async function newAccumulator(curve, power, fileName, verbose) {
export default async function newAccumulator(curve, power, fileName, logger) {
await Blake2b.ready();
@ -69,7 +68,7 @@ async function newAccumulator(curve, power, fileName, verbose) {
const nTauG1 = (1 << power) * 2 -1;
for (let i=0; i< nTauG1; i++) {
await fd.write(buffG1);
if ((verbose)&&((i%100000) == 0)&&i) console.log("tauG1: " + i);
if ((logger)&&((i%100000) == 0)&&i) logger.info("tauG1: " + i);
}
await binFileUtils.endWriteSection(fd);
@ -79,7 +78,7 @@ async function newAccumulator(curve, power, fileName, verbose) {
const nTauG2 = (1 << power);
for (let i=0; i< nTauG2; i++) {
await fd.write(buffG2);
if ((verbose)&&((i%100000) == 0)&&i) console.log("tauG2: " + i);
if ((logger)&&((i%100000) == 0)&&i) logger.log("tauG2: " + i);
}
await binFileUtils.endWriteSection(fd);
@ -89,7 +88,7 @@ async function newAccumulator(curve, power, fileName, verbose) {
const nAlfaTauG1 = (1 << power);
for (let i=0; i< nAlfaTauG1; i++) {
await fd.write(buffG1);
if ((verbose)&&((i%100000) == 0)&&i) console.log("alphaTauG1: " + i);
if ((logger)&&((i%100000) == 0)&&i) logger.log("alphaTauG1: " + i);
}
await binFileUtils.endWriteSection(fd);
@ -99,7 +98,7 @@ async function newAccumulator(curve, power, fileName, verbose) {
const nBetaTauG1 = (1 << power);
for (let i=0; i< nBetaTauG1; i++) {
await fd.write(buffG1);
if ((verbose)&&((i%100000) == 0)&&i) console.log("betaTauG1: " + i);
if ((logger)&&((i%100000) == 0)&&i) logger.log("betaTauG1: " + i);
}
await binFileUtils.endWriteSection(fd);
@ -117,14 +116,12 @@ async function newAccumulator(curve, power, fileName, verbose) {
await fd.close();
const firstChallangeHash = utils.calculateFirstChallangeHash(curve, power, verbose);
const firstChallangeHash = ptauUtils.calculateFirstChallangeHash(curve, power, logger);
console.log("Blank Contribution Hash:");
console.log(misc.formatHash(Blake2b(64).digest()));
if (logger) logger.debug(misc.formatHash(Blake2b(64).digest(), "Blank Contribution Hash:"));
console.log("First Contribution Hash:");
console.log(misc.formatHash(firstChallangeHash));
if (logger) logger.info(misc.formatHash(firstChallangeHash, "First Contribution Hash:"));
return firstChallangeHash;
}
module.exports = newAccumulator;


@ -1,10 +1,9 @@
const binFileUtils = require("./binfileutils");
const utils = require("./powersoftau_utils");
const fastFile = require("fastfile");
const {bitReverse} = require("./misc");
const fs = require("fs");
import * as binFileUtils from "./binfileutils.js";
import * as utils from "./powersoftau_utils.js";
import * as fastFile from "fastfile";
import { bitReverse } from "./misc.js";
async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
export default async function preparePhase2(oldPtauFilename, newPTauFilename, logger) {
const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power} = await utils.readPTauHeader(fdOld, sections);
@ -12,7 +11,8 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 11);
await utils.writePTauHeader(fdNew, curve, power);
const fdTmp = await fastFile.createOverride(newPTauFilename+ ".tmp");
// const fdTmp = await fastFile.createOverride(newPTauFilename+ ".tmp");
const fdTmp = await fastFile.createOverride({type: "mem"});
await binFileUtils.copySection(fdOld, sections, fdNew, 2);
await binFileUtils.copySection(fdOld, sections, fdNew, 3);
@ -30,13 +30,13 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
await fdNew.close();
await fdTmp.close();
await fs.promises.unlink(newPTauFilename+ ".tmp");
// await fs.promises.unlink(newPTauFilename+ ".tmp");
return;
async function processSection(oldSectionId, newSectionId, Gstr, sectionName) {
const CHUNKPOW = 16;
if (verbose) console.log("Starting section: "+sectionName);
if (logger) logger.debug("Starting section: "+sectionName);
await binFileUtils.startWriteSection(fdNew, newSectionId);
@ -63,7 +63,7 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
fdTmp.pos =0;
for (let i=0; i<nChunks; i++) {
let buff;
if (verbose) console.log(`${sectionName} Prepare ${i+1}/${nChunks}`);
if (logger) logger.debug(`${sectionName} Prepare ${i+1}/${nChunks}`);
buff = await fdOld.read(pointsPerChunk*sGin);
buff = await G.batchToJacobian(buff);
for (let j=0; j<pointsPerChunk; j++) {
@ -74,7 +74,7 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
await binFileUtils.endReadSection(fdOld, true);
for (let j=0; j<nChunks; j++) {
if (verbose) console.log(`${sectionName} ${p} FFTMix ${j+1}/${nChunks}`);
if (logger) logger.debug(`${sectionName} ${p} FFTMix ${j+1}/${nChunks}`);
let buff;
fdTmp.pos = (j*pointsPerChunk)*sGmid;
buff = await fdTmp.read(pointsPerChunk*sGmid);
@ -87,7 +87,7 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
const nChunksPerGroup = nChunks / nGroups;
for (let j=0; j<nGroups; j++) {
for (let k=0; k <nChunksPerGroup/2; k++) {
if (verbose) console.log(`${sectionName} ${i}/${p} FFTJoin ${j+1}/${nGroups} ${k}/${nChunksPerGroup/2}`);
if (logger) logger.debug(`${sectionName} ${i}/${p} FFTJoin ${j+1}/${nGroups} ${k}/${nChunksPerGroup/2}`);
const first = Fr.pow( PFr.w[i], k*pointsPerChunk);
const inc = PFr.w[i];
const o1 = j*nChunksPerGroup + k;
@ -125,7 +125,7 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
fdTmp.pos = 0;
const factor = Fr.inv( Fr.e( 1<< p));
for (let i=0; i<nChunks; i++) {
if (verbose) console.log(`${sectionName} ${p} FFTFinal ${i+1}/${nChunks}`);
if (logger) logger.debug(`${sectionName} ${p} FFTFinal ${i+1}/${nChunks}`);
let buff;
buff = await fdTmp.read(pointsPerChunk * sGmid);
buff = await G.fftFinal(buff, factor);
@ -145,4 +145,3 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
}
}
module.exports = preparePhase2;


@ -1,48 +0,0 @@
const binFileUtils = require("./binfileutils");
const utils = require("./powersoftau_utils");
async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power} = await utils.readPTauHeader(fdOld, sections);
const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 11);
await utils.writePTauHeader(fdNew, curve, power);
await binFileUtils.copySection(fdOld, sections, fdNew, 2);
await binFileUtils.copySection(fdOld, sections, fdNew, 3);
await binFileUtils.copySection(fdOld, sections, fdNew, 4);
await binFileUtils.copySection(fdOld, sections, fdNew, 5);
await binFileUtils.copySection(fdOld, sections, fdNew, 6);
await binFileUtils.copySection(fdOld, sections, fdNew, 7);
await processSection(2, 12, "G1", (1<<power) , "tauG1" );
await processSection(3, 13, "G2", (1<<power) , "tauG2" );
await processSection(4, 14, "G1", (1<<power) , "alphaTauG1" );
await processSection(5, 15, "G1", (1<<power) , "betaTauG1" );
await fdOld.close();
await fdNew.close();
return;
async function processSection(oldSectionId, newSectionId, Gstr, NPoints, sectionName) {
if (verbose) console.log("Starting section: "+sectionName);
const G = curve[Gstr];
const sG = G.F.n8*2;
let buff;
await binFileUtils.startReadUniqueSection(fdOld, sections, oldSectionId);
buff = await fdOld.read(sG*NPoints);
await binFileUtils.endReadSection(fdOld, true);
buff = await G.ifft(buff, verbose ? console.log : null);
await binFileUtils.startWriteSection(fdNew, newSectionId);
await fdNew.write(buff);
await binFileUtils.endWriteSection(fdNew);
}
}
module.exports = preparePhase2;


@ -1,11 +1,10 @@
const assert = require("assert");
const Scalar = require("ffjavascript").Scalar;
const Blake2b = require("blake2b-wasm");
const keyPair = require("./keypair");
const misc = require("./misc");
const {getCurveFromQ} = require("./curves");
import { Scalar } from "ffjavascript";
import Blake2b from "blake2b-wasm";
import * as keyPair from "./keypair.js";
import * as misc from "./misc.js";
import { getCurveFromQ } from "./curves.js";
async function writePTauHeader(fd, curve, power, ceremonyPower) {
export async function writePTauHeader(fd, curve, power, ceremonyPower) {
// Write the header
///////////
@ -31,9 +30,9 @@ async function writePTauHeader(fd, curve, power, ceremonyPower) {
fd.pos = oldPos;
}
async function readPTauHeader(fd, sections) {
if (!sections[1]) assert(false, fd.fileName + ": File has no header");
if (sections[1].length>1) assert(false, fd.fileName +": File has more than one header");
export async function readPTauHeader(fd, sections) {
if (!sections[1]) throw new Error(fd.fileName + ": File has no header");
if (sections[1].length>1) throw new Error(fd.fileName +": File has more than one header");
fd.pos = sections[1][0].p;
const n8 = await fd.readULE32();
@ -42,25 +41,25 @@ async function readPTauHeader(fd, sections) {
const curve = await getCurveFromQ(q);
assert(curve.F1.n64*8 == n8, fd.fileName +": Invalid size");
if (curve.F1.n64*8 != n8) throw new Error(fd.fileName +": Invalid size");
const power = await fd.readULE32();
const ceremonyPower = await fd.readULE32();
assert.equal(fd.pos-sections[1][0].p, sections[1][0].size);
if (fd.pos-sections[1][0].p != sections[1][0].size) throw new Error("Invalid PTau header size");
return {curve, power, ceremonyPower};
}
async function readPtauPubKey(fd, curve, montgomery) {
export async function readPtauPubKey(fd, curve, montgomery) {
const buff = await fd.read(curve.F1.n8*2*6 + curve.F2.n8*2*3);
return fromPtauPubKeyRpr(buff, 0, curve, montgomery);
}
function fromPtauPubKeyRpr(buff, pos, curve, montgomery) {
export function fromPtauPubKeyRpr(buff, pos, curve, montgomery) {
const key = {
tau: {},
@ -103,7 +102,7 @@ function fromPtauPubKeyRpr(buff, pos, curve, montgomery) {
}
}
function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) {
export function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) {
writeG1(key.tau.g1_s);
writeG1(key.tau.g1_sx);
@ -136,7 +135,7 @@ function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) {
return buff;
}
async function writePtauPubKey(fd, curve, key, montgomery) {
export async function writePtauPubKey(fd, curve, key, montgomery) {
const buff = new Uint8Array(curve.F1.n8*2*6 + curve.F2.n8*2*3);
toPtauPubKeyRpr(buff, 0, curve, key, montgomery);
await fd.write(buff);
@ -206,9 +205,9 @@ async function readContribution(fd, curve) {
}
}
async function readContributions(fd, curve, sections) {
if (!sections[7]) assert(false, fd.fileName + ": File has no contributions");
if (sections[7][0].length>1) assert(false, fd.fileName +": File has more than one contributions section");
export async function readContributions(fd, curve, sections) {
if (!sections[7]) throw new Error(fd.fileName + ": File has no contributions");
if (sections[7][0].length>1) throw new Error(fd.fileName +": File has more than one contributions section");
fd.pos = sections[7][0].p;
const nContributions = await fd.readULE32();
@ -219,7 +218,7 @@ async function readContributions(fd, curve, sections) {
contributions.push(c);
}
assert.equal(fd.pos-sections[7][0].p, sections[7][0].size);
if (fd.pos-sections[7][0].p != sections[7][0].size) throw new Error("Invalid contribution section size");
return contributions;
}
@ -274,7 +273,7 @@ async function writeContribution(fd, curve, contribution) {
}
async function writeContributions(fd, curve, contributions) {
export async function writeContributions(fd, curve, contributions) {
await fd.writeULE32(7); // Header type
const pContributionsSize = fd.pos;
@ -292,8 +291,8 @@ async function writeContributions(fd, curve, contributions) {
fd.pos = oldPos;
}
function calculateFirstChallangeHash(curve, power, verbose) {
if (verbose) console.log("Calculating First Challange Hash");
export function calculateFirstChallangeHash(curve, power, logger) {
if (logger) logger.debug("Calculating First Challange Hash");
const hasher = new Blake2b(64);
@ -307,14 +306,14 @@ function calculateFirstChallangeHash(curve, power, verbose) {
let n;
n=(1 << power)*2 -1;
if (verbose) console.log("tauG1");
if (logger) logger.debug("Calculate Initial Hash: tauG1");
hashBlock(vG1, n);
n= 1 << power;
if (verbose) console.log("tauG2");
if (logger) logger.debug("Calculate Initial Hash: tauG2");
hashBlock(vG2, n);
if (verbose) console.log("alphaTauG1");
if (logger) logger.debug("Calculate Initial Hash: alphaTauG1");
hashBlock(vG1, n);
if (verbose) console.log("betaTauG1");
if (logger) logger.debug("Calculate Initial Hash: betaTauG1");
hashBlock(vG1, n);
hasher.update(vG2);
@ -330,7 +329,7 @@ function calculateFirstChallangeHash(curve, power, verbose) {
}
for (let i=0; i<nBlocks; i++) {
hasher.update(bigBuff);
if (verbose) console.log(i*blockSize);
if (logger) logger.debug("Initial hash: " +i*blockSize);
}
for (let i=0; i<rem; i++) {
hasher.update(buff);
@ -339,7 +338,7 @@ function calculateFirstChallangeHash(curve, power, verbose) {
}
function keyFromBeacon(curve, challangeHash, beaconHash, numIterationsExp) {
export function keyFromBeacon(curve, challangeHash, beaconHash, numIterationsExp) {
const rng = misc.rngFromBeaconParams(beaconHash, numIterationsExp);
@ -348,14 +347,3 @@ function keyFromBeacon(curve, challangeHash, beaconHash, numIterationsExp) {
return key;
}
module.exports.readPTauHeader = readPTauHeader;
module.exports.writePTauHeader = writePTauHeader;
module.exports.readPtauPubKey = readPtauPubKey;
module.exports.writePtauPubKey = writePtauPubKey;
module.exports.readContributions = readContributions;
module.exports.writeContributions = writeContributions;
module.exports.calculateFirstChallangeHash = calculateFirstChallangeHash;
module.exports.toPtauPubKeyRpr = toPtauPubKeyRpr;
module.exports.fromPtauPubKeyRpr = fromPtauPubKeyRpr;
module.exports.keyFromBeacon = keyFromBeacon;
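
With the utilities now exposed as named ES-module exports, a downstream module imports them directly instead of going through `module.exports`. A minimal sketch of reading a `.ptau` header with the new API (the `readBinFile` helper signature and the file name are assumptions for illustration):

```js
// Hypothetical consumer of the refactored ES-module API.
import * as binFileUtils from "./binfileutils.js";
import { readPTauHeader, readContributions } from "./powersoftau_utils.js";

// Assumed helper: opens the binary file and returns its descriptor plus section index.
const { fd, sections } = await binFileUtils.readBinFile("pot12_final.ptau", "ptau", 1);
const { curve, power, ceremonyPower } = await readPTauHeader(fd, sections);
const contributions = await readContributions(fd, curve, sections);
await fd.close();
```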


@ -1,54 +1,53 @@
const Blake2b = require("blake2b-wasm");
const utils = require("./powersoftau_utils");
const keyPair = require("./keypair");
const assert = require("assert");
const crypto = require("crypto");
const binFileUtils = require("./binfileutils");
const ChaCha = require("ffjavascript").ChaCha;
const misc = require("./misc");
import Blake2b from "blake2b-wasm";
import * as utils from "./powersoftau_utils.js";
import * as keyPair from "./keypair.js";
import crypto from "crypto";
import * as binFileUtils from "./binfileutils.js";
import { ChaCha } from "ffjavascript";
import * as misc from "./misc.js";
const sameRatio = misc.sameRatio;
async function verifyContribution(curve, cur, prev) {
async function verifyContribution(curve, cur, prev, logger) {
let sr;
if (cur.type == 1) { // Verify the beacon.
const beaconKey = utils.keyFromBeacon(curve, prev.nextChallange, cur.beaconHash, cur.numIterationsExp);
if (!curve.G1.eq(cur.key.tau.g1_s, beaconKey.tau.g1_s)) {
console.log(`BEACON key (tauG1_s) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
if (logger) logger.error(`BEACON key (tauG1_s) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.tau.g1_sx, beaconKey.tau.g1_sx)) {
console.log(`BEACON key (tauG1_sx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
if (logger) logger.error(`BEACON key (tauG1_sx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G2.eq(cur.key.tau.g2_spx, beaconKey.tau.g2_spx)) {
console.log(`BEACON key (tauG2_spx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
if (logger) logger.error(`BEACON key (tauG2_spx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.alpha.g1_s, beaconKey.alpha.g1_s)) {
console.log(`BEACON key (alphaG1_s) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
if (logger) logger.error(`BEACON key (alphaG1_s) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.alpha.g1_sx, beaconKey.alpha.g1_sx)) {
console.log(`BEACON key (alphaG1_sx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
if (logger) logger.error(`BEACON key (alphaG1_sx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G2.eq(cur.key.alpha.g2_spx, beaconKey.alpha.g2_spx)) {
console.log(`BEACON key (alphaG2_spx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
if (logger) logger.error(`BEACON key (alphaG2_spx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.beta.g1_s, beaconKey.beta.g1_s)) {
console.log(`BEACON key (betaG1_s) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
if (logger) logger.error(`BEACON key (betaG1_s) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.beta.g1_sx, beaconKey.beta.g1_sx)) {
console.log(`BEACON key (betaG1_sx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
if (logger) logger.error(`BEACON key (betaG1_sx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G2.eq(cur.key.beta.g2_spx, beaconKey.beta.g2_spx)) {
console.log(`BEACON key (betaG2_spx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
if (logger) logger.error(`BEACON key (betaG2_spx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
}
@ -59,56 +58,57 @@ async function verifyContribution(curve, cur, prev) {
sr = await sameRatio(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, cur.key.tau.g2_sp, cur.key.tau.g2_spx);
if (sr !== true) {
console.log("INVALID key (tau) in challange #"+cur.id);
if (logger) logger.error("INVALID key (tau) in challange #"+cur.id);
return false;
}
sr = await sameRatio(curve, cur.key.alpha.g1_s, cur.key.alpha.g1_sx, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx);
if (sr !== true) {
console.log("INVALID key (alpha) in challange #"+cur.id);
if (logger) logger.error("INVALID key (alpha) in challange #"+cur.id);
return false;
}
sr = await sameRatio(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, cur.key.beta.g2_sp, cur.key.beta.g2_spx);
if (sr !== true) {
console.log("INVALID key (beta) in challange #"+cur.id);
if (logger) logger.error("INVALID key (beta) in challange #"+cur.id);
return false;
}
sr = await sameRatio(curve, prev.tauG1, cur.tauG1, cur.key.tau.g2_sp, cur.key.tau.g2_spx);
if (sr !== true) {
console.log("INVALID tau*G1. challange #"+cur.id+" It does not follow the previous contribution");
if (logger) logger.error("INVALID tau*G1. challange #"+cur.id+" It does not follow the previous contribution");
return false;
}
sr = await sameRatio(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, prev.tauG2, cur.tauG2);
if (sr !== true) {
console.log("INVALID tau*G2. challange #"+cur.id+" It does not follow the previous contribution");
if (logger) logger.error("INVALID tau*G2. challange #"+cur.id+" It does not follow the previous contribution");
return false;
}
sr = await sameRatio(curve, prev.alphaG1, cur.alphaG1, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx);
if (sr !== true) {
console.log("INVALID alpha*G1. challange #"+cur.id+" It does not follow the previous contribution");
if (logger) logger.error("INVALID alpha*G1. challange #"+cur.id+" It does not follow the previous contribution");
return false;
}
sr = await sameRatio(curve, prev.betaG1, cur.betaG1, cur.key.beta.g2_sp, cur.key.beta.g2_spx);
if (sr !== true) {
console.log("INVALID beta*G1. challange #"+cur.id+" It does not follow the previous contribution");
if (logger) logger.error("INVALID beta*G1. challange #"+cur.id+" It does not follow the previous contribution");
return false;
}
sr = await sameRatio(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, prev.betaG2, cur.betaG2);
if (sr !== true) {
console.log("INVALID beta*G2. challange #"+cur.id+"It does not follow the previous contribution");
if (logger) logger.error("INVALID beta*G2. challange #"+cur.id+"It does not follow the previous contribution");
return false;
}
if (logger) logger.info("Powers Of tau file OK!");
return true;
}
async function verify(tauFilename, verbose) {
export default async function verify(tauFilename, logger) {
let sr;
await Blake2b.ready();
@ -116,22 +116,22 @@ async function verify(tauFilename, verbose) {
const {curve, power, ceremonyPower} = await utils.readPTauHeader(fd, sections);
const contrs = await utils.readContributions(fd, curve, sections);
if (verbose) console.log("power: 2**" + power);
if (logger) logger.debug("power: 2**" + power);
// Verify Last contribution
if (verbose) console.log("Computing initial contribution hash");
if (logger) logger.debug("Computing initial contribution hash");
const initialContribution = {
tauG1: curve.G1.g,
tauG2: curve.G2.g,
alphaG1: curve.G1.g,
betaG1: curve.G1.g,
betaG2: curve.G2.g,
nextChallange: utils.calculateFirstChallangeHash(curve, ceremonyPower, verbose),
nextChallange: utils.calculateFirstChallangeHash(curve, ceremonyPower, logger),
responseHash: Blake2b(64).digest()
};
if (contrs.length == 0) {
console.log("This file has no contribution! It cannot be used in production");
if (logger) logger.error("This file has no contribution! It cannot be used in production");
return false;
}
@ -142,8 +142,8 @@ async function verify(tauFilename, verbose) {
prevContr = initialContribution;
}
const curContr = contrs[contrs.length-1];
if (verbose) console.log("Validating contribution #"+contrs[contrs.length-1].id);
const res = await verifyContribution(curve, curContr,prevContr, verbose);
if (logger) logger.debug("Validating contribution #"+contrs[contrs.length-1].id);
const res = await verifyContribution(curve, curContr, prevContr, logger);
if (!res) return false;
@ -155,71 +155,71 @@ async function verify(tauFilename, verbose) {
// await test();
// Verify Section tau*G1
if (verbose) console.log("Verifying powers in tau*G1 section");
const rTau1 = await processSection(2, "G1", "tauG1", (1 << power)*2-1, [0, 1]);
if (logger) logger.debug("Verifying powers in tau*G1 section");
const rTau1 = await processSection(2, "G1", "tauG1", (1 << power)*2-1, [0, 1], logger);
sr = await sameRatio(curve, rTau1.R1, rTau1.R2, curve.G2.g, curContr.tauG2);
if (sr !== true) {
console.log("tauG1 section. Powers do not match");
if (logger) logger.error("tauG1 section. Powers do not match");
return false;
}
if (!curve.G1.eq(curve.G1.g, rTau1.singularPoints[0])) {
console.log("First element of tau*G1 section must be the generator");
if (logger) logger.error("First element of tau*G1 section must be the generator");
return false;
}
if (!curve.G1.eq(curContr.tauG1, rTau1.singularPoints[1])) {
console.log("Second element of tau*G1 section does not match the one in the contribution section");
if (logger) logger.error("Second element of tau*G1 section does not match the one in the contribution section");
return false;
}
// await test();
// Verify Section tau*G2
if (verbose) console.log("Verifying powers in tau*G2 section");
const rTau2 = await processSection(3, "G2", "tauG2", 1 << power, [0, 1]);
if (logger) logger.debug("Verifying powers in tau*G2 section");
const rTau2 = await processSection(3, "G2", "tauG2", 1 << power, [0, 1], logger);
sr = await sameRatio(curve, curve.G1.g, curContr.tauG1, rTau2.R1, rTau2.R2);
if (sr !== true) {
console.log("tauG2 section. Powers do not match");
if (logger) logger.error("tauG2 section. Powers do not match");
return false;
}
if (!curve.G2.eq(curve.G2.g, rTau2.singularPoints[0])) {
console.log("First element of tau*G2 section must be the generator");
if (logger) logger.error("First element of tau*G2 section must be the generator");
return false;
}
if (!curve.G2.eq(curContr.tauG2, rTau2.singularPoints[1])) {
console.log("Second element of tau*G2 section does not match the one in the contribution section");
if (logger) logger.error("Second element of tau*G2 section does not match the one in the contribution section");
return false;
}
// Verify Section alpha*tau*G1
if (verbose) console.log("Verifying powers in alpha*tau*G1 section");
const rAlphaTauG1 = await processSection(4, "G1", "alphatauG1", 1 << power, [0]);
if (logger) logger.debug("Verifying powers in alpha*tau*G1 section");
const rAlphaTauG1 = await processSection(4, "G1", "alphatauG1", 1 << power, [0], logger);
sr = await sameRatio(curve, rAlphaTauG1.R1, rAlphaTauG1.R2, curve.G2.g, curContr.tauG2);
if (sr !== true) {
console.log("alphaTauG1 section. Powers do not match");
if (logger) logger.error("alphaTauG1 section. Powers do not match");
return false;
}
if (!curve.G1.eq(curContr.alphaG1, rAlphaTauG1.singularPoints[0])) {
console.log("First element of alpha*tau*G1 section (alpha*G1) does not match the one in the contribution section");
if (logger) logger.error("First element of alpha*tau*G1 section (alpha*G1) does not match the one in the contribution section");
return false;
}
// Verify Section beta*tau*G1
if (verbose) console.log("Verifying powers in beta*tau*G1 section");
const rBetaTauG1 = await processSection(5, "G1", "betatauG1", 1 << power, [0]);
if (logger) logger.debug("Verifying powers in beta*tau*G1 section");
const rBetaTauG1 = await processSection(5, "G1", "betatauG1", 1 << power, [0], logger);
sr = await sameRatio(curve, rBetaTauG1.R1, rBetaTauG1.R2, curve.G2.g, curContr.tauG2);
if (sr !== true) {
console.log("betaTauG1 section. Powers do not match");
if (logger) logger.error("betaTauG1 section. Powers do not match");
return false;
}
if (!curve.G1.eq(curContr.betaG1, rBetaTauG1.singularPoints[0])) {
console.log("First element of beta*tau*G1 section (beta*G1) does not match the one in the contribution section");
if (logger) logger.error("First element of beta*tau*G1 section (beta*G1) does not match the one in the contribution section");
return false;
}
//Verify Beta G2
const betaG2 = await processSectionBetaG2();
const betaG2 = await processSectionBetaG2(logger);
if (!curve.G2.eq(curContr.betaG2, betaG2)) {
console.log("betaG2 element in betaG2 section does not match the one in the contribution section");
if (logger) logger.error("betaG2 element in betaG2 section does not match the one in the contribution section");
return false;
}
@ -228,14 +228,11 @@ async function verify(tauFilename, verbose) {
// Check the nextChallangeHash
if (!misc.hashIsEqual(nextContributionHash,curContr.nextChallange)) {
console.log("Hash of the values does not match the next challange of the last contributor in the contributions section");
if (logger) logger.error("Hash of the values does not match the next challange of the last contributor in the contributions section");
return false;
}
if (verbose) {
console.log("Next challange hash: ");
console.log(misc.formatHash(nextContributionHash));
}
if (logger) logger.info(misc.formatHash(nextContributionHash, "Next challange hash: "));
// Verify Previous contributions
@ -243,24 +240,26 @@ async function verify(tauFilename, verbose) {
for (let i = contrs.length-2; i>=0; i--) {
const curContr = contrs[i];
const prevContr = (i>0) ? contrs[i-1] : initialContribution;
const res = await verifyContribution(curve, curContr, prevContr);
const res = await verifyContribution(curve, curContr, prevContr, logger);
if (!res) return false;
printContribution(curContr, prevContr);
printContribution(curContr, prevContr, logger);
}
console.log("-----------------------------------------------------");
if (logger) logger.info("-----------------------------------------------------");
if ((!sections[12]) || (!sections[13]) || (!sections[14]) || (!sections[15])) {
console.log("this file does not contain phase2 precalculated values. Please run: ");
console.log(" snarkjs \"powersoftau preparephase2\" to prepare this file to be used in the phase2 ceremony." );
if (logger) logger.warn(
"this file does not contain phase2 precalculated values. Please run: \n" +
" snarkjs \"powersoftau preparephase2\" to prepare this file to be used in the phase2 ceremony."
);
} else {
let res;
res = await verifyLagrangeEvaluations("G1", 2, 12, "tauG1");
res = await verifyLagrangeEvaluations("G1", 2, 12, "tauG1", logger);
if (!res) return false;
res = await verifyLagrangeEvaluations("G2", 3, 13, "tauG2");
res = await verifyLagrangeEvaluations("G2", 3, 13, "tauG2", logger);
if (!res) return false;
res = await verifyLagrangeEvaluations("G1", 4, 14, "alphaTauG1");
res = await verifyLagrangeEvaluations("G1", 4, 14, "alphaTauG1", logger);
if (!res) return false;
res = await verifyLagrangeEvaluations("G1", 5, 15, "betaTauG1");
res = await verifyLagrangeEvaluations("G1", 5, 15, "betaTauG1", logger);
if (!res) return false;
}
@ -269,11 +268,11 @@ async function verify(tauFilename, verbose) {
return true;
function printContribution(curContr, prevContr) {
console.log("-----------------------------------------------------");
console.log(`Contribution #${curContr.id}: ${curContr.name ||""}`);
if (!logger) return;
logger.info("-----------------------------------------------------");
logger.info(`Contribution #${curContr.id}: ${curContr.name ||""}`);
console.log("\tNext Challange");
console.log(misc.formatHash(curContr.nextChallange));
logger.info(misc.formatHash(curContr.nextChallange, "Next Challange: "));
const buffV = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3);
utils.toPtauPubKeyRpr(buffV, 0, curve, curContr.key, false);
@ -283,26 +282,30 @@ async function verify(tauFilename, verbose) {
responseHasher.update(buffV);
const responseHash = responseHasher.digest();
console.log("\tResponse Hash");
console.log(misc.formatHash(responseHash));
logger.info(misc.formatHash(responseHash, "Response Hash:"));
console.log("\tBased on challange");
console.log(misc.formatHash(prevContr.nextChallange));
logger.info(misc.formatHash(prevContr.nextChallange, "Based on challange: "));
if (curContr.type == 1) {
console.log(`Beacon generator: ${misc.byteArray2hex(curContr.beaconHash)}`);
console.log(`Beacon iterations Exp: ${curContr.numIterationsExp}`);
logger.info(`Beacon generator: ${misc.byteArray2hex(curContr.beaconHash)}`);
logger.info(`Beacon iterations Exp: ${curContr.numIterationsExp}`);
}
}
async function processSectionBetaG2() {
async function processSectionBetaG2(logger) {
const G = curve.G2;
const sG = G.F.n8*2;
const buffUv = new Uint8Array(sG);
if (!sections[6]) assert(false, "File has no BetaG2 section");
if (sections[6].length>1) assert(false, "File has more than one GetaG2 section");
if (!sections[6]) {
if (logger) logger.error("File has no BetaG2 section");
throw new Error("File has no BetaG2 section");
}
if (sections[6].length>1) {
if (logger) logger.error("File has more than one BetaG2 section");
throw new Error("File has more than one BetaG2 section");
}
fd.pos = sections[6][0].p;
const buff = await fd.read(sG);
@ -314,7 +317,7 @@ async function verify(tauFilename, verbose) {
return P;
}
async function processSection(idSection, groupName, sectionName, nPoints, singularPointIndexes) {
async function processSection(idSection, groupName, sectionName, nPoints, singularPointIndexes, logger) {
const MAX_CHUNK_SIZE = 1<<16;
const G = curve[groupName];
const sG = G.F.n8*2;
@ -328,7 +331,7 @@ async function verify(tauFilename, verbose) {
let lastBase = G.zero;
for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log(`${sectionName}: ` + i);
if (logger) logger.debug(`points relations: ${sectionName}: ${i}/${nPoints} `);
const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
const bases = await fd.read(n*sG);
@ -374,9 +377,9 @@ async function verify(tauFilename, verbose) {
}
async function verifyLagrangeEvaluations(gName, tauSection, lagrangeSection, sectionName) {
async function verifyLagrangeEvaluations(gName, tauSection, lagrangeSection, sectionName, logger) {
if (verbose) console.log(`Verifying phase2 calculated values ${sectionName}...`);
if (logger) logger.debug(`Verifying phase2 calculated values ${sectionName}...`);
const G = curve[gName];
const sG = G.F.n8*2;
@ -396,7 +399,7 @@ async function verify(tauFilename, verbose) {
return true;
async function verifyPower(p) {
if (verbose) console.log(`Power ${p}...`);
if (logger) logger.debug(`Power ${p}...`);
const n8r = curve.Fr.n8;
const nPoints = 1<<p;
let buff_r = new Uint8Array(nPoints * n8r);
@ -425,7 +428,7 @@ async function verify(tauFilename, verbose) {
const resLagrange = await G.multiExpAffine(buffG, buff_r);
if (!G.eq(resTau, resLagrange)) {
console.log("Phase2 caclutation does not match with powers of tau");
if (logger) logger.error("Phase2 caclutation does not match with powers of tau");
return false;
}
@ -433,5 +436,3 @@ async function verify(tauFilename, verbose) {
}
}
}
module.exports = verify;
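
`verify` is now the default export and receives a `logger` object in place of the old `verbose` flag; anything exposing `debug`, `info`, `warn` and `error` methods will do. A minimal sketch with a console-backed logger (the file name is illustrative):

```js
import verify from "./powersoftau_verify.js";

// Any object with debug/info/warn/error methods can serve as the logger.
const logger = {
    debug: (msg) => console.log("[DEBUG]", msg),
    info:  (msg) => console.log("[INFO] ", msg),
    warn:  (msg) => console.warn("[WARN] ", msg),
    error: (msg) => console.error("[ERROR]", msg)
};

const ok = await verify("pot12_final.ptau", logger);
if (!ok) process.exit(1);
```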


@ -1,211 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").ZqField;
/*
const PolF = new PolField(new ZqField(bn128.r));
const G1 = bn128.G1;
const G2 = bn128.G2;
*/
module.exports = function genProof(vk_proof, witness, verbose) {
const proof = {};
const r = PolF.F.random();
const s = PolF.F.random();
/* Uncomment to generate a deterministic proof to debug
const r = PolF.F.zero;
const s = PolF.F.zero;
*/
proof.pi_a = G1.zero;
proof.pi_b = G2.zero;
proof.pi_c = G1.zero;
let pib1 = G1.zero;
// Skip public entries and the "1" signal that are forced by the verifier
for (let s= 0; s< vk_proof.nVars; s++) {
// pi_a = pi_a + A[s] * witness[s];
proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( vk_proof.A[s], witness[s]));
// pi_b = pi_b + B[s] * witness[s];
proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( vk_proof.B2[s], witness[s]));
pib1 = G1.add( pib1, G1.timesScalar( vk_proof.B1[s], witness[s]));
if ((verbose)&&(s%1000 == 1)) console.log("A, B1, B2: ", s);
}
for (let s= vk_proof.nPublic+1; s< vk_proof.nVars; s++) {
// pi_a = pi_a + A[s] * witness[s];
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.C[s], witness[s]));
if ((verbose)&&(s%1000 == 1)) console.log("C: ", s);
}
proof.pi_a = G1.add( proof.pi_a, vk_proof.vk_alpha_1 );
proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( vk_proof.vk_delta_1, r ));
proof.pi_b = G2.add( proof.pi_b, vk_proof.vk_beta_2 );
proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( vk_proof.vk_delta_2, s ));
pib1 = G1.add( pib1, vk_proof.vk_beta_1 );
pib1 = G1.add( pib1, G1.timesScalar( vk_proof.vk_delta_1, s ));
const h = calculateH(vk_proof, witness);
// proof.pi_c = G1.toAffine(proof.pi_c);
// console.log("pi_onlyc", proof.pi_c);
for (let i = 0; i < h.length; i++) {
// console.log(i + "->" + h[i].toString());
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.hExps[i], h[i]));
if ((verbose)&&(i%1000 == 1)) console.log("H: ", i);
}
// proof.pi_c = G1.toAffine(proof.pi_c);
// console.log("pi_candh", proof.pi_c);
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( proof.pi_a, s ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( pib1, r ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.vk_delta_1, PolF.F.neg(PolF.F.mul(r,s) )));
const publicSignals = witness.slice(1, vk_proof.nPublic+1);
proof.pi_a = G1.toAffine(proof.pi_a);
proof.pi_b = G2.toAffine(proof.pi_b);
proof.pi_c = G1.toAffine(proof.pi_c);
proof.protocol = "groth";
return {proof, publicSignals};
};
/*
// Old Method. (It's clear for academic understanding)
function calculateH(vk_proof, witness) {
const F = PolF.F;
const m = vk_proof.domainSize;
const polA_T = new Array(m).fill(PolF.F.zero);
const polB_T = new Array(m).fill(PolF.F.zero);
const polC_T = new Array(m).fill(PolF.F.zero);
for (let s=0; s<vk_proof.nVars; s++) {
for (let c in vk_proof.polsA[s]) {
polA_T[c] = F.add(polA_T[c], F.mul(witness[s], vk_proof.polsA[s][c]));
}
for (let c in vk_proof.polsB[s]) {
polB_T[c] = F.add(polB_T[c], F.mul(witness[s], vk_proof.polsB[s][c]));
}
for (let c in vk_proof.polsC[s]) {
polC_T[c] = F.add(polC_T[c], F.mul(witness[s], vk_proof.polsC[s][c]));
}
}
const polA_S = PolF.ifft(polA_T);
const polB_S = PolF.ifft(polB_T);
const polAB_S = PolF.mul(polA_S, polB_S);
const polC_S = PolF.ifft(polC_T);
const polABC_S = PolF.sub(polAB_S, polC_S);
const H_S = polABC_S.slice(m);
return H_S;
}
*/
function calculateH(vk_proof, witness) {
const F = PolF.F;
const m = vk_proof.domainSize;
const polA_T = new Array(m).fill(PolF.F.zero);
const polB_T = new Array(m).fill(PolF.F.zero);
for (let i=0; i<vk_proof.ccoefs.length; i++) {
const coef = vk_proof.ccoefs[i];
if (coef.matrix == 0) {
polA_T[coef.constraint] = F.add( polA_T[coef.constraint], F.mul(witness[ coef.signal ], coef.value) );
} else if (coef.matrix == 1) {
polB_T[coef.constraint] = F.add( polB_T[coef.constraint], F.mul(witness[ coef.signal ], coef.value) );
}
}
/*
for (let s=0; s<vk_proof.nVars; s++) {
for (let c in vk_proof.polsA[s]) {
polA_T[c] = F.add(polA_T[c], F.mul(witness[s], vk_proof.polsA[s][c]));
}
for (let c in vk_proof.polsB[s]) {
polB_T[c] = F.add(polB_T[c], F.mul(witness[s], vk_proof.polsB[s][c]));
}
}
*/
const polC_T = new Array(polA_T.length);
for (let i=0; i<polA_T.length; i++) {
polC_T[i] = PolF.F.mul(polA_T[i], polB_T[i]);
}
const polA_S = PolF.ifft(polA_T);
const polB_S = PolF.ifft(polB_T);
const polC_S = PolF.ifft(polC_T);
// F(wx) = [1, w, w^2, ...... w^(m-1)] in time is the same as a shift in frequency
const r = PolF.log2(m)+1;
PolF._setRoots(r);
for (let i=0; i<polA_S.length; i++) {
polA_S[i] = PolF.F.mul( polA_S[i], PolF.roots[r][i]);
polB_S[i] = PolF.F.mul( polB_S[i], PolF.roots[r][i]);
polC_S[i] = PolF.F.mul( polC_S[i], PolF.roots[r][i]);
}
const polA_Todd = PolF.fft(polA_S);
const polB_Todd = PolF.fft(polB_S);
const polC_Todd = PolF.fft(polC_S);
const polABC_Todd = new Array(polA_S.length);
for (let i=0; i<polA_S.length; i++) {
polABC_Todd[i] = PolF.F.sub(PolF.F.mul( polA_Todd[i], polB_Todd[i]), polC_Todd[i]);
}
return polABC_Todd;
}
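
For reference, the `calculateH` above computes the evaluations of the Groth16 quotient polynomial. With $m$ the domain size and $Z(x)$ the vanishing polynomial of the evaluation domain,

$$
h(x) = \frac{A(x)\,B(x) - C(x)}{Z(x)}, \qquad Z(x) = x^m - 1 .
$$

Since $A \cdot B - C$ vanishes on the $m$-th roots of unity, the numerator is instead evaluated on the shifted coset $\omega x$ (the odd $2m$-th roots of unity, which is what the multiplication by `PolF.roots[r][i]` implements), where $Z$ takes the constant value $-2$, so the quotient is recovered there up to a fixed constant factor.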


@ -1,176 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").ZqField;
const createKeccakHash = require("keccak");
const utils = require("ffjavascript").utils;
/*
const PolF = new PolField(new ZqField(bn128.r));
const G1 = bn128.G1;
const G2 = bn128.G2;
*/
module.exports = function genProof(vk_proof, witness) {
const proof = {};
const r = PolF.F.random();
const s = PolF.F.random();
// const r = PolF.F.zero;
// const s = PolF.F.zero;
/* Uncomment to generate a deterministic proof to debug
const r = PolF.F.zero;
const s = PolF.F.zero;
*/
proof.pi_a = G1.zero;
proof.pi_b = G2.zero;
proof.pi_c = G1.zero;
let pib1 = G1.zero;
let piadelta = G1.zero;
// Skip public entries and the "1" signal that are forced by the verifier
for (let s= 0; s< vk_proof.nVars; s++) {
// pi_a = pi_a + A[s] * witness[s];
proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( vk_proof.A[s], witness[s]));
// pi_b = pi_b + B[s] * witness[s];
proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( vk_proof.B2[s], witness[s]));
piadelta = G1.add( piadelta, G1.timesScalar( vk_proof.Adelta[s], witness[s]));
pib1 = G1.add( pib1, G1.timesScalar( vk_proof.B1[s], witness[s]));
}
for (let s= vk_proof.nPublic+1; s< vk_proof.nVars; s++) {
// pi_a = pi_a + A[s] * witness[s];
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.C[s], witness[s]));
}
proof.pi_a = G1.add( proof.pi_a, vk_proof.vk_alpha_1 );
proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( G1.g, r ));
piadelta = G1.add( piadelta, vk_proof.vk_alphadelta_1);
piadelta = G1.add( piadelta, G1.timesScalar( vk_proof.vk_delta_1, r ));
proof.pi_b = G2.add( proof.pi_b, vk_proof.vk_beta_2 );
proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( G2.g, s ));
pib1 = G1.add( pib1, vk_proof.vk_beta_1 );
pib1 = G1.add( pib1, G1.timesScalar( G1.g, s ));
proof.pi_a = G1.toAffine(proof.pi_a);
proof.pi_b = G2.toAffine(proof.pi_b);
const buff = Buffer.concat([
utils.beInt2Buff(proof.pi_a[0],32),
utils.beInt2Buff(proof.pi_a[1],32),
utils.beInt2Buff(proof.pi_b[0][0],32),
utils.beInt2Buff(proof.pi_b[0][1],32),
utils.beInt2Buff(proof.pi_b[1][0],32),
utils.beInt2Buff(proof.pi_b[1][1],32)
]);
const h1buff = createKeccakHash("keccak256").update(buff).digest();
const h2buff = createKeccakHash("keccak256").update(h1buff).digest();
const h1 = utils.beBuff2int(h1buff);
const h2 = utils.beBuff2int(h2buff);
// const h1 = PolF.F.zero;
// const h2 = PolF.F.zero;
// console.log(h1.toString());
// console.log(h2.toString());
const h = calculateH(vk_proof, witness);
// proof.pi_c = G1.toAffine(proof.pi_c);
// console.log("pi_onlyc", proof.pi_c);
for (let i = 0; i < h.length; i++) {
// console.log(i + "->" + h[i].toString());
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.hExps[i], h[i]));
}
// proof.pi_c = G1.toAffine(proof.pi_c);
// console.log("pi_candh", proof.pi_c);
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( proof.pi_a, s ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( pib1, r ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( G1.g, PolF.F.neg(PolF.F.mul(r,s) )));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( piadelta, h2 ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( pib1, h1 ));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.vk_delta_1, PolF.F.mul(h1,h2)));
const publicSignals = witness.slice(1, vk_proof.nPublic+1);
proof.pi_c = G1.toAffine(proof.pi_c);
proof.protocol = "kimleeoh";
return {proof, publicSignals};
};
function calculateH(vk_proof, witness) {
const F = PolF.F;
const m = vk_proof.domainSize;
const polA_T = new Array(m).fill(PolF.F.zero);
const polB_T = new Array(m).fill(PolF.F.zero);
const polC_T = new Array(m).fill(PolF.F.zero);
for (let s=0; s<vk_proof.nVars; s++) {
for (let c in vk_proof.polsA[s]) {
polA_T[c] = F.add(polA_T[c], F.mul(witness[s], vk_proof.polsA[s][c]));
}
for (let c in vk_proof.polsB[s]) {
polB_T[c] = F.add(polB_T[c], F.mul(witness[s], vk_proof.polsB[s][c]));
}
for (let c in vk_proof.polsC[s]) {
polC_T[c] = F.add(polC_T[c], F.mul(witness[s], vk_proof.polsC[s][c]));
}
}
const polA_S = PolF.ifft(polA_T);
const polB_S = PolF.ifft(polB_T);
const polAB_S = PolF.mul(polA_S, polB_S);
const polC_S = PolF.ifft(polC_T);
const polABC_S = PolF.sub(polAB_S, polC_S);
const H_S = polABC_S.slice(m);
return H_S;
}
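
The `kimleeoh` prover above derives its two challenge scalars non-interactively from the first two proof points via chained Keccak-256 hashes,

$$
h_1 = \mathrm{Keccak256}(\pi_a \,\|\, \pi_b), \qquad h_2 = \mathrm{Keccak256}(h_1),
$$

with each coordinate serialized as a 32-byte big-endian integer (`beInt2Buff`) and the digests read back as integers (`beBuff2int`).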


@ -1,209 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").ZqField;
/*
const PolF = new PolField(new ZqField(bn128.r));
const G1 = bn128.G1;
const G2 = bn128.G2;
*/
module.exports = function genProof(vk_proof, witness) {
const proof = {};
const d1 = PolF.F.random();
const d2 = PolF.F.random();
const d3 = PolF.F.random();
proof.pi_a = G1.zero;
proof.pi_ap = G1.zero;
proof.pi_b = G2.zero;
proof.pi_bp = G1.zero;
proof.pi_c = G1.zero;
proof.pi_cp = G1.zero;
proof.pi_kp = G1.zero;
proof.pi_h = G1.zero;
// Skip public entries and the "1" signal that are forced by the verifier
for (let s= vk_proof.nPublic+1; s< vk_proof.nVars; s++) {
// pi_a = pi_a + A[s] * witness[s];
proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( vk_proof.A[s], witness[s]));
// pi_ap = pi_ap + Ap[s] * witness[s];
proof.pi_ap = G1.add( proof.pi_ap, G1.timesScalar( vk_proof.Ap[s], witness[s]));
}
for (let s= 0; s< vk_proof.nVars; s++) {
// pi_a = pi_a + A[s] * witness[s];
proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( vk_proof.B[s], witness[s]));
// pi_ap = pi_ap + Ap[s] * witness[s];
proof.pi_bp = G1.add( proof.pi_bp, G1.timesScalar( vk_proof.Bp[s], witness[s]));
// pi_a = pi_a + A[s] * witness[s];
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.C[s], witness[s]));
// pi_ap = pi_ap + Ap[s] * witness[s];
proof.pi_cp = G1.add( proof.pi_cp, G1.timesScalar( vk_proof.Cp[s], witness[s]));
// pi_ap = pi_ap + Ap[s] * witness[s];
proof.pi_kp = G1.add( proof.pi_kp, G1.timesScalar( vk_proof.Kp[s], witness[s]));
}
proof.pi_a = G1.add( proof.pi_a, G1.timesScalar( vk_proof.A[vk_proof.nVars], d1));
proof.pi_ap = G1.add( proof.pi_ap, G1.timesScalar( vk_proof.Ap[vk_proof.nVars], d1));
proof.pi_b = G2.add( proof.pi_b, G2.timesScalar( vk_proof.B[vk_proof.nVars], d2));
proof.pi_bp = G1.add( proof.pi_bp, G1.timesScalar( vk_proof.Bp[vk_proof.nVars], d2));
proof.pi_c = G1.add( proof.pi_c, G1.timesScalar( vk_proof.C[vk_proof.nVars], d3));
proof.pi_cp = G1.add( proof.pi_cp, G1.timesScalar( vk_proof.Cp[vk_proof.nVars], d3));
proof.pi_kp = G1.add( proof.pi_kp, G1.timesScalar( vk_proof.Kp[vk_proof.nVars ], d1));
proof.pi_kp = G1.add( proof.pi_kp, G1.timesScalar( vk_proof.Kp[vk_proof.nVars+1], d2));
proof.pi_kp = G1.add( proof.pi_kp, G1.timesScalar( vk_proof.Kp[vk_proof.nVars+2], d3));
/*
let polA = [];
let polB = [];
let polC = [];
for (let s= 0; s< vk_proof.nVars; s++) {
polA = PolF.add(
polA,
PolF.mul(
vk_proof.polsA[s],
[witness[s]] ));
polB = PolF.add(
polB,
PolF.mul(
vk_proof.polsB[s],
[witness[s]] ));
polC = PolF.add(
polC,
PolF.mul(
vk_proof.polsC[s],
[witness[s]] ));
}
let polFull = PolF.sub(PolF.mul( polA, polB), polC);
const h = PolF.div(polFull, vk_proof.polZ );
*/
const h = calculateH(vk_proof, witness, d1, d2, d3);
// console.log(h.length + "/" + vk_proof.hExps.length);
for (let i = 0; i < h.length; i++) {
proof.pi_h = G1.add( proof.pi_h, G1.timesScalar( vk_proof.hExps[i], h[i]));
}
proof.pi_a = G1.toAffine(proof.pi_a);
proof.pi_b = G2.toAffine(proof.pi_b);
proof.pi_c = G1.toAffine(proof.pi_c);
proof.pi_ap = G1.toAffine(proof.pi_ap);
proof.pi_bp = G1.toAffine(proof.pi_bp);
proof.pi_cp = G1.toAffine(proof.pi_cp);
proof.pi_kp = G1.toAffine(proof.pi_kp);
proof.pi_h = G1.toAffine(proof.pi_h);
// proof.h=h;
proof.protocol = "original";
const publicSignals = witness.slice(1, vk_proof.nPublic+1);
return {proof, publicSignals};
};
function calculateH(vk_proof, witness, d1, d2, d3) {
const F = PolF.F;
const m = vk_proof.domainSize;
const polA_T = new Array(m).fill(PolF.F.zero);
const polB_T = new Array(m).fill(PolF.F.zero);
const polC_T = new Array(m).fill(PolF.F.zero);
for (let s=0; s<vk_proof.nVars; s++) {
for (let c in vk_proof.polsA[s]) {
polA_T[c] = F.add(polA_T[c], F.mul(witness[s], vk_proof.polsA[s][c]));
}
for (let c in vk_proof.polsB[s]) {
polB_T[c] = F.add(polB_T[c], F.mul(witness[s], vk_proof.polsB[s][c]));
}
for (let c in vk_proof.polsC[s]) {
polC_T[c] = F.add(polC_T[c], F.mul(witness[s], vk_proof.polsC[s][c]));
}
}
const polA_S = PolF.ifft(polA_T);
const polB_S = PolF.ifft(polB_T);
const polAB_S = PolF.mul(polA_S, polB_S);
const polC_S = PolF.ifft(polC_T);
const polABC_S = PolF.sub(polAB_S, polC_S);
const polZ_S = new Array(m+1).fill(F.zero);
polZ_S[m] = F.one;
polZ_S[0] = F.neg(F.one);
let H_S = PolF.div(polABC_S, polZ_S);
/*
const H2S = PolF.mul(H_S, polZ_S);
if (PolF.equals(H2S, polABC_S)) {
console.log("Is Divisible!");
} else {
console.log("ERROR: Not divisible!");
}
*/
/* add coefficients of the polynomial (d2*A + d1*B - d3) + d1*d2*Z */
H_S = PolF.extend(H_S, m+1);
for (let i=0; i<m; i++) {
const d2A = PolF.F.mul(d2, polA_S[i]);
const d1B = PolF.F.mul(d1, polB_S[i]);
H_S[i] = PolF.F.add(H_S[i], PolF.F.add(d2A, d1B));
}
H_S[0] = PolF.F.sub(H_S[0], d3);
// Z = x^m -1
const d1d2 = PolF.F.mul(d1, d2);
H_S[m] = PolF.F.add(H_S[m], d1d2);
H_S[0] = PolF.F.sub(H_S[0], d1d2);
H_S = PolF.reduce(H_S);
return H_S;
}
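
Spelled out, the `calculateH` above first divides by the vanishing polynomial and then applies the blinding described in its inline comment: with random blinders $d_1, d_2, d_3$ and $Z(x) = x^m - 1$,

$$
H'(x) = \frac{A(x)\,B(x) - C(x)}{Z(x)} + d_2\,A(x) + d_1\,B(x) - d_3 + d_1 d_2\, Z(x) .
$$

This matches the coefficient updates in the code: the $d_2 A + d_1 B$ terms are added index-wise, $d_3$ is subtracted from the constant term, and $d_1 d_2 (x^m - 1)$ contributes $+d_1 d_2$ at degree $m$ and $-d_1 d_2$ at degree $0$.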


@ -1,3 +1,4 @@
module.exports.print = require("./r1cs_print");
module.exports.info = require("./r1cs_info");
module.exports.exportJson = require("./r1cs_export_json");
export {default as print} from "./r1cs_print.js";
export {default as info} from "./r1cs_info.js";
export {default as exportJson} from "./r1cs_export_json.js";
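
With the index converted to re-exports, callers use named imports. A minimal sketch (the index module path is an assumption, and `console` works as a logger here because it exposes an `info` method):

```js
// Hypothetical usage of the re-exported r1cs commands.
import { info } from "./r1cs.js";

const cir = await info("circuit.r1cs", console);
```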

View File

@ -1,14 +1,8 @@
const {stringifyBigInts} = require("ffjavascript").utils;
const fs = require("fs");
const readZKey = require("./zkey_utils").read;
const loadR1cs = require("r1csfile").load;
import {load as loadR1cs} from "r1csfile";
module.exports = r1csExportJson;
async function r1csExportJson(r1csFileName, jsonFileName, verbose) {
export default async function r1csExportJson(r1csFileName, logger) {
const cir = await loadR1cs(r1csFileName, true, true);
const S = JSON.stringify(stringifyBigInts(cir), null, 1);
await fs.promises.writeFile(jsonFileName, S);
return cir;
}


@ -1,25 +1,25 @@
const Scalar = require("ffjavascript").Scalar;
const loadR1cs = require("r1csfile").load;
module.exports = r1csInfo;
import { Scalar } from "ffjavascript";
import {load as loadR1cs} from "r1csfile";
const bls12381r = Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
const bn128r = Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617", 16);
const bn128r = Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");
export default async function r1csInfo(r1csName, logger) {
async function r1csInfo(r1csName) {
const cir = await loadR1cs(r1csName);
if (Scalar.eq(cir.prime, bn128r)) {
console.log("# Curve: bn-128");
if (logger) logger.info("Curve: bn-128");
} else if (Scalar.eq(cir.prime, bls12381r)) {
console.log("# Curve: bls12-381");
if (logger) logger.info("Curve: bls12-381");
} else {
console.log(`# Unknown Curve. Prime: ${Scalar.toString(cir.r)}`);
if (logger) logger.info(`Unknown Curve. Prime: ${Scalar.toString(cir.prime)}`);
}
console.log(`# Wires: ${cir.nVars}`);
console.log(`# Constraints: ${cir.nConstraints}`);
console.log(`# Private Inputs: ${cir.nPrvInputs}`);
console.log(`# Public Inputs: ${cir.nPubInputs}`);
console.log(`# Outputs: ${cir.nOutputs}`);
if (logger) logger.info(`# of Wires: ${cir.nVars}`);
if (logger) logger.info(`# of Constraints: ${cir.nConstraints}`);
if (logger) logger.info(`# of Private Inputs: ${cir.nPrvInputs}`);
if (logger) logger.info(`# of Public Inputs: ${cir.nPubInputs}`);
if (logger) logger.info(`# of Outputs: ${cir.nOutputs}`);
return cir;
}


@ -1,5 +1,5 @@
module.exports = function r1csPrint(r1cs, syms) {
export default function r1csPrint(r1cs, syms, logger) {
for (let i=0; i<r1cs.constraints.length; i++) {
printCostraint(r1cs.constraints[i]);
}
@ -21,7 +21,7 @@ module.exports = function r1csPrint(r1cs, syms) {
return S;
};
const S = `[ ${lc2str(c[0])} ] * [ ${lc2str(c[1])} ] - [ ${lc2str(c[2])} ] = 0`;
console.log(S);
if (logger) logger.info(S);
}
};
}


@ -1,251 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").ZqField;
/*
const G1 = bn128.G1;
const G2 = bn128.G2;
const PolF = new PolField(new ZqField(bn128.r));
const F = new ZqField(bn128.r);
*/
module.exports = function setup(circuit, verbose) {
const setup = {
vk_proof : {
protocol: "groth16",
nVars: circuit.nVars,
nPublic: circuit.nPubInputs + circuit.nOutputs
},
toxic: {}
};
setup.vk_proof.q = bn128.q;
setup.vk_proof.r = bn128.r;
setup.vk_proof.domainBits = PolF.log2(circuit.nConstraints + circuit.nPubInputs + circuit.nOutputs +1 -1) +1;
setup.vk_proof.domainSize = 1 << setup.vk_proof.domainBits;
calculatePolinomials(setup, circuit);
setup.toxic.t = F.random();
calculateEncriptedValuesAtT(setup, circuit, verbose);
setup.vk_verifier = {
protocol: setup.vk_proof.protocol,
nPublic: setup.vk_proof.nPublic,
IC: setup.vk_proof.IC,
vk_alpha_1: setup.vk_proof.vk_alpha_1,
vk_beta_2: setup.vk_proof.vk_beta_2,
vk_gamma_2: setup.vk_proof.vk_gamma_2,
vk_delta_2: setup.vk_proof.vk_delta_2,
vk_alphabeta_12: bn128.pairing( setup.vk_proof.vk_alpha_1 , setup.vk_proof.vk_beta_2 )
};
return setup;
};
function calculatePolinomials(setup, circuit) {
setup.vk_proof.polsA = new Array(circuit.nVars);
setup.vk_proof.polsB = new Array(circuit.nVars);
setup.vk_proof.polsC = new Array(circuit.nVars);
for (let i=0; i<circuit.nVars; i++) {
setup.vk_proof.polsA[i] = {};
setup.vk_proof.polsB[i] = {};
setup.vk_proof.polsC[i] = {};
}
setup.vk_proof.ccoefs = [];
for (let m=0; m<2; m++) {
for (let c=0; c<circuit.nConstraints; c++) {
for (let s in circuit.constraints[c][m]) {
setup.vk_proof.ccoefs.push({
matrix: m,
constraint: c,
signal: s,
value: circuit.constraints[c][m][s]
});
}
}
}
for (let c=0; c<circuit.nConstraints; c++) {
for (let s in circuit.constraints[c][0]) {
setup.vk_proof.polsA[s][c] = circuit.constraints[c][0][s];
}
for (let s in circuit.constraints[c][1]) {
setup.vk_proof.polsB[s][c] = circuit.constraints[c][1][s];
}
for (let s in circuit.constraints[c][2]) {
setup.vk_proof.polsC[s][c] = circuit.constraints[c][2][s];
}
}
/**
* add and process the constraints
* input_i * 0 = 0
* to ensure soundness of input consistency
*/
for (let i = 0; i < circuit.nPubInputs + circuit.nOutputs + 1; ++i)
{
setup.vk_proof.polsA[i][circuit.nConstraints + i] = F.one;
setup.vk_proof.ccoefs.push({
matrix: 0,
constraint: circuit.nConstraints + i,
signal: i,
value: F.one
});
}
}
function calculateValuesAtT(setup, circuit) {
const z_t = PolF.computeVanishingPolinomial(setup.vk_proof.domainBits, setup.toxic.t);
const u = PolF.evaluateLagrangePolynomials(setup.vk_proof.domainBits, setup.toxic.t);
const a_t = new Array(circuit.nVars).fill(F.zero);
const b_t = new Array(circuit.nVars).fill(F.zero);
const c_t = new Array(circuit.nVars).fill(F.zero);
// TODO: substitute setup.polsA for coefficients
for (let s=0; s<circuit.nVars; s++) {
for (let c in setup.vk_proof.polsA[s]) {
a_t[s] = F.add(a_t[s], F.mul(u[c], setup.vk_proof.polsA[s][c]));
}
for (let c in setup.vk_proof.polsB[s]) {
b_t[s] = F.add(b_t[s], F.mul(u[c], setup.vk_proof.polsB[s][c]));
}
for (let c in setup.vk_proof.polsC[s]) {
c_t[s] = F.add(c_t[s], F.mul(u[c], setup.vk_proof.polsC[s][c]));
}
}
return {a_t, b_t, c_t, z_t};
}
function calculateEncriptedValuesAtT(setup, circuit, verbose) {
const v = calculateValuesAtT(setup, circuit);
setup.vk_proof.A = new Array(circuit.nVars);
setup.vk_proof.B1 = new Array(circuit.nVars);
setup.vk_proof.B2 = new Array(circuit.nVars);
setup.vk_proof.C = new Array(circuit.nVars);
setup.vk_proof.IC = new Array(circuit.nPubInputs + circuit.nOutputs + 1);
setup.toxic.kalpha = F.random();
setup.toxic.kbeta = F.random();
setup.toxic.kgamma = F.random();
setup.toxic.kdelta = F.random();
let invDelta = F.inv(setup.toxic.kdelta);
let invGamma = F.inv(setup.toxic.kgamma);
setup.vk_proof.vk_alpha_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kalpha));
setup.vk_proof.vk_beta_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kbeta));
setup.vk_proof.vk_delta_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kdelta));
setup.vk_proof.vk_beta_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kbeta));
setup.vk_proof.vk_delta_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kdelta));
setup.vk_proof.vk_gamma_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kgamma));
for (let s=0; s<circuit.nVars; s++) {
const A = G1.timesScalar(G1.g, v.a_t[s]);
setup.vk_proof.A[s] = A;
const B1 = G1.timesScalar(G1.g, v.b_t[s]);
setup.vk_proof.B1[s] = B1;
const B2 = G2.timesScalar(G2.g, v.b_t[s]);
setup.vk_proof.B2[s] = B2;
if ((verbose)&&(s%1000 == 1)) console.log("A, B1, B2: ", s);
}
for (let s=0; s<=setup.vk_proof.nPublic; s++) {
let ps =
F.mul(
invGamma,
F.add(
F.add(
F.mul(v.a_t[s], setup.toxic.kbeta),
F.mul(v.b_t[s], setup.toxic.kalpha)),
v.c_t[s]));
const IC = G1.timesScalar(G1.g, ps);
setup.vk_proof.IC[s]=IC;
}
for (let s=setup.vk_proof.nPublic+1; s<circuit.nVars; s++) {
let ps =
F.mul(
invDelta,
F.add(
F.add(
F.mul(v.a_t[s], setup.toxic.kbeta),
F.mul(v.b_t[s], setup.toxic.kalpha)),
v.c_t[s]));
const C = G1.timesScalar(G1.g, ps);
setup.vk_proof.C[s]=C;
if ((verbose)&&(s%1000 == 1)) console.log("C: ", s);
}
// Calculate HExps
const maxH = setup.vk_proof.domainSize+1;
setup.vk_proof.hExps = new Array(maxH);
const zod = F.mul(invDelta, v.z_t);
setup.vk_proof.hExps[0] = G1.toAffine(G1.timesScalar(G1.g, zod));
let eT = setup.toxic.t;
for (let i=1; i<maxH; i++) {
setup.vk_proof.hExps[i] = G1.timesScalar(G1.g, F.mul(eT, zod));
eT = F.mul(eT, setup.toxic.t);
if ((verbose)&&(i%1000 == 1)) console.log("Tau: ", i);
}
G1.multiAffine(setup.vk_proof.A);
G1.multiAffine(setup.vk_proof.B1);
G2.multiAffine(setup.vk_proof.B2);
G1.multiAffine(setup.vk_proof.C);
G1.multiAffine(setup.vk_proof.hExps);
G1.multiAffine(setup.vk_proof.IC);
}
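
The two loops above build the standard Groth16 proving-key elements. With toxic scalars $\alpha, \beta, \gamma, \delta$, evaluation point $t$, and $Z$ the vanishing polynomial,

$$
\mathrm{IC}_s = \frac{\beta\,a_s(t) + \alpha\,b_s(t) + c_s(t)}{\gamma}\, G_1 \quad (s \le n_{\text{pub}}), \qquad
C_s = \frac{\beta\,a_s(t) + \alpha\,b_s(t) + c_s(t)}{\delta}\, G_1, \qquad
\mathrm{hExps}_i = \frac{t^i\, Z(t)}{\delta}\, G_1 .
$$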


@ -1,232 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const bigInt = require("big-integer");
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").ZqField;
/*
const G1 = bn128.G1;
const G2 = bn128.G2;
const PolF = new PolField(new ZqField(bn128.r));
const F = new ZqField(bn128.r);
*/
module.exports = function setup(circuit) {
const setup = {
vk_proof : {
protocol: "kimleeoh",
nVars: circuit.nVars,
nPublic: circuit.nPubInputs + circuit.nOutputs
},
vk_verifier: {
protocol: "kimleeoh",
nPublic: circuit.nPubInputs + circuit.nOutputs
},
toxic: {}
};
setup.vk_proof.domainBits = PolF.log2(circuit.nConstraints + circuit.nPubInputs + circuit.nOutputs +1 -1) +1;
setup.vk_proof.domainSize = 1 << setup.vk_proof.domainBits;
calculatePolinomials(setup, circuit);
setup.toxic.t = F.random();
calculateEncriptedValuesAtT(setup, circuit);
return setup;
};
function calculatePolinomials(setup, circuit) {
setup.vk_proof.polsA = new Array(circuit.nVars);
setup.vk_proof.polsB = new Array(circuit.nVars);
setup.vk_proof.polsC = new Array(circuit.nVars);
for (let i=0; i<circuit.nVars; i++) {
setup.vk_proof.polsA[i] = {};
setup.vk_proof.polsB[i] = {};
setup.vk_proof.polsC[i] = {};
}
for (let c=0; c<circuit.nConstraints; c++) {
for (let s in circuit.constraints[c][0]) {
setup.vk_proof.polsA[s][c] = circuit.constraints[c][0][s];
}
for (let s in circuit.constraints[c][1]) {
setup.vk_proof.polsB[s][c] = circuit.constraints[c][1][s];
}
for (let s in circuit.constraints[c][2]) {
setup.vk_proof.polsC[s][c] = circuit.constraints[c][2][s];
}
}
/**
* add and process the constraints
* input_i * 0 = 0
* to ensure soundness of input consistency
*/
for (let i = 0; i < circuit.nPubInputs + circuit.nOutputs + 1; ++i)
{
setup.vk_proof.polsA[i][circuit.nConstraints + i] = F.one;
}
}
function calculateValuesAtT(setup, circuit) {
const z_t = PolF.computeVanishingPolinomial(setup.vk_proof.domainBits, setup.toxic.t);
const u = PolF.evaluateLagrangePolynomials(setup.vk_proof.domainBits, setup.toxic.t);
const a_t = new Array(circuit.nVars).fill(F.zero);
const b_t = new Array(circuit.nVars).fill(F.zero);
const c_t = new Array(circuit.nVars).fill(F.zero);
// TODO: substitute setup.polsA for coefficients
for (let s=0; s<circuit.nVars; s++) {
for (let c in setup.vk_proof.polsA[s]) {
a_t[s] = F.add(a_t[s], F.mul(u[c], setup.vk_proof.polsA[s][c]));
}
for (let c in setup.vk_proof.polsB[s]) {
b_t[s] = F.add(b_t[s], F.mul(u[c], setup.vk_proof.polsB[s][c]));
}
for (let c in setup.vk_proof.polsC[s]) {
c_t[s] = F.add(c_t[s], F.mul(u[c], setup.vk_proof.polsC[s][c]));
}
}
return {a_t, b_t, c_t, z_t};
}
function calculateEncriptedValuesAtT(setup, circuit) {
const v = calculateValuesAtT(setup, circuit);
setup.vk_proof.A = new Array(circuit.nVars);
setup.vk_proof.Adelta = new Array(circuit.nVars);
setup.vk_proof.B1 = new Array(circuit.nVars);
setup.vk_proof.B2 = new Array(circuit.nVars);
setup.vk_proof.C = new Array(circuit.nVars);
setup.vk_verifier.IC = new Array(circuit.nPubInputs + circuit.nOutputs + 1);
setup.toxic.kalpha = F.random();
setup.toxic.kbeta = F.random();
setup.toxic.kgamma = F.random();
setup.toxic.kdelta = F.random();
const gammaSquare = F.mul(setup.toxic.kgamma, setup.toxic.kgamma);
setup.vk_proof.vk_alpha_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kalpha));
setup.vk_proof.vk_beta_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kbeta));
setup.vk_proof.vk_delta_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kdelta));
setup.vk_proof.vk_alphadelta_1 = G1.toAffine(G1.timesScalar( G1.g, F.mul(setup.toxic.kalpha, setup.toxic.kdelta)));
setup.vk_proof.vk_beta_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kbeta));
setup.vk_verifier.vk_alpha_1 = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kalpha));
setup.vk_verifier.vk_beta_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kbeta));
setup.vk_verifier.vk_gamma_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kgamma));
setup.vk_verifier.vk_delta_2 = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kdelta));
setup.vk_verifier.vk_alphabeta_12 = bn128.pairing( setup.vk_verifier.vk_alpha_1 , setup.vk_verifier.vk_beta_2 );
for (let s=0; s<circuit.nVars; s++) {
const A = G1.toAffine(G1.timesScalar(G1.g, F.mul(setup.toxic.kgamma, v.a_t[s])));
setup.vk_proof.A[s] = A;
setup.vk_proof.Adelta[s] = G1.toAffine(G1.timesScalar(A, setup.toxic.kdelta));
const B1 = G1.toAffine(G1.timesScalar(G1.g, F.mul(setup.toxic.kgamma, v.b_t[s])));
setup.vk_proof.B1[s] = B1;
const B2 = G2.toAffine(G2.timesScalar(G2.g, F.mul(setup.toxic.kgamma, v.b_t[s])));
setup.vk_proof.B2[s] = B2;
}
for (let s=0; s<=setup.vk_proof.nPublic; s++) {
let ps =
F.add(
F.mul(
setup.toxic.kgamma,
v.c_t[s]
),
F.add(
F.mul(
setup.toxic.kbeta,
v.a_t[s]
),
F.mul(
setup.toxic.kalpha,
v.b_t[s]
)
)
);
const IC = G1.toAffine(G1.timesScalar(G1.g, ps));
setup.vk_verifier.IC[s]=IC;
}
for (let s=setup.vk_proof.nPublic+1; s<circuit.nVars; s++) {
let ps =
F.add(
F.mul(
gammaSquare,
v.c_t[s]
),
F.add(
F.mul(
F.mul(setup.toxic.kbeta, setup.toxic.kgamma),
v.a_t[s]
),
F.mul(
F.mul(setup.toxic.kalpha, setup.toxic.kgamma),
v.b_t[s]
)
)
);
const C = G1.toAffine(G1.timesScalar(G1.g, ps));
setup.vk_proof.C[s]=C;
}
// Calculate HExps
const maxH = setup.vk_proof.domainSize+1;
setup.vk_proof.hExps = new Array(maxH);
const zod = F.mul(gammaSquare, v.z_t);
setup.vk_proof.hExps[0] = G1.toAffine(G1.timesScalar(G1.g, zod));
let eT = setup.toxic.t;
for (let i=1; i<maxH; i++) {
setup.vk_proof.hExps[i] = G1.toAffine(G1.timesScalar(G1.g, F.mul(eT, zod)));
eT = F.mul(eT, setup.toxic.t);
}
}


@ -1,236 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const bn128 = require("ffjavascript").bn128;
const PolField = require("ffjavascript").PolField;
const ZqField = require("ffjavascript").F1Field;
/*
const G1 = bn128.G1;
const G2 = bn128.G2;
const PolF = new PolField(new ZqField(bn128.r));
const F = new ZqField(bn128.r);
*/
module.exports = function setup(circuit) {
const setup = {
vk_proof : {
protocol: "original",
nVars: circuit.nVars,
nPublic: circuit.nPubInputs + circuit.nOutputs
},
vk_verifier: {
protocol: "original",
nPublic: circuit.nPubInputs + circuit.nOutputs
},
toxic: {}
};
setup.vk_proof.domainBits = PolF.log2(circuit.nConstraints + circuit.nPubInputs + circuit.nOutputs +1 -1) +1;
setup.vk_proof.domainSize = 1 << setup.vk_proof.domainBits;
calculatePolinomials(setup, circuit);
setup.toxic.t = F.random();
calculateEncriptedValuesAtT(setup, circuit);
calculateHexps(setup, circuit);
return setup;
};
function calculatePolinomials(setup, circuit) {
setup.vk_proof.polsA = new Array(circuit.nVars);
setup.vk_proof.polsB = new Array(circuit.nVars);
setup.vk_proof.polsC = new Array(circuit.nVars);
for (let i=0; i<circuit.nVars; i++) {
setup.vk_proof.polsA[i] = {};
setup.vk_proof.polsB[i] = {};
setup.vk_proof.polsC[i] = {};
}
for (let c=0; c<circuit.nConstraints; c++) {
for (let s in circuit.constraints[c][0]) {
setup.vk_proof.polsA[s][c] = circuit.constraints[c][0][s];
}
for (let s in circuit.constraints[c][1]) {
setup.vk_proof.polsB[s][c] = circuit.constraints[c][1][s];
}
for (let s in circuit.constraints[c][2]) {
setup.vk_proof.polsC[s][c] = circuit.constraints[c][2][s];
}
}
/**
* add and process the constraints
* input_i * 0 = 0
* to ensure soundness of input consistency
*/
for (let i = 0; i < circuit.nPubInputs + circuit.nOutputs + 1; ++i)
{
setup.vk_proof.polsA[i][circuit.nConstraints + i] = F.one;
}
}
function calculateValuesAtT(setup, circuit) {
const z_t = PolF.computeVanishingPolinomial(setup.vk_proof.domainBits, setup.toxic.t);
const u = PolF.evaluateLagrangePolynomials(setup.vk_proof.domainBits, setup.toxic.t);
const a_t = new Array(circuit.nVars).fill(F.zero);
const b_t = new Array(circuit.nVars).fill(F.zero);
const c_t = new Array(circuit.nVars).fill(F.zero);
// TODO: substitute setup.polsA for coefficients
for (let s=0; s<circuit.nVars; s++) {
for (let c in setup.vk_proof.polsA[s]) {
a_t[s] = F.add(a_t[s], F.mul(u[c], setup.vk_proof.polsA[s][c]));
}
for (let c in setup.vk_proof.polsB[s]) {
b_t[s] = F.add(b_t[s], F.mul(u[c], setup.vk_proof.polsB[s][c]));
}
for (let c in setup.vk_proof.polsC[s]) {
c_t[s] = F.add(c_t[s], F.mul(u[c], setup.vk_proof.polsC[s][c]));
}
}
return {a_t, b_t, c_t, z_t};
}
function calculateEncriptedValuesAtT(setup, circuit) {
const v = calculateValuesAtT(setup, circuit);
setup.vk_proof.A = new Array(circuit.nVars+1);
setup.vk_proof.B = new Array(circuit.nVars+1);
setup.vk_proof.C = new Array(circuit.nVars+1);
setup.vk_proof.Ap = new Array(circuit.nVars+1);
setup.vk_proof.Bp = new Array(circuit.nVars+1);
setup.vk_proof.Cp = new Array(circuit.nVars+1);
setup.vk_proof.Kp = new Array(circuit.nVars+3);
setup.vk_verifier.IC = new Array(circuit.nPubInputs);
setup.vk_verifier.IC = new Array(circuit.nPubInputs + circuit.nOutputs + 1);
setup.toxic.ka = F.random();
setup.toxic.kb = F.random();
setup.toxic.kc = F.random();
setup.toxic.ra = F.random();
setup.toxic.rb = F.random();
setup.toxic.rc = F.mul(setup.toxic.ra, setup.toxic.rb);
setup.toxic.kbeta = F.random();
setup.toxic.kgamma = F.random();
const gb = F.mul(setup.toxic.kbeta, setup.toxic.kgamma);
setup.vk_verifier.vk_a = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.ka));
setup.vk_verifier.vk_b = G1.toAffine(G1.timesScalar( G1.g, setup.toxic.kb));
setup.vk_verifier.vk_c = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kc));
setup.vk_verifier.vk_gb_1 = G1.toAffine(G1.timesScalar( G1.g, gb));
setup.vk_verifier.vk_gb_2 = G2.toAffine(G2.timesScalar( G2.g, gb));
setup.vk_verifier.vk_g = G2.toAffine(G2.timesScalar( G2.g, setup.toxic.kgamma));
for (let s=0; s<circuit.nVars; s++) {
// A[i] = G1 * polA(t)
const raat = F.mul(setup.toxic.ra, v.a_t[s]);
const A = G1.toAffine(G1.timesScalar(G1.g, raat));
setup.vk_proof.A[s] = A;
if (s <= setup.vk_proof.nPublic) {
setup.vk_verifier.IC[s]=A;
}
// B1[i] = G1 * polB(t)
const rbbt = F.mul(setup.toxic.rb, v.b_t[s]);
const B1 = G1.toAffine(G1.timesScalar(G1.g, rbbt));
// B2[i] = G2 * polB(t)
const B2 = G2.toAffine(G2.timesScalar(G2.g, rbbt));
setup.vk_proof.B[s]=B2;
// C[i] = G1 * polC(t)
const rcct = F.mul(setup.toxic.rc, v.c_t[s]);
const C = G1.toAffine(G1.timesScalar( G1.g, rcct));
setup.vk_proof.C[s] =C;
// K = G1 * (A+B+C)
const kt = F.add(F.add(raat, rbbt), rcct);
const K = G1.toAffine(G1.timesScalar( G1.g, kt));
/*
// Uncomment these lines to sanity-check K (slows down the setup)
const Ktest = G1.toAffine(G1.add(G1.add(A, B1), C));
if (!G1.equals(K, Ktest)) {
console.log ("=====FAIL======");
}
*/
if (s > setup.vk_proof.nPublic) {
setup.vk_proof.Ap[s] = G1.toAffine(G1.timesScalar(A, setup.toxic.ka));
}
setup.vk_proof.Bp[s] = G1.toAffine(G1.timesScalar(B1, setup.toxic.kb));
setup.vk_proof.Cp[s] = G1.toAffine(G1.timesScalar(C, setup.toxic.kc));
setup.vk_proof.Kp[s] = G1.toAffine(G1.timesScalar(K, setup.toxic.kbeta));
}
// Extra coefficients
const A = G1.timesScalar( G1.g, F.mul(setup.toxic.ra, v.z_t));
setup.vk_proof.A[circuit.nVars] = G1.toAffine(A);
setup.vk_proof.Ap[circuit.nVars] = G1.toAffine(G1.timesScalar(A, setup.toxic.ka));
const B1 = G1.timesScalar( G1.g, F.mul(setup.toxic.rb, v.z_t));
const B2 = G2.timesScalar( G2.g, F.mul(setup.toxic.rb, v.z_t));
setup.vk_proof.B[circuit.nVars] = G2.toAffine(B2);
setup.vk_proof.Bp[circuit.nVars] = G1.toAffine(G1.timesScalar(B1, setup.toxic.kb));
const C = G1.timesScalar( G1.g, F.mul(setup.toxic.rc, v.z_t));
setup.vk_proof.C[circuit.nVars] = G1.toAffine(C);
setup.vk_proof.Cp[circuit.nVars] = G1.toAffine(G1.timesScalar(C, setup.toxic.kc));
setup.vk_proof.Kp[circuit.nVars ] = G1.toAffine(G1.timesScalar(A, setup.toxic.kbeta));
setup.vk_proof.Kp[circuit.nVars+1] = G1.toAffine(G1.timesScalar(B1, setup.toxic.kbeta));
setup.vk_proof.Kp[circuit.nVars+2] = G1.toAffine(G1.timesScalar(C, setup.toxic.kbeta));
// setup.vk_verifier.A[0] = G1.toAffine(G1.add(setup.vk_verifier.A[0], setup.vk_proof.A[circuit.nVars]));
// vk_z
setup.vk_verifier.vk_z = G2.toAffine(G2.timesScalar(
G2.g,
F.mul(setup.toxic.rc, v.z_t)));
}
function calculateHexps(setup) {
const maxH = setup.vk_proof.domainSize+1;
setup.vk_proof.hExps = new Array(maxH);
setup.vk_proof.hExps[0] = G1.g;
let eT = setup.toxic.t;
for (let i=1; i<maxH; i++) {
setup.vk_proof.hExps[i] = G1.toAffine(G1.timesScalar(G1.g, eT));
eT = F.mul(eT, setup.toxic.t);
}
}

3
src/solidity.js Normal file

@ -0,0 +1,3 @@
export {default as genGroth16Verifier} from "./solidity_gengroth16verifier.js";
export {default as prove} from "./groth16_prove.js";
export {default as validate} from "./groth16_verify.js";


@ -1,150 +0,0 @@
const path = require("path");
const fs = require("fs");
module.exports.generateVerifier_original = generateVerifier_original;
module.exports.generateVerifier_groth16 = generateVerifier_groth16;
// Not ready yet
// module.exports.generateVerifier_kimleeoh = generateVerifier_kimleeoh;
function generateVerifier_original(verificationKey) {
let template = fs.readFileSync(path.join( __dirname, "..", "templates", "verifier_original.sol"), "utf-8");
const vka_str = `[${verificationKey.vk_a[0][1].toString()},`+
`${verificationKey.vk_a[0][0].toString()}], `+
`[${verificationKey.vk_a[1][1].toString()},` +
`${verificationKey.vk_a[1][0].toString()}]`;
template = template.replace("<%vk_a%>", vka_str);
const vkb_str = `${verificationKey.vk_b[0].toString()},`+
`${verificationKey.vk_b[1].toString()}`;
template = template.replace("<%vk_b%>", vkb_str);
const vkc_str = `[${verificationKey.vk_c[0][1].toString()},`+
`${verificationKey.vk_c[0][0].toString()}], `+
`[${verificationKey.vk_c[1][1].toString()},` +
`${verificationKey.vk_c[1][0].toString()}]`;
template = template.replace("<%vk_c%>", vkc_str);
const vkg_str = `[${verificationKey.vk_g[0][1].toString()},`+
`${verificationKey.vk_g[0][0].toString()}], `+
`[${verificationKey.vk_g[1][1].toString()},` +
`${verificationKey.vk_g[1][0].toString()}]`;
template = template.replace("<%vk_g%>", vkg_str);
const vkgb1_str = `${verificationKey.vk_gb_1[0].toString()},`+
`${verificationKey.vk_gb_1[1].toString()}`;
template = template.replace("<%vk_gb1%>", vkgb1_str);
const vkgb2_str = `[${verificationKey.vk_gb_2[0][1].toString()},`+
`${verificationKey.vk_gb_2[0][0].toString()}], `+
`[${verificationKey.vk_gb_2[1][1].toString()},` +
`${verificationKey.vk_gb_2[1][0].toString()}]`;
template = template.replace("<%vk_gb2%>", vkgb2_str);
const vkz_str = `[${verificationKey.vk_z[0][1].toString()},`+
`${verificationKey.vk_z[0][0].toString()}], `+
`[${verificationKey.vk_z[1][1].toString()},` +
`${verificationKey.vk_z[1][0].toString()}]`;
template = template.replace("<%vk_z%>", vkz_str);
// The points
template = template.replace("<%vk_input_length%>", (verificationKey.IC.length-1).toString());
template = template.replace("<%vk_ic_length%>", verificationKey.IC.length.toString());
let vi = "";
for (let i=0; i<verificationKey.IC.length; i++) {
if (vi != "") vi = vi + " ";
vi = vi + `vk.IC[${i}] = Pairing.G1Point(${verificationKey.IC[i][0].toString()},`+
`${verificationKey.IC[i][1].toString()});\n`;
}
template = template.replace("<%vk_ic_pts%>", vi);
return template;
}
function generateVerifier_groth16(verificationKey) {
let template = fs.readFileSync(path.join( __dirname, "..", "templates", "verifier_groth16.sol"), "utf-8");
const vkalpha1_str = `${verificationKey.vk_alpha_1[0].toString()},`+
`${verificationKey.vk_alpha_1[1].toString()}`;
template = template.replace("<%vk_alpha1%>", vkalpha1_str);
const vkbeta2_str = `[${verificationKey.vk_beta_2[0][1].toString()},`+
`${verificationKey.vk_beta_2[0][0].toString()}], `+
`[${verificationKey.vk_beta_2[1][1].toString()},` +
`${verificationKey.vk_beta_2[1][0].toString()}]`;
template = template.replace("<%vk_beta2%>", vkbeta2_str);
const vkgamma2_str = `[${verificationKey.vk_gamma_2[0][1].toString()},`+
`${verificationKey.vk_gamma_2[0][0].toString()}], `+
`[${verificationKey.vk_gamma_2[1][1].toString()},` +
`${verificationKey.vk_gamma_2[1][0].toString()}]`;
template = template.replace("<%vk_gamma2%>", vkgamma2_str);
const vkdelta2_str = `[${verificationKey.vk_delta_2[0][1].toString()},`+
`${verificationKey.vk_delta_2[0][0].toString()}], `+
`[${verificationKey.vk_delta_2[1][1].toString()},` +
`${verificationKey.vk_delta_2[1][0].toString()}]`;
template = template.replace("<%vk_delta2%>", vkdelta2_str);
// The points
template = template.replace("<%vk_input_length%>", (verificationKey.IC.length-1).toString());
template = template.replace("<%vk_ic_length%>", verificationKey.IC.length.toString());
let vi = "";
for (let i=0; i<verificationKey.IC.length; i++) {
if (vi != "") vi = vi + " ";
vi = vi + `vk.IC[${i}] = Pairing.G1Point(${verificationKey.IC[i][0].toString()},`+
`${verificationKey.IC[i][1].toString()});\n`;
}
template = template.replace("<%vk_ic_pts%>", vi);
return template;
}
function generateVerifier_kimleeoh(verificationKey) {
throw new Error("Not implemented yet because it requires G2 exponentiation onchain.");
let template = fs.readFileSync(path.join( __dirname, "..", "templates", "verifier_groth16.sol"), "utf-8");
const vkalpha1_str = `${verificationKey.vk_alpha_1[0].toString()},`+
`${verificationKey.vk_alpha_1[1].toString()}`;
template = template.replace("<%vk_alpha1%>", vkalpha1_str);
const vkbeta2_str = `[${verificationKey.vk_beta_2[0][1].toString()},`+
`${verificationKey.vk_beta_2[0][0].toString()}], `+
`[${verificationKey.vk_beta_2[1][1].toString()},` +
`${verificationKey.vk_beta_2[1][0].toString()}]`;
template = template.replace("<%vk_beta2%>", vkbeta2_str);
const vkgamma2_str = `[${verificationKey.vk_gamma_2[0][1].toString()},`+
`${verificationKey.vk_gamma_2[0][0].toString()}], `+
`[${verificationKey.vk_gamma_2[1][1].toString()},` +
`${verificationKey.vk_gamma_2[1][0].toString()}]`;
template = template.replace("<%vk_gamma2%>", vkgamma2_str);
const vkdelta2_str = `[${verificationKey.vk_delta_2[0][1].toString()},`+
`${verificationKey.vk_delta_2[0][0].toString()}], `+
`[${verificationKey.vk_delta_2[1][1].toString()},` +
`${verificationKey.vk_delta_2[1][0].toString()}]`;
template = template.replace("<%vk_delta2%>", vkdelta2_str);
// The points
template = template.replace("<%vk_input_length%>", (verificationKey.IC.length-1).toString());
template = template.replace("<%vk_ic_length%>", verificationKey.IC.length.toString());
let vi = "";
for (let i=0; i<verificationKey.IC.length; i++) {
if (vi != "") vi = vi + " ";
vi = vi + `vk.IC[${i}] = Pairing.G1Point(${verificationKey.IC[i][0].toString()},`+
`${verificationKey.IC[i][1].toString()});\n`;
}
template = template.replace("<%vk_ic_pts%>", vi);
return template;
}


@ -1,5 +1,3 @@
const assert = require("assert");
const inBrowser = (typeof window !== "undefined");
let NodeWorker;
if (!inBrowser) {
@ -87,8 +85,8 @@ async function buildTaskManager(fn, mods, initTask) {
return function(e) {
function finishTask() {
// There cannot be a waiting task while the manager is terminating
assert( !(tm.waitingTask && tm.terminateDeferred));
if ( (tm.waitingTask && tm.terminateDeferred))
throw new Error("It can not be a waiting task and it's terminating");
if (tm.terminateDeferred) {
tm.workers[i].worker.postMessage({cmd: "TERMINATE"});
@ -130,7 +128,8 @@ async function buildTaskManager(fn, mods, initTask) {
}
function processTask(i, task, asyncCb) {
assert(tm.workers[i].state == "READY");
if (tm.workers[i].state != "READY")
throw new Error("Worker is not ready");
tm.workers[i].asyncCb = asyncCb;
tm.workers[i].state = "WORKING";
@ -163,7 +162,8 @@ async function buildTaskManager(fn, mods, initTask) {
tm.finish = function() {
const self = this;
assert (self.terminatePromise == null);
if (self.terminatePromise != null)
throw new Error("Task manager already terminated");
self.terminateDeferred = new Deferred();
@ -178,8 +178,8 @@ async function buildTaskManager(fn, mods, initTask) {
tm.addTask = function (task, asyncCb) {
const self = this;
assert (!self.waitingTask);
assert(!self.terminateDeferred);
if (self.waitingTask) throw new Error("Waiting task pending");
if (self.terminateDeferred) throw new Error("New task after task manager terminated");
const deferral = new Deferred();
let i;
for (i=0; i<tm.workers.length; i++) {


@ -1,45 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const bn128 = require("ffjavascript").bn128;
const G1 = bn128.G1;
module.exports = function isValid(vk_verifier, proof, publicSignals) {
let cpub = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
cpub = G1.add( cpub, G1.timesScalar( vk_verifier.IC[s+1], publicSignals[s]));
}
if (! bn128.F12.eq(
bn128.pairing( proof.pi_a , proof.pi_b ),
bn128.F12.mul(
vk_verifier.vk_alphabeta_12,
bn128.F12.mul(
bn128.pairing( cpub , vk_verifier.vk_gamma_2 ),
bn128.pairing( proof.pi_c , vk_verifier.vk_delta_2 )
))))
return false;
return true;
};


@ -1,75 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const bn128 = require("ffjavascript").bn128;
const createKeccakHash = require("keccak");
const utils = require("ffjavascript").utils;
const G1 = bn128.G1;
const G2 = bn128.G2;
module.exports = function isValid(vk_verifier, proof, publicSignals) {
let cpub = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
cpub = G1.add( cpub, G1.timesScalar( vk_verifier.IC[s+1], publicSignals[s]));
}
const buff = Buffer.concat([
utils.beInt2Buff(proof.pi_a[0], 32),
utils.beInt2Buff(proof.pi_a[1], 32),
utils.beInt2Buff(proof.pi_b[0][0], 32),
utils.beInt2Buff(proof.pi_b[0][1], 32),
utils.beInt2Buff(proof.pi_b[1][0], 32),
utils.beInt2Buff(proof.pi_b[1][1], 32),
]);
const h1buff = createKeccakHash("keccak256").update(buff).digest();
const h2buff = createKeccakHash("keccak256").update(h1buff).digest();
const h1 = utils.beBuff2int(h1buff);
const h2 = utils.beBuff2int(h2buff);
// const h1 = bn128.Fr.zero;
// const h2 = bn128.Fr.zero;
// console.log(h1.toString());
// console.log(h2.toString());
if (! bn128.F12.eq(
bn128.pairing(
G1.add(proof.pi_a, G1.timesScalar(G1.g, h1)),
G2.add(proof.pi_b, G2.timesScalar(vk_verifier.vk_delta_2, h2))
),
bn128.F12.mul(
vk_verifier.vk_alphabeta_12,
bn128.F12.mul(
bn128.pairing( cpub , vk_verifier.vk_gamma_2 ),
bn128.pairing( proof.pi_c , G2.g )
))))
return false;
return true;
};


@ -1,66 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const bn128 = require("ffjavascript").bn128;
const G1 = bn128.G1;
const G2 = bn128.G2;
module.exports = function isValid(vk_verifier, proof, publicSignals) {
let full_pi_a = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
full_pi_a = G1.add( full_pi_a, G1.timesScalar( vk_verifier.IC[s+1], publicSignals[s]));
}
full_pi_a = G1.add( full_pi_a, proof.pi_a);
if (! bn128.F12.eq(
bn128.pairing( proof.pi_a , vk_verifier.vk_a ),
bn128.pairing( proof.pi_ap , G2.g )))
return false;
if (! bn128.F12.eq(
bn128.pairing( vk_verifier.vk_b, proof.pi_b ),
bn128.pairing( proof.pi_bp , G2.g )))
return false;
if (! bn128.F12.eq(
bn128.pairing( proof.pi_c , vk_verifier.vk_c ),
bn128.pairing( proof.pi_cp , G2.g )))
return false;
if (! bn128.F12.eq(
bn128.F12.mul(
bn128.pairing( G1.add(full_pi_a, proof.pi_c) , vk_verifier.vk_gb_2 ),
bn128.pairing( vk_verifier.vk_gb_1 , proof.pi_b )
),
bn128.pairing( proof.pi_kp , vk_verifier.vk_g )))
return false;
if (! bn128.F12.eq(
bn128.pairing( full_pi_a , proof.pi_b ),
bn128.F12.mul(
bn128.pairing( proof.pi_h , vk_verifier.vk_z ),
bn128.pairing( proof.pi_c , G2.g )
)))
return false;
return true;
};

3
src/wtns.js Normal file

@ -0,0 +1,3 @@
export {default as calculate} from "./wtns_calculate.js";
export {default as debug} from "./wtns_debug.js";
export {default as exportJson} from "./wtns_export_json.js";

22
src/wtns_calculate.js Normal file

@ -0,0 +1,22 @@
import * as fastFile from "fastfile";
import circomRuntime from "circom_runtime";
import * as wtnsUtils from "./wtns_utils.js";
import * as binFileUtils from "./binfileutils.js";
const { WitnessCalculatorBuilder } = circomRuntime;
export default async function wtnsCalculate(input, wasmFileName, wtnsFileName, options) {
const fdWasm = await fastFile.readExisting(wasmFileName);
const wasm = await fdWasm.read(fdWasm.totalSize);
await fdWasm.close();
const wc = await WitnessCalculatorBuilder(wasm);
const w = await wc.calculateBinWitness(input);
const fdWtns = await binFileUtils.createBinFile(wtnsFileName, "wtns", 2, 2);
await wtnsUtils.writeBin(fdWtns, w, wc.prime);
await fdWtns.close();
}

51
src/wtns_debug.js Normal file

@ -0,0 +1,51 @@
import * as fastFile from "fastfile";
import circomRuntime from "circom_runtime";
import * as wtnsUtils from "./wtns_utils.js";
import * as binFileUtils from "./binfileutils.js";
import loadSyms from "./loadsyms.js";
const { WitnessCalculatorBuilder } = circomRuntime;
export default async function wtnsDebug(input, wasmFileName, wtnsFileName, symName, options, logger) {
const fdWasm = await fastFile.readExisting(wasmFileName);
const wasm = await fdWasm.read(fdWasm.totalSize);
await fdWasm.close();
let wcOps = {
sanityCheck: true
};
let sym;  // loaded lazily below, only if one of the debug options needs it
if (options.set) {
if (!sym) sym = await loadSyms(symName);
wcOps.logSetSignal= function(labelIdx, value) {
if (logger) logger.info("SET " + sym.labelIdx2Name[labelIdx] + " <-- " + value.toString());
};
}
if (options.get) {
if (!sym) sym = await loadSyms(symName);
wcOps.logGetSignal= function(varIdx, value) {
if (logger) logger.info("GET " + sym.labelIdx2Name[varIdx] + " --> " + value.toString());
};
}
if (options.trigger) {
if (!sym) sym = await loadSyms(symName);
wcOps.logStartComponent= function(cIdx) {
if (logger) logger.info("START: " + sym.componentIdx2Name[cIdx]);
};
wcOps.logFinishComponent= function(cIdx) {
if (logger) logger.info("FINISH: " + sym.componentIdx2Name[cIdx]);
};
}
const wc = await WitnessCalculatorBuilder(wasm, wcOps);
const w = await wc.calculateWitness(input);
const fdWtns = await binFileUtils.createBinFile(wtnsFileName, "wtns", 2, 2);
await wtnsUtils.write(fdWtns, w, wc.prime);
await fdWtns.close();
}

8
src/wtns_export_json.js Normal file

@ -0,0 +1,8 @@
import {read} from "./wtns_utils.js";
export default async function wtnsExportJson(wtnsFileName) {
const w = await read(wtnsFileName);
return w;
}


@ -1,11 +1,9 @@
const Scalar = require("ffjavascript").Scalar;
const assert = require("assert");
const binFileUtils = require("./binfileutils");
import { Scalar } from "ffjavascript";
import * as binFileUtils from "./binfileutils.js";
async function writeWtns(fileName, witness, prime) {
const fd = await binFileUtils.createOverride(fileName,"wtns", 2, 2);
export async function write(fd, witness, prime) {
await binFileUtils.startWriteSection(fd, 1);
const n8 = (Math.floor( (Scalar.bitLength(prime) - 1) / 64) +1)*8;
@ -20,21 +18,18 @@ async function writeWtns(fileName, witness, prime) {
}
await binFileUtils.endWriteSection(fd, 2);
await fd.close();
}
async function writeWtnsBin(fileName, witnessBin, prime) {
witnessBin = Buffer.from(witnessBin);
const fd = await binFileUtils.createBinFile(fileName, "wtns", 2, 2);
export async function writeBin(fd, witnessBin, prime) {
await binFileUtils.startWriteSection(fd, 1);
const n8 = (Math.floor( (Scalar.bitLength(prime) - 1) / 64) +1)*8;
await fd.writeULE32(n8);
await binFileUtils.writeBigInt(fd, prime, n8);
assert(witnessBin.length % n8 == 0);
if (witnessBin.byteLength % n8 != 0) {
throw new Error("Invalid witness length");
}
await fd.writeULE32(witnessBin.byteLength / n8);
await binFileUtils.endWriteSection(fd);
@ -43,10 +38,9 @@ async function writeWtnsBin(fileName, witnessBin, prime) {
await fd.write(witnessBin);
await binFileUtils.endWriteSection(fd);
await fd.close();
}
async function readWtnsHeader(fd, sections) {
export async function readHeader(fd, sections) {
await binFileUtils.startReadUniqueSection(fd, sections, 1);
const n8 = await fd.readULE32();
@ -58,11 +52,11 @@ async function readWtnsHeader(fd, sections) {
}
async function readWtns(fileName) {
export async function read(fileName) {
const {fd, sections} = await binFileUtils.readBinFile(fileName, "wtns", 2);
const {n8, nWitness} = await readWtnsHeader(fd, sections);
const {n8, nWitness} = await readHeader(fd, sections);
await binFileUtils.startReadUniqueSection(fd, sections, 2);
const res = [];
@ -77,7 +71,3 @@ async function readWtns(fileName) {
return res;
}
module.exports.read = readWtns;
module.exports.readHeader = readWtnsHeader;
module.exports.writeBin = writeWtnsBin;
module.exports.write = writeWtns;


@ -1,11 +1,10 @@
module.exports.new = require("./zkey_new.js");
module.exports.exportBellman = require("./zkey_export_bellman.js");
module.exports.importBellman = require("./zkey_import_bellman.js");
module.exports.verify = require("./zkey_verify.js");
module.exports.contribute = require("./zkey_contribute.js");
module.exports.beacon = require("./zkey_beacon.js");
module.exports.exportJson = require("./zkey_export_json.js");
module.exports.utils = require("./zkey_utils.js");
module.exports.challangeContribute = require("./zkey_challangecontribute.js");
module.exports.exportVerificationKey = require("./zkey_export_verificationkey.js");
export {default as newZKey} from "./zkey_new.js";
export {default as exportBellman} from "./zkey_export_bellman.js";
export {default as importBellman} from "./zkey_import_bellman.js";
export {default as verify} from "./zkey_verify.js";
export {default as contribute} from "./zkey_contribute.js";
export {default as beacon} from "./zkey_beacon.js";
export {default as exportJson} from "./zkey_export_json.js";
export {default as bellmanContribute} from "./zkey_bellman_contribute.js";
export {default as exportVerificationKey} from "./zkey_export_verificationkey.js";
export {default as exportSolidityVerifier} from "./zkey_export_solidityverifier.js";


@ -1,31 +1,31 @@
const binFileUtils = require("./binfileutils");
const zkeyUtils = require("./zkey_utils");
const getCurve = require("./curves").getCurveFromQ;
const misc = require("./misc");
const Blake2b = require("blake2b-wasm");
const utils = require("./zkey_utils");
const hashToG2 = require("./keypair").hashToG2;
const {applyKeyToSection} = require("./mpc_applykey");
import * as binFileUtils from "./binfileutils.js";
import * as zkeyUtils from "./zkey_utils.js";
import { getCurveFromQ as getCurve } from "./curves.js";
import * as misc from "./misc.js";
import Blake2b from "blake2b-wasm";
import * as utils from "./zkey_utils.js";
import { hashToG2 } from "./keypair.js";
import { applyKeyToSection } from "./mpc_applykey.js";
module.exports = async function beacon(zkeyNameOld, zkeyNameNew, name, numIterationsExp, beaconHashStr, verbose) {
export default async function beacon(zkeyNameOld, zkeyNameNew, name, beaconHashStr, numIterationsExp, logger) {
await Blake2b.ready();
const beaconHash = misc.hex2ByteArray(beaconHashStr);
if ( (beaconHash.byteLength == 0)
|| (beaconHash.byteLength*2 !=beaconHashStr.length))
{
console.log("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
if (logger) logger.error("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
return false;
}
if (beaconHash.length>=256) {
console.log("Maximum lenght of beacon hash is 255 bytes");
if (logger) logger.error("Maximum lenght of beacon hash is 255 bytes");
return false;
}
numIterationsExp = parseInt(numIterationsExp);
if ((numIterationsExp<10)||(numIterationsExp>63)) {
console.log("Invalid numIterationsExp. (Must be between 10 and 63)");
if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)");
return false;
}
@ -89,8 +89,8 @@ module.exports = async function beacon(zkeyNameOld, zkeyNameNew, name, numIterat
await binFileUtils.copySection(fdOld, sections, fdNew, 7);
const invDelta = curve.Fr.inv(curContribution.delta.prvKey);
await applyKeyToSection(fdOld, sections, fdNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", verbose);
await applyKeyToSection(fdOld, sections, fdNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", verbose);
await applyKeyToSection(fdOld, sections, fdNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", logger);
await applyKeyToSection(fdOld, sections, fdNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", logger);
await zkeyUtils.writeMPCParams(fdNew, curve, mpcParams);
@ -102,8 +102,7 @@ module.exports = async function beacon(zkeyNameOld, zkeyNameNew, name, numIterat
const contributionHash = contributionHasher.digest();
console.log("Contribution Hash: ");
console.log(misc.formatHash(contributionHash));
if (logger) logger.info(misc.formatHash(contributionHash, "Contribution Hash: "));
return true;
};
return contributionHash;
}


@ -16,15 +16,15 @@
// G2*tp*alpha (compressed)
// G2*up*beta (compressed)
const fastFile = require("fastfile");
const Blake2b = require("blake2b-wasm");
const utils = require("./zkey_utils");
const misc = require("./misc");
const { applyKeyToChallangeSection } = require("./mpc_applykey");
const {hashPubKey} = require("./zkey_utils");
const hashToG2 = require("./keypair").hashToG2;
import * as fastFile from "fastfile";
import Blake2b from "blake2b-wasm";
import * as utils from "./zkey_utils.js";
import * as misc from "./misc.js";
import { applyKeyToChallangeSection } from "./mpc_applykey.js";
import { hashPubKey } from "./zkey_utils.js";
import { hashToG2 } from "./keypair.js";
async function challangeContribute(curve, challangeFilename, responesFileName, entropy, verbose) {
export default async function bellmanContribute(curve, challangeFilename, responesFileName, entropy, logger) {
await Blake2b.ready();
const rng = await misc.getRandomRng(entropy);
@ -58,12 +58,12 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
// H
const nH = await fdFrom.readUBE32();
await fdTo.writeUBE32(nH);
await applyKeyToChallangeSection(fdFrom, fdTo, null, curve, "G1", nH, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "H", verbose);
await applyKeyToChallangeSection(fdFrom, fdTo, null, curve, "G1", nH, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "H", logger);
// L
const nL = await fdFrom.readUBE32();
await fdTo.writeUBE32(nL);
await applyKeyToChallangeSection(fdFrom, fdTo, null, curve, "G1", nL, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "L", verbose);
await applyKeyToChallangeSection(fdFrom, fdTo, null, curve, "G1", nL, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "L", logger);
// A
const nA = await fdFrom.readUBE32();
@ -138,12 +138,15 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
const contributionHasher = Blake2b(64);
hashPubKey(contributionHasher, curve, curContribution);
console.log("Contribution Hash: ");
console.log(misc.formatHash(contributionHasher.digest()));
const contributionHash = contributionHasher.digest();
if (logger) logger.info(misc.formatHash(contributionHash, "Contribution Hash: "));
await fdTo.close();
await fdFrom.close();
return contributionHash;
async function copy(nBytes) {
const CHUNK_SIZE = fdFrom.pageSize*2;
for (let i=0; i<nBytes; i+= CHUNK_SIZE) {
@ -177,5 +180,3 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
}
module.exports = challangeContribute;


@ -1,14 +1,14 @@
const binFileUtils = require("./binfileutils");
const zkeyUtils = require("./zkey_utils");
const getCurve = require("./curves").getCurveFromQ;
const misc = require("./misc");
const Blake2b = require("blake2b-wasm");
const utils = require("./zkey_utils");
const hashToG2 = require("./keypair").hashToG2;
const {applyKeyToSection} = require("./mpc_applykey");
import * as binFileUtils from "./binfileutils.js";
import * as zkeyUtils from "./zkey_utils.js";
import { getCurveFromQ as getCurve } from "./curves.js";
import * as misc from "./misc.js";
import Blake2b from "blake2b-wasm";
import * as utils from "./zkey_utils.js";
import { hashToG2 } from "./keypair.js";
import { applyKeyToSection } from "./mpc_applykey.js";
module.exports = async function phase2contribute(zkeyNameOld, zkeyNameNew, name, entropy, verbose) {
export default async function phase2contribute(zkeyNameOld, zkeyNameNew, name, entropy, logger) {
await Blake2b.ready();
const {fd: fdOld, sections: sections} = await binFileUtils.readBinFile(zkeyNameOld, "zkey", 2);
@ -68,8 +68,8 @@ module.exports = async function phase2contribute(zkeyNameOld, zkeyNameNew, name
await binFileUtils.copySection(fdOld, sections, fdNew, 7);
const invDelta = curve.Fr.inv(curContribution.delta.prvKey);
await applyKeyToSection(fdOld, sections, fdNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", verbose);
await applyKeyToSection(fdOld, sections, fdNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", verbose);
await applyKeyToSection(fdOld, sections, fdNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", logger);
await applyKeyToSection(fdOld, sections, fdNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", logger);
await zkeyUtils.writeMPCParams(fdNew, curve, mpcParams);
@ -81,8 +81,7 @@ module.exports = async function phase2contribute(zkeyNameOld, zkeyNameNew, name
const contributionHash = contributionHasher.digest();
console.log("Contribution Hash: ");
console.log(misc.formatHash(contributionHash));
if (logger) logger.info(misc.formatHash(contributionHash, "Contribution Hash: "));
return true;
};
return contributionHash;
}


@ -1,10 +1,10 @@
const binFileUtils = require("./binfileutils");
const zkeyUtils = require("./zkey_utils");
const fastFile = require("fastfile");
const getCurve = require("./curves").getCurveFromQ;
import * as binFileUtils from "./binfileutils.js";
import * as zkeyUtils from "./zkey_utils.js";
import * as fastFile from "fastfile";
import { getCurveFromQ as getCurve } from "./curves.js";
module.exports = async function phase2exportMPCParams(zkeyName, mpcparamsName, verbose) {
export default async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils.readBinFile(zkeyName, "zkey", 2);
const zkey = await zkeyUtils.readHeader(fdZKey, sectionsZKey, "groth16");
@ -40,8 +40,8 @@ module.exports = async function phase2exportMPCParams(zkeyName, mpcparamsName,
const buffBasesH_Lodd = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 9);
let buffBasesH_Tau;
buffBasesH_Tau = await curve.G1.fft(buffBasesH_Lodd, "affine", "jacobian", verbose ? console.log : undefined);
buffBasesH_Tau = await curve.G1.batchApplyKey(buffBasesH_Tau, curve.Fr.neg(curve.Fr.e(2)), curve.Fr.w[zkey.power+1], "jacobian", "affine", verbose ? console.log : undefined);
buffBasesH_Tau = await curve.G1.fft(buffBasesH_Lodd, "affine", "jacobian", logger);
buffBasesH_Tau = await curve.G1.batchApplyKey(buffBasesH_Tau, curve.Fr.neg(curve.Fr.e(2)), curve.Fr.w[zkey.power+1], "jacobian", "affine", logger);
// Remove last element. (The degree of H will always be m-2)
buffBasesH_Tau = buffBasesH_Tau.slice(0, buffBasesH_Tau.byteLength - sG1);


@ -1,12 +1,8 @@
const {stringifyBigInts} = require("ffjavascript").utils;
const fs = require("fs");
const readZKey = require("./zkey_utils").read;
module.exports = zkeyExportJson;
import { readZKey } from "./zkey_utils.js";
async function zkeyExportJson(zkeyFileName, jsonFileName, verbose) {
export default async function zkeyExportJson(zkeyFileName, verbose) {
const zKey = await readZKey(zkeyFileName);
const S = JSON.stringify(stringifyBigInts(zKey), null, 1);
await fs.promises.writeFile(jsonFileName, S);
return zKey;
}


@ -0,0 +1,56 @@
import path from "path";
const moduleURL = new URL(import.meta.url);
const __dirname = path.dirname(moduleURL.pathname);
import * as fastFile from "fastfile";
import exportVerificationKey from "./zkey_export_verificationkey.js";
// Not ready yet
// module.exports.generateVerifier_kimleeoh = generateVerifier_kimleeoh;
export default async function exportSolidityVerifier(zKeyName, templateName, logger) {
const verificationKey = await exportVerificationKey(zKeyName, logger);
const fd = await fastFile.readExisting(templateName);
const buff = await fd.read(fd.totalSize);
let template = new TextDecoder("utf-8").decode(buff);
const vkalpha1_str = `${verificationKey.vk_alpha_1[0].toString()},`+
`${verificationKey.vk_alpha_1[1].toString()}`;
template = template.replace("<%vk_alpha1%>", vkalpha1_str);
const vkbeta2_str = `[${verificationKey.vk_beta_2[0][1].toString()},`+
`${verificationKey.vk_beta_2[0][0].toString()}], `+
`[${verificationKey.vk_beta_2[1][1].toString()},` +
`${verificationKey.vk_beta_2[1][0].toString()}]`;
template = template.replace("<%vk_beta2%>", vkbeta2_str);
const vkgamma2_str = `[${verificationKey.vk_gamma_2[0][1].toString()},`+
`${verificationKey.vk_gamma_2[0][0].toString()}], `+
`[${verificationKey.vk_gamma_2[1][1].toString()},` +
`${verificationKey.vk_gamma_2[1][0].toString()}]`;
template = template.replace("<%vk_gamma2%>", vkgamma2_str);
const vkdelta2_str = `[${verificationKey.vk_delta_2[0][1].toString()},`+
`${verificationKey.vk_delta_2[0][0].toString()}], `+
`[${verificationKey.vk_delta_2[1][1].toString()},` +
`${verificationKey.vk_delta_2[1][0].toString()}]`;
template = template.replace("<%vk_delta2%>", vkdelta2_str);
// The points
template = template.replace("<%vk_input_length%>", (verificationKey.IC.length-1).toString());
template = template.replace("<%vk_ic_length%>", verificationKey.IC.length.toString());
let vi = "";
for (let i=0; i<verificationKey.IC.length; i++) {
if (vi != "") vi = vi + " ";
vi = vi + `vk.IC[${i}] = Pairing.G1Point(${verificationKey.IC[i][0].toString()},`+
`${verificationKey.IC[i][1].toString()});\n`;
}
template = template.replace("<%vk_ic_pts%>", vi);
return template;
}


@ -1,10 +1,11 @@
const binFileUtils = require("./binfileutils");
const zkeyUtils = require("./zkey_utils");
const getCurve = require("./curves").getCurveFromQ;
const {stringifyBigInts} = require("ffjavascript").utils;
const fs = require("fs");
import * as binFileUtils from "./binfileutils.js";
import * as zkeyUtils from "./zkey_utils.js";
import { getCurveFromQ as getCurve } from "./curves.js";
import { utils } from "ffjavascript";
const {stringifyBigInts} = utils;
module.exports = async function zkeyExportVerificationKey(zkeyName, verificationKeyName) {
export default async function zkeyExportVerificationKey(zkeyName, logger) {
const {fd, sections} = await binFileUtils.readBinFile(zkeyName, "zkey", 2);
const zkey = await zkeyUtils.readHeader(fd, sections, "groth16");
@ -14,7 +15,7 @@ module.exports = async function zkeyExportVerificationKey(zkeyName, verificatio
const alphaBeta = await curve.pairing( zkey.vk_alpha_1 , zkey.vk_beta_2 );
const vKey = {
let vKey = {
protocol: zkey.protocol,
curve: curve.name,
nPublic: zkey.nPublic,
@ -39,5 +40,9 @@ module.exports = async function zkeyExportVerificationKey(zkeyName, verificatio
}
await binFileUtils.endReadSection(fd);
await fs.promises.writeFile(verificationKeyName, JSON.stringify(stringifyBigInts(vKey), null, 1), "utf-8");
};
vKey = stringifyBigInts(vKey);
await fd.close();
return vKey;
}


@ -1,10 +1,10 @@
const zkeyUtils = require("./zkey_utils");
const binFileUtils = require("./binfileutils");
const fastFile = require("fastfile");
const getCurve = require("./curves").getCurveFromQ;
const misc = require("./misc");
import * as zkeyUtils from "./zkey_utils.js";
import * as binFileUtils from "./binfileutils.js";
import * as fastFile from "fastfile";
import { getCurveFromQ as getCurve } from "./curves.js";
import * as misc from "./misc.js";
module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, verbose) {
export default async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, name, logger) {
const {fd: fdZKeyOld, sections: sectionsZKeyOld} = await binFileUtils.readBinFile(zkeyNameOld, "zkey", 2);
const zkeyHeader = await zkeyUtils.readHeader(fdZKeyOld, sectionsZKeyOld, "groth16");
@ -52,23 +52,30 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
}
if (!misc.hashIsEqual(newMPCParams.csHash, oldMPCParams.csHash)) {
console.log("Hash of the original circuit does not match with the MPC one");
if (logger) logger.error("Hash of the original circuit does not match with the MPC one");
return false;
}
if (oldMPCParams.contributions.length > newMPCParams.contributions.length) {
console.log("The impoerted file does not include new contributions");
if (logger) logger.error("The impoerted file does not include new contributions");
return false;
}
for (let i=0; i<oldMPCParams.contributions.length; i++) {
if (!contributionIsEqual(oldMPCParams.contributions[i], newMPCParams.contributions[i])) {
console.log(`Previous contribution ${i} does not match`);
if (logger) logger.error(`Previous contribution ${i} does not match`);
return false;
}
}
// Set the same name on all new contributions
if (name) {
for (let i=oldMPCParams.contributions.length; i<newMPCParams.contributions.length; i++) {
newMPCParams.contributions[i].name = name;
}
}
const fdZKeyNew = await binFileUtils.createBinFile(zkeyNameNew, "zkey", 1, 10);
fdMPCParams.pos = 0;
@ -84,7 +91,7 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
// IC (Keep original)
const nIC = await fdMPCParams.readUBE32();
if (nIC != zkeyHeader.nPublic +1) {
console.log("Invalid number of points in IC");
if (logger) logger.error("Invalid number of points in IC");
await fdZKeyNew.discard();
return false;
}
@ -97,7 +104,7 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
// H Section
const nH = await fdMPCParams.readUBE32();
if (nH != zkeyHeader.domainSize-1) {
console.log("Invalid number of points in H");
if (logger) logger.error("Invalid number of points in H");
await fdZKeyNew.discard();
return false;
}
@ -108,8 +115,8 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
buffH.set(buffTauLEM); // Leave the last element at zero.
const n2Inv = curve.Fr.neg(curve.Fr.inv(curve.Fr.e(2)));
const wInv = curve.Fr.inv(curve.Fr.w[zkeyHeader.power+1]);
buffH = await curve.G1.batchApplyKey(buffH, n2Inv, wInv, "affine", "jacobian", verbose ? console.log : undefined);
buffH = await curve.G1.ifft(buffH, "jacobian", "affine", verbose ? console.log : undefined);
buffH = await curve.G1.batchApplyKey(buffH, n2Inv, wInv, "affine", "jacobian", logger);
buffH = await curve.G1.ifft(buffH, "jacobian", "affine", logger);
await binFileUtils.startWriteSection(fdZKeyNew, 9);
await fdZKeyNew.write(buffH);
await binFileUtils.endWriteSection(fdZKeyNew);
@ -117,7 +124,7 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
// C Section (L section)
const nL = await fdMPCParams.readUBE32();
if (nL != (zkeyHeader.nVars-zkeyHeader.nPublic-1)) {
console.log("Invalid number of points in L");
if (logger) logger.error("Invalid number of points in L");
await fdZKeyNew.discard();
return false;
}
@ -131,7 +138,7 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
// A Section
const nA = await fdMPCParams.readUBE32();
if (nA != zkeyHeader.nVars) {
console.log("Invalid number of points in A");
if (logger) logger.error("Invalid number of points in A");
await fdZKeyNew.discard();
return false;
}
@ -141,7 +148,7 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
// B1 Section
const nB1 = await fdMPCParams.readUBE32();
if (nB1 != zkeyHeader.nVars) {
console.log("Invalid number of points in B1");
if (logger) logger.error("Invalid number of points in B1");
await fdZKeyNew.discard();
return false;
}
@ -151,7 +158,7 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
// B2 Section
const nB2 = await fdMPCParams.readUBE32();
if (nB2 != zkeyHeader.nVars) {
console.log("Invalid number of points in B2");
if (logger) logger.error("Invalid number of points in B2");
await fdZKeyNew.discard();
return false;
}
@ -187,5 +194,5 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
}
};
}


@ -1,20 +1,18 @@
const r1csFile = require("r1csfile");
const utils = require("./powersoftau_utils");
const binFileUtils = require("./binfileutils");
const assert = require("assert");
const {log2} = require("./misc");
const Scalar = require("ffjavascript").Scalar;
const Blake2b = require("blake2b-wasm");
const misc = require("./misc");
import {loadHeader as loadR1csHeader} from "r1csfile";
import * as utils from "./powersoftau_utils.js";
import * as binFileUtils from "./binfileutils.js";
import { log2, formatHash } from "./misc.js";
import { Scalar } from "ffjavascript";
import Blake2b from "blake2b-wasm";
module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose) {
export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
await Blake2b.ready();
const csHasher = Blake2b(64);
const {fd: fdR1cs, sections: sectionsR1cs} = await binFileUtils.readBinFile(r1csName, "r1cs", 1);
const r1cs = await r1csFile.loadHeader(fdR1cs, sectionsR1cs);
const r1cs = await loadR1csHeader(fdR1cs, sectionsR1cs);
const {fd: fdPTau, sections: sectionsPTau} = await binFileUtils.readBinFile(ptauName, "ptau", 1);
const {curve, power} = await utils.readPTauHeader(fdPTau, sectionsPTau);
@ -25,19 +23,19 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
const sG2 = curve.G2.F.n8*2;
if (r1cs.prime != curve.r) {
console.log("r1cs curve does not match powers of tau ceremony curve");
if (logger) logger.error("r1cs curve does not match powers of tau ceremony curve");
return -1;
}
const cirPower = log2(r1cs.nConstraints + r1cs.nPubInputs + r1cs.nOutputs +1 -1) +1;
if (cirPower > power) {
console.log(`circuit too big for this power of tau ceremony. ${r1cs.nConstraints} > 2**${power}`);
if (logger) logger.error(`circuit too big for this power of tau ceremony. ${r1cs.nConstraints} > 2**${power}`);
return -1;
}
if (!sectionsPTau[12]) {
console.log("Powers of tau is not prepared.");
if (logger) logger.error("Powers of tau is not prepared.");
return -1;
}
@ -124,7 +122,7 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
let nCoefs = 0;
fdZKey.pos += 4;
for (let c=0; c<r1cs.nConstraints; c++) {
if (verbose && (c%1000 == 0) && (c >0)) console.log(`${c}/${r1cs.nConstraints}`);
if ((logger)&(c%10000 == 0)) logger.debug(`processing constraints: ${c}/${r1cs.nConstraints}`);
const nA = await fdR1cs.readULE32();
for (let i=0; i<nA; i++) {
const s = await fdR1cs.readULE32();
@ -248,15 +246,14 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
await fdZKey.writeULE32(0);
await binFileUtils.endWriteSection(fdZKey);
console.log("Circuit hash: ");
console.log(misc.formatHash(csHash));
if (logger) logger.info(formatHash(csHash, "Circuit hash: "));
await fdZKey.close();
await fdPTau.close();
await fdR1cs.close();
return 0;
return csHash;
async function writeFr2(buff) {
const n = curve.Fr.fromRprLE(buff, 0);
@ -274,7 +271,7 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
await binFileUtils.startWriteSection(fdZKey, idSection);
for (let i=0; i<arr.length; i+= CHUNK_SIZE) {
if (verbose) console.log(`${sectionName}: ${i}/${arr.length}`);
if (logger) logger.debug(`Writing points ${sectionName}: ${i}/${arr.length}`);
const n = Math.min(arr.length -i, CHUNK_SIZE);
const subArr = arr.slice(i, i + n);
await composeAndWritePointsChunk(groupName, subArr);
@ -326,7 +323,7 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
fnBatchToAffine = "g2m_batchToAffine";
fnZero = "g2m_zero";
} else {
assert(false);
throw new Error("Invalid group");
}
let acc =0;
for (let i=0; i<arr.length; i++) acc += arr[i] ? arr[i].length : 0;
@ -393,14 +390,14 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
async function hashHPoints() {
const CHUNK_SIZE = 1<<16;
const CHUNK_SIZE = 1<<14;
hashU32(domainSize-1);
for (let i=0; i<domainSize-1; i+= CHUNK_SIZE) {
if (verbose) console.log(`HashingHPoints: ${i}/${domainSize}`);
if (logger) logger.debug(`HashingHPoints: ${i}/${domainSize}`);
const n = Math.min(domainSize-1-i, CHUNK_SIZE);
await hashHPointsChunk(i*CHUNK_SIZE, n);
await hashHPointsChunk(i, n);
}
}


@ -26,16 +26,13 @@
// PointsH(9)
// Contributions(10)
import { Scalar, F1Field } from "ffjavascript";
import * as binFileUtils from "./binfileutils.js";
const Scalar = require("ffjavascript").Scalar;
const F1Field = require("ffjavascript").F1Field;
const assert = require("assert");
const binFileUtils = require("./binfileutils");
import { getCurveFromQ as getCurve } from "./curves.js";
import { log2 } from "./misc.js";
const getCurve = require("./curves").getCurveFromQ;
const {log2} = require("./misc");
async function writeHeader(fd, zkey) {
export async function writeHeader(fd, zkey) {
// Write the header
///////////
@ -74,7 +71,7 @@ async function writeHeader(fd, zkey) {
}
async function writeZKey(fileName, zkey) {
export async function writeZKey(fileName, zkey) {
let curve = getCurve(zkey.q);
@ -187,7 +184,7 @@ async function readG2(fd, curve) {
async function readHeader(fd, sections, protocol) {
export async function readHeader(fd, sections, protocol) {
if (protocol != "groth16") throw new Error("Protocol not supported: "+protocol);
const zkey = {};
@ -196,7 +193,7 @@ async function readHeader(fd, sections, protocol) {
/////////////////////
await binFileUtils.startReadUniqueSection(fd, sections, 1);
const protocolId = await fd.readULE32();
if (protocolId != 1) assert("File is not groth");
if (protocolId != 1) throw new Error("File is not groth");
zkey.protocol = "groth16";
await binFileUtils.endReadSection(fd);
@ -229,7 +226,7 @@ async function readHeader(fd, sections, protocol) {
}
async function readZKey(fileName) {
export async function readZKey(fileName) {
const {fd, sections} = await binFileUtils.readBinFile(fileName, "zkey", 1);
const zkey = await readHeader(fd, sections, "groth16");
@ -377,7 +374,7 @@ async function readContribution(fd, curve) {
}
async function readMPCParams(fd, curve, sections) {
export async function readMPCParams(fd, curve, sections) {
await binFileUtils.startReadUniqueSection(fd, sections, 10);
const res = { contributions: []};
res.csHash = await fd.read(64);
@ -424,7 +421,7 @@ async function writeContribution(fd, curve, c) {
}
async function writeMPCParams(fd, curve, mpcParams) {
export async function writeMPCParams(fd, curve, mpcParams) {
await binFileUtils.startWriteSection(fd, 10);
await fd.write(mpcParams.csHash);
await fd.writeULE32(mpcParams.contributions.length);
@ -434,19 +431,19 @@ async function writeMPCParams(fd, curve, mpcParams) {
await binFileUtils.endWriteSection(fd);
}
function hashG1(hasher, curve, p) {
export function hashG1(hasher, curve, p) {
const buff = new Uint8Array(curve.G1.F.n8*2);
curve.G1.toRprUncompressed(buff, 0, p);
hasher.update(buff);
}
function hashG2(hasher,curve, p) {
export function hashG2(hasher,curve, p) {
const buff = new Uint8Array(curve.G2.F.n8*2);
curve.G2.toRprUncompressed(buff, 0, p);
hasher.update(buff);
}
function hashPubKey(hasher, curve, c) {
export function hashPubKey(hasher, curve, c) {
hashG1(hasher, curve, c.deltaAfter);
hashG1(hasher, curve, c.delta.g1_s);
hashG1(hasher, curve, c.delta.g1_sx);
@ -454,13 +451,3 @@ function hashPubKey(hasher, curve, c) {
hasher.update(c.transcript);
}
module.exports.readHeader = readHeader;
module.exports.writeHeader = writeHeader;
module.exports.read = readZKey;
module.exports.write = writeZKey;
module.exports.readMPCParams = readMPCParams;
module.exports.writeMPCParams = writeMPCParams;
module.exports.hashG1 = hashG1;
module.exports.hashG2 = hashG2;
module.exports.hashPubKey = hashPubKey;


@ -1,18 +1,18 @@
const binFileUtils = require("./binfileutils");
const zkeyUtils = require("./zkey_utils");
const getCurve = require("./curves").getCurveFromQ;
const Blake2b = require("blake2b-wasm");
const misc = require("./misc");
const Scalar = require("ffjavascript").Scalar;
const hashToG2 = require("./keypair").hashToG2;
import * as binFileUtils from "./binfileutils.js";
import * as zkeyUtils from "./zkey_utils.js";
import { getCurveFromQ as getCurve } from "./curves.js";
import Blake2b from "blake2b-wasm";
import * as misc from "./misc.js";
import { hashToG2 } from "./keypair.js";
const sameRatio = misc.sameRatio;
const crypto = require("crypto");
const ChaCha = require("ffjavascript").ChaCha;
const newZKey = require("./zkey_new");
const {hashG1, hashPubKey} = require("./zkey_utils");
import crypto from "crypto";
import newZKey from "./zkey_new.js";
import {hashG1, hashPubKey} from "./zkey_utils.js";
import { Scalar, ChaCha } from "ffjavascript";
module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFileName, verbose) {
export default async function phase2verify(r1csFileName, pTauFileName, zkeyFileName, logger) {
let sr;
await Blake2b.ready();
@ -93,7 +93,7 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
||(zkeyInit.n8q != zkey.n8q)
||(zkeyInit.n8r != zkey.n8r))
{
console.log("INVALID: Different curves");
if (logger) logger.error("INVALID: Different curves");
return false;
}
@ -101,110 +101,115 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
||(zkeyInit.nPublic != zkey.nPublic)
||(zkeyInit.domainSize != zkey.domainSize))
{
console.log("INVALID: Different circuit parameters");
if (logger) logger.error("INVALID: Different circuit parameters");
return false;
}
if (!curve.G1.eq(zkey.vk_alpha_1, zkeyInit.vk_alpha_1)) {
console.log("INVALID: Invalid alpha1");
if (logger) logger.error("INVALID: Invalid alpha1");
return false;
}
if (!curve.G1.eq(zkey.vk_beta_1, zkeyInit.vk_beta_1)) {
console.log("INVALID: Invalid beta1");
if (logger) logger.error("INVALID: Invalid beta1");
return false;
}
if (!curve.G2.eq(zkey.vk_beta_2, zkeyInit.vk_beta_2)) {
console.log("INVALID: Invalid beta2");
if (logger) logger.error("INVALID: Invalid beta2");
return false;
}
if (!curve.G2.eq(zkey.vk_gamma_2, zkeyInit.vk_gamma_2)) {
console.log("INVALID: Invalid gamma2");
if (logger) logger.error("INVALID: Invalid gamma2");
return false;
}
if (!curve.G1.eq(zkey.vk_delta_1, curDelta)) {
console.log("INVALID: Invalud delta1");
if (logger) logger.error("INVALID: Invalud delta1");
return false;
}
sr = await sameRatio(curve, curve.G1.g, curDelta, curve.G2.g, zkey.vk_delta_2);
if (sr !== true) {
console.log("INVALID: Invalud delta2");
if (logger) logger.error("INVALID: Invalud delta2");
return false;
}
const mpcParamsInit = await zkeyUtils.readMPCParams(fdInit, curve, sectionsInit);
if (!misc.hashIsEqual(mpcParams.csHash, mpcParamsInit.csHash)) {
console.log("INVALID: Circuit does not match");
if (logger) logger.error("INVALID: Circuit does not match");
return false;
}
// Check sizes of sections
if (sections[8][0].size != sG1*(zkey.nVars-zkey.nPublic-1)) {
console.log("INVALID: Invalid L section size");
if (logger) logger.error("INVALID: Invalid L section size");
return false;
}
if (sections[9][0].size != sG1*(zkey.domainSize)) {
console.log("INVALID: Invalid H section size");
if (logger) logger.error("INVALID: Invalid H section size");
return false;
}
let ss;
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 3);
if (!ss) {
console.log("INVALID: IC section is not identical");
if (logger) logger.error("INVALID: IC section is not identical");
return false;
}
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 4);
if (!ss) {
console.log("Coeffs section is not identical");
if (logger) logger.error("Coeffs section is not identical");
return false;
}
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 5);
if (!ss) {
console.log("A section is not identical");
if (logger) logger.error("A section is not identical");
return false;
}
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 6);
if (!ss) {
console.log("B1 section is not identical");
if (logger) logger.error("B1 section is not identical");
return false;
}
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 7);
if (!ss) {
console.log("B2 section is not identical");
if (logger) logger.error("B2 section is not identical");
return false;
}
// Check L
sr = await sectionHasSameRatio("G1", fdInit, sectionsInit, fd, sections, 8, zkey.vk_delta_2, zkeyInit.vk_delta_2, "L section");
if (sr!==true) {
console.log("L section does not match");
if (logger) logger.error("L section does not match");
return false;
}
// Check H
sr = await sameRatioH();
if (sr!==true) {
console.log("H section does not match");
if (logger) logger.error("H section does not match");
return false;
}
if (logger) logger.info(misc.formatHash(mpcParams.csHash, "Circuit Hash: "));
await fd.close();
await fdInit.close();
for (let i=mpcParams.contributions.length-1; i>=0; i--) {
const c = mpcParams.contributions[i];
console.log("-------------------------");
console.log(`contribution #${i+1}${c.name ? c.name : ""}:`);
console.log(misc.formatHash(c.contributionHash));
if (logger) logger.info("-------------------------");
if (logger) logger.info(misc.formatHash(c.contributionHash, `contribution #${i+1} ${c.name ? c.name : ""}:`));
if (c.type == 1) {
console.log(`Beacon generator: ${misc.byteArray2hex(c.beaconHash)}`);
console.log(`Beacon iterations Exp: ${c.numIterationsExp}`);
if (logger) logger.info(`Beacon generator: ${misc.byteArray2hex(c.beaconHash)}`);
if (logger) logger.info(`Beacon iterations Exp: ${c.numIterationsExp}`);
}
}
console.log("-------------------------");
if (logger) logger.info("-------------------------");
if (logger) logger.info("ZKey Ok!");
return true;
@ -222,7 +227,7 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
const nPoints = sections1[idSection][0].size / sG;
for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log(`${sectionName}: ` + i);
if (logger) logger.debug(`Same ratio check ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
const bases1 = await fd1.read(n*sG);
const bases2 = await fd2.read(n*sG);
@ -267,7 +272,7 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
let R1 = G.zero;
for (let i=0; i<zkey.domainSize; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log(`H Verificaition(tau): ${i}/${zkey.domainSize}`);
if (logger) logger.debug(`H Verificaition(tau): ${i}/${zkey.domainSize}`);
const n = Math.min(zkey.domainSize - i, MAX_CHUNK_SIZE);
const buff1 = await fdPTau.read(sG*n, sectionsPTau[2][0].p + zkey.domainSize*sG + i*MAX_CHUNK_SIZE*sG);
@ -295,7 +300,7 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
await binFileUtils.startReadUniqueSection(fd, sections, 9);
let R2 = G.zero;
for (let i=0; i<zkey.domainSize; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log(`H Verificaition(lagrange): ${i}/${zkey.domainSize}`);
if (logger) logger.debug(`H Verificaition(lagrange): ${i}/${zkey.domainSize}`);
const n = Math.min(zkey.domainSize - i, MAX_CHUNK_SIZE);
const buff = await fd.read(sG*n);
@ -309,6 +314,7 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
sr = await sameRatio(curve, R1, R2, zkey.vk_delta_2, zkeyInit.vk_delta_2);
if (sr !== true) return false;
return true;
}
@ -378,5 +384,5 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
return res;
}
};
}


@ -1,8 +0,0 @@
module.exports = {
groth16: {
prover: module.require("./zksnark_groth16_prover"),
verifier: module.require("./zksnark_groth16_verifier")
}
};


@ -5,11 +5,13 @@
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
// 2019 OKIMS
// ported to solidity 0.5
// ported to solidity 0.6
// fixed linter warnings
// added require error messages
//
pragma solidity ^0.5.0;
//
// SPDX-License-Identifier: GPL-3.0
pragma solidity ^0.6.11;
library Pairing {
struct G1Point {
uint X;
@@ -44,15 +46,15 @@ library Pairing {
);
*/
}
/// @return the negation of p, i.e. p.addition(p.negate()) should be zero.
function negate(G1Point memory p) internal pure returns (G1Point memory) {
/// @return r the negation of p, i.e. p.addition(p.negate()) should be zero.
function negate(G1Point memory p) internal pure returns (G1Point memory r) {
// The prime q in the base field F_q for G1
uint q = 21888242871839275222246405745257275088696311157297823662689037894645226208583;
if (p.X == 0 && p.Y == 0)
return G1Point(0, 0);
return G1Point(p.X, q - (p.Y % q));
}
/// @return the sum of two points of G1
/// @return r the sum of two points of G1
function addition(G1Point memory p1, G1Point memory p2) internal view returns (G1Point memory r) {
uint[4] memory input;
input[0] = p1.X;
@@ -62,13 +64,13 @@ library Pairing {
bool success;
// solium-disable-next-line security/no-inline-assembly
assembly {
success := staticcall(sub(gas, 2000), 6, input, 0xc0, r, 0x60)
success := staticcall(sub(gas(), 2000), 6, input, 0xc0, r, 0x60)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require(success,"pairing-add-failed");
}
/// @return the product of a point on G1 and a scalar, i.e.
/// @return r the product of a point on G1 and a scalar, i.e.
/// p == p.scalar_mul(1) and p.addition(p) == p.scalar_mul(2) for all points p.
function scalar_mul(G1Point memory p, uint s) internal view returns (G1Point memory r) {
uint[3] memory input;
@@ -78,7 +80,7 @@ library Pairing {
bool success;
// solium-disable-next-line security/no-inline-assembly
assembly {
success := staticcall(sub(gas, 2000), 7, input, 0x80, r, 0x60)
success := staticcall(sub(gas(), 2000), 7, input, 0x80, r, 0x60)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
@@ -106,7 +108,7 @@ library Pairing {
bool success;
// solium-disable-next-line security/no-inline-assembly
assembly {
success := staticcall(sub(gas, 2000), 8, add(input, 0x20), mul(inputSize, 0x20), out, 0x20)
success := staticcall(sub(gas(), 2000), 8, add(input, 0x20), mul(inputSize, 0x20), out, 0x20)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
@@ -174,7 +176,7 @@ contract Verifier {
Pairing.G1Point C;
}
function verifyingKey() internal pure returns (VerifyingKey memory vk) {
vk.alfa1 = Pairing.G1Point(<%vk_alfa1%>);
vk.alfa1 = Pairing.G1Point(<%vk_alpha1%>);
vk.beta2 = Pairing.G2Point(<%vk_beta2%>);
vk.gamma2 = Pairing.G2Point(<%vk_gamma2%>);
vk.delta2 = Pairing.G2Point(<%vk_delta2%>);
@@ -200,6 +202,7 @@ contract Verifier {
)) return 1;
return 0;
}
/// @return r bool true if proof is valid
function verifyProof(
uint[2] memory a,
uint[2][2] memory b,
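The `<%...%>` markers in this template are placeholders that get substituted with the circuit's verification-key points when the Solidity verifier is exported. A rough sketch of that substitution convention (the `render` helper and the value map are illustrative, not the library's actual export code):

```js
// Hypothetical sketch of filling the Solidity template's <%name%> slots.
function render(template, values) {
    return template.replace(/<%([A-Za-z0-9_]+)%>/g, (match, name) => {
        if (!(name in values)) throw new Error(`missing template value: ${name}`);
        return values[name];
    });
}

// e.g. render(solidityTemplate, { vk_alpha1: "...", vk_beta2: "...", vk_input_length: "1" });
```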

View File

@@ -1,214 +0,0 @@
//
// Copyright 2017 Christian Reitwiessner
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
pragma solidity ^0.4.17;
library Pairing {
struct G1Point {
uint X;
uint Y;
}
// Encoding of field elements is: X[0] * z + X[1]
struct G2Point {
uint[2] X;
uint[2] Y;
}
/// @return the generator of G1
function P1() pure internal returns (G1Point) {
return G1Point(1, 2);
}
/// @return the generator of G2
function P2() pure internal returns (G2Point) {
// Original code point
return G2Point(
[11559732032986387107991004021392285783925812861821192530917403151452391805634,
10857046999023057135944570762232829481370756359578518086990519993285655852781],
[4082367875863433681332203403145435568316851327593401208105741076214120093531,
8495653923123431417604973247489272438418190587263600148770280649306958101930]
);
/*
// Changed by Jordi point
return G2Point(
[10857046999023057135944570762232829481370756359578518086990519993285655852781,
11559732032986387107991004021392285783925812861821192530917403151452391805634],
[8495653923123431417604973247489272438418190587263600148770280649306958101930,
4082367875863433681332203403145435568316851327593401208105741076214120093531]
);
*/
}
/// @return the negation of p, i.e. p.addition(p.negate()) should be zero.
function negate(G1Point p) pure internal returns (G1Point) {
// The prime q in the base field F_q for G1
uint q = 21888242871839275222246405745257275088696311157297823662689037894645226208583;
if (p.X == 0 && p.Y == 0)
return G1Point(0, 0);
return G1Point(p.X, q - (p.Y % q));
}
/// @return the sum of two points of G1
function addition(G1Point p1, G1Point p2) view internal returns (G1Point r) {
uint[4] memory input;
input[0] = p1.X;
input[1] = p1.Y;
input[2] = p2.X;
input[3] = p2.Y;
bool success;
assembly {
success := staticcall(sub(gas, 2000), 6, input, 0xc0, r, 0x60)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require(success);
}
/// @return the product of a point on G1 and a scalar, i.e.
/// p == p.scalar_mul(1) and p.addition(p) == p.scalar_mul(2) for all points p.
function scalar_mul(G1Point p, uint s) view internal returns (G1Point r) {
uint[3] memory input;
input[0] = p.X;
input[1] = p.Y;
input[2] = s;
bool success;
assembly {
success := staticcall(sub(gas, 2000), 7, input, 0x80, r, 0x60)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require (success);
}
/// @return the result of computing the pairing check
/// e(p1[0], p2[0]) * .... * e(p1[n], p2[n]) == 1
/// For example pairing([P1(), P1().negate()], [P2(), P2()]) should
/// return true.
function pairing(G1Point[] p1, G2Point[] p2) view internal returns (bool) {
require(p1.length == p2.length);
uint elements = p1.length;
uint inputSize = elements * 6;
uint[] memory input = new uint[](inputSize);
for (uint i = 0; i < elements; i++)
{
input[i * 6 + 0] = p1[i].X;
input[i * 6 + 1] = p1[i].Y;
input[i * 6 + 2] = p2[i].X[0];
input[i * 6 + 3] = p2[i].X[1];
input[i * 6 + 4] = p2[i].Y[0];
input[i * 6 + 5] = p2[i].Y[1];
}
uint[1] memory out;
bool success;
assembly {
success := staticcall(sub(gas, 2000), 8, add(input, 0x20), mul(inputSize, 0x20), out, 0x20)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require(success);
return out[0] != 0;
}
/// Convenience method for a pairing check for two pairs.
function pairingProd2(G1Point a1, G2Point a2, G1Point b1, G2Point b2) view internal returns (bool) {
G1Point[] memory p1 = new G1Point[](2);
G2Point[] memory p2 = new G2Point[](2);
p1[0] = a1;
p1[1] = b1;
p2[0] = a2;
p2[1] = b2;
return pairing(p1, p2);
}
/// Convenience method for a pairing check for three pairs.
function pairingProd3(
G1Point a1, G2Point a2,
G1Point b1, G2Point b2,
G1Point c1, G2Point c2
) view internal returns (bool) {
G1Point[] memory p1 = new G1Point[](3);
G2Point[] memory p2 = new G2Point[](3);
p1[0] = a1;
p1[1] = b1;
p1[2] = c1;
p2[0] = a2;
p2[1] = b2;
p2[2] = c2;
return pairing(p1, p2);
}
/// Convenience method for a pairing check for four pairs.
function pairingProd4(
G1Point a1, G2Point a2,
G1Point b1, G2Point b2,
G1Point c1, G2Point c2,
G1Point d1, G2Point d2
) view internal returns (bool) {
G1Point[] memory p1 = new G1Point[](4);
G2Point[] memory p2 = new G2Point[](4);
p1[0] = a1;
p1[1] = b1;
p1[2] = c1;
p1[3] = d1;
p2[0] = a2;
p2[1] = b2;
p2[2] = c2;
p2[3] = d2;
return pairing(p1, p2);
}
}
contract Verifier {
using Pairing for *;
struct VerifyingKey {
Pairing.G1Point alfa1;
Pairing.G2Point beta2;
Pairing.G2Point gamma2;
Pairing.G2Point delta2;
Pairing.G1Point[] IC;
}
struct Proof {
Pairing.G1Point A;
Pairing.G2Point B;
Pairing.G1Point C;
}
function verifyingKey() pure internal returns (VerifyingKey vk) {
vk.alfa1 = Pairing.G1Point(<%vk_alfa1%>);
vk.beta2 = Pairing.G2Point(<%vk_beta2%>);
vk.gamma2 = Pairing.G2Point(<%vk_gamma2%>);
vk.delta2 = Pairing.G2Point(<%vk_delta2%>);
vk.IC = new Pairing.G1Point[](<%vk_ic_length%>);
<%vk_ic_pts%>
}
function verify(uint[] input, Proof proof) view internal returns (uint) {
uint256 snark_scalar_field = 21888242871839275222246405745257275088548364400416034343698204186575808495617;
VerifyingKey memory vk = verifyingKey();
require(input.length + 1 == vk.IC.length);
// Compute the linear combination vk_x
Pairing.G1Point memory vk_x = Pairing.G1Point(0, 0);
for (uint i = 0; i < input.length; i++) {
require(input[i] < snark_scalar_field,"verifier-gte-snark-scalar-field");
vk_x = Pairing.addition(vk_x, Pairing.scalar_mul(vk.IC[i + 1], input[i]));
}
vk_x = Pairing.addition(vk_x, vk.IC[0]);
if (!Pairing.pairingProd4(
Pairing.negate(proof.A), proof.B,
vk.alfa1, vk.beta2,
vk_x, vk.gamma2,
proof.C, vk.delta2
)) return 1;
return 0;
}
function verifyProof(
uint[2] a,
uint[2][2] b,
uint[2] c,
uint[<%vk_input_length%>] input
) view public returns (bool r) {
Proof memory proof;
proof.A = Pairing.G1Point(a[0], a[1]);
proof.B = Pairing.G2Point([b[0][0], b[0][1]], [b[1][0], b[1][1]]);
proof.C = Pairing.G1Point(c[0], c[1]);
uint[] memory inputValues = new uint[](input.length);
for(uint i = 0; i < input.length; i++){
inputValues[i] = input[i];
}
if (verify(inputValues, proof) == 0) {
return true;
} else {
return false;
}
}
}

View File

@@ -1,245 +0,0 @@
//
// Copyright 2017 Christian Reitwiessner
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
// 2019 OKIMS
// ported to solidity 0.5
// fixed linter warnings
// added require error messages
//
pragma solidity ^0.5.0;
library Pairing {
struct G1Point {
uint X;
uint Y;
}
// Encoding of field elements is: X[0] * z + X[1]
struct G2Point {
uint[2] X;
uint[2] Y;
}
/// @return the generator of G1
function P1() internal pure returns (G1Point memory) {
return G1Point(1, 2);
}
/// @return the generator of G2
function P2() internal pure returns (G2Point memory) {
// Original code point
return G2Point(
[11559732032986387107991004021392285783925812861821192530917403151452391805634,
10857046999023057135944570762232829481370756359578518086990519993285655852781],
[4082367875863433681332203403145435568316851327593401208105741076214120093531,
8495653923123431417604973247489272438418190587263600148770280649306958101930]
);
}
/// @return the negation of p, i.e. p.addition(p.negate()) should be zero.
function negate(G1Point memory p) internal pure returns (G1Point memory) {
// The prime q in the base field F_q for G1
uint q = 21888242871839275222246405745257275088696311157297823662689037894645226208583;
if (p.X == 0 && p.Y == 0)
return G1Point(0, 0);
return G1Point(p.X, q - (p.Y % q));
}
/// @return the sum of two points of G1
function addition(G1Point memory p1, G1Point memory p2) internal view returns (G1Point memory r) {
uint[4] memory input;
input[0] = p1.X;
input[1] = p1.Y;
input[2] = p2.X;
input[3] = p2.Y;
bool success;
// solium-disable-next-line security/no-inline-assembly
assembly {
success := staticcall(sub(gas, 2000), 6, input, 0xc0, r, 0x60)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require(success,"pairing-add-failed");
}
/// @return the product of a point on G1 and a scalar, i.e.
/// p == p.scalar_mul(1) and p.addition(p) == p.scalar_mul(2) for all points p.
function scalar_mul(G1Point memory p, uint s) internal view returns (G1Point memory r) {
uint[3] memory input;
input[0] = p.X;
input[1] = p.Y;
input[2] = s;
bool success;
// solium-disable-next-line security/no-inline-assembly
assembly {
success := staticcall(sub(gas, 2000), 7, input, 0x80, r, 0x60)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require (success,"pairing-mul-failed");
}
/// @return the result of computing the pairing check
/// e(p1[0], p2[0]) * .... * e(p1[n], p2[n]) == 1
/// For example pairing([P1(), P1().negate()], [P2(), P2()]) should
/// return true.
function pairing(G1Point[] memory p1, G2Point[] memory p2) internal view returns (bool) {
require(p1.length == p2.length,"pairing-lengths-failed");
uint elements = p1.length;
uint inputSize = elements * 6;
uint[] memory input = new uint[](inputSize);
for (uint i = 0; i < elements; i++)
{
input[i * 6 + 0] = p1[i].X;
input[i * 6 + 1] = p1[i].Y;
input[i * 6 + 2] = p2[i].X[0];
input[i * 6 + 3] = p2[i].X[1];
input[i * 6 + 4] = p2[i].Y[0];
input[i * 6 + 5] = p2[i].Y[1];
}
uint[1] memory out;
bool success;
// solium-disable-next-line security/no-inline-assembly
assembly {
success := staticcall(sub(gas, 2000), 8, add(input, 0x20), mul(inputSize, 0x20), out, 0x20)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require(success,"pairing-opcode-failed");
return out[0] != 0;
}
/// Convenience method for a pairing check for two pairs.
function pairingProd2(G1Point memory a1, G2Point memory a2, G1Point memory b1, G2Point memory b2) internal view returns (bool) {
G1Point[] memory p1 = new G1Point[](2);
G2Point[] memory p2 = new G2Point[](2);
p1[0] = a1;
p1[1] = b1;
p2[0] = a2;
p2[1] = b2;
return pairing(p1, p2);
}
/// Convenience method for a pairing check for three pairs.
function pairingProd3(
G1Point memory a1, G2Point memory a2,
G1Point memory b1, G2Point memory b2,
G1Point memory c1, G2Point memory c2
) internal view returns (bool) {
G1Point[] memory p1 = new G1Point[](3);
G2Point[] memory p2 = new G2Point[](3);
p1[0] = a1;
p1[1] = b1;
p1[2] = c1;
p2[0] = a2;
p2[1] = b2;
p2[2] = c2;
return pairing(p1, p2);
}
/// Convenience method for a pairing check for four pairs.
function pairingProd4(
G1Point memory a1, G2Point memory a2,
G1Point memory b1, G2Point memory b2,
G1Point memory c1, G2Point memory c2,
G1Point memory d1, G2Point memory d2
) internal view returns (bool) {
G1Point[] memory p1 = new G1Point[](4);
G2Point[] memory p2 = new G2Point[](4);
p1[0] = a1;
p1[1] = b1;
p1[2] = c1;
p1[3] = d1;
p2[0] = a2;
p2[1] = b2;
p2[2] = c2;
p2[3] = d2;
return pairing(p1, p2);
}
}
contract Verifier {
using Pairing for *;
struct VerifyingKey {
Pairing.G2Point A;
Pairing.G1Point B;
Pairing.G2Point C;
Pairing.G2Point gamma;
Pairing.G1Point gammaBeta1;
Pairing.G2Point gammaBeta2;
Pairing.G2Point Z;
Pairing.G1Point[] IC;
}
struct Proof {
Pairing.G1Point A;
Pairing.G1Point A_p;
Pairing.G2Point B;
Pairing.G1Point B_p;
Pairing.G1Point C;
Pairing.G1Point C_p;
Pairing.G1Point K;
Pairing.G1Point H;
}
function verifyingKey() internal pure returns (VerifyingKey memory vk) {
vk.A = Pairing.G2Point(<%vk_a%>);
vk.B = Pairing.G1Point(<%vk_b%>);
vk.C = Pairing.G2Point(<%vk_c%>);
vk.gamma = Pairing.G2Point(<%vk_g%>);
vk.gammaBeta1 = Pairing.G1Point(<%vk_gb1%>);
vk.gammaBeta2 = Pairing.G2Point(<%vk_gb2%>);
vk.Z = Pairing.G2Point(<%vk_z%>);
vk.IC = new Pairing.G1Point[](<%vk_ic_length%>);
<%vk_ic_pts%>
}
function verify(uint[] memory input, Proof memory proof) internal view returns (uint) {
uint256 snark_scalar_field = 21888242871839275222246405745257275088548364400416034343698204186575808495617;
VerifyingKey memory vk = verifyingKey();
require(input.length + 1 == vk.IC.length,"verifier-bad-input");
// Compute the linear combination vk_x
Pairing.G1Point memory vk_x = Pairing.G1Point(0, 0);
for (uint i = 0; i < input.length; i++) {
require(input[i] < snark_scalar_field,"verifier-gte-snark-scalar-field");
vk_x = Pairing.addition(vk_x, Pairing.scalar_mul(vk.IC[i + 1], input[i]));
}
vk_x = Pairing.addition(vk_x, vk.IC[0]);
if (!Pairing.pairingProd2(proof.A, vk.A, Pairing.negate(proof.A_p), Pairing.P2())) return 1;
if (!Pairing.pairingProd2(vk.B, proof.B, Pairing.negate(proof.B_p), Pairing.P2())) return 2;
if (!Pairing.pairingProd2(proof.C, vk.C, Pairing.negate(proof.C_p), Pairing.P2())) return 3;
if (!Pairing.pairingProd3(
proof.K, vk.gamma,
Pairing.negate(Pairing.addition(vk_x, Pairing.addition(proof.A, proof.C))), vk.gammaBeta2,
Pairing.negate(vk.gammaBeta1), proof.B
)) return 4;
if (!Pairing.pairingProd3(
Pairing.addition(vk_x, proof.A), proof.B,
Pairing.negate(proof.H), vk.Z,
Pairing.negate(proof.C), Pairing.P2()
)) return 5;
return 0;
}
function verifyProof(
uint[2] memory a,
uint[2] memory a_p,
uint[2][2] memory b,
uint[2] memory b_p,
uint[2] memory c,
uint[2] memory c_p,
uint[2] memory h,
uint[2] memory k,
uint[<%vk_input_length%>] memory input
) view public returns (bool r) {
Proof memory proof;
proof.A = Pairing.G1Point(a[0], a[1]);
proof.A_p = Pairing.G1Point(a_p[0], a_p[1]);
proof.B = Pairing.G2Point([b[0][0], b[0][1]], [b[1][0], b[1][1]]);
proof.B_p = Pairing.G1Point(b_p[0], b_p[1]);
proof.C = Pairing.G1Point(c[0], c[1]);
proof.C_p = Pairing.G1Point(c_p[0], c_p[1]);
proof.H = Pairing.G1Point(h[0], h[1]);
proof.K = Pairing.G1Point(k[0], k[1]);
uint[] memory inputValues = new uint[](input.length);
for(uint i = 0; i < input.length; i++){
inputValues[i] = input[i];
}
if (verify(inputValues, proof) == 0) {
return true;
} else {
return false;
}
}
}

View File

@@ -2,7 +2,7 @@ template Multiplier(n) {
signal private input a;
signal private input b;
signal output c;
signal int[n];
int[0] <== a*a + b;
@@ -13,4 +13,4 @@ template Multiplier(n) {
c <== int[n-1];
}
component main = Multiplier(100);
component main = Multiplier(1000);
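The Multiplier circuit just iterates `int[i] <== int[i-1]*int[i-1] + b` over the bn128 scalar field, so the expected public output can be recomputed outside the circuit. A small sketch for sanity-checking witness inputs by hand (plain JavaScript BigInt; the prime is the same snark_scalar_field used by the Solidity verifiers above):

```js
// Sketch: c = int[n-1], where int[0] = a*a + b and int[i] = int[i-1]^2 + b (mod r).
const r = 21888242871839275222246405745257275088548364400416034343698204186575808495617n;

function multiplierOutput(a, b, n) {
    let acc = (a * a + b) % r;
    for (let i = 1; i < n; i++) acc = (acc * acc + b) % r;
    return acc;
}

// With the test inputs {a: 11, b: 2} and n = 1000:
// multiplierOutput(11n, 2n, 1000) gives the expected public signal.
```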

Binary file not shown.

Binary file not shown.

116
test/fullprocess.js Normal file
View File

@@ -0,0 +1,116 @@
import * as snarkjs from "../main.js";
import { getCurveFromName } from "../src/curves.js";
import assert from "assert";
import path from "path";
describe("Full process", function () {
this.timeout(100000);
let curve;
const ptau_0 = {type: "mem"};
const ptau_1 = {type: "mem"};
const ptau_2 = {type: "mem"};
const ptau_beacon = {type: "mem"};
const ptau_final = {type: "mem"};
const ptau_challange2 = {type: "mem"};
const ptau_response2 = {type: "mem"};
const zkey_0 = {type: "mem"};
const zkey_1 = {type: "mem"};
const zkey_2 = {type: "mem"};
const zkey_final = {type: "mem"};
const bellman_1 = {type: "mem"};
const bellman_2 = {type: "mem"};
let vKey;
const wtns = {type: "mem"};
let proof;
let publicSignals;
before( async () => {
curve = await getCurveFromName("bn128");
});
after( async () => {
await curve.terminate();
});
it ("powersoftau new", async () => {
await snarkjs.powersOfTau.newAccumulator(curve, 12, ptau_0);
});
it ("powersoftau contribute ", async () => {
await snarkjs.powersOfTau.contribute(ptau_0, ptau_1, "C1", "Entropy1");
});
it ("powersoftau export challange", async () => {
await snarkjs.powersOfTau.exportChallange(ptau_1, ptau_challange2);
});
it ("powersoftau challange contribute", async () => {
await snarkjs.powersOfTau.challangeContribute(curve, ptau_challange2, ptau_response2, "Entropy2");
});
it ("powersoftau import response", async () => {
await snarkjs.powersOfTau.importResponse(ptau_1, ptau_response2, ptau_2, "C2");
});
it ("powersoftau beacon", async () => {
await snarkjs.powersOfTau.beacon(ptau_2, ptau_beacon, "B3", "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20", 10);
});
it ("powersoftau prepare phase2", async () => {
await snarkjs.powersOfTau.preparePhase2(ptau_beacon, ptau_final);
});
it ("powersoftau verify", async () => {
const res = await snarkjs.powersOfTau.verify(ptau_final);
assert(res);
});
it ("zkey new", async () => {
await snarkjs.zKey.newZKey(path.join("test", "circuit", "circuit.r1cs"), ptau_final, zkey_0);
});
it ("zkey contribute ", async () => {
await snarkjs.zKey.contribute(zkey_0, zkey_1, "p2_C1", "pa_Entropy1");
});
it ("zkey export bellman", async () => {
await snarkjs.zKey.exportBellman(zkey_1, bellman_1);
});
it ("zkey bellman contribute", async () => {
await snarkjs.zKey.bellmanContribute(curve, bellman_1, bellman_2, "pa_Entropy2");
});
it ("zkey import bellman", async () => {
await snarkjs.zKey.importBellman(zkey_1, bellman_2, zkey_2, "C2");
});
it ("zkey beacon", async () => {
await snarkjs.zKey.beacon(zkey_2, zkey_final, "B3", "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20", 10);
});
it ("zkey verify", async () => {
const res = await snarkjs.zKey.verify(path.join("test", "circuit", "circuit.r1cs"), ptau_final, zkey_final);
assert(res);
});
it ("zkey export verificationkey", async () => {
vKey = await snarkjs.zKey.exportVerificationKey(zkey_final);
});
it ("witness calculate", async () => {
await snarkjs.wtns.calculate({a: 11, b:2}, path.join("test", "circuit", "circuit.wasm"), wtns);
});
it ("groth16 proof", async () => {
const res = await snarkjs.groth16.prove(zkey_final, wtns);
proof = res.proof;
publicSignals = res.publicSignals;
});
it ("groth16 verify", async () => {
const res = await snarkjs.groth16.validate(vKey, publicSignals, proof);
assert(res == true);
});
});
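Every artifact above is a `{type: "mem"}` descriptor, so the full ceremony, setup, proof and verification run in memory without touching disk. The same chain works outside mocha; a condensed sketch (identical calls, error handling and most intermediate steps elided):

```js
// Condensed sketch of the pipeline exercised by the test above.
import * as snarkjs from "../main.js";
import { getCurveFromName } from "../src/curves.js";

const curve = await getCurveFromName("bn128");
const ptau_0 = {type: "mem"}, ptau_1 = {type: "mem"};
await snarkjs.powersOfTau.newAccumulator(curve, 12, ptau_0);
await snarkjs.powersOfTau.contribute(ptau_0, ptau_1, "C1", "Entropy1");
// ...preparePhase2, zKey.newZKey, zKey.contribute, wtns.calculate,
// groth16.prove and groth16.validate follow exactly as in the test.
await curve.terminate();
```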

View File

@@ -1,46 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of zksnark JavaScript library.
zksnark JavaScript library is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
zksnark JavaScript library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
zksnark JavaScript library. If not, see <https://www.gnu.org/licenses/>.
*/
const chai = require("chai");
const fs = require("fs");
const path = require("path");
const loadR1cs = require("r1csfile").load;
const zkSnark = require("../index.js");
const WitnessCalculatorBuilder = require("circom_runtime").WitnessCalculatorBuilder;
const assert = chai.assert;
describe("zkSnark Groth", () => {
it("Load a circuit, create trusted setup, create a proof and validate it", async () => {
const cir = await loadR1cs(path.join(__dirname, "circuit", "circuit.r1cs"), true);
const setup = zkSnark.groth.setup(cir);
const wasm = await fs.promises.readFile(path.join(__dirname, "circuit", "circuit.wasm"));
const wc = await WitnessCalculatorBuilder(wasm, {sanityCheck: true});
const witness = await wc.calculateWitness({"a": "33", "b": "34"});
const {proof, publicSignals} = zkSnark.groth.genProof(setup.vk_proof, witness);
assert( zkSnark.groth.isValid(setup.vk_verifier, proof, publicSignals));
}).timeout(10000000);
});

View File

@@ -1,105 +1,119 @@
const assert = require("assert");
const bn128 = require("ffjavascript").bn128;
const Fq = bn128.F1;
const getG2sp = require("../src/keypair").getG2sp;
const pubKey = {
tau_g1_s: [
Fq.e("0x1403cf4fed293e66a8cd522be9f938524111f6f08762371bff53ee387a39cf13"),
Fq.e("0x2accbda355c222301a1bd802db7454d86a4ec2ee89ae895ca21f147d6b705740"),
Fq.e("1")
],
tau_g1_sx: [
Fq.e("0x12996cf89d854246f1ab002e446436b77a64349117ec1fb2aa57a304890e81ef"),
Fq.e("0x0c17fd067df52c480a1db3c6890821f975932d89d0d53c6c60777cc56f1dd712"),
Fq.e("1")
],
alpha_g1_s:[
Fq.e("0x12a64bbe8af7fcb19052e25e188c1fcdac454928142f8e89f58e03249e18b223"),
Fq.e("0x22be31a388d0ec551530e1b1581b671b4340e88990de805a7bfed8bdb9c1accd"),
Fq.e("1")
],
alpha_g1_sx: [
Fq.e("0x262ff8dd594374c6ed5e892ba31315f6e47c500784a12ea8d2c573730888a392"),
Fq.e("0x0b3a94f2b61178f2974e039cfd671e7405ec43eb2c09dc8f43a34f450917a62f"),
Fq.e("1")
],
beta_g1_s: [
Fq.e("0x0d9b3088b69daf6746c6bba4f9b359234abbfd3306bce14b198e7a5556c777e6"),
Fq.e("0x066d1acac914883df6a9dc57dc2037a481ba4b8646efe13e2584b9258bd52d0c"),
Fq.e("1")
],
beta_g1_sx: [
Fq.e("0x248232878c359dbe632c387dc0d955520e8d3363f1cd9621ec9fd4a05460c754"),
Fq.e("0x12074f06ef232a472cb36c328e760c4acfb4bedad4ca3ee09971578a0fe185ab"),
Fq.e("1")
],
tau_g2_spx: [
[
Fq.e("0x0fe02fcc3aee51c1f3a37f3f152ebe5476ae659468f2ee81cdeb19d0dad366c5"),
Fq.e("0x01aeb4db892bcb273aada80f5eab10e2e50ae59a5c274b0d7303f5c5a52ee88b"),
],[
Fq.e("0x2d00022d840d493fb93c68a63b29e2692c0cd3caf354fe60eae1ebacefc2c948"),
Fq.e("0x204065ff10344153a08cfe4ae543c47fba883ef8a54530fa6a52c87e5c28ef2b"),
],[
Fq.e("1"),
Fq.e("0")
]
],
alpha_g2_spx: [
[
Fq.e("0x2e649d01a58a7795762df8f0634c273ebce6950a9a2ba3d4459458620d3164a0"),
Fq.e("0x1b58044d3e205a918124fea3983583199b4f99fd0abb39ede2c684b0810bdc1e"),
],[
Fq.e("0x021d41558cea5fa32c9f3de5834cb2ee45ce4cdf471353395d019dfe0c9c2509"),
Fq.e("0x1c04148bac3f17b219c2655cd63ad2596ea63293103487be488a1d5a9054ddbf"),
],[
Fq.e("1"),
Fq.e("0")
]
],
beta_g2_spx: [
[
Fq.e("0x029251aed5163109667300035ce200b7195fc6e261581ba38776d87d7f0b1a7d"),
Fq.e("0x09d6847f1b945ccdc00418a807f4b0af67ec5c0030c4f203581eff9d4af4347f"),
],[
Fq.e("0x04b62ecdc94bf94fcefdf93f06ca4f63026a47a0d4138941b8ee45b9f7177e5c"),
Fq.e("0x1f0a6bff3945f207f407ff1c813b66a28b495f55a3788c3e200c74817e86f7ce"),
],[
Fq.e("1"),
Fq.e("0")
]
]
};
const challange = Buffer.from(
"bc0bde7980381fa642b2097591dd83f1"+
"ed15b003e15c35520af32c95eb519149"+
"2a6f3175215635cfc10e6098e2c612d0"+
"ca84f1a9f90b5333560c8af59b9209f4", "hex");
import assert from "assert";
import { getCurveFromName } from "../src/curves.js";
import { hex2ByteArray } from "../src/misc.js";
import { Scalar } from "ffjavascript";
import { getG2sp } from "../src/keypair.js";
describe("keypair", () => {
let curve;
before( async () => {
curve = await getCurveFromName("bn128");
});
after( async () => {
await curve.terminate();
});
it("It should calculate the right g2_s for the test vectors", async () => {
const tau_g2_sp = getG2sp(0, challange, pubKey.tau_g1_s, pubKey.tau_g1_sx);
assert(bn128.F12.eq(
bn128.pairing(pubKey.tau_g1_sx,tau_g2_sp),
bn128.pairing(pubKey.tau_g1_s, pubKey.tau_g2_spx)));
const challange = hex2ByteArray(
"bc0bde7980381fa642b2097591dd83f1"+
"ed15b003e15c35520af32c95eb519149"+
"2a6f3175215635cfc10e6098e2c612d0"+
"ca84f1a9f90b5333560c8af59b9209f4"
);
const alpha_g2_sp = getG2sp(1, challange, pubKey.alpha_g1_s, pubKey.alpha_g1_sx);
assert(bn128.F12.eq(
bn128.pairing(pubKey.alpha_g1_sx, alpha_g2_sp),
bn128.pairing(pubKey.alpha_g1_s , pubKey.alpha_g2_spx)));
const tau_g1_s = curve.G1.fromObject([
Scalar.e("0x1403cf4fed293e66a8cd522be9f938524111f6f08762371bff53ee387a39cf13"),
Scalar.e("0x2accbda355c222301a1bd802db7454d86a4ec2ee89ae895ca21f147d6b705740"),
Scalar.e("1")
]);
const tau_g1_sx = curve.G1.fromObject([
Scalar.e("0x12996cf89d854246f1ab002e446436b77a64349117ec1fb2aa57a304890e81ef"),
Scalar.e("0x0c17fd067df52c480a1db3c6890821f975932d89d0d53c6c60777cc56f1dd712"),
Scalar.e("1")
]);
const tau_g2_sp = getG2sp(curve, 0, challange, tau_g1_s, tau_g1_sx);
const beta_g2_sp = getG2sp(2, challange, pubKey.beta_g1_s, pubKey.beta_g1_sx);
assert(bn128.F12.eq(
bn128.pairing(pubKey.beta_g1_sx, beta_g2_sp),
bn128.pairing(pubKey.beta_g1_s , pubKey.beta_g2_spx)));
const tau_g2_spx = curve.G2.fromObject([
[
Scalar.e("0x0fe02fcc3aee51c1f3a37f3f152ebe5476ae659468f2ee81cdeb19d0dad366c5"),
Scalar.e("0x01aeb4db892bcb273aada80f5eab10e2e50ae59a5c274b0d7303f5c5a52ee88b"),
],[
Scalar.e("0x2d00022d840d493fb93c68a63b29e2692c0cd3caf354fe60eae1ebacefc2c948"),
Scalar.e("0x204065ff10344153a08cfe4ae543c47fba883ef8a54530fa6a52c87e5c28ef2b"),
],[
Scalar.e("1"),
Scalar.e("0")
]
]);
assert(curve.F12.eq(
curve.pairing(tau_g1_sx, tau_g2_sp),
curve.pairing(tau_g1_s, tau_g2_spx)));
const alpha_g1_s = curve.G1.fromObject([
Scalar.e("0x12a64bbe8af7fcb19052e25e188c1fcdac454928142f8e89f58e03249e18b223"),
Scalar.e("0x22be31a388d0ec551530e1b1581b671b4340e88990de805a7bfed8bdb9c1accd"),
Scalar.e("1")
]);
const alpha_g1_sx = curve.G1.fromObject([
Scalar.e("0x262ff8dd594374c6ed5e892ba31315f6e47c500784a12ea8d2c573730888a392"),
Scalar.e("0x0b3a94f2b61178f2974e039cfd671e7405ec43eb2c09dc8f43a34f450917a62f"),
Scalar.e("1")
]);
const alpha_g2_sp = getG2sp(curve, 1, challange, alpha_g1_s, alpha_g1_sx);
const alpha_g2_spx = curve.G2.fromObject([
[
Scalar.e("0x2e649d01a58a7795762df8f0634c273ebce6950a9a2ba3d4459458620d3164a0"),
Scalar.e("0x1b58044d3e205a918124fea3983583199b4f99fd0abb39ede2c684b0810bdc1e"),
],[
Scalar.e("0x021d41558cea5fa32c9f3de5834cb2ee45ce4cdf471353395d019dfe0c9c2509"),
Scalar.e("0x1c04148bac3f17b219c2655cd63ad2596ea63293103487be488a1d5a9054ddbf"),
],[
Scalar.e("1"),
Scalar.e("0")
]
]);
assert(curve.F12.eq(
curve.pairing(alpha_g1_sx, alpha_g2_sp),
curve.pairing(alpha_g1_s, alpha_g2_spx)));
const beta_g1_s = curve.G1.fromObject([
Scalar.e("0x0d9b3088b69daf6746c6bba4f9b359234abbfd3306bce14b198e7a5556c777e6"),
Scalar.e("0x066d1acac914883df6a9dc57dc2037a481ba4b8646efe13e2584b9258bd52d0c"),
Scalar.e("1")
]);
const beta_g1_sx = curve.G1.fromObject([
Scalar.e("0x248232878c359dbe632c387dc0d955520e8d3363f1cd9621ec9fd4a05460c754"),
Scalar.e("0x12074f06ef232a472cb36c328e760c4acfb4bedad4ca3ee09971578a0fe185ab"),
Scalar.e("1")
]);
const beta_g2_sp = getG2sp(curve, 2, challange, beta_g1_s, beta_g1_sx);
const beta_g2_spx = curve.G2.fromObject([
[
Scalar.e("0x029251aed5163109667300035ce200b7195fc6e261581ba38776d87d7f0b1a7d"),
Scalar.e("0x09d6847f1b945ccdc00418a807f4b0af67ec5c0030c4f203581eff9d4af4347f"),
],[
Scalar.e("0x04b62ecdc94bf94fcefdf93f06ca4f63026a47a0d4138941b8ee45b9f7177e5c"),
Scalar.e("0x1f0a6bff3945f207f407ff1c813b66a28b495f55a3788c3e200c74817e86f7ce"),
],[
Scalar.e("1"),
Scalar.e("0")
]
]);
assert(curve.F12.eq(
curve.pairing(beta_g1_sx, beta_g2_sp),
curve.pairing(beta_g1_s, beta_g2_spx)));
});
});
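Each assertion above instantiates the same pairing equation: the pair (g1_s, g1_sx) and the derived pair (g2_sp, g2_spx) must hide the same secret scalar, i.e. e(g1_sx, g2_sp) == e(g1_s, g2_spx). A small helper naming that check explicitly (illustrative; snarkjs keeps its own internal equivalent):

```js
// Sketch: true when g1sx = s*g1s and g2spx = s*g2sp for the same s.
// By bilinearity, e(s*g1s, g2sp) == e(g1s, g2sp)^s == e(g1s, s*g2sp).
function sameRatio(curve, g1s, g1sx, g2sp, g2spx) {
    return curve.F12.eq(
        curve.pairing(g1sx, g2sp),
        curve.pairing(g1s, g2spx)
    );
}
```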

View File

@@ -1,48 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of zksnark JavaScript library.
zksnark JavaScript library is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
zksnark JavaScript library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
zksnark JavaScript library. If not, see <https://www.gnu.org/licenses/>.
*/
const chai = require("chai");
const fs = require("fs");
const path = require("path");
const loadR1cs = require("r1csfile").load;
const zkSnark = require("../index.js");
const WitnessCalculatorBuilder = require("circom_runtime").WitnessCalculatorBuilder;
const assert = chai.assert;
describe("zkSnark kimleeoh", () => {
it("Load a circuit, create trusted setup, create a proof and validate it", async () => {
const cir = await loadR1cs(path.join(__dirname, "circuit", "circuit.r1cs"), true);
const setup = zkSnark.kimleeoh.setup(cir);
const wasm = await fs.promises.readFile(path.join(__dirname, "circuit", "circuit.wasm"));
const wc = await WitnessCalculatorBuilder(wasm, {sanityCheck: true});
const witness = await wc.calculateWitness({"a": "33", "b": "34"});
const {proof, publicSignals} = zkSnark.kimleeoh.genProof(setup.vk_proof, witness);
assert( zkSnark.kimleeoh.isValid(setup.vk_verifier, proof, publicSignals));
}).timeout(10000000);
});

View File

@@ -1,47 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of zksnark JavaScript library.
zksnark JavaScript library is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
zksnark JavaScript library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
zksnark JavaScript library. If not, see <https://www.gnu.org/licenses/>.
*/
const chai = require("chai");
const fs = require("fs");
const path = require("path");
const loadR1cs = require("r1csfile").load;
const zkSnark = require("../index.js");
const WitnessCalculatorBuilder = require("circom_runtime").WitnessCalculatorBuilder;
const assert = chai.assert;
describe("zkSnark Original", () => {
it("Load a circuit, create trusted setup, create a proof and validate it", async () => {
const cir = await loadR1cs(path.join(__dirname, "circuit", "circuit.r1cs"), true);
const setup = zkSnark.original.setup(cir);
const wasm = await fs.promises.readFile(path.join(__dirname, "circuit", "circuit.wasm"));
const wc = await WitnessCalculatorBuilder(wasm, {sanityCheck: true});
const witness = await wc.calculateWitness({"a": "33", "b": "34"});
const {proof, publicSignals} = zkSnark.original.genProof(setup.vk_proof, witness);
assert( zkSnark.original.isValid(setup.vk_verifier, proof, publicSignals));
}).timeout(10000000);
});

BIN
~[object Object].init Normal file

Binary file not shown.