Compare commits


1 Commit

131 changed files with 9554 additions and 58861 deletions

@@ -1,12 +1,14 @@
module.exports = {
"plugins": [
"mocha"
],
"env": {
"es6": true,
"node": true,
"mocha": true
},
"parserOptions": {
"ecmaVersion": 2020,
"sourceType": "module"
"ecmaVersion": 2017
},
"extends": "eslint:recommended",
"rules": {
@@ -25,6 +27,7 @@ module.exports = {
"semi": [
"error",
"always"
]
],
"mocha/no-exclusive-tests": "error"
}
};

@@ -1,25 +0,0 @@
name: tests
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
test:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [12.x, 14.x]
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- run: npm install -g circom@latest
- run: npm install
- name: mocha tests
run: npm run test

@@ -1,140 +0,0 @@
name: Check snarkjs tutorial
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
test:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [12.x, 14.x]
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- run: npm install -g circom@latest
- run: npm install
- run: npm link
- name: 1. Start a new powers of tau ceremony
run: snarkjs powersoftau new bn128 12 pot12_0000.ptau -v
- name: 2. Contribute to the ceremony
run: snarkjs powersoftau contribute pot12_0000.ptau pot12_0001.ptau -e="some random text" --name="First contribution" -v
- name: 3. Provide a second contribution
run: snarkjs powersoftau contribute pot12_0001.ptau pot12_0002.ptau --name="Second contribution" -e="some random text" -v
- name: 4. Provide a third contribution using third party software
run: |
snarkjs powersoftau export challenge pot12_0002.ptau challenge_0003
snarkjs powersoftau challenge contribute bn128 challenge_0003 response_0003 -e="some random text"
snarkjs powersoftau import response pot12_0002.ptau response_0003 pot12_0003.ptau -n="Third contribution name"
- name: 5. Verify the protocol so far
run: snarkjs powersoftau verify pot12_0003.ptau
- name: 6. Apply a random beacon
run: snarkjs powersoftau beacon pot12_0003.ptau pot12_beacon.ptau 0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f 10 -n="Final Beacon"
- name: 7. Prepare phase 2
run: snarkjs powersoftau prepare phase2 pot12_beacon.ptau pot12_final.ptau -v
- name: 8. Verify the final ptau
run: snarkjs powersoftau verify pot12_final.ptau -v
- name: 9. Create the circuit
run: |
cat <<EOT > circuit.circom
template Multiplier(n) {
signal private input a;
signal private input b;
signal output c;
signal int[n];
int[0] <== a*a + b;
for (var i=1; i<n; i++) {
int[i] <== int[i-1]*int[i-1] + b;
}
c <== int[n-1];
}
component main = Multiplier(1000);
EOT
- name: 10. Compile the circuit
run: circom circuit.circom --r1cs --wasm --sym -v
- name: 11. View information about the circuit
run: snarkjs r1cs info circuit.r1cs
- name: 12. Print the constraints
run: snarkjs r1cs print circuit.r1cs circuit.sym
- name: 13. Export r1cs to json
run: |
snarkjs r1cs export json circuit.r1cs circuit.r1cs.json
cat circuit.r1cs.json
- name: 14. Generate the reference zkey without phase 2 contributions
run: snarkjs groth16 setup circuit.r1cs pot12_final.ptau circuit_0000.zkey
- name: 15. Contribute to the phase 2 ceremony
run: snarkjs zkey contribute circuit_0000.zkey circuit_0001.zkey --name="1st Contributor Name" -e="some random text" -v
- name: 16. Provide a second contribution
run: snarkjs zkey contribute circuit_0001.zkey circuit_0002.zkey --name="Second contribution Name" -e="some random text" -v
- name: 17. Provide a third contribution using third party software
run: |
snarkjs zkey export bellman circuit_0002.zkey challenge_phase2_0003
snarkjs zkey bellman contribute bn128 challenge_phase2_0003 response_phase2_0003 -e="some random text"
snarkjs zkey import bellman circuit_0002.zkey response_phase2_0003 circuit_0003.zkey -n="Third contribution name"
- name: 18. Verify the latest zkey
run: snarkjs zkey verify circuit.r1cs pot12_final.ptau circuit_0003.zkey
- name: 19. Apply a random beacon
run: snarkjs zkey beacon circuit_0003.zkey circuit_final.zkey 0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f 10 -n="Final Beacon phase2"
- name: 20. Verify the final zkey
run: snarkjs zkey verify circuit.r1cs pot12_final.ptau circuit_final.zkey
- name: 21. Export the verification key
run: snarkjs zkey export verificationkey circuit_final.zkey verification_key.json
- name: 22. Calculate the witness
run: |
cat <<EOT > input.json
{"a": 3, "b": 11}
EOT
snarkjs wtns calculate circuit.wasm input.json witness.wtns
- name: 23. Debug the final witness calculation
run: snarkjs wtns debug circuit.wasm input.json witness.wtns circuit.sym --trigger --get --set
- name: 24. Create the proof
run: snarkjs groth16 prove circuit_final.zkey witness.wtns proof.json public.json
- name: 25. Verify the proof
run: snarkjs groth16 verify verification_key.json public.json proof.json
- name: 26. Turn the verifier into a smart contract
run: snarkjs zkey export solidityverifier circuit_final.zkey verifier.sol
- name: 27. Simulate a verification call
run: snarkjs zkey export soliditycalldata public.json proof.json
- name: 28. Plonk setup
run: snarkjs plonk setup circuit.r1cs pot12_final.ptau circuit_final.zkey
- name: 29. Export the verification key
run: snarkjs zkey export verificationkey circuit_final.zkey verification_key.json
- name: 30. Create a PLONK proof
run: snarkjs plonk prove circuit_final.zkey witness.wtns proof.json public.json
- name: 31. Verify the PLONK proof
run: snarkjs plonk verify verification_key.json public.json proof.json
- name: 32. Turn the PLONK verifier into a smart contract
run: snarkjs zkey export solidityverifier circuit_final.zkey verifier.sol
- name: 33. Simulate a PLONK verification call
run: snarkjs zkey export soliditycalldata public.json proof.json

.gitignore

@@ -66,4 +66,3 @@ typings/
tmp
.DS_Store
stats.html

.vscode/launch.json

@@ -1,106 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "pwa-node",
"request": "launch",
"name": "plonk setup",
"skipFiles": [
"<node_internals>/**"
],
"program": "cli.js",
"args": [
"pks",
"test/plonk_circuit/circuit.r1cs",
"test/plonk_circuit/powersOfTau15_final.ptau",
"test/plonk_circuit/circuit.zkey"
]
},
{
"type": "pwa-node",
"request": "launch",
"name": "plonk prove",
"skipFiles": [
"<node_internals>/**"
],
"program": "cli.js",
"args": [
"pkp",
"test/plonk_circuit/circuit.zkey",
"test/plonk_circuit/witness.wtns",
"test/plonk_circuit/proof.json",
"test/plonk_circuit/public.json",
"-v"
]
},
{
"type": "pwa-node",
"request": "launch",
"name": "plonk export vk",
"skipFiles": [
"<node_internals>/**"
],
"program": "cli.js",
"args": [
"zkev",
"test/plonk_circuit/circuit.zkey",
"test/plonk_circuit/verification_key.json",
]
},
{
"type": "pwa-node",
"request": "launch",
"name": "plonk verify",
"skipFiles": [
"<node_internals>/**"
],
"program": "cli.js",
"args": [
"pkv",
"test/plonk_circuit/verification_key.json",
"test/plonk_circuit/public.json",
"test/plonk_circuit/proof.json",
"-v"
]
},
{
"type": "pwa-node",
"request": "launch",
"name": "export solidity calldata",
"skipFiles": [
"<node_internals>/**"
],
"program": "cli.js",
"args": [
"zkesc",
"test/plonk_circuit/public.json",
"test/plonk_circuit/proof.json",
]
},
{
"type": "pwa-node",
"request": "launch",
"name": "export solidity verifier",
"skipFiles": [
"<node_internals>/**"
],
"program": "cli.js",
"args": [
"zkesv",
"test/plonk_circuit/circuit.zkey",
"test/plonk_circuit/verifier.sol",
]
},
{
"type": "node",
"request": "launch",
"name": "Mocha all tests",
"program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
"cwd": "${workspaceRoot}",
"internalConsoleOptions": "openOnSessionStart"
}
]
}

@@ -1,7 +1,7 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2020 0Kims Association <https://0kims.org>
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

README.md

@@ -1,590 +1,133 @@
# snarkjs: JavaScript implementation of zkSNARKs.
![tests](https://github.com/iden3/snarkjs/workflows/tests/badge.svg)![Check%20snarkjs%20tutorial](https://github.com/iden3/snarkjs/workflows/Check%20snarkjs%20tutorial/badge.svg)
This is a JavaScript implementation of zkSNARK schemes. It supports the original protocol (8 points) and the Groth protocol (3 points only and 3 pairings).
# snarkjs
This library allows you to perform the trusted setup, generate proofs and verify them.
This is a **JavaScript and Pure Web Assembly implementation of zkSNARK and PLONK schemes.** It uses the Groth16 protocol (3 points only and 3 pairings) and PLONK.
This library uses the compiled circuits generated by the jaz compiler.
This library includes all the tools required to perform trusted setup multi-party ceremonies, including the universal [*powers of tau*](https://medium.com/coinmonks/announcing-the-perpetual-powers-of-tau-ceremony-to-benefit-all-zk-snark-projects-c3da86af8377) ceremony and the second-phase circuit-specific ceremonies.
### Tutorial.
> Any zk-snark project can pick a round from the common phase 1 to start their circuit-specific phase 2 ceremony.
A good starting point [is this tutorial](https://github.com/iden3/circom/blob/master/TUTORIAL.md)
The formats used in this library for the multi-party computation are compatible with the ones used in [Semaphore's Perpetual Powers of Tau](https://github.com/weijiekoh/perpetualpowersoftau) and [other implementations](https://github.com/kobigurk/phase2-bn254).
Also this [video](https://www.youtube.com/watch?v=-9TJa1hVsKA) is a good starting point.
This library uses the compiled circuits generated by the [circom](https://github.com/iden3/circom) compiler.
It works in [`node.js`](#using-node) as well as directly in the [browser](#in-the-browser).
It's an [ES module](https://hacks.mozilla.org/2018/03/es-modules-a-cartoon-deep-dive/), so it can be directly imported into bigger projects using [Rollup](https://rollupjs.org/guide/en/) or [Webpack](https://webpack.js.org/).
The low-level cryptography is performed directly in `wasm`, and uses worker threads to parallelize the computations. The result is a high performance library with benchmarks comparable to host implementations.
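Since the library ships both as an ES module and as a CommonJS bundle, it can be imported in either style. A minimal sketch (the exact list of namespaces printed at the end depends on the installed version):
```js
// ES module style (e.g. bundled with Rollup/Webpack, or Node with "type": "module")
import * as snarkjs from "snarkjs";

// CommonJS style would be: const snarkjs = require("snarkjs");

// Print the exposed namespaces (groth16, plonk, powersOfTau, r1cs, wtns, zKey, ...)
console.log(Object.keys(snarkjs));
```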
## Preliminaries
### Install node v14
First off, make sure you have a recent version of `Node.js` installed. While any version after `v12` should work fine, we recommend you install `v14` or later.
If you're not sure which version of Node you have installed, you can run:
## Install.
```sh
node -v
npm install snarkjs
```
To download the latest version of Node, see [here](https://nodejs.org/en/download/).
## Usage from command line.
### Install snarkjs and circom
To install `circom` and `snarkjs`, run:
```sh
npm install -g circom@latest
npm install -g snarkjs@latest
```
If you're seeing an error, try prefixing both commands with `sudo` and running them again.
### Understand the `help` command
To see a list of all `snarkjs` commands, as well as descriptions about their inputs and outputs, run:
```sh
snarkjs --help
```
This will show all the information on how to use the CLI.
You can also use the `--help` option with specific commands:
## Usage from javascript
```sh
snarkjs groth16 prove --help
```
Most of the commands have an alternative shorter alias (which you can discover using `--help`).
For example, the previous command can also be invoked with:
```sh
snarkjs g16p --help
```
### Debugging tip
If you feel a command is taking longer than it should, re-run it with the `-v` or `--verbose` option to see more details about how it's progressing and where it's getting blocked.
## Guide
### 0. Create and move into a new directory
```sh
mkdir snarkjs_example
cd snarkjs_example
```
### 1. Start a new powers of tau ceremony
```sh
snarkjs powersoftau new bn128 12 pot12_0000.ptau -v
```
The `new` command is used to start a powers of tau ceremony.
The first parameter after `new` refers to the type of curve you wish to use. At the moment, we support both `bn128` and `bls12-381`.
The second parameter, in this case `12`, is the power of two of the maximum number of constraints that the ceremony can accept: in this case, the number of constraints is `2 ^ 12 = 4096`. The maximum value supported here is `28`, which means you can use `snarkjs` to securely generate zk-snark parameters for circuits with up to `2 ^ 28` (≈268 million) constraints.
### 2. Contribute to the ceremony
```sh
snarkjs powersoftau contribute pot12_0000.ptau pot12_0001.ptau --name="First contribution" -v
```
The `contribute` command creates a ptau file with a new contribution.
You'll be prompted to enter some random text to provide an extra source of entropy.
`contribute` takes as input the transcript of the protocol so far, in this case `pot12_0000.ptau`, and outputs a new transcript, in this case `pot12_0001.ptau`, which includes the computation carried out by the new contributor (`ptau` files contain a history of all the challenges and responses that have taken place so far).
`name` can be anything you want, and is just included for reference (it will be printed when you verify the file in step 5).
### 3. Provide a second contribution
```sh
snarkjs powersoftau contribute pot12_0001.ptau pot12_0002.ptau --name="Second contribution" -v -e="some random text"
```
By letting you write the random text as part of the command, the `-e` parameter allows `contribute` to be non-interactive.
### 4. Provide a third contribution using third party software
```sh
snarkjs powersoftau export challenge pot12_0002.ptau challenge_0003
snarkjs powersoftau challenge contribute bn128 challenge_0003 response_0003 -e="some random text"
snarkjs powersoftau import response pot12_0002.ptau response_0003 pot12_0003.ptau -n="Third contribution name"
```
The challenge and response files are compatible with [this software](https://github.com/kobigurk/phase2-bn254).
This allows you to use different types of software in a single ceremony.
### 5. Verify the protocol so far
```sh
snarkjs powersoftau verify pot12_0003.ptau
```
The `verify` command verifies a `ptau` (powers of tau) file, which means it checks all the contributions to the multi-party computation (MPC) up to that point. It also prints the hashes of all the intermediate results to the console.
If everything checks out, you should see the following at the top of the output:
```sh
[INFO] snarkJS: Powers Of tau file OK!
```
In sum, whenever a new zk-snark project needs to perform a trusted setup, you can just pick the latest `ptau` file, and run the `verify` command to verify the entire chain of challenges and responses so far.
### 6. Apply a random beacon
```sh
snarkjs powersoftau beacon pot12_0003.ptau pot12_beacon.ptau 0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f 10 -n="Final Beacon"
```
The `beacon` command creates a `ptau` file with a contribution applied in the form of a random beacon.
We need to apply a random beacon in order to finalise phase 1 of the trusted setup.
> To paraphrase Sean Bowe and Ariel Gabizon, a random beacon is a source of public randomness that is not available before a fixed time. The beacon itself can be a delayed hash function (e.g. 2^40 iterations of SHA256) evaluated on some high entropy and publicly available data. Possible sources of data include: the closing value of the stock market on a certain date in the future, the output of a selected set of national lotteries, or the value of a block at a particular height in one or more blockchains. E.g. the hash of the 11 millionth Ethereum block (which as of this writing is some 3 months in the future). See [here](https://eprint.iacr.org/2017/1050.pdf) for more on the importance of a random beacon.
For the purposes of this tutorial, the beacon is essentially a delayed hash function evaluated on `0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f` (in practice this value will be some form of high entropy and publicly available data of your choice). The next input -- in our case `10` -- just tells `snarkjs` to perform `2 ^ 10` iterations of this hash function.
> Note that [security holds](https://eprint.iacr.org/2017/1050) even if an adversary has limited influence on the beacon.
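To make the "delayed hash function" idea concrete, here is a rough sketch that iterates SHA-256 `2 ^ 10` times over the beacon value used above. This only illustrates the concept; it is not the exact hash construction snarkjs applies internally.
```js
import { createHash } from "crypto";

// Beacon value and iteration exponent taken from the command above.
const beaconHex = "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f";
const iterations = 2 ** 10;

let digest = Buffer.from(beaconHex, "hex");
for (let i = 0; i < iterations; i++) {
    digest = createHash("sha256").update(digest).digest();
}
console.log("delayed hash:", digest.toString("hex"));
```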
### 7. Prepare phase 2
```sh
snarkjs powersoftau prepare phase2 pot12_beacon.ptau pot12_final.ptau -v
```
We're now ready to prepare phase 2 of the setup (the circuit-specific phase).
Under the hood, the `prepare phase2` command calculates the encrypted evaluation of the Lagrange polynomials at tau for `tau`, `alpha*tau` and `beta*tau`. It takes the beacon `ptau` file we generated in the previous step, and outputs a final `ptau` file which will be used to generate the circuit proving and verification keys.
---
**NOTE**
Ptau files for bn128, already prepared for phase 2, with 54 contributions and a beacon, can be found here:
| power | maxConstraints | file | hash |
|-------|----------------|-----------|-------|
| 8 | 256 | [powersOfTau28_hez_final_08.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_08.ptau) | d6a8fb3a04feb600096c3b791f936a578c4e664d262e4aa24beed1b7a9a96aa5eb72864d628db247e9293384b74b36ffb52ca8d148d6e1b8b51e279fdf57b583 |
| 9 | 512 | [powersOfTau28_hez_final_09.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_09.ptau) | 94f108a80e81b5d932d8e8c9e8fd7f46cf32457e31462deeeef37af1b71c2c1b3c71fb0d9b59c654ec266b042735f50311f9fd1d4cadce47ab234ad163157cb5 |
| 10 | 1k | [powersOfTau28_hez_final_10.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_10.ptau) | 6cfeb8cda92453099d20120bdd0e8a5c4e7706c2da9a8f09ccc157ed2464d921fd0437fb70db42104769efd7d6f3c1f964bcf448c455eab6f6c7d863e88a5849 |
| 11 | 2k | [powersOfTau28_hez_final_11.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_11.ptau) | 47c282116b892e5ac92ca238578006e31a47e7c7e70f0baa8b687f0a5203e28ea07bbbec765a98dcd654bad618475d4661bfaec3bd9ad2ed12e7abc251d94d33 |
| 12 | 4k | [powersOfTau28_hez_final_12.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_12.ptau) | ded2694169b7b08e898f736d5de95af87c3f1a64594013351b1a796dbee393bd825f88f9468c84505ddd11eb0b1465ac9b43b9064aa8ec97f2b73e04758b8a4a |
| 13 | 8k | [powersOfTau28_hez_final_13.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_13.ptau) | 58efc8bf2834d04768a3d7ffcd8e1e23d461561729beaac4e3e7a47829a1c9066d5320241e124a1a8e8aa6c75be0ba66f65bc8239a0542ed38e11276f6fdb4d9 |
| 14 | 16k | [powersOfTau28_hez_final_14.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_14.ptau) | eeefbcf7c3803b523c94112023c7ff89558f9b8e0cf5d6cdcba3ade60f168af4a181c9c21774b94fbae6c90411995f7d854d02ebd93fb66043dbb06f17a831c1 |
| 15 | 32k | [powersOfTau28_hez_final_15.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_15.ptau) | 982372c867d229c236091f767e703253249a9b432c1710b4f326306bfa2428a17b06240359606cfe4d580b10a5a1f63fbed499527069c18ae17060472969ae6e |
| 16 | 64k | [powersOfTau28_hez_final_16.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_16.ptau) | 6a6277a2f74e1073601b4f9fed6e1e55226917efb0f0db8a07d98ab01df1ccf43eb0e8c3159432acd4960e2f29fe84a4198501fa54c8dad9e43297453efec125 |
| 17 | 128k | [powersOfTau28_hez_final_17.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_17.ptau) | 6247a3433948b35fbfae414fa5a9355bfb45f56efa7ab4929e669264a0258976741dfbe3288bfb49828e5df02c2e633df38d2245e30162ae7e3bcca5b8b49345 |
| 18 | 256k | [powersOfTau28_hez_final_18.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_18.ptau) | 7e6a9c2e5f05179ddfc923f38f917c9e6831d16922a902b0b4758b8e79c2ab8a81bb5f29952e16ee6c5067ed044d7857b5de120a90704c1d3b637fd94b95b13e |
| 19 | 512k | [powersOfTau28_hez_final_19.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_19.ptau) | bca9d8b04242f175189872c42ceaa21e2951e0f0f272a0cc54fc37193ff6648600eaf1c555c70cdedfaf9fb74927de7aa1d33dc1e2a7f1a50619484989da0887 |
| 20 | 1M | [powersOfTau28_hez_final_20.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_20.ptau) | 89a66eb5590a1c94e3f1ee0e72acf49b1669e050bb5f93c73b066b564dca4e0c7556a52b323178269d64af325d8fdddb33da3a27c34409b821de82aa2bf1a27b |
| 21 | 2M | [powersOfTau28_hez_final_21.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_21.ptau) | 9aef0573cef4ded9c4a75f148709056bf989f80dad96876aadeb6f1c6d062391f07a394a9e756d16f7eb233198d5b69407cca44594c763ab4a5b67ae73254678 |
| 22 | 4M | [powersOfTau28_hez_final_22.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_22.ptau) | 0d64f63dba1a6f11139df765cb690da69d9b2f469a1ddd0de5e4aa628abb28f787f04c6a5fb84a235ec5ea7f41d0548746653ecab0559add658a83502d1cb21b |
| 23 | 8M | [powersOfTau28_hez_final_23.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_23.ptau) | 3063a0bd81d68711197c8820a92466d51aeac93e915f5136d74f63c394ee6d88c5e8016231ea6580bec02e25d491f319d92e77f5c7f46a9caa8f3b53c0ea544f |
| 24 | 16M | [powersOfTau28_hez_final_24.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_24.ptau) | fa404d140d5819d39984833ca5ec3632cd4995f81e82db402371a4de7c2eae8687c62bc632a95b0c6aadba3fb02680a94e09174b7233ccd26d78baca2647c733 |
| 25 | 32M | [powersOfTau28_hez_final_25.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_25.ptau) | 0377d860cdb09a8a31ea1b0b8c04335614c8206357181573bf294c25d5ca7dff72387224fbd868897e6769f7805b3dab02854aec6d69d7492883b5e4e5f35eeb |
| 26 | 64M | [powersOfTau28_hez_final_26.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_26.ptau) | 418dee4a74b9592198bd8fd02ad1aea76f9cf3085f206dfd7d594c9e264ae919611b1459a1cc920c2f143417744ba9edd7b8d51e44be9452344a225ff7eead19 |
| 27 | 128M | [powersOfTau28_hez_final_27.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final_27.ptau) | 10ffd99837c512ef99752436a54b9810d1ac8878d368fb4b806267bdd664b4abf276c9cd3c4b9039a1fa4315a0c326c0e8e9e8fe0eb588ffd4f9021bf7eae1a1 |
| 28 | 256M | [powersOfTau28_hez_final.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final.ptau) | 55c77ce8562366c91e7cda394cf7b7c15a06c12d8c905e8b36ba9cf5e13eb37d1a429c589e8eaba4c591bc4b88a0e2828745a53e170eac300236f5c1a326f41a |
There is a truncated version of the file for each power of two.
The complete file is [powersOfTau28_hez_final.ptau](https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final.ptau) which includes 2**28 powers.
Its blake2b hash is:
55c77ce8562366c91e7cda394cf7b7c15a06c12d8c905e8b36ba9cf5e13eb37d1a429c589e8eaba4c591bc4b88a0e2828745a53e170eac300236f5c1a326f41a
You can find more information about the ceremony [here](https://github.com/weijiekoh/perpetualpowersoftau)
The last ptau file was generated using this procedure:
https://www.reddit.com/r/ethereum/comments/iftos6/powers_of_tau_selection_for_hermez_rollup/
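If you download one of these files, you can double-check it locally before using it. A minimal sketch, assuming the listed hash is the 512-bit blake2b digest of the file contents and that your Node build exposes the `blake2b512` algorithm (the file name and expected hash below are just one row of the table):
```js
import { createHash } from "crypto";
import { createReadStream } from "fs";

// Example row from the table above.
const file = "powersOfTau28_hez_final_12.ptau";
const expected = "ded2694169b7b08e898f736d5de95af87c3f1a64594013351b1a796dbee393bd825f88f9468c84505ddd11eb0b1465ac9b43b9064aa8ec97f2b73e04758b8a4a";

const hash = createHash("blake2b512");
createReadStream(file)
    .on("data", (chunk) => hash.update(chunk))
    .on("end", () => {
        const digest = hash.digest("hex");
        console.log(digest === expected ? "hash OK" : "hash mismatch: " + digest);
    });
```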
---
### 8. Verify the final `ptau`
```sh
snarkjs powersoftau verify pot12_final.ptau
```
The `verify` command verifies a powers of tau file.
Before we go ahead and create the circuit, we perform a final check and verify the final protocol transcript.
> Notice there is no longer a warning informing you that the file does not contain phase 2 precalculated values.
### 9. Create the circuit
```sh
cat <<EOT > circuit.circom
template Multiplier(n) {
signal private input a;
signal private input b;
signal output c;
signal int[n];
int[0] <== a*a + b;
for (var i=1; i<n; i++) {
int[i] <== int[i-1]*int[i-1] + b;
}
c <== int[n-1];
}
component main = Multiplier(1000);
EOT
```
We create a circom file that allows us to easily test the system with a different number of constraints.
In this case, we've chosen `1000`, but we can change this to anything we want (as long as the value we choose is below the number we defined in step 1).
### 10. Compile the circuit
```sh
circom circuit.circom --r1cs --wasm --sym -v
```
The `circom` command takes one input (the circuit to compile, in our case `circuit.circom`) and three options:
- `r1cs`: generates `circuit.r1cs` (the r1cs constraint system of the circuit in binary format).
- `wasm`: generates `circuit.wasm` (the wasm code to generate the witness; more on that later).
- `sym`: generates `circuit.sym` (a symbols file required for debugging and printing the constraint system in an annotated mode).
### 11. View information about the circuit
```sh
snarkjs r1cs info circuit.r1cs
```
The `info` command is used to print circuit stats.
You should see the following output:
```
[INFO] snarkJS: Curve: bn-128
[INFO] snarkJS: # of Wires: 1003
[INFO] snarkJS: # of Constraints: 1000
[INFO] snarkJS: # of Private Inputs: 2
[INFO] snarkJS: # of Public Inputs: 0
[INFO] snarkJS: # of Outputs: 1
```
This information fits with our mental map of the circuit we created: we had two private inputs `a` and `b`, one output `c`, and a thousand constraints of the form `a * b = c`.
### 12. Print the constraints
```sh
snarkjs r1cs print circuit.r1cs circuit.sym
```
To double check, we print the constraints of the circuit.
You should see a thousand constraints of the form:
```
[ -main.int[i] ] * [ main.int[i] ] - [ main.b -main.int[i+1] ] = 0
```
### 13. Export r1cs to json
```sh
snarkjs r1cs export json circuit.r1cs circuit.r1cs.json
cat circuit.r1cs.json
```
We export `r1cs` to `json` format to make it human readable.
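Once exported, the JSON can also be inspected from a script. A minimal sketch; the exact field layout of `circuit.r1cs.json` is not documented here, so the `constraints` key below is an assumption to verify against your own output:
```js
import { readFileSync } from "fs";

const r1cs = JSON.parse(readFileSync("circuit.r1cs.json", "utf8"));

// Print the top-level keys first, then count constraints if the assumed field exists.
console.log("top-level keys:", Object.keys(r1cs));
console.log("constraints:", Array.isArray(r1cs.constraints) ? r1cs.constraints.length : "unknown");
```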
### 14. Setup
Currently, snarkjs supports 2 proving systems: groth16 and PLONK.
Groth16 requires a trusted ceremony for each circuit. PLONK does not: the universal powers of tau ceremony is enough.
#### Plonk
```sh
snarkjs plonk setup circuit.r1cs pot12_final.ptau circuit_final.zkey
```
You can jump directly to Section 21 as PLONK does not require a specific trusted ceremony.
#### Groth16
```sh
snarkjs groth16 setup circuit.r1cs pot12_final.ptau circuit_0000.zkey
```
This generates the reference `zkey` without phase 2 contributions.
IMPORTANT: Do not use this zkey in production, as it's not safe. It requires at least one contribution.
The `groth16 setup` command creates an initial `zkey` file with zero contributions.
The `zkey` is a zero-knowledge key that includes both the proving and verification keys as well as phase 2 contributions.
Importantly, one can verify whether a `zkey` belongs to a specific circuit or not.
Note that `circuit_0000.zkey` (the output of the setup command above) does not include any contributions yet, so it cannot be used in a final circuit.
*The following steps (15-20) are similar to the equivalent phase 1 steps, except we use `zkey` instead of `powersoftau` as the main command, and we generate `zkey` rather than `ptau` files.*
### 15. Contribute to the phase 2 ceremony
```sh
snarkjs zkey contribute circuit_0000.zkey circuit_0001.zkey --name="1st Contributor Name" -v
```
The `zkey contribute` command creates a `zkey` file with a new contribution.
As in phase 1, you'll be prompted to enter some random text to provide an extra source of entropy.
### 16. Provide a second contribution
```sh
snarkjs zkey contribute circuit_0001.zkey circuit_0002.zkey --name="Second contribution Name" -v -e="Another random entropy"
```
We provide a second contribution.
### 17. Provide a third contribution using third party software
```sh
snarkjs zkey export bellman circuit_0002.zkey challenge_phase2_0003
snarkjs zkey bellman contribute bn128 challenge_phase2_0003 response_phase2_0003 -e="some random text"
snarkjs zkey import bellman circuit_0002.zkey response_phase2_0003 circuit_0003.zkey -n="Third contribution name"
```
And a third using [third-party software](https://github.com/kobigurk/phase2-bn254).
### 18. Verify the latest `zkey`
```sh
snarkjs zkey verify circuit.r1cs pot12_final.ptau circuit_0003.zkey
```
The `zkey verify` command verifies a `zkey` file. It also prints the hashes of all the intermediary results to the console.
We verify the `zkey` file we created in the previous step, which means we check all the contributions to the second phase of the multi-party computation (MPC) up to that point.
This command also checks that the `zkey` file matches the circuit.
If everything checks out, you should see the following:
```
[INFO] snarkJS: ZKey Ok!
```
### 19. Apply a random beacon
```sh
snarkjs zkey beacon circuit_0003.zkey circuit_final.zkey 0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f 10 -n="Final Beacon phase2"
```
The `zkey beacon` command creates a `zkey` file with a contribution applied in the form of a random beacon.
We use it to apply a random beacon to the latest `zkey` after the final contribution has been made (this is necessary in order to generate a final `zkey` file and finalise phase 2 of the trusted setup).
### 20. Verify the final `zkey`
```sh
snarkjs zkey verify circuit.r1cs pot12_final.ptau circuit_final.zkey
```
Before we go ahead and export the verification key as a `json`, we perform a final check and verify the final protocol transcript (`zkey`).
### 21. Export the verification key
```sh
snarkjs zkey export verificationkey circuit_final.zkey verification_key.json
```
We export the verification key from `circuit_final.zkey` into `verification_key.json`.
### 22. Calculate the witness
```sh
cat <<EOT > input.json
{"a": 3, "b": 11}
EOT
snarkjs wtns calculate circuit.wasm input.json witness.wtns
```
Calculate the witness (given the inputs `a = 3` and `b = 11`).
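The same step can also be done from JavaScript. A minimal sketch, assuming the `wtns` namespace exposes a `calculate(input, wasmFile, wtnsFile)` helper that mirrors the CLI command (check the exported API of your installed version before relying on it):
```js
import * as snarkjs from "snarkjs";

async function run() {
    // Same inputs as input.json above; file names mirror the CLI invocation.
    await snarkjs.wtns.calculate({ a: 3, b: 11 }, "circuit.wasm", "witness.wtns");
}

// snarkjs keeps worker threads alive, so exit explicitly (as in the Node example below).
run().then(() => process.exit(0));
```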
### 23. Debug the final witness calculation
```sh
snarkjs wtns debug circuit.wasm input.json witness.wtns circuit.sym --trigger --get --set
```
And check for any errors in the witness calculation process (best practice).
The `wtns debug` command logs every time a new component starts/ends (`--trigger`), when a signal is set (`--set`) and when it's read (`--get`).
### 24. Create the proof
#### PLONK
```sh
snarkjs plonk prove circuit_final.zkey witness.wtns proof.json public.json
```
#### Groth16
```sh
snarkjs groth16 prove circuit_final.zkey witness.wtns proof.json public.json
```
We create the proof. This command generates the files `proof.json` and `public.json`: `proof.json` contains the actual proof, whereas `public.json` contains the values of the public inputs and output.
> Note that it's also possible to create the proof and calculate the witness in the same command by running:
> ```sh
> snarkjs groth16 fullprove input.json circuit.wasm circuit_final.zkey proof.json public.json
> ```
### 25. Verify the proof
#### PLONK
```sh
snarkjs plonk verify verification_key.json public.json proof.json
```
#### Groth16
```sh
snarkjs groth16 verify verification_key.json public.json proof.json
```
We use this command to verify the proof, passing in the `verification_key` we exported earlier.
If all is well, you should see that `OK` has been output to your console, which signifies the proof is valid.
### 26. Turn the verifier into a smart contract
```sh
snarkjs zkey export solidityverifier circuit_final.zkey verifier.sol
```
Finally, we export the verifier as a Solidity smart-contract so that we can publish it on-chain -- using [remix](https://remix.ethereum.org/) for example. For the details on how to do this, refer to section 4 of [this tutorial](https://blog.iden3.io/first-zk-proof.html).
### 27. Simulate a verification call
```sh
snarkjs zkey export soliditycalldata public.json proof.json
```
We use `soliditycalldata` to simulate a verification call; cut and paste the result directly into the `verifyProof` field of the deployed smart contract in the Remix environment.
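The same calldata can be produced from JavaScript. A minimal sketch, assuming the `groth16` namespace exposes an `exportSolidityCallData(proof, publicSignals)` helper (the name is inferred from the CLI command, so treat it as an assumption and check the exported API):
```js
import * as snarkjs from "snarkjs";
import { readFileSync } from "fs";

async function run() {
    const proof = JSON.parse(readFileSync("proof.json", "utf8"));
    const publicSignals = JSON.parse(readFileSync("public.json", "utf8"));

    // Assumed API name; paste the printed string into verifyProof in Remix.
    const calldata = await snarkjs.groth16.exportSolidityCallData(proof, publicSignals);
    console.log(calldata);
}

run().then(() => process.exit(0));
```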
And voila! That's all there is to it :)
## Using Node
```sh
npm init
npm install snarkjs
```
### Import.
```js
const snarkjs = require("snarkjs");
const fs = require("fs");
const zkSnark = require("snarkjs");
```
async function run() {
const { proof, publicSignals } = await snarkjs.groth16.fullProve({a: 10, b: 21}, "circuit.wasm", "circuit_final.zkey");
### Load a circuit.
console.log("Proof: ");
console.log(JSON.stringify(proof, null, 1));
```js
// "myCircuit.cir" is the output of the jaz compiler
const vKey = JSON.parse(fs.readFileSync("verification_key.json"));
const circuitDef = JSON.parse(fs.readFileSync("myCircuit.cir", "utf8"));
const circuit = new zkSnark.Circuit(circuitDef);
```
const res = await snarkjs.groth16.verify(vKey, publicSignals, proof);
### Inspect the circuit.
if (res === true) {
console.log("Verification OK");
} else {
console.log("Invalid proof");
}
```js
// `signalId` can always be a number or an alias string
circuit.nConstraints; // number of constraints
circuit.nSignals; // number of signals
circuit.nPublic; // number of public signals (nOutputs + nPublicInputs)
// The array of signals is always sorted in this order:
// [ 1, outputs, publicInputs, privateInputs, internalSignals, constants]
// returns the a, b and c coefficients of the `signalId` on a given `constraint`
circuit.a(constraint, signalId)
circuit.b(constraint, signalId)
circuit.c(constraint, signalId)
circuit.nOutputs // number of public outputs
circuit.pubInputs // number of public inputs
circuit.nPrvInputs // number of private inputs
circuit.nInputs // number of inputs ( nPublicInputs + nPrivateInputs)
circuit.nVars // number of variables ( not including constants (one is a variable) )
circuit.nSignals // number of signals ( including constants )
circuit.outputIdx(i) // returns the index of the i'th output
circuit.inputIdx(i) // returns the index of the i'th input
circuit.pubInputIdx(i) // returns the index of the i'th public input
circuit.prvInputIdx(i) // returns the index of the i'th private input
circuit.varIdx(i) // returns the index of the i'th variable
circuit.constantIdx(i) // returns the index of the i'th constant
circuit.signalIdx(i) // returns the index of the i'th signal
// returns signal Idx given a signalId
// if the idx >= n , it is a constant
// if the idx == -1, the signal does not exist
circuit.getSignalIdx(name);
// returns an array aliases names of the i'th signal
circuit.signalNames(i)
// input is a key value object where keys are the signal names
// of all the inputs (public and private)
// returns an array of values representing the witness
circuit.calculateWitness(input)
```
### Trusted setup.
```js
const setup = zkSnark.setup(circuit);
fs.writeFileSync("myCircuit.vk_proof", JSON.stringify(setup.vk_proof), "utf8");
fs.writeFileSync("myCircuit.vk_verifier", JSON.stringify(setup.vk_verifier), "utf8");
setup.toxic // Must be discarded.
```
### Generate proof.
```js
const circuitDef = JSON.parse(fs.readFileSync("myCircuit.cir", "utf8"));
const circuit = new zkSnark.Circuit(circuitDef);
const input = {
"main.pubIn1": "123",
"main.out1": "456"
}
const witness = circuit.calculateWitness(input);
const vk_proof = JSON.parse(fs.readFileSync("myCircuit.vk_proof", "utf8"));
run().then(() => {
process.exit(0);
});
const {proof, publicSignals} = zkSnark.genProof(vk_proof, witness);
```
## In the browser
### Verifier.
Load `snarkjs.min.js` and start using it as usual.
```js
const vk_verifier = JSON.parse(fs.readFileSync("myCircuit.vk_verifier", "utf8"));
```
cp node_modules/snarkjs/build/snarkjs.min.js .
```
```html
<!doctype html>
<html>
<head>
<title>Snarkjs client example</title>
</head>
<body>
<h1>Snarkjs client example</h1>
<button id="bGenProof"> Create proof </button>
<!-- JS-generated output will be added here. -->
<pre class="proof"> Proof: <code id="proof"></code></pre>
<pre class="proof"> Result: <code id="result"></code></pre>
<script src="snarkjs.min.js"> </script>
<!-- This is the bundle generated by rollup.js -->
<script>
const proofComponent = document.getElementById('proof');
const resultComponent = document.getElementById('result');
const bGenProof = document.getElementById("bGenProof");
bGenProof.addEventListener("click", calculateProof);
async function calculateProof() {
const { proof, publicSignals } =
await snarkjs.groth16.fullProve( { a: 3, b: 11}, "circuit.wasm", "circuit_final.zkey");
proofComponent.innerHTML = JSON.stringify(proof, null, 1);
const vkey = await fetch("verification_key.json").then( function(res) {
return res.json();
});
const res = await snarkjs.groth16.verify(vkey, publicSignals, proof);
resultComponent.innerHTML = res;
if (zkSnark.isValid(vk_verifier, proof, publicSignals)) {
console.log("The proof is valid");
} else {
console.log("The proof is not valid");
}
</script>
</body>
</html>
```
## Further resources
- [Announcing the Perpetual Powers of Tau Ceremony to benefit all zk-SNARK projects](https://medium.com/coinmonks/announcing-the-perpetual-powers-of-tau-ceremony-to-benefit-all-zk-snark-projects-c3da86af8377)
- [Scalable Multi-party Computation for zk-SNARK Parameters in
the Random Beacon Model](https://eprint.iacr.org/2017/1050.pdf)
- [phase2-bn254](https://github.com/kobigurk/phase2-bn254)
- [Perpetual Powers of Tau](https://github.com/weijiekoh/perpetualpowersoftau)
- [Powers of Tau](https://github.com/ebfull/powersoftau)
- [Trusted setup ceremonies explored](https://www.zeroknowledge.fm/133)
- [Simple React project using snarkjs](https://github.com/LHerskind/snarkjs-react)
## Final note
We hope you enjoyed this quick walk-through. Please address any questions you may have to our [telegram group](https://t.me/iden3io) (it's also a great way to join the community and stay up to date with the latest circom and snarkjs developments) 💙
## License
snarkjs is part of the iden3 project, copyright 2018 0KIMS Association, and is published under the GPL-3 license. Please check the COPYING file for more details.

File diff suppressed because it is too large.

File diff suppressed because it is too large.

File diff suppressed because one or more lines are too long

build/snarkjs.min.js

File diff suppressed because one or more lines are too long

cli.js

File diff suppressed because it is too large.

@@ -1,16 +0,0 @@
import fs from "fs";
import { builtinModules as builtin } from "module";
const pkg = JSON.parse(fs.readFileSync("./package.json"));
export default {
input: "main.js",
output: {
file: "build/main.cjs",
format: "cjs",
},
external: [
...Object.keys(pkg.dependencies),
...builtin,
]
};

@@ -1,17 +0,0 @@
import fs from "fs";
import { builtinModules as builtin } from "module";
const pkg = JSON.parse(fs.readFileSync("./package.json"));
export default {
input: "cli.js",
output: {
file: "build/cli.cjs",
format: "cjs",
banner: "#! /usr/bin/env node\n",
},
external: [
...Object.keys(pkg.dependencies),
...builtin,
]
};

@@ -1,46 +0,0 @@
import { nodeResolve } from "@rollup/plugin-node-resolve";
import commonJS from "@rollup/plugin-commonjs";
import inject from "@rollup/plugin-inject";
import virtual from "@rollup/plugin-virtual";
import replace from "@rollup/plugin-replace";
import visualizer from "rollup-plugin-visualizer";
const empty = "export default {}";
export default {
input: "main.js",
output: {
file: "build/snarkjs.js",
format: "iife",
sourcemap: "inline",
globals: {
os: "null"
},
name: "snarkjs"
},
plugins: [
virtual({
fs: empty,
os: empty,
crypto: empty,
readline: empty,
ejs: empty,
// Stub out a "global" module that we can inject later
global: empty,
}),
nodeResolve({
browser: true,
preferBuiltins: false,
exportConditions: ['browser', 'default', 'module', 'require']
}),
commonJS(),
replace({
"process.browser": !!process.env.BROWSER
}),
inject({
// Inject the "global" virtual module if we see any reference to `global` in the code
global: "global",
}),
visualizer(),
]
};

@@ -1,15 +0,0 @@
import config from './rollup.iife.config';
import { terser } from "rollup-plugin-terser";
export default {
...config,
output: {
...config.output,
file: "build/snarkjs.min.js",
sourcemap: false,
},
plugins: [
...config.plugins,
terser(),
]
};

index.js

@@ -0,0 +1,44 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
exports.Circuit = require("./src/circuit.js");
exports.original = {
setup: require("./src/setup_original.js"),
genProof: require("./src/prover_original.js"),
isValid: require("./src/verifier_original.js")
};
exports.groth = {
setup: require("./src/setup_groth.js"),
genProof: require("./src/prover_groth.js"),
isValid: require("./src/verifier_groth.js")
};
exports.kimleeoh = {
setup: require("./src/setup_kimleeoh.js"),
genProof: require("./src/prover_kimleeoh.js"),
isValid: require("./src/verifier_kimleeoh.js")
};
exports.bigInt = require("./src/bigint.js");
exports.ZqField = require("./src/zqfield.js");
exports.stringifyBigInts = require("./src/stringifybigint.js").stringifyBigInts;
exports.unstringifyBigInts = require("./src/stringifybigint.js").unstringifyBigInts;
const Bn128 = require("./src/bn128.js");
exports.bn128 = new Bn128();

@@ -1,8 +0,0 @@
export * as groth16 from "./src/groth16.js";
export * as powersOfTau from "./src/powersoftau.js";
export * as r1cs from "./src/r1cs.js";
export * as wtns from "./src/wtns.js";
export * as zKey from "./src/zkey.js";
export * as plonk from "./src/plonk.js";

package-lock.json

File diff suppressed because it is too large.

@@ -1,23 +1,13 @@
{
"name": "snarkjs",
"type": "module",
"version": "0.4.5",
"version": "0.1.20",
"description": "zkSNARKs implementation in JavaScript",
"main": "./build/main.cjs",
"module": "./main.js",
"exports": {
"import": "./main.js",
"require": "./build/main.cjs"
},
"main": "index.js",
"scripts": {
"test": "mocha",
"build": "rollup -c config/rollup.cjs.config.js",
"buildcli": "rollup -c config/rollup.cli.config.js",
"buildiife": "BROWSER=true rollup -c config/rollup.iife.config.js",
"buildiifemin": "BROWSER=true rollup -c config/rollup.iife_min.config.js"
"test": "mocha"
},
"bin": {
"snarkjs": "build/cli.cjs"
"snarkjs": "cli.js"
},
"directories": {
"templates": "templates"
@@ -38,29 +28,15 @@
"url": "https://github.com/iden3/snarkjs.git"
},
"dependencies": {
"@iden3/binfileutils": "0.0.8",
"blake2b-wasm": "https://github.com/jbaylina/blake2b-wasm.git",
"circom_runtime": "0.1.13",
"ejs": "^3.1.6",
"fastfile": "0.0.19",
"ffjavascript": "0.2.36",
"js-sha3": "^0.8.0",
"logplease": "^1.2.15",
"r1csfile": "0.0.32",
"readline": "^1.3.0"
"big-integer": "^1.6.43",
"chai": "^4.2.0",
"escape-string-regexp": "^1.0.5",
"eslint": "^5.16.0",
"keccak": "^2.0.0",
"yargs": "^12.0.5"
},
"devDependencies": {
"@rollup/plugin-commonjs": "^17.0.0",
"@rollup/plugin-inject": "^4.0.2",
"@rollup/plugin-json": "^4.1.0",
"@rollup/plugin-node-resolve": "^11.1.0",
"@rollup/plugin-replace": "^2.3.4",
"@rollup/plugin-virtual": "^2.0.3",
"chai": "^4.2.0",
"eslint": "^6.8.0",
"mocha": "^7.1.1",
"rollup": "^2.36.2",
"rollup-plugin-terser": "^7.0.2",
"rollup-plugin-visualizer": "^4.2.0"
"eslint-plugin-mocha": "^5.3.0",
"mocha": "^5.2.0"
}
}

@@ -1,105 +0,0 @@
import {getCurveFromName} from "../src/curves.js";
async function run() {
const curve = await getCurveFromName("bn128");
const Fr = curve.Fr;
Fr.s = 2;
const powers = [];
let t;
let inc = Fr.e(2);
t = Fr.e(1);
for (let i=0; i<8; i++) {
powers[i] = t;
t = Fr.mul(t, inc);
}
printArr("powers", powers);
const shift_to_small_m = Fr.exp(Fr.shift, 4);
const one_over_denom = Fr.inv(Fr.sub(shift_to_small_m, Fr.one));
const t0=[];
const t1=[];
let sInvAcc = Fr.one;
for (let i=0; i<4; i++) {
t0[i] =
Fr.mul(
Fr.sub(
powers[i+4],
Fr.mul(shift_to_small_m, powers[i])
),
Fr.neg(one_over_denom)
);
t1[i] =
Fr.mul(
Fr.mul(
Fr.sub(powers[i+4], powers[i]),
sInvAcc
),
one_over_denom
);
sInvAcc = Fr.mul(sInvAcc, Fr.shiftInv);
}
printArr("t0", t0);
printArr("t1", t1);
const T0 = await Fr.ifft(t0);
const T1 = await Fr.ifft(t1);
printArr("T0", T0);
printArr("T1", T1);
const lEvs = [];
for (let i=0; i<4; i++) {
lEvs[i] = T0[i];
lEvs[i+4] =T1[i];
}
printArr("LEvs", lEvs);
const p = [Fr.e(10), Fr.e(22), Fr.e(324), Fr.e(46), Fr.e(35), Fr.e(56), Fr.e(557), Fr.e(18)];
const pt = lc(p, powers);
console.log( "p[t]: " + Fr.toString(pt) );
const P = await Fr.fft(p);
const Pt = lc(P, lEvs);
console.log( "P[t]: " + Fr.toString(Pt) );
function printArr(s, a) {
console.log(s+": [");
for (let i=0; i<a.length; i++) {
console.log(" "+ Fr.toString(a[i]));
}
console.log("]");
}
function lc(a, b) {
let acc = Fr.e(0);
for (let i=0; i<a.length; i++) {
acc = Fr.add(acc, Fr.mul(a[i], b[i]));
}
return acc;
}
}
run().then( () => {
process.exit(0);
});

@@ -1,73 +0,0 @@
import {getCurveFromName} from "../src/curves.js";
async function run() {
const curve = await getCurveFromName("bn128");
const Fr = curve.Fr;
Fr.s = 2;
const P = [Fr.e(1), Fr.e(2), Fr.e(3), Fr.e(4)];
printArr("P", P);
const p = await Fr.ifft(P);
printArr("p", p);
const p2 = [];
for (let i=0; i<4; i++) {
p2[i] = p[i];
p2[i+4] = Fr.zero;
}
printArr("p2", p2);
const P2 = await Fr.fft(p2);
printArr("P2", P2);
const pOdd = [];
let accShift;
const shift_to_small_m = Fr.exp(Fr.shift, 4);
// accShift = Fr.e(-1);
// accShift = Fr.sub(Fr.one, shift_to_small_m);
accShift = Fr.one;
for (let i=0; i<4; i++) {
pOdd[i] = Fr.mul(p[i], accShift);
accShift = Fr.mul(accShift, Fr.shift);
}
printArr("pOdd", pOdd);
const POdd = await Fr.fft(pOdd);
printArr("POdd", POdd);
function printArr(s, a) {
console.log(s+": [");
for (let i=0; i<a.length; i++) {
console.log(" "+ Fr.toString(a[i]));
}
console.log("]");
}
function lc(a, b) {
let acc = Fr.e(0);
for (let i=0; i<a.length; i++) {
acc = Fr.add(acc, Fr.mul(a[i], b[i]));
}
return acc;
}
}
run().then( () => {
process.exit(0);
});

@@ -1,63 +0,0 @@
import {getCurveFromName} from "../src/curves.js";
async function run() {
const curve = await getCurveFromName("bn128");
const Fr = curve.Fr;
Fr.s = 2;
const p = [Fr.e(1), Fr.e(2), Fr.e(3), Fr.e(4), Fr.e(-1), Fr.e(-2), Fr.e(-3), Fr.e(-4)];
printArr("p", p);
const P = await Fr.fft(p);
printArr("P", P);
const pOdd = [];
let accShift;
const shift_to_small_m = Fr.exp(Fr.shift, 4);
// accShift = Fr.e(-1);
accShift = Fr.sub(Fr.one, shift_to_small_m);
for (let i=0; i<4; i++) {
pOdd[i] = Fr.mul(p[i], accShift);
accShift = Fr.mul(accShift, Fr.shift);
}
printArr("pOdd", pOdd);
const POdd = await Fr.fft(pOdd);
printArr("POdd", POdd);
function printArr(s, a) {
console.log(s+": [");
for (let i=0; i<a.length; i++) {
console.log(" "+ Fr.toString(a[i]));
}
console.log("]");
}
function lc(a, b) {
let acc = Fr.e(0);
for (let i=0; i<a.length; i++) {
acc = Fr.add(acc, Fr.mul(a[i], b[i]));
}
return acc;
}
}
run().then( () => {
process.exit(0);
});

@@ -1,66 +0,0 @@
import {getCurveFromName} from "../src/curves.js";
async function run() {
const curve = await getCurveFromName("bn128");
const Fr = curve.Fr;
Fr.s = 2;
const p = [Fr.e(1), Fr.e(2), Fr.e(3), Fr.e(4)];
printArr("p", p);
const pz = [];
for (let i=0; i<4; i++) {
pz[i] = Fr.neg(p[i]);
pz[i+4] = p[i];
}
printArr("pz", pz);
const PZ = await Fr.fft(pz);
printArr("PZ", PZ);
const pOdd = [];
let accShift;
const shift_to_small_m = Fr.exp(Fr.shift, 4);
// accShift = Fr.e(-1);
accShift = Fr.sub(shift_to_small_m, Fr.one);
for (let i=0; i<4; i++) {
pOdd[i] = Fr.mul(p[i], accShift);
accShift = Fr.mul(accShift, Fr.shift);
}
printArr("pOdd", pOdd);
const POdd = await Fr.fft(pOdd);
printArr("POdd", POdd);
function printArr(s, a) {
console.log(s+": [");
for (let i=0; i<a.length; i++) {
console.log(" "+ Fr.toString(a[i]));
}
console.log("]");
}
function lc(a, b) {
let acc = Fr.e(0);
for (let i=0; i<a.length; i++) {
acc = Fr.add(acc, Fr.mul(a[i], b[i]));
}
return acc;
}
}
run().then( () => {
process.exit(0);
});

@@ -1,99 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
const SUBARRAY_SIZE = 0x40000;
const BigArrayHandler = {
get: function(obj, prop) {
if (!isNaN(prop)) {
return obj.getElement(prop);
} else return obj[prop];
},
set: function(obj, prop, value) {
if (!isNaN(prop)) {
return obj.setElement(prop, value);
} else {
obj[prop] = value;
return true;
}
}
};
class _BigArray {
constructor (initSize) {
this.length = initSize || 0;
this.arr = new Array(SUBARRAY_SIZE);
for (let i=0; i<initSize; i+=SUBARRAY_SIZE) {
this.arr[i/SUBARRAY_SIZE] = new Array(Math.min(SUBARRAY_SIZE, initSize - i));
}
return this;
}
push () {
for (let i=0; i<arguments.length; i++) {
this.setElement (this.length, arguments[i]);
}
}
slice (f, t) {
const arr = new Array(t-f);
for (let i=f; i< t; i++) arr[i-f] = this.getElement(i);
return arr;
}
getElement(idx) {
idx = parseInt(idx);
const idx1 = Math.floor(idx / SUBARRAY_SIZE);
const idx2 = idx % SUBARRAY_SIZE;
return this.arr[idx1] ? this.arr[idx1][idx2] : undefined;
}
setElement(idx, value) {
idx = parseInt(idx);
const idx1 = Math.floor(idx / SUBARRAY_SIZE);
if (!this.arr[idx1]) {
this.arr[idx1] = new Array(SUBARRAY_SIZE);
}
const idx2 = idx % SUBARRAY_SIZE;
this.arr[idx1][idx2] = value;
if (idx >= this.length) this.length = idx+1;
return true;
}
getKeys() {
const newA = new BigArray();
for (let i=0; i<this.arr.length; i++) {
if (this.arr[i]) {
for (let j=0; j<this.arr[i].length; j++) {
if (typeof this.arr[i][j] !== "undefined") {
newA.push(i*SUBARRAY_SIZE+j);
}
}
}
}
return newA;
}
}
class BigArray {
constructor( initSize ) {
const obj = new _BigArray(initSize);
const extObj = new Proxy(obj, BigArrayHandler);
return extObj;
}
}
export default BigArray;

src/bigint.js

@@ -0,0 +1,522 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* global BigInt */
const bigInt = require("big-integer");
let wBigInt;
if (typeof(BigInt) != "undefined") {
wBigInt = BigInt;
wBigInt.one = wBigInt(1);
wBigInt.zero = wBigInt(0);
// Affine
wBigInt.genAffine = (q) => {
const nq = -q;
return (a) => {
let aux = a;
if (aux < 0) {
if (aux <= nq) {
aux = aux % q;
}
if (aux < wBigInt.zero) {
aux = aux + q;
}
} else {
if (aux >= q) {
aux = aux % q;
}
}
return aux.valueOf();
};
};
// Inverse
wBigInt.genInverse = (q) => {
return (a) => {
let t = wBigInt.zero;
let r = q;
let newt = wBigInt.one;
let newr = wBigInt.affine(a, q);
while (newr!=wBigInt.zero) {
let q = r/newr;
[t, newt] = [newt, t-q*newt];
[r, newr] = [newr, r-q*newr];
}
if (t<wBigInt.zero) t += q;
return t;
};
};
// Add
wBigInt.genAdd = (q) => {
if (q) {
return (a,b) => (a+b) % q;
} else {
return (a,b) => a+b;
}
};
// Sub
wBigInt.genSub = (q) => {
if (q) {
return (a,b) => (a-b) % q;
} else {
return (a,b) => a-b;
}
};
// Neg
wBigInt.genNeg = (q) => {
if (q) {
return (a) => (-a) % q;
} else {
return (a) => -a;
}
};
// Mul
wBigInt.genMul = (q) => {
if (q) {
return (a,b) => (a*b) % q;
} else {
return (a,b) => a*b;
}
};
// Shr
wBigInt.genShr = () => {
return (a,b) => a >> wBigInt(b);
};
// Shl
wBigInt.genShl = (q) => {
if (q) {
return (a,b) => (a << wBigInt(b)) % q;
} else {
return (a,b) => a << wBigInt(b);
}
};
// Equals
wBigInt.genEquals = (q) => {
if (q) {
return (a,b) => (a.affine(q) == b.affine(q));
} else {
return (a,b) => a == b;
}
};
// Square
wBigInt.genSquare = (q) => {
if (q) {
return (a) => (a*a) %q;
} else {
return (a) => a*a;
}
};
// Double
wBigInt.genDouble = (q) => {
if (q) {
return (a) => (a+a) %q;
} else {
return (a) => a+a;
}
};
// IsZero
wBigInt.genIsZero = (q) => {
if (q) {
return (a) => (a.affine(q) == wBigInt.zero);
} else {
return (a) => a == wBigInt.zero;
}
};
// Other minor functions
wBigInt.prototype.isOdd = function() {
return (this & wBigInt.one) == wBigInt(1);
};
wBigInt.prototype.isNegative = function() {
return this < wBigInt.zero;
};
wBigInt.prototype.and = function(m) {
return this & m;
};
wBigInt.prototype.div = function(c) {
return this / c;
};
wBigInt.prototype.mod = function(c) {
return this % c;
};
wBigInt.prototype.pow = function(c) {
return this ** c;
};
wBigInt.prototype.abs = function() {
return (this > wBigInt.zero) ? this : -this;
};
wBigInt.prototype.modPow = function(e, m) {
let acc = wBigInt.one;
let exp = this;
let rem = e;
while (rem) {
if (rem & wBigInt.one) {
acc = (acc * exp) %m;
}
exp = (exp * exp) % m;
rem = rem >> wBigInt.one;
}
return acc;
};
wBigInt.prototype.greaterOrEquals = function(b) {
return this >= b;
};
wBigInt.prototype.greater = function(b) {
return this > b;
};
wBigInt.prototype.gt = wBigInt.prototype.greater;
wBigInt.prototype.lesserOrEquals = function(b) {
return this <= b;
};
wBigInt.prototype.lesser = function(b) {
return this < b;
};
wBigInt.prototype.lt = wBigInt.prototype.lesser;
wBigInt.prototype.equals = function(b) {
return this == b;
};
wBigInt.prototype.eq = wBigInt.prototype.equals;
wBigInt.prototype.neq = function(b) {
return this != b;
};
wBigInt.prototype.toJSNumber = function() {
return Number(this);
};
} else {
var oldProto = bigInt.prototype;
wBigInt = function(a) {
if ((typeof a == "string") && (a.slice(0,2) == "0x")) {
return bigInt(a.slice(2), 16);
} else {
return bigInt(a);
}
};
wBigInt.one = bigInt.one;
wBigInt.zero = bigInt.zero;
wBigInt.prototype = oldProto;
wBigInt.prototype.div = function(c) {
return this.divide(c);
};
// Affine
wBigInt.genAffine = (q) => {
const nq = wBigInt.zero.minus(q);
return (a) => {
let aux = a;
if (aux.isNegative()) {
if (aux.lesserOrEquals(nq)) {
aux = aux.mod(q);
}
if (aux.isNegative()) {
aux = aux.add(q);
}
} else {
if (aux.greaterOrEquals(q)) {
aux = aux.mod(q);
}
}
return aux;
};
};
// Inverse
wBigInt.genInverse = (q) => {
return (a) => a.affine(q).modInv(q);
};
// Add
wBigInt.genAdd = (q) => {
if (q) {
return (a,b) => {
const r = a.add(b);
return r.greaterOrEquals(q) ? r.minus(q) : r;
};
} else {
return (a,b) => a.add(b);
}
};
// Sub
wBigInt.genSub = (q) => {
if (q) {
return (a,b) => a.greaterOrEquals(b) ? a.minus(b) : a.minus(b).add(q);
} else {
return (a,b) => a.minus(b);
}
};
wBigInt.genNeg = (q) => {
if (q) {
return (a) => a.isZero() ? a : q.minus(a);
} else {
return (a) => wBigInt.zero.minus(a);
}
};
// Mul
wBigInt.genMul = (q) => {
if (q) {
return (a,b) => a.times(b).mod(q);
} else {
return (a,b) => a.times(b);
}
};
// Shr
wBigInt.genShr = () => {
return (a,b) => a.shiftRight(wBigInt(b).value);
};
// Shr
wBigInt.genShl = (q) => {
if (q) {
return (a,b) => a.shiftLeft(wBigInt(b).value).mod(q);
} else {
return (a,b) => a.shiftLeft(wBigInt(b).value);
}
};
// Square
wBigInt.genSquare = (q) => {
if (q) {
return (a) => a.square().mod(q);
} else {
return (a) => a.square();
}
};
// Double
wBigInt.genDouble = (q) => {
if (q) {
return (a) => a.add(a).mod(q);
} else {
return (a) => a.add(a);
}
};
// Equals
wBigInt.genEquals = (q) => {
if (q) {
return (a,b) => a.affine(q).equals(b.affine(q));
} else {
return (a,b) => a.equals(b);
}
};
// IsZero
wBigInt.genIsZero = (q) => {
if (q) {
return (a) => (a.affine(q).isZero());
} else {
return (a) => a.isZero();
}
};
}
wBigInt.affine = function(a, q) {
return wBigInt.genAffine(q)(a);
};
wBigInt.prototype.affine = function (q) {
return wBigInt.affine(this, q);
};
wBigInt.inverse = function(a, q) {
return wBigInt.genInverse(q)(a);
};
wBigInt.prototype.inverse = function (q) {
return wBigInt.genInverse(q)(this);
};
wBigInt.add = function(a, b, q) {
return wBigInt.genAdd(q)(a,b);
};
wBigInt.prototype.add = function (a, q) {
return wBigInt.genAdd(q)(this, a);
};
wBigInt.sub = function(a, b, q) {
return wBigInt.genSub(q)(a,b);
};
wBigInt.prototype.sub = function (a, q) {
return wBigInt.genSub(q)(this, a);
};
wBigInt.neg = function(a, q) {
return wBigInt.genNeg(q)(a);
};
wBigInt.prototype.neg = function (q) {
return wBigInt.genNeg(q)(this);
};
wBigInt.mul = function(a, b, q) {
return wBigInt.genMul(q)(a,b);
};
wBigInt.prototype.mul = function (a, q) {
return wBigInt.genMul(q)(this, a);
};
wBigInt.shr = function(a, b, q) {
return wBigInt.genShr(q)(a,b);
};
wBigInt.prototype.shr = function (a, q) {
return wBigInt.genShr(q)(this, a);
};
wBigInt.shl = function(a, b, q) {
return wBigInt.genShl(q)(a,b);
};
wBigInt.prototype.shl = function (a, q) {
return wBigInt.genShl(q)(this, a);
};
wBigInt.equals = function(a, b, q) {
return wBigInt.genEquals(q)(a,b);
};
wBigInt.prototype.equals = function (a, q) {
return wBigInt.genEquals(q)(this, a);
};
wBigInt.square = function(a, q) {
return wBigInt.genSquare(q)(a);
};
wBigInt.prototype.square = function (q) {
return wBigInt.genSquare(q)(this);
};
wBigInt.double = function(a, q) {
return wBigInt.genDouble(q)(a);
};
wBigInt.prototype.double = function (q) {
return wBigInt.genDouble(q)(this);
};
wBigInt.isZero = function(a, q) {
return wBigInt.genIsZero(q)(a);
};
wBigInt.prototype.isZero = function (q) {
return wBigInt.genIsZero(q)(this);
};
wBigInt.leBuff2int = function(buff) {
let res = wBigInt.zero;
for (let i=0; i<buff.length; i++) {
const n = wBigInt(buff[i]);
res = res.add(n.shl(i*8));
}
return res;
};
wBigInt.leInt2Buff = function(n, len) {
let r = n;
let o =0;
const buff = Buffer.alloc(len);
while ((r.greater(wBigInt.zero))&&(o<buff.length)) {
let c = Number(r.and(wBigInt("255")));
buff[o] = c;
o++;
r = r.shr(8);
}
if (r.greater(wBigInt.zero)) throw new Error("Number does not fit in buffer");
return buff;
};
wBigInt.prototype.leInt2Buff = function (len) {
return wBigInt.leInt2Buff(this,len);
};
wBigInt.beBuff2int = function(buff) {
let res = wBigInt.zero;
for (let i=0; i<buff.length; i++) {
const n = wBigInt(buff[buff.length - i - 1]);
res = res.add(n.shl(i*8));
}
return res;
};
wBigInt.beInt2Buff = function(n, len) {
let r = n;
let o =len-1;
const buff = Buffer.alloc(len);
while ((r.greater(wBigInt.zero))&&(o>=0)) {
let c = Number(r.and(wBigInt("255")));
buff[o] = c;
o--;
r = r.shr(8);
}
if (r.greater(wBigInt.zero)) throw new Error("Number does not fit in buffer");
return buff;
};
wBigInt.prototype.beInt2Buff = function (len) {
return wBigInt.beInt2Buff(this,len);
};
module.exports = wBigInt;
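A minimal usage sketch of the wrapper above, assuming it is saved as src/bigint.js; the values are illustrative, and the same calls work with either backend:
const bigInt = require("./bigint.js");

const q = bigInt("21888242871839275222246405745257275088696311157297823662689037894645226208583");

// Modular arithmetic: the optional trailing argument selects the modulus.
const a = bigInt("123456789");
const b = bigInt(7);
const sum = bigInt.add(a, b, q);      // (a + b) mod q
const prod = a.mul(b, q);             // (a * b) mod q

// Little-endian serialization round trip.
const buff = prod.leInt2Buff(32);     // 32-byte little-endian Buffer
const back = bigInt.leBuff2int(buff);
console.log(back.equals(prod));       // should print true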

445
src/bn128.js Normal file

@ -0,0 +1,445 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const bigInt = require("./bigint.js");
const F1Field = require("./zqfield.js");
const F2Field = require("./f2field.js");
const F3Field = require("./f3field.js");
const GCurve = require("./gcurve.js");
class BN128 {
constructor() {
this.q = bigInt("21888242871839275222246405745257275088696311157297823662689037894645226208583");
this.r = bigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617");
this.g1 = [ bigInt(1), bigInt(2), bigInt(1)];
this.g2 = [
[
bigInt("10857046999023057135944570762232829481370756359578518086990519993285655852781"),
bigInt("11559732032986387107991004021392285783925812861821192530917403151452391805634")
],
[
bigInt("8495653923123431417604973247489272438418190587263600148770280649306958101930"),
bigInt("4082367875863433681332203403145435568316851327593401208105741076214120093531")
],
[
bigInt("1"),
bigInt("0")
]
];
this.nonResidueF2 = bigInt("21888242871839275222246405745257275088696311157297823662689037894645226208582");
this.nonResidueF6 = [ bigInt("9"), bigInt("1") ];
this.F1 = new F1Field(this.q);
this.F2 = new F2Field(this.F1, this.nonResidueF2);
this.G1 = new GCurve(this.F1, this.g1);
this.G2 = new GCurve(this.F2, this.g2);
this.F6 = new F3Field(this.F2, this.nonResidueF6);
this.F12 = new F2Field(this.F6, this.nonResidueF6);
this.Fr = new F1Field(this.r);
const self = this;
this.F12._mulByNonResidue = function(a) {
return [self.F2.mul(this.nonResidue, a[2]), a[0], a[1]];
};
this._preparePairing();
}
_preparePairing() {
this.loopCount = bigInt("29793968203157093288");// CONSTANT
// Set loopCountNeg
if (this.loopCount.isNegative()) {
this.loopCount = this.loopCount.neg();
this.loopCountNeg = true;
} else {
this.loopCountNeg = false;
}
// Set loop_count_bits
let lc = this.loopCount;
this.loop_count_bits = []; // Constant
while (!lc.isZero()) {
this.loop_count_bits.push( lc.isOdd() );
lc = lc.shr(1);
}
this.two_inv = this.F1.inverse(bigInt(2));
this.coef_b = bigInt(3);
this.twist = [bigInt(9) , bigInt(1)];
this.twist_coeff_b = this.F2.mulScalar( this.F2.inverse(this.twist), this.coef_b );
this.frobenius_coeffs_c1_1 = bigInt("21888242871839275222246405745257275088696311157297823662689037894645226208582");
this.twist_mul_by_q_X =
[
bigInt("21575463638280843010398324269430826099269044274347216827212613867836435027261"),
bigInt("10307601595873709700152284273816112264069230130616436755625194854815875713954")
];
this.twist_mul_by_q_Y =
[
bigInt("2821565182194536844548159561693502659359617185244120367078079554186484126554"),
bigInt("3505843767911556378687030309984248845540243509899259641013678093033130930403")
];
this.final_exponent = bigInt("552484233613224096312617126783173147097382103762957654188882734314196910839907541213974502761540629817009608548654680343627701153829446747810907373256841551006201639677726139946029199968412598804882391702273019083653272047566316584365559776493027495458238373902875937659943504873220554161550525926302303331747463515644711876653177129578303191095900909191624817826566688241804408081892785725967931714097716709526092261278071952560171111444072049229123565057483750161460024353346284167282452756217662335528813519139808291170539072125381230815729071544861602750936964829313608137325426383735122175229541155376346436093930287402089517426973178917569713384748081827255472576937471496195752727188261435633271238710131736096299798168852925540549342330775279877006784354801422249722573783561685179618816480037695005515426162362431072245638324744480");
}
pairing(p1, p2) {
const pre1 = this.precomputeG1(p1);
const pre2 = this.precomputeG2(p2);
const r1 = this.millerLoop(pre1, pre2);
const res = this.finalExponentiation(r1);
return res;
}
precomputeG1(p) {
const Pcopy = this.G1.affine(p);
const res = {};
res.PX = Pcopy[0];
res.PY = Pcopy[1];
return res;
}
precomputeG2(p) {
const Qcopy = this.G2.affine(p);
const res = {
QX: Qcopy[0],
QY: Qcopy[1],
coeffs: []
};
const R = {
X: Qcopy[0],
Y: Qcopy[1],
Z: this.F2.one
};
let c;
for (let i = this.loop_count_bits.length-2; i >= 0; --i)
{
const bit = this.loop_count_bits[i];
c = this._doubleStep(R);
res.coeffs.push(c);
if (bit)
{
c = this._addStep(Qcopy, R);
res.coeffs.push(c);
}
}
const Q1 = this.G2.affine(this._g2MulByQ(Qcopy));
if (!this.F2.equals(Q1[2], this.F2.one))
{
throw new Error("Expected values are not equal");
}
const Q2 = this.G2.affine(this._g2MulByQ(Q1));
if (!this.F2.equals(Q2[2], this.F2.one))
{
throw new Error("Expected values are not equal");
}
if (this.loopCountNeg)
{
R.Y = this.F2.neg(R.Y);
}
Q2[1] = this.F2.neg(Q2[1]);
c = this._addStep(Q1, R);
res.coeffs.push(c);
c = this._addStep(Q2, R);
res.coeffs.push(c);
return res;
}
millerLoop(pre1, pre2) {
let f = this.F12.one;
let idx = 0;
let c;
for (let i = this.loop_count_bits.length-2; i >= 0; --i)
{
const bit = this.loop_count_bits[i];
/* code below gets executed for all bits (EXCEPT the MSB itself) of
alt_bn128_param_p (skipping leading zeros) in MSB to LSB
order */
c = pre2.coeffs[idx++];
f = this.F12.square(f);
f = this._mul_by_024(
f,
c.ell_0,
this.F2.mulScalar(c.ell_VW , pre1.PY),
this.F2.mulScalar(c.ell_VV , pre1.PX));
if (bit)
{
c = pre2.coeffs[idx++];
f = this._mul_by_024(
f,
c.ell_0,
this.F2.mulScalar(c.ell_VW, pre1.PY),
this.F2.mulScalar(c.ell_VV, pre1.PX));
}
}
if (this.loopCountNeg)
{
f = this.F12.inverse(f);
}
c = pre2.coeffs[idx++];
f = this._mul_by_024(
f,
c.ell_0,
this.F2.mulScalar(c.ell_VW, pre1.PY),
this.F2.mulScalar(c.ell_VV, pre1.PX));
c = pre2.coeffs[idx++];
f = this._mul_by_024(
f,
c.ell_0,
this.F2.mulScalar(c.ell_VW, pre1.PY),
this.F2.mulScalar(c.ell_VV, pre1.PX));
return f;
}
finalExponentiation(elt) {
// TODO: There is an optimization in FF
const res = this.F12.exp(elt,this.final_exponent);
return res;
}
_doubleStep(current) {
const X = current.X;
const Y = current.Y;
const Z = current.Z;
const A = this.F2.mulScalar(this.F2.mul(X,Y), this.two_inv); // A = X1 * Y1 / 2
const B = this.F2.square(Y); // B = Y1^2
const C = this.F2.square(Z); // C = Z1^2
const D = this.F2.add(C, this.F2.add(C,C)); // D = 3 * C
const E = this.F2.mul(this.twist_coeff_b, D); // E = twist_b * D
const F = this.F2.add(E, this.F2.add(E,E)); // F = 3 * E
const G =
this.F2.mulScalar(
this.F2.add( B , F ),
this.two_inv); // G = (B+F)/2
const H =
this.F2.sub(
this.F2.square( this.F2.add(Y,Z) ),
this.F2.add( B , C)); // H = (Y1+Z1)^2-(B+C)
const I = this.F2.sub(E, B); // I = E-B
const J = this.F2.square(X); // J = X1^2
const E_squared = this.F2.square(E); // E_squared = E^2
current.X = this.F2.mul( A, this.F2.sub(B,F) ); // X3 = A * (B-F)
current.Y =
this.F2.sub(
this.F2.sub( this.F2.square(G) , E_squared ),
this.F2.add( E_squared , E_squared )); // Y3 = G^2 - 3*E^2
current.Z = this.F2.mul( B, H ); // Z3 = B * H
const c = {
ell_0 : this.F2.mul( I, this.twist), // ell_0 = xi * I
ell_VW: this.F2.neg( H ), // ell_VW = - H (later: * yP)
ell_VV: this.F2.add( J , this.F2.add(J,J) ) // ell_VV = 3*J (later: * xP)
};
return c;
}
_addStep(base, current) {
const X1 = current.X;
const Y1 = current.Y;
const Z1 = current.Z;
const x2 = base[0];
const y2 = base[1];
const D = this.F2.sub( X1, this.F2.mul(x2,Z1) ); // D = X1 - X2*Z1
// console.log("Y: "+ A[0].affine(this.q).toString(16));
const E = this.F2.sub( Y1, this.F2.mul(y2,Z1) ); // E = Y1 - Y2*Z1
const F = this.F2.square(D); // F = D^2
const G = this.F2.square(E); // G = E^2
const H = this.F2.mul(D,F); // H = D*F
const I = this.F2.mul(X1,F); // I = X1 * F
const J =
this.F2.sub(
this.F2.add( H, this.F2.mul(Z1,G) ),
this.F2.add( I, I )); // J = H + Z1*G - (I+I)
current.X = this.F2.mul( D , J ); // X3 = D*J
current.Y =
this.F2.sub(
this.F2.mul( E , this.F2.sub(I,J) ),
this.F2.mul( H , Y1)); // Y3 = E*(I-J)-(H*Y1)
current.Z = this.F2.mul(Z1,H);
const c = {
ell_0 :
this.F2.mul(
this.twist,
this.F2.sub(
this.F2.mul(E , x2),
this.F2.mul(D , y2))), // ell_0 = xi * (E * X2 - D * Y2)
ell_VV : this.F2.neg(E), // ell_VV = - E (later: * xP)
ell_VW : D // ell_VW = D (later: * yP )
};
return c;
}
_mul_by_024(a, ell_0, ell_VW, ell_VV) {
// Old implementation
/*
const b = [
[ell_0, this.F2.zero, ell_VV],
[this.F2.zero, ell_VW, this.F2.zero]
];
return this.F12.mul(a,b);
*/
// This is a newer implementation,
// but it does not seem worthwhile,
// at least in JavaScript.
let z0 = a[0][0];
let z1 = a[0][1];
let z2 = a[0][2];
let z3 = a[1][0];
let z4 = a[1][1];
let z5 = a[1][2];
const x0 = ell_0;
const x2 = ell_VV;
const x4 = ell_VW;
const D0 = this.F2.mul(z0, x0);
const D2 = this.F2.mul(z2, x2);
const D4 = this.F2.mul(z4, x4);
const t2 = this.F2.add(z0, z4);
let t1 = this.F2.add(z0, z2);
const s0 = this.F2.add(this.F2.add(z1,z3),z5);
// For z.a_.a_ = z0.
let S1 = this.F2.mul(z1, x2);
let T3 = this.F2.add(S1, D4);
let T4 = this.F2.add( this.F2.mul(this.nonResidueF6, T3),D0);
z0 = T4;
// For z.a_.b_ = z1
T3 = this.F2.mul(z5, x4);
S1 = this.F2.add(S1, T3);
T3 = this.F2.add(T3, D2);
T4 = this.F2.mul(this.nonResidueF6, T3);
T3 = this.F2.mul(z1, x0);
S1 = this.F2.add(S1, T3);
T4 = this.F2.add(T4, T3);
z1 = T4;
// For z.a_.c_ = z2
let t0 = this.F2.add(x0, x2);
T3 = this.F2.sub(
this.F2.mul(t1, t0),
this.F2.add(D0, D2));
T4 = this.F2.mul(z3, x4);
S1 = this.F2.add(S1, T4);
// For z.b_.a_ = z3 (z3 needs z2)
t0 = this.F2.add(z2, z4);
z2 = this.F2.add(T3, T4);
t1 = this.F2.add(x2, x4);
T3 = this.F2.sub(
this.F2.mul(t0,t1),
this.F2.add(D2, D4));
T4 = this.F2.mul(this.nonResidueF6, T3);
T3 = this.F2.mul(z3, x0);
S1 = this.F2.add(S1, T3);
T4 = this.F2.add(T4, T3);
z3 = T4;
// For z.b_.b_ = z4
T3 = this.F2.mul(z5, x2);
S1 = this.F2.add(S1, T3);
T4 = this.F2.mul(this.nonResidueF6, T3);
t0 = this.F2.add(x0, x4);
T3 = this.F2.sub(
this.F2.mul(t2,t0),
this.F2.add(D0, D4));
T4 = this.F2.add(T4, T3);
z4 = T4;
// For z.b_.c_ = z5.
t0 = this.F2.add(this.F2.add(x0, x2), x4);
T3 = this.F2.sub(this.F2.mul(s0, t0), S1);
z5 = T3;
return [
[z0, z1, z2],
[z3, z4, z5]
];
}
_g2MulByQ(p) {
const fmx = [p[0][0], this.F1.mul(p[0][1], this.frobenius_coeffs_c1_1 )];
const fmy = [p[1][0], this.F1.mul(p[1][1], this.frobenius_coeffs_c1_1 )];
const fmz = [p[2][0], this.F1.mul(p[2][1], this.frobenius_coeffs_c1_1 )];
return [
this.F2.mul(this.twist_mul_by_q_X , fmx),
this.F2.mul(this.twist_mul_by_q_Y , fmy),
fmz
];
}
}
module.exports = BN128;
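A short sketch of exercising the pairing above, using only methods defined in this file and its dependencies. Bilinearity means e(2*g1, g2) should equal e(g1, g2)^2, although the pure JavaScript final exponentiation makes this slow:
const BN128 = require("./bn128.js");
const bigInt = require("./bigint.js");

const bn128 = new BN128();
const g1 = bn128.G1.g;   // G1 generator
const g2 = bn128.G2.g;   // G2 generator

// e(2*g1, g2) and e(g1, g2)^2 should be the same F12 element.
const left = bn128.pairing(bn128.G1.mulScalar(g1, bigInt(2)), g2);
const right = bn128.F12.exp(bn128.pairing(g1, g2), bigInt(2));
console.log(bn128.F12.equals(left, right));   // should print true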

233
src/calculateWitness.js Normal file

@ -0,0 +1,233 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const bigInt = require("./bigint");
module.exports = calculateWitness;
function calculateWitness(circuit, inputSignals, options) {
options = options || {};
if (!options.logFunction) options.logFunction = console.log;
const ctx = new RTCtx(circuit, options);
function iterateSelector(values, sels, cb) {
if (!Array.isArray(values)) {
return cb(sels, values);
}
for (let i=0; i<values.length; i++) {
sels.push(i);
iterateSelector(values[i], sels, cb);
sels.pop();
}
}
ctx.setSignal("one", [], bigInt(1));
for (let c in ctx.notInitSignals) {
if (ctx.notInitSignals[c] == 0) ctx.triggerComponent(c);
}
for (let s in inputSignals) {
ctx.currentComponent = "main";
iterateSelector(inputSignals[s], [], function(selector, value) {
if (typeof(value) == "undefined") throw new Error("Signal not defined: " + s);
ctx.setSignal(s, selector, bigInt(value));
});
}
for (let i=0; i<circuit.nInputs; i++) {
const idx = circuit.inputIdx(i);
if (typeof(ctx.witness[idx]) == "undefined") {
throw new Error("Input Signal not assigned: " + circuit.signalNames(idx));
}
}
for (let i=0; i<ctx.witness.length; i++) {
if (typeof(ctx.witness[i]) == "undefined") {
throw new Error("Signal not assigned: " + circuit.signalNames(i));
}
if (options.logOutput) options.logFunction(circuit.signalNames(i) + " --> " + ctx.witness[i].toString());
}
return ctx.witness.slice(0, circuit.nVars);
// return ctx.witness;
}
class RTCtx {
constructor(circuit, options) {
this.options = options;
this.scopes = [];
this.circuit = circuit;
this.witness = new Array(circuit.nSignals);
this.notInitSignals = {};
for (let c in this.circuit.components) {
this.notInitSignals[c] = this.circuit.components[c].inputSignals;
}
}
_sels2str(sels) {
let res = "";
for (let i=0; i<sels.length; i++) {
res += `[${sels[i]}]`;
}
return res;
}
setPin(componentName, componentSels, signalName, signalSels, value) {
let fullName = componentName=="one" ? "one" : this.currentComponent + "." + componentName;
fullName += this._sels2str(componentSels) +
"."+
signalName+
this._sels2str(signalSels);
this.setSignalFullName(fullName, value);
}
setSignal(name, sels, value) {
let fullName = this.currentComponent ? this.currentComponent + "." + name : name;
fullName += this._sels2str(sels);
this.setSignalFullName(fullName, value);
}
triggerComponent(c) {
if (this.options.logTrigger) this.options.logFunction("Component triggered: " + this.circuit.components[c].name);
// Set notInitSignals to -1 to not initialize again
this.notInitSignals[c] --;
const oldComponent = this.currentComponent;
this.currentComponent = this.circuit.components[c].name;
const template = this.circuit.components[c].template;
const newScope = {};
for (let p in this.circuit.components[c].params) {
newScope[p] = this.circuit.components[c].params[p];
}
const oldScope = this.scopes;
this.scopes = [ this.scopes[0], newScope ];
// TODO set params.
this.circuit.templates[template](this);
this.scopes = oldScope;
this.currentComponent = oldComponent;
if (this.options.logTrigger) this.options.logFunction("End component triggered: " + this.circuit.components[c].name);
}
callFunction(functionName, params) {
const newScope = {};
for (let p=0; p<this.circuit.functions[functionName].params.length; p++) {
const paramName = this.circuit.functions[functionName].params[p];
newScope[paramName] = params[p];
}
const oldScope = this.scopes;
this.scopes = [ this.scopes[0], newScope ];
// TODO set params.
const res = this.circuit.functions[functionName].func(this);
this.scopes = oldScope;
return res;
}
setSignalFullName(fullName, value) {
if (this.options.logSet) this.options.logFunction("set " + fullName + " <-- " + value.toString());
const sId = this.circuit.getSignalIdx(fullName);
let firstInit =false;
if (typeof(this.witness[sId]) == "undefined") {
firstInit = true;
}
this.witness[sId] = bigInt(value);
const callComponents = [];
for (let i=0; i<this.circuit.signals[sId].triggerComponents.length; i++) {
var idCmp = this.circuit.signals[sId].triggerComponents[i];
if (firstInit) this.notInitSignals[idCmp] --;
callComponents.push(idCmp);
}
callComponents.map( (c) => {
if (this.notInitSignals[c] == 0) this.triggerComponent(c);
});
return this.witness[sId];
}
setVar(name, sels, value) {
function setVarArray(a, sels2, value) {
if (sels2.length == 1) {
a[sels2[0]] = value;
} else {
if (typeof(a[sels2[0]]) == "undefined") a[sels2[0]] = [];
setVarArray(a[sels2[0]], sels2.slice(1), value);
}
}
const scope = this.scopes[this.scopes.length-1];
if (sels.length == 0) {
scope[name] = value;
} else {
if (typeof(scope[name]) == "undefined") scope[name] = [];
setVarArray(scope[name], sels, value);
}
return value;
}
getVar(name, sels) {
function select(a, sels2) {
return (sels2.length == 0) ? a : select(a[sels2[0]], sels2.slice(1));
}
for (let i=this.scopes.length-1; i>=0; i--) {
if (typeof(this.scopes[i][name]) != "undefined") return select(this.scopes[i][name], sels);
}
throw new Error("Variable not defined: " + name);
}
getSignal(name, sels) {
let fullName = name=="one" ? "one" : this.currentComponent + "." + name;
fullName += this._sels2str(sels);
return this.getSignalFullName(fullName);
}
getPin(componentName, componentSels, signalName, signalSels) {
let fullName = componentName=="one" ? "one" : this.currentComponent + "." + componentName;
fullName += this._sels2str(componentSels) +
"."+
signalName+
this._sels2str(signalSels);
return this.getSignalFullName(fullName);
}
getSignalFullName(fullName) {
const sId = this.circuit.getSignalIdx(fullName);
if (typeof(this.witness[sId]) == "undefined") {
throw new Error("Signal not initialized: "+fullName);
}
if (this.options.logGet) this.options.logFunction("get --->" + fullName + " = " + this.witness[sId].toString() );
return this.witness[sId];
}
assert(a,b,errStr) {
const ba = bigInt(a);
const bb = bigInt(b);
if (!ba.equals(bb)) {
throw new Error("Constraint doesn't match "+ this.currentComponent+": "+ errStr + " -> "+ ba.toString() + " != " + bb.toString());
}
}
}

191
src/circuit.js Normal file

@ -0,0 +1,191 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const bigInt = require("./bigint.js");
const __P__ = bigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617");
const __MASK__ = bigInt("28948022309329048855892746252171976963317496166410141009864396001978282409983"); // 0x3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF
const calculateWitness = require("./calculateWitness.js");
module.exports = class Circuit {
constructor(circuitDef) {
this.nPubInputs = circuitDef.nPubInputs;
this.nPrvInputs = circuitDef.nPrvInputs;
this.nInputs = circuitDef.nInputs;
this.nOutputs = circuitDef.nOutputs;
this.nVars = circuitDef.nVars;
this.nSignals = circuitDef.nSignals;
this.nConstants = circuitDef.nConstants;
this.nConstraints = circuitDef.constraints.length;
this.signalName2Idx = circuitDef.signalName2Idx;
this.components = circuitDef.components;
this.componentName2Idx = circuitDef.componentName2Idx;
this.signals = circuitDef.signals;
this.constraints = circuitDef.constraints;
this.templates = {};
for (let t in circuitDef.templates) {
this.templates[t] = eval(" const __f= " +circuitDef.templates[t] + "\n__f");
}
this.functions = {};
for (let f in circuitDef.functions) {
this.functions[f] = {
params: circuitDef.functions[f].params,
func: eval(" const __f= " +circuitDef.functions[f].func + "\n__f;")
};
}
}
calculateWitness(input, log) {
return calculateWitness(this, input, log);
}
checkWitness(w) {
const evalLC = (lc, w) => {
let acc = bigInt(0);
for (let k in lc) {
acc= acc.add(bigInt(w[k]).mul(bigInt(lc[k]))).mod(__P__);
}
return acc;
};
const checkConstraint = (ct, w) => {
const a=evalLC(ct[0],w);
const b=evalLC(ct[1],w);
const c=evalLC(ct[2],w);
const res = (a.mul(b).sub(c)).affine(__P__);
if (!res.isZero()) return false;
return true;
};
for (let i=0; i<this.constraints.length; i++) {
if (!checkConstraint(this.constraints[i], w)) {
this.printCostraint(this.constraints[i]);
return false;
}
}
return true;
}
printCostraint(c) {
const lc2str = (lc) => {
let S = "";
for (let k in lc) {
let name = this.signals[k].names[0];
if (name == "one") name = "";
let v = bigInt(lc[k]);
let vs;
if (!v.lesserOrEquals(__P__.shr(bigInt(1)))) {
v = __P__.sub(v);
vs = "-"+v.toString();
} else {
if (S!="") {
vs = "+";
} else {
vs = "";
}
if (v.toString() != "1") {
vs = vs + v.toString();
}
}
S= S + " " + vs + name;
}
return S;
};
const S = `[ ${lc2str(c[0])} ] * [ ${lc2str(c[1])} ] - [ ${lc2str(c[2])} ] = 0`;
console.log(S);
}
printConstraints() {
for (let i=0; i<this.constraints.length; i++) {
this.printCostraint(this.constraints[i]);
}
}
getSignalIdx(name) {
if (typeof(this.signalName2Idx[name]) != "undefined") return this.signalName2Idx[name];
if (!isNaN(name)) return Number(name);
throw new Error("Invalid signal identifier: "+ name);
}
// returns the index of the i'th output
outputIdx(i) {
if (i>=this.nOutputs) throw new Error("Accessing an invalid output: "+i);
return i+1;
}
// returns the index of the i'th input
inputIdx(i) {
if (i>=this.nInputs) throw new Error("Accessing an invalid input: "+i);
return this.nOutputs + 1 + i;
}
// returns the index of the i'th public input
pubInputIdx(i) {
if (i>=this.nPubInputs) throw new Error("Accessing an invalid pubInput: "+i);
return this.inputIdx(i);
}
// returns the index of the i'th private input
prvInputIdx(i) {
if (i>=this.nPrvInputs) throw new Error("Accessing an invalid prvInput: "+i);
return this.inputIdx(this.nPubInputs + i);
}
// returns the index of the i'th variable
varIdx(i) {
if (i>=this.nVars) throw new Error("Accessing an invalid variable: "+i);
return i;
}
// returns the index of the i'th constant
constantIdx(i) {
if (i>=this.nConstants) throw new Error("Accessing an invalid constant: "+i);
return this.nVars + i;
}
// returns the index of the i'th signal
signalIdx(i) {
if (i>=this.nSignals) throw new Error("Accessing an invalid signal: "+i);
return i;
}
signalNames(i) {
return this.signals[ this.getSignalIdx(i) ].names.join(", ");
}
a(constraint, signalIdx) {
return bigInt(this.constraints[constraint][0][signalIdx] || 0 );
}
b(constraint, signalIdx) {
return bigInt(this.constraints[constraint][1][signalIdx] || 0);
}
c(constraint, signalIdx) {
return bigInt(this.constraints[constraint][2][signalIdx] || 0);
}
};
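A sketch of the typical flow through this class together with calculateWitness.js. The circuit.json file name and the a/b input signals are illustrative; the JSON must be a compiled circuit description with the fields read in the constructor above:
const fs = require("fs");
const Circuit = require("./circuit.js");

const circuitDef = JSON.parse(fs.readFileSync("circuit.json", "utf8"));
const circuit = new Circuit(circuitDef);

// The second argument is the options object consumed by calculateWitness:
// logFunction, logOutput, logSet, logGet and logTrigger control tracing.
const witness = circuit.calculateWitness({ a: 3, b: 11 }, { logOutput: false });

console.log("constraints satisfied:", circuit.checkWitness(witness));
console.log("first output:", witness[circuit.outputIdx(0)].toString());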


@ -1,282 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
/*
import pkg from "../package.json";
const version = pkg.version;
*/
import path from "path";
import fs from "fs";
const __dirname = path.dirname(new URL(import.meta.url).pathname);
let pkgS;
try {
pkgS = fs.readFileSync(path.join(__dirname, "package.json"));
} catch (err) {
pkgS = fs.readFileSync(path.join(__dirname, "..","package.json"));
}
const pkg = JSON.parse(pkgS);
const version = pkg.version;
let selectedCommand = null;
export default async function clProcessor(commands) {
const cl = [];
const argv = {};
for (let i=2; i<process.argv.length; i++) {
if (process.argv[i][0] == "-") {
let S = process.argv[i];
while (S[0] == "-") S = S.slice(1);
const arr = S.split("=");
if (arr.length > 1) {
argv[arr[0]] = arr.slice(1).join("=");
} else {
argv[arr[0]] = true;
}
} else {
cl.push(process.argv[i]);
}
}
for (let i=0; i<commands.length; i++) {
const cmd = commands[i];
const m = calculateMatch(commands[i], cl);
if (m) {
if ((argv.h) || (argv.help)) {
helpCmd(cmd);
return;
}
if (areParamsValid(cmd.cmd, m)) {
if (cmd.options) {
const options = getOptions(cmd.options);
await cmd.action(m, options);
} else {
await cmd.action(m, {});
}
} else {
if (m.length>0) console.log("Invalid number of parameters");
helpCmd(cmd);
return 99;
}
return;
}
}
if (cl.length>0) console.log("Invalid command");
helpAll();
return 99;
function calculateMatch(cmd, cl) {
const alias = [];
const m = parseLine(cmd.cmd);
alias.push(m);
if (cmd.alias) {
if (Array.isArray(cmd.alias)) {
for (let i=0; i<cmd.alias.length; i++) {
const a = parseLine(cmd.alias[i]);
alias.push({
cmd: a.cmd,
params: m.params
});
}
} else {
const a = parseLine(cmd.alias);
alias.push({
cmd: a.cmd,
params: m.params
});
}
}
for (let i=0; i<cl.length; i++) {
for (let j=0; j<alias.length; j++) {
const w = alias[j].cmd.shift();
if (cl[i].toUpperCase() == w.toUpperCase()) {
if (alias[j].cmd.length == 0) {
return buildRemaining(alias[j].params, cl.slice(i+1));
}
} else {
alias.splice(j, 1);
j--;
}
}
}
return null;
function buildRemaining(defParams, cl) {
const res = [];
let p=0;
for (let i=0; i<defParams.length; i++) {
if (defParams[i][0]=="-") {
res.push(getOption(defParams[i]).val);
} else {
if (p<cl.length) {
res.push(cl[p++]);
} else {
res.push(null);
}
}
}
while (p<cl.length) {
res.push(cl[p++]);
}
return res;
}
}
function parseLine(l) {
const words = l.match(/(\S+)/g);
for (let i=0; i<words.length; i++) {
if ( (words[i][0] == "<")
|| (words[i][0] == "[")
|| (words[i][0] == "-"))
{
return {
cmd: words.slice(0,i),
params: words.slice(i)
};
}
}
return {
cmd: words,
params: []
};
}
function getOption(o) {
const arr1 = o.slice(1).split(":");
const arr2 = arr1[0].split("|");
for (let i = 0; i<arr2.length; i++) {
if (argv[arr2[i]]) return {
key: arr2[0],
val: argv[arr2[i]]
};
}
return {
key: arr2[0],
val: (arr1.length >1) ? arr1[1] : null
};
}
function areParamsValid(cmd, params) {
while ((params.length)&&(!params[params.length-1])) params.pop();
const pl = parseLine(cmd);
if (params.length > pl.params.length) return false;
let minParams = pl.params.length;
while ((minParams>0)&&(pl.params[minParams-1][0] == "[")) minParams --;
if (params.length < minParams) return false;
for (let i=0; (i< pl.params.length)&&(pl.params[i][0]=="<"); i++) {
if (typeof params[i] == "undefined") return false;
}
return true;
}
function getOptions(options) {
const res = {};
const opts = options.match(/(\S+)/g);
for (let i=0; i<opts.length; i++) {
const o = getOption(opts[i]);
res[o.key] = o.val;
}
return res;
}
function printVersion() {
console.log("snarkjs@"+version);
}
function epilog() {
console.log(` Copyright (C) 2018 0kims association
This program comes with ABSOLUTELY NO WARRANTY;
This is free software, and you are welcome to redistribute it
under certain conditions; see the COPYING file in the official
repo directory at https://github.com/iden3/snarkjs `);
}
function helpAll() {
printVersion();
epilog();
console.log("");
console.log("Usage:");
console.log(" snarkjs <full command> ... <options>");
console.log("        or  snarkjs <shortcut> ... <options>");
console.log("");
console.log("Type snarkjs <command> --help to get more information for that command");
console.log("");
console.log("Full Command Description");
console.log("============ =================");
for (let i=0; i<commands.length; i++) {
const cmd = commands[i];
let S = "";
const pl = parseLine(cmd.cmd);
S += pl.cmd.join(" ");
while (S.length<30) S = S+" ";
S += cmd.description;
console.log(S);
S = " Usage: snarkjs ";
if (cmd.alias) {
if (Array.isArray(cmd.alias)) {
S += cmd.alias[0];
} else {
S += cmd.alias;
}
} else {
S += pl.cmd.join(" ");
}
S += " " + pl.params.join(" ");
console.log(S);
}
}
function helpCmd(cmd) {
if (typeof cmd == "undefined") cmd = selectedCommand;
if (typeof cmd == "undefined") return helpAll();
printVersion();
epilog();
console.log("");
if (cmd.longDescription) {
console.log(cmd.longDescription);
} else {
console.log(cmd.description);
}
console.log("Usage: ");
console.log(" snarkjs "+ cmd.cmd);
const pl = parseLine(cmd.cmd);
let S = " or snarkjs ";
if (cmd.alias) {
if (Array.isArray(cmd.alias)) {
S += cmd.alias[0];
} else {
S += cmd.alias;
}
} else {
S += pl.cmd.join(" ");
}
S += " " + pl.params.join(" ");
console.log(S);
console.log("");
}
}
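An illustrative commands table for the processor above; the command name, alias and action body are made up for the example, but the cmd/params/options conventions follow the parsing code. The module file name clprocessor.js is assumed:
import clProcessor from "./clprocessor.js";

const commands = [
    {
        cmd: "r1cs info [circuit.r1cs]",
        description: "Print statistics of a circuit",
        alias: "ri",
        options: "-verbose|v",
        action: async (params, options) => {
            const r1csName = params[0] || "circuit.r1cs";
            if (options.verbose) console.log("reading " + r1csName);
            // real work would go here
            return 0;
        }
    }
];

clProcessor(commands).then((code) => process.exit(code || 0));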


@ -1,50 +0,0 @@
import { Scalar, buildBn128, buildBls12381} from "ffjavascript";
const bls12381r = Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
const bn128r = Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");
const bls12381q = Scalar.e("1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab", 16);
const bn128q = Scalar.e("21888242871839275222246405745257275088696311157297823662689037894645226208583");
export async function getCurveFromR(r) {
let curve;
if (Scalar.eq(r, bn128r)) {
curve = await buildBn128();
} else if (Scalar.eq(r, bls12381r)) {
curve = await buildBls12381();
} else {
throw new Error(`Curve not supported: ${Scalar.toString(r)}`);
}
return curve;
}
export async function getCurveFromQ(q) {
let curve;
if (Scalar.eq(q, bn128q)) {
curve = await buildBn128();
} else if (Scalar.eq(q, bls12381q)) {
curve = await buildBls12381();
} else {
throw new Error(`Curve not supported: ${Scalar.toString(q)}`);
}
return curve;
}
export async function getCurveFromName(name) {
let curve;
const normName = normalizeName(name);
if (["BN128", "BN254", "ALTBN128"].indexOf(normName) >= 0) {
curve = await buildBn128();
} else if (["BLS12381"].indexOf(normName) >= 0) {
curve = await buildBls12381();
} else {
throw new Error(`Curve not supported: ${name}`);
}
return curve;
function normalizeName(n) {
return n.toUpperCase().match(/[A-Za-z0-9]+/g).join("");
}
}
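Example of selecting a curve by name; normalizeName above makes aliases such as BN254 or alt_bn128 resolve to the same bn128 build:
import { getCurveFromName } from "./curves.js";

async function main() {
    const curve = await getCurveFromName("BN254");   // resolves to bn128
    console.log(curve.name, curve.Fr.n8);            // curve id and scalar size in bytes
    await curve.terminate();                         // release the ffjavascript worker threads
}
main();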

137
src/f2field.js Normal file

@ -0,0 +1,137 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const fUtils = require("./futils.js");
class F2Field {
constructor(F, nonResidue) {
this.F = F;
this.zero = [this.F.zero, this.F.zero];
this.one = [this.F.one, this.F.zero];
this.nonResidue = nonResidue;
}
_mulByNonResidue(a) {
return this.F.mul(this.nonResidue, a);
}
copy(a) {
return [this.F.copy(a[0]), this.F.copy(a[1])];
}
add(a, b) {
return [
this.F.add(a[0], b[0]),
this.F.add(a[1], b[1])
];
}
double(a) {
return this.add(a,a);
}
sub(a, b) {
return [
this.F.sub(a[0], b[0]),
this.F.sub(a[1], b[1])
];
}
neg(a) {
return this.sub(this.zero, a);
}
mul(a, b) {
const aA = this.F.mul(a[0] , b[0]);
const bB = this.F.mul(a[1] , b[1]);
return [
this.F.add( aA , this._mulByNonResidue(bB)),
this.F.sub(
this.F.mul(
this.F.add(a[0], a[1]),
this.F.add(b[0], b[1])),
this.F.add(aA, bB))];
}
inverse(a) {
const t0 = this.F.square(a[0]);
const t1 = this.F.square(a[1]);
const t2 = this.F.sub(t0, this._mulByNonResidue(t1));
const t3 = this.F.inverse(t2);
return [
this.F.mul(a[0], t3),
this.F.neg(this.F.mul( a[1], t3)) ];
}
div(a, b) {
return this.mul(a, this.inverse(b));
}
square(a) {
const ab = this.F.mul(a[0] , a[1]);
/*
[
(a + b) * (a + non_residue * b) - ab - non_residue * ab,
ab + ab
];
*/
return [
this.F.sub(
this.F.mul(
this.F.add(a[0], a[1]) ,
this.F.add(
a[0] ,
this._mulByNonResidue(a[1]))),
this.F.add(
ab,
this._mulByNonResidue(ab))),
this.F.add(ab, ab)
];
}
isZero(a) {
return this.F.isZero(a[0]) && this.F.isZero(a[1]);
}
equals(a, b) {
return this.F.equals(a[0], b[0]) && this.F.equals(a[1], b[1]);
}
affine(a) {
return [this.F.affine(a[0]), this.F.affine(a[1])];
}
mulScalar(base, e) {
return fUtils.mulScalar(this, base, e);
}
exp(base, e) {
return fUtils.exp(this, base, e);
}
toString(a) {
const cp = this.affine(a);
return `[ ${this.F.toString(cp[0])} , ${this.F.toString(cp[1])} ]`;
}
}
module.exports = F2Field;

177
src/f3field.js Normal file

@ -0,0 +1,177 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const fUtils = require("./futils.js");
class F3Field {
constructor(F, nonResidue) {
this.F = F;
this.zero = [this.F.zero, this.F.zero, this.F.zero];
this.one = [this.F.one, this.F.zero, this.F.zero];
this.nonResidue = nonResidue;
}
_mulByNonResidue(a) {
return this.F.mul(this.nonResidue, a);
}
copy(a) {
return [this.F.copy(a[0]), this.F.copy(a[1]), this.F.copy(a[2])];
}
add(a, b) {
return [
this.F.add(a[0], b[0]),
this.F.add(a[1], b[1]),
this.F.add(a[2], b[2])
];
}
double(a) {
return this.add(a,a);
}
sub(a, b) {
return [
this.F.sub(a[0], b[0]),
this.F.sub(a[1], b[1]),
this.F.sub(a[2], b[2])
];
}
neg(a) {
return this.sub(this.zero, a);
}
mul(a, b) {
const aA = this.F.mul(a[0] , b[0]);
const bB = this.F.mul(a[1] , b[1]);
const cC = this.F.mul(a[2] , b[2]);
return [
this.F.add(
aA,
this._mulByNonResidue(
this.F.sub(
this.F.mul(
this.F.add(a[1], a[2]),
this.F.add(b[1], b[2])),
this.F.add(bB, cC)))), // aA + non_residue*((b+c)*(B+C)-bB-cC),
this.F.add(
this.F.sub(
this.F.mul(
this.F.add(a[0], a[1]),
this.F.add(b[0], b[1])),
this.F.add(aA, bB)),
this._mulByNonResidue( cC)), // (a+b)*(A+B)-aA-bB+non_residue*cC
this.F.add(
this.F.sub(
this.F.mul(
this.F.add(a[0], a[2]),
this.F.add(b[0], b[2])),
this.F.add(aA, cC)),
bB)]; // (a+c)*(A+C)-aA+bB-cC)
}
inverse(a) {
const t0 = this.F.square(a[0]); // t0 = a^2 ;
const t1 = this.F.square(a[1]); // t1 = b^2 ;
const t2 = this.F.square(a[2]); // t2 = c^2;
const t3 = this.F.mul(a[0],a[1]); // t3 = ab
const t4 = this.F.mul(a[0],a[2]); // t4 = ac
const t5 = this.F.mul(a[1],a[2]); // t5 = bc;
// c0 = t0 - non_residue * t5;
const c0 = this.F.sub(t0, this._mulByNonResidue(t5));
// c1 = non_residue * t2 - t3;
const c1 = this.F.sub(this._mulByNonResidue(t2), t3);
const c2 = this.F.sub(t1, t4); // c2 = t1-t4
// t6 = (a * c0 + non_residue * (c * c1 + b * c2)).inverse();
const t6 =
this.F.inverse(
this.F.add(
this.F.mul(a[0], c0),
this._mulByNonResidue(
this.F.add(
this.F.mul(a[2], c1),
this.F.mul(a[1], c2)))));
return [
this.F.mul(t6, c0), // t6*c0
this.F.mul(t6, c1), // t6*c1
this.F.mul(t6, c2)]; // t6*c2
}
div(a, b) {
return this.mul(a, this.inverse(b));
}
square(a) {
const s0 = this.F.square(a[0]); // s0 = a^2
const ab = this.F.mul(a[0], a[1]); // ab = a*b
const s1 = this.F.add(ab, ab); // s1 = 2ab;
const s2 = this.F.square(
this.F.add(this.F.sub(a[0],a[1]), a[2])); // s2 = (a - b + c)^2;
const bc = this.F.mul(a[1],a[2]); // bc = b*c
const s3 = this.F.add(bc, bc); // s3 = 2*bc
const s4 = this.F.square(a[2]); // s4 = c^2
return [
this.F.add(
s0,
this._mulByNonResidue(s3)), // s0 + non_residue * s3,
this.F.add(
s1,
this._mulByNonResidue(s4)), // s1 + non_residue * s4,
this.F.sub(
this.F.add( this.F.add(s1, s2) , s3 ),
this.F.add(s0, s4))]; // s1 + s2 + s3 - s0 - s4
}
isZero(a) {
return this.F.isZero(a[0]) && this.F.isZero(a[1]) && this.F.isZero(a[2]);
}
equals(a, b) {
return this.F.equals(a[0], b[0]) && this.F.equals(a[1], b[1]) && this.F.equals(a[2], b[2]);
}
affine(a) {
return [this.F.affine(a[0]), this.F.affine(a[1]), this.F.affine(a[2])];
}
mulScalar(base, e) {
return fUtils.mulScalar(this, base, e);
}
exp(base, e) {
return fUtils.exp(this, base, e);
}
toString(a) {
const cp = this.affine(a);
return `[ ${this.F.toString(cp[0])} , ${this.F.toString(cp[1])}, ${this.F.toString(cp[2])} ]`;
}
}
module.exports = F3Field;
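The extension-field classes above are normally instantiated by bn128.js; a quick sanity sketch over the quadratic extension it builds, with an arbitrary element 3 + 7u:
const BN128 = require("./bn128.js");
const bigInt = require("./bigint.js");

const F2 = new BN128().F2;
const a = [bigInt(3), bigInt(7)];                           // the element 3 + 7*u

console.log(F2.equals(F2.mul(a, F2.inverse(a)), F2.one));   // a * a^-1 = 1, should print true
console.log(F2.equals(F2.square(a), F2.mul(a, a)));         // square agrees with mul, should print true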

53
src/futils.js Normal file

@ -0,0 +1,53 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const bigInt = require("./bigint.js");
exports.mulScalar = (F, base, e) =>{
let res = F.zero;
let rem = bigInt(e);
let exp = base;
while (! rem.isZero()) {
if (rem.isOdd()) {
res = F.add(res, exp);
}
exp = F.double(exp);
rem = rem.shr(1);
}
return res;
};
exports.exp = (F, base, e) =>{
let res = F.one;
let rem = bigInt(e);
let exp = base;
while (! rem.isZero()) {
if (rem.isOdd()) {
res = F.mul(res, exp);
}
exp = F.square(exp);
rem = rem.shr(1);
}
return res;
};
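Both helpers run double-and-add / square-and-multiply over any structure with the right interface (zero/add/double for mulScalar, one/mul/square for exp). A small check against the base field wired up in bn128.js:
const fUtils = require("./futils.js");
const bigInt = require("./bigint.js");
const BN128 = require("./bn128.js");

const F1 = new BN128().F1;
const r = fUtils.exp(F1, bigInt(2), bigInt(10));
console.log(F1.equals(r, bigInt(1024)));   // 2^10 = 1024 (mod q), should print true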

190
src/gcurve.js Normal file

@ -0,0 +1,190 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const fUtils = require("./futils.js");
class GCurve {
constructor(F, g) {
this.F = F;
this.g = [F.copy(g[0]), F.copy(g[1])];
if (this.g.length == 2) this.g[2] = this.F.one;
this.zero = [this.F.zero, this.F.one, this.F.zero];
}
isZero(p) {
return this.F.isZero(p[2]);
}
add(p1, p2) {
const F = this.F;
if (this.isZero(p1)) return p2;
if (this.isZero(p2)) return p1;
const res = new Array(3);
const Z1Z1 = F.square( p1[2] );
const Z2Z2 = F.square( p2[2] );
const U1 = F.mul( p1[0] , Z2Z2 ); // U1 = X1 * Z2Z2
const U2 = F.mul( p2[0] , Z1Z1 ); // U2 = X2 * Z1Z1
const Z1_cubed = F.mul( p1[2] , Z1Z1);
const Z2_cubed = F.mul( p2[2] , Z2Z2);
const S1 = F.mul( p1[1] , Z2_cubed); // S1 = Y1 * Z2 * Z2Z2
const S2 = F.mul( p2[1] , Z1_cubed); // S2 = Y2 * Z1 * Z1Z1
if (F.equals(U1,U2) && F.equals(S1,S2)) {
return this.double(p1);
}
const H = F.sub( U2 , U1 ); // H = U2-U1
const S2_minus_S1 = F.sub( S2 , S1 );
const I = F.square( F.add(H,H) ); // I = (2 * H)^2
const J = F.mul( H , I ); // J = H * I
const r = F.add( S2_minus_S1 , S2_minus_S1 ); // r = 2 * (S2-S1)
const V = F.mul( U1 , I ); // V = U1 * I
res[0] =
F.sub(
F.sub( F.square(r) , J ),
F.add( V , V )); // X3 = r^2 - J - 2 * V
const S1_J = F.mul( S1 , J );
res[1] =
F.sub(
F.mul( r , F.sub(V,res[0])),
F.add( S1_J,S1_J )); // Y3 = r * (V-X3)-2 S1 J
res[2] =
F.mul(
H,
F.sub(
F.square( F.add(p1[2],p2[2]) ),
F.add( Z1Z1 , Z2Z2 ))); // Z3 = ((Z1+Z2)^2-Z1Z1-Z2Z2) * H
return res;
}
neg(p) {
return [p[0], this.F.neg(p[1]), p[2]];
}
sub(a, b) {
return this.add(a, this.neg(b));
}
double(p) {
const F = this.F;
const res = new Array(3);
if (this.isZero(p)) return p;
const A = F.square( p[0] ); // A = X1^2
const B = F.square( p[1] ); // B = Y1^2
const C = F.square( B ); // C = B^2
let D =
F.sub(
F.square( F.add(p[0] , B )),
F.add( A , C));
D = F.add(D,D); // D = 2 * ((X1 + B)^2 - A - C)
const E = F.add( F.add(A,A), A); // E = 3 * A
const FF =F.square( E ); // F = E^2
res[0] = F.sub( FF , F.add(D,D) ); // X3 = F - 2 D
let eightC = F.add( C , C );
eightC = F.add( eightC , eightC );
eightC = F.add( eightC , eightC );
res[1] =
F.sub(
F.mul(
E,
F.sub( D, res[0] )),
eightC); // Y3 = E * (D - X3) - 8 * C
const Y1Z1 = F.mul( p[1] , p[2] );
res[2] = F.add( Y1Z1 , Y1Z1 ); // Z3 = 2 * Y1 * Z1
return res;
}
mulScalar(base, e) {
return fUtils.mulScalar(this, base, e);
}
affine(p) {
const F = this.F;
if (this.isZero(p)) {
return this.zero;
} else {
const Z_inv = F.inverse(p[2]);
const Z2_inv = F.square(Z_inv);
const Z3_inv = F.mul(Z2_inv, Z_inv);
const res = new Array(3);
res[0] = F.affine( F.mul(p[0],Z2_inv));
res[1] = F.affine( F.mul(p[1],Z3_inv));
res[2] = F.one;
return res;
}
}
equals(p1, p2) {
const F = this.F;
if (this.isZero(p1)) return this.isZero(p2);
if (this.isZero(p2)) return this.isZero(p1);
const Z1Z1 = F.square( p1[2] );
const Z2Z2 = F.square( p2[2] );
const U1 = F.mul( p1[0] , Z2Z2 );
const U2 = F.mul( p2[0] , Z1Z1 );
const Z1_cubed = F.mul( p1[2] , Z1Z1);
const Z2_cubed = F.mul( p2[2] , Z2Z2);
const S1 = F.mul( p1[1] , Z2_cubed);
const S2 = F.mul( p2[1] , Z1_cubed);
return (F.equals(U1,U2) && F.equals(S1,S2));
}
toString(p) {
const cp = this.affine(p);
return `[ ${this.F.toString(cp[0])} , ${this.F.toString(cp[1])} ]`;
}
}
module.exports = GCurve;
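A sketch exercising the Jacobian group law above through the G1 instance built in bn128.js; r is the group order, so r times the generator should land on the point at infinity:
const BN128 = require("./bn128.js");

const bn128 = new BN128();
const G1 = bn128.G1;
const g = G1.g;

console.log(G1.equals(G1.add(g, g), G1.double(g)));   // add(g, g) agrees with double(g)
console.log(G1.isZero(G1.mulScalar(g, bn128.r)));     // order * generator = infinity, should print true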


@ -1,23 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
export {default as fullProve} from "./groth16_fullprove.js";
export {default as prove} from "./groth16_prove.js";
export {default as verify} from "./groth16_verify.js";
export {default as exportSolidityCallData} from "./groth16_exportsoliditycalldata.js";


@ -1,42 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
function p256(n) {
let nstr = n.toString(16);
while (nstr.length < 64) nstr = "0"+nstr;
nstr = `"0x${nstr}"`;
return nstr;
}
export default async function groth16ExportSolidityCallData(proof, pub) {
let inputs = "";
for (let i=0; i<pub.length; i++) {
if (inputs != "") inputs = inputs + ",";
inputs = inputs + p256(pub[i]);
}
let S;
S=`[${p256(proof.pi_a[0])}, ${p256(proof.pi_a[1])}],` +
`[[${p256(proof.pi_b[0][1])}, ${p256(proof.pi_b[0][0])}],[${p256(proof.pi_b[1][1])}, ${p256(proof.pi_b[1][0])}]],` +
`[${p256(proof.pi_c[0])}, ${p256(proof.pi_c[1])}],` +
`[${inputs}]`;
return S;
}
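A hedged sketch of how the returned string is typically consumed; verifierContract stands for an ethers.js or web3 handle to a generated Solidity verifier and is not part of this repository:
import groth16ExportSolidityCallData from "./groth16_exportsoliditycalldata.js";

// proof and publicSignals are the objects returned by the groth16 prover.
async function callVerifier(verifierContract, proof, publicSignals) {
    const calldata = await groth16ExportSolidityCallData(proof, publicSignals);
    // The string is "[a],[[b]],[c],[inputs]"; wrapping it in brackets turns it
    // into valid JSON holding the four arguments of verifyProof(a, b, c, input).
    const [a, b, c, input] = JSON.parse("[" + calldata + "]");
    return verifierContract.verifyProof(a, b, c, input);
}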


@ -1,29 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import groth16_prove from "./groth16_prove.js";
import wtns_calculate from "./wtns_calculate.js";
export default async function groth16FullProve(input, wasmFile, zkeyFileName, logger) {
const wtns= {
type: "mem"
};
await wtns_calculate(input, wasmFile, wtns);
return await groth16_prove(zkeyFileName, wtns, logger);
}
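Illustrative end-to-end call; the .wasm and .zkey paths are placeholders for the artifacts produced by the circuit compiler and the zkey setup, and the input names belong to a hypothetical circuit:
import groth16FullProve from "./groth16_fullprove.js";

async function main() {
    const input = { a: 3, b: 11 };   // input signals of the (hypothetical) circuit
    const { proof, publicSignals } = await groth16FullProve(
        input,
        "circuit_js/circuit.wasm",
        "circuit_final.zkey"
    );
    console.log(publicSignals);
    console.log(proof.protocol, proof.curve);   // "groth16" and the curve name
}
main();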


@ -1,375 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import * as binFileUtils from "@iden3/binfileutils";
import * as zkeyUtils from "./zkey_utils.js";
import * as wtnsUtils from "./wtns_utils.js";
import { getCurveFromQ as getCurve } from "./curves.js";
import { log2 } from "./misc.js";
import { Scalar, utils, BigBuffer } from "ffjavascript";
const {stringifyBigInts} = utils;
export default async function groth16Prove(zkeyFileName, witnessFileName, logger) {
const {fd: fdWtns, sections: sectionsWtns} = await binFileUtils.readBinFile(witnessFileName, "wtns", 2, 1<<25, 1<<23);
const wtns = await wtnsUtils.readHeader(fdWtns, sectionsWtns);
const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils.readBinFile(zkeyFileName, "zkey", 2, 1<<25, 1<<23);
const zkey = await zkeyUtils.readHeader(fdZKey, sectionsZKey);
if (zkey.protocol != "groth16") {
throw new Error("zkey file is not groth16");
}
if (!Scalar.eq(zkey.r, wtns.q)) {
throw new Error("Curve of the witness does not match the curve of the proving key");
}
if (wtns.nWitness != zkey.nVars) {
throw new Error(`Invalid witness length. Circuit: ${zkey.nVars}, witness: ${wtns.nWitness}`);
}
const curve = await getCurve(zkey.q);
const Fr = curve.Fr;
const G1 = curve.G1;
const G2 = curve.G2;
const power = log2(zkey.domainSize);
if (logger) logger.debug("Reading Wtns");
const buffWitness = await binFileUtils.readSection(fdWtns, sectionsWtns, 2);
if (logger) logger.debug("Reading Coeffs");
const buffCoeffs = await binFileUtils.readSection(fdZKey, sectionsZKey, 4);
if (logger) logger.debug("Building ABC");
const [buffA_T, buffB_T, buffC_T] = await buldABC1(curve, zkey, buffWitness, buffCoeffs, logger);
const inc = power == Fr.s ? curve.Fr.shift : curve.Fr.w[power+1];
const buffA = await Fr.ifft(buffA_T, "", "", logger, "IFFT_A");
const buffAodd = await Fr.batchApplyKey(buffA, Fr.e(1), inc);
const buffAodd_T = await Fr.fft(buffAodd, "", "", logger, "FFT_A");
const buffB = await Fr.ifft(buffB_T, "", "", logger, "IFFT_B");
const buffBodd = await Fr.batchApplyKey(buffB, Fr.e(1), inc);
const buffBodd_T = await Fr.fft(buffBodd, "", "", logger, "FFT_B");
const buffC = await Fr.ifft(buffC_T, "", "", logger, "IFFT_C");
const buffCodd = await Fr.batchApplyKey(buffC, Fr.e(1), inc);
const buffCodd_T = await Fr.fft(buffCodd, "", "", logger, "FFT_C");
if (logger) logger.debug("Join ABC");
const buffPodd_T = await joinABC(curve, zkey, buffAodd_T, buffBodd_T, buffCodd_T, logger);
let proof = {};
if (logger) logger.debug("Reading A Points");
const buffBasesA = await binFileUtils.readSection(fdZKey, sectionsZKey, 5);
proof.pi_a = await curve.G1.multiExpAffine(buffBasesA, buffWitness, logger, "multiexp A");
if (logger) logger.debug("Reading B1 Points");
const buffBasesB1 = await binFileUtils.readSection(fdZKey, sectionsZKey, 6);
let pib1 = await curve.G1.multiExpAffine(buffBasesB1, buffWitness, logger, "multiexp B1");
if (logger) logger.debug("Reading B2 Points");
const buffBasesB2 = await binFileUtils.readSection(fdZKey, sectionsZKey, 7);
proof.pi_b = await curve.G2.multiExpAffine(buffBasesB2, buffWitness, logger, "multiexp B2");
if (logger) logger.debug("Reading C Points");
const buffBasesC = await binFileUtils.readSection(fdZKey, sectionsZKey, 8);
proof.pi_c = await curve.G1.multiExpAffine(buffBasesC, buffWitness.slice((zkey.nPublic+1)*curve.Fr.n8), logger, "multiexp C");
if (logger) logger.debug("Reading H Points");
const buffBasesH = await binFileUtils.readSection(fdZKey, sectionsZKey, 9);
const resH = await curve.G1.multiExpAffine(buffBasesH, buffPodd_T, logger, "multiexp H");
const r = curve.Fr.random();
const s = curve.Fr.random();
proof.pi_a = G1.add( proof.pi_a, zkey.vk_alpha_1 );
proof.pi_a = G1.add( proof.pi_a, G1.timesFr( zkey.vk_delta_1, r ));
proof.pi_b = G2.add( proof.pi_b, zkey.vk_beta_2 );
proof.pi_b = G2.add( proof.pi_b, G2.timesFr( zkey.vk_delta_2, s ));
pib1 = G1.add( pib1, zkey.vk_beta_1 );
pib1 = G1.add( pib1, G1.timesFr( zkey.vk_delta_1, s ));
proof.pi_c = G1.add(proof.pi_c, resH);
proof.pi_c = G1.add( proof.pi_c, G1.timesFr( proof.pi_a, s ));
proof.pi_c = G1.add( proof.pi_c, G1.timesFr( pib1, r ));
proof.pi_c = G1.add( proof.pi_c, G1.timesFr( zkey.vk_delta_1, Fr.neg(Fr.mul(r,s) )));
let publicSignals = [];
for (let i=1; i<= zkey.nPublic; i++) {
const b = buffWitness.slice(i*Fr.n8, i*Fr.n8+Fr.n8);
publicSignals.push(Scalar.fromRprLE(b));
}
proof.pi_a = G1.toObject(G1.toAffine(proof.pi_a));
proof.pi_b = G2.toObject(G2.toAffine(proof.pi_b));
proof.pi_c = G1.toObject(G1.toAffine(proof.pi_c));
proof.protocol = "groth16";
proof.curve = curve.name;
await fdZKey.close();
await fdWtns.close();
proof = stringifyBigInts(proof);
publicSignals = stringifyBigInts(publicSignals);
return {proof, publicSignals};
}
async function buldABC1(curve, zkey, witness, coeffs, logger) {
const n8 = curve.Fr.n8;
const sCoef = 4*3 + zkey.n8r;
const nCoef = (coeffs.byteLength-4) / sCoef;
const outBuffA = new BigBuffer(zkey.domainSize * n8);
const outBuffB = new BigBuffer(zkey.domainSize * n8);
const outBuffC = new BigBuffer(zkey.domainSize * n8);
const outBuf = [ outBuffA, outBuffB ];
for (let i=0; i<nCoef; i++) {
if ((logger)&&(i%1000000 == 0)) logger.debug(`QAP AB: ${i}/${nCoef}`);
const buffCoef = coeffs.slice(4+i*sCoef, 4+i*sCoef+sCoef);
const buffCoefV = new DataView(buffCoef.buffer);
const m= buffCoefV.getUint32(0, true);
const c= buffCoefV.getUint32(4, true);
const s= buffCoefV.getUint32(8, true);
const coef = buffCoef.slice(12, 12+n8);
outBuf[m].set(
curve.Fr.add(
outBuf[m].slice(c*n8, c*n8+n8),
curve.Fr.mul(coef, witness.slice(s*n8, s*n8+n8))
),
c*n8
);
}
for (let i=0; i<zkey.domainSize; i++) {
if ((logger)&&(i%1000000 == 0)) logger.debug(`QAP C: ${i}/${zkey.domainSize}`);
outBuffC.set(
curve.Fr.mul(
outBuffA.slice(i*n8, i*n8+n8),
outBuffB.slice(i*n8, i*n8+n8),
),
i*n8
);
}
return [outBuffA, outBuffB, outBuffC];
}
/*
async function buldABC(curve, zkey, witness, coeffs, logger) {
const concurrency = curve.tm.concurrency;
const sCoef = 4*3 + zkey.n8r;
let getUint32;
if (coeffs instanceof BigBuffer) {
const coeffsDV = [];
const PAGE_LEN = coeffs.buffers[0].length;
for (let i=0; i< coeffs.buffers.length; i++) {
coeffsDV.push(new DataView(coeffs.buffers[i].buffer));
}
getUint32 = function (pos) {
return coeffsDV[Math.floor(pos/PAGE_LEN)].getUint32(pos % PAGE_LEN, true);
};
} else {
const coeffsDV = new DataView(coeffs.buffer, coeffs.byteOffset, coeffs.byteLength);
getUint32 = function (pos) {
return coeffsDV.getUint32(pos, true);
};
}
const elementsPerChunk = Math.floor(zkey.domainSize/concurrency);
const promises = [];
const cutPoints = [];
for (let i=0; i<concurrency; i++) {
cutPoints.push( getCutPoint( Math.floor(i*elementsPerChunk) ));
}
cutPoints.push(coeffs.byteLength);
const chunkSize = 2**26;
for (let s=0 ; s<zkey.nVars ; s+= chunkSize) {
if (logger) logger.debug(`QAP ${s}: ${s}/${zkey.nVars}`);
const ns= Math.min(zkey.nVars-s, chunkSize );
for (let i=0; i<concurrency; i++) {
let n;
if (i< concurrency-1) {
n = elementsPerChunk;
} else {
n = zkey.domainSize - i*elementsPerChunk;
}
if (n==0) continue;
const task = [];
task.push({cmd: "ALLOCSET", var: 0, buff: coeffs.slice(cutPoints[i], cutPoints[i+1])});
task.push({cmd: "ALLOCSET", var: 1, buff: witness.slice(s*curve.Fr.n8, (s+ns)*curve.Fr.n8)});
task.push({cmd: "ALLOC", var: 2, len: n*curve.Fr.n8});
task.push({cmd: "ALLOC", var: 3, len: n*curve.Fr.n8});
task.push({cmd: "ALLOC", var: 4, len: n*curve.Fr.n8});
task.push({cmd: "CALL", fnName: "qap_buildABC", params:[
{var: 0},
{val: (cutPoints[i+1] - cutPoints[i])/sCoef},
{var: 1},
{var: 2},
{var: 3},
{var: 4},
{val: i*elementsPerChunk},
{val: n},
{val: s},
{val: ns}
]});
task.push({cmd: "GET", out: 0, var: 2, len: n*curve.Fr.n8});
task.push({cmd: "GET", out: 1, var: 3, len: n*curve.Fr.n8});
task.push({cmd: "GET", out: 2, var: 4, len: n*curve.Fr.n8});
promises.push(curve.tm.queueAction(task));
}
}
let result = await Promise.all(promises);
const nGroups = result.length / concurrency;
if (nGroups>1) {
const promises2 = [];
for (let i=0; i<concurrency; i++) {
const task=[];
task.push({cmd: "ALLOC", var: 0, len: result[i][0].byteLength});
task.push({cmd: "ALLOC", var: 1, len: result[i][0].byteLength});
for (let m=0; m<3; m++) {
task.push({cmd: "SET", var: 0, buff: result[i][m]});
for (let s=1; s<nGroups; s++) {
task.push({cmd: "SET", var: 1, buff: result[s*concurrency + i][m]});
task.push({cmd: "CALL", fnName: "qap_batchAdd", params:[
{var: 0},
{var: 1},
{val: result[i][m].length/curve.Fr.n8},
{var: 0}
]});
}
task.push({cmd: "GET", out: m, var: 0, len: result[i][m].length});
}
promises2.push(curve.tm.queueAction(task));
}
result = await Promise.all(promises2);
}
const outBuffA = new BigBuffer(zkey.domainSize * curve.Fr.n8);
const outBuffB = new BigBuffer(zkey.domainSize * curve.Fr.n8);
const outBuffC = new BigBuffer(zkey.domainSize * curve.Fr.n8);
let p=0;
for (let i=0; i<result.length; i++) {
outBuffA.set(result[i][0], p);
outBuffB.set(result[i][1], p);
outBuffC.set(result[i][2], p);
p += result[i][0].byteLength;
}
return [outBuffA, outBuffB, outBuffC];
function getCutPoint(v) {
let m = 0;
let n = getUint32(0);
while (m < n) {
var k = Math.floor((n + m) / 2);
const va = getUint32(4 + k*sCoef + 4);
if (va > v) {
n = k - 1;
} else if (va < v) {
m = k + 1;
} else {
n = k;
}
}
return 4 + m*sCoef;
}
}
*/
async function joinABC(curve, zkey, a, b, c, logger) {
const MAX_CHUNK_SIZE = 1 << 22;
const n8 = curve.Fr.n8;
const nElements = Math.floor(a.byteLength / curve.Fr.n8);
const promises = [];
for (let i=0; i<nElements; i += MAX_CHUNK_SIZE) {
if (logger) logger.debug(`JoinABC: ${i}/${nElements}`);
const n= Math.min(nElements - i, MAX_CHUNK_SIZE);
const task = [];
const aChunk = a.slice(i*n8, (i + n)*n8 );
const bChunk = b.slice(i*n8, (i + n)*n8 );
const cChunk = c.slice(i*n8, (i + n)*n8 );
task.push({cmd: "ALLOCSET", var: 0, buff: aChunk});
task.push({cmd: "ALLOCSET", var: 1, buff: bChunk});
task.push({cmd: "ALLOCSET", var: 2, buff: cChunk});
task.push({cmd: "ALLOC", var: 3, len: n*n8});
task.push({cmd: "CALL", fnName: "qap_joinABC", params:[
{var: 0},
{var: 1},
{var: 2},
{val: n},
{var: 3},
]});
task.push({cmd: "CALL", fnName: "frm_batchFromMontgomery", params:[
{var: 3},
{val: n},
{var: 3}
]});
task.push({cmd: "GET", out: 0, var: 3, len: n*n8});
promises.push(curve.tm.queueAction(task));
}
const result = await Promise.all(promises);
let outBuff;
if (a instanceof BigBuffer) {
outBuff = new BigBuffer(a.byteLength);
} else {
outBuff = new Uint8Array(a.byteLength);
}
let p=0;
for (let i=0; i<result.length; i++) {
outBuff.set(result[i][0], p);
p += result[i][0].byteLength;
}
return outBuff;
}
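
For reference, a serial sketch of what each chunked task above asks the thread manager to do. It assumes the qap_joinABC WASM routine computes out[i] = a[i]*b[i] - c[i] over Fr (the A*B - C combination of the QAP); the real code only differs in that it runs chunked on workers and converts the result out of Montgomery form afterwards.

// Hedged serial reference; assumes qap_joinABC is an element-wise a*b - c over Fr.
function joinABCSerial(curve, a, b, c) {
    const n8 = curve.Fr.n8;
    const n = a.byteLength / n8;
    const out = new Uint8Array(a.byteLength);
    for (let i = 0; i < n; i++) {
        const ai = a.slice(i * n8, (i + 1) * n8);
        const bi = b.slice(i * n8, (i + 1) * n8);
        const ci = c.slice(i * n8, (i + 1) * n8);
        out.set(curve.Fr.sub(curve.Fr.mul(ai, bi), ci), i * n8);
    }
    return out;
}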

View File

@ -1,77 +0,0 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
import { Scalar } from "ffjavascript";
import * as curves from "./curves.js";
import { utils } from "ffjavascript";
const {unstringifyBigInts} = utils;
export default async function groth16Verify(vk_verifier, publicSignals, proof, logger) {
/*
let cpub = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
cpub = G1.add( cpub, G1.timesScalar( vk_verifier.IC[s+1], publicSignals[s]));
}
*/
vk_verifier = unstringifyBigInts(vk_verifier);
proof = unstringifyBigInts(proof);
publicSignals = unstringifyBigInts(publicSignals);
const curve = await curves.getCurveFromName(vk_verifier.curve);
const IC0 = curve.G1.fromObject(vk_verifier.IC[0]);
const IC = new Uint8Array(curve.G1.F.n8*2 * publicSignals.length);
const w = new Uint8Array(curve.Fr.n8 * publicSignals.length);
for (let i=0; i<publicSignals.length; i++) {
const buffP = curve.G1.fromObject(vk_verifier.IC[i+1]);
IC.set(buffP, i*curve.G1.F.n8*2);
Scalar.toRprLE(w, curve.Fr.n8*i, publicSignals[i], curve.Fr.n8);
}
let cpub = await curve.G1.multiExpAffine(IC, w);
cpub = curve.G1.add(cpub, IC0);
const pi_a = curve.G1.fromObject(proof.pi_a);
const pi_b = curve.G2.fromObject(proof.pi_b);
const pi_c = curve.G1.fromObject(proof.pi_c);
const vk_gamma_2 = curve.G2.fromObject(vk_verifier.vk_gamma_2);
const vk_delta_2 = curve.G2.fromObject(vk_verifier.vk_delta_2);
const vk_alpha_1 = curve.G1.fromObject(vk_verifier.vk_alpha_1);
const vk_beta_2 = curve.G2.fromObject(vk_verifier.vk_beta_2);
const res = await curve.pairingEq(
curve.G1.neg(pi_a) , pi_b,
cpub , vk_gamma_2,
pi_c , vk_delta_2,
vk_alpha_1, vk_beta_2
);
if (! res) {
if (logger) logger.error("Invalid proof");
return false;
}
if (logger) logger.info("OK!");
return true;
}
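
A minimal usage sketch of this verifier. The module path and the JSON file names are placeholders; the verification key, proof and public signals are assumed to have been produced beforehand.

import fs from "fs";
import groth16Verify from "./groth16_verify.js";   // hypothetical path to this module

const vKey = JSON.parse(fs.readFileSync("verification_key.json", "utf8"));
const proof = JSON.parse(fs.readFileSync("proof.json", "utf8"));
const publicSignals = JSON.parse(fs.readFileSync("public.json", "utf8"));

const ok = await groth16Verify(vKey, publicSignals, proof, console);
console.log(ok ? "proof is valid" : "proof is invalid");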

View File

@ -1,84 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import blake2b from "blake2b-wasm";
import { ChaCha } from "ffjavascript";
export function hashToG2(curve, hash) {
const hashV = new DataView(hash.buffer, hash.byteOffset, hash.byteLength);
const seed = [];
for (let i=0; i<8; i++) {
seed[i] = hashV.getUint32(i*4);
}
const rng = new ChaCha(seed);
const g2_sp = curve.G2.fromRng(rng);
return g2_sp;
}
export function getG2sp(curve, personalization, challenge, g1s, g1sx) {
const h = blake2b(64);
const b1 = new Uint8Array([personalization]);
h.update(b1);
h.update(challenge);
const b3 = curve.G1.toUncompressed(g1s);
h.update(b3);
const b4 = curve.G1.toUncompressed(g1sx);
h.update(b4);
const hash = h.digest();
return hashToG2(curve, hash);
}
function calculatePubKey(k, curve, personalization, challengeHash, rng ) {
k.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
k.g1_sx = curve.G1.toAffine(curve.G1.timesFr(k.g1_s, k.prvKey));
k.g2_sp = curve.G2.toAffine(getG2sp(curve, personalization, challengeHash, k.g1_s, k.g1_sx));
k.g2_spx = curve.G2.toAffine(curve.G2.timesFr(k.g2_sp, k.prvKey));
return k;
}
export function createPTauKey(curve, challengeHash, rng) {
const key = {
tau: {},
alpha: {},
beta: {}
};
key.tau.prvKey = curve.Fr.fromRng(rng);
key.alpha.prvKey = curve.Fr.fromRng(rng);
key.beta.prvKey = curve.Fr.fromRng(rng);
calculatePubKey(key.tau, curve, 0, challengeHash, rng);
calculatePubKey(key.alpha, curve, 1, challengeHash, rng);
calculatePubKey(key.beta, curve, 2, challengeHash, rng);
return key;
}
export function createDeltaKey(curve, transcript, rng) {
const delta = {};
delta.prvKey = curve.Fr.fromRng(rng);
delta.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
delta.g1_sx = curve.G1.toAffine(curve.G1.timesScalar(delta.g1_s, delta.prvKey));
delta.g2_sp = hashToG2(curve, transcript);
delta.g2_spx = curve.G2.toAffine(curve.G2.timesScalar(delta.g2_sp, delta.prvKey));
return delta;
}
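
A minimal sketch of how createPTauKey can be driven with helpers elsewhere in this changeset (getCurveFromName from curves.js, getRandomRng from the misc utilities shown further below). The module paths, the entropy string and the all-zero challenge hash are placeholders.

import Blake2b from "blake2b-wasm";
import { getCurveFromName } from "./curves.js";
import { getRandomRng } from "./misc.js";
import { createPTauKey } from "./keypair.js";       // hypothetical path to this module

await Blake2b.ready();
const curve = await getCurveFromName("bn128");
const rng = await getRandomRng("some user supplied entropy");
const challengeHash = new Uint8Array(64);           // placeholder transcript hash
const key = createPTauKey(curve, challengeHash, rng);
// key.tau, key.alpha and key.beta each carry prvKey, g1_s, g1_sx, g2_sp and g2_spx.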

View File

@ -1,55 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import * as fastFile from "fastfile";
export default async function loadSymbols(symFileName) {
const sym = {
labelIdx2Name: [ "one" ],
varIdx2Name: [ "one" ],
componentIdx2Name: []
};
const fd = await fastFile.readExisting(symFileName);
const buff = await fd.read(fd.totalSize);
const symsStr = new TextDecoder("utf-8").decode(buff);
const lines = symsStr.split("\n");
for (let i=0; i<lines.length; i++) {
const arr = lines[i].split(",");
if (arr.length!=4) continue;
if (sym.varIdx2Name[arr[1]]) {
sym.varIdx2Name[arr[1]] += "|" + arr[3];
} else {
sym.varIdx2Name[arr[1]] = arr[3];
}
sym.labelIdx2Name[arr[0]] = arr[3];
if (!sym.componentIdx2Name[arr[2]]) {
sym.componentIdx2Name[arr[2]] = extractComponent(arr[3]);
}
}
await fd.close();
return sym;
function extractComponent(name) {
const arr = name.split(".");
arr.pop(); // Remove the last element
return arr.join(".");
}
}
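
A minimal usage sketch, assuming a circom-generated .sym file for the circuit; the module path and file name are placeholders.

import loadSymbols from "./loadsyms.js";            // hypothetical path to this module

const sym = await loadSymbols("circuit.sym");       // placeholder file name
console.log(sym.varIdx2Name[1]);                    // signal name(s) mapped to variable index 1
console.log(sym.labelIdx2Name[1]);                  // fully qualified name of label index 1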

View File

@ -1,178 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
/* global window */
import Blake2b from "blake2b-wasm";
import readline from "readline";
import { ChaCha } from "ffjavascript";
import crypto from "crypto";
const _revTable = [];
for (let i=0; i<256; i++) {
_revTable[i] = _revSlow(i, 8);
}
function _revSlow(idx, bits) {
let res =0;
let a = idx;
for (let i=0; i<bits; i++) {
res <<= 1;
res = res | (a &1);
a >>=1;
}
return res;
}
export function bitReverse(idx, bits) {
return (
_revTable[idx >>> 24] |
(_revTable[(idx >>> 16) & 0xFF] << 8) |
(_revTable[(idx >>> 8) & 0xFF] << 16) |
(_revTable[idx & 0xFF] << 24)
) >>> (32-bits);
}
export function log2( V )
{
return (
( ( V & 0xFFFF0000 ) !== 0 ? ( V &= 0xFFFF0000, 16 ) : 0 ) |
( ( V & 0xFF00FF00 ) !== 0 ? ( V &= 0xFF00FF00, 8 ) : 0 ) |
( ( V & 0xF0F0F0F0 ) !== 0 ? ( V &= 0xF0F0F0F0, 4 ) : 0 ) |
( ( V & 0xCCCCCCCC ) !== 0 ? ( V &= 0xCCCCCCCC, 2 ) : 0 ) |
( ( V & 0xAAAAAAAA ) !== 0 )
);
}
export function formatHash(b, title) {
const a = new DataView(b.buffer, b.byteOffset, b.byteLength);
let S = "";
for (let i=0; i<4; i++) {
if (i>0) S += "\n";
S += "\t\t";
for (let j=0; j<4; j++) {
if (j>0) S += " ";
S += a.getUint32(i*16+j*4).toString(16).padStart(8, "0");
}
}
if (title) S = title + "\n" + S;
return S;
}
export function hashIsEqual(h1, h2) {
if (h1.byteLength != h2.byteLength) return false;
var dv1 = new Int8Array(h1);
var dv2 = new Int8Array(h2);
for (var i = 0 ; i != h1.byteLength ; i++)
{
if (dv1[i] != dv2[i]) return false;
}
return true;
}
export function cloneHasher(h) {
const ph = h.getPartialHash();
const res = Blake2b(64);
res.setPartialHash(ph);
return res;
}
export async function sameRatio(curve, g1s, g1sx, g2s, g2sx) {
if (curve.G1.isZero(g1s)) return false;
if (curve.G1.isZero(g1sx)) return false;
if (curve.G2.isZero(g2s)) return false;
if (curve.G2.isZero(g2sx)) return false;
// return curve.F12.eq(curve.pairing(g1s, g2sx), curve.pairing(g1sx, g2s));
const res = await curve.pairingEq(g1s, g2sx, curve.G1.neg(g1sx), g2s);
return res;
}
export function askEntropy() {
if (process.browser) {
return window.prompt("Enter a random text. (Entropy): ", "");
} else {
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
return new Promise((resolve) => {
rl.question("Enter a random text. (Entropy): ", (input) => resolve(input) );
});
}
}
export async function getRandomRng(entropy) {
// Generate a random Rng
while (!entropy) {
entropy = await askEntropy();
}
const hasher = Blake2b(64);
hasher.update(crypto.randomBytes(64));
const enc = new TextEncoder(); // always utf-8
hasher.update(enc.encode(entropy));
const hash = Buffer.from(hasher.digest());
const seed = [];
for (let i=0;i<8;i++) {
seed[i] = hash.readUInt32BE(i*4);
}
const rng = new ChaCha(seed);
return rng;
}
export function rngFromBeaconParams(beaconHash, numIterationsExp) {
let nIterationsInner;
let nIterationsOuter;
if (numIterationsExp<32) {
nIterationsInner = (1 << numIterationsExp) >>> 0;
nIterationsOuter = 1;
} else {
nIterationsInner = 0x100000000;
nIterationsOuter = (1 << (numIterationsExp-32)) >>> 0;
}
let curHash = beaconHash;
for (let i=0; i<nIterationsOuter; i++) {
for (let j=0; j<nIterationsInner; j++) {
curHash = crypto.createHash("sha256").update(curHash).digest();
}
}
const curHashV = new DataView(curHash.buffer, curHash.byteOffset, curHash.byteLength);
const seed = [];
for (let i=0; i<8; i++) {
seed[i] = curHashV.getUint32(i*4, false);
}
const rng = new ChaCha(seed);
return rng;
}
export function hex2ByteArray(s) {
if (s instanceof Uint8Array) return s;
if (s.slice(0,2) == "0x") s= s.slice(2);
return new Uint8Array(s.match(/[\da-f]{2}/gi).map(function (h) {
return parseInt(h, 16);
}));
}
export function byteArray2hex(byteArray) {
return Array.prototype.map.call(byteArray, function(byte) {
return ("0" + (byte & 0xFF).toString(16)).slice(-2);
}).join("");
}
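
A quick sketch exercising the pure helpers in this file; the module path is a placeholder and the expected outputs are noted in the comments.

import { hex2ByteArray, byteArray2hex, bitReverse, log2 } from "./misc.js"; // hypothetical path

const bytes = hex2ByteArray("0xdeadbeef");
console.log(byteArray2hex(bytes));   // "deadbeef"
console.log(log2(1024));             // 10
console.log(bitReverse(1, 3));       // 4: 0b001 reversed over 3 bits is 0b100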

View File

@ -1,78 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import * as binFileUtils from "@iden3/binfileutils";
/*
This function creates a new section in the fdNew file with id idSection.
It multiplies the points in fdOld by first, first*inc, first*inc^2, ...,
nPoints times.
It also updates the newChallengeHasher with the new points
*/
export async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, groupName, first, inc, sectionName, logger) {
const MAX_CHUNK_SIZE = 1 << 16;
const G = curve[groupName];
const sG = G.F.n8*2;
const nPoints = sections[idSection][0].size / sG;
await binFileUtils.startReadUniqueSection(fdOld, sections,idSection );
await binFileUtils.startWriteSection(fdNew, idSection);
let t = first;
for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
if (logger) logger.debug(`Applying key: ${sectionName}: ${i}/${nPoints}`);
const n= Math.min(nPoints - i, MAX_CHUNK_SIZE);
let buff;
buff = await fdOld.read(n*sG);
buff = await G.batchApplyKey(buff, t, inc);
await fdNew.write(buff);
t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
}
await binFileUtils.endWriteSection(fdNew);
await binFileUtils.endReadSection(fdOld);
}
export async function applyKeyToChallengeSection(fdOld, fdNew, responseHasher, curve, groupName, nPoints, first, inc, formatOut, sectionName, logger) {
const G = curve[groupName];
const sG = G.F.n8*2;
const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks
let t = first;
for (let i=0 ; i<nPoints ; i+= chunkSize) {
if (logger) logger.debug(`Applying key ${sectionName}: ${i}/${nPoints}`);
const n= Math.min(nPoints-i, chunkSize );
const buffInU = await fdOld.read(n * sG);
const buffInLEM = await G.batchUtoLEM(buffInU);
const buffOutLEM = await G.batchApplyKey(buffInLEM, t, inc);
let buffOut;
if (formatOut == "COMPRESSED") {
buffOut = await G.batchLEMtoC(buffOutLEM);
} else {
buffOut = await G.batchLEMtoU(buffOutLEM);
}
if (responseHasher) responseHasher.update(buffOut);
await fdNew.write(buffOut);
t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
}
}
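
A small sketch of the key schedule both functions apply: point i is scaled by first*inc^i, and after each chunk of n points the running scalar t advances by inc^n, mirroring the Fr.mul(t, Fr.exp(inc, n)) step above. Plain integers stand in for Fr elements here.

// Hedged illustration only: plain numbers instead of Fr elements.
function keySchedule(nPoints, chunkSize, first, inc) {
    const factors = [];
    let t = first;
    for (let i = 0; i < nPoints; i += chunkSize) {
        const n = Math.min(nPoints - i, chunkSize);
        for (let j = 0; j < n; j++) factors.push(t * inc ** j);   // point i+j gets first*inc^(i+j)
        t = t * inc ** n;                                         // same role as Fr.mul(t, Fr.exp(inc, n))
    }
    return factors;
}

console.log(keySchedule(5, 2, 1, 3));   // [1, 3, 9, 27, 81]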

View File

@ -1,24 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
export {default as setup} from "./plonk_setup.js";
export {default as fullProve} from "./plonk_fullprove.js";
export {default as prove} from "./plonk_prove.js";
export {default as verify} from "./plonk_verify.js";
export {default as exportSolidityCallData} from "./plonk_exportsoliditycalldata.js";
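
A minimal end-to-end sketch of the API re-exported here (setup, fullProve, verify). File names and the input object are placeholders, and the verification key is assumed to have been exported from the zkey separately.

import fs from "fs";
import * as plonk from "./plonk.js";

await plonk.setup("circuit.r1cs", "pot_final.ptau", "circuit.zkey", console);

const { proof, publicSignals } = await plonk.fullProve({ a: 3, b: 11 }, "circuit.wasm", "circuit.zkey");
const vKey = JSON.parse(fs.readFileSync("verification_key.json", "utf8"));
console.log(await plonk.verify(vKey, publicSignals, proof, console));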

View File

@ -1,68 +0,0 @@
/*
Copyright 2021 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import { getCurveFromName } from "./curves.js";
function i2hex(i) {
return ("0" + i.toString(16)).slice(-2);
}
function p256(n) {
let nstr = n.toString(16);
while (nstr.length < 64) nstr = "0"+nstr;
nstr = `"0x${nstr}"`;
return nstr;
}
export default async function plonkExportSolidityCallData(proof, pub) {
const curve = await getCurveFromName(proof.curve);
const G1 = curve.G1;
const Fr = curve.Fr;
let inputs = "";
for (let i=0; i<pub.length; i++) {
if (inputs != "") inputs = inputs + ",";
inputs = inputs + p256(pub[i]);
}
const proofBuff = new Uint8Array(G1.F.n8*2*9 + Fr.n8*7);
G1.toRprUncompressed(proofBuff, 0, G1.e(proof.A));
G1.toRprUncompressed(proofBuff, G1.F.n8*2, G1.e(proof.B));
G1.toRprUncompressed(proofBuff, G1.F.n8*4, G1.e(proof.C));
G1.toRprUncompressed(proofBuff, G1.F.n8*6, G1.e(proof.Z));
G1.toRprUncompressed(proofBuff, G1.F.n8*8, G1.e(proof.T1));
G1.toRprUncompressed(proofBuff, G1.F.n8*10, G1.e(proof.T2));
G1.toRprUncompressed(proofBuff, G1.F.n8*12, G1.e(proof.T3));
G1.toRprUncompressed(proofBuff, G1.F.n8*14, G1.e(proof.Wxi));
G1.toRprUncompressed(proofBuff, G1.F.n8*16, G1.e(proof.Wxiw));
Fr.toRprBE(proofBuff, G1.F.n8*18 , Fr.e(proof.eval_a));
Fr.toRprBE(proofBuff, G1.F.n8*18 + Fr.n8, Fr.e(proof.eval_b));
Fr.toRprBE(proofBuff, G1.F.n8*18 + Fr.n8*2, Fr.e(proof.eval_c));
Fr.toRprBE(proofBuff, G1.F.n8*18 + Fr.n8*3, Fr.e(proof.eval_s1));
Fr.toRprBE(proofBuff, G1.F.n8*18 + Fr.n8*4, Fr.e(proof.eval_s2));
Fr.toRprBE(proofBuff, G1.F.n8*18 + Fr.n8*5, Fr.e(proof.eval_zw));
Fr.toRprBE(proofBuff, G1.F.n8*18 + Fr.n8*6, Fr.e(proof.eval_r));
const proofHex = Array.from(proofBuff).map(i2hex).join("");
const S="0x"+proofHex+",["+inputs+"]";
return S;
}
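
A usage sketch for the calldata helper. The proof and public signals are read from placeholder JSON files, and the generated Solidity verifier's verifyProof signature at the end is an assumption.

import fs from "fs";
import { utils } from "ffjavascript";
import plonkExportSolidityCallData from "./plonk_exportsoliditycalldata.js"; // hypothetical path

const proof = utils.unstringifyBigInts(JSON.parse(fs.readFileSync("proof.json", "utf8")));
const publicSignals = utils.unstringifyBigInts(JSON.parse(fs.readFileSync("public.json", "utf8")));
const callData = await plonkExportSolidityCallData(proof, publicSignals);
// callData looks like: 0x<proof bytes as hex>,["0x<input 1>", ...]
const [proofHex, pubTail] = callData.split(",[");
const pubInputs = JSON.parse("[" + pubTail);
// proofHex and pubInputs would then be fed to a generated verifier, e.g. (assumed ABI) verifier.verifyProof(proofHex, pubInputs)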

View File

@ -1,29 +0,0 @@
/*
Copyright 2021 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import plonk_prove from "./plonk_prove.js";
import wtns_calculate from "./wtns_calculate.js";
export default async function plonkFullProve(input, wasmFile, zkeyFileName, logger) {
const wtns= {
type: "mem"
};
await wtns_calculate(input, wasmFile, wtns);
return await plonk_prove(zkeyFileName, wtns, logger);
}
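
plonkFullProve keeps the witness in memory by handing the calculator a {type: "mem"} object. When a .wtns file has already been computed and saved, the prover shown next in this diff can be called on it directly; a minimal sketch with placeholder file names:

import plonk_prove from "./plonk_prove.js";

const { proof, publicSignals } = await plonk_prove("circuit.zkey", "witness.wtns", console);
console.log(publicSignals);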

View File

@ -1,855 +0,0 @@
/*
Copyright 2021 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2019/953.pdf section 8.4 */
import * as binFileUtils from "@iden3/binfileutils";
import * as zkeyUtils from "./zkey_utils.js";
import * as wtnsUtils from "./wtns_utils.js";
import { getCurveFromQ as getCurve } from "./curves.js";
import { Scalar, utils, BigBuffer } from "ffjavascript";
const {stringifyBigInts} = utils;
import jsSha3 from "js-sha3";
const { keccak256 } = jsSha3;
export default async function plonk16Prove(zkeyFileName, witnessFileName, logger) {
const {fd: fdWtns, sections: sectionsWtns} = await binFileUtils.readBinFile(witnessFileName, "wtns", 2, 1<<25, 1<<23);
const wtns = await wtnsUtils.readHeader(fdWtns, sectionsWtns);
const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils.readBinFile(zkeyFileName, "zkey", 2, 1<<25, 1<<23);
const zkey = await zkeyUtils.readHeader(fdZKey, sectionsZKey);
if (zkey.protocol != "plonk") {
throw new Error("zkey file is not groth16");
}
if (!Scalar.eq(zkey.r, wtns.q)) {
throw new Error("Curve of the witness does not match the curve of the proving key");
}
if (wtns.nWitness != zkey.nVars -zkey.nAdditions) {
throw new Error(`Invalid witness length. Circuit: ${zkey.nVars}, witness: ${wtns.nWitness}, ${zkey.nAdditions}`);
}
const curve = await getCurve(zkey.q);
const Fr = curve.Fr;
const G1 = curve.G1;
const n8r = curve.Fr.n8;
if (logger) logger.debug("Reading Wtns");
const buffWitness = await binFileUtils.readSection(fdWtns, sectionsWtns, 2);
// The first witness element is not used in plonk and can be any value (but it must always be the same).
// We set it to zero so the exponentiations go faster.
buffWitness.set(Fr.zero, 0);
const buffInternalWitness = new BigBuffer(n8r*zkey.nAdditions);
await calculateAdditions();
let A,B,C,Z;
let A4, B4, C4, Z4;
let pol_a,pol_b,pol_c, pol_z, pol_t, pol_r;
let proof = {};
const sigmaBuff = new BigBuffer(zkey.domainSize*n8r*4*3);
let o = sectionsZKey[12][0].p + zkey.domainSize*n8r;
await fdZKey.readToBuffer(sigmaBuff, 0 , zkey.domainSize*n8r*4, o);
o += zkey.domainSize*n8r*5;
await fdZKey.readToBuffer(sigmaBuff, zkey.domainSize*n8r*4 , zkey.domainSize*n8r*4, o);
o += zkey.domainSize*n8r*5;
await fdZKey.readToBuffer(sigmaBuff, zkey.domainSize*n8r*8 , zkey.domainSize*n8r*4, o);
const pol_s1 = new BigBuffer(zkey.domainSize*n8r);
await fdZKey.readToBuffer(pol_s1, 0 , zkey.domainSize*n8r, sectionsZKey[12][0].p);
const pol_s2 = new BigBuffer(zkey.domainSize*n8r);
await fdZKey.readToBuffer(pol_s2, 0 , zkey.domainSize*n8r, sectionsZKey[12][0].p + 5*zkey.domainSize*n8r);
const PTau = await binFileUtils.readSection(fdZKey, sectionsZKey, 14);
const ch = {};
await round1();
await round2();
await round3();
await round4();
await round5();
///////////////////////
// Final adjustments //
///////////////////////
proof.protocol = "plonk";
proof.curve = curve.name;
await fdZKey.close();
await fdWtns.close();
let publicSignals = [];
for (let i=1; i<= zkey.nPublic; i++) {
const pub = buffWitness.slice(i*Fr.n8, i*Fr.n8+Fr.n8);
publicSignals.push(Scalar.fromRprLE(pub));
}
proof.A = G1.toObject(proof.A);
proof.B = G1.toObject(proof.B);
proof.C = G1.toObject(proof.C);
proof.Z = G1.toObject(proof.Z);
proof.T1 = G1.toObject(proof.T1);
proof.T2 = G1.toObject(proof.T2);
proof.T3 = G1.toObject(proof.T3);
proof.eval_a = Fr.toObject(proof.eval_a);
proof.eval_b = Fr.toObject(proof.eval_b);
proof.eval_c = Fr.toObject(proof.eval_c);
proof.eval_s1 = Fr.toObject(proof.eval_s1);
proof.eval_s2 = Fr.toObject(proof.eval_s2);
proof.eval_zw = Fr.toObject(proof.eval_zw);
proof.eval_t = Fr.toObject(proof.eval_t);
proof.eval_r = Fr.toObject(proof.eval_r);
proof.Wxi = G1.toObject(proof.Wxi);
proof.Wxiw = G1.toObject(proof.Wxiw);
delete proof.eval_t;
proof = stringifyBigInts(proof);
publicSignals = stringifyBigInts(publicSignals);
return {proof, publicSignals};
async function calculateAdditions() {
const additionsBuff = await binFileUtils.readSection(fdZKey, sectionsZKey, 3);
const sSum = 8+curve.Fr.n8*2;
for (let i=0; i<zkey.nAdditions; i++) {
const ai= readUInt32(additionsBuff, i*sSum);
const bi= readUInt32(additionsBuff, i*sSum+4);
const ac= additionsBuff.slice(i*sSum+8, i*sSum+8+n8r);
const bc= additionsBuff.slice(i*sSum+8+n8r, i*sSum+8+n8r*2);
const aw= getWitness(ai);
const bw= getWitness(bi);
const r = curve.Fr.add(
curve.Fr.mul(ac, aw),
curve.Fr.mul(bc, bw)
);
buffInternalWitness.set(r, n8r*i);
}
}
async function buildABC() {
let A = new BigBuffer(zkey.domainSize * n8r);
let B = new BigBuffer(zkey.domainSize * n8r);
let C = new BigBuffer(zkey.domainSize * n8r);
const aMap = await binFileUtils.readSection(fdZKey, sectionsZKey, 4);
const bMap = await binFileUtils.readSection(fdZKey, sectionsZKey, 5);
const cMap = await binFileUtils.readSection(fdZKey, sectionsZKey, 6);
for (let i=0; i<zkey.nConstrains; i++) {
const iA = readUInt32(aMap, i*4);
A.set(getWitness(iA), i*n8r);
const iB = readUInt32(bMap, i*4);
B.set(getWitness(iB), i*n8r);
const iC = readUInt32(cMap, i*4);
C.set(getWitness(iC), i*n8r);
}
A = await Fr.batchToMontgomery(A);
B = await Fr.batchToMontgomery(B);
C = await Fr.batchToMontgomery(C);
return [A,B,C];
}
function readUInt32(b, o) {
const buff = b.slice(o, o+4);
const buffV = new DataView(buff.buffer, buff.byteOffset, buff.byteLength);
return buffV.getUint32(0, true);
}
function getWitness(idx) {
if (idx < zkey.nVars-zkey.nAdditions) {
return buffWitness.slice(idx*n8r, idx*n8r+n8r);
} else if (idx < zkey.nVars) {
return buffInternalWitness.slice((idx - (zkey.nVars-zkey.nAdditions))*n8r, (idx-(zkey.nVars-zkey.nAdditions))*n8r + n8r);
} else {
return curve.Fr.zero;
}
}
async function round1() {
ch.b = [];
for (let i=1; i<=9; i++) {
ch.b[i] = curve.Fr.random();
}
[A, B, C] = await buildABC();
[pol_a, A4] = await to4T(A, [ch.b[2], ch.b[1]]);
[pol_b, B4] = await to4T(B, [ch.b[4], ch.b[3]]);
[pol_c, C4] = await to4T(C, [ch.b[6], ch.b[5]]);
proof.A = await expTau(pol_a, "multiexp A");
proof.B = await expTau(pol_b, "multiexp B");
proof.C = await expTau(pol_c, "multiexp C");
}
async function round2() {
const transcript1 = new Uint8Array(G1.F.n8*2*3);
G1.toRprUncompressed(transcript1, 0, proof.A);
G1.toRprUncompressed(transcript1, G1.F.n8*2, proof.B);
G1.toRprUncompressed(transcript1, G1.F.n8*4, proof.C);
ch.beta = hashToFr(transcript1);
if (logger) logger.debug("beta: " + Fr.toString(ch.beta));
const transcript2 = new Uint8Array(n8r);
Fr.toRprBE(transcript2, 0, ch.beta);
ch.gamma = hashToFr(transcript2);
if (logger) logger.debug("gamma: " + Fr.toString(ch.gamma));
let numArr = new BigBuffer(Fr.n8*zkey.domainSize);
let denArr = new BigBuffer(Fr.n8*zkey.domainSize);
numArr.set(Fr.one, 0);
denArr.set(Fr.one, 0);
let w = Fr.one;
for (let i=0; i<zkey.domainSize; i++) {
let n1 = A.slice(i*n8r, (i+1)*n8r);
n1 = Fr.add( n1, Fr.mul(ch.beta, w) );
n1 = Fr.add( n1, ch.gamma );
let n2 = B.slice(i*n8r, (i+1)*n8r);
n2 = Fr.add( n2, Fr.mul(zkey.k1, Fr.mul(ch.beta, w) ));
n2 = Fr.add( n2, ch.gamma );
let n3 = C.slice(i*n8r, (i+1)*n8r);
n3 = Fr.add( n3, Fr.mul(zkey.k2, Fr.mul(ch.beta, w) ));
n3 = Fr.add( n3, ch.gamma );
const num = Fr.mul(n1, Fr.mul(n2, n3));
let d1 = A.slice(i*n8r, (i+1)*n8r);
d1 = Fr.add(d1, Fr.mul( sigmaBuff.slice(i*n8r*4, i*n8r*4 + n8r) , ch.beta));
d1 = Fr.add(d1, ch.gamma);
let d2 = B.slice(i*n8r, (i+1)*n8r);
d2 = Fr.add(d2, Fr.mul( sigmaBuff.slice((zkey.domainSize + i)*4*n8r, (zkey.domainSize + i)*4*n8r+n8r) , ch.beta));
d2 = Fr.add(d2, ch.gamma);
let d3 = C.slice(i*n8r, (i+1)*n8r);
d3 = Fr.add(d3, Fr.mul( sigmaBuff.slice((zkey.domainSize*2 + i)*4*n8r, (zkey.domainSize*2 + i)*4*n8r + n8r) , ch.beta));
d3 = Fr.add(d3, ch.gamma);
const den = Fr.mul(d1, Fr.mul(d2, d3));
numArr.set(
Fr.mul(
numArr.slice(i*n8r,(i+1)*n8r) ,
num
),
((i+1)%zkey.domainSize)*n8r
);
denArr.set(
Fr.mul(
denArr.slice(i*n8r,(i+1)*n8r) ,
den
),
((i+1)%zkey.domainSize)*n8r
);
w = Fr.mul(w, Fr.w[zkey.power]);
}
denArr = await Fr.batchInverse(denArr);
// TODO: Do it in assembly and in parallel
for (let i=0; i<zkey.domainSize; i++) {
numArr.set( Fr.mul( numArr.slice(i*n8r, (i+1)*n8r), denArr.slice(i*n8r, (i+1)*n8r) ) ,i*n8r);
}
if (!Fr.eq(numArr.slice(0, n8r), Fr.one)) {
throw new Error("Copy constraints does not match");
}
Z = numArr;
[pol_z, Z4] = await to4T(Z, [ch.b[9], ch.b[8], ch.b[7]]);
proof.Z = await expTau( pol_z, "multiexp Z");
}
async function round3() {
/*
async function checkDegree(P) {
const p = await curve.Fr.ifft(P);
let deg = (P.byteLength/n8r)-1;
while ((deg>0)&&(Fr.isZero(p.slice(deg*n8r, deg*n8r+n8r)))) deg--;
return deg;
}
function printPol(P) {
const n=(P.byteLength/n8r);
console.log("[");
for (let i=0; i<n; i++) {
console.log(Fr.toString(P.slice(i*n8r, i*n8r+n8r)));
}
console.log("]");
}
*/
if (logger) logger.debug("phse3: Reading QM4");
const QM4 = new BigBuffer(zkey.domainSize*4*n8r);
await fdZKey.readToBuffer(QM4, 0 , zkey.domainSize*n8r*4, sectionsZKey[7][0].p + zkey.domainSize*n8r);
if (logger) logger.debug("phse3: Reading QL4");
const QL4 = new BigBuffer(zkey.domainSize*4*n8r);
await fdZKey.readToBuffer(QL4, 0 , zkey.domainSize*n8r*4, sectionsZKey[8][0].p + zkey.domainSize*n8r);
if (logger) logger.debug("phse3: Reading QR4");
const QR4 = new BigBuffer(zkey.domainSize*4*n8r);
await fdZKey.readToBuffer(QR4, 0 , zkey.domainSize*n8r*4, sectionsZKey[9][0].p + zkey.domainSize*n8r);
if (logger) logger.debug("phse3: Reading QO4");
const QO4 = new BigBuffer(zkey.domainSize*4*n8r);
await fdZKey.readToBuffer(QO4, 0 , zkey.domainSize*n8r*4, sectionsZKey[10][0].p + zkey.domainSize*n8r);
if (logger) logger.debug("phse3: Reading QC4");
const QC4 = new BigBuffer(zkey.domainSize*4*n8r);
await fdZKey.readToBuffer(QC4, 0 , zkey.domainSize*n8r*4, sectionsZKey[11][0].p + zkey.domainSize*n8r);
const lPols = await binFileUtils.readSection(fdZKey, sectionsZKey, 13);
const transcript3 = new Uint8Array(G1.F.n8*2);
G1.toRprUncompressed(transcript3, 0, proof.Z);
ch.alpha = hashToFr(transcript3);
if (logger) logger.debug("alpha: " + Fr.toString(ch.alpha));
const Z1 = [
Fr.zero,
Fr.add(Fr.e(-1), Fr.w[2]),
Fr.e(-2),
Fr.sub(Fr.e(-1), Fr.w[2]),
];
const Z2 = [
Fr.zero,
Fr.add(Fr.zero, Fr.mul(Fr.e(-2), Fr.w[2])),
Fr.e(4),
Fr.sub(Fr.zero, Fr.mul(Fr.e(-2), Fr.w[2])),
];
const Z3 = [
Fr.zero,
Fr.add(Fr.e(2), Fr.mul(Fr.e(2), Fr.w[2])),
Fr.e(-8),
Fr.sub(Fr.e(2), Fr.mul(Fr.e(2), Fr.w[2])),
];
const T = new BigBuffer(zkey.domainSize*4*n8r);
const Tz = new BigBuffer(zkey.domainSize*4*n8r);
let w = Fr.one;
for (let i=0; i<zkey.domainSize*4; i++) {
if ((i%4096 == 0)&&(logger)) logger.debug(`calculating t ${i}/${zkey.domainSize*4}`);
const a = A4.slice(i*n8r, i*n8r+n8r);
const b = B4.slice(i*n8r, i*n8r+n8r);
const c = C4.slice(i*n8r, i*n8r+n8r);
const z = Z4.slice(i*n8r, i*n8r+n8r);
const zw = Z4.slice(((i+zkey.domainSize*4+4)%(zkey.domainSize*4)) *n8r, ((i+zkey.domainSize*4+4)%(zkey.domainSize*4)) *n8r +n8r);
const qm = QM4.slice(i*n8r, i*n8r+n8r);
const ql = QL4.slice(i*n8r, i*n8r+n8r);
const qr = QR4.slice(i*n8r, i*n8r+n8r);
const qo = QO4.slice(i*n8r, i*n8r+n8r);
const qc = QC4.slice(i*n8r, i*n8r+n8r);
const s1 = sigmaBuff.slice(i*n8r, i*n8r+n8r);
const s2 = sigmaBuff.slice((i+zkey.domainSize*4)*n8r, (i+zkey.domainSize*4)*n8r+n8r);
const s3 = sigmaBuff.slice((i+zkey.domainSize*8)*n8r, (i+zkey.domainSize*8)*n8r+n8r);
const ap = Fr.add(ch.b[2], Fr.mul(ch.b[1], w));
const bp = Fr.add(ch.b[4], Fr.mul(ch.b[3], w));
const cp = Fr.add(ch.b[6], Fr.mul(ch.b[5], w));
const w2 = Fr.square(w);
const zp = Fr.add(Fr.add(Fr.mul(ch.b[7], w2), Fr.mul(ch.b[8], w)), ch.b[9]);
const wW = Fr.mul(w, Fr.w[zkey.power]);
const wW2 = Fr.square(wW);
const zWp = Fr.add(Fr.add(Fr.mul(ch.b[7], wW2), Fr.mul(ch.b[8], wW)), ch.b[9]);
let pl = Fr.zero;
for (let j=0; j<zkey.nPublic; j++) {
pl = Fr.sub(pl, Fr.mul(
lPols.slice( (j*5*zkey.domainSize+ zkey.domainSize+ i)*n8r, (j*5*zkey.domainSize+ zkey.domainSize + i+1)*n8r),
A.slice(j*n8r, (j+1)*n8r)
));
}
let [e1, e1z] = mul2(a, b, ap, bp, i%4);
e1 = Fr.mul(e1, qm);
e1z = Fr.mul(e1z, qm);
e1 = Fr.add(e1, Fr.mul(a, ql));
e1z = Fr.add(e1z, Fr.mul(ap, ql));
e1 = Fr.add(e1, Fr.mul(b, qr));
e1z = Fr.add(e1z, Fr.mul(bp, qr));
e1 = Fr.add(e1, Fr.mul(c, qo));
e1z = Fr.add(e1z, Fr.mul(cp, qo));
e1 = Fr.add(e1, pl);
e1 = Fr.add(e1, qc);
const betaw = Fr.mul(ch.beta, w);
let e2a =a;
e2a = Fr.add(e2a, betaw);
e2a = Fr.add(e2a, ch.gamma);
let e2b =b;
e2b = Fr.add(e2b, Fr.mul(betaw, zkey.k1));
e2b = Fr.add(e2b, ch.gamma);
let e2c =c;
e2c = Fr.add(e2c, Fr.mul(betaw, zkey.k2));
e2c = Fr.add(e2c, ch.gamma);
let e2d = z;
let [e2, e2z] = mul4(e2a, e2b, e2c, e2d, ap, bp, cp, zp, i%4);
e2 = Fr.mul(e2, ch.alpha);
e2z = Fr.mul(e2z, ch.alpha);
let e3a = a;
e3a = Fr.add(e3a, Fr.mul(ch.beta, s1));
e3a = Fr.add(e3a, ch.gamma);
let e3b = b;
e3b = Fr.add(e3b, Fr.mul(ch.beta,s2));
e3b = Fr.add(e3b, ch.gamma);
let e3c = c;
e3c = Fr.add(e3c, Fr.mul(ch.beta,s3));
e3c = Fr.add(e3c, ch.gamma);
let e3d = zw;
let [e3, e3z] = mul4(e3a, e3b, e3c, e3d, ap, bp, cp, zWp, i%4);
e3 = Fr.mul(e3, ch.alpha);
e3z = Fr.mul(e3z, ch.alpha);
let e4 = Fr.sub(z, Fr.one);
e4 = Fr.mul(e4, lPols.slice( (zkey.domainSize + i)*n8r, (zkey.domainSize+i+1)*n8r));
e4 = Fr.mul(e4, Fr.mul(ch.alpha, ch.alpha));
let e4z = Fr.mul(zp, lPols.slice( (zkey.domainSize + i)*n8r, (zkey.domainSize+i+1)*n8r));
e4z = Fr.mul(e4z, Fr.mul(ch.alpha, ch.alpha));
let e = Fr.add(Fr.sub(Fr.add(e1, e2), e3), e4);
let ez = Fr.add(Fr.sub(Fr.add(e1z, e2z), e3z), e4z);
T.set(e, i*n8r);
Tz.set(ez, i*n8r);
w = Fr.mul(w, Fr.w[zkey.power+2]);
}
if (logger) logger.debug("ifft T");
let t = await Fr.ifft(T);
if (logger) logger.debug("dividing T/Z");
for (let i=0; i<zkey.domainSize; i++) {
t.set(Fr.neg(t.slice(i*n8r, i*n8r+n8r)), i*n8r);
}
for (let i=zkey.domainSize; i<zkey.domainSize*4; i++) {
const a = Fr.sub(
t.slice((i-zkey.domainSize)*n8r, (i-zkey.domainSize)*n8r + n8r),
t.slice(i*n8r, i*n8r+n8r)
);
t.set(a, i*n8r);
if (i > (zkey.domainSize*3 -4) ) {
if (!Fr.isZero(a)) {
throw new Error("T Polynomial is not divisible");
}
}
}
if (logger) logger.debug("ifft Tz");
const tz = await Fr.ifft(Tz);
for (let i=0; i<zkey.domainSize*4; i++) {
const a = tz.slice(i*n8r, (i+1)*n8r);
if (i > (zkey.domainSize*3 +5) ) {
if (!Fr.isZero(a)) {
throw new Error("Tz Polynomial is not well calculated");
}
} else {
t.set(
Fr.add(
t.slice(i*n8r, (i+1)*n8r),
a
),
i*n8r
);
}
}
pol_t = t.slice(0, (zkey.domainSize*3+6)*n8r);
proof.T1 = await expTau( t.slice(0, zkey.domainSize*n8r) , "multiexp T1");
proof.T2 = await expTau( t.slice(zkey.domainSize*n8r, zkey.domainSize*2*n8r) , "multiexp T2");
proof.T3 = await expTau( t.slice(zkey.domainSize*2*n8r, (zkey.domainSize*3+6)*n8r) , "multiexp T3");
function mul2(a,b, ap, bp, p) {
let r, rz;
const a_b = Fr.mul(a,b);
const a_bp = Fr.mul(a,bp);
const ap_b = Fr.mul(ap,b);
const ap_bp = Fr.mul(ap,bp);
r = a_b;
let a0 = Fr.add(a_bp, ap_b);
let a1 = ap_bp;
rz = a0;
if (p) {
rz = Fr.add(rz, Fr.mul(Z1[p], a1));
}
return [r, rz];
}
function mul4(a,b,c,d, ap, bp, cp, dp, p) {
let r, rz;
const a_b = Fr.mul(a,b);
const a_bp = Fr.mul(a,bp);
const ap_b = Fr.mul(ap,b);
const ap_bp = Fr.mul(ap,bp);
const c_d = Fr.mul(c,d);
const c_dp = Fr.mul(c,dp);
const cp_d = Fr.mul(cp,d);
const cp_dp = Fr.mul(cp,dp);
r = Fr.mul(a_b, c_d);
let a0 = Fr.mul(ap_b, c_d);
a0 = Fr.add(a0, Fr.mul(a_bp, c_d));
a0 = Fr.add(a0, Fr.mul(a_b, cp_d));
a0 = Fr.add(a0, Fr.mul(a_b, c_dp));
let a1 = Fr.mul(ap_bp, c_d);
a1 = Fr.add(a1, Fr.mul(ap_b, cp_d));
a1 = Fr.add(a1, Fr.mul(ap_b, c_dp));
a1 = Fr.add(a1, Fr.mul(a_bp, cp_d));
a1 = Fr.add(a1, Fr.mul(a_bp, c_dp));
a1 = Fr.add(a1, Fr.mul(a_b, cp_dp));
let a2 = Fr.mul(a_bp, cp_dp);
a2 = Fr.add(a2, Fr.mul(ap_b, cp_dp));
a2 = Fr.add(a2, Fr.mul(ap_bp, c_dp));
a2 = Fr.add(a2, Fr.mul(ap_bp, cp_d));
let a3 = Fr.mul(ap_bp, cp_dp);
rz = a0;
if (p) {
rz = Fr.add(rz, Fr.mul(Z1[p], a1));
rz = Fr.add(rz, Fr.mul(Z2[p], a2));
rz = Fr.add(rz, Fr.mul(Z3[p], a3));
}
return [r, rz];
}
}
async function round4() {
const pol_qm = new BigBuffer(zkey.domainSize*n8r);
await fdZKey.readToBuffer(pol_qm, 0 , zkey.domainSize*n8r, sectionsZKey[7][0].p);
const pol_ql = new BigBuffer(zkey.domainSize*n8r);
await fdZKey.readToBuffer(pol_ql, 0 , zkey.domainSize*n8r, sectionsZKey[8][0].p);
const pol_qr = new BigBuffer(zkey.domainSize*n8r);
await fdZKey.readToBuffer(pol_qr, 0 , zkey.domainSize*n8r, sectionsZKey[9][0].p);
const pol_qo = new BigBuffer(zkey.domainSize*n8r);
await fdZKey.readToBuffer(pol_qo, 0 , zkey.domainSize*n8r, sectionsZKey[10][0].p);
const pol_qc = new BigBuffer(zkey.domainSize*n8r);
await fdZKey.readToBuffer(pol_qc, 0 , zkey.domainSize*n8r, sectionsZKey[11][0].p);
const pol_s3 = new BigBuffer(zkey.domainSize*n8r);
await fdZKey.readToBuffer(pol_s3, 0 , zkey.domainSize*n8r, sectionsZKey[12][0].p + 10*zkey.domainSize*n8r);
const transcript4 = new Uint8Array(G1.F.n8*2*3);
G1.toRprUncompressed(transcript4, 0, proof.T1);
G1.toRprUncompressed(transcript4, G1.F.n8*2, proof.T2);
G1.toRprUncompressed(transcript4, G1.F.n8*4, proof.T3);
ch.xi = hashToFr(transcript4);
if (logger) logger.debug("xi: " + Fr.toString(ch.xi));
proof.eval_a = evalPol(pol_a, ch.xi);
proof.eval_b = evalPol(pol_b, ch.xi);
proof.eval_c = evalPol(pol_c, ch.xi);
proof.eval_s1 = evalPol(pol_s1, ch.xi);
proof.eval_s2 = evalPol(pol_s2, ch.xi);
proof.eval_t = evalPol(pol_t, ch.xi);
proof.eval_zw = evalPol(pol_z, Fr.mul(ch.xi, Fr.w[zkey.power]));
const coef_ab = Fr.mul(proof.eval_a, proof.eval_b);
let e2a = proof.eval_a;
const betaxi = Fr.mul(ch.beta, ch.xi);
e2a = Fr.add( e2a, betaxi);
e2a = Fr.add( e2a, ch.gamma);
let e2b = proof.eval_b;
e2b = Fr.add( e2b, Fr.mul(betaxi, zkey.k1));
e2b = Fr.add( e2b, ch.gamma);
let e2c = proof.eval_c;
e2c = Fr.add( e2c, Fr.mul(betaxi, zkey.k2));
e2c = Fr.add( e2c, ch.gamma);
const e2 = Fr.mul(Fr.mul(Fr.mul(e2a, e2b), e2c), ch.alpha);
let e3a = proof.eval_a;
e3a = Fr.add( e3a, Fr.mul(ch.beta, proof.eval_s1));
e3a = Fr.add( e3a, ch.gamma);
let e3b = proof.eval_b;
e3b = Fr.add( e3b, Fr.mul(ch.beta, proof.eval_s2));
e3b = Fr.add( e3b, ch.gamma);
let e3 = Fr.mul(e3a, e3b);
e3 = Fr.mul(e3, ch.beta);
e3 = Fr.mul(e3, proof.eval_zw);
e3 = Fr.mul(e3, ch.alpha);
ch.xim= ch.xi;
for (let i=0; i<zkey.power; i++) ch.xim = Fr.mul(ch.xim, ch.xim);
const eval_l1 = Fr.div(
Fr.sub(ch.xim, Fr.one),
Fr.mul(Fr.sub(ch.xi, Fr.one), Fr.e(zkey.domainSize))
);
const e4 = Fr.mul(eval_l1, Fr.mul(ch.alpha, ch.alpha));
const coefs3 = e3;
const coefz = Fr.add(e2, e4);
pol_r = new BigBuffer((zkey.domainSize+3)*n8r);
for (let i = 0; i<zkey.domainSize+3; i++) {
let v = Fr.mul(coefz, pol_z.slice(i*n8r,(i+1)*n8r));
if (i<zkey.domainSize) {
v = Fr.add(v, Fr.mul(coef_ab, pol_qm.slice(i*n8r,(i+1)*n8r)));
v = Fr.add(v, Fr.mul(proof.eval_a, pol_ql.slice(i*n8r,(i+1)*n8r)));
v = Fr.add(v, Fr.mul(proof.eval_b, pol_qr.slice(i*n8r,(i+1)*n8r)));
v = Fr.add(v, Fr.mul(proof.eval_c, pol_qo.slice(i*n8r,(i+1)*n8r)));
v = Fr.add(v, pol_qc.slice(i*n8r,(i+1)*n8r));
v = Fr.sub(v, Fr.mul(coefs3, pol_s3.slice(i*n8r,(i+1)*n8r)));
}
pol_r.set(v, i*n8r);
}
proof.eval_r = evalPol(pol_r, ch.xi);
}
async function round5() {
const transcript5 = new Uint8Array(n8r*7);
Fr.toRprBE(transcript5, 0, proof.eval_a);
Fr.toRprBE(transcript5, n8r, proof.eval_b);
Fr.toRprBE(transcript5, n8r*2, proof.eval_c);
Fr.toRprBE(transcript5, n8r*3, proof.eval_s1);
Fr.toRprBE(transcript5, n8r*4, proof.eval_s2);
Fr.toRprBE(transcript5, n8r*5, proof.eval_zw);
Fr.toRprBE(transcript5, n8r*6, proof.eval_r);
ch.v = [];
ch.v[1] = hashToFr(transcript5);
if (logger) logger.debug("v: " + Fr.toString(ch.v[1]));
for (let i=2; i<=6; i++ ) ch.v[i] = Fr.mul(ch.v[i-1], ch.v[1]);
let pol_wxi = new BigBuffer((zkey.domainSize+6)*n8r);
const xi2m = Fr.mul(ch.xim, ch.xim);
for (let i=0; i<zkey.domainSize+6; i++) {
let w = Fr.zero;
w = Fr.add(w, Fr.mul(xi2m, pol_t.slice( (zkey.domainSize*2+i)*n8r, (zkey.domainSize*2+i+1)*n8r )));
if (i<zkey.domainSize+3) {
w = Fr.add(w, Fr.mul(ch.v[1], pol_r.slice(i*n8r, (i+1)*n8r)));
}
if (i<zkey.domainSize+2) {
w = Fr.add(w, Fr.mul(ch.v[2], pol_a.slice(i*n8r, (i+1)*n8r)));
w = Fr.add(w, Fr.mul(ch.v[3], pol_b.slice(i*n8r, (i+1)*n8r)));
w = Fr.add(w, Fr.mul(ch.v[4], pol_c.slice(i*n8r, (i+1)*n8r)));
}
if (i<zkey.domainSize) {
w = Fr.add(w, pol_t.slice(i*n8r, (i+1)*n8r));
w = Fr.add(w, Fr.mul(ch.xim, pol_t.slice( (zkey.domainSize+i)*n8r, (zkey.domainSize+i+1)*n8r )));
w = Fr.add(w, Fr.mul(ch.v[5], pol_s1.slice(i*n8r, (i+1)*n8r)));
w = Fr.add(w, Fr.mul(ch.v[6], pol_s2.slice(i*n8r, (i+1)*n8r)));
}
pol_wxi.set(w, i*n8r);
}
let w0 = pol_wxi.slice(0, n8r);
w0 = Fr.sub(w0, proof.eval_t);
w0 = Fr.sub(w0, Fr.mul(ch.v[1], proof.eval_r));
w0 = Fr.sub(w0, Fr.mul(ch.v[2], proof.eval_a));
w0 = Fr.sub(w0, Fr.mul(ch.v[3], proof.eval_b));
w0 = Fr.sub(w0, Fr.mul(ch.v[4], proof.eval_c));
w0 = Fr.sub(w0, Fr.mul(ch.v[5], proof.eval_s1));
w0 = Fr.sub(w0, Fr.mul(ch.v[6], proof.eval_s2));
pol_wxi.set(w0, 0);
pol_wxi= divPol1(pol_wxi, ch.xi);
proof.Wxi = await expTau(pol_wxi, "multiexp Wxi");
let pol_wxiw = new BigBuffer((zkey.domainSize+3)*n8r);
for (let i=0; i<zkey.domainSize+3; i++) {
const w = pol_z.slice(i*n8r, (i+1)*n8r);
pol_wxiw.set(w, i*n8r);
}
w0 = pol_wxiw.slice(0, n8r);
w0 = Fr.sub(w0, proof.eval_zw);
pol_wxiw.set(w0, 0);
pol_wxiw= divPol1(pol_wxiw, Fr.mul(ch.xi, Fr.w[zkey.power]));
proof.Wxiw = await expTau(pol_wxiw, "multiexp Wxiw");
}
function hashToFr(transcript) {
const v = Scalar.fromRprBE(new Uint8Array(keccak256.arrayBuffer(transcript)));
return Fr.e(v);
}
function evalPol(P, x) {
const n = P.byteLength / n8r;
if (n == 0) return Fr.zero;
let res = P.slice((n-1)*n8r, n*n8r);
for (let i=n-2; i>=0; i--) {
res = Fr.add(Fr.mul(res, x), P.slice(i*n8r, (i+1)*n8r));
}
return res;
}
function divPol1(P, d) {
const n = P.byteLength/n8r;
const res = new BigBuffer(n*n8r);
res.set(Fr.zero, (n-1) *n8r);
res.set(P.slice((n-1)*n8r, n*n8r), (n-2)*n8r);
for (let i=n-3; i>=0; i--) {
res.set(
Fr.add(
P.slice((i+1)*n8r, (i+2)*n8r),
Fr.mul(
d,
res.slice((i+1)*n8r, (i+2)*n8r)
)
),
i*n8r
);
}
if (!Fr.eq(
P.slice(0, n8r),
Fr.mul(
Fr.neg(d),
res.slice(0, n8r)
)
)) {
throw new Error("Polinomial does not divide");
}
return res;
}
async function expTau(b, name) {
const n = b.byteLength/n8r;
const PTauN = PTau.slice(0, n*curve.G1.F.n8*2);
const bm = await curve.Fr.batchFromMontgomery(b);
let res = await curve.G1.multiExpAffine(PTauN, bm, logger, name);
res = curve.G1.toAffine(res);
return res;
}
async function to4T(A, pz) {
pz = pz || [];
let a = await Fr.ifft(A);
const a4 = new BigBuffer(n8r*zkey.domainSize*4);
a4.set(a, 0);
const a1 = new BigBuffer(n8r*(zkey.domainSize + pz.length));
a1.set(a, 0);
for (let i= 0; i<pz.length; i++) {
a1.set(
Fr.add(
a1.slice((zkey.domainSize+i)*n8r, (zkey.domainSize+i+1)*n8r),
pz[i]
),
(zkey.domainSize+i)*n8r
);
a1.set(
Fr.sub(
a1.slice(i*n8r, (i+1)*n8r),
pz[i]
),
i*n8r
);
}
const A4 = await Fr.fft(a4);
return [a1, A4];
}
}
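
evalPol above is Horner's rule over Fr. The same recurrence over plain integers, as a quick sanity check (plain numbers stand in for field elements):

// P(x) = 2 + 3x + 5x^2 at x = 4: ((5*4) + 3)*4 + 2 = 94
function evalPolPlain(coeffs, x) {
    let res = coeffs[coeffs.length - 1];
    for (let i = coeffs.length - 2; i >= 0; i--) res = res * x + coeffs[i];
    return res;
}
console.log(evalPolPlain([2, 3, 5], 4));   // 94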

View File

@ -1,424 +0,0 @@
/*
Copyright 2021 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2019/953.pdf */
import {readR1csHeader} from "r1csfile";
import * as utils from "./powersoftau_utils.js";
import {
readBinFile,
createBinFile,
readSection,
writeBigInt,
startWriteSection,
endWriteSection,
} from "@iden3/binfileutils";
import { log2 } from "./misc.js";
import { Scalar, BigBuffer } from "ffjavascript";
import Blake2b from "blake2b-wasm";
import BigArray from "./bigarray.js";
export default async function plonkSetup(r1csName, ptauName, zkeyName, logger) {
await Blake2b.ready();
const {fd: fdPTau, sections: sectionsPTau} = await readBinFile(ptauName, "ptau", 1, 1<<22, 1<<24);
const {curve, power} = await utils.readPTauHeader(fdPTau, sectionsPTau);
const {fd: fdR1cs, sections: sectionsR1cs} = await readBinFile(r1csName, "r1cs", 1, 1<<22, 1<<24);
const r1cs = await readR1csHeader(fdR1cs, sectionsR1cs, false);
const sG1 = curve.G1.F.n8*2;
const G1 = curve.G1;
const sG2 = curve.G2.F.n8*2;
const Fr = curve.Fr;
const n8r = curve.Fr.n8;
if (logger) logger.info("Reading r1cs");
let sR1cs = await readSection(fdR1cs, sectionsR1cs, 2);
const plonkConstraints = new BigArray();
const plonkAdditions = new BigArray();
let plonkNVars = r1cs.nVars;
const nPublic = r1cs.nOutputs + r1cs.nPubInputs;
await processConstraints();
const fdZKey = await createBinFile(zkeyName, "zkey", 1, 14, 1<<22, 1<<24);
if (r1cs.prime != curve.r) {
if (logger) logger.error("r1cs curve does not match powers of tau ceremony curve");
return -1;
}
let cirPower = log2(plonkConstraints.length -1) +1;
if (cirPower < 3) cirPower = 3; // As the t polynomial is n+5 we need at least a power of 4
const domainSize = 2 ** cirPower;
if (logger) logger.info("Plonk constraints: " + plonkConstraints.length);
if (cirPower > power) {
if (logger) logger.error(`circuit too big for this power of tau ceremony. ${plonkConstraints.length} > 2**${power}`);
return -1;
}
if (!sectionsPTau[12]) {
if (logger) logger.error("Powers of tau is not prepared.");
return -1;
}
const LPoints = new BigBuffer(domainSize*sG1);
const o = sectionsPTau[12][0].p + ((2 ** (cirPower)) -1)*sG1;
await fdPTau.readToBuffer(LPoints, 0, domainSize*sG1, o);
const [k1, k2] = getK1K2();
const vk = {};
await writeAdditions(3, "Additions");
await writeWitnessMap(4, 0, "Amap");
await writeWitnessMap(5, 1, "Bmap");
await writeWitnessMap(6, 2, "Cmap");
await writeQMap(7, 3, "Qm");
await writeQMap(8, 4, "Ql");
await writeQMap(9, 5, "Qr");
await writeQMap(10, 6, "Qo");
await writeQMap(11, 7, "Qc");
await writeSigma(12, "sigma");
await writeLs(13, "lagrange polynomials");
// Write PTau points
////////////
await startWriteSection(fdZKey, 14);
const buffOut = new BigBuffer((domainSize+6)*sG1);
await fdPTau.readToBuffer(buffOut, 0, (domainSize+6)*sG1, sectionsPTau[2][0].p);
await fdZKey.write(buffOut);
await endWriteSection(fdZKey);
await writeHeaders();
await fdZKey.close();
await fdR1cs.close();
await fdPTau.close();
if (logger) logger.info("Setup Finished");
return ;
async function processConstraints() {
let r1csPos = 0;
function r1cs_readULE32() {
const buff = sR1cs.slice(r1csPos, r1csPos+4);
r1csPos += 4;
const buffV = new DataView(buff.buffer);
return buffV.getUint32(0, true);
}
function r1cs_readCoef() {
const res = Fr.fromRprLE(sR1cs.slice(r1csPos, r1csPos+curve.Fr.n8));
r1csPos += curve.Fr.n8;
return res;
}
function r1cs_readCoefs() {
const coefs = [];
const res = {
k: curve.Fr.zero
};
const nA = r1cs_readULE32();
for (let i=0; i<nA; i++) {
const s = r1cs_readULE32();
const coefp = r1cs_readCoef();
if (s==0) {
res.k = coefp;
} else {
coefs.push([s, coefp]);
}
}
const resCoef = reduceCoef(coefs);
res.s = resCoef[0];
res.coef = resCoef[1];
return res;
}
function reduceCoef(coefs) {
if (coefs.length == 0) {
return [0, curve.Fr.zero];
}
if (coefs.length == 1) {
return coefs[0];
}
const arr1 = coefs.slice(0, coefs.length >> 1);
const arr2 = coefs.slice(coefs.length >> 1);
const coef1 = reduceCoef(arr1);
const coef2 = reduceCoef(arr2);
const sl = coef1[0];
const sr = coef2[0];
const so = plonkNVars++;
const qm = curve.Fr.zero;
const ql = Fr.neg(coef1[1]);
const qr = Fr.neg(coef2[1]);
const qo = curve.Fr.one;
const qc = curve.Fr.zero;
plonkConstraints.push([sl, sr, so, qm, ql, qr, qo, qc]);
plonkAdditions.push([sl, sr, coef1[1], coef2[1]]);
return [so, curve.Fr.one];
}
for (let s = 1; s <= nPublic ; s++) {
const sl = s;
const sr = 0;
const so = 0;
const qm = curve.Fr.zero;
const ql = curve.Fr.one;
const qr = curve.Fr.zero;
const qo = curve.Fr.zero;
const qc = curve.Fr.zero;
plonkConstraints.push([sl, sr, so, qm, ql, qr, qo, qc]);
}
for (let c=0; c<r1cs.nConstraints; c++) {
if ((logger)&&(c%10000 == 0)) logger.debug(`processing constraints: ${c}/${r1cs.nConstraints}`);
const A = r1cs_readCoefs();
const B = r1cs_readCoefs();
const C = r1cs_readCoefs();
const sl = A.s;
const sr = B.s;
const so = C.s;
const qm = curve.Fr.mul(A.coef, B.coef);
const ql = curve.Fr.mul(A.coef, B.k);
const qr = curve.Fr.mul(A.k, B.coef);
const qo = curve.Fr.neg(C.coef);
const qc = curve.Fr.sub(curve.Fr.mul(A.k, B.k) , C.k);
plonkConstraints.push([sl, sr, so, qm, ql, qr, qo, qc]);
}
}
async function writeWitnessMap(sectionNum, posConstraint, name) {
await startWriteSection(fdZKey, sectionNum);
for (let i=0; i<plonkConstraints.length; i++) {
await fdZKey.writeULE32(plonkConstraints[i][posConstraint]);
if ((logger)&&(i%1000000 == 0)) logger.debug(`writing ${name}: ${i}/${plonkConstraints.length}`);
}
await endWriteSection(fdZKey);
}
async function writeQMap(sectionNum, posConstraint, name) {
let Q = new BigBuffer(domainSize*n8r);
for (let i=0; i<plonkConstraints.length; i++) {
Q.set(plonkConstraints[i][posConstraint], i*n8r);
if ((logger)&&(i%1000000 == 0)) logger.debug(`writing ${name}: ${i}/${plonkConstraints.length}`);
}
await startWriteSection(fdZKey, sectionNum);
await writeP4(Q);
await endWriteSection(fdZKey);
Q = await Fr.batchFromMontgomery(Q);
vk[name]= await curve.G1.multiExpAffine(LPoints, Q, logger, "multiexp "+name);
}
async function writeP4(buff) {
const q = await Fr.ifft(buff);
const q4 = new BigBuffer(domainSize*n8r*4);
q4.set(q, 0);
const Q4 = await Fr.fft(q4);
await fdZKey.write(q);
await fdZKey.write(Q4);
}
async function writeAdditions(sectionNum, name) {
await startWriteSection(fdZKey, sectionNum);
const buffOut = new Uint8Array((2*4+2*n8r));
const buffOutV = new DataView(buffOut.buffer);
for (let i=0; i<plonkAdditions.length; i++) {
const addition=plonkAdditions[i];
let o=0;
buffOutV.setUint32(o, addition[0], true); o+=4;
buffOutV.setUint32(o, addition[1], true); o+=4;
// The value is stored in Montgomery form: stored = v*R,
// so when Montgomery-multiplied by the witness the result is v*R*w/R = v*w
buffOut.set(addition[2], o); o+= n8r;
buffOut.set(addition[3], o); o+= n8r;
await fdZKey.write(buffOut);
if ((logger)&&(i%1000000 == 0)) logger.debug(`writing ${name}: ${i}/${plonkAdditions.length}`);
}
await endWriteSection(fdZKey);
}
async function writeSigma(sectionNum, name) {
const sigma = new BigBuffer(n8r*domainSize*3);
const lastAparence = new BigArray(plonkNVars);
const firstPos = new BigArray(plonkNVars);
let w = Fr.one;
for (let i=0; i<domainSize;i++) {
if (i<plonkConstraints.length) {
buildSigma(plonkConstraints[i][0], i);
buildSigma(plonkConstraints[i][1], domainSize + i);
buildSigma(plonkConstraints[i][2], domainSize*2 + i);
} else {
buildSigma(0, i);
buildSigma(0, domainSize + i);
buildSigma(0, domainSize*2 + i);
}
w = Fr.mul(w, Fr.w[cirPower]);
if ((logger)&&(i%1000000 == 0)) logger.debug(`writing ${name} phase1: ${i}/${domainSize}`);
}
for (let s=0; s<plonkNVars; s++) {
if (typeof firstPos[s] !== "undefined") {
sigma.set(lastAparence[s], firstPos[s]*n8r);
} else {
throw new Error("Variable not used");
}
if ((logger)&&(s%1000000 == 0)) logger.debug(`writing ${name} phase2: ${s}/${plonkNVars}`);
}
await startWriteSection(fdZKey, sectionNum);
let S1 = sigma.slice(0, domainSize*n8r);
await writeP4(S1);
let S2 = sigma.slice(domainSize*n8r, domainSize*n8r*2);
await writeP4(S2);
let S3 = sigma.slice(domainSize*n8r*2, domainSize*n8r*3);
await writeP4(S3);
await endWriteSection(fdZKey);
S1 = await Fr.batchFromMontgomery(S1);
S2 = await Fr.batchFromMontgomery(S2);
S3 = await Fr.batchFromMontgomery(S3);
vk.S1= await curve.G1.multiExpAffine(LPoints, S1, logger, "multiexp S1");
vk.S2= await curve.G1.multiExpAffine(LPoints, S2, logger, "multiexp S2");
vk.S3= await curve.G1.multiExpAffine(LPoints, S3, logger, "multiexp S3");
function buildSigma(s, p) {
if (typeof lastAparence[s] === "undefined") {
firstPos[s] = p;
} else {
sigma.set(lastAparence[s], p*n8r);
}
let v;
if (p<domainSize) {
v = w;
} else if (p<2*domainSize) {
v = Fr.mul(w, k1);
} else {
v = Fr.mul(w, k2);
}
lastAparence[s]=v;
}
}
async function writeLs(sectionNum, name) {
await startWriteSection(fdZKey, sectionNum);
const l=Math.max(nPublic, 1);
for (let i=0; i<l; i++) {
let buff = new BigBuffer(domainSize*n8r);
buff.set(Fr.one, i*n8r);
await writeP4(buff);
if (logger) logger.debug(`writing ${name} ${i}/${l}`);
}
await endWriteSection(fdZKey);
}
async function writeHeaders() {
// Write the header
///////////
await startWriteSection(fdZKey, 1);
await fdZKey.writeULE32(2); // Plonk
await endWriteSection(fdZKey);
// Write the Plonk header section
///////////
await startWriteSection(fdZKey, 2);
const primeQ = curve.q;
const n8q = (Math.floor( (Scalar.bitLength(primeQ) - 1) / 64) +1)*8;
const primeR = curve.r;
const n8r = (Math.floor( (Scalar.bitLength(primeR) - 1) / 64) +1)*8;
await fdZKey.writeULE32(n8q);
await writeBigInt(fdZKey, primeQ, n8q);
await fdZKey.writeULE32(n8r);
await writeBigInt(fdZKey, primeR, n8r);
await fdZKey.writeULE32(plonkNVars); // Total number of vars
await fdZKey.writeULE32(nPublic); // Total number of public vars (not including ONE)
await fdZKey.writeULE32(domainSize); // domainSize
await fdZKey.writeULE32(plonkAdditions.length); // Number of additions
await fdZKey.writeULE32(plonkConstraints.length);
await fdZKey.write(k1);
await fdZKey.write(k2);
await fdZKey.write(G1.toAffine(vk.Qm));
await fdZKey.write(G1.toAffine(vk.Ql));
await fdZKey.write(G1.toAffine(vk.Qr));
await fdZKey.write(G1.toAffine(vk.Qo));
await fdZKey.write(G1.toAffine(vk.Qc));
await fdZKey.write(G1.toAffine(vk.S1));
await fdZKey.write(G1.toAffine(vk.S2));
await fdZKey.write(G1.toAffine(vk.S3));
let bX_2;
bX_2 = await fdPTau.read(sG2, sectionsPTau[3][0].p + sG2);
await fdZKey.write(bX_2);
await endWriteSection(fdZKey);
}
function getK1K2() {
let k1 = Fr.two;
while (isIncluded(k1, [], cirPower)) k1 = Fr.add(k1, Fr.one);
let k2 = Fr.add(k1, Fr.one);
while (isIncluded(k2, [k1], cirPower)) k2 = Fr.add(k2, Fr.one);
return [k1, k2];
function isIncluded(k, kArr, pow) {
const domainSize= 2**pow;
let w = Fr.one;
for (let i=0; i<domainSize; i++) {
if (Fr.eq(k, w)) return true;
for (let j=0; j<kArr.length; j++) {
if (Fr.eq(k, Fr.mul(kArr[j], w))) return true;
}
w = Fr.mul(w, Fr.w[pow]);
}
return false;
}
}
}
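
getK1K2 above looks for constants k1 and k2 such that the cosets H, k1*H and k2*H of the evaluation domain are pairwise disjoint, which is what isIncluded checks. A toy illustration over the field F_17 with the order-4 subgroup generated by w = 4; all numbers are placeholders, not the real curve parameters.

const p = 17n, w = 4n, n = 4;
const H = [];
let acc = 1n;
for (let i = 0; i < n; i++) { H.push(acc); acc = acc * w % p; }   // H = [1, 4, 16, 13]
const k1 = 2n;                                                    // not in H
const k2 = 3n;                                                    // not in H and not in k1*H
console.log(H.includes(k1));                                      // false
console.log(H.map(h => k1 * h % p).includes(k2));                 // false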

View File

@ -1,398 +0,0 @@
/*
Copyright 2021 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2019/953.pdf */
import { Scalar } from "ffjavascript";
import * as curves from "./curves.js";
import { utils } from "ffjavascript";
const {unstringifyBigInts} = utils;
import jsSha3 from "js-sha3";
const { keccak256 } = jsSha3;
export default async function plonkVerify(vk_verifier, publicSignals, proof, logger) {
vk_verifier = unstringifyBigInts(vk_verifier);
proof = unstringifyBigInts(proof);
publicSignals = unstringifyBigInts(publicSignals);
const curve = await curves.getCurveFromName(vk_verifier.curve);
const Fr = curve.Fr;
const G1 = curve.G1;
proof = fromObjectProof(curve,proof);
vk_verifier = fromObjectVk(curve, vk_verifier);
if (!isWellConstructed(curve, proof)) {
logger.error("Proof is not well constructed");
return false;
}
const challanges = calculateChallanges(curve, proof);
if (logger) {
logger.debug("beta: " + Fr.toString(challanges.beta, 16));
logger.debug("gamma: " + Fr.toString(challanges.gamma, 16));
logger.debug("alpha: " + Fr.toString(challanges.alpha, 16));
logger.debug("xi: " + Fr.toString(challanges.xi, 16));
logger.debug("v1: " + Fr.toString(challanges.v[1], 16));
logger.debug("v6: " + Fr.toString(challanges.v[6], 16));
logger.debug("u: " + Fr.toString(challanges.u, 16));
}
const L = calculateLagrangeEvaluations(curve, challanges, vk_verifier);
if (logger) {
logger.debug("Lagrange Evaluations: ");
for (let i=1; i<L.length; i++) {
logger.debug(`L${i}(xi)=` + Fr.toString(L[i], 16));
}
}
if (publicSignals.length != vk_verifier.nPublic) {
logger.error("Number of public signals does not match with vk");
return false;
}
const pl = calculatePl(curve, publicSignals, L);
if (logger) {
logger.debug("Pl: " + Fr.toString(pl, 16));
}
const t = calculateT(curve, proof, challanges, pl, L[1]);
if (logger) {
logger.debug("t: " + Fr.toString(t, 16));
}
const D = calculateD(curve, proof, challanges, vk_verifier, L[1]);
if (logger) {
logger.debug("D: " + G1.toString(G1.toAffine(D), 16));
}
const F = calculateF(curve, proof, challanges, vk_verifier, D);
if (logger) {
logger.debug("F: " + G1.toString(G1.toAffine(F), 16));
}
const E = calculateE(curve, proof, challanges, vk_verifier, t);
if (logger) {
logger.debug("E: " + G1.toString(G1.toAffine(E), 16));
}
const res = await isValidPairing(curve, proof, challanges, vk_verifier, E, F);
if (logger) {
if (res) {
logger.info("OK!");
} else {
logger.warn("Invalid Proof");
}
}
return res;
}
function fromObjectProof(curve, proof) {
const G1 = curve.G1;
const Fr = curve.Fr;
const res = {};
res.A = G1.fromObject(proof.A);
res.B = G1.fromObject(proof.B);
res.C = G1.fromObject(proof.C);
res.Z = G1.fromObject(proof.Z);
res.T1 = G1.fromObject(proof.T1);
res.T2 = G1.fromObject(proof.T2);
res.T3 = G1.fromObject(proof.T3);
res.eval_a = Fr.fromObject(proof.eval_a);
res.eval_b = Fr.fromObject(proof.eval_b);
res.eval_c = Fr.fromObject(proof.eval_c);
res.eval_zw = Fr.fromObject(proof.eval_zw);
res.eval_s1 = Fr.fromObject(proof.eval_s1);
res.eval_s2 = Fr.fromObject(proof.eval_s2);
res.eval_r = Fr.fromObject(proof.eval_r);
res.Wxi = G1.fromObject(proof.Wxi);
res.Wxiw = G1.fromObject(proof.Wxiw);
return res;
}
function fromObjectVk(curve, vk) {
const G1 = curve.G1;
const G2 = curve.G2;
const Fr = curve.Fr;
const res = vk;
res.Qm = G1.fromObject(vk.Qm);
res.Ql = G1.fromObject(vk.Ql);
res.Qr = G1.fromObject(vk.Qr);
res.Qo = G1.fromObject(vk.Qo);
res.Qc = G1.fromObject(vk.Qc);
res.S1 = G1.fromObject(vk.S1);
res.S2 = G1.fromObject(vk.S2);
res.S3 = G1.fromObject(vk.S3);
res.k1 = Fr.fromObject(vk.k1);
res.k2 = Fr.fromObject(vk.k2);
res.X_2 = G2.fromObject(vk.X_2);
return res;
}
function isWellConstructed(curve, proof) {
const G1 = curve.G1;
if (!G1.isValid(proof.A)) return false;
if (!G1.isValid(proof.B)) return false;
if (!G1.isValid(proof.C)) return false;
if (!G1.isValid(proof.Z)) return false;
if (!G1.isValid(proof.T1)) return false;
if (!G1.isValid(proof.T2)) return false;
if (!G1.isValid(proof.T3)) return false;
if (!G1.isValid(proof.Wxi)) return false;
if (!G1.isValid(proof.Wxiw)) return false;
return true;
}
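// All verifier challenges (beta, gamma, alpha, xi, v[i], u) are derived with the
// Fiat-Shamir heuristic: each is the keccak256 hash of a transcript of the prior
// commitments and evaluations, reduced into Fr by hashToFr().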
function calculateChallanges(curve, proof) {
const G1 = curve.G1;
const Fr = curve.Fr;
const n8r = curve.Fr.n8;
const res = {};
const transcript1 = new Uint8Array(G1.F.n8*2*3);
G1.toRprUncompressed(transcript1, 0, proof.A);
G1.toRprUncompressed(transcript1, G1.F.n8*2, proof.B);
G1.toRprUncompressed(transcript1, G1.F.n8*4, proof.C);
res.beta = hashToFr(curve, transcript1);
const transcript2 = new Uint8Array(n8r);
Fr.toRprBE(transcript2, 0, res.beta);
res.gamma = hashToFr(curve, transcript2);
const transcript3 = new Uint8Array(G1.F.n8*2);
G1.toRprUncompressed(transcript3, 0, proof.Z);
res.alpha = hashToFr(curve, transcript3);
const transcript4 = new Uint8Array(G1.F.n8*2*3);
G1.toRprUncompressed(transcript4, 0, proof.T1);
G1.toRprUncompressed(transcript4, G1.F.n8*2, proof.T2);
G1.toRprUncompressed(transcript4, G1.F.n8*4, proof.T3);
res.xi = hashToFr(curve, transcript4);
const transcript5 = new Uint8Array(n8r*7);
Fr.toRprBE(transcript5, 0, proof.eval_a);
Fr.toRprBE(transcript5, n8r, proof.eval_b);
Fr.toRprBE(transcript5, n8r*2, proof.eval_c);
Fr.toRprBE(transcript5, n8r*3, proof.eval_s1);
Fr.toRprBE(transcript5, n8r*4, proof.eval_s2);
Fr.toRprBE(transcript5, n8r*5, proof.eval_zw);
Fr.toRprBE(transcript5, n8r*6, proof.eval_r);
res.v = [];
res.v[1] = hashToFr(curve, transcript5);
for (let i=2; i<=6; i++ ) res.v[i] = Fr.mul(res.v[i-1], res.v[1]);
const transcript6 = new Uint8Array(G1.F.n8*2*2);
G1.toRprUncompressed(transcript6, 0, proof.Wxi);
G1.toRprUncompressed(transcript6, G1.F.n8*2, proof.Wxiw);
res.u = hashToFr(curve, transcript6);
return res;
}
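// Evaluates the first max(1, nPublic) Lagrange basis polynomials at xi using
// L_i(xi) = w^(i-1) * (xi^n - 1) / (n * (xi - w^(i-1))), where w is the n-th root of
// unity, n = 2^power, and zh = xi^n - 1 is the vanishing polynomial of the domain.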
function calculateLagrangeEvaluations(curve, challanges, vk) {
const Fr = curve.Fr;
let xin = challanges.xi;
let domainSize = 1;
for (let i=0; i<vk.power; i++) {
xin = Fr.square(xin);
domainSize *= 2;
}
challanges.xin = xin;
challanges.zh = Fr.sub(xin, Fr.one);
const L = [];
const n = Fr.e(domainSize);
let w = Fr.one;
for (let i=1; i<=Math.max(1, vk.nPublic); i++) {
L[i] = Fr.div(Fr.mul(w, challanges.zh), Fr.mul(n, Fr.sub(challanges.xi, w)));
w = Fr.mul(w, Fr.w[vk.power]);
}
return L;
}
function hashToFr(curve, transcript) {
const v = Scalar.fromRprBE(new Uint8Array(keccak256.arrayBuffer(transcript)));
return curve.Fr.e(v);
}
function calculatePl(curve, publicSignals, L) {
const Fr = curve.Fr;
let pl = Fr.zero;
for (let i=0; i<publicSignals.length; i++) {
const w = Fr.e(publicSignals[i]);
pl = Fr.sub(pl, Fr.mul(w, L[i+1]));
}
return pl;
}
function calculateT(curve, proof, challanges, pl, l1) {
const Fr = curve.Fr;
let num = proof.eval_r;
num = Fr.add(num, pl);
let e1 = proof.eval_a;
e1 = Fr.add(e1, Fr.mul(challanges.beta, proof.eval_s1));
e1 = Fr.add(e1, challanges.gamma);
let e2 = proof.eval_b;
e2 = Fr.add(e2, Fr.mul(challanges.beta, proof.eval_s2));
e2 = Fr.add(e2, challanges.gamma);
let e3 = proof.eval_c;
e3 = Fr.add(e3, challanges.gamma);
let e = Fr.mul(Fr.mul(e1, e2), e3);
e = Fr.mul(e, proof.eval_zw);
e = Fr.mul(e, challanges.alpha);
num = Fr.sub(num, e);
num = Fr.sub(num, Fr.mul(l1, Fr.square(challanges.alpha)));
const t = Fr.div(num, challanges.zh);
return t;
}
function calculateD(curve, proof, challanges, vk, l1) {
const G1 = curve.G1;
const Fr = curve.Fr;
let s1 = Fr.mul(Fr.mul(proof.eval_a, proof.eval_b), challanges.v[1]);
let res = G1.timesFr(vk.Qm, s1);
let s2 = Fr.mul(proof.eval_a, challanges.v[1]);
res = G1.add(res, G1.timesFr(vk.Ql, s2));
let s3 = Fr.mul(proof.eval_b, challanges.v[1]);
res = G1.add(res, G1.timesFr(vk.Qr, s3));
let s4 = Fr.mul(proof.eval_c, challanges.v[1]);
res = G1.add(res, G1.timesFr(vk.Qo, s4));
res = G1.add(res, G1.timesFr(vk.Qc, challanges.v[1]));
const betaxi = Fr.mul(challanges.beta, challanges.xi);
let s6a = proof.eval_a;
s6a = Fr.add(s6a, betaxi);
s6a = Fr.add(s6a, challanges.gamma);
let s6b = proof.eval_b;
s6b = Fr.add(s6b, Fr.mul(betaxi, vk.k1));
s6b = Fr.add(s6b, challanges.gamma);
let s6c = proof.eval_c;
s6c = Fr.add(s6c, Fr.mul(betaxi, vk.k2));
s6c = Fr.add(s6c, challanges.gamma);
let s6 = Fr.mul(Fr.mul(s6a, s6b), s6c);
s6 = Fr.mul(s6, Fr.mul(challanges.alpha, challanges.v[1]));
let s6d = Fr.mul(Fr.mul(l1, Fr.square(challanges.alpha)), challanges.v[1]);
s6 = Fr.add(s6, s6d);
s6 = Fr.add(s6, challanges.u);
res = G1.add(res, G1.timesFr(proof.Z, s6));
let s7a = proof.eval_a;
s7a = Fr.add(s7a, Fr.mul(challanges.beta, proof.eval_s1));
s7a = Fr.add(s7a, challanges.gamma);
let s7b = proof.eval_b;
s7b = Fr.add(s7b, Fr.mul(challanges.beta, proof.eval_s2));
s7b = Fr.add(s7b, challanges.gamma);
let s7 = Fr.mul(s7a, s7b);
s7 = Fr.mul(s7, challanges.alpha);
s7 = Fr.mul(s7, challanges.v[1]);
s7 = Fr.mul(s7, challanges.beta);
s7 = Fr.mul(s7, proof.eval_zw);
res = G1.sub(res, G1.timesFr(vk.S3, s7));
return res;
}
function calculateF(curve, proof, challanges, vk, D) {
const G1 = curve.G1;
const Fr = curve.Fr;
let res = proof.T1;
res = G1.add(res, G1.timesFr(proof.T2, challanges.xin));
res = G1.add(res, G1.timesFr(proof.T3, Fr.square(challanges.xin)));
res = G1.add(res, D);
res = G1.add(res, G1.timesFr(proof.A, challanges.v[2]));
res = G1.add(res, G1.timesFr(proof.B, challanges.v[3]));
res = G1.add(res, G1.timesFr(proof.C, challanges.v[4]));
res = G1.add(res, G1.timesFr(vk.S1, challanges.v[5]));
res = G1.add(res, G1.timesFr(vk.S2, challanges.v[6]));
return res;
}
function calculateE(curve, proof, challanges, vk, t) {
const G1 = curve.G1;
const Fr = curve.Fr;
let s = t;
s = Fr.add(s, Fr.mul(challanges.v[1], proof.eval_r));
s = Fr.add(s, Fr.mul(challanges.v[2], proof.eval_a));
s = Fr.add(s, Fr.mul(challanges.v[3], proof.eval_b));
s = Fr.add(s, Fr.mul(challanges.v[4], proof.eval_c));
s = Fr.add(s, Fr.mul(challanges.v[5], proof.eval_s1));
s = Fr.add(s, Fr.mul(challanges.v[6], proof.eval_s2));
s = Fr.add(s, Fr.mul(challanges.u, proof.eval_zw));
const res = G1.timesFr(G1.one, s);
return res;
}
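// Final batched KZG opening check at xi and xi*w: with A1 = Wxi + u*Wxiw and
// B1 = xi*Wxi + u*xi*w*Wxiw + F - E, the proof is accepted iff
// e(A1, [x]_2) == e(B1, [1]_2), i.e. pairingEq(-A1, X_2, B1, G2.one) holds.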
async function isValidPairing(curve, proof, challanges, vk, E, F) {
const G1 = curve.G1;
const Fr = curve.Fr;
let A1 = proof.Wxi;
A1 = G1.add(A1, G1.timesFr(proof.Wxiw, challanges.u));
let B1 = G1.timesFr(proof.Wxi, challanges.xi);
const s = Fr.mul(Fr.mul(challanges.u, challanges.xi), Fr.w[vk.power]);
B1 = G1.add(B1, G1.timesFr(proof.Wxiw, s));
B1 = G1.add(B1, F);
B1 = G1.sub(B1, E);
const res = await curve.pairingEq(
G1.neg(A1) , vk.X_2,
B1 , curve.G2.one
);
return res;
}

534
src/polfield.js Normal file
View File

@ -0,0 +1,534 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/*
This library does operations on polynomials with coefficients in a field F.
A polynomial P(x) = p0 + p1 * x + p2 * x^2 + ... + pn * x^n is represented
by the array [ p0, p1, p2, ... , pn ].
*/
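/*
Illustrative usage (a hedged sketch; F stands for any field object exposing the
interface used below: one, two, add, mul, sub, inverse, isZero, equals, q, ...):
const PF = new PolField(F);
// [a, b, c] represents a + b*x + c*x^2
PF.mul([F.one, F.one], [F.one, F.one]); // (1 + x)^2 -> [1, 2, 1]
PF.eval([F.one, F.one, F.one], F.two); // 1 + 2 + 4 -> 7
*/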
const bigInt = require("./bigint.js");
class PolField {
constructor (F) {
this.F = F;
const q = this.F.q;
let rem = q.sub(bigInt(1));
let s = 0;
while (!rem.isOdd()) {
s ++;
rem = rem.shr(1);
}
const five = this.F.add(this.F.add(this.F.two, this.F.two), this.F.one);
this.w = new Array(s+1);
this.wi = new Array(s+1);
this.w[s] = this.F.exp(five, rem);
this.wi[s] = this.F.inverse(this.w[s]);
let n=s-1;
while (n>=0) {
this.w[n] = this.F.square(this.w[n+1]);
this.wi[n] = this.F.square(this.wi[n+1]);
n--;
}
this.roots = [];
/* for (let i=0; i<16; i++) {
let r = this.F.one;
n = 1 << i;
const rootsi = new Array(n);
for (let j=0; j<n; j++) {
rootsi[j] = r;
r = this.F.mul(r, this.w[i]);
}
this.roots.push(rootsi);
}
*/
this._setRoots(15);
}
_setRoots(n) {
for (let i=n; (i>=0) && (!this.roots[i]); i--) {
let r = this.F.one;
const nroots = 1 << i;
const rootsi = new Array(nroots);
for (let j=0; j<nroots; j++) {
rootsi[j] = r;
r = this.F.mul(r, this.w[i]);
}
this.roots[i] = rootsi;
}
}
add(a, b) {
const m = Math.max(a.length, b.length);
const res = new Array(m);
for (let i=0; i<m; i++) {
res[i] = this.F.add(a[i] || this.F.zero, b[i] || this.F.zero);
}
return this.reduce(res);
}
double(a) {
return this.add(a,a);
}
sub(a, b) {
const m = Math.max(a.length, b.length);
const res = new Array(m);
for (let i=0; i<m; i++) {
res[i] = this.F.sub(a[i] || this.F.zero, b[i] || this.F.zero);
}
return this.reduce(res);
}
mulScalar(p, b) {
if (this.F.isZero(b)) return [];
if (this.F.equals(b, this.F.one)) return p;
const res = new Array(p.length);
for (let i=0; i<p.length; i++) {
res[i] = this.F.mul(p[i], b);
}
return res;
}
mul(a, b) {
if (a.length == 0) return [];
if (b.length == 0) return [];
if (a.length == 1) return this.mulScalar(b, a[0]);
if (b.length == 1) return this.mulScalar(a, b[0]);
if (b.length > a.length) {
[b, a] = [a, b];
}
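// Heuristic: schoolbook multiplication costs O(|a|*|b|); the FFT product costs
// O(n log n). Fall back to schoolbook only when b (the shorter operand) is tiny.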
if ((b.length <= 2) || (b.length < log2(a.length))) {
return this.mulNormal(a,b);
} else {
return this.mulFFT(a,b);
}
}
mulNormal(a, b) {
let res = [];
b = this.affine(b);
for (let i=0; i<b.length; i++) {
res = this.add(res, this.scaleX(this.mulScalar(a, b[i]), i) );
}
return res;
}
mulFFT(a,b) {
const longestN = Math.max(a.length, b.length);
const bitsResult = log2(longestN-1)+2;
this._setRoots(bitsResult);
const m = 1 << bitsResult;
const ea = this.extend(a,m);
const eb = this.extend(b,m);
const ta = __fft(this, ea, bitsResult, 0, 1, false);
const tb = __fft(this, eb, bitsResult, 0, 1, false);
const tres = new Array(m);
for (let i=0; i<m; i++) {
tres[i] = this.F.mul(ta[i], tb[i]);
}
const res = __fft(this, tres, bitsResult, 0, 1, true);
const twoinvm = this.F.inverse( this.F.mulScalar(this.F.one, m) );
const resn = new Array(m);
for (let i=0; i<m; i++) {
resn[i] = this.F.mul(res[(m-i)%m], twoinvm);
}
return this.reduce(this.affine(resn));
}
square(a) {
return this.mul(a,a);
}
scaleX(p, n) {
if (n==0) {
return p;
} else if (n>0) {
const z = new Array(n).fill(this.F.zero);
return z.concat(p);
} else {
if (-n >= p.length) return [];
return p.slice(-n);
}
}
eval2(p, x) {
let v = this.F.zero;
let ix = this.F.one;
for (let i=0; i<p.length; i++) {
v = this.F.add(v, this.F.mul(p[i], ix));
ix = this.F.mul(ix, x);
}
return v;
}
eval(p,x) {
const F = this.F;
if (p.length == 0) return F.zero;
const m = this._next2Power(p.length);
const ep = this.extend(p, m);
return _eval(ep, x, 0, 1, m);
function _eval(p, x, offset, step, n) {
if (n==1) return p[offset];
const newX = F.square(x);
const res= F.add(
_eval(p, newX, offset, step << 1, n >> 1),
F.mul(
x,
_eval(p, newX, offset+step , step << 1, n >> 1)));
return res;
}
}
lagrange(points) {
let roots = [this.F.one];
for (let i=0; i<points.length; i++) {
roots = this.mul(roots, [this.F.neg(points[i][0]), this.F.one]);
}
let sum = [];
for (let i=0; i<points.length; i++) {
let mpol = this.ruffini(roots, points[i][0]);
const factor =
this.F.mul(
this.F.inverse(this.eval(mpol, points[i][0])),
points[i][1]);
mpol = this.mulScalar(mpol, factor);
sum = this.add(sum, mpol);
}
return sum;
}
fft(p) {
if (p.length <= 1) return p;
const bits = log2(p.length-1)+1;
this._setRoots(bits);
const m = 1 << bits;
const ep = this.extend(p, m);
const res = __fft(this, ep, bits, 0, 1);
return res;
}
ifft(p) {
if (p.length <= 1) return p;
const bits = log2(p.length-1)+1;
this._setRoots(bits);
const m = 1 << bits;
const ep = this.extend(p, m);
const res = __fft(this, ep, bits, 0, 1);
const twoinvm = this.F.inverse( this.F.mulScalar(this.F.one, m) );
const resn = new Array(m);
for (let i=0; i<m; i++) {
resn[i] = this.F.mul(res[(m-i)%m], twoinvm);
}
return resn;
}
_fft(pall, bits, offset, step) {
const n = 1 << bits;
if (n==1) {
return [ pall[offset] ];
}
const ndiv2 = n >> 1;
const p1 = this._fft(pall, bits-1, offset, step*2);
const p2 = this._fft(pall, bits-1, offset+step, step*2);
const out = new Array(n);
let m= this.F.one;
for (let i=0; i<ndiv2; i++) {
out[i] = this.F.add(p1[i], this.F.mul(m, p2[i]));
out[i+ndiv2] = this.F.sub(p1[i], this.F.mul(m, p2[i]));
m = this.F.mul(m, this.w[bits]);
}
return out;
}
extend(p, e) {
if (e == p.length) return p;
const z = new Array(e-p.length).fill(this.F.zero);
return p.concat(z);
}
reduce(p) {
if (p.length == 0) return p;
if (! this.F.isZero(p[p.length-1]) ) return p;
let i=p.length-1;
while( i>0 && this.F.isZero(p[i]) ) i--;
return p.slice(0, i+1);
}
affine(p) {
for (let i=0; i<p.length; i++) {
p[i] = this.F.affine(p[i]);
}
return p;
}
equals(a, b) {
const pa = this.reduce(this.affine(a));
const pb = this.reduce(this.affine(b));
if (pa.length != pb.length) return false;
for (let i=0; i<pb.length; i++) {
if (!this.F.equals(pa[i], pb[i])) return false;
}
return true;
}
ruffini(p, r) {
const res = new Array(p.length-1);
res[res.length-1] = p[p.length-1];
for (let i = res.length-2; i>=0; i--) {
res[i] = this.F.add(this.F.mul(res[i+1], r), p[i+1]);
}
return res;
}
_next2Power(v) {
v--;
v |= v >> 1;
v |= v >> 2;
v |= v >> 4;
v |= v >> 8;
v |= v >> 16;
v++;
return v;
}
toString(p) {
const ap = this.affine(p);
let S = "";
for (let i=ap.length-1; i>=0; i--) {
if (!this.F.isZero(p[i])) {
if (S!="") S += " + ";
S = S + p[i].toString(10);
if (i>0) {
S = S + "x";
if (i>1) {
S = S + "^" +i;
}
}
}
}
return S;
}
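// _reciprocal(p, bits): for k = 2^bits and deg(p) = k-1, returns floor(x^(2k-2) / p)
// via the classic recursive doubling (Newton-style) construction; _div2() and div()
// build fast polynomial division on top of it.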
_reciprocal(p, bits) {
const k = 1 << bits;
if (k==1) {
return [ this.F.inverse(p[0]) ];
}
const np = this.scaleX(p, -k/2);
const q = this._reciprocal(np, bits-1);
const a = this.scaleX(this.double(q), 3*k/2-2);
const b = this.mul( this.square(q), p);
return this.scaleX(this.sub(a,b), -(k-2));
}
// divides x^m / v
_div2(m, v) {
const kbits = log2(v.length-1)+1;
const k = 1 << kbits;
const scaleV = k - v.length;
// rec = x^(k - 2) / v* x^scaleV =>
// rec = x^(k-2-scaleV)/ v
//
// res = x^m/v = x^(m + (2*k-2 - scaleV) - (2*k-2 - scaleV)) /v =>
// res = rec * x^(m - (2*k-2 - scaleV)) =>
// res = rec * x^(m - 2*k + 2 + scaleV)
const rec = this._reciprocal(this.scaleX(v, scaleV), kbits);
const res = this.scaleX(rec, m - 2*k + 2 + scaleV);
return res;
}
div(_u, _v) {
if (_u.length < _v.length) return [];
const kbits = log2(_v.length-1)+1;
const k = 1 << kbits;
const u = this.scaleX(_u, k-_v.length);
const v = this.scaleX(_v, k-_v.length);
const n = v.length-1;
let m = u.length-1;
const s = this._reciprocal(v, kbits);
let t;
if (m>2*n) {
t = this.sub(this.scaleX([this.F.one], 2*n), this.mul(s, v));
}
let q = [];
let rem = u;
let us, ut;
let finish = false;
while (!finish) {
us = this.mul(rem, s);
q = this.add(q, this.scaleX(us, -2*n));
if ( m > 2*n ) {
ut = this.mul(rem, t);
rem = this.scaleX(ut, -2*n);
m = rem.length-1;
} else {
finish = true;
}
}
return q;
}
// returns the ith nth-root of one
oneRoot(n, i) {
let nbits = log2(n-1)+1;
let res = this.F.one;
let r = i;
if(i>=n) {
throw new Error("Given 'i' should be lower than 'n'");
}
else if (1<<nbits !== n) {
throw new Error(`Internal error: ${n} should equal ${1<<nbits}`);
}
while (r>0) {
if (r & 1 == 1) {
res = this.F.mul(res, this.w[nbits]);
}
r = r >> 1;
nbits --;
}
return res;
}
computeVanishingPolinomial(bits, t) {
const m = 1 << bits;
return this.F.sub(this.F.exp(t, bigInt(m)), this.F.one);
}
evaluateLagrangePolynomials(bits, t) {
const m= 1 << bits;
const tm = this.F.exp(t, bigInt(m));
const u= new Array(m).fill(this.F.zero);
this._setRoots(bits);
const omega = this.w[bits];
if (this.F.equals(tm, this.F.one)) {
for (let i = 0; i < m; i++) {
if (this.F.equals(this.roots[bits][i],t)) { // i.e., t equals omega^i
u[i] = this.F.one;
return u;
}
}
}
const z = this.F.sub(tm, this.F.one);
// let l = this.F.mul(z, this.F.exp(this.F.twoinv, m));
let l = this.F.mul(z, this.F.inverse(bigInt(m)));
for (let i = 0; i < m; i++) {
u[i] = this.F.mul(l, this.F.inverse(this.F.sub(t,this.roots[bits][i])));
l = this.F.mul(l, omega);
}
return u;
}
log2(V) {
return log2(V);
}
}
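// Branchless floor(log2(V)) for 32-bit unsigned V.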
function log2( V )
{
    return( ( ( V & 0xFFFF0000 ) !== 0 ? ( V &= 0xFFFF0000, 16 ) : 0 )
          | ( ( V & 0xFF00FF00 ) !== 0 ? ( V &= 0xFF00FF00, 8 ) : 0 )
          | ( ( V & 0xF0F0F0F0 ) !== 0 ? ( V &= 0xF0F0F0F0, 4 ) : 0 )
          | ( ( V & 0xCCCCCCCC ) !== 0 ? ( V &= 0xCCCCCCCC, 2 ) : 0 )
          | ( ( V & 0xAAAAAAAA ) !== 0 ) );
}
function __fft(PF, pall, bits, offset, step) {
const n = 1 << bits;
if (n==1) {
return [ pall[offset] ];
} else if (n==2) {
return [
PF.F.add(pall[offset], pall[offset + step]),
PF.F.sub(pall[offset], pall[offset + step])];
}
const ndiv2 = n >> 1;
const p1 = __fft(PF, pall, bits-1, offset, step*2);
const p2 = __fft(PF, pall, bits-1, offset+step, step*2);
const out = new Array(n);
for (let i=0; i<ndiv2; i++) {
out[i] = PF.F.add(p1[i], PF.F.mul(PF.roots[bits][i], p2[i]));
out[i+ndiv2] = PF.F.sub(p1[i], PF.F.mul(PF.roots[bits][i], p2[i]));
}
return out;
}
module.exports = PolField;

View File

@ -1,30 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
export {default as newAccumulator} from "./powersoftau_new.js";
export {default as exportChallenge} from "./powersoftau_export_challenge.js";
export {default as importResponse} from "./powersoftau_import.js";
export {default as verify} from "./powersoftau_verify.js";
export {default as challengeContribute} from "./powersoftau_challenge_contribute.js";
export {default as beacon} from "./powersoftau_beacon.js";
export {default as contribute} from "./powersoftau_contribute.js";
export {default as preparePhase2} from "./powersoftau_preparephase2.js";
export {default as truncate} from "./powersoftau_truncate.js";
export {default as convert} from "./powersoftau_convert.js";
export {default as exportJson} from "./powersoftau_export_json.js";

View File

@ -1,192 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import Blake2b from "blake2b-wasm";
import * as utils from "./powersoftau_utils.js";
import * as misc from "./misc.js";
import * as binFileUtils from "@iden3/binfileutils";
export default async function beacon(oldPtauFilename, newPTauFilename, name, beaconHashStr,numIterationsExp, logger) {
const beaconHash = misc.hex2ByteArray(beaconHashStr);
if ( (beaconHash.byteLength == 0)
|| (beaconHash.byteLength*2 !=beaconHashStr.length))
{
if (logger) logger.error("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
return false;
}
if (beaconHash.length>=256) {
if (logger) logger.error("Maximum lenght of beacon hash is 255 bytes");
return false;
}
numIterationsExp = parseInt(numIterationsExp);
if ((numIterationsExp<10)||(numIterationsExp>63)) {
if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)");
return false;
}
await Blake2b.ready();
const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power, ceremonyPower} = await utils.readPTauHeader(fdOld, sections);
if (power != ceremonyPower) {
if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
return false;
}
if (sections[12]) {
if (logger) logger.warn("Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
}
const contributions = await utils.readContributions(fdOld, curve, sections);
const curContribution = {
name: name,
type: 1, // Beacon
numIterationsExp: numIterationsExp,
beaconHash: beaconHash
};
let lastChallengeHash;
if (contributions.length>0) {
lastChallengeHash = contributions[contributions.length-1].nextChallenge;
} else {
lastChallengeHash = utils.calculateFirstChallengeHash(curve, power, logger);
}
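// The beacon key is derived deterministically from the previous challenge hash and
// the public beacon parameters, so any verifier can recompute it and check that this
// contribution really used the announced beacon.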
curContribution.key = utils.keyFromBeacon(curve, lastChallengeHash, beaconHash, numIterationsExp);
const responseHasher = new Blake2b(64);
responseHasher.update(lastChallengeHash);
const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 7);
await utils.writePTauHeader(fdNew, curve, power);
const startSections = [];
let firstPoints;
firstPoints = await processSection(2, "G1", (2 ** power) * 2 -1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1", logger );
curContribution.tauG1 = firstPoints[1];
firstPoints = await processSection(3, "G2", (2 ** power) , curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2", logger );
curContribution.tauG2 = firstPoints[1];
firstPoints = await processSection(4, "G1", (2 ** power) , curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1", logger );
curContribution.alphaG1 = firstPoints[0];
firstPoints = await processSection(5, "G1", (2 ** power) , curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1", logger );
curContribution.betaG1 = firstPoints[0];
firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2", logger );
curContribution.betaG2 = firstPoints[0];
curContribution.partialHash = responseHasher.getPartialHash();
const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3);
utils.toPtauPubKeyRpr(buffKey, 0, curve, curContribution.key, false);
responseHasher.update(new Uint8Array(buffKey));
const hashResponse = responseHasher.digest();
if (logger) logger.info(misc.formatHash(hashResponse, "Contribution Response Hash: "));
const nextChallengeHasher = new Blake2b(64);
nextChallengeHasher.update(hashResponse);
await hashSection(fdNew, "G1", 2, (2 ** power) * 2 -1, "tauG1", logger);
await hashSection(fdNew, "G2", 3, (2 ** power) , "tauG2", logger);
await hashSection(fdNew, "G1", 4, (2 ** power) , "alphaTauG1", logger);
await hashSection(fdNew, "G1", 5, (2 ** power) , "betaTauG1", logger);
await hashSection(fdNew, "G2", 6, 1 , "betaG2", logger);
curContribution.nextChallenge = nextChallengeHasher.digest();
if (logger) logger.info(misc.formatHash(curContribution.nextChallenge, "Next Challenge Hash: "));
contributions.push(curContribution);
await utils.writeContributions(fdNew, curve, contributions);
await fdOld.close();
await fdNew.close();
return hashResponse;
async function processSection(sectionId, groupName, NPoints, first, inc, sectionName, logger) {
const res = [];
fdOld.pos = sections[sectionId][0].p;
await binFileUtils.startWriteSection(fdNew, sectionId);
startSections[sectionId] = fdNew.pos;
const G = curve[groupName];
const sG = G.F.n8*2;
const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks
let t = first;
for (let i=0 ; i<NPoints ; i+= chunkSize) {
if (logger) logger.debug(`applying key ${sectionName}: ${i}/${NPoints}`);
const n= Math.min(NPoints-i, chunkSize );
const buffIn = await fdOld.read(n * sG);
const buffOutLEM = await G.batchApplyKey(buffIn, t, inc);
/* Code to test the case where we don't have the 2^m-2 component
if (sectionName== "tauG1") {
const bz = new Uint8Array(64);
buffOutLEM.set(bz, 64*((2 ** power) - 1 ));
}
*/
const promiseWrite = fdNew.write(buffOutLEM);
const buffOutC = await G.batchLEMtoC(buffOutLEM);
responseHasher.update(buffOutC);
await promiseWrite;
if (i==0) // Return the 2 first points.
for (let j=0; j<Math.min(2, NPoints); j++)
res.push(G.fromRprLEM(buffOutLEM, j*sG));
t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
}
await binFileUtils.endWriteSection(fdNew);
return res;
}
async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName, logger) {
const G = curve[groupName];
const sG = G.F.n8*2;
const nPointsChunk = Math.floor((1<<24)/sG);
const oldPos = fdTo.pos;
fdTo.pos = startSections[sectionId];
for (let i=0; i< nPoints; i += nPointsChunk) {
if (logger) logger.debug(`Hashing ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints-i, nPointsChunk);
const buffLEM = await fdTo.read(n * sG);
const buffU = await G.batchLEMtoU(buffLEM);
nextChallengeHasher.update(buffU);
}
fdTo.pos = oldPos;
}
}

View File

@ -1,117 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
// Format of the output
// Hash of the last contribution 64 Bytes
// 2^N*2-1 TauG1 Points (compressed)
// 2^N TauG2 Points (compressed)
// 2^N AlphaTauG1 Points (compressed)
// 2^N BetaTauG1 Points (compressed)
// Public Key
// BetaG2 (compressed)
// G1*s (compressed)
// G1*s*tau (compressed)
// G1*t (compressed)
// G1*t*alpha (compressed)
// G1*u (compressed)
// G1*u*beta (compressed)
// G2*sp*tau (compressed)
// G2*tp*alpha (compressed)
// G2*up*beta (compressed)
import * as fastFile from "fastfile";
import Blake2b from "blake2b-wasm";
import * as utils from "./powersoftau_utils.js";
import * as misc from "./misc.js";
import { applyKeyToChallengeSection } from "./mpc_applykey.js";
import * as keyPair from "./keypair.js";
export default async function challengeContribute(curve, challengeFilename, responseFileName, entropy, logger) {
await Blake2b.ready();
const fdFrom = await fastFile.readExisting(challengeFilename);
const sG1 = curve.F1.n64*8*2;
const sG2 = curve.F2.n64*8*2;
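// Challenge layout: 64-byte hash + (2*2^N - 1)*sG1 (tauG1) + 2^N*sG2 (tauG2)
// + 2^N*sG1 (alphaTauG1) + 2^N*sG1 (betaTauG1) + sG2 (betaG2), all uncompressed.
// Hence totalSize + sG1 - 64 - sG2 = 2^N * (4*sG1 + sG2), which is solved for 2^N below.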
const domainSize = (fdFrom.totalSize + sG1 - 64 - sG2) / (4*sG1 + sG2);
let e = domainSize;
let power = 0;
while (e>1) {
e = e /2;
power += 1;
}
if (2 ** power != domainSize) throw new Error("Invalid file size");
if (logger) logger.debug("Power to tau size: "+power);
const rng = await misc.getRandomRng(entropy);
const fdTo = await fastFile.createOverride(responseFileName);
// Calculate the hash
const challengeHasher = Blake2b(64);
for (let i=0; i<fdFrom.totalSize; i+= fdFrom.pageSize) {
if (logger) logger.debug(`Hashing challenge ${i}/${fdFrom.totalSize}`);
const s = Math.min(fdFrom.totalSize - i, fdFrom.pageSize);
const buff = await fdFrom.read(s);
challengeHasher.update(buff);
}
const claimedHash = await fdFrom.read(64, 0);
if (logger) logger.info(misc.formatHash(claimedHash, "Claimed Previous Response Hash: "));
const challengeHash = challengeHasher.digest();
if (logger) logger.info(misc.formatHash(challengeHash, "Current Challenge Hash: "));
const key = keyPair.createPTauKey(curve, challengeHash, rng);
if (logger) {
["tau", "alpha", "beta"].forEach( (k) => {
logger.debug(k + ".g1_s: " + curve.G1.toString(key[k].g1_s, 16));
logger.debug(k + ".g1_sx: " + curve.G1.toString(key[k].g1_sx, 16));
logger.debug(k + ".g2_sp: " + curve.G2.toString(key[k].g2_sp, 16));
logger.debug(k + ".g2_spx: " + curve.G2.toString(key[k].g2_spx, 16));
logger.debug("");
});
}
const responseHasher = Blake2b(64);
await fdTo.write(challengeHash);
responseHasher.update(challengeHash);
await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G1", (2 ** power)*2-1, curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG1" , logger );
await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G2", (2 ** power) , curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG2" , logger );
await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G1", (2 ** power) , key.alpha.prvKey, key.tau.prvKey, "COMPRESSED", "alphaTauG1", logger );
await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G1", (2 ** power) , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG1" , logger );
await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G2", 1 , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG2" , logger );
// Write and hash key
const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3);
utils.toPtauPubKeyRpr(buffKey, 0, curve, key, false);
await fdTo.write(buffKey);
responseHasher.update(buffKey);
const responseHash = responseHasher.digest();
if (logger) logger.info(misc.formatHash(responseHash, "Contribution Response Hash: "));
await fdTo.close();
await fdFrom.close();
}

View File

@ -1,187 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
// Format of the output
// Hash of the last contribution 64 Bytes
// 2^N*2-1 TauG1 Points (uncompressed)
// 2^N TauG2 Points (uncompressed)
// 2^N AlphaTauG1 Points (uncompressed)
// 2^N BetaTauG1 Points (uncompressed)
import Blake2b from "blake2b-wasm";
import * as utils from "./powersoftau_utils.js";
import * as keyPair from "./keypair.js";
import * as binFileUtils from "@iden3/binfileutils";
import * as misc from "./misc.js";
export default async function contribute(oldPtauFilename, newPTauFilename, name, entropy, logger) {
await Blake2b.ready();
const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power, ceremonyPower} = await utils.readPTauHeader(fdOld, sections);
if (power != ceremonyPower) {
if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
throw new Error("This file has been reduced. You cannot contribute into a reduced file.");
}
if (sections[12]) {
if (logger) logger.warn("WARNING: Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
}
const contributions = await utils.readContributions(fdOld, curve, sections);
const curContribution = {
name: name,
type: 0, // Normal contribution
};
let lastChallengeHash;
const rng = await misc.getRandomRng(entropy);
if (contributions.length>0) {
lastChallengeHash = contributions[contributions.length-1].nextChallenge;
} else {
lastChallengeHash = utils.calculateFirstChallengeHash(curve, power, logger);
}
// Generate a random key
curContribution.key = keyPair.createPTauKey(curve, lastChallengeHash, rng);
const responseHasher = new Blake2b(64);
responseHasher.update(lastChallengeHash);
const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 7);
await utils.writePTauHeader(fdNew, curve, power);
const startSections = [];
let firstPoints;
firstPoints = await processSection(2, "G1", (2 ** power) * 2 -1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1" );
curContribution.tauG1 = firstPoints[1];
firstPoints = await processSection(3, "G2", (2 ** power) , curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2" );
curContribution.tauG2 = firstPoints[1];
firstPoints = await processSection(4, "G1", (2 ** power) , curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1" );
curContribution.alphaG1 = firstPoints[0];
firstPoints = await processSection(5, "G1", (2 ** power) , curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1" );
curContribution.betaG1 = firstPoints[0];
firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2" );
curContribution.betaG2 = firstPoints[0];
curContribution.partialHash = responseHasher.getPartialHash();
const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3);
utils.toPtauPubKeyRpr(buffKey, 0, curve, curContribution.key, false);
responseHasher.update(new Uint8Array(buffKey));
const hashResponse = responseHasher.digest();
if (logger) logger.info(misc.formatHash(hashResponse, "Contribution Response Hash: "));
const nextChallengeHasher = new Blake2b(64);
nextChallengeHasher.update(hashResponse);
await hashSection(fdNew, "G1", 2, (2 ** power) * 2 -1, "tauG1");
await hashSection(fdNew, "G2", 3, (2 ** power) , "tauG2");
await hashSection(fdNew, "G1", 4, (2 ** power) , "alphaTauG1");
await hashSection(fdNew, "G1", 5, (2 ** power) , "betaTauG1");
await hashSection(fdNew, "G2", 6, 1 , "betaG2");
curContribution.nextChallenge = nextChallengeHasher.digest();
if (logger) logger.info(misc.formatHash(curContribution.nextChallenge, "Next Challenge Hash: "));
contributions.push(curContribution);
await utils.writeContributions(fdNew, curve, contributions);
await fdOld.close();
await fdNew.close();
return hashResponse;
async function processSection(sectionId, groupName, NPoints, first, inc, sectionName) {
const res = [];
fdOld.pos = sections[sectionId][0].p;
await binFileUtils.startWriteSection(fdNew, sectionId);
startSections[sectionId] = fdNew.pos;
const G = curve[groupName];
const sG = G.F.n8*2;
const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks
let t = first;
for (let i=0 ; i<NPoints ; i+= chunkSize) {
if (logger) logger.debug(`processing: ${sectionName}: ${i}/${NPoints}`);
const n= Math.min(NPoints-i, chunkSize );
const buffIn = await fdOld.read(n * sG);
const buffOutLEM = await G.batchApplyKey(buffIn, t, inc);
/* Code to test the case where we don't have the 2^m-2 component
if (sectionName== "tauG1") {
const bz = new Uint8Array(64);
buffOutLEM.set(bz, 64*((2 ** power) - 1 ));
}
*/
const promiseWrite = fdNew.write(buffOutLEM);
const buffOutC = await G.batchLEMtoC(buffOutLEM);
responseHasher.update(buffOutC);
await promiseWrite;
if (i==0) // Return the 2 first points.
for (let j=0; j<Math.min(2, NPoints); j++)
res.push(G.fromRprLEM(buffOutLEM, j*sG));
t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
}
await binFileUtils.endWriteSection(fdNew);
return res;
}
async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName) {
const G = curve[groupName];
const sG = G.F.n8*2;
const nPointsChunk = Math.floor((1<<24)/sG);
const oldPos = fdTo.pos;
fdTo.pos = startSections[sectionId];
for (let i=0; i< nPoints; i += nPointsChunk) {
if ((logger)&&i) logger.debug(`Hashing ${sectionName}: ` + i);
const n = Math.min(nPoints-i, nPointsChunk);
const buffLEM = await fdTo.read(n * sG);
const buffU = await G.batchLEMtoU(buffLEM);
nextChallengeHasher.update(buffU);
}
fdTo.pos = oldPos;
}
}

View File

@ -1,150 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import * as binFileUtils from "@iden3/binfileutils";
import * as utils from "./powersoftau_utils.js";
import * as fastFile from "fastfile";
import {BigBuffer} from "ffjavascript";
export default async function convert(oldPtauFilename, newPTauFilename, logger) {
const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power} = await utils.readPTauHeader(fdOld, sections);
const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 11);
await utils.writePTauHeader(fdNew, curve, power);
// const fdTmp = await fastFile.createOverride(newPTauFilename+ ".tmp");
await binFileUtils.copySection(fdOld, sections, fdNew, 2);
await binFileUtils.copySection(fdOld, sections, fdNew, 3);
await binFileUtils.copySection(fdOld, sections, fdNew, 4);
await binFileUtils.copySection(fdOld, sections, fdNew, 5);
await binFileUtils.copySection(fdOld, sections, fdNew, 6);
await binFileUtils.copySection(fdOld, sections, fdNew, 7);
await processSection(2, 12, "G1", "tauG1" );
await binFileUtils.copySection(fdOld, sections, fdNew, 13);
await binFileUtils.copySection(fdOld, sections, fdNew, 14);
await binFileUtils.copySection(fdOld, sections, fdNew, 15);
await fdOld.close();
await fdNew.close();
// await fs.promises.unlink(newPTauFilename+ ".tmp");
return;
async function processSection(oldSectionId, newSectionId, Gstr, sectionName) {
if (logger) logger.debug("Starting section: "+sectionName);
await binFileUtils.startWriteSection(fdNew, newSectionId);
const size = sections[newSectionId][0].size;
const chunkSize = fdOld.pageSize;
await binFileUtils.startReadUniqueSection(fdOld, sections, newSectionId);
for (let p=0; p<size; p+=chunkSize) {
const l = Math.min(size -p, chunkSize);
const buff = await fdOld.read(l);
await fdNew.write(buff);
}
await binFileUtils.endReadSection(fdOld);
if (oldSectionId == 2) {
await processSectionPower(power+1);
}
await binFileUtils.endWriteSection(fdNew);
async function processSectionPower(p) {
const nPoints = 2 ** p;
const G = curve[Gstr];
const sGin = G.F.n8*2;
let buff;
buff = new BigBuffer(nPoints*sGin);
await binFileUtils.startReadUniqueSection(fdOld, sections, oldSectionId);
if ((oldSectionId == 2)&&(p==power+1)) {
await fdOld.readToBuffer(buff, 0,(nPoints-1)*sGin );
buff.set(curve.G1.zeroAffine, (nPoints-1)*sGin );
} else {
await fdOld.readToBuffer(buff, 0,nPoints*sGin );
}
await binFileUtils.endReadSection(fdOld, true);
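// Convert the monomial-basis points (tau^i * G) read above into their Lagrange-basis
// evaluations over the 2^p domain; this is the form the zkey setups read from
// sections 12-15.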
buff = await G.lagrangeEvaluations(buff, "affine", "affine", logger, sectionName);
await fdNew.write(buff);
/*
if (p <= curve.Fr.s) {
buff = await G.ifft(buff, "affine", "affine", logger, sectionName);
await fdNew.write(buff);
} else if (p == curve.Fr.s+1) {
const smallM = 1<<curve.Fr.s;
let t0 = new BigBuffer( smallM * sGmid );
let t1 = new BigBuffer( smallM * sGmid );
const shift_to_small_m = Fr.exp(Fr.shift, smallM);
const one_over_denom = Fr.inv(Fr.sub(shift_to_small_m, Fr.one));
let sInvAcc = Fr.one;
for (let i=0; i<smallM; i++) {
if (i%10000) logger.debug(`sectionName prepare L calc: ${sectionName}, ${i}/${smallM}`);
const ti = buff.slice(i*sGin, (i+1)*sGin);
const tmi = buff.slice((i+smallM)*sGin, (i+smallM+1)*sGin);
t0.set(
G.timesFr(
G.sub(
G.timesFr(ti , shift_to_small_m),
tmi
),
one_over_denom
),
i*sGmid
);
t1.set(
G.timesFr(
G.sub( tmi, ti),
Fr.mul(sInvAcc, one_over_denom)
),
i*sGmid
);
sInvAcc = Fr.mul(sInvAcc, Fr.shiftInv);
}
t0 = await G.ifft(t0, "jacobian", "affine", logger, sectionName + " t0");
await fdNew.write(t0);
t0 = null;
t1 = await G.ifft(t1, "jacobian", "affine", logger, sectionName + " t1");
await fdNew.write(t1);
} else {
if (logger) logger.error("Power too big");
throw new Error("Power to big");
}
*/
}
}
}

View File

@ -1,82 +0,0 @@
// Format of the output
// Hash of the last contribution 64Bytes
// 2^N * 2 -1 TauG1 points (uncompressed)
// 2^N TauG2 Points (uncompressed)
// 2^N AlphaTauG1 Points (uncompressed)
// 2^N BetaTauG1 Points (uncompressed)
// BetaG2 (uncompressed)
import * as fastFile from "fastfile";
import Blake2b from "blake2b-wasm";
import * as utils from "./powersoftau_utils.js";
import * as binFileUtils from "@iden3/binfileutils";
import * as misc from "./misc.js";
export default async function exportChallenge(pTauFilename, challengeFilename, logger) {
await Blake2b.ready();
const {fd: fdFrom, sections} = await binFileUtils.readBinFile(pTauFilename, "ptau", 1);
const {curve, power} = await utils.readPTauHeader(fdFrom, sections);
const contributions = await utils.readContributions(fdFrom, curve, sections);
let lastResponseHash, curChallengeHash;
if (contributions.length == 0) {
lastResponseHash = Blake2b(64).digest();
curChallengeHash = utils.calculateFirstChallengeHash(curve, power);
} else {
lastResponseHash = contributions[contributions.length-1].responseHash;
curChallengeHash = contributions[contributions.length-1].nextChallenge;
}
if (logger) logger.info(misc.formatHash(lastResponseHash, "Last Response Hash: "));
if (logger) logger.info(misc.formatHash(curChallengeHash, "New Challenge Hash: "));
const fdTo = await fastFile.createOverride(challengeFilename);
const toHash = Blake2b(64);
await fdTo.write(lastResponseHash);
toHash.update(lastResponseHash);
await exportSection(2, "G1", (2 ** power) * 2 -1, "tauG1");
await exportSection(3, "G2", (2 ** power) , "tauG2");
await exportSection(4, "G1", (2 ** power) , "alphaTauG1");
await exportSection(5, "G1", (2 ** power) , "betaTauG1");
await exportSection(6, "G2", 1 , "betaG2");
await fdFrom.close();
await fdTo.close();
const calcCurChallengeHash = toHash.digest();
if (!misc.hashIsEqual (curChallengeHash, calcCurChallengeHash)) {
if (logger) logger.info(misc.formatHash(calcCurChallengeHash, "Calculated Current Challenge Hash: "));
if (logger) logger.error("PTau file is corrupted. The calculated new challenge hash does not match the declared one");
throw new Error("PTau file is corrupted. The calculated new challenge hash does not match the declared one");
}
return curChallengeHash;
async function exportSection(sectionId, groupName, nPoints, sectionName) {
const G = curve[groupName];
const sG = G.F.n8*2;
const nPointsChunk = Math.floor((1<<24)/sG);
await binFileUtils.startReadUniqueSection(fdFrom, sections, sectionId);
for (let i=0; i< nPoints; i+= nPointsChunk) {
if (logger) logger.debug(`Exporting ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints-i, nPointsChunk);
let buff;
buff = await fdFrom.read(n*sG);
buff = await G.batchLEMtoU(buff);
await fdTo.write(buff);
toHash.update(buff);
}
await binFileUtils.endReadSection(fdFrom);
}
}

View File

@ -1,89 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import * as utils from "./powersoftau_utils.js";
import * as binFileUtils from "@iden3/binfileutils";
export default async function exportJson(pTauFilename, verbose) {
const {fd, sections} = await binFileUtils.readBinFile(pTauFilename, "ptau", 1);
const {curve, power} = await utils.readPTauHeader(fd, sections);
const pTau = {};
pTau.q = curve.q;
pTau.power = power;
pTau.contributions = await utils.readContributions(fd, curve, sections);
pTau.tauG1 = await exportSection(2, "G1", (2 ** power)*2 -1, "tauG1");
pTau.tauG2 = await exportSection(3, "G2", (2 ** power), "tauG2");
pTau.alphaTauG1 = await exportSection(4, "G1", (2 ** power), "alphaTauG1");
pTau.betaTauG1 = await exportSection(5, "G1", (2 ** power), "betaTauG1");
pTau.betaG2 = await exportSection(6, "G2", 1, "betaG2");
pTau.lTauG1 = await exportLagrange(12, "G1", "lTauG1");
pTau.lTauG2 = await exportLagrange(13, "G2", "lTauG2");
pTau.lAlphaTauG1 = await exportLagrange(14, "G1", "lAlphaTauG1");
pTau.lBetaTauG1 = await exportLagrange(15, "G1", "lBetaTauG1");
await fd.close();
return pTau;
async function exportSection(sectionId, groupName, nPoints, sectionName) {
const G = curve[groupName];
const sG = G.F.n8*2;
const res = [];
await binFileUtils.startReadUniqueSection(fd, sections, sectionId);
for (let i=0; i< nPoints; i++) {
if ((verbose)&&i&&(i%10000 == 0)) console.log(`${sectionName}: ` + i);
const buff = await fd.read(sG);
res.push(G.fromRprLEM(buff, 0));
}
await binFileUtils.endReadSection(fd);
return res;
}
async function exportLagrange(sectionId, groupName, sectionName) {
const G = curve[groupName];
const sG = G.F.n8*2;
const res = [];
await binFileUtils.startReadUniqueSection(fd, sections, sectionId);
for (let p=0; p<=power; p++) {
if (verbose) console.log(`${sectionName}: Power: ${p}`);
res[p] = [];
const nPoints = (2 ** p);
for (let i=0; i<nPoints; i++) {
if ((verbose)&&i&&(i%10000 == 0)) console.log(`${sectionName}: ${i}/${nPoints}`);
const buff = await fd.read(sG);
res[p].push(G.fromRprLEM(buff, 0));
}
}
await binFileUtils.endReadSection(fd);
return res;
}
}

View File

@ -1,232 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import * as fastFile from "fastfile";
import Blake2b from "blake2b-wasm";
import * as utils from "./powersoftau_utils.js";
import * as binFileUtils from "@iden3/binfileutils";
import * as misc from "./misc.js";
export default async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, logger) {
await Blake2b.ready();
const noHash = new Uint8Array(64);
for (let i=0; i<64; i++) noHash[i] = 0xFF;
const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power} = await utils.readPTauHeader(fdOld, sections);
const contributions = await utils.readContributions(fdOld, curve, sections);
const currentContribution = {};
if (name) currentContribution.name = name;
const sG1 = curve.F1.n8*2;
const scG1 = curve.F1.n8; // Compressed size
const sG2 = curve.F2.n8*2;
const scG2 = curve.F2.n8; // Compressed size
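// A compressed point takes n8 bytes (a single coordinate); an uncompressed point
// takes 2*n8 bytes, which is why the expected response size below mixes scG* and sG* terms.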
const fdResponse = await fastFile.readExisting(contributionFilename);
if (fdResponse.totalSize !=
64 + // Old Hash
((2 ** power)*2-1)*scG1 +
(2 ** power)*scG2 +
(2 ** power)*scG1 +
(2 ** power)*scG1 +
scG2 +
sG1*6 + sG2*3)
throw new Error("Size of the contribution is invalid");
let lastChallengeHash;
if (contributions.length>0) {
lastChallengeHash = contributions[contributions.length-1].nextChallenge;
} else {
lastChallengeHash = utils.calculateFirstChallengeHash(curve, power, logger);
}
const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, importPoints ? 7: 2);
await utils.writePTauHeader(fdNew, curve, power);
const contributionPreviousHash = await fdResponse.read(64);
if (misc.hashIsEqual(noHash,lastChallengeHash)) {
lastChallengeHash = contributionPreviousHash;
contributions[contributions.length-1].nextChallenge = lastChallengeHash;
}
if(!misc.hashIsEqual(contributionPreviousHash,lastChallengeHash))
throw new Error("Wrong contribution. this contribution is not based on the previus hash");
const hasherResponse = new Blake2b(64);
hasherResponse.update(contributionPreviousHash);
const startSections = [];
let res;
res = await processSection(fdResponse, fdNew, "G1", 2, (2 ** power) * 2 -1, [1], "tauG1");
currentContribution.tauG1 = res[0];
res = await processSection(fdResponse, fdNew, "G2", 3, (2 ** power) , [1], "tauG2");
currentContribution.tauG2 = res[0];
res = await processSection(fdResponse, fdNew, "G1", 4, (2 ** power) , [0], "alphaG1");
currentContribution.alphaG1 = res[0];
res = await processSection(fdResponse, fdNew, "G1", 5, (2 ** power) , [0], "betaG1");
currentContribution.betaG1 = res[0];
res = await processSection(fdResponse, fdNew, "G2", 6, 1 , [0], "betaG2");
currentContribution.betaG2 = res[0];
currentContribution.partialHash = hasherResponse.getPartialHash();
const buffKey = await fdResponse.read(curve.F1.n8*2*6+curve.F2.n8*2*3);
currentContribution.key = utils.fromPtauPubKeyRpr(buffKey, 0, curve, false);
hasherResponse.update(new Uint8Array(buffKey));
const hashResponse = hasherResponse.digest();
if (logger) logger.info(misc.formatHash(hashResponse, "Contribution Response Hash imported: "));
if (importPoints) {
const nextChallengeHasher = new Blake2b(64);
nextChallengeHasher.update(hashResponse);
await hashSection(nextChallengeHasher, fdNew, "G1", 2, (2 ** power) * 2 -1, "tauG1", logger);
await hashSection(nextChallengeHasher, fdNew, "G2", 3, (2 ** power) , "tauG2", logger);
await hashSection(nextChallengeHasher, fdNew, "G1", 4, (2 ** power) , "alphaTauG1", logger);
await hashSection(nextChallengeHasher, fdNew, "G1", 5, (2 ** power) , "betaTauG1", logger);
await hashSection(nextChallengeHasher, fdNew, "G2", 6, 1 , "betaG2", logger);
currentContribution.nextChallenge = nextChallengeHasher.digest();
if (logger) logger.info(misc.formatHash(currentContribution.nextChallenge, "Next Challenge Hash: "));
} else {
currentContribution.nextChallenge = noHash;
}
contributions.push(currentContribution);
await utils.writeContributions(fdNew, curve, contributions);
await fdResponse.close();
await fdNew.close();
await fdOld.close();
return currentContribution.nextChallenge;
async function processSection(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) {
if (importPoints) {
return await processSectionImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName);
} else {
return await processSectionNoImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName);
}
}
async function processSectionImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) {
const G = curve[groupName];
const scG = G.F.n8;
const sG = G.F.n8*2;
const singularPoints = [];
await binFileUtils.startWriteSection(fdTo, sectionId);
const nPointsChunk = Math.floor((1<<24)/sG);
startSections[sectionId] = fdTo.pos;
for (let i=0; i< nPoints; i += nPointsChunk) {
if (logger) logger.debug(`Importing ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints-i, nPointsChunk);
const buffC = await fdFrom.read(n * scG);
hasherResponse.update(buffC);
const buffLEM = await G.batchCtoLEM(buffC);
await fdTo.write(buffLEM);
for (let j=0; j<singularPointIndexes.length; j++) {
const sp = singularPointIndexes[j];
if ((sp >=i) && (sp < i+n)) {
const P = G.fromRprLEM(buffLEM, (sp-i)*sG);
singularPoints.push(P);
}
}
}
await binFileUtils.endWriteSection(fdTo);
return singularPoints;
}
async function processSectionNoImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) {
const G = curve[groupName];
const scG = G.F.n8;
const singularPoints = [];
const nPointsChunk = Math.floor((1<<24)/scG);
for (let i=0; i< nPoints; i += nPointsChunk) {
if (logger) logger.debug(`Importing ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints-i, nPointsChunk);
const buffC = await fdFrom.read(n * scG);
hasherResponse.update(buffC);
for (let j=0; j<singularPointIndexes.length; j++) {
const sp = singularPointIndexes[j];
if ((sp >=i) && (sp < i+n)) {
const P = G.fromRprCompressed(buffC, (sp-i)*scG);
singularPoints.push(P);
}
}
}
return singularPoints;
}
async function hashSection(nextChallengeHasher, fdTo, groupName, sectionId, nPoints, sectionName, logger) {
const G = curve[groupName];
const sG = G.F.n8*2;
const nPointsChunk = Math.floor((1<<24)/sG);
const oldPos = fdTo.pos;
fdTo.pos = startSections[sectionId];
for (let i=0; i< nPoints; i += nPointsChunk) {
if (logger) logger.debug(`Hashing ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints-i, nPointsChunk);
const buffLEM = await fdTo.read(n * sG);
const buffU = await G.batchLEMtoU(buffLEM);
nextChallengeHasher.update(buffU);
}
fdTo.pos = oldPos;
}
}

View File

@ -1,146 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
/*
Header(1)
n8
prime
power
tauG1(2)
{(2 ** power)*2-1} [
G1, tau*G1, tau^2 * G1, ....
]
tauG2(3)
{2 ** power}[
G2, tau*G2, tau^2 * G2, ...
]
alphaTauG1(4)
{2 ** power}[
alpha*G1, alpha*tau*G1, alpha*tau^2*G1,....
]
betaTauG1(5)
{2 ** power} [
beta*G1, beta*tau*G1, beta*tau^2*G1, ....
]
betaG2(6)
{1}[
beta*G2
]
contributions(7)
NContributions
{NContributions}[
tau*G1
tau*G2
alpha*G1
beta*G1
beta*G2
pubKey
tau_g1s
tau_g1sx
tau_g2spx
alpha_g1s
alpha_g1sx
alpha_g1spx
beta_g1s
beta_g1sx
beta_g1spx
partialHash (216 bytes) See https://github.com/mafintosh/blake2b-wasm/blob/23bee06945806309977af802bc374727542617c7/blake2b.wat#L9
hashNewChallenge
]
*/
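/*
Illustrative sizes (a hedged example, assuming bn128 where an uncompressed G1 point
takes 64 bytes and a G2 point 128 bytes): for power = 12, tauG1 holds
2^13 - 1 = 8191 points (~512 KiB), tauG2 holds 4096 points (512 KiB), and
alphaTauG1 / betaTauG1 hold 4096 points each (256 KiB each).
*/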
import * as ptauUtils from "./powersoftau_utils.js";
import * as binFileUtils from "@iden3/binfileutils";
import Blake2b from "blake2b-wasm";
import * as misc from "./misc.js";
export default async function newAccumulator(curve, power, fileName, logger) {
await Blake2b.ready();
const fd = await binFileUtils.createBinFile(fileName, "ptau", 1, 7);
await ptauUtils.writePTauHeader(fd, curve, power, 0);
const buffG1 = curve.G1.oneAffine;
const buffG2 = curve.G2.oneAffine;
// Write tauG1
///////////
await binFileUtils.startWriteSection(fd, 2);
const nTauG1 = (2 ** power) * 2 -1;
for (let i=0; i< nTauG1; i++) {
await fd.write(buffG1);
if ((logger)&&((i%100000) == 0)&&i) logger.log("tauG1: " + i);
}
await binFileUtils.endWriteSection(fd);
// Write tauG2
///////////
await binFileUtils.startWriteSection(fd, 3);
const nTauG2 = (2 ** power);
for (let i=0; i< nTauG2; i++) {
await fd.write(buffG2);
if ((logger)&&((i%100000) == 0)&&i) logger.log("tauG2: " + i);
}
await binFileUtils.endWriteSection(fd);
// Write alphaTauG1
///////////
await binFileUtils.startWriteSection(fd, 4);
const nAlfaTauG1 = (2 ** power);
for (let i=0; i< nAlfaTauG1; i++) {
await fd.write(buffG1);
if ((logger)&&((i%100000) == 0)&&i) logger.log("alphaTauG1: " + i);
}
await binFileUtils.endWriteSection(fd);
// Write betaTauG1
///////////
await binFileUtils.startWriteSection(fd, 5);
const nBetaTauG1 = (2 ** power);
for (let i=0; i< nBetaTauG1; i++) {
await fd.write(buffG1);
if ((logger)&&((i%100000) == 0)&&i) logger.log("betaTauG1: " + i);
}
await binFileUtils.endWriteSection(fd);
// Write betaG2
///////////
await binFileUtils.startWriteSection(fd, 6);
await fd.write(buffG2);
await binFileUtils.endWriteSection(fd);
// Contributions
///////////
await binFileUtils.startWriteSection(fd, 7);
await fd.writeULE32(0); // 0 Contributions
await binFileUtils.endWriteSection(fd);
await fd.close();
const firstChallengeHash = ptauUtils.calculateFirstChallengeHash(curve, power, logger);
if (logger) logger.debug(misc.formatHash(Blake2b(64).digest(), "Blank Contribution Hash:"));
if (logger) logger.info(misc.formatHash(firstChallengeHash, "First Contribution Hash:"));
return firstChallengeHash;
}
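A minimal invocation sketch for the generator above; getCurveFromName from ffjavascript and the module path are assumptions, and any object with log/info/debug methods (console works) can serve as the logger:
import { getCurveFromName } from "ffjavascript";    // assumed export
import newAccumulator from "./powersoftau_new.js";  // assumed module path
// inside an async context: create an empty bn128 accumulator of size 2^12
const curve = await getCurveFromName("bn128");
const firstHash = await newAccumulator(curve, 12, "ceremony_0000.ptau", console);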


@ -1,142 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import * as binFileUtils from "@iden3/binfileutils";
import * as utils from "./powersoftau_utils.js";
import {BigBuffer} from "ffjavascript";
export default async function preparePhase2(oldPtauFilename, newPTauFilename, logger) {
const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power} = await utils.readPTauHeader(fdOld, sections);
const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 11);
await utils.writePTauHeader(fdNew, curve, power);
await binFileUtils.copySection(fdOld, sections, fdNew, 2);
await binFileUtils.copySection(fdOld, sections, fdNew, 3);
await binFileUtils.copySection(fdOld, sections, fdNew, 4);
await binFileUtils.copySection(fdOld, sections, fdNew, 5);
await binFileUtils.copySection(fdOld, sections, fdNew, 6);
await binFileUtils.copySection(fdOld, sections, fdNew, 7);
await processSection(2, 12, "G1", "tauG1" );
await processSection(3, 13, "G2", "tauG2" );
await processSection(4, 14, "G1", "alphaTauG1" );
await processSection(5, 15, "G1", "betaTauG1" );
await fdOld.close();
await fdNew.close();
// await fs.promises.unlink(newPTauFilename+ ".tmp");
return;
async function processSection(oldSectionId, newSectionId, Gstr, sectionName) {
if (logger) logger.debug("Starting section: "+sectionName);
await binFileUtils.startWriteSection(fdNew, newSectionId);
for (let p=0; p<=power; p++) {
await processSectionPower(p);
}
if (oldSectionId == 2) {
await processSectionPower(power+1);
}
await binFileUtils.endWriteSection(fdNew);
async function processSectionPower(p) {
const nPoints = 2 ** p;
const G = curve[Gstr];
const Fr = curve.Fr;
const sGin = G.F.n8*2;
const sGmid = G.F.n8*3;
let buff;
buff = new BigBuffer(nPoints*sGin);
await binFileUtils.startReadUniqueSection(fdOld, sections, oldSectionId);
if ((oldSectionId == 2)&&(p==power+1)) {
await fdOld.readToBuffer(buff, 0,(nPoints-1)*sGin );
buff.set(curve.G1.zeroAffine, (nPoints-1)*sGin );
} else {
await fdOld.readToBuffer(buff, 0,nPoints*sGin );
}
await binFileUtils.endReadSection(fdOld, true);
buff = await G.lagrangeEvaluations(buff, "affine", "affine", logger, sectionName);
await fdNew.write(buff);
/*
if (p <= curve.Fr.s) {
buff = await G.ifft(buff, "affine", "affine", logger, sectionName);
await fdNew.write(buff);
} else if (p == curve.Fr.s+1) {
const smallM = 1<<curve.Fr.s;
let t0 = new BigBuffer( smallM * sGmid );
let t1 = new BigBuffer( smallM * sGmid );
const shift_to_small_m = Fr.exp(Fr.shift, smallM);
const one_over_denom = Fr.inv(Fr.sub(shift_to_small_m, Fr.one));
let sInvAcc = Fr.one;
for (let i=0; i<smallM; i++) {
const ti = buff.slice(i*sGin, (i+1)*sGin);
const tmi = buff.slice((i+smallM)*sGin, (i+smallM+1)*sGin);
t0.set(
G.timesFr(
G.sub(
G.timesFr(ti , shift_to_small_m),
tmi
),
one_over_denom
),
i*sGmid
);
t1.set(
G.timesFr(
G.sub( tmi, ti),
Fr.mul(sInvAcc, one_over_denom)
),
i*sGmid
);
sInvAcc = Fr.mul(sInvAcc, Fr.shiftInv);
}
t0 = await G.ifft(t0, "jacobian", "affine", logger, sectionName + " t0");
await fdNew.write(t0);
t0 = null;
t1 = await G.ifft(t1, "jacobian", "affine", logger, sectionName + " t0");
await fdNew.write(t1);
} else {
if (logger) logger.error("Power too big");
throw new Error("Power to big");
}
*/
}
}
}
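For orientation, the sections written above re-encode the same powers of tau in the Lagrange basis of each evaluation domain of size 2^p; conceptually (a sketch, with G the section's group generator and omega a primitive 2^p-th root of unity in Fr):
$$ L_0(\tau)\,G,\; L_1(\tau)\,G,\;\dots,\; L_{2^p-1}(\tau)\,G, \qquad L_i(X)=\prod_{j\neq i}\frac{X-\omega^j}{\omega^i-\omega^j}, $$
which is what G.lagrangeEvaluations derives from the monomial-basis points; the commented-out block in the file above is an earlier inverse-FFT implementation of the same transform.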


@ -1,64 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import * as binFileUtils from "@iden3/binfileutils";
import * as utils from "./powersoftau_utils.js";
export default async function truncate(ptauFilename, template, logger) {
const {fd: fdOld, sections} = await binFileUtils.readBinFile(ptauFilename, "ptau", 1);
const {curve, power, ceremonyPower} = await utils.readPTauHeader(fdOld, sections);
const sG1 = curve.G1.F.n8*2;
const sG2 = curve.G2.F.n8*2;
for (let p=1; p<power; p++) {
await generateTruncate(p);
}
await fdOld.close();
return true;
async function generateTruncate(p) {
let sP = p.toString();
while (sP.length<2) sP = "0" + sP;
if (logger) logger.debug("Writing Power: "+sP);
const fdNew = await binFileUtils.createBinFile(template + sP + ".ptau", "ptau", 1, 11);
await utils.writePTauHeader(fdNew, curve, p, ceremonyPower);
await binFileUtils.copySection(fdOld, sections, fdNew, 2, ((2 ** p)*2-1) * sG1 ); // tauG1
await binFileUtils.copySection(fdOld, sections, fdNew, 3, (2 ** p) * sG2); // tauG2
await binFileUtils.copySection(fdOld, sections, fdNew, 4, (2 ** p) * sG1); // alfaTauG1
await binFileUtils.copySection(fdOld, sections, fdNew, 5, (2 ** p) * sG1); // betaTauG1
await binFileUtils.copySection(fdOld, sections, fdNew, 6, sG2); // betaTauG2
await binFileUtils.copySection(fdOld, sections, fdNew, 7); // contributions
await binFileUtils.copySection(fdOld, sections, fdNew, 12, ((2 ** (p+1))*2 -1) * sG1); // L_tauG1
await binFileUtils.copySection(fdOld, sections, fdNew, 13, ((2 ** p)*2 -1) * sG2); // L_tauG2
await binFileUtils.copySection(fdOld, sections, fdNew, 14, ((2 ** p)*2 -1) * sG1); // L_alfaTauG1
await binFileUtils.copySection(fdOld, sections, fdNew, 15, ((2 ** p)*2 -1) * sG1); // L_betaTauG1
await fdNew.close();
}
}


@ -1,368 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import { Scalar } from "ffjavascript";
import Blake2b from "blake2b-wasm";
import * as keyPair from "./keypair.js";
import * as misc from "./misc.js";
import { getCurveFromQ } from "./curves.js";
export async function writePTauHeader(fd, curve, power, ceremonyPower) {
// Write the header
///////////
if (! ceremonyPower) ceremonyPower = power;
await fd.writeULE32(1); // Header type
const pHeaderSize = fd.pos;
await fd.writeULE64(0); // Temporarily set to 0 length
await fd.writeULE32(curve.F1.n64*8);
const buff = new Uint8Array(curve.F1.n8);
Scalar.toRprLE(buff, 0, curve.q, curve.F1.n8);
await fd.write(buff);
await fd.writeULE32(power); // power
await fd.writeULE32(ceremonyPower); // ceremonyPower
const headerSize = fd.pos - pHeaderSize - 8;
const oldPos = fd.pos;
await fd.writeULE64(headerSize, pHeaderSize);
fd.pos = oldPos;
}
export async function readPTauHeader(fd, sections) {
if (!sections[1]) throw new Error(fd.fileName + ": File has no header");
if (sections[1].length>1) throw new Error(fd.fileName +": File has more than one header");
fd.pos = sections[1][0].p;
const n8 = await fd.readULE32();
const buff = await fd.read(n8);
const q = Scalar.fromRprLE(buff);
const curve = await getCurveFromQ(q);
if (curve.F1.n64*8 != n8) throw new Error(fd.fileName +": Invalid size");
const power = await fd.readULE32();
const ceremonyPower = await fd.readULE32();
if (fd.pos-sections[1][0].p != sections[1][0].size) throw new Error("Invalid PTau header size");
return {curve, power, ceremonyPower};
}
export async function readPtauPubKey(fd, curve, montgomery) {
const buff = await fd.read(curve.F1.n8*2*6 + curve.F2.n8*2*3);
return fromPtauPubKeyRpr(buff, 0, curve, montgomery);
}
export function fromPtauPubKeyRpr(buff, pos, curve, montgomery) {
const key = {
tau: {},
alpha: {},
beta: {}
};
key.tau.g1_s = readG1();
key.tau.g1_sx = readG1();
key.alpha.g1_s = readG1();
key.alpha.g1_sx = readG1();
key.beta.g1_s = readG1();
key.beta.g1_sx = readG1();
key.tau.g2_spx = readG2();
key.alpha.g2_spx = readG2();
key.beta.g2_spx = readG2();
return key;
function readG1() {
let p;
if (montgomery) {
p = curve.G1.fromRprLEM( buff, pos );
} else {
p = curve.G1.fromRprUncompressed( buff, pos );
}
pos += curve.G1.F.n8*2;
return p;
}
function readG2() {
let p;
if (montgomery) {
p = curve.G2.fromRprLEM( buff, pos );
} else {
p = curve.G2.fromRprUncompressed( buff, pos );
}
pos += curve.G2.F.n8*2;
return p;
}
}
export function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) {
writeG1(key.tau.g1_s);
writeG1(key.tau.g1_sx);
writeG1(key.alpha.g1_s);
writeG1(key.alpha.g1_sx);
writeG1(key.beta.g1_s);
writeG1(key.beta.g1_sx);
writeG2(key.tau.g2_spx);
writeG2(key.alpha.g2_spx);
writeG2(key.beta.g2_spx);
async function writeG1(p) {
if (montgomery) {
curve.G1.toRprLEM(buff, pos, p);
} else {
curve.G1.toRprUncompressed(buff, pos, p);
}
pos += curve.F1.n8*2;
}
async function writeG2(p) {
if (montgomery) {
curve.G2.toRprLEM(buff, pos, p);
} else {
curve.G2.toRprUncompressed(buff, pos, p);
}
pos += curve.F2.n8*2;
}
return buff;
}
export async function writePtauPubKey(fd, curve, key, montgomery) {
const buff = new Uint8Array(curve.F1.n8*2*6 + curve.F2.n8*2*3);
toPtauPubKeyRpr(buff, 0, curve, key, montgomery);
await fd.write(buff);
}
async function readContribution(fd, curve) {
const c = {};
c.tauG1 = await readG1();
c.tauG2 = await readG2();
c.alphaG1 = await readG1();
c.betaG1 = await readG1();
c.betaG2 = await readG2();
c.key = await readPtauPubKey(fd, curve, true);
c.partialHash = await fd.read(216);
c.nextChallenge = await fd.read(64);
c.type = await fd.readULE32();
const buffV = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3);
toPtauPubKeyRpr(buffV, 0, curve, c.key, false);
const responseHasher = Blake2b(64);
responseHasher.setPartialHash(c.partialHash);
responseHasher.update(buffV);
c.responseHash = responseHasher.digest();
const paramLength = await fd.readULE32();
const curPos = fd.pos;
let lastType =0;
while (fd.pos-curPos < paramLength) {
const buffType = await readDV(1);
if (buffType[0]<= lastType) throw new Error("Parameters in the contribution must be sorted");
lastType = buffType[0];
if (buffType[0]==1) { // Name
const buffLen = await readDV(1);
const buffStr = await readDV(buffLen[0]);
c.name = new TextDecoder().decode(buffStr);
} else if (buffType[0]==2) {
const buffExp = await readDV(1);
c.numIterationsExp = buffExp[0];
} else if (buffType[0]==3) {
const buffLen = await readDV(1);
c.beaconHash = await readDV(buffLen[0]);
} else {
throw new Error("Parameter not recognized");
}
}
if (fd.pos != curPos + paramLength) {
throw new Error("Parametes do not match");
}
return c;
async function readG1() {
const pBuff = await fd.read(curve.G1.F.n8*2);
return curve.G1.fromRprLEM( pBuff );
}
async function readG2() {
const pBuff = await fd.read(curve.G2.F.n8*2);
return curve.G2.fromRprLEM( pBuff );
}
async function readDV(n) {
const b = await fd.read(n);
return new Uint8Array(b);
}
}
export async function readContributions(fd, curve, sections) {
if (!sections[7]) throw new Error(fd.fileName + ": File has no contributions");
if (sections[7].length>1) throw new Error(fd.fileName +": File has more than one contributions section");
fd.pos = sections[7][0].p;
const nContributions = await fd.readULE32();
const contributions = [];
for (let i=0; i<nContributions; i++) {
const c = await readContribution(fd, curve);
c.id = i+1;
contributions.push(c);
}
if (fd.pos-sections[7][0].p != sections[7][0].size) throw new Error("Invalid contribution section size");
return contributions;
}
async function writeContribution(fd, curve, contribution) {
const buffG1 = new Uint8Array(curve.F1.n8*2);
const buffG2 = new Uint8Array(curve.F2.n8*2);
await writeG1(contribution.tauG1);
await writeG2(contribution.tauG2);
await writeG1(contribution.alphaG1);
await writeG1(contribution.betaG1);
await writeG2(contribution.betaG2);
await writePtauPubKey(fd, curve, contribution.key, true);
await fd.write(contribution.partialHash);
await fd.write(contribution.nextChallenge);
await fd.writeULE32(contribution.type || 0);
const params = [];
if (contribution.name) {
params.push(1); // Param Name
const nameData = new TextEncoder("utf-8").encode(contribution.name.substring(0,64));
params.push(nameData.byteLength);
for (let i=0; i<nameData.byteLength; i++) params.push(nameData[i]);
}
if (contribution.type == 1) {
params.push(2); // Param numIterationsExp
params.push(contribution.numIterationsExp);
params.push(3); // Beacon Hash
params.push(contribution.beaconHash.byteLength);
for (let i=0; i<contribution.beaconHash.byteLength; i++) params.push(contribution.beaconHash[i]);
}
if (params.length>0) {
const paramsBuff = new Uint8Array(params);
await fd.writeULE32(paramsBuff.byteLength);
await fd.write(paramsBuff);
} else {
await fd.writeULE32(0);
}
async function writeG1(p) {
curve.G1.toRprLEM(buffG1, 0, p);
await fd.write(buffG1);
}
async function writeG2(p) {
curve.G2.toRprLEM(buffG2, 0, p);
await fd.write(buffG2);
}
}
export async function writeContributions(fd, curve, contributions) {
await fd.writeULE32(7); // Header type
const pContributionsSize = fd.pos;
await fd.writeULE64(0); // Temporarily set to 0 length
await fd.writeULE32(contributions.length);
for (let i=0; i< contributions.length; i++) {
await writeContribution(fd, curve, contributions[i]);
}
const contributionsSize = fd.pos - pContributionsSize - 8;
const oldPos = fd.pos;
await fd.writeULE64(contributionsSize, pContributionsSize);
fd.pos = oldPos;
}
export function calculateFirstChallengeHash(curve, power, logger) {
if (logger) logger.debug("Calculating First Challenge Hash");
const hasher = new Blake2b(64);
const vG1 = new Uint8Array(curve.G1.F.n8*2);
const vG2 = new Uint8Array(curve.G2.F.n8*2);
curve.G1.toRprUncompressed(vG1, 0, curve.G1.g);
curve.G2.toRprUncompressed(vG2, 0, curve.G2.g);
hasher.update(Blake2b(64).digest());
let n;
n=(2 ** power)*2 -1;
if (logger) logger.debug("Calculate Initial Hash: tauG1");
hashBlock(vG1, n);
n= 2 ** power;
if (logger) logger.debug("Calculate Initial Hash: tauG2");
hashBlock(vG2, n);
if (logger) logger.debug("Calculate Initial Hash: alphaTauG1");
hashBlock(vG1, n);
if (logger) logger.debug("Calculate Initial Hash: betaTauG1");
hashBlock(vG1, n);
hasher.update(vG2);
return hasher.digest();
function hashBlock(buff, n) {
const blockSize = 500000;
const nBlocks = Math.floor(n / blockSize);
const rem = n % blockSize;
const bigBuff = new Uint8Array(blockSize * buff.byteLength);
for (let i=0; i<blockSize; i++) {
bigBuff.set(buff, i*buff.byteLength);
}
for (let i=0; i<nBlocks; i++) {
hasher.update(bigBuff);
if (logger) logger.debug("Initial hash: " +i*blockSize);
}
for (let i=0; i<rem; i++) {
hasher.update(buff);
}
}
}
export function keyFromBeacon(curve, challengeHash, beaconHash, numIterationsExp) {
const rng = misc.rngFromBeaconParams(beaconHash, numIterationsExp);
const key = keyPair.createPTauKey(curve, challengeHash, rng);
return key;
}
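As a worked illustration of the optional-parameter block emitted by writeContribution above (values are hypothetical):
// A non-beacon contribution named "ed" produces
const params = [1, 2, 0x65, 0x64];   // tag 1 (name), length 2, then the UTF-8 bytes "e", "d"
// The file then stores the ULE32 length 4 followed by those four bytes;
// readContribution parses the block back and requires the tags to be strictly increasing.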


@ -1,497 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import Blake2b from "blake2b-wasm";
import * as utils from "./powersoftau_utils.js";
import * as keyPair from "./keypair.js";
import crypto from "crypto";
import * as binFileUtils from "@iden3/binfileutils";
import { ChaCha, BigBuffer } from "ffjavascript";
import * as misc from "./misc.js";
const sameRatio = misc.sameRatio;
async function verifyContribution(curve, cur, prev, logger) {
let sr;
if (cur.type == 1) { // Verify the beacon.
const beaconKey = utils.keyFromBeacon(curve, prev.nextChallenge, cur.beaconHash, cur.numIterationsExp);
if (!curve.G1.eq(cur.key.tau.g1_s, beaconKey.tau.g1_s)) {
if (logger) logger.error(`BEACON key (tauG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.tau.g1_sx, beaconKey.tau.g1_sx)) {
if (logger) logger.error(`BEACON key (tauG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G2.eq(cur.key.tau.g2_spx, beaconKey.tau.g2_spx)) {
if (logger) logger.error(`BEACON key (tauG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.alpha.g1_s, beaconKey.alpha.g1_s)) {
if (logger) logger.error(`BEACON key (alphaG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.alpha.g1_sx, beaconKey.alpha.g1_sx)) {
if (logger) logger.error(`BEACON key (alphaG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G2.eq(cur.key.alpha.g2_spx, beaconKey.alpha.g2_spx)) {
if (logger) logger.error(`BEACON key (alphaG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.beta.g1_s, beaconKey.beta.g1_s)) {
if (logger) logger.error(`BEACON key (betaG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.beta.g1_sx, beaconKey.beta.g1_sx)) {
if (logger) logger.error(`BEACON key (betaG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G2.eq(cur.key.beta.g2_spx, beaconKey.beta.g2_spx)) {
if (logger) logger.error(`BEACON key (betaG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
}
cur.key.tau.g2_sp = curve.G2.toAffine(keyPair.getG2sp(curve, 0, prev.nextChallenge, cur.key.tau.g1_s, cur.key.tau.g1_sx));
cur.key.alpha.g2_sp = curve.G2.toAffine(keyPair.getG2sp(curve, 1, prev.nextChallenge, cur.key.alpha.g1_s, cur.key.alpha.g1_sx));
cur.key.beta.g2_sp = curve.G2.toAffine(keyPair.getG2sp(curve, 2, prev.nextChallenge, cur.key.beta.g1_s, cur.key.beta.g1_sx));
sr = await sameRatio(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, cur.key.tau.g2_sp, cur.key.tau.g2_spx);
if (sr !== true) {
if (logger) logger.error("INVALID key (tau) in challenge #"+cur.id);
return false;
}
sr = await sameRatio(curve, cur.key.alpha.g1_s, cur.key.alpha.g1_sx, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx);
if (sr !== true) {
if (logger) logger.error("INVALID key (alpha) in challenge #"+cur.id);
return false;
}
sr = await sameRatio(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, cur.key.beta.g2_sp, cur.key.beta.g2_spx);
if (sr !== true) {
if (logger) logger.error("INVALID key (beta) in challenge #"+cur.id);
return false;
}
sr = await sameRatio(curve, prev.tauG1, cur.tauG1, cur.key.tau.g2_sp, cur.key.tau.g2_spx);
if (sr !== true) {
if (logger) logger.error("INVALID tau*G1. challenge #"+cur.id+" It does not follow the previous contribution");
return false;
}
sr = await sameRatio(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, prev.tauG2, cur.tauG2);
if (sr !== true) {
if (logger) logger.error("INVALID tau*G2. challenge #"+cur.id+" It does not follow the previous contribution");
return false;
}
sr = await sameRatio(curve, prev.alphaG1, cur.alphaG1, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx);
if (sr !== true) {
if (logger) logger.error("INVALID alpha*G1. challenge #"+cur.id+" It does not follow the previous contribution");
return false;
}
sr = await sameRatio(curve, prev.betaG1, cur.betaG1, cur.key.beta.g2_sp, cur.key.beta.g2_spx);
if (sr !== true) {
if (logger) logger.error("INVALID beta*G1. challenge #"+cur.id+" It does not follow the previous contribution");
return false;
}
sr = await sameRatio(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, prev.betaG2, cur.betaG2);
if (sr !== true) {
if (logger) logger.error("INVALID beta*G2. challenge #"+cur.id+"It does not follow the previous contribution");
return false;
}
if (logger) logger.info("Powers Of tau file OK!");
return true;
}
export default async function verify(tauFilename, logger) {
let sr;
await Blake2b.ready();
const {fd, sections} = await binFileUtils.readBinFile(tauFilename, "ptau", 1);
const {curve, power, ceremonyPower} = await utils.readPTauHeader(fd, sections);
const contrs = await utils.readContributions(fd, curve, sections);
if (logger) logger.debug("power: 2**" + power);
// Verify Last contribution
if (logger) logger.debug("Computing initial contribution hash");
const initialContribution = {
tauG1: curve.G1.g,
tauG2: curve.G2.g,
alphaG1: curve.G1.g,
betaG1: curve.G1.g,
betaG2: curve.G2.g,
nextChallenge: utils.calculateFirstChallengeHash(curve, ceremonyPower, logger),
responseHash: Blake2b(64).digest()
};
if (contrs.length == 0) {
if (logger) logger.error("This file has no contribution! It cannot be used in production");
return false;
}
let prevContr;
if (contrs.length>1) {
prevContr = contrs[contrs.length-2];
} else {
prevContr = initialContribution;
}
const curContr = contrs[contrs.length-1];
if (logger) logger.debug("Validating contribution #"+contrs[contrs.length-1].id);
const res = await verifyContribution(curve, curContr, prevContr, logger);
if (!res) return false;
const nextContributionHasher = Blake2b(64);
nextContributionHasher.update(curContr.responseHash);
// Verify powers and compute nextChallengeHash
// await test();
// Verify Section tau*G1
if (logger) logger.debug("Verifying powers in tau*G1 section");
const rTau1 = await processSection(2, "G1", "tauG1", (2 ** power)*2-1, [0, 1], logger);
sr = await sameRatio(curve, rTau1.R1, rTau1.R2, curve.G2.g, curContr.tauG2);
if (sr !== true) {
if (logger) logger.error("tauG1 section. Powers do not match");
return false;
}
if (!curve.G1.eq(curve.G1.g, rTau1.singularPoints[0])) {
if (logger) logger.error("First element of tau*G1 section must be the generator");
return false;
}
if (!curve.G1.eq(curContr.tauG1, rTau1.singularPoints[1])) {
if (logger) logger.error("Second element of tau*G1 section does not match the one in the contribution section");
return false;
}
// await test();
// Verify Section tau*G2
if (logger) logger.debug("Verifying powers in tau*G2 section");
const rTau2 = await processSection(3, "G2", "tauG2", 2 ** power, [0, 1], logger);
sr = await sameRatio(curve, curve.G1.g, curContr.tauG1, rTau2.R1, rTau2.R2);
if (sr !== true) {
if (logger) logger.error("tauG2 section. Powers do not match");
return false;
}
if (!curve.G2.eq(curve.G2.g, rTau2.singularPoints[0])) {
if (logger) logger.error("First element of tau*G2 section must be the generator");
return false;
}
if (!curve.G2.eq(curContr.tauG2, rTau2.singularPoints[1])) {
if (logger) logger.error("Second element of tau*G2 section does not match the one in the contribution section");
return false;
}
// Verify Section alpha*tau*G1
if (logger) logger.debug("Verifying powers in alpha*tau*G1 section");
const rAlphaTauG1 = await processSection(4, "G1", "alphatauG1", 2 ** power, [0], logger);
sr = await sameRatio(curve, rAlphaTauG1.R1, rAlphaTauG1.R2, curve.G2.g, curContr.tauG2);
if (sr !== true) {
if (logger) logger.error("alphaTauG1 section. Powers do not match");
return false;
}
if (!curve.G1.eq(curContr.alphaG1, rAlphaTauG1.singularPoints[0])) {
if (logger) logger.error("First element of alpha*tau*G1 section (alpha*G1) does not match the one in the contribution section");
return false;
}
// Verify Section beta*tau*G1
if (logger) logger.debug("Verifying powers in beta*tau*G1 section");
const rBetaTauG1 = await processSection(5, "G1", "betatauG1", 2 ** power, [0], logger);
sr = await sameRatio(curve, rBetaTauG1.R1, rBetaTauG1.R2, curve.G2.g, curContr.tauG2);
if (sr !== true) {
if (logger) logger.error("betaTauG1 section. Powers do not match");
return false;
}
if (!curve.G1.eq(curContr.betaG1, rBetaTauG1.singularPoints[0])) {
if (logger) logger.error("First element of beta*tau*G1 section (beta*G1) does not match the one in the contribution section");
return false;
}
//Verify Beta G2
const betaG2 = await processSectionBetaG2(logger);
if (!curve.G2.eq(curContr.betaG2, betaG2)) {
if (logger) logger.error("betaG2 element in betaG2 section does not match the one in the contribution section");
return false;
}
const nextContributionHash = nextContributionHasher.digest();
// Check the nextChallengeHash
if (power == ceremonyPower) {
if (!misc.hashIsEqual(nextContributionHash,curContr.nextChallenge)) {
if (logger) logger.error("Hash of the values does not match the next challenge of the last contributor in the contributions section");
return false;
}
}
if (logger) logger.info(misc.formatHash(nextContributionHash, "Next challenge hash: "));
// Verify Previous contributions
printContribution(curContr, prevContr);
for (let i = contrs.length-2; i>=0; i--) {
const curContr = contrs[i];
const prevContr = (i>0) ? contrs[i-1] : initialContribution;
const res = await verifyContribution(curve, curContr, prevContr, logger);
if (!res) return false;
printContribution(curContr, prevContr, logger);
}
if (logger) logger.info("-----------------------------------------------------");
if ((!sections[12]) || (!sections[13]) || (!sections[14]) || (!sections[15])) {
if (logger) logger.warn(
"this file does not contain phase2 precalculated values. Please run: \n" +
" snarkjs \"powersoftau preparephase2\" to prepare this file to be used in the phase2 ceremony."
);
} else {
let res;
res = await verifyLagrangeEvaluations("G1", 2, 12, "tauG1", logger);
if (!res) return false;
res = await verifyLagrangeEvaluations("G2", 3, 13, "tauG2", logger);
if (!res) return false;
res = await verifyLagrangeEvaluations("G1", 4, 14, "alphaTauG1", logger);
if (!res) return false;
res = await verifyLagrangeEvaluations("G1", 5, 15, "betaTauG1", logger);
if (!res) return false;
}
await fd.close();
if (logger) logger.info("Powers of Tau Ok!");
return true;
function printContribution(curContr, prevContr) {
if (!logger) return;
logger.info("-----------------------------------------------------");
logger.info(`Contribution #${curContr.id}: ${curContr.name ||""}`);
logger.info(misc.formatHash(curContr.nextChallenge, "Next Challenge: "));
const buffV = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3);
utils.toPtauPubKeyRpr(buffV, 0, curve, curContr.key, false);
const responseHasher = Blake2b(64);
responseHasher.setPartialHash(curContr.partialHash);
responseHasher.update(buffV);
const responseHash = responseHasher.digest();
logger.info(misc.formatHash(responseHash, "Response Hash:"));
logger.info(misc.formatHash(prevContr.nextChallenge, "Challenge Hash:"));
if (curContr.type == 1) {
logger.info(`Beacon generator: ${misc.byteArray2hex(curContr.beaconHash)}`);
logger.info(`Beacon iterations Exp: ${curContr.numIterationsExp}`);
}
}
async function processSectionBetaG2(logger) {
const G = curve.G2;
const sG = G.F.n8*2;
const buffUv = new Uint8Array(sG);
if (!sections[6]) {
logger.error("File has no BetaG2 section");
throw new Error("File has no BetaG2 section");
}
if (sections[6].length>1) {
logger.error("File has no BetaG2 section");
throw new Error("File has more than one GetaG2 section");
}
fd.pos = sections[6][0].p;
const buff = await fd.read(sG);
const P = G.fromRprLEM(buff);
G.toRprUncompressed(buffUv, 0, P);
nextContributionHasher.update(buffUv);
return P;
}
async function processSection(idSection, groupName, sectionName, nPoints, singularPointIndexes, logger) {
const MAX_CHUNK_SIZE = 1<<16;
const G = curve[groupName];
const sG = G.F.n8*2;
await binFileUtils.startReadUniqueSection(fd, sections, idSection);
const singularPoints = [];
let R1 = G.zero;
let R2 = G.zero;
let lastBase = G.zero;
for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
if (logger) logger.debug(`points relations: ${sectionName}: ${i}/${nPoints} `);
const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
const bases = await fd.read(n*sG);
const basesU = await G.batchLEMtoU(bases);
nextContributionHasher.update(basesU);
const scalars = new Uint8Array(4*(n-1));
crypto.randomFillSync(scalars);
if (i>0) {
const firstBase = G.fromRprLEM(bases, 0);
const r = crypto.randomBytes(4).readUInt32BE(0, true);
R1 = G.add(R1, G.timesScalar(lastBase, r));
R2 = G.add(R2, G.timesScalar(firstBase, r));
}
const r1 = await G.multiExpAffine(bases.slice(0, (n-1)*sG), scalars);
const r2 = await G.multiExpAffine(bases.slice(sG), scalars);
R1 = G.add(R1, r1);
R2 = G.add(R2, r2);
lastBase = G.fromRprLEM( bases, (n-1)*sG);
for (let j=0; j<singularPointIndexes.length; j++) {
const sp = singularPointIndexes[j];
if ((sp >=i) && (sp < i+n)) {
const P = G.fromRprLEM(bases, (sp-i)*sG);
singularPoints.push(P);
}
}
}
await binFileUtils.endReadSection(fd);
return {
R1: R1,
R2: R2,
singularPoints: singularPoints
};
}
async function verifyLagrangeEvaluations(gName, tauSection, lagrangeSection, sectionName, logger) {
if (logger) logger.debug(`Verifying phase2 calculated values ${sectionName}...`);
const G = curve[gName];
const sG = G.F.n8*2;
const seed= new Array(8);
for (let i=0; i<8; i++) {
seed[i] = crypto.randomBytes(4).readUInt32BE(0, true);
}
for (let p=0; p<= power; p ++) {
const res = await verifyPower(p);
if (!res) return false;
}
if (tauSection == 2) {
const res = await verifyPower(power+1);
if (!res) return false;
}
return true;
async function verifyPower(p) {
if (logger) logger.debug(`Power ${p}...`);
const n8r = curve.Fr.n8;
const nPoints = 2 ** p;
let buff_r = new Uint32Array(nPoints);
let buffG;
let rng = new ChaCha(seed);
if (logger) logger.debug(`Creating random numbers Powers${p}...`);
for (let i=0; i<nPoints; i++) {
if ((p == power+1)&&(i == nPoints-1)) {
buff_r[i] = 0;
} else {
buff_r[i] = rng.nextU32();
}
}
buff_r = new Uint8Array(buff_r.buffer, buff_r.byteOffset, buff_r.byteLength);
if (logger) logger.debug(`reading points Powers${p}...`);
await binFileUtils.startReadUniqueSection(fd, sections, tauSection);
buffG = new BigBuffer(nPoints*sG);
if (p == power+1) {
await fd.readToBuffer(buffG, 0, (nPoints-1)*sG);
buffG.set(curve.G1.zeroAffine, (nPoints-1)*sG);
} else {
await fd.readToBuffer(buffG, 0, nPoints*sG);
}
await binFileUtils.endReadSection(fd, true);
const resTau = await G.multiExpAffine(buffG, buff_r, logger, sectionName + "_" + p);
buff_r = new BigBuffer(nPoints * n8r);
rng = new ChaCha(seed);
const buff4 = new Uint8Array(4);
const buff4V = new DataView(buff4.buffer);
if (logger) logger.debug(`Creating random numbers Powers${p}...`);
for (let i=0; i<nPoints; i++) {
if ((i != nPoints-1) || (p != power+1)) {
buff4V.setUint32(0, rng.nextU32(), true);
buff_r.set(buff4, i*n8r);
}
}
if (logger) logger.debug(`batchToMontgomery ${p}...`);
buff_r = await curve.Fr.batchToMontgomery(buff_r);
if (logger) logger.debug(`fft ${p}...`);
buff_r = await curve.Fr.fft(buff_r);
if (logger) logger.debug(`batchFromMontgomery ${p}...`);
buff_r = await curve.Fr.batchFromMontgomery(buff_r);
if (logger) logger.debug(`reading points Lagrange${p}...`);
await binFileUtils.startReadUniqueSection(fd, sections, lagrangeSection);
fd.pos += sG*((2 ** p)-1);
await fd.readToBuffer(buffG, 0, nPoints*sG);
await binFileUtils.endReadSection(fd, true);
const resLagrange = await G.multiExpAffine(buffG, buff_r, logger, sectionName + "_" + p + "_transformed");
if (!G.eq(resTau, resLagrange)) {
if (logger) logger.error("Phase2 caclutation does not match with powers of tau");
return false;
}
return true;
}
}
}
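The per-section check in processSection above is a standard random-linear-combination ratio test. Writing the section's elements as [tau^i]_1 and the random 32-bit scalars as r_i, it accumulates (a sketch of the underlying identity, not taken from the source):
$$ R_1=\sum_i r_i\,[\tau^{i}]_1, \qquad R_2=\sum_i r_i\,[\tau^{i+1}]_1, $$
and sameRatio then checks the pairing equation e(R_1, [tau]_2) = e(R_2, [1]_2), which holds for random r_i (except with negligible probability) only if every consecutive pair of points differs by the same factor tau.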

141
src/prover_groth.js Normal file

@ -0,0 +1,141 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const BN128 = require("./bn128.js");
const PolField = require("./polfield.js");
const ZqField = require("./zqfield.js");
const bn128 = new BN128();
const PolF = new PolField(new ZqField(bn128.r));
const G1 = bn128.G1;
const G2 = bn128.G2;
module.exports = function genProof(vk_proof, witness) {
const proof = {};
const r = PolF.F.random();
const s = PolF.F.random();
/* Uncomment to generate a deterministic proof to debug
const r = PolF.F.zero;
const s = PolF.F.zero;
*/
proof.pi_a = G1.zero;
proof.pi_b = G2.zero;
proof.pi_c = G1.zero;
let pib1 = G1.zero;
// Accumulate the contribution of every signal to A (G1), B (G2) and B (G1)
for (let s= 0; s< vk_proof.nVars; s++) {
// pi_a = pi_a + A[s] * witness[s];
proof.pi_a = G1.add( proof.pi_a, G1.mulScalar( vk_proof.A[s], witness[s]));
// pi_b = pi_b + B[s] * witness[s];
proof.pi_b = G2.add( proof.pi_b, G2.mulScalar( vk_proof.B2[s], witness[s]));
pib1 = G1.add( pib1, G1.mulScalar( vk_proof.B1[s], witness[s]));
}
// Skip the public entries and the "1" signal that are forced by the verifier
for (let s= vk_proof.nPublic+1; s< vk_proof.nVars; s++) {
// pi_c = pi_c + C[s] * witness[s];
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( vk_proof.C[s], witness[s]));
}
proof.pi_a = G1.add( proof.pi_a, vk_proof.vk_alfa_1 );
proof.pi_a = G1.add( proof.pi_a, G1.mulScalar( vk_proof.vk_delta_1, r ));
proof.pi_b = G2.add( proof.pi_b, vk_proof.vk_beta_2 );
proof.pi_b = G2.add( proof.pi_b, G2.mulScalar( vk_proof.vk_delta_2, s ));
pib1 = G1.add( pib1, vk_proof.vk_beta_1 );
pib1 = G1.add( pib1, G1.mulScalar( vk_proof.vk_delta_1, s ));
const h = calculateH(vk_proof, witness);
// proof.pi_c = G1.affine(proof.pi_c);
// console.log("pi_onlyc", proof.pi_c);
for (let i = 0; i < h.length; i++) {
// console.log(i + "->" + h[i].toString());
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( vk_proof.hExps[i], h[i]));
}
// proof.pi_c = G1.affine(proof.pi_c);
// console.log("pi_candh", proof.pi_c);
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( proof.pi_a, s ));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( pib1, r ));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( vk_proof.vk_delta_1, PolF.F.affine(PolF.F.neg(PolF.F.mul(r,s) ))));
const publicSignals = witness.slice(1, vk_proof.nPublic+1);
proof.pi_a = G1.affine(proof.pi_a);
proof.pi_b = G2.affine(proof.pi_b);
proof.pi_c = G1.affine(proof.pi_c);
proof.protocol = "groth";
return {proof, publicSignals};
};
function calculateH(vk_proof, witness) {
const F = PolF.F;
const m = vk_proof.domainSize;
const polA_T = new Array(m).fill(PolF.F.zero);
const polB_T = new Array(m).fill(PolF.F.zero);
const polC_T = new Array(m).fill(PolF.F.zero);
for (let s=0; s<vk_proof.nVars; s++) {
for (let c in vk_proof.polsA[s]) {
polA_T[c] = F.add(polA_T[c], F.mul(witness[s], vk_proof.polsA[s][c]));
}
for (let c in vk_proof.polsB[s]) {
polB_T[c] = F.add(polB_T[c], F.mul(witness[s], vk_proof.polsB[s][c]));
}
for (let c in vk_proof.polsC[s]) {
polC_T[c] = F.add(polC_T[c], F.mul(witness[s], vk_proof.polsC[s][c]));
}
}
const polA_S = PolF.ifft(polA_T);
const polB_S = PolF.ifft(polB_T);
const polAB_S = PolF.mul(polA_S, polB_S);
const polC_S = PolF.ifft(polC_T);
const polABC_S = PolF.sub(polAB_S, polC_S);
const H_S = polABC_S.slice(m);
return H_S;
}
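For orientation, the assembly above matches the proof elements of the referenced paper (a sketch in that notation, with witness values a_i, public count l, and blinding scalars r, s; the correspondence to the vk_proof fields is inferred from the code):
$$ \pi_A=[\alpha]_1+\sum_i a_i\,[A_i(\tau)]_1+r\,[\delta]_1, \qquad \pi_B=[\beta]_2+\sum_i a_i\,[B_i(\tau)]_2+s\,[\delta]_2, $$
$$ \pi_C=\sum_{i>\ell} a_i\,[K_i]_1+\sum_k h_k\,\Big[\frac{\tau^k\,t(\tau)}{\delta}\Big]_1+s\,\pi_A+r\,\pi_{B,1}-rs\,[\delta]_1, $$
where K_i = (beta*A_i(tau) + alpha*B_i(tau) + C_i(tau)) / delta, h(X) = sum_k h_k X^k is the quotient returned by calculateH, and pi_B1 is the same combination as pi_B evaluated in G1 (the pib1 accumulator).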

177
src/prover_kimleeoh.js Normal file

@ -0,0 +1,177 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const BN128 = require("./bn128.js");
const PolField = require("./polfield.js");
const ZqField = require("./zqfield.js");
const createKeccakHash = require("keccak");
const bigInt = require("./bigint");
const bn128 = new BN128();
const PolF = new PolField(new ZqField(bn128.r));
const G1 = bn128.G1;
const G2 = bn128.G2;
module.exports = function genProof(vk_proof, witness) {
const proof = {};
const r = PolF.F.random();
const s = PolF.F.random();
// const r = PolF.F.zero;
// const s = PolF.F.zero;
/* Uncomment to generate a deterministic proof to debug
const r = PolF.F.zero;
const s = PolF.F.zero;
*/
proof.pi_a = G1.zero;
proof.pi_b = G2.zero;
proof.pi_c = G1.zero;
let pib1 = G1.zero;
let piadelta = G1.zero;
// Accumulate the contribution of every signal to A (G1), B (G2 and G1) and A*delta
for (let s= 0; s< vk_proof.nVars; s++) {
// pi_a = pi_a + A[s] * witness[s];
proof.pi_a = G1.add( proof.pi_a, G1.mulScalar( vk_proof.A[s], witness[s]));
// pi_b = pi_b + B[s] * witness[s];
proof.pi_b = G2.add( proof.pi_b, G2.mulScalar( vk_proof.B2[s], witness[s]));
piadelta = G1.add( piadelta, G1.mulScalar( vk_proof.Adelta[s], witness[s]));
pib1 = G1.add( pib1, G1.mulScalar( vk_proof.B1[s], witness[s]));
}
// Skip the public entries and the "1" signal that are forced by the verifier
for (let s= vk_proof.nPublic+1; s< vk_proof.nVars; s++) {
// pi_c = pi_c + C[s] * witness[s];
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( vk_proof.C[s], witness[s]));
}
proof.pi_a = G1.add( proof.pi_a, vk_proof.vk_alfa_1 );
proof.pi_a = G1.add( proof.pi_a, G1.mulScalar( G1.g, r ));
piadelta = G1.add( piadelta, vk_proof.vk_alfadelta_1);
piadelta = G1.add( piadelta, G1.mulScalar( vk_proof.vk_delta_1, r ));
proof.pi_b = G2.add( proof.pi_b, vk_proof.vk_beta_2 );
proof.pi_b = G2.add( proof.pi_b, G2.mulScalar( G2.g, s ));
pib1 = G1.add( pib1, vk_proof.vk_beta_1 );
pib1 = G1.add( pib1, G1.mulScalar( G1.g, s ));
proof.pi_a = G1.affine(proof.pi_a);
proof.pi_b = G2.affine(proof.pi_b);
const buff = Buffer.concat([
proof.pi_a[0].beInt2Buff(32),
proof.pi_a[1].beInt2Buff(32),
proof.pi_b[0][0].beInt2Buff(32),
proof.pi_b[0][1].beInt2Buff(32),
proof.pi_b[1][0].beInt2Buff(32),
proof.pi_b[1][1].beInt2Buff(32)
]);
const h1buff = createKeccakHash("keccak256").update(buff).digest();
const h2buff = createKeccakHash("keccak256").update(h1buff).digest();
const h1 = bigInt.beBuff2int(h1buff);
const h2 = bigInt.beBuff2int(h2buff);
// const h1 = PolF.F.zero;
// const h2 = PolF.F.zero;
console.log(h1.toString());
console.log(h2.toString());
const h = calculateH(vk_proof, witness);
// proof.pi_c = G1.affine(proof.pi_c);
// console.log("pi_onlyc", proof.pi_c);
for (let i = 0; i < h.length; i++) {
// console.log(i + "->" + h[i].toString());
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( vk_proof.hExps[i], h[i]));
}
// proof.pi_c = G1.affine(proof.pi_c);
// console.log("pi_candh", proof.pi_c);
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( proof.pi_a, s ));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( pib1, r ));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( G1.g, PolF.F.affine(PolF.F.neg(PolF.F.mul(r,s) ))));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( piadelta, h2 ));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( pib1, h1 ));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( vk_proof.vk_delta_1, PolF.F.mul(h1,h2)));
const publicSignals = witness.slice(1, vk_proof.nPublic+1);
proof.pi_c = G1.affine(proof.pi_c);
proof.protocol = "kimleeoh";
return {proof, publicSignals};
};
function calculateH(vk_proof, witness) {
const F = PolF.F;
const m = vk_proof.domainSize;
const polA_T = new Array(m).fill(PolF.F.zero);
const polB_T = new Array(m).fill(PolF.F.zero);
const polC_T = new Array(m).fill(PolF.F.zero);
for (let s=0; s<vk_proof.nVars; s++) {
for (let c in vk_proof.polsA[s]) {
polA_T[c] = F.add(polA_T[c], F.mul(witness[s], vk_proof.polsA[s][c]));
}
for (let c in vk_proof.polsB[s]) {
polB_T[c] = F.add(polB_T[c], F.mul(witness[s], vk_proof.polsB[s][c]));
}
for (let c in vk_proof.polsC[s]) {
polC_T[c] = F.add(polC_T[c], F.mul(witness[s], vk_proof.polsC[s][c]));
}
}
const polA_S = PolF.ifft(polA_T);
const polB_S = PolF.ifft(polB_T);
const polAB_S = PolF.mul(polA_S, polB_S);
const polC_S = PolF.ifft(polC_T);
const polABC_S = PolF.sub(polAB_S, polC_S);
const H_S = polABC_S.slice(m);
return H_S;
}

210
src/prover_original.js Normal file

@ -0,0 +1,210 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const BN128 = require("./bn128.js");
const PolField = require("./polfield.js");
const ZqField = require("./zqfield.js");
const bn128 = new BN128();
const PolF = new PolField(new ZqField(bn128.r));
const G1 = bn128.G1;
const G2 = bn128.G2;
module.exports = function genProof(vk_proof, witness) {
const proof = {};
const d1 = PolF.F.random();
const d2 = PolF.F.random();
const d3 = PolF.F.random();
proof.pi_a = G1.zero;
proof.pi_ap = G1.zero;
proof.pi_b = G2.zero;
proof.pi_bp = G1.zero;
proof.pi_c = G1.zero;
proof.pi_cp = G1.zero;
proof.pi_kp = G1.zero;
proof.pi_h = G1.zero;
// Skip public entries and the "1" signal that are forced by the verifier
for (let s= vk_proof.nPublic+1; s< vk_proof.nVars; s++) {
// pi_a = pi_a + A[s] * witness[s];
proof.pi_a = G1.add( proof.pi_a, G1.mulScalar( vk_proof.A[s], witness[s]));
// pi_ap = pi_ap + Ap[s] * witness[s];
proof.pi_ap = G1.add( proof.pi_ap, G1.mulScalar( vk_proof.Ap[s], witness[s]));
}
for (let s= 0; s< vk_proof.nVars; s++) {
// pi_b = pi_b + B[s] * witness[s];
proof.pi_b = G2.add( proof.pi_b, G2.mulScalar( vk_proof.B[s], witness[s]));
// pi_bp = pi_bp + Bp[s] * witness[s];
proof.pi_bp = G1.add( proof.pi_bp, G1.mulScalar( vk_proof.Bp[s], witness[s]));
// pi_c = pi_c + C[s] * witness[s];
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( vk_proof.C[s], witness[s]));
// pi_cp = pi_cp + Cp[s] * witness[s];
proof.pi_cp = G1.add( proof.pi_cp, G1.mulScalar( vk_proof.Cp[s], witness[s]));
// pi_kp = pi_kp + Kp[s] * witness[s];
proof.pi_kp = G1.add( proof.pi_kp, G1.mulScalar( vk_proof.Kp[s], witness[s]));
}
proof.pi_a = G1.add( proof.pi_a, G1.mulScalar( vk_proof.A[vk_proof.nVars], d1));
proof.pi_ap = G1.add( proof.pi_ap, G1.mulScalar( vk_proof.Ap[vk_proof.nVars], d1));
proof.pi_b = G2.add( proof.pi_b, G2.mulScalar( vk_proof.B[vk_proof.nVars], d2));
proof.pi_bp = G1.add( proof.pi_bp, G1.mulScalar( vk_proof.Bp[vk_proof.nVars], d2));
proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( vk_proof.C[vk_proof.nVars], d3));
proof.pi_cp = G1.add( proof.pi_cp, G1.mulScalar( vk_proof.Cp[vk_proof.nVars], d3));
proof.pi_kp = G1.add( proof.pi_kp, G1.mulScalar( vk_proof.Kp[vk_proof.nVars ], d1));
proof.pi_kp = G1.add( proof.pi_kp, G1.mulScalar( vk_proof.Kp[vk_proof.nVars+1], d2));
proof.pi_kp = G1.add( proof.pi_kp, G1.mulScalar( vk_proof.Kp[vk_proof.nVars+2], d3));
/*
let polA = [];
let polB = [];
let polC = [];
for (let s= 0; s< vk_proof.nVars; s++) {
polA = PolF.add(
polA,
PolF.mul(
vk_proof.polsA[s],
[witness[s]] ));
polB = PolF.add(
polB,
PolF.mul(
vk_proof.polsB[s],
[witness[s]] ));
polC = PolF.add(
polC,
PolF.mul(
vk_proof.polsC[s],
[witness[s]] ));
}
let polFull = PolF.sub(PolF.mul( polA, polB), polC);
const h = PolF.div(polFull, vk_proof.polZ );
*/
const h = calculateH(vk_proof, witness, d1, d2, d3);
// console.log(h.length + "/" + vk_proof.hExps.length);
for (let i = 0; i < h.length; i++) {
proof.pi_h = G1.add( proof.pi_h, G1.mulScalar( vk_proof.hExps[i], h[i]));
}
proof.pi_a = G1.affine(proof.pi_a);
proof.pi_b = G2.affine(proof.pi_b);
proof.pi_c = G1.affine(proof.pi_c);
proof.pi_ap = G1.affine(proof.pi_ap);
proof.pi_bp = G1.affine(proof.pi_bp);
proof.pi_cp = G1.affine(proof.pi_cp);
proof.pi_kp = G1.affine(proof.pi_kp);
proof.pi_h = G1.affine(proof.pi_h);
// proof.h=h;
proof.protocol = "original";
const publicSignals = witness.slice(1, vk_proof.nPublic+1);
return {proof, publicSignals};
};
function calculateH(vk_proof, witness, d1, d2, d3) {
const F = PolF.F;
const m = vk_proof.domainSize;
const polA_T = new Array(m).fill(PolF.F.zero);
const polB_T = new Array(m).fill(PolF.F.zero);
const polC_T = new Array(m).fill(PolF.F.zero);
for (let s=0; s<vk_proof.nVars; s++) {
for (let c in vk_proof.polsA[s]) {
polA_T[c] = F.add(polA_T[c], F.mul(witness[s], vk_proof.polsA[s][c]));
}
for (let c in vk_proof.polsB[s]) {
polB_T[c] = F.add(polB_T[c], F.mul(witness[s], vk_proof.polsB[s][c]));
}
for (let c in vk_proof.polsC[s]) {
polC_T[c] = F.add(polC_T[c], F.mul(witness[s], vk_proof.polsC[s][c]));
}
}
const polA_S = PolF.ifft(polA_T);
const polB_S = PolF.ifft(polB_T);
const polAB_S = PolF.mul(polA_S, polB_S);
const polC_S = PolF.ifft(polC_T);
const polABC_S = PolF.sub(polAB_S, polC_S);
const polZ_S = new Array(m+1).fill(F.zero);
polZ_S[m] = F.one;
polZ_S[0] = F.neg(F.one);
let H_S = PolF.div(polABC_S, polZ_S);
/*
const H2S = PolF.mul(H_S, polZ_S);
if (PolF.equals(H2S, polABC_S)) {
console.log("Is Divisible!");
} else {
console.log("ERROR: Not divisible!");
}
*/
/* add coefficients of the polynomial (d2*A + d1*B - d3) + d1*d2*Z */
H_S = PolF.extend(H_S, m+1);
for (let i=0; i<m; i++) {
const d2A = PolF.F.mul(d2, polA_S[i]);
const d1B = PolF.F.mul(d1, polB_S[i]);
H_S[i] = PolF.F.add(H_S[i], PolF.F.add(d2A, d1B));
}
H_S[0] = PolF.F.sub(H_S[0], d3);
// Z = x^m -1
const d1d2 = PolF.F.mul(d1, d2);
H_S[m] = PolF.F.add(H_S[m], d1d2);
H_S[0] = PolF.F.sub(H_S[0], d1d2);
H_S = PolF.reduce(PolF.affine(H_S));
return H_S;
}


@ -1,23 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
export {default as print} from "./r1cs_print.js";
export {default as info} from "./r1cs_info.js";
export {default as exportJson} from "./r1cs_export_json.js";


@ -1,49 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import {readR1cs} from "r1csfile";
export function stringifyBigInts(Fr, o) {
if (o instanceof Uint8Array) {
return Fr.toString(o);
} else if (Array.isArray(o)) {
return o.map(stringifyBigInts.bind(null, Fr));
} else if (typeof o == "object") {
const res = {};
const keys = Object.keys(o);
keys.forEach( (k) => {
res[k] = stringifyBigInts(Fr, o[k]);
});
return res;
} else if ((typeof(o) == "bigint") || o.eq !== undefined) {
return o.toString(10);
} else {
return o;
}
}
export default async function r1csExportJson(r1csFileName, logger) {
const cir = await readR1cs(r1csFileName, true, true, true, logger);
const Fr=cir.curve.Fr;
delete cir.curve;
return stringifyBigInts(Fr, cir);
}


@ -1,45 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import { Scalar } from "ffjavascript";
import { readR1cs } from "r1csfile";
const bls12381r = Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
const bn128r = Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");
export default async function r1csInfo(r1csName, logger) {
const cir = await readR1cs(r1csName);
if (Scalar.eq(cir.prime, bn128r)) {
if (logger) logger.info("Curve: bn-128");
} else if (Scalar.eq(cir.prime, bls12381r)) {
if (logger) logger.info("Curve: bls12-381");
} else {
if (logger) logger.info(`Unknown Curve. Prime: ${Scalar.toString(cir.prime)}`);
}
if (logger) logger.info(`# of Wires: ${cir.nVars}`);
if (logger) logger.info(`# of Constraints: ${cir.nConstraints}`);
if (logger) logger.info(`# of Private Inputs: ${cir.nPrvInputs}`);
if (logger) logger.info(`# of Public Inputs: ${cir.nPubInputs}`);
if (logger) logger.info(`# of Labels: ${cir.nLabels}`);
if (logger) logger.info(`# of Outputs: ${cir.nOutputs}`);
return cir;
}


@ -1,45 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
export default function r1csPrint(r1cs, syms, logger) {
for (let i=0; i<r1cs.constraints.length; i++) {
printCostraint(r1cs.constraints[i]);
}
function printCostraint(c) {
const lc2str = (lc) => {
let S = "";
const keys = Object.keys(lc);
keys.forEach( (k) => {
let name = syms.varIdx2Name[k];
if (name == "one") name = "";
let vs = r1cs.curve.Fr.toString(lc[k]);
if (vs == "1") vs = ""; // Do not show ones
if (vs == "-1") vs = "-"; // Do not show ones
if ((S!="")&&(vs[0]!="-")) vs = "+"+vs;
if (S!="") vs = " "+vs;
S= S + vs + name;
});
return S;
};
const S = `[ ${lc2str(c[0])} ] * [ ${lc2str(c[1])} ] - [ ${lc2str(c[2])} ] = 0`;
if (logger) logger.info(S);
}
}

127
src/ratfield.js Normal file

@ -0,0 +1,127 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const fUtils = require("./futils.js");
class RatField {
constructor(F) {
this.F = F;
this.zero = [F.zero, F.one];
this.one = [F.one, F.one];
this.two = [F.two, F.one];
this.twoinv = [F.one, F.two];
this.q = F.q;
}
add(a,b) {
return [
this.F.add(
this.F.mul(a[0], b[1]),
this.F.mul(a[1], b[0])),
this.F.mul(a[1], b[1])];
}
double(a) {
return [this.F.add(a[0], a[0]), a[1]];
}
sub(a,b) {
return [
this.F.sub(
this.F.mul(a[0], b[1]),
this.F.mul(a[1], b[0])),
this.F.mul(a[1], b[1])];
}
neg(a) {
return [this.F.neg(a[0]), a[1]];
}
mul(a,b) {
return [
this.F.mul(a[0], b[0]),
this.F.mul(a[1], b[1]),
];
}
copy(a) {
return [a[0], a[1]];
}
div(a, b) {
return [
this.F.mul(a[0], b[1]),
this.F.mul(a[1], b[0]),
];
}
inverse(a) {
return [a[1], a[0]];
}
square(a) {
return [
this.F.square(a[0]),
this.F.square(a[1])
];
}
mulScalar(base, e) {
return [this.F.mulScalar(base[0], e) , base[1]];
}
exp(base, e) {
return fUtils.exp(this, base, e);
}
equals(a, b) {
return this.F.equals(
this.F.mul(a[0], b[1]),
this.F.mul(a[1], b[0])
);
}
isZero(a) {
return this.F.isZero(a[0]);
}
affine(a) {
return [this.F.div(a[0], a[1]), this.F.one];
}
toString(a) {
const ca = this.affine(a);
return `"0x${ca[0].toString(16)}"`;
}
random() {
return [this.F.random(), this.F.one];
}
fromF(a) {
return [a, this.F.one];
}
toF(a) {
return this.affine(a)[0];
}
}
module.exports = RatField;
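A minimal usage sketch for the class above, wired through the BN128 and ZqField modules that the provers in this tree already require (illustrative only):
const BN128 = require("./bn128.js");
const ZqField = require("./zqfield.js");
const RatField = require("./ratfield.js");
const R = new RatField(new ZqField(new BN128().r));
const a = R.random();
const prod = R.mul(a, R.inverse(a));   // inverse() just swaps numerator and denominator
console.log(R.equals(prod, R.one));    // true: equals() cross-multiplies, no field inversion
// A single inversion is paid only when affine() normalizes a result.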

212
src/setup_groth.js Normal file

@ -0,0 +1,212 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const bigInt = require("./bigint.js");
const BN128 = require("./bn128.js");
const PolField = require("./polfield.js");
const ZqField = require("./zqfield.js");
const bn128 = new BN128();
const G1 = bn128.G1;
const G2 = bn128.G2;
const PolF = new PolField(new ZqField(bn128.r));
const F = new ZqField(bn128.r);
module.exports = function setup(circuit) {
const setup = {
vk_proof : {
protocol: "groth",
nVars: circuit.nVars,
nPublic: circuit.nPubInputs + circuit.nOutputs
},
vk_verifier: {
protocol: "groth",
nPublic: circuit.nPubInputs + circuit.nOutputs
},
toxic: {}
};
setup.vk_proof.domainBits = PolF.log2(circuit.nConstraints + circuit.nPubInputs + circuit.nOutputs +1 -1) +1;
setup.vk_proof.domainSize = 1 << setup.vk_proof.domainBits;
calculatePolinomials(setup, circuit);
setup.toxic.t = F.random();
calculateEncriptedValuesAtT(setup, circuit);
return setup;
};
function calculatePolinomials(setup, circuit) {
setup.vk_proof.polsA = new Array(circuit.nVars);
setup.vk_proof.polsB = new Array(circuit.nVars);
setup.vk_proof.polsC = new Array(circuit.nVars);
for (let i=0; i<circuit.nVars; i++) {
setup.vk_proof.polsA[i] = {};
setup.vk_proof.polsB[i] = {};
setup.vk_proof.polsC[i] = {};
}
for (let c=0; c<circuit.nConstraints; c++) {
for (let s in circuit.constraints[c][0]) {
setup.vk_proof.polsA[s][c] = bigInt(circuit.constraints[c][0][s]);
}
for (let s in circuit.constraints[c][1]) {
setup.vk_proof.polsB[s][c] = bigInt(circuit.constraints[c][1][s]);
}
for (let s in circuit.constraints[c][2]) {
setup.vk_proof.polsC[s][c] = bigInt(circuit.constraints[c][2][s]);
}
}
/**
* add and process the constraints
* input_i * 0 = 0
* to ensure soundness of input consistency
*/
for (let i = 0; i < circuit.nPubInputs + circuit.nOutputs + 1; ++i)
{
setup.vk_proof.polsA[i][circuit.nConstraints + i] = F.one;
}
}
function calculateValuesAtT(setup, circuit) {
const z_t = PolF.computeVanishingPolinomial(setup.vk_proof.domainBits, setup.toxic.t);
const u = PolF.evaluateLagrangePolynomials(setup.vk_proof.domainBits, setup.toxic.t);
const a_t = new Array(circuit.nVars).fill(F.zero);
const b_t = new Array(circuit.nVars).fill(F.zero);
const c_t = new Array(circuit.nVars).fill(F.zero);
// TODO: substitute setup.polsA for coefficients
for (let s=0; s<circuit.nVars; s++) {
for (let c in setup.vk_proof.polsA[s]) {
a_t[s] = F.add(a_t[s], F.mul(u[c], setup.vk_proof.polsA[s][c]));
}
for (let c in setup.vk_proof.polsB[s]) {
b_t[s] = F.add(b_t[s], F.mul(u[c], setup.vk_proof.polsB[s][c]));
}
for (let c in setup.vk_proof.polsC[s]) {
c_t[s] = F.add(c_t[s], F.mul(u[c], setup.vk_proof.polsC[s][c]));
}
}
return {a_t, b_t, c_t, z_t};
}
function calculateEncriptedValuesAtT(setup, circuit) {
const v = calculateValuesAtT(setup, circuit);
setup.vk_proof.A = new Array(circuit.nVars);
setup.vk_proof.B1 = new Array(circuit.nVars);
setup.vk_proof.B2 = new Array(circuit.nVars);
setup.vk_proof.C = new Array(circuit.nVars);
setup.vk_verifier.IC = new Array(circuit.nPublic);
setup.toxic.kalfa = F.random();
setup.toxic.kbeta = F.random();
setup.toxic.kgamma = F.random();
setup.toxic.kdelta = F.random();
let invDelta = F.inverse(setup.toxic.kdelta);
let invGamma = F.inverse(setup.toxic.kgamma);
setup.vk_proof.vk_alfa_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kalfa));
setup.vk_proof.vk_beta_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kbeta));
setup.vk_proof.vk_delta_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kdelta));
setup.vk_proof.vk_beta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kbeta));
setup.vk_proof.vk_delta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kdelta));
setup.vk_verifier.vk_alfa_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kalfa));
setup.vk_verifier.vk_beta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kbeta));
setup.vk_verifier.vk_gamma_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kgamma));
setup.vk_verifier.vk_delta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kdelta));
setup.vk_verifier.vk_alfabeta_12 = bn128.F12.affine(bn128.pairing( setup.vk_verifier.vk_alfa_1 , setup.vk_verifier.vk_beta_2 ));
for (let s=0; s<circuit.nVars; s++) {
const A = G1.affine(G1.mulScalar(G1.g, v.a_t[s]));
setup.vk_proof.A[s] = A;
const B1 = G1.affine(G1.mulScalar(G1.g, v.b_t[s]));
setup.vk_proof.B1[s] = B1;
const B2 = G2.affine(G2.mulScalar(G2.g, v.b_t[s]));
setup.vk_proof.B2[s] = B2;
}
for (let s=0; s<=setup.vk_proof.nPublic; s++) {
let ps =
F.mul(
invGamma,
F.add(
F.add(
F.mul(v.a_t[s], setup.toxic.kbeta),
F.mul(v.b_t[s], setup.toxic.kalfa)),
v.c_t[s]));
const IC = G1.affine(G1.mulScalar(G1.g, ps));
setup.vk_verifier.IC[s]=IC;
}
for (let s=setup.vk_proof.nPublic+1; s<circuit.nVars; s++) {
let ps =
F.mul(
invDelta,
F.add(
F.add(
F.mul(v.a_t[s], setup.toxic.kbeta),
F.mul(v.b_t[s], setup.toxic.kalfa)),
v.c_t[s]));
const C = G1.affine(G1.mulScalar(G1.g, ps));
setup.vk_proof.C[s]=C;
}
// Calculate HExps
const maxH = setup.vk_proof.domainSize+1;
setup.vk_proof.hExps = new Array(maxH);
const zod = F.mul(invDelta, v.z_t);
setup.vk_proof.hExps[0] = G1.affine(G1.mulScalar(G1.g, zod));
let eT = setup.toxic.t;
for (let i=1; i<maxH; i++) {
setup.vk_proof.hExps[i] = G1.affine(G1.mulScalar(G1.g, F.mul(eT, zod)));
eT = F.mul(eT, setup.toxic.t);
}
}
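For reference only (not part of the diff): writing α, β, γ, δ for the toxic kalfa/kbeta/kgamma/kdelta, $a_i(t)$, $b_i(t)$, $c_i(t)$ for the QAP evaluations and $Z(t)$ for the vanishing polynomial, the loops above compute the usual Groth16 key elements

$$\mathrm{IC}_i=\frac{\beta a_i(t)+\alpha b_i(t)+c_i(t)}{\gamma}\,G_1\quad(i\le n_{\mathrm{pub}}),\qquad C_i=\frac{\beta a_i(t)+\alpha b_i(t)+c_i(t)}{\delta}\,G_1\quad(i> n_{\mathrm{pub}}),\qquad \mathrm{hExps}_k=\frac{t^{k}Z(t)}{\delta}\,G_1 .$$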

233
src/setup_kimleeoh.js Normal file
View File

@ -0,0 +1,233 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const bigInt = require("./bigint.js");
const BN128 = require("./bn128.js");
const PolField = require("./polfield.js");
const ZqField = require("./zqfield.js");
const bn128 = new BN128();
const G1 = bn128.G1;
const G2 = bn128.G2;
const PolF = new PolField(new ZqField(bn128.r));
const F = new ZqField(bn128.r);
module.exports = function setup(circuit) {
const setup = {
vk_proof : {
protocol: "groth",
nVars: circuit.nVars,
nPublic: circuit.nPubInputs + circuit.nOutputs
},
vk_verifier: {
protocol: "groth",
nPublic: circuit.nPubInputs + circuit.nOutputs
},
toxic: {}
};
setup.vk_proof.domainBits = PolF.log2(circuit.nConstraints + circuit.nPubInputs + circuit.nOutputs +1 -1) +1;
setup.vk_proof.domainSize = 1 << setup.vk_proof.domainBits;
calculatePolinomials(setup, circuit);
setup.toxic.t = F.random();
calculateEncriptedValuesAtT(setup, circuit);
return setup;
};
function calculatePolinomials(setup, circuit) {
setup.vk_proof.polsA = new Array(circuit.nVars);
setup.vk_proof.polsB = new Array(circuit.nVars);
setup.vk_proof.polsC = new Array(circuit.nVars);
for (let i=0; i<circuit.nVars; i++) {
setup.vk_proof.polsA[i] = {};
setup.vk_proof.polsB[i] = {};
setup.vk_proof.polsC[i] = {};
}
for (let c=0; c<circuit.nConstraints; c++) {
for (let s in circuit.constraints[c][0]) {
setup.vk_proof.polsA[s][c] = bigInt(circuit.constraints[c][0][s]);
}
for (let s in circuit.constraints[c][1]) {
setup.vk_proof.polsB[s][c] = bigInt(circuit.constraints[c][1][s]);
}
for (let s in circuit.constraints[c][2]) {
setup.vk_proof.polsC[s][c] = bigInt(circuit.constraints[c][2][s]);
}
}
/**
* add and process the constraints
* input_i * 0 = 0
* to ensure soundness of input consistency
*/
for (let i = 0; i < circuit.nPubInputs + circuit.nOutputs + 1; ++i)
{
setup.vk_proof.polsA[i][circuit.nConstraints + i] = F.one;
}
}
function calculateValuesAtT(setup, circuit) {
const z_t = PolF.computeVanishingPolinomial(setup.vk_proof.domainBits, setup.toxic.t);
const u = PolF.evaluateLagrangePolynomials(setup.vk_proof.domainBits, setup.toxic.t);
const a_t = new Array(circuit.nVars).fill(F.zero);
const b_t = new Array(circuit.nVars).fill(F.zero);
const c_t = new Array(circuit.nVars).fill(F.zero);
// TODO: substitute setup.polsA for coefficients
for (let s=0; s<circuit.nVars; s++) {
for (let c in setup.vk_proof.polsA[s]) {
a_t[s] = F.add(a_t[s], F.mul(u[c], setup.vk_proof.polsA[s][c]));
}
for (let c in setup.vk_proof.polsB[s]) {
b_t[s] = F.add(b_t[s], F.mul(u[c], setup.vk_proof.polsB[s][c]));
}
for (let c in setup.vk_proof.polsC[s]) {
c_t[s] = F.add(c_t[s], F.mul(u[c], setup.vk_proof.polsC[s][c]));
}
}
return {a_t, b_t, c_t, z_t};
}
function calculateEncriptedValuesAtT(setup, circuit) {
const v = calculateValuesAtT(setup, circuit);
setup.vk_proof.A = new Array(circuit.nVars);
setup.vk_proof.Adelta = new Array(circuit.nVars);
setup.vk_proof.B1 = new Array(circuit.nVars);
setup.vk_proof.B2 = new Array(circuit.nVars);
setup.vk_proof.C = new Array(circuit.nVars);
setup.vk_verifier.IC = new Array(circuit.nPublic);
setup.toxic.kalfa = F.random();
setup.toxic.kbeta = F.random();
setup.toxic.kgamma = F.random();
setup.toxic.kdelta = F.random();
const gammaSquare = F.mul(setup.toxic.kgamma, setup.toxic.kgamma);
setup.vk_proof.vk_alfa_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kalfa));
setup.vk_proof.vk_beta_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kbeta));
setup.vk_proof.vk_delta_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kdelta));
setup.vk_proof.vk_alfadelta_1 = G1.affine(G1.mulScalar( G1.g, F.mul(setup.toxic.kalfa, setup.toxic.kdelta)));
setup.vk_proof.vk_beta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kbeta));
setup.vk_verifier.vk_alfa_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kalfa));
setup.vk_verifier.vk_beta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kbeta));
setup.vk_verifier.vk_gamma_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kgamma));
setup.vk_verifier.vk_delta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kdelta));
setup.vk_verifier.vk_alfabeta_12 = bn128.F12.affine(bn128.pairing( setup.vk_verifier.vk_alfa_1 , setup.vk_verifier.vk_beta_2 ));
for (let s=0; s<circuit.nVars; s++) {
const A = G1.affine(G1.mulScalar(G1.g, F.mul(setup.toxic.kgamma, v.a_t[s])));
setup.vk_proof.A[s] = A;
setup.vk_proof.Adelta[s] = G1.affine(G1.mulScalar(A, setup.toxic.kdelta));
const B1 = G1.affine(G1.mulScalar(G1.g, F.mul(setup.toxic.kgamma, v.b_t[s])));
setup.vk_proof.B1[s] = B1;
const B2 = G2.affine(G2.mulScalar(G2.g, F.mul(setup.toxic.kgamma, v.b_t[s])));
setup.vk_proof.B2[s] = B2;
}
for (let s=0; s<=setup.vk_proof.nPublic; s++) {
let ps =
F.add(
F.mul(
setup.toxic.kgamma,
v.c_t[s]
),
F.add(
F.mul(
setup.toxic.kbeta,
v.a_t[s]
),
F.mul(
setup.toxic.kalfa,
v.b_t[s]
)
)
);
const IC = G1.affine(G1.mulScalar(G1.g, ps));
setup.vk_verifier.IC[s]=IC;
}
for (let s=setup.vk_proof.nPublic+1; s<circuit.nVars; s++) {
let ps =
F.add(
F.mul(
gammaSquare,
v.c_t[s]
),
F.add(
F.mul(
F.mul(setup.toxic.kbeta, setup.toxic.kgamma),
v.a_t[s]
),
F.mul(
F.mul(setup.toxic.kalfa, setup.toxic.kgamma),
v.b_t[s]
)
)
);
const C = G1.affine(G1.mulScalar(G1.g, ps));
setup.vk_proof.C[s]=C;
}
// Calculate HExps
const maxH = setup.vk_proof.domainSize+1;
setup.vk_proof.hExps = new Array(maxH);
const zod = F.mul(gammaSquare, v.z_t);
setup.vk_proof.hExps[0] = G1.affine(G1.mulScalar(G1.g, zod));
let eT = setup.toxic.t;
for (let i=1; i<maxH; i++) {
setup.vk_proof.hExps[i] = G1.affine(G1.mulScalar(G1.g, F.mul(eT, zod)));
eT = F.mul(eT, setup.toxic.t);
}
}
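For reference only: reading the loops above, this variant blinds the bases with γ instead of dividing by γ and δ; with the same notation as for setup_groth.js,

$$A_i=\gamma\,a_i(t)\,G_1,\qquad \mathrm{IC}_i=\big(\gamma\,c_i(t)+\beta\,a_i(t)+\alpha\,b_i(t)\big)G_1,\qquad C_i=\gamma\big(\gamma\,c_i(t)+\beta\,a_i(t)+\alpha\,b_i(t)\big)G_1,\qquad \mathrm{hExps}_k=\gamma^{2}\,t^{k}Z(t)\,G_1,$$

and δ only enters through vk_delta_1/vk_delta_2, vk_alfadelta_1 and Adelta_i = δ·A_i.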

238
src/setup_original.js Normal file
View File

@ -0,0 +1,238 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const bigInt = require("./bigint.js");
const BN128 = require("./bn128.js");
const PolField = require("./polfield.js");
const ZqField = require("./zqfield.js");
const bn128 = new BN128();
const G1 = bn128.G1;
const G2 = bn128.G2;
const PolF = new PolField(new ZqField(bn128.r));
const F = new ZqField(bn128.r);
module.exports = function setup(circuit) {
const setup = {
vk_proof : {
protocol: "original",
nVars: circuit.nVars,
nPublic: circuit.nPubInputs + circuit.nOutputs
},
vk_verifier: {
protocol: "original",
nPublic: circuit.nPubInputs + circuit.nOutputs
},
toxic: {}
};
setup.vk_proof.domainBits = PolF.log2(circuit.nConstraints + circuit.nPubInputs + circuit.nOutputs +1 -1) +1;
setup.vk_proof.domainSize = 1 << setup.vk_proof.domainBits;
calculatePolinomials(setup, circuit);
setup.toxic.t = F.random();
calculateEncriptedValuesAtT(setup, circuit);
calculateHexps(setup, circuit);
return setup;
};
function calculatePolinomials(setup, circuit) {
setup.vk_proof.polsA = new Array(circuit.nVars);
setup.vk_proof.polsB = new Array(circuit.nVars);
setup.vk_proof.polsC = new Array(circuit.nVars);
for (let i=0; i<circuit.nVars; i++) {
setup.vk_proof.polsA[i] = {};
setup.vk_proof.polsB[i] = {};
setup.vk_proof.polsC[i] = {};
}
for (let c=0; c<circuit.nConstraints; c++) {
for (let s in circuit.constraints[c][0]) {
setup.vk_proof.polsA[s][c] = bigInt(circuit.constraints[c][0][s]);
}
for (let s in circuit.constraints[c][1]) {
setup.vk_proof.polsB[s][c] = bigInt(circuit.constraints[c][1][s]);
}
for (let s in circuit.constraints[c][2]) {
setup.vk_proof.polsC[s][c] = bigInt(circuit.constraints[c][2][s]);
}
}
/**
* add and process the constraints
* input_i * 0 = 0
* to ensure soundness of input consistency
*/
for (let i = 0; i < circuit.nPubInputs + circuit.nOutputs + 1; ++i)
{
setup.vk_proof.polsA[i][circuit.nConstraints + i] = F.one;
}
}
function calculateValuesAtT(setup, circuit) {
const z_t = PolF.computeVanishingPolinomial(setup.vk_proof.domainBits, setup.toxic.t);
const u = PolF.evaluateLagrangePolynomials(setup.vk_proof.domainBits, setup.toxic.t);
const a_t = new Array(circuit.nVars).fill(F.zero);
const b_t = new Array(circuit.nVars).fill(F.zero);
const c_t = new Array(circuit.nVars).fill(F.zero);
// TODO: substitute setup.polsA for coefficients
for (let s=0; s<circuit.nVars; s++) {
for (let c in setup.vk_proof.polsA[s]) {
a_t[s] = F.add(a_t[s], F.mul(u[c], setup.vk_proof.polsA[s][c]));
}
for (let c in setup.vk_proof.polsB[s]) {
b_t[s] = F.add(b_t[s], F.mul(u[c], setup.vk_proof.polsB[s][c]));
}
for (let c in setup.vk_proof.polsC[s]) {
c_t[s] = F.add(c_t[s], F.mul(u[c], setup.vk_proof.polsC[s][c]));
}
}
return {a_t, b_t, c_t, z_t};
}
function calculateEncriptedValuesAtT(setup, circuit) {
const v = calculateValuesAtT(setup, circuit);
setup.vk_proof.A = new Array(circuit.nVars+1);
setup.vk_proof.B = new Array(circuit.nVars+1);
setup.vk_proof.C = new Array(circuit.nVars+1);
setup.vk_proof.Ap = new Array(circuit.nVars+1);
setup.vk_proof.Bp = new Array(circuit.nVars+1);
setup.vk_proof.Cp = new Array(circuit.nVars+1);
setup.vk_proof.Kp = new Array(circuit.nVars+3);
setup.vk_verifier.IC = new Array(circuit.nPublic);
setup.toxic.ka = F.random();
setup.toxic.kb = F.random();
setup.toxic.kc = F.random();
setup.toxic.ra = F.random();
setup.toxic.rb = F.random();
setup.toxic.rc = F.mul(setup.toxic.ra, setup.toxic.rb);
setup.toxic.kbeta = F.random();
setup.toxic.kgamma = F.random();
const gb = F.mul(setup.toxic.kbeta, setup.toxic.kgamma);
setup.vk_verifier.vk_a = G2.affine(G2.mulScalar( G2.g, setup.toxic.ka));
setup.vk_verifier.vk_b = G1.affine(G1.mulScalar( G1.g, setup.toxic.kb));
setup.vk_verifier.vk_c = G2.affine(G2.mulScalar( G2.g, setup.toxic.kc));
setup.vk_verifier.vk_gb_1 = G1.affine(G1.mulScalar( G1.g, gb));
setup.vk_verifier.vk_gb_2 = G2.affine(G2.mulScalar( G2.g, gb));
setup.vk_verifier.vk_g = G2.affine(G2.mulScalar( G2.g, setup.toxic.kgamma));
for (let s=0; s<circuit.nVars; s++) {
// A[i] = G1 * polA(t)
const raat = F.mul(setup.toxic.ra, v.a_t[s]);
const A = G1.affine(G1.mulScalar(G1.g, raat));
setup.vk_proof.A[s] = A;
if (s <= setup.vk_proof.nPublic) {
setup.vk_verifier.IC[s]=A;
}
// B1[i] = G1 * polB(t)
const rbbt = F.mul(setup.toxic.rb, v.b_t[s]);
const B1 = G1.affine(G1.mulScalar(G1.g, rbbt));
// B2[i] = G2 * polB(t)
const B2 = G2.affine(G2.mulScalar(G2.g, rbbt));
setup.vk_proof.B[s]=B2;
// C[i] = G1 * polC(t)
const rcct = F.mul(setup.toxic.rc, v.c_t[s]);
const C = G1.affine(G1.mulScalar( G1.g, rcct));
setup.vk_proof.C[s] =C;
// K = G1 * (A+B+C)
const kt = F.affine(F.add(F.add(raat, rbbt), rcct));
const K = G1.affine(G1.mulScalar( G1.g, kt));
/*
// These checks are commented out to speed up the setup; uncomment to verify K
const Ktest = G1.affine(G1.add(G1.add(A, B1), C));
if (!G1.equals(K, Ktest)) {
console.log ("=====FAIL======");
}
*/
if (s > setup.vk_proof.nPublic) {
setup.vk_proof.Ap[s] = G1.affine(G1.mulScalar(A, setup.toxic.ka));
}
setup.vk_proof.Bp[s] = G1.affine(G1.mulScalar(B1, setup.toxic.kb));
setup.vk_proof.Cp[s] = G1.affine(G1.mulScalar(C, setup.toxic.kc));
setup.vk_proof.Kp[s] = G1.affine(G1.mulScalar(K, setup.toxic.kbeta));
}
// Extra coefficients
const A = G1.mulScalar( G1.g, F.mul(setup.toxic.ra, v.z_t));
setup.vk_proof.A[circuit.nVars] = G1.affine(A);
setup.vk_proof.Ap[circuit.nVars] = G1.affine(G1.mulScalar(A, setup.toxic.ka));
const B1 = G1.mulScalar( G1.g, F.mul(setup.toxic.rb, v.z_t));
const B2 = G2.mulScalar( G2.g, F.mul(setup.toxic.rb, v.z_t));
setup.vk_proof.B[circuit.nVars] = G2.affine(B2);
setup.vk_proof.Bp[circuit.nVars] = G1.affine(G1.mulScalar(B1, setup.toxic.kb));
const C = G1.mulScalar( G1.g, F.mul(setup.toxic.rc, v.z_t));
setup.vk_proof.C[circuit.nVars] = G1.affine(C);
setup.vk_proof.Cp[circuit.nVars] = G1.affine(G1.mulScalar(C, setup.toxic.kc));
setup.vk_proof.Kp[circuit.nVars ] = G1.affine(G1.mulScalar(A, setup.toxic.kbeta));
setup.vk_proof.Kp[circuit.nVars+1] = G1.affine(G1.mulScalar(B1, setup.toxic.kbeta));
setup.vk_proof.Kp[circuit.nVars+2] = G1.affine(G1.mulScalar(C, setup.toxic.kbeta));
// setup.vk_verifier.A[0] = G1.affine(G1.add(setup.vk_verifier.A[0], setup.vk_proof.A[circuit.nVars]));
// vk_z
setup.vk_verifier.vk_z = G2.affine(G2.mulScalar(
G2.g,
F.mul(setup.toxic.rc, v.z_t)));
}
function calculateHexps(setup) {
const maxH = setup.vk_proof.domainSize+1;
setup.vk_proof.hExps = new Array(maxH);
setup.vk_proof.hExps[0] = G1.g;
let eT = setup.toxic.t;
for (let i=1; i<maxH; i++) {
setup.vk_proof.hExps[i] = G1.affine(G1.mulScalar(G1.g, eT));
eT = F.mul(eT, setup.toxic.t);
}
}

View File

@ -1,22 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
export {default as genGroth16Verifier} from "./solidity_gengroth16verifier.js";
export {default as prove} from "./groth16_prove.js";
export {default as validate} from "./groth16_verify.js";

55
src/stringifybigint.js Normal file
View File

@ -0,0 +1,55 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const bigInt = require("./bigint.js");
module.exports.stringifyBigInts = stringifyBigInts;
module.exports.unstringifyBigInts = unstringifyBigInts;
function stringifyBigInts(o) {
if ((typeof(o) == "bigint") || o.isZero !== undefined) {
return o.toString(10);
} else if (Array.isArray(o)) {
return o.map(stringifyBigInts);
} else if (typeof o == "object") {
const res = {};
for (let k in o) {
res[k] = stringifyBigInts(o[k]);
}
return res;
} else {
return o;
}
}
function unstringifyBigInts(o) {
if ((typeof(o) == "string") && (/^[0-9]+$/.test(o) )) {
return bigInt(o);
} else if (Array.isArray(o)) {
return o.map(unstringifyBigInts);
} else if (typeof o == "object") {
const res = {};
for (let k in o) {
res[k] = unstringifyBigInts(o[k]);
}
return res;
} else {
return o;
}
}
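A small usage sketch (not part of the diff); the only assumption is that ./bigint.js returns values with a decimal toString, so round-tripping through JSON preserves them as strings and back.

const { stringifyBigInts, unstringifyBigInts } = require("./stringifybigint.js");

const proof = { protocol: "original", pi_a: [ 123456789012345678901234567890n, 1n ] };
const json = JSON.stringify(stringifyBigInts(proof));        // bigints become decimal strings
const back = unstringifyBigInts(JSON.parse(json));           // decimal strings become bigInt again
console.log(json);
console.log(back.pi_a[0].toString(10));                      // "123456789012345678901234567890"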

View File

@ -1,232 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
const inBrowser = (typeof window !== "undefined");
let NodeWorker;
if (!inBrowser) {
NodeWorker = require("worker_threads").Worker;
}
class Deferred {
constructor() {
this.promise = new Promise((resolve, reject)=> {
this.reject = reject;
this.resolve = resolve;
});
}
}
function thread(self, fn, modules) {
const ctx = {
modules: modules
};
self.onmessage = function(e) {
let data;
if (e.data) {
data = e.data;
} else {
data = e;
}
if (data.cmd == "INIT") {
ctx.processId = data.processId;
}
if (data.cmd == "TERMINATE") {
self.postMessage({cmd: "TERMINATE"});
process.exit();
return;
}
let res = fn(ctx, data);
res = res || {};
res.cmd = data.cmd;
if (res) {
if (res.buff) {
self.postMessage(res, [res.buff.buffer]);
} else {
self.postMessage(res);
}
}
};
}
async function buildTaskManager(fn, mods, initTask) {
let concurrency;
if ((typeof(navigator) === "object") && navigator.hardwareConcurrency) {
concurrency = navigator.hardwareConcurrency;
} else {
const os = require("os");
concurrency = os.cpus().length;
}
const tm = {
workers: []
};
let S = "{";
const keys = Object.keys(mods);
for (let i=0; i<keys.length; i++) {
const key= keys[i];
S += `${key}: require('${mods[key]}'), `;
}
S += "}";
function getOnMsg(i) {
return function(e) {
function finishTask() {
if ( (tm.waitingTask && tm.terminateDeferred))
throw new Error("It can not be a waiting task and it's terminating");
if (tm.terminateDeferred) {
tm.workers[i].worker.postMessage({cmd: "TERMINATE"});
return;
}
tm.workers[i].state = "READY";
if (tm.waitingTask) {
processTask(i, tm.waitingTask.task, tm.waitingTask.asyncCb);
const d = tm.waitingTask.deferral;
tm.waitingTask = null;
d.resolve();
}
}
let data;
if ((e)&&(e.data)) {
data = e.data;
} else {
data = e;
}
if (data.cmd == "TERMINATE") {
tm.workers[i].state = "TERMINATED";
tm.tryTerminate();
return;
}
if (tm.workers[i].asyncCb) {
tm.workers[i].asyncCb(data).then(()=> {
finishTask();
});
} else {
finishTask();
}
};
}
function processTask(i, task, asyncCb) {
if (tm.workers[i].state != "READY")
throw new Error("Worker is not ready");
tm.workers[i].asyncCb = asyncCb;
tm.workers[i].state = "WORKING";
if (task.buff) {
tm.workers[i].worker.postMessage(task, [task.buff.buffer]);
} else {
tm.workers[i].worker.postMessage(task);
}
}
for (let i=0; i<concurrency; i++) {
const worker = new NodeWorker(`(${thread.toString()})(require('worker_threads').parentPort, ${fn.toString()},${S});`, {eval: true});
worker.on("message", getOnMsg(i));
tm.workers[i] = {
state: "READY",
worker: worker,
taskPromise: null
};
}
for (let i=0; i<concurrency; i++) {
initTask.cmd = "INIT";
initTask.processId = i;
processTask(i, initTask);
}
tm.finish = function() {
const self = this;
if (self.terminateDeferred != null)
throw new Error("Task manager already terminated");
self.terminateDeferred = new Deferred();
for (let i=0; i<concurrency; i++) {
if (self.workers[i].state == "READY") {
self.workers[i].worker.postMessage({cmd: "TERMINATE"});
}
}
return self.terminateDeferred.promise;
};
tm.addTask = function (task, asyncCb) {
const self = this;
if (self.waitingTask) throw new Error("Waiting task pending");
if (self.terminateDeferred) throw new Error("New task after task manager terminated");
const deferral = new Deferred();
let i;
for (i=0; i<tm.workers.length; i++) {
if (self.workers[i].state == "READY") break;
}
if (i<tm.workers.length) {
processTask(i, task, asyncCb);
deferral.resolve();
} else {
self.waitingTask = {
task: task,
deferral: deferral,
asyncCb: asyncCb
};
}
return deferral.promise;
};
tm.tryTerminate = function() {
const self = this;
if (!self.terminateDeferred) return;
for (let i=0; i<concurrency; i++) {
if (self.workers[i].state != "TERMINATED") return;
}
self.terminateDeferred.resolve();
};
return tm;
}
module.exports = buildTaskManager;
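A hypothetical sketch of driving the task manager above from Node.js (not part of the diff). The worker function is serialized with toString(), so it must be self-contained; the example squares a number in a worker and then tears the pool down.

const buildTaskManager = require("./taskmanager.js");

async function main() {
    // fn runs inside every worker; it only sees ctx and the posted task object.
    const tm = await buildTaskManager(
        function square(ctx, task) {
            if (task.cmd == "INIT") return {};               // worker bootstrap message
            return { result: task.value * task.value };      // cmd is echoed back automatically
        },
        {},                                                  // no modules injected into ctx.modules
        {}                                                   // init task; cmd/processId are filled in
    );

    await tm.addTask({ cmd: "SQUARE", value: 7 }, async (res) => {
        console.log("7^2 =", res.result);
    });

    await tm.finish();                                       // terminate all workers
}

main();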

67
src/verifier.js Normal file
View File

@ -0,0 +1,67 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const BN128 = require("./bn128.js");
const bn128 = new BN128();
const G1 = bn128.G1;
const G2 = bn128.G2;
module.exports = function isValid(vk_verifier, proof, publicSignals) {
let full_pi_a = vk_verifier.A[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
full_pi_a = G1.add( full_pi_a, G1.mulScalar( vk_verifier.A[s+1], publicSignals[s]));
}
full_pi_a = G1.add( full_pi_a, proof.pi_a);
if (! bn128.F12.equals(
bn128.pairing( proof.pi_a , vk_verifier.vk_a ),
bn128.pairing( proof.pi_ap , G2.g )))
return false;
if (! bn128.F12.equals(
bn128.pairing( vk_verifier.vk_b, proof.pi_b ),
bn128.pairing( proof.pi_bp , G2.g )))
return false;
if (! bn128.F12.equals(
bn128.pairing( proof.pi_c , vk_verifier.vk_c ),
bn128.pairing( proof.pi_cp , G2.g )))
return false;
if (! bn128.F12.equals(
bn128.F12.mul(
bn128.pairing( G1.add(full_pi_a, proof.pi_c) , vk_verifier.vk_gb_2 ),
bn128.pairing( vk_verifier.vk_gb_1 , proof.pi_b ),
),
bn128.pairing( proof.pi_kp , vk_verifier.vk_g )))
return false;
if (! bn128.F12.equals(
bn128.pairing( full_pi_a , proof.pi_b ),
bn128.F12.mul(
bn128.pairing( proof.pi_h , vk_verifier.vk_z ),
bn128.pairing( proof.pi_c , G2.g ),
)))
return false;
return true;
};

46
src/verifier_groth.js Normal file
View File

@ -0,0 +1,46 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const BN128 = require("./bn128.js");
const bn128 = new BN128();
const G1 = bn128.G1;
module.exports = function isValid(vk_verifier, proof, publicSignals) {
let cpub = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
cpub = G1.add( cpub, G1.mulScalar( vk_verifier.IC[s+1], publicSignals[s]));
}
if (! bn128.F12.equals(
bn128.pairing( proof.pi_a , proof.pi_b ),
bn128.F12.mul(
vk_verifier.vk_alfabeta_12,
bn128.F12.mul(
bn128.pairing( cpub , vk_verifier.vk_gamma_2 ),
bn128.pairing( proof.pi_c , vk_verifier.vk_delta_2 )
))))
return false;
return true;
};
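The single check above is the Groth16 verification equation; with $\mathrm{cpub}=\mathrm{IC}_0+\sum_i s_i\,\mathrm{IC}_{i+1}$ accumulated from the public signals $s_i$, it reads

$$e(\pi_A,\pi_B)\;=\;e(\alpha_1,\beta_2)\cdot e(\mathrm{cpub},\gamma_2)\cdot e(\pi_C,\delta_2),$$

where $e(\alpha_1,\beta_2)$ is the precomputed vk_alfabeta_12.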

75
src/verifier_kimleeoh.js Normal file
View File

@ -0,0 +1,75 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const BN128 = require("./bn128.js");
const createKeccakHash = require("keccak");
const bigInt = require("./bigint");
const bn128 = new BN128();
const G1 = bn128.G1;
const G2 = bn128.G2;
module.exports = function isValid(vk_verifier, proof, publicSignals) {
let cpub = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
cpub = G1.add( cpub, G1.mulScalar( vk_verifier.IC[s+1], publicSignals[s]));
}
const buff = Buffer.concat([
proof.pi_a[0].beInt2Buff(32),
proof.pi_a[1].beInt2Buff(32),
proof.pi_b[0][0].beInt2Buff(32),
proof.pi_b[0][1].beInt2Buff(32),
proof.pi_b[1][0].beInt2Buff(32),
proof.pi_b[1][1].beInt2Buff(32)
]);
const h1buff = createKeccakHash("keccak256").update(buff).digest();
const h2buff = createKeccakHash("keccak256").update(h1buff).digest();
const h1 = bigInt.beBuff2int(h1buff);
const h2 = bigInt.beBuff2int(h2buff);
// const h1 = bigInt.zero;
// const h2 = bigInt.zero;
console.log(h1.toString());
console.log(h2.toString());
if (! bn128.F12.equals(
bn128.pairing(
G1.add(proof.pi_a, G1.mulScalar(G1.g, h1)),
G2.add(proof.pi_b, G2.mulScalar(vk_verifier.vk_delta_2, h2))
),
bn128.F12.mul(
vk_verifier.vk_alfabeta_12,
bn128.F12.mul(
bn128.pairing( cpub , vk_verifier.vk_gamma_2 ),
bn128.pairing( proof.pi_c , G2.g )
))))
return false;
return true;
};

67
src/verifier_original.js Normal file
View File

@ -0,0 +1,67 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const BN128 = require("./bn128.js");
const bn128 = new BN128();
const G1 = bn128.G1;
const G2 = bn128.G2;
module.exports = function isValid(vk_verifier, proof, publicSignals) {
let full_pi_a = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
full_pi_a = G1.add( full_pi_a, G1.mulScalar( vk_verifier.IC[s+1], publicSignals[s]));
}
full_pi_a = G1.add( full_pi_a, proof.pi_a);
if (! bn128.F12.equals(
bn128.pairing( proof.pi_a , vk_verifier.vk_a ),
bn128.pairing( proof.pi_ap , G2.g )))
return false;
if (! bn128.F12.equals(
bn128.pairing( vk_verifier.vk_b, proof.pi_b ),
bn128.pairing( proof.pi_bp , G2.g )))
return false;
if (! bn128.F12.equals(
bn128.pairing( proof.pi_c , vk_verifier.vk_c ),
bn128.pairing( proof.pi_cp , G2.g )))
return false;
if (! bn128.F12.equals(
bn128.F12.mul(
bn128.pairing( G1.add(full_pi_a, proof.pi_c) , vk_verifier.vk_gb_2 ),
bn128.pairing( vk_verifier.vk_gb_1 , proof.pi_b )
),
bn128.pairing( proof.pi_kp , vk_verifier.vk_g )))
return false;
if (! bn128.F12.equals(
bn128.pairing( full_pi_a , proof.pi_b ),
bn128.F12.mul(
bn128.pairing( proof.pi_h , vk_verifier.vk_z ),
bn128.pairing( proof.pi_c , G2.g )
)))
return false;
return true;
};

View File

@ -1,22 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
export {default as calculate} from "./wtns_calculate.js";
export {default as debug} from "./wtns_debug.js";
export {default as exportJson} from "./wtns_export_json.js";

View File

@ -1,39 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import * as fastFile from "fastfile";
import { WitnessCalculatorBuilder } from "circom_runtime";
import * as wtnsUtils from "./wtns_utils.js";
import * as binFileUtils from "@iden3/binfileutils";
export default async function wtnsCalculate(input, wasmFileName, wtnsFileName, options) {
const fdWasm = await fastFile.readExisting(wasmFileName);
const wasm = await fdWasm.read(fdWasm.totalSize);
await fdWasm.close();
const wc = await WitnessCalculatorBuilder(wasm);
const w = await wc.calculateBinWitness(input, true);
const fdWtns = await binFileUtils.createBinFile(wtnsFileName, "wtns", 2, 2);
await wtnsUtils.writeBin(fdWtns, w, wc.prime);
await fdWtns.close();
}
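A hypothetical usage sketch (file names are placeholders): wtnsCalculate takes the input signals, the wasm produced by the circuit compiler, and the path of the witness file to write.

import wtnsCalculate from "./wtns_calculate.js";

await wtnsCalculate(
    { a: 3, b: 11 },             // circuit inputs: signal name -> value
    "circuit.wasm",              // wasm witness generator (placeholder path)
    "witness.wtns"               // output, written in wtns format version 2
);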

View File

@ -1,68 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import * as fastFile from "fastfile";
import { WitnessCalculatorBuilder } from "circom_runtime";
import * as wtnsUtils from "./wtns_utils.js";
import * as binFileUtils from "@iden3/binfileutils";
import loadSyms from "./loadsyms.js";
export default async function wtnsDebug(input, wasmFileName, wtnsFileName, symName, options, logger) {
const fdWasm = await fastFile.readExisting(wasmFileName);
const wasm = await fdWasm.read(fdWasm.totalSize);
await fdWasm.close();
let wcOps = {
sanityCheck: true
};
let sym = await loadSyms(symName);
if (options.set) {
if (!sym) sym = await loadSyms(symName);
wcOps.logSetSignal= function(labelIdx, value) {
if (logger) logger.info("SET " + sym.labelIdx2Name[labelIdx] + " <-- " + value.toString());
};
}
if (options.get) {
if (!sym) sym = await loadSyms(symName);
wcOps.logGetSignal= function(varIdx, value) {
if (logger) logger.info("GET " + sym.labelIdx2Name[varIdx] + " --> " + value.toString());
};
}
if (options.trigger) {
if (!sym) sym = await loadSyms(symName);
wcOps.logStartComponent= function(cIdx) {
if (logger) logger.info("START: " + sym.componentIdx2Name[cIdx]);
};
wcOps.logFinishComponent= function(cIdx) {
if (logger) logger.info("FINISH: " + sym.componentIdx2Name[cIdx]);
};
}
wcOps.sym = sym;
const wc = await WitnessCalculatorBuilder(wasm, wcOps);
const w = await wc.calculateWitness(input);
const fdWtns = await binFileUtils.createBinFile(wtnsFileName, "wtns", 2, 2);
await wtnsUtils.write(fdWtns, w, wc.prime);
await fdWtns.close();
}

View File

@ -1,27 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import {read} from "./wtns_utils.js";
export default async function wtnsExportJson(wtnsFileName) {
const w = await read(wtnsFileName);
return w;
}

View File

@ -1,92 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import { Scalar } from "ffjavascript";
import * as binFileUtils from "@iden3/binfileutils";
export async function write(fd, witness, prime) {
await binFileUtils.startWriteSection(fd, 1);
const n8 = (Math.floor( (Scalar.bitLength(prime) - 1) / 64) +1)*8;
await fd.writeULE32(n8);
await binFileUtils.writeBigInt(fd, prime, n8);
await fd.writeULE32(witness.length);
await binFileUtils.endWriteSection(fd);
await binFileUtils.startWriteSection(fd, 2);
for (let i=0; i<witness.length; i++) {
await binFileUtils.writeBigInt(fd, witness[i], n8);
}
await binFileUtils.endWriteSection(fd, 2);
}
export async function writeBin(fd, witnessBin, prime) {
await binFileUtils.startWriteSection(fd, 1);
const n8 = (Math.floor( (Scalar.bitLength(prime) - 1) / 64) +1)*8;
await fd.writeULE32(n8);
await binFileUtils.writeBigInt(fd, prime, n8);
if (witnessBin.byteLength % n8 != 0) {
throw new Error("Invalid witness length");
}
await fd.writeULE32(witnessBin.byteLength / n8);
await binFileUtils.endWriteSection(fd);
await binFileUtils.startWriteSection(fd, 2);
await fd.write(witnessBin);
await binFileUtils.endWriteSection(fd);
}
export async function readHeader(fd, sections) {
await binFileUtils.startReadUniqueSection(fd, sections, 1);
const n8 = await fd.readULE32();
const q = await binFileUtils.readBigInt(fd, n8);
const nWitness = await fd.readULE32();
await binFileUtils.endReadSection(fd);
return {n8, q, nWitness};
}
export async function read(fileName) {
const {fd, sections} = await binFileUtils.readBinFile(fileName, "wtns", 2);
const {n8, nWitness} = await readHeader(fd, sections);
await binFileUtils.startReadUniqueSection(fd, sections, 2);
const res = [];
for (let i=0; i<nWitness; i++) {
const v = await binFileUtils.readBigInt(fd, n8);
res.push(v);
}
await binFileUtils.endReadSection(fd);
await fd.close();
return res;
}

View File

@ -1,30 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
export {default as newZKey} from "./zkey_new.js";
export {default as exportBellman} from "./zkey_export_bellman.js";
export {default as importBellman} from "./zkey_import_bellman.js";
export {default as verifyFromR1cs} from "./zkey_verify_fromr1cs.js";
export {default as verifyFromInit} from "./zkey_verify_frominit.js";
export {default as contribute} from "./zkey_contribute.js";
export {default as beacon} from "./zkey_beacon.js";
export {default as exportJson} from "./zkey_export_json.js";
export {default as bellmanContribute} from "./zkey_bellman_contribute.js";
export {default as exportVerificationKey} from "./zkey_export_verificationkey.js";
export {default as exportSolidityVerifier} from "./zkey_export_solidityverifier.js";

View File

@ -1,132 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import * as binFileUtils from "@iden3/binfileutils";
import * as zkeyUtils from "./zkey_utils.js";
import { getCurveFromQ as getCurve } from "./curves.js";
import * as misc from "./misc.js";
import Blake2b from "blake2b-wasm";
import * as utils from "./zkey_utils.js";
import { hashToG2 as hashToG2 } from "./keypair.js";
import { applyKeyToSection } from "./mpc_applykey.js";
export default async function beacon(zkeyNameOld, zkeyNameNew, name, beaconHashStr, numIterationsExp, logger) {
await Blake2b.ready();
const beaconHash = misc.hex2ByteArray(beaconHashStr);
if ( (beaconHash.byteLength == 0)
|| (beaconHash.byteLength*2 !=beaconHashStr.length))
{
if (logger) logger.error("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
return false;
}
if (beaconHash.length>=256) {
if (logger) logger.error("Maximum lenght of beacon hash is 255 bytes");
return false;
}
numIterationsExp = parseInt(numIterationsExp);
if ((numIterationsExp<10)||(numIterationsExp>63)) {
if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)");
return false;
}
const {fd: fdOld, sections: sections} = await binFileUtils.readBinFile(zkeyNameOld, "zkey", 2);
const zkey = await zkeyUtils.readHeader(fdOld, sections);
if (zkey.protocol != "groth16") {
throw new Error("zkey file is not groth16");
}
const curve = await getCurve(zkey.q);
const mpcParams = await zkeyUtils.readMPCParams(fdOld, curve, sections);
const fdNew = await binFileUtils.createBinFile(zkeyNameNew, "zkey", 1, 10);
const rng = await misc.rngFromBeaconParams(beaconHash, numIterationsExp);
const transcriptHasher = Blake2b(64);
transcriptHasher.update(mpcParams.csHash);
for (let i=0; i<mpcParams.contributions.length; i++) {
utils.hashPubKey(transcriptHasher, curve, mpcParams.contributions[i]);
}
const curContribution = {};
curContribution.delta = {};
curContribution.delta.prvKey = curve.Fr.fromRng(rng);
curContribution.delta.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
curContribution.delta.g1_sx = curve.G1.toAffine(curve.G1.timesFr(curContribution.delta.g1_s, curContribution.delta.prvKey));
utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_s);
utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_sx);
curContribution.transcript = transcriptHasher.digest();
curContribution.delta.g2_sp = hashToG2(curve, curContribution.transcript);
curContribution.delta.g2_spx = curve.G2.toAffine(curve.G2.timesFr(curContribution.delta.g2_sp, curContribution.delta.prvKey));
zkey.vk_delta_1 = curve.G1.timesFr(zkey.vk_delta_1, curContribution.delta.prvKey);
zkey.vk_delta_2 = curve.G2.timesFr(zkey.vk_delta_2, curContribution.delta.prvKey);
curContribution.deltaAfter = zkey.vk_delta_1;
curContribution.type = 1;
curContribution.numIterationsExp = numIterationsExp;
curContribution.beaconHash = beaconHash;
if (name) curContribution.name = name;
mpcParams.contributions.push(curContribution);
await zkeyUtils.writeHeader(fdNew, zkey);
// IC
await binFileUtils.copySection(fdOld, sections, fdNew, 3);
// Coeffs (Keep original)
await binFileUtils.copySection(fdOld, sections, fdNew, 4);
// A Section
await binFileUtils.copySection(fdOld, sections, fdNew, 5);
// B1 Section
await binFileUtils.copySection(fdOld, sections, fdNew, 6);
// B2 Section
await binFileUtils.copySection(fdOld, sections, fdNew, 7);
const invDelta = curve.Fr.inv(curContribution.delta.prvKey);
await applyKeyToSection(fdOld, sections, fdNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", logger);
await applyKeyToSection(fdOld, sections, fdNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", logger);
await zkeyUtils.writeMPCParams(fdNew, curve, mpcParams);
await fdOld.close();
await fdNew.close();
const contributionHasher = Blake2b(64);
utils.hashPubKey(contributionHasher, curve, curContribution);
const contributionHash = contributionHasher.digest();
if (logger) logger.info(misc.formatHash(contributionHash, "Contribution Hash: "));
return contributionHash;
}
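A hypothetical invocation of the beacon contribution above (file names and the beacon value are placeholders); the hex string must decode to at most 255 bytes and numIterationsExp must be between 10 and 63, as enforced above.

import beacon from "./zkey_beacon.js";

const hash = await beacon(
    "circuit_0002.zkey",         // last contributed zkey (placeholder)
    "circuit_final.zkey",        // output zkey (placeholder)
    "Final Beacon",              // contribution name
    "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f",
    10,                          // 2^10 iterations of the beacon hash
    console                      // any object with info()/error() works as logger
);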

View File

@ -1,201 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
// Format of the output
// Hash of the last contribution 64 Bytes
// 2^N*2-1 TauG1 Points (compressed)
// 2^N TauG2 Points (compressed)
// 2^N AlphaTauG1 Points (compressed)
// 2^N BetaTauG1 Points (compressed)
// Public Key
// BetaG2 (compressed)
// G1*s (compressed)
// G1*s*tau (compressed)
// G1*t (compressed)
// G1*t*alpha (compressed)
// G1*u (compressed)
// G1*u*beta (compressed)
// G2*sp*tau (compressed)
// G2*tp*alpha (compressed)
// G2*up*beta (compressed)
import * as fastFile from "fastfile";
import Blake2b from "blake2b-wasm";
import * as utils from "./zkey_utils.js";
import * as misc from "./misc.js";
import { applyKeyToChallengeSection } from "./mpc_applykey.js";
import { hashPubKey } from "./zkey_utils.js";
import { hashToG2 as hashToG2 } from "./keypair.js";
export default async function bellmanContribute(curve, challengeFilename, responseFileName, entropy, logger) {
await Blake2b.ready();
const rng = await misc.getRandomRng(entropy);
const delta = curve.Fr.fromRng(rng);
const invDelta = curve.Fr.inv(delta);
const sG1 = curve.G1.F.n8*2;
const sG2 = curve.G2.F.n8*2;
const fdFrom = await fastFile.readExisting(challengeFilename);
const fdTo = await fastFile.createOverride(responseFileName);
await copy(sG1); // alpha1
await copy(sG1); // beta1
await copy(sG2); // beta2
await copy(sG2); // gamma2
const oldDelta1 = await readG1();
const delta1 = curve.G1.timesFr(oldDelta1, delta);
await writeG1(delta1);
const oldDelta2 = await readG2();
const delta2 = curve.G2.timesFr(oldDelta2, delta);
await writeG2(delta2);
// IC
const nIC = await fdFrom.readUBE32();
await fdTo.writeUBE32(nIC);
await copy(nIC*sG1);
// H
const nH = await fdFrom.readUBE32();
await fdTo.writeUBE32(nH);
await applyKeyToChallengeSection(fdFrom, fdTo, null, curve, "G1", nH, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "H", logger);
// L
const nL = await fdFrom.readUBE32();
await fdTo.writeUBE32(nL);
await applyKeyToChallengeSection(fdFrom, fdTo, null, curve, "G1", nL, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "L", logger);
// A
const nA = await fdFrom.readUBE32();
await fdTo.writeUBE32(nA);
await copy(nA*sG1);
// B1
const nB1 = await fdFrom.readUBE32();
await fdTo.writeUBE32(nB1);
await copy(nB1*sG1);
// B2
const nB2 = await fdFrom.readUBE32();
await fdTo.writeUBE32(nB2);
await copy(nB2*sG2);
//////////
/// Read contributions
//////////
const transcriptHasher = Blake2b(64);
const mpcParams = {};
// csHash
mpcParams.csHash = await fdFrom.read(64);
transcriptHasher.update(mpcParams.csHash);
const nContributions = await fdFrom.readUBE32();
mpcParams.contributions = [];
for (let i=0; i<nContributions; i++) {
const c = { delta:{} };
c.deltaAfter = await readG1();
c.delta.g1_s = await readG1();
c.delta.g1_sx = await readG1();
c.delta.g2_spx = await readG2();
c.transcript = await fdFrom.read(64);
mpcParams.contributions.push(c);
hashPubKey(transcriptHasher, curve, c);
}
const curContribution = {};
curContribution.delta = {};
curContribution.delta.prvKey = delta;
curContribution.delta.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
curContribution.delta.g1_sx = curve.G1.toAffine(curve.G1.timesFr(curContribution.delta.g1_s, delta));
utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_s);
utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_sx);
curContribution.transcript = transcriptHasher.digest();
curContribution.delta.g2_sp = hashToG2(curve, curContribution.transcript);
curContribution.delta.g2_spx = curve.G2.toAffine(curve.G2.timesFr(curContribution.delta.g2_sp, delta));
curContribution.deltaAfter = delta1;
curContribution.type = 0;
mpcParams.contributions.push(curContribution);
//////////
/// Write contributions
//////////
await fdTo.write(mpcParams.csHash);
await fdTo.writeUBE32(mpcParams.contributions.length);
for (let i=0; i<mpcParams.contributions.length; i++) {
const c = mpcParams.contributions[i];
await writeG1(c.deltaAfter);
await writeG1(c.delta.g1_s);
await writeG1(c.delta.g1_sx);
await writeG2(c.delta.g2_spx);
await fdTo.write(c.transcript);
}
const contributionHasher = Blake2b(64);
hashPubKey(contributionHasher, curve, curContribution);
const contributionHash = contributionHasher.digest();
if (logger) logger.info(misc.formatHash(contributionHash, "Contribution Hash: "));
await fdTo.close();
await fdFrom.close();
return contributionHash;
async function copy(nBytes) {
const CHUNK_SIZE = fdFrom.pageSize*2;
for (let i=0; i<nBytes; i+= CHUNK_SIZE) {
const n = Math.min(nBytes -i, CHUNK_SIZE);
const buff = await fdFrom.read(n);
await fdTo.write(buff);
}
}
async function readG1() {
const buff = await fdFrom.read(curve.G1.F.n8*2);
return curve.G1.fromRprUncompressed(buff, 0);
}
async function readG2() {
const buff = await fdFrom.read(curve.G2.F.n8*2);
return curve.G2.fromRprUncompressed(buff, 0);
}
async function writeG1(P) {
const buff = new Uint8Array(sG1);
curve.G1.toRprUncompressed(buff, 0, P);
await fdTo.write(buff);
}
async function writeG2(P) {
const buff = new Uint8Array(sG2);
curve.G2.toRprUncompressed(buff, 0, P);
await fdTo.write(buff);
}
}

View File

@ -1,109 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import * as binFileUtils from "@iden3/binfileutils";
import * as zkeyUtils from "./zkey_utils.js";
import { getCurveFromQ as getCurve } from "./curves.js";
import * as misc from "./misc.js";
import Blake2b from "blake2b-wasm";
import * as utils from "./zkey_utils.js";
import { hashToG2 as hashToG2 } from "./keypair.js";
import { applyKeyToSection } from "./mpc_applykey.js";
export default async function phase2contribute(zkeyNameOld, zkeyNameNew, name, entropy, logger) {
await Blake2b.ready();
const {fd: fdOld, sections: sections} = await binFileUtils.readBinFile(zkeyNameOld, "zkey", 2);
const zkey = await zkeyUtils.readHeader(fdOld, sections);
if (zkey.protocol != "groth16") {
throw new Error("zkey file is not groth16");
}
const curve = await getCurve(zkey.q);
const mpcParams = await zkeyUtils.readMPCParams(fdOld, curve, sections);
const fdNew = await binFileUtils.createBinFile(zkeyNameNew, "zkey", 1, 10);
const rng = await misc.getRandomRng(entropy);
const transcriptHasher = Blake2b(64);
transcriptHasher.update(mpcParams.csHash);
for (let i=0; i<mpcParams.contributions.length; i++) {
utils.hashPubKey(transcriptHasher, curve, mpcParams.contributions[i]);
}
const curContribution = {};
curContribution.delta = {};
curContribution.delta.prvKey = curve.Fr.fromRng(rng);
curContribution.delta.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
curContribution.delta.g1_sx = curve.G1.toAffine(curve.G1.timesFr(curContribution.delta.g1_s, curContribution.delta.prvKey));
utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_s);
utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_sx);
curContribution.transcript = transcriptHasher.digest();
curContribution.delta.g2_sp = hashToG2(curve, curContribution.transcript);
curContribution.delta.g2_spx = curve.G2.toAffine(curve.G2.timesFr(curContribution.delta.g2_sp, curContribution.delta.prvKey));
zkey.vk_delta_1 = curve.G1.timesFr(zkey.vk_delta_1, curContribution.delta.prvKey);
zkey.vk_delta_2 = curve.G2.timesFr(zkey.vk_delta_2, curContribution.delta.prvKey);
curContribution.deltaAfter = zkey.vk_delta_1;
curContribution.type = 0;
if (name) curContribution.name = name;
mpcParams.contributions.push(curContribution);
await zkeyUtils.writeHeader(fdNew, zkey);
// IC
await binFileUtils.copySection(fdOld, sections, fdNew, 3);
// Coeffs (Keep original)
await binFileUtils.copySection(fdOld, sections, fdNew, 4);
// A Section
await binFileUtils.copySection(fdOld, sections, fdNew, 5);
// B1 Section
await binFileUtils.copySection(fdOld, sections, fdNew, 6);
// B2 Section
await binFileUtils.copySection(fdOld, sections, fdNew, 7);
const invDelta = curve.Fr.inv(curContribution.delta.prvKey);
await applyKeyToSection(fdOld, sections, fdNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", logger);
await applyKeyToSection(fdOld, sections, fdNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", logger);
await zkeyUtils.writeMPCParams(fdNew, curve, mpcParams);
await fdOld.close();
await fdNew.close();
const contributionHasher = Blake2b(64);
utils.hashPubKey(contributionHasher, curve, curContribution);
const contributionHash = contributionHasher.digest();
if (logger) logger.info(misc.formatHash(mpcParams.csHash, "Circuit Hash: "));
if (logger) logger.info(misc.formatHash(contributionHash, "Contribution Hash: "));
return contributionHash;
}
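A hypothetical invocation of phase2contribute above; the module path and file names are placeholders, and the entropy string is mixed into the RNG used for the fresh δ share.

import contribute from "./zkey_contribute.js";   // assumed path for the function above

await contribute(
    "circuit_0000.zkey",         // zkey to contribute to (placeholder)
    "circuit_0001.zkey",         // output zkey (placeholder)
    "1st Contributor Name",
    "some random text",          // extra entropy
    console
);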

View File

@ -1,139 +0,0 @@
import * as binFileUtils from "@iden3/binfileutils";
import * as zkeyUtils from "./zkey_utils.js";
import * as fastFile from "fastfile";
import { getCurveFromQ as getCurve } from "./curves.js";
export default async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils.readBinFile(zkeyName, "zkey", 2);
const zkey = await zkeyUtils.readHeader(fdZKey, sectionsZKey);
if (zkey.protocol != "groth16") {
throw new Error("zkey file is not groth16");
}
const curve = await getCurve(zkey.q);
const sG1 = curve.G1.F.n8*2;
const sG2 = curve.G2.F.n8*2;
const mpcParams = await zkeyUtils.readMPCParams(fdZKey, curve, sectionsZKey);
const fdMPCParams = await fastFile.createOverride(mpcparamsName);
/////////////////////
// Verification Key Section
/////////////////////
await writeG1(zkey.vk_alpha_1);
await writeG1(zkey.vk_beta_1);
await writeG2(zkey.vk_beta_2);
await writeG2(zkey.vk_gamma_2);
await writeG1(zkey.vk_delta_1);
await writeG2(zkey.vk_delta_2);
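// Note: the zkey stores points in little-endian Montgomery form; batchLEMtoU below appears to
// convert each section to the uncompressed big-endian representation that the bellman-style
// MPCParams file expects before it is written out.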
// IC
let buffBasesIC;
buffBasesIC = await binFileUtils.readSection(fdZKey, sectionsZKey, 3);
buffBasesIC = await curve.G1.batchLEMtoU(buffBasesIC);
await writePointArray("G1", buffBasesIC);
/////////////////////
// h Section
/////////////////////
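// Roughly: the zkey keeps the H bases in an evaluation (Lagrange-style) form; the FFT plus
// batchApplyKey below converts them to the tau-power form used by the MPCParams format, and
// the last point is dropped because H only has m-1 elements (degree at most m-2).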
const buffBasesH_Lodd = await binFileUtils.readSection(fdZKey, sectionsZKey, 9);
let buffBasesH_Tau;
buffBasesH_Tau = await curve.G1.fft(buffBasesH_Lodd, "affine", "jacobian", logger);
buffBasesH_Tau = await curve.G1.batchApplyKey(buffBasesH_Tau, curve.Fr.neg(curve.Fr.e(2)), curve.Fr.w[zkey.power+1], "jacobian", "affine", logger);
// Remove the last element. (The degree of H will always be m-2)
buffBasesH_Tau = buffBasesH_Tau.slice(0, buffBasesH_Tau.byteLength - sG1);
buffBasesH_Tau = await curve.G1.batchLEMtoU(buffBasesH_Tau);
await writePointArray("G1", buffBasesH_Tau);
/////////////////////
// L section
/////////////////////
let buffBasesC;
buffBasesC = await binFileUtils.readSection(fdZKey, sectionsZKey, 8);
buffBasesC = await curve.G1.batchLEMtoU(buffBasesC);
await writePointArray("G1", buffBasesC);
/////////////////////
// A Section (C section)
/////////////////////
let buffBasesA;
buffBasesA = await binFileUtils.readSection(fdZKey, sectionsZKey, 5);
buffBasesA = await curve.G1.batchLEMtoU(buffBasesA);
await writePointArray("G1", buffBasesA);
/////////////////////
// B1 Section
/////////////////////
let buffBasesB1;
buffBasesB1 = await binFileUtils.readSection(fdZKey, sectionsZKey, 6);
buffBasesB1 = await curve.G1.batchLEMtoU(buffBasesB1);
await writePointArray("G1", buffBasesB1);
/////////////////////
// B2 Section
/////////////////////
let buffBasesB2;
buffBasesB2 = await binFileUtils.readSection(fdZKey, sectionsZKey, 7);
buffBasesB2 = await curve.G2.batchLEMtoU(buffBasesB2);
await writePointArray("G2", buffBasesB2);
await fdMPCParams.write(mpcParams.csHash);
await writeU32(mpcParams.contributions.length);
for (let i=0; i<mpcParams.contributions.length; i++) {
const c = mpcParams.contributions[i];
await writeG1(c.deltaAfter);
await writeG1(c.delta.g1_s);
await writeG1(c.delta.g1_sx);
await writeG2(c.delta.g2_spx);
await fdMPCParams.write(c.transcript);
}
await fdZKey.close();
await fdMPCParams.close();
async function writeG1(P) {
const buff = new Uint8Array(sG1);
curve.G1.toRprUncompressed(buff, 0, P);
await fdMPCParams.write(buff);
}
async function writeG2(P) {
const buff = new Uint8Array(sG2);
curve.G2.toRprUncompressed(buff, 0, P);
await fdMPCParams.write(buff);
}
async function writePointArray(groupName, buff) {
let sG;
if (groupName == "G1") {
sG = sG1;
} else {
sG = sG2;
}
const buffSize = new Uint8Array(4);
const buffSizeV = new DataView(buffSize.buffer, buffSize.byteOffset, buffSize.byteLength);
buffSizeV.setUint32(0, buff.byteLength / sG, false);
await fdMPCParams.write(buffSize);
await fdMPCParams.write(buff);
}
async function writeU32(n) {
const buffSize = new Uint8Array(4);
const buffSizeV = new DataView(buffSize.buffer, buffSize.byteOffset, buffSize.byteLength);
buffSizeV.setUint32(0, n, false);
await fdMPCParams.write(buffSize);
}
}

View File

@ -1,8 +0,0 @@
import { readZKey as readZKey } from "./zkey_utils.js";
export default async function zkeyExportJson(zkeyFileName) {
const zKey = await readZKey(zkeyFileName, true);
return zKey;
}

View File

@ -1,17 +0,0 @@
import * as fastFile from "fastfile";
import ejs from "ejs";
import exportVerificationKey from "./zkey_export_verificationkey.js";
// Not ready yet
// module.exports.generateVerifier_kimleeoh = generateVerifier_kimleeoh;
export default async function exportSolidityVerifier(zKeyName, templates, logger) {
const verificationKey = await exportVerificationKey(zKeyName, logger);
let template = templates[verificationKey.protocol];
return ejs.render(template , verificationKey);
}

View File

@ -1,112 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import * as binFileUtils from "@iden3/binfileutils";
import * as zkeyUtils from "./zkey_utils.js";
import { getCurveFromQ as getCurve } from "./curves.js";
import { utils } from "ffjavascript";
const {stringifyBigInts} = utils;
export default async function zkeyExportVerificationKey(zkeyName, /* logger */ ) {
const {fd, sections} = await binFileUtils.readBinFile(zkeyName, "zkey", 2);
const zkey = await zkeyUtils.readHeader(fd, sections);
let res;
if (zkey.protocol == "groth16") {
res = await groth16Vk(zkey, fd, sections);
} else if (zkey.protocol == "plonk") {
res = await plonkVk(zkey);
} else {
throw new Error("zkey file is not groth16");
}
await fd.close();
return res;
}
async function groth16Vk(zkey, fd, sections) {
const curve = await getCurve(zkey.q);
const sG1 = curve.G1.F.n8*2;
const alphaBeta = await curve.pairing( zkey.vk_alpha_1 , zkey.vk_beta_2 );
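// Precompute e(alpha1, beta2) once and store it in the verification key so verifiers do not
// have to recompute that pairing for every proof.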
let vKey = {
protocol: zkey.protocol,
curve: curve.name,
nPublic: zkey.nPublic,
vk_alpha_1: curve.G1.toObject(zkey.vk_alpha_1),
vk_beta_2: curve.G2.toObject(zkey.vk_beta_2),
vk_gamma_2: curve.G2.toObject(zkey.vk_gamma_2),
vk_delta_2: curve.G2.toObject(zkey.vk_delta_2),
vk_alphabeta_12: curve.Gt.toObject(alphaBeta)
};
// Read IC Section
///////////
await binFileUtils.startReadUniqueSection(fd, sections, 3);
vKey.IC = [];
for (let i=0; i<= zkey.nPublic; i++) {
const buff = await fd.read(sG1);
const P = curve.G1.toObject(buff);
vKey.IC.push(P);
}
await binFileUtils.endReadSection(fd);
vKey = stringifyBigInts(vKey);
return vKey;
}
async function plonkVk(zkey) {
const curve = await getCurve(zkey.q);
let vKey = {
protocol: zkey.protocol,
curve: curve.name,
nPublic: zkey.nPublic,
power: zkey.power,
k1: curve.Fr.toObject(zkey.k1),
k2: curve.Fr.toObject(zkey.k2),
Qm: curve.G1.toObject(zkey.Qm),
Ql: curve.G1.toObject(zkey.Ql),
Qr: curve.G1.toObject(zkey.Qr),
Qo: curve.G1.toObject(zkey.Qo),
Qc: curve.G1.toObject(zkey.Qc),
S1: curve.G1.toObject(zkey.S1),
S2: curve.G1.toObject(zkey.S2),
S3: curve.G1.toObject(zkey.S3),
X_2: curve.G2.toObject(zkey.X_2),
w: curve.Fr.toObject(curve.Fr.w[zkey.power])
};
vKey = stringifyBigInts(vKey);
return vKey;
}

View File

@ -1,221 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import * as zkeyUtils from "./zkey_utils.js";
import * as binFileUtils from "@iden3/binfileutils";
import * as fastFile from "fastfile";
import { getCurveFromQ as getCurve } from "./curves.js";
import * as misc from "./misc.js";
export default async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, name, logger) {
const {fd: fdZKeyOld, sections: sectionsZKeyOld} = await binFileUtils.readBinFile(zkeyNameOld, "zkey", 2);
const zkeyHeader = await zkeyUtils.readHeader(fdZKeyOld, sectionsZKeyOld, false);
if (zkeyHeader.protocol != "groth16") {
throw new Error("zkey file is not groth16");
}
const curve = await getCurve(zkeyHeader.q);
const sG1 = curve.G1.F.n8*2;
const sG2 = curve.G2.F.n8*2;
const oldMPCParams = await zkeyUtils.readMPCParams(fdZKeyOld, curve, sectionsZKeyOld);
const newMPCParams = {};
const fdMPCParams = await fastFile.readExisting(mpcparamsName);
fdMPCParams.pos =
sG1*3 + sG2*3 + // vKey
8 + sG1*zkeyHeader.nVars + // IC + C
4 + sG1*(zkeyHeader.domainSize-1) + // H
4 + sG1*zkeyHeader.nVars + // A
4 + sG1*zkeyHeader.nVars + // B1
4 + sG2*zkeyHeader.nVars; // B2
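// The MPCParams file has no section table, so skip over its fixed-size head by summing the
// byte sizes written on export: the six verification-key points plus the IC/L, H, A, B1 and B2
// point arrays (each array prefixed with a 4-byte big-endian count). The cursor then sits on csHash.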
// csHash
newMPCParams.csHash = await fdMPCParams.read(64);
const nContributions = await fdMPCParams.readUBE32();
newMPCParams.contributions = [];
for (let i=0; i<nContributions; i++) {
const c = { delta:{} };
c.deltaAfter = await readG1(fdMPCParams);
c.delta.g1_s = await readG1(fdMPCParams);
c.delta.g1_sx = await readG1(fdMPCParams);
c.delta.g2_spx = await readG2(fdMPCParams);
c.transcript = await fdMPCParams.read(64);
if (i<oldMPCParams.contributions.length) {
c.type = oldMPCParams.contributions[i].type;
if (c.type==1) {
c.beaconHash = oldMPCParams.contributions[i].beaconHash;
c.numIterationsExp = oldMPCParams.contributions[i].numIterationsExp;
}
if (oldMPCParams.contributions[i].name) {
c.name = oldMPCParams.contributions[i].name;
}
}
newMPCParams.contributions.push(c);
}
if (!misc.hashIsEqual(newMPCParams.csHash, oldMPCParams.csHash)) {
if (logger) logger.error("Hash of the original circuit does not match with the MPC one");
return false;
}
if (oldMPCParams.contributions.length > newMPCParams.contributions.length) {
if (logger) logger.error("The impoerted file does not include new contributions");
return false;
}
for (let i=0; i<oldMPCParams.contributions.length; i++) {
if (!contributionIsEqual(oldMPCParams.contributions[i], newMPCParams.contributions[i])) {
if (logger) logger.error(`Previous contribution ${i} does not match`);
return false;
}
}
// Set the same name on all new contributions
if (name) {
for (let i=oldMPCParams.contributions.length; i<newMPCParams.contributions.length; i++) {
newMPCParams.contributions[i].name = name;
}
}
const fdZKeyNew = await binFileUtils.createBinFile(zkeyNameNew, "zkey", 1, 10);
fdMPCParams.pos = 0;
// Header
fdMPCParams.pos += sG1; // ignore alpha1 (keep original)
fdMPCParams.pos += sG1; // ignore beta1
fdMPCParams.pos += sG2; // ignore beta2
fdMPCParams.pos += sG2; // ignore gamma2
zkeyHeader.vk_delta_1 = await readG1(fdMPCParams);
zkeyHeader.vk_delta_2 = await readG2(fdMPCParams);
await zkeyUtils.writeHeader(fdZKeyNew, zkeyHeader);
// IC (Keep original)
const nIC = await fdMPCParams.readUBE32();
if (nIC != zkeyHeader.nPublic +1) {
if (logger) logger.error("Invalid number of points in IC");
await fdZKeyNew.discard();
return false;
}
fdMPCParams.pos += sG1*(zkeyHeader.nPublic+1);
await binFileUtils.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 3);
// Coeffs (Keep original)
await binFileUtils.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 4);
// H Section
const nH = await fdMPCParams.readUBE32();
if (nH != zkeyHeader.domainSize-1) {
if (logger) logger.error("Invalid number of points in H");
await fdZKeyNew.discard();
return false;
}
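// Undo the export-side transform: the imported H points are in tau-power form, so pad the
// missing last point with the zero point and apply the inverse key factors (-1/2, w^-1) plus
// an inverse FFT to recover the evaluation form that section 9 of the zkey stores.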
let buffH;
const buffTauU = await fdMPCParams.read(sG1*(zkeyHeader.domainSize-1));
const buffTauLEM = await curve.G1.batchUtoLEM(buffTauU);
buffH = new Uint8Array(zkeyHeader.domainSize*sG1);
buffH.set(buffTauLEM); // Leave the last point as zero.
curve.G1.toRprLEM(buffH, sG1*(zkeyHeader.domainSize-1), curve.G1.zeroAffine);
const n2Inv = curve.Fr.neg(curve.Fr.inv(curve.Fr.e(2)));
const wInv = curve.Fr.inv(curve.Fr.w[zkeyHeader.power+1]);
buffH = await curve.G1.batchApplyKey(buffH, n2Inv, wInv, "affine", "jacobian", logger);
buffH = await curve.G1.ifft(buffH, "jacobian", "affine", logger);
await binFileUtils.startWriteSection(fdZKeyNew, 9);
await fdZKeyNew.write(buffH);
await binFileUtils.endWriteSection(fdZKeyNew);
// C Section (L section)
const nL = await fdMPCParams.readUBE32();
if (nL != (zkeyHeader.nVars-zkeyHeader.nPublic-1)) {
if (logger) logger.error("Invalid number of points in L");
await fdZKeyNew.discard();
return false;
}
let buffL;
buffL = await fdMPCParams.read(sG1*(zkeyHeader.nVars-zkeyHeader.nPublic-1));
buffL = await curve.G1.batchUtoLEM(buffL);
await binFileUtils.startWriteSection(fdZKeyNew, 8);
await fdZKeyNew.write(buffL);
await binFileUtils.endWriteSection(fdZKeyNew);
// A Section
const nA = await fdMPCParams.readUBE32();
if (nA != zkeyHeader.nVars) {
if (logger) logger.error("Invalid number of points in A");
await fdZKeyNew.discard();
return false;
}
fdMPCParams.pos += sG1*(zkeyHeader.nVars);
await binFileUtils.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 5);
// B1 Section
const nB1 = await fdMPCParams.readUBE32();
if (nB1 != zkeyHeader.nVars) {
if (logger) logger.error("Invalid number of points in B1");
await fdZKeyNew.discard();
return false;
}
fdMPCParams.pos += sG1*(zkeyHeader.nVars);
await binFileUtils.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 6);
// B2 Section
const nB2 = await fdMPCParams.readUBE32();
if (nB2 != zkeyHeader.nVars) {
if (logger) logger.error("Invalid number of points in B2");
await fdZKeyNew.discard();
return false;
}
fdMPCParams.pos += sG2*(zkeyHeader.nVars);
await binFileUtils.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 7);
await zkeyUtils.writeMPCParams(fdZKeyNew, curve, newMPCParams);
await fdMPCParams.close();
await fdZKeyNew.close();
await fdZKeyOld.close();
return true;
async function readG1(fd) {
const buff = await fd.read(curve.G1.F.n8*2);
return curve.G1.fromRprUncompressed(buff, 0);
}
async function readG2(fd) {
const buff = await fd.read(curve.G2.F.n8*2);
return curve.G2.fromRprUncompressed(buff, 0);
}
function contributionIsEqual(c1, c2) {
if (!curve.G1.eq(c1.deltaAfter , c2.deltaAfter)) return false;
if (!curve.G1.eq(c1.delta.g1_s , c2.delta.g1_s)) return false;
if (!curve.G1.eq(c1.delta.g1_sx , c2.delta.g1_sx)) return false;
if (!curve.G2.eq(c1.delta.g2_spx , c2.delta.g2_spx)) return false;
if (!misc.hashIsEqual(c1.transcript, c2.transcript)) return false;
return true;
}
}

View File

@ -1,589 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import {readR1csHeader} from "r1csfile";
import * as utils from "./powersoftau_utils.js";
import {
readBinFile,
createBinFile,
readSection,
writeBigInt,
startWriteSection,
endWriteSection,
} from "@iden3/binfileutils";
import { log2, formatHash } from "./misc.js";
import { Scalar, BigBuffer } from "ffjavascript";
import Blake2b from "blake2b-wasm";
import BigArray from "./bigarray.js";
export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
const TAU_G1 = 0;
const TAU_G2 = 1;
const ALPHATAU_G1 = 2;
const BETATAU_G1 = 3;
await Blake2b.ready();
const csHasher = Blake2b(64);
const {fd: fdPTau, sections: sectionsPTau} = await readBinFile(ptauName, "ptau", 1, 1<<22, 1<<24);
const {curve, power} = await utils.readPTauHeader(fdPTau, sectionsPTau);
const {fd: fdR1cs, sections: sectionsR1cs} = await readBinFile(r1csName, "r1cs", 1, 1<<22, 1<<24);
const r1cs = await readR1csHeader(fdR1cs, sectionsR1cs, false);
const fdZKey = await createBinFile(zkeyName, "zkey", 1, 10, 1<<22, 1<<24);
const sG1 = curve.G1.F.n8*2;
const sG2 = curve.G2.F.n8*2;
if (r1cs.prime != curve.r) {
if (logger) logger.error("r1cs curve does not match powers of tau ceremony curve");
return -1;
}
const cirPower = log2(r1cs.nConstraints + r1cs.nPubInputs + r1cs.nOutputs +1 -1) +1;
if (cirPower > power) {
if (logger) logger.error(`circuit too big for this power of tau ceremony. ${r1cs.nConstraints}*2 > 2**${power}`);
return -1;
}
if (!sectionsPTau[12]) {
if (logger) logger.error("Powers of tau is not prepared.");
return -1;
}
const nPublic = r1cs.nOutputs + r1cs.nPubInputs;
const domainSize = 2 ** cirPower;
// Write the header
///////////
await startWriteSection(fdZKey, 1);
await fdZKey.writeULE32(1); // Groth
await endWriteSection(fdZKey);
// Write the Groth header section
///////////
await startWriteSection(fdZKey, 2);
const primeQ = curve.q;
const n8q = (Math.floor( (Scalar.bitLength(primeQ) - 1) / 64) +1)*8;
const primeR = curve.r;
const n8r = (Math.floor( (Scalar.bitLength(primeR) - 1) / 64) +1)*8;
const Rr = Scalar.mod(Scalar.shl(1, n8r*8), primeR);
const R2r = curve.Fr.e(Scalar.mod(Scalar.mul(Rr,Rr), primeR));
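// Rr is 2^(8*n8r) mod r (the Montgomery constant R for Fr) and R2r is R^2. Coefficients in the
// Coeffs section are written scaled by R^2 (see writeCoef below); readFr2 in zkey_utils undoes
// this by multiplying by R^-2 when the zkey is read back.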
await fdZKey.writeULE32(n8q);
await writeBigInt(fdZKey, primeQ, n8q);
await fdZKey.writeULE32(n8r);
await writeBigInt(fdZKey, primeR, n8r);
await fdZKey.writeULE32(r1cs.nVars); // Total number of vars
await fdZKey.writeULE32(nPublic); // Total number of public vars (not including ONE)
await fdZKey.writeULE32(domainSize); // domainSize
let bAlpha1;
bAlpha1 = await fdPTau.read(sG1, sectionsPTau[4][0].p);
await fdZKey.write(bAlpha1);
bAlpha1 = await curve.G1.batchLEMtoU(bAlpha1);
csHasher.update(bAlpha1);
let bBeta1;
bBeta1 = await fdPTau.read(sG1, sectionsPTau[5][0].p);
await fdZKey.write(bBeta1);
bBeta1 = await curve.G1.batchLEMtoU(bBeta1);
csHasher.update(bBeta1);
let bBeta2;
bBeta2 = await fdPTau.read(sG2, sectionsPTau[6][0].p);
await fdZKey.write(bBeta2);
bBeta2 = await curve.G2.batchLEMtoU(bBeta2);
csHasher.update(bBeta2);
const bg1 = new Uint8Array(sG1);
curve.G1.toRprLEM(bg1, 0, curve.G1.g);
const bg2 = new Uint8Array(sG2);
curve.G2.toRprLEM(bg2, 0, curve.G2.g);
const bg1U = new Uint8Array(sG1);
curve.G1.toRprUncompressed(bg1U, 0, curve.G1.g);
const bg2U = new Uint8Array(sG2);
curve.G2.toRprUncompressed(bg2U, 0, curve.G2.g);
await fdZKey.write(bg2); // gamma2
await fdZKey.write(bg1); // delta1
await fdZKey.write(bg2); // delta2
csHasher.update(bg2U); // gamma2
csHasher.update(bg1U); // delta1
csHasher.update(bg2U); // delta2
await endWriteSection(fdZKey);
if (logger) logger.info("Reading r1cs");
let sR1cs = await readSection(fdR1cs, sectionsR1cs, 2);
const A = new BigArray(r1cs.nVars);
const B1 = new BigArray(r1cs.nVars);
const B2 = new BigArray(r1cs.nVars);
const C = new BigArray(r1cs.nVars- nPublic -1);
const IC = new Array(nPublic+1);
if (logger) logger.info("Reading tauG1");
let sTauG1 = await readSection(fdPTau, sectionsPTau, 12, (domainSize -1)*sG1, domainSize*sG1);
if (logger) logger.info("Reading tauG2");
let sTauG2 = await readSection(fdPTau, sectionsPTau, 13, (domainSize -1)*sG2, domainSize*sG2);
if (logger) logger.info("Reading alphatauG1");
let sAlphaTauG1 = await readSection(fdPTau, sectionsPTau, 14, (domainSize -1)*sG1, domainSize*sG1);
if (logger) logger.info("Reading betatauG1");
let sBetaTauG1 = await readSection(fdPTau, sectionsPTau, 15, (domainSize -1)*sG1, domainSize*sG1);
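// processConstraints walks the R1CS constraint by constraint and, for each (signal, coefficient)
// pair, records which prepared Lagrange-basis point (tauG1, tauG2, alphaTauG1 or betaTauG1) it
// multiplies. composeAndWritePoints later turns each signal's list into one multi-exponentiation
// and writes the resulting IC, C, A, B1 and B2 points to their sections.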
await processConstraints();
await composeAndWritePoints(3, "G1", IC, "IC");
await writeHs();
await hashHPoints();
await composeAndWritePoints(8, "G1", C, "C");
await composeAndWritePoints(5, "G1", A, "A");
await composeAndWritePoints(6, "G1", B1, "B1");
await composeAndWritePoints(7, "G2", B2, "B2");
const csHash = csHasher.digest();
// Contributions section
await startWriteSection(fdZKey, 10);
await fdZKey.write(csHash);
await fdZKey.writeULE32(0);
await endWriteSection(fdZKey);
if (logger) logger.info(formatHash(csHash, "Circuit hash: "));
await fdZKey.close();
await fdR1cs.close();
await fdPTau.close();
return csHash;
async function writeHs() {
await startWriteSection(fdZKey, 9);
const buffOut = new BigBuffer(domainSize*sG1);
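// The H bases come from the prepared ptau section 12: when the circuit is smaller than the
// field's 2-adicity (curve.Fr.s), the odd-indexed points of the 2*domainSize Lagrange basis are
// picked out below; at the maximum supported size they are read directly at the matching offset.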
if (cirPower < curve.Fr.s) {
let sTauG1 = await readSection(fdPTau, sectionsPTau, 12, (domainSize*2-1)*sG1, domainSize*2*sG1);
for (let i=0; i< domainSize; i++) {
if ((logger)&&(i%10000 == 0)) logger.debug(`spliting buffer: ${i}/${domainSize}`);
const buff = sTauG1.slice( (i*2+1)*sG1, (i*2+1)*sG1 + sG1 );
buffOut.set(buff, i*sG1);
}
} else if (cirPower == curve.Fr.s) {
const o = sectionsPTau[12][0].p + ((2 ** (cirPower+1)) -1)*sG1;
await fdPTau.readToBuffer(buffOut, 0, domainSize*sG1, o + domainSize*sG1);
} else {
if (logger) logger.error("Circuit too big");
throw new Error("Circuit too big for this curve");
}
await fdZKey.write(buffOut);
await endWriteSection(fdZKey);
}
async function processConstraints() {
const buffCoeff = new Uint8Array(12 + curve.Fr.n8);
const buffCoeffV = new DataView(buffCoeff.buffer);
const bOne = new Uint8Array(curve.Fr.n8);
curve.Fr.toRprLE(bOne, 0, curve.Fr.e(1));
let r1csPos = 0;
function r1cs_readULE32() {
const buff = sR1cs.slice(r1csPos, r1csPos+4);
r1csPos += 4;
const buffV = new DataView(buff.buffer);
return buffV.getUint32(0, true);
}
const coefs = new BigArray();
for (let c=0; c<r1cs.nConstraints; c++) {
if ((logger)&&(c%10000 == 0)) logger.debug(`processing constraints: ${c}/${r1cs.nConstraints}`);
const nA = r1cs_readULE32();
for (let i=0; i<nA; i++) {
const s = r1cs_readULE32();
const coefp = r1csPos;
r1csPos += curve.Fr.n8;
const l1t = TAU_G1;
const l1 = sG1*c;
const l2t = BETATAU_G1;
const l2 = sG1*c;
if (typeof A[s] === "undefined") A[s] = [];
A[s].push([l1t, l1, coefp]);
if (s <= nPublic) {
if (typeof IC[s] === "undefined") IC[s] = [];
IC[s].push([l2t, l2, coefp]);
} else {
if (typeof C[s- nPublic -1] === "undefined") C[s- nPublic -1] = [];
C[s - nPublic -1].push([l2t, l2, coefp]);
}
coefs.push([0, c, s, coefp]);
}
const nB = r1cs_readULE32();
for (let i=0; i<nB; i++) {
const s = r1cs_readULE32();
const coefp = r1csPos;
r1csPos += curve.Fr.n8;
const l1t = TAU_G1;
const l1 = sG1*c;
const l2t = TAU_G2;
const l2 = sG2*c;
const l3t = ALPHATAU_G1;
const l3 = sG1*c;
if (typeof B1[s] === "undefined") B1[s] = [];
B1[s].push([l1t, l1, coefp]);
if (typeof B2[s] === "undefined") B2[s] = [];
B2[s].push([l2t, l2, coefp]);
if (s <= nPublic) {
if (typeof IC[s] === "undefined") IC[s] = [];
IC[s].push([l3t, l3, coefp]);
} else {
if (typeof C[s- nPublic -1] === "undefined") C[s- nPublic -1] = [];
C[s- nPublic -1].push([l3t, l3, coefp]);
}
coefs.push([1, c, s, coefp]);
}
const nC = r1cs_readULE32();
for (let i=0; i<nC; i++) {
const s = r1cs_readULE32();
const coefp = r1csPos;
r1csPos += curve.Fr.n8;
const l1t = TAU_G1;
const l1 = sG1*c;
if (s <= nPublic) {
if (typeof IC[s] === "undefined") IC[s] = [];
IC[s].push([l1t, l1, coefp]);
} else {
if (typeof C[s- nPublic -1] === "undefined") C[s- nPublic -1] = [];
C[s- nPublic -1].push([l1t, l1, coefp]);
}
}
}
for (let s = 0; s <= nPublic ; s++) {
const l1t = TAU_G1;
const l1 = sG1*(r1cs.nConstraints + s);
const l2t = BETATAU_G1;
const l2 = sG1*(r1cs.nConstraints + s);
if (typeof A[s] === "undefined") A[s] = [];
A[s].push([l1t, l1, -1]);
if (typeof IC[s] === "undefined") IC[s] = [];
IC[s].push([l2t, l2, -1]);
coefs.push([0, r1cs.nConstraints + s, s, -1]);
}
await startWriteSection(fdZKey, 4);
const buffSection = new BigBuffer(coefs.length*(12+curve.Fr.n8) + 4);
const buff4 = new Uint8Array(4);
const buff4V = new DataView(buff4.buffer);
buff4V.setUint32(0, coefs.length, true);
buffSection.set(buff4);
let coefsPos = 4;
for (let i=0; i<coefs.length; i++) {
if ((logger)&&(i%100000 == 0)) logger.debug(`writing coeffs: ${i}/${coefs.length}`);
writeCoef(coefs[i]);
}
await fdZKey.write(buffSection);
await endWriteSection(fdZKey);
function writeCoef(c) {
buffCoeffV.setUint32(0, c[0], true);
buffCoeffV.setUint32(4, c[1], true);
buffCoeffV.setUint32(8, c[2], true);
let n;
if (c[3]>=0) {
n = curve.Fr.fromRprLE(sR1cs.slice(c[3], c[3] + curve.Fr.n8), 0);
} else {
n = curve.Fr.fromRprLE(bOne, 0);
}
const nR2 = curve.Fr.mul(n, R2r);
curve.Fr.toRprLE(buffCoeff, 12, nR2);
buffSection.set(buffCoeff, coefsPos);
coefsPos += buffCoeff.length;
}
}
async function composeAndWritePoints(idSection, groupName, arr, sectionName) {
const CHUNK_SIZE= 1<<15;
const G = curve[groupName];
hashU32(arr.length);
await startWriteSection(fdZKey, idSection);
let opPromises = [];
let i=0;
while (i<arr.length) {
let t=0;
while ((i<arr.length)&&(t<curve.tm.concurrency)) {
if (logger) logger.debug(`Writing points start ${sectionName}: ${i}/${arr.length}`);
let n = 1;
let nP = (arr[i] ? arr[i].length : 0);
while ((i + n < arr.length) && (nP + (arr[i+n] ? arr[i+n].length : 0) < CHUNK_SIZE) && (n<CHUNK_SIZE)) {
nP += (arr[i+n] ? arr[i+n].length : 0);
n ++;
}
const subArr = arr.slice(i, i + n);
const _i = i;
opPromises.push(composeAndWritePointsThread(groupName, subArr, logger, sectionName).then( (r) => {
if (logger) logger.debug(`Writing points end ${sectionName}: ${_i}/${arr.length}`);
return r;
}));
i += n;
t++;
}
const result = await Promise.all(opPromises);
for (let k=0; k<result.length; k++) {
await fdZKey.write(result[k][0]);
const buff = await G.batchLEMtoU(result[k][0]);
csHasher.update(buff);
}
opPromises = [];
}
await endWriteSection(fdZKey);
}
async function composeAndWritePointsThread(groupName, arr, logger, sectionName) {
const G = curve[groupName];
const sGin = G.F.n8*2;
const sGmid = G.F.n8*3;
const sGout = G.F.n8*2;
let fnExp, fnMultiExp, fnBatchToAffine, fnZero;
if (groupName == "G1") {
fnExp = "g1m_timesScalarAffine";
fnMultiExp = "g1m_multiexpAffine";
fnBatchToAffine = "g1m_batchToAffine";
fnZero = "g1m_zero";
} else if (groupName == "G2") {
fnExp = "g2m_timesScalarAffine";
fnMultiExp = "g2m_multiexpAffine";
fnBatchToAffine = "g2m_batchToAffine";
fnZero = "g2m_zero";
} else {
throw new Error("Invalid group");
}
let acc =0;
for (let i=0; i<arr.length; i++) acc += arr[i] ? arr[i].length : 0;
let bBases, bScalars;
if (acc> 2<<14) {
bBases = new BigBuffer(acc*sGin);
bScalars = new BigBuffer(acc*curve.Fr.n8);
} else {
bBases = new Uint8Array(acc*sGin);
bScalars = new Uint8Array(acc*curve.Fr.n8);
}
let pB =0;
let pS =0;
const sBuffs = [
sTauG1,
sTauG2,
sAlphaTauG1,
sBetaTauG1
];
const bOne = new Uint8Array(curve.Fr.n8);
curve.Fr.toRprLE(bOne, 0, curve.Fr.e(1));
let offset = 0;
for (let i=0; i<arr.length; i++) {
if (!arr[i]) continue;
for (let j=0; j<arr[i].length; j++) {
if ((logger)&&(j)&&(j%10000 == 0)) logger.debug(`Configuring big array ${sectionName}: ${j}/${arr[i].length}`);
bBases.set(
sBuffs[arr[i][j][0]].slice(
arr[i][j][1],
arr[i][j][1] + sGin
), offset*sGin
);
if (arr[i][j][2]>=0) {
bScalars.set(
sR1cs.slice(
arr[i][j][2],
arr[i][j][2] + curve.Fr.n8
),
offset*curve.Fr.n8
);
} else {
bScalars.set(bOne, offset*curve.Fr.n8);
}
offset ++;
}
}
if (arr.length>1) {
const task = [];
task.push({cmd: "ALLOCSET", var: 0, buff: bBases});
task.push({cmd: "ALLOCSET", var: 1, buff: bScalars});
task.push({cmd: "ALLOC", var: 2, len: arr.length*sGmid});
pB = 0;
pS = 0;
let pD =0;
for (let i=0; i<arr.length; i++) {
if (!arr[i]) {
task.push({cmd: "CALL", fnName: fnZero, params: [
{var: 2, offset: pD}
]});
pD += sGmid;
continue;
}
if (arr[i].length == 1) {
task.push({cmd: "CALL", fnName: fnExp, params: [
{var: 0, offset: pB},
{var: 1, offset: pS},
{val: curve.Fr.n8},
{var: 2, offset: pD}
]});
} else {
task.push({cmd: "CALL", fnName: fnMultiExp, params: [
{var: 0, offset: pB},
{var: 1, offset: pS},
{val: curve.Fr.n8},
{val: arr[i].length},
{var: 2, offset: pD}
]});
}
pB += sGin*arr[i].length;
pS += curve.Fr.n8*arr[i].length;
pD += sGmid;
}
task.push({cmd: "CALL", fnName: fnBatchToAffine, params: [
{var: 2},
{val: arr.length},
{var: 2},
]});
task.push({cmd: "GET", out: 0, var: 2, len: arr.length*sGout});
const res = await curve.tm.queueAction(task);
return res;
} else {
let res = await G.multiExpAffine(bBases, bScalars, logger, sectionName);
res = [ G.toAffine(res) ];
return res;
}
}
async function hashHPoints() {
const CHUNK_SIZE = 1<<14;
hashU32(domainSize-1);
for (let i=0; i<domainSize-1; i+= CHUNK_SIZE) {
if (logger) logger.debug(`HashingHPoints: ${i}/${domainSize}`);
const n = Math.min(domainSize-1-i, CHUNK_SIZE);
await hashHPointsChunk(i, n);
}
}
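// hashHPointsChunk / hashHPointsThread feed the circuit hash with tau^(domainSize+i)*G1 minus
// tau^i*G1 (section 2 of the ptau holds the raw tau powers), that is, the usual tau^i*Z(tau)
// basis for H, computed in parallel with g1m_subAffine and converted to uncompressed form
// before hashing.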
async function hashHPointsChunk(offset, nPoints) {
const buff1 = await fdPTau.read(nPoints *sG1, sectionsPTau[2][0].p + (offset + domainSize)*sG1);
const buff2 = await fdPTau.read(nPoints *sG1, sectionsPTau[2][0].p + offset*sG1);
const concurrency= curve.tm.concurrency;
const nPointsPerThread = Math.floor(nPoints / concurrency);
const opPromises = [];
for (let i=0; i<concurrency; i++) {
let n;
if (i< concurrency-1) {
n = nPointsPerThread;
} else {
n = nPoints - i*nPointsPerThread;
}
if (n==0) continue;
const subBuff1 = buff1.slice(i*nPointsPerThread*sG1, (i*nPointsPerThread+n)*sG1);
const subBuff2 = buff2.slice(i*nPointsPerThread*sG1, (i*nPointsPerThread+n)*sG1);
opPromises.push(hashHPointsThread(subBuff1, subBuff2));
}
const result = await Promise.all(opPromises);
for (let i=0; i<result.length; i++) {
csHasher.update(result[i][0]);
}
}
async function hashHPointsThread(buff1, buff2) {
const nPoints = buff1.byteLength/sG1;
const sGmid = curve.G1.F.n8*3;
const task = [];
task.push({cmd: "ALLOCSET", var: 0, buff: buff1});
task.push({cmd: "ALLOCSET", var: 1, buff: buff2});
task.push({cmd: "ALLOC", var: 2, len: nPoints*sGmid});
for (let i=0; i<nPoints; i++) {
task.push({
cmd: "CALL",
fnName: "g1m_subAffine",
params: [
{var: 0, offset: i*sG1},
{var: 1, offset: i*sG1},
{var: 2, offset: i*sGmid},
]
});
}
task.push({cmd: "CALL", fnName: "g1m_batchToAffine", params: [
{var: 2},
{val: nPoints},
{var: 2},
]});
task.push({cmd: "CALL", fnName: "g1m_batchLEMtoU", params: [
{var: 2},
{val: nPoints},
{var: 2},
]});
task.push({cmd: "GET", out: 0, var: 2, len: nPoints*sG1});
const res = await curve.tm.queueAction(task);
return res;
}
function hashU32(n) {
const buff = new Uint8Array(4);
const buffV = new DataView(buff.buffer, buff.byteOffset, buff.byteLength);
buffV.setUint32(0, n, false);
csHasher.update(buff);
}
}

View File

@ -1,528 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
// Format
// ======
// Header(1)
// Prover Type 1 Groth
// HeaderGroth(2)
// n8q
// q
// n8r
// r
// NVars
// NPub
// DomainSize (power of 2)
// alpha1
// beta1
// delta1
// beta2
// gamma2
// delta2
// IC(3)
// Coefs(4)
// PointsA(5)
// PointsB1(6)
// PointsB2(7)
// PointsC(8)
// PointsH(9)
// Contributions(10)
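// Points in sections 3-9 are stored as G1/G2 coordinates in little-endian Montgomery form
// (see writeG1/readG1 below, which use toRprLEM/fromRprLEM), and the Coeffs section stores
// Fr values with a Montgomery scaling (see writeFr2/readFr2).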
import { Scalar, F1Field } from "ffjavascript";
import * as binFileUtils from "@iden3/binfileutils";
import { getCurveFromQ as getCurve } from "./curves.js";
import { log2 } from "./misc.js";
export async function writeHeader(fd, zkey) {
// Write the header
///////////
await binFileUtils.startWriteSection(fd, 1);
await fd.writeULE32(1); // Groth
await binFileUtils.endWriteSection(fd);
// Write the Groth header section
///////////
const curve = await getCurve(zkey.q);
await binFileUtils.startWriteSection(fd, 2);
const primeQ = curve.q;
const n8q = (Math.floor( (Scalar.bitLength(primeQ) - 1) / 64) +1)*8;
const primeR = curve.r;
const n8r = (Math.floor( (Scalar.bitLength(primeR) - 1) / 64) +1)*8;
await fd.writeULE32(n8q);
await binFileUtils.writeBigInt(fd, primeQ, n8q);
await fd.writeULE32(n8r);
await binFileUtils.writeBigInt(fd, primeR, n8r);
await fd.writeULE32(zkey.nVars); // Total number of vars
await fd.writeULE32(zkey.nPublic); // Total number of public vars (not including ONE)
await fd.writeULE32(zkey.domainSize); // domainSize
await writeG1(fd, curve, zkey.vk_alpha_1);
await writeG1(fd, curve, zkey.vk_beta_1);
await writeG2(fd, curve, zkey.vk_beta_2);
await writeG2(fd, curve, zkey.vk_gamma_2);
await writeG1(fd, curve, zkey.vk_delta_1);
await writeG2(fd, curve, zkey.vk_delta_2);
await binFileUtils.endWriteSection(fd);
}
export async function writeZKey(fileName, zkey) {
let curve = getCurve(zkey.q);
const fd = await binFileUtils.createBinFile(fileName,"zkey", 1, 9);
await writeHeader(fd, zkey);
const n8r = (Math.floor( (Scalar.bitLength(zkey.r) - 1) / 64) +1)*8;
const Rr = Scalar.mod(Scalar.shl(1, n8r*8), zkey.r);
const R2r = Scalar.mod(Scalar.mul(Rr,Rr), zkey.r);
// Write Pols (A and B (C can be ommited))
///////////
zkey.ccoefs = zkey.ccoefs.filter(c => c.matrix<2);
zkey.ccoefs.sort( (a,b) => a.constraint - b.constraint );
await binFileUtils.startWriteSection(fd, 4);
await fd.writeULE32(zkey.ccoefs.length);
for (let i=0; i<zkey.ccoefs.length; i++) {
const coef = zkey.ccoefs[i];
await fd.writeULE32(coef.matrix);
await fd.writeULE32(coef.constraint);
await fd.writeULE32(coef.signal);
await writeFr2(coef.value);
}
await binFileUtils.endWriteSection(fd);
// Write IC Section
///////////
await binFileUtils.startWriteSection(fd, 3);
for (let i=0; i<= zkey.nPublic; i++) {
await writeG1(fd, curve, zkey.IC[i] );
}
await binFileUtils.endWriteSection(fd);
// Write A
///////////
await binFileUtils.startWriteSection(fd, 5);
for (let i=0; i<zkey.nVars; i++) {
await writeG1(fd, curve, zkey.A[i]);
}
await binFileUtils.endWriteSection(fd);
// Write B1
///////////
await binFileUtils.startWriteSection(fd, 6);
for (let i=0; i<zkey.nVars; i++) {
await writeG1(fd, curve, zkey.B1[i]);
}
await binFileUtils.endWriteSection(fd);
// Write B2
///////////
await binFileUtils.startWriteSection(fd, 7);
for (let i=0; i<zkey.nVars; i++) {
await writeG2(fd, curve, zkey.B2[i]);
}
await binFileUtils.endWriteSection(fd);
// Write C
///////////
await binFileUtils.startWriteSection(fd, 8);
for (let i=zkey.nPublic+1; i<zkey.nVars; i++) {
await writeG1(fd, curve, zkey.C[i]);
}
await binFileUtils.endWriteSection(fd);
// Write H points
///////////
await binFileUtils.startWriteSection(fd, 9);
for (let i=0; i<zkey.domainSize; i++) {
await writeG1(fd, curve, zkey.hExps[i]);
}
await binFileUtils.endWriteSection(fd);
await fd.close();
async function writeFr2(n) {
// Convert to montgomery
n = Scalar.mod( Scalar.mul(n, R2r), zkey.r);
await binFileUtils.writeBigInt(fd, n, n8r);
}
}
async function writeG1(fd, curve, p) {
const buff = new Uint8Array(curve.G1.F.n8*2);
curve.G1.toRprLEM(buff, 0, p);
await fd.write(buff);
}
async function writeG2(fd, curve, p) {
const buff = new Uint8Array(curve.G2.F.n8*2);
curve.G2.toRprLEM(buff, 0, p);
await fd.write(buff);
}
async function readG1(fd, curve, toObject) {
const buff = await fd.read(curve.G1.F.n8*2);
const res = curve.G1.fromRprLEM(buff, 0);
return toObject ? curve.G1.toObject(res) : res;
}
async function readG2(fd, curve, toObject) {
const buff = await fd.read(curve.G2.F.n8*2);
const res = curve.G2.fromRprLEM(buff, 0);
return toObject ? curve.G2.toObject(res) : res;
}
export async function readHeader(fd, sections, toObject) {
// Read Header
/////////////////////
await binFileUtils.startReadUniqueSection(fd, sections, 1);
const protocolId = await fd.readULE32();
await binFileUtils.endReadSection(fd);
if (protocolId == 1) {
return await readHeaderGroth16(fd, sections, toObject);
} else if (protocolId == 2) {
return await readHeaderPlonk(fd, sections, toObject);
} else {
throw new Error("Protocol not supported: ");
}
}
async function readHeaderGroth16(fd, sections, toObject) {
const zkey = {};
zkey.protocol = "groth16";
// Read Groth Header
/////////////////////
await binFileUtils.startReadUniqueSection(fd, sections, 2);
const n8q = await fd.readULE32();
zkey.n8q = n8q;
zkey.q = await binFileUtils.readBigInt(fd, n8q);
const n8r = await fd.readULE32();
zkey.n8r = n8r;
zkey.r = await binFileUtils.readBigInt(fd, n8r);
let curve = await getCurve(zkey.q);
zkey.nVars = await fd.readULE32();
zkey.nPublic = await fd.readULE32();
zkey.domainSize = await fd.readULE32();
zkey.power = log2(zkey.domainSize);
zkey.vk_alpha_1 = await readG1(fd, curve, toObject);
zkey.vk_beta_1 = await readG1(fd, curve, toObject);
zkey.vk_beta_2 = await readG2(fd, curve, toObject);
zkey.vk_gamma_2 = await readG2(fd, curve, toObject);
zkey.vk_delta_1 = await readG1(fd, curve, toObject);
zkey.vk_delta_2 = await readG2(fd, curve, toObject);
await binFileUtils.endReadSection(fd);
return zkey;
}
async function readHeaderPlonk(fd, sections, toObject) {
const zkey = {};
zkey.protocol = "plonk";
// Read Plonk Header
/////////////////////
await binFileUtils.startReadUniqueSection(fd, sections, 2);
const n8q = await fd.readULE32();
zkey.n8q = n8q;
zkey.q = await binFileUtils.readBigInt(fd, n8q);
const n8r = await fd.readULE32();
zkey.n8r = n8r;
zkey.r = await binFileUtils.readBigInt(fd, n8r);
let curve = await getCurve(zkey.q);
zkey.nVars = await fd.readULE32();
zkey.nPublic = await fd.readULE32();
zkey.domainSize = await fd.readULE32();
zkey.power = log2(zkey.domainSize);
zkey.nAdditions = await fd.readULE32();
zkey.nConstrains = await fd.readULE32();
zkey.k1 = await fd.read(n8r);
zkey.k2 = await fd.read(n8r);
zkey.Qm = await readG1(fd, curve, toObject);
zkey.Ql = await readG1(fd, curve, toObject);
zkey.Qr = await readG1(fd, curve, toObject);
zkey.Qo = await readG1(fd, curve, toObject);
zkey.Qc = await readG1(fd, curve, toObject);
zkey.S1 = await readG1(fd, curve, toObject);
zkey.S2 = await readG1(fd, curve, toObject);
zkey.S3 = await readG1(fd, curve, toObject);
zkey.X_2 = await readG2(fd, curve, toObject);
await binFileUtils.endReadSection(fd);
return zkey;
}
export async function readZKey(fileName, toObject) {
const {fd, sections} = await binFileUtils.readBinFile(fileName, "zkey", 1);
const zkey = await readHeader(fd, sections, "groth16", toObject);
const Fr = new F1Field(zkey.r);
const Rr = Scalar.mod(Scalar.shl(1, zkey.n8r*8), zkey.r);
const Rri = Fr.inv(Rr);
const Rri2 = Fr.mul(Rri, Rri);
let curve = await getCurve(zkey.q);
// Read IC Section
///////////
await binFileUtils.startReadUniqueSection(fd, sections, 3);
zkey.IC = [];
for (let i=0; i<= zkey.nPublic; i++) {
const P = await readG1(fd, curve, toObject);
zkey.IC.push(P);
}
await binFileUtils.endReadSection(fd);
// Read Coefs
///////////
await binFileUtils.startReadUniqueSection(fd, sections, 4);
const nCCoefs = await fd.readULE32();
zkey.ccoefs = [];
for (let i=0; i<nCCoefs; i++) {
const m = await fd.readULE32();
const c = await fd.readULE32();
const s = await fd.readULE32();
const v = await readFr2(toObject);
zkey.ccoefs.push({
matrix: m,
constraint: c,
signal: s,
value: v
});
}
await binFileUtils.endReadSection(fd);
// Read A points
///////////
await binFileUtils.startReadUniqueSection(fd, sections, 5);
zkey.A = [];
for (let i=0; i<zkey.nVars; i++) {
const A = await readG1(fd, curve, toObject);
zkey.A[i] = A;
}
await binFileUtils.endReadSection(fd);
// Read B1
///////////
await binFileUtils.startReadUniqueSection(fd, sections, 6);
zkey.B1 = [];
for (let i=0; i<zkey.nVars; i++) {
const B1 = await readG1(fd, curve, toObject);
zkey.B1[i] = B1;
}
await binFileUtils.endReadSection(fd);
// Read B2 points
///////////
await binFileUtils.startReadUniqueSection(fd, sections, 7);
zkey.B2 = [];
for (let i=0; i<zkey.nVars; i++) {
const B2 = await readG2(fd, curve, toObject);
zkey.B2[i] = B2;
}
await binFileUtils.endReadSection(fd);
// Read C points
///////////
await binFileUtils.startReadUniqueSection(fd, sections, 8);
zkey.C = [];
for (let i=zkey.nPublic+1; i<zkey.nVars; i++) {
const C = await readG1(fd, curve, toObject);
zkey.C[i] = C;
}
await binFileUtils.endReadSection(fd);
// Read H points
///////////
await binFileUtils.startReadUniqueSection(fd, sections, 9);
zkey.hExps = [];
for (let i=0; i<zkey.domainSize; i++) {
const H = await readG1(fd, curve, toObject);
zkey.hExps.push(H);
}
await binFileUtils.endReadSection(fd);
await fd.close();
return zkey;
async function readFr2(/* toObject */) {
const n = await binFileUtils.readBigInt(fd, zkey.n8r);
return Fr.mul(n, Rri2);
}
}
async function readContribution(fd, curve, toObject) {
const c = {delta:{}};
c.deltaAfter = await readG1(fd, curve, toObject);
c.delta.g1_s = await readG1(fd, curve, toObject);
c.delta.g1_sx = await readG1(fd, curve, toObject);
c.delta.g2_spx = await readG2(fd, curve, toObject);
c.transcript = await fd.read(64);
c.type = await fd.readULE32();
const paramLength = await fd.readULE32();
const curPos = fd.pos;
let lastType =0;
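// Extra contribution data is encoded as a sorted list of typed parameters:
// 1 = name (length byte + UTF-8 bytes), 2 = numIterationsExp (one byte),
// 3 = beaconHash (length byte + hash bytes). writeContribution below mirrors this layout.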
while (fd.pos-curPos < paramLength) {
const buffType = await fd.read(1);
if (buffType[0]<= lastType) throw new Error("Parameters in the contribution must be sorted");
lastType = buffType[0];
if (buffType[0]==1) { // Name
const buffLen = await fd.read(1);
const buffStr = await fd.read(buffLen[0]);
c.name = new TextDecoder().decode(buffStr);
} else if (buffType[0]==2) {
const buffExp = await fd.read(1);
c.numIterationsExp = buffExp[0];
} else if (buffType[0]==3) {
const buffLen = await fd.read(1);
c.beaconHash = await fd.read(buffLen[0]);
} else {
throw new Error("Parameter not recognized");
}
}
if (fd.pos != curPos + paramLength) {
throw new Error("Parametes do not match");
}
return c;
}
export async function readMPCParams(fd, curve, sections) {
await binFileUtils.startReadUniqueSection(fd, sections, 10);
const res = { contributions: []};
res.csHash = await fd.read(64);
const n = await fd.readULE32();
for (let i=0; i<n; i++) {
const c = await readContribution(fd, curve);
res.contributions.push(c);
}
await binFileUtils.endReadSection(fd);
return res;
}
async function writeContribution(fd, curve, c) {
await writeG1(fd, curve, c.deltaAfter);
await writeG1(fd, curve, c.delta.g1_s);
await writeG1(fd, curve, c.delta.g1_sx);
await writeG2(fd, curve, c.delta.g2_spx);
await fd.write(c.transcript);
await fd.writeULE32(c.type || 0);
const params = [];
if (c.name) {
params.push(1); // Param Name
const nameData = new TextEncoder("utf-8").encode(c.name.substring(0,64));
params.push(nameData.byteLength);
for (let i=0; i<nameData.byteLength; i++) params.push(nameData[i]);
}
if (c.type == 1) {
params.push(2); // Param numIterationsExp
params.push(c.numIterationsExp);
params.push(3); // Beacon Hash
params.push(c.beaconHash.byteLength);
for (let i=0; i<c.beaconHash.byteLength; i++) params.push(c.beaconHash[i]);
}
if (params.length>0) {
const paramsBuff = new Uint8Array(params);
await fd.writeULE32(paramsBuff.byteLength);
await fd.write(paramsBuff);
} else {
await fd.writeULE32(0);
}
}
export async function writeMPCParams(fd, curve, mpcParams) {
await binFileUtils.startWriteSection(fd, 10);
await fd.write(mpcParams.csHash);
await fd.writeULE32(mpcParams.contributions.length);
for (let i=0; i<mpcParams.contributions.length; i++) {
await writeContribution(fd, curve,mpcParams.contributions[i]);
}
await binFileUtils.endWriteSection(fd);
}
export function hashG1(hasher, curve, p) {
const buff = new Uint8Array(curve.G1.F.n8*2);
curve.G1.toRprUncompressed(buff, 0, p);
hasher.update(buff);
}
export function hashG2(hasher,curve, p) {
const buff = new Uint8Array(curve.G2.F.n8*2);
curve.G2.toRprUncompressed(buff, 0, p);
hasher.update(buff);
}
export function hashPubKey(hasher, curve, c) {
hashG1(hasher, curve, c.deltaAfter);
hashG1(hasher, curve, c.delta.g1_s);
hashG1(hasher, curve, c.delta.g1_sx);
hashG2(hasher, curve, c.delta.g2_spx);
hasher.update(c.transcript);
}

View File

@ -1,423 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import * as binFileUtils from "@iden3/binfileutils";
import * as zkeyUtils from "./zkey_utils.js";
import { getCurveFromQ as getCurve } from "./curves.js";
import Blake2b from "blake2b-wasm";
import * as misc from "./misc.js";
import { hashToG2 as hashToG2 } from "./keypair.js";
const sameRatio = misc.sameRatio;
import crypto from "crypto";
import {hashG1, hashPubKey} from "./zkey_utils.js";
import { Scalar, ChaCha, BigBuffer } from "ffjavascript";
export default async function phase2verifyFromInit(initFileName, pTauFileName, zkeyFileName, logger) {
let sr;
await Blake2b.ready();
const {fd, sections} = await binFileUtils.readBinFile(zkeyFileName, "zkey", 2);
const zkey = await zkeyUtils.readHeader(fd, sections, false);
if (zkey.protocol != "groth16") {
throw new Error("zkey file is not groth16");
}
const curve = await getCurve(zkey.q);
const sG1 = curve.G1.F.n8*2;
const mpcParams = await zkeyUtils.readMPCParams(fd, curve, sections);
const accumulatedHasher = Blake2b(64);
accumulatedHasher.update(mpcParams.csHash);
let curDelta = curve.G1.g;
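// Walk the contribution chain: each transcript must equal the hash of everything before it plus
// its own g1_s/g1_sx, the pair (g1_s, g1_sx) must be in the same ratio as (g2_sp, g2_spx) (a
// proof of knowledge of the contributed delta), and deltaAfter must be curDelta scaled by that
// delta. curDelta then advances, so the chain has to end at the zkey's final vk_delta_1.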
for (let i=0; i<mpcParams.contributions.length; i++) {
const c = mpcParams.contributions[i];
const ourHasher = misc.cloneHasher(accumulatedHasher);
hashG1(ourHasher, curve, c.delta.g1_s);
hashG1(ourHasher, curve, c.delta.g1_sx);
if (!misc.hashIsEqual(ourHasher.digest(), c.transcript)) {
console.log(`INVALID(${i}): Inconsistent transcript `);
return false;
}
const delta_g2_sp = hashToG2(curve, c.transcript);
sr = await sameRatio(curve, c.delta.g1_s, c.delta.g1_sx, delta_g2_sp, c.delta.g2_spx);
if (sr !== true) {
console.log(`INVALID(${i}): public key G1 and G2 do not have the same ratio`);
return false;
}
sr = await sameRatio(curve, curDelta, c.deltaAfter, delta_g2_sp, c.delta.g2_spx);
if (sr !== true) {
console.log(`INVALID(${i}): deltaAfter does not follow the public key`);
return false;
}
if (c.type == 1) {
const rng = misc.rngFromBeaconParams(c.beaconHash, c.numIterationsExp);
const expected_prvKey = curve.Fr.fromRng(rng);
const expected_g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
const expected_g1_sx = curve.G1.toAffine(curve.G1.timesFr(expected_g1_s, expected_prvKey));
if (curve.G1.eq(expected_g1_s, c.delta.g1_s) !== true) {
console.log(`INVALID(${i}): Key of the beacon does not match. g1_s `);
return false;
}
if (curve.G1.eq(expected_g1_sx, c.delta.g1_sx) !== true) {
console.log(`INVALID(${i}): Key of the beacon does not match. g1_sx `);
return false;
}
}
hashPubKey(accumulatedHasher, curve, c);
const contributionHasher = Blake2b(64);
hashPubKey(contributionHasher, curve, c);
c.contributionHash = contributionHasher.digest();
curDelta = c.deltaAfter;
}
const {fd: fdInit, sections: sectionsInit} = await binFileUtils.readBinFile(initFileName, "zkey", 2);
const zkeyInit = await zkeyUtils.readHeader(fdInit, sectionsInit, false);
if (zkeyInit.protocol != "groth16") {
throw new Error("zkeyinit file is not groth16");
}
if ( (!Scalar.eq(zkeyInit.q, zkey.q))
||(!Scalar.eq(zkeyInit.r, zkey.r))
||(zkeyInit.n8q != zkey.n8q)
||(zkeyInit.n8r != zkey.n8r))
{
if (logger) logger.error("INVALID: Different curves");
return false;
}
if ( (zkeyInit.nVars != zkey.nVars)
||(zkeyInit.nPublic != zkey.nPublic)
||(zkeyInit.domainSize != zkey.domainSize))
{
if (logger) logger.error("INVALID: Different circuit parameters");
return false;
}
if (!curve.G1.eq(zkey.vk_alpha_1, zkeyInit.vk_alpha_1)) {
if (logger) logger.error("INVALID: Invalid alpha1");
return false;
}
if (!curve.G1.eq(zkey.vk_beta_1, zkeyInit.vk_beta_1)) {
if (logger) logger.error("INVALID: Invalid beta1");
return false;
}
if (!curve.G2.eq(zkey.vk_beta_2, zkeyInit.vk_beta_2)) {
if (logger) logger.error("INVALID: Invalid beta2");
return false;
}
if (!curve.G2.eq(zkey.vk_gamma_2, zkeyInit.vk_gamma_2)) {
if (logger) logger.error("INVALID: Invalid gamma2");
return false;
}
if (!curve.G1.eq(zkey.vk_delta_1, curDelta)) {
if (logger) logger.error("INVALID: Invalid delta1");
return false;
}
sr = await sameRatio(curve, curve.G1.g, curDelta, curve.G2.g, zkey.vk_delta_2);
if (sr !== true) {
if (logger) logger.error("INVALID: Invalid delta2");
return false;
}
const mpcParamsInit = await zkeyUtils.readMPCParams(fdInit, curve, sectionsInit);
if (!misc.hashIsEqual(mpcParams.csHash, mpcParamsInit.csHash)) {
if (logger) logger.error("INVALID: Circuit does not match");
return false;
}
// Check sizes of sections
if (sections[8][0].size != sG1*(zkey.nVars-zkey.nPublic-1)) {
if (logger) logger.error("INVALID: Invalid L section size");
return false;
}
if (sections[9][0].size != sG1*(zkey.domainSize)) {
if (logger) logger.error("INVALID: Invalid H section size");
return false;
}
let ss;
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 3);
if (!ss) {
if (logger) logger.error("INVALID: IC section is not identical");
return false;
}
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 4);
if (!ss) {
if (logger) logger.error("Coeffs section is not identical");
return false;
}
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 5);
if (!ss) {
if (logger) logger.error("A section is not identical");
return false;
}
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 6);
if (!ss) {
if (logger) logger.error("B1 section is not identical");
return false;
}
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 7);
if (!ss) {
if (logger) logger.error("B2 section is not identical");
return false;
}
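// sectionHasSameRatio (below) draws random short scalars, computes one multi-exponentiation over
// the init zkey's section and one over the final zkey's, and uses a pairing check to confirm the
// final section equals the init section scaled by the inverse of the accumulated delta.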
// Check L
sr = await sectionHasSameRatio("G1", fdInit, sectionsInit, fd, sections, 8, zkey.vk_delta_2, zkeyInit.vk_delta_2, "L section");
if (sr!==true) {
if (logger) logger.error("L section does not match");
return false;
}
// Check H
sr = await sameRatioH();
if (sr!==true) {
if (logger) logger.error("H section does not match");
return false;
}
if (logger) logger.info(misc.formatHash(mpcParams.csHash, "Circuit Hash: "));
await fd.close();
await fdInit.close();
for (let i=mpcParams.contributions.length-1; i>=0; i--) {
const c = mpcParams.contributions[i];
if (logger) logger.info("-------------------------");
if (logger) logger.info(misc.formatHash(c.contributionHash, `contribution #${i+1} ${c.name ? c.name : ""}:`));
if (c.type == 1) {
if (logger) logger.info(`Beacon generator: ${misc.byteArray2hex(c.beaconHash)}`);
if (logger) logger.info(`Beacon iterations Exp: ${c.numIterationsExp}`);
}
}
if (logger) logger.info("-------------------------");
if (logger) logger.info("ZKey Ok!");
return true;
async function sectionHasSameRatio(groupName, fd1, sections1, fd2, sections2, idSection, g2sp, g2spx, sectionName) {
const MAX_CHUNK_SIZE = 1<<20;
const G = curve[groupName];
const sG = G.F.n8*2;
await binFileUtils.startReadUniqueSection(fd1, sections1, idSection);
await binFileUtils.startReadUniqueSection(fd2, sections2, idSection);
let R1 = G.zero;
let R2 = G.zero;
const nPoints = sections1[idSection][0].size / sG;
for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
if (logger) logger.debug(`Same ratio check ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
const bases1 = await fd1.read(n*sG);
const bases2 = await fd2.read(n*sG);
const scalars = new Uint8Array(4*n);
crypto.randomFillSync(scalars);
const r1 = await G.multiExpAffine(bases1, scalars);
const r2 = await G.multiExpAffine(bases2, scalars);
R1 = G.add(R1, r1);
R2 = G.add(R2, r2);
}
await binFileUtils.endReadSection(fd1);
await binFileUtils.endReadSection(fd2);
if (nPoints == 0) return true;
sr = await sameRatio(curve, R1, R2, g2sp, g2spx);
if (sr !== true) return false;
return true;
}
async function sameRatioH() {
const MAX_CHUNK_SIZE = 1<<20;
const G = curve.G1;
const Fr = curve.Fr;
const sG = G.F.n8*2;
const {fd: fdPTau, sections: sectionsPTau} = await binFileUtils.readBinFile(pTauFileName, "ptau", 1);
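// Rebuild the expected H commitment from the raw tau powers in the ptau (tau^(m+i) - tau^i,
// that is, tau^i*Z(tau)) combined with random scalars r_i, then apply the same shift/FFT that
// zkey_new applies to the points to the scalar vector instead; the result must match the zkey's
// H section up to the delta ratio, enforced by the final sameRatio pairing check.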
let buff_r = new BigBuffer(zkey.domainSize * zkey.n8r);
const seed= new Array(8);
for (let i=0; i<8; i++) {
seed[i] = crypto.randomBytes(4).readUInt32BE(0);
}
const rng = new ChaCha(seed);
for (let i=0; i<zkey.domainSize-1; i++) { // Note that last one is zero
const e = Fr.fromRng(rng);
Fr.toRprLE(buff_r, i*zkey.n8r, e);
}
Fr.toRprLE(buff_r, (zkey.domainSize-1)*zkey.n8r, Fr.zero);
let R1 = G.zero;
for (let i=0; i<zkey.domainSize; i += MAX_CHUNK_SIZE) {
if (logger) logger.debug(`H Verification (tau): ${i}/${zkey.domainSize}`);
const n = Math.min(zkey.domainSize - i, MAX_CHUNK_SIZE);
const buff1 = await fdPTau.read(sG*n, sectionsPTau[2][0].p + zkey.domainSize*sG + i*sG);
const buff2 = await fdPTau.read(sG*n, sectionsPTau[2][0].p + i*sG);
const buffB = await batchSubstract(buff1, buff2);
const buffS = buff_r.slice(i*zkey.n8r, (i+n)*zkey.n8r);
const r = await G.multiExpAffine(buffB, buffS);
R1 = G.add(R1, r);
}
// Calculate the odd coefficients in the transformed domain
buff_r = await Fr.batchToMontgomery(buff_r);
// const first = curve.Fr.neg(curve.Fr.inv(curve.Fr.e(2)));
// Works*2 const first = curve.Fr.neg(curve.Fr.e(2));
let first;
if (zkey.power < Fr.s) {
first = Fr.neg(Fr.e(2));
} else {
const small_m = 2 ** Fr.s;
const shift_to_small_m = Fr.exp(Fr.shift, small_m);
first = Fr.sub( shift_to_small_m, Fr.one);
}
// const inc = curve.Fr.inv(curve.PFr.w[zkey.power+1]);
const inc = zkey.power < Fr.s ? Fr.w[zkey.power+1] : Fr.shift;
buff_r = await Fr.batchApplyKey(buff_r, first, inc);
buff_r = await Fr.fft(buff_r);
buff_r = await Fr.batchFromMontgomery(buff_r);
await binFileUtils.startReadUniqueSection(fd, sections, 9);
let R2 = G.zero;
for (let i=0; i<zkey.domainSize; i += MAX_CHUNK_SIZE) {
if (logger) logger.debug(`H Verification (lagrange): ${i}/${zkey.domainSize}`);
const n = Math.min(zkey.domainSize - i, MAX_CHUNK_SIZE);
const buff = await fd.read(sG*n);
const buffS = buff_r.slice(i*zkey.n8r, (i+n)*zkey.n8r);
const r = await G.multiExpAffine(buff, buffS);
R2 = G.add(R2, r);
}
await binFileUtils.endReadSection(fd);
sr = await sameRatio(curve, R1, R2, zkey.vk_delta_2, zkeyInit.vk_delta_2);
if (sr !== true) return false;
return true;
}
async function batchSubstract(buff1, buff2) {
const sG = curve.G1.F.n8*2;
const nPoints = buff1.byteLength / sG;
const concurrency= curve.tm.concurrency;
const nPointsPerThread = Math.floor(nPoints / concurrency);
const opPromises = [];
for (let i=0; i<concurrency; i++) {
let n;
if (i< concurrency-1) {
n = nPointsPerThread;
} else {
n = nPoints - i*nPointsPerThread;
}
if (n==0) continue;
const subBuff1 = buff1.slice(i*nPointsPerThread*sG1, (i*nPointsPerThread+n)*sG1);
const subBuff2 = buff2.slice(i*nPointsPerThread*sG1, (i*nPointsPerThread+n)*sG1);
opPromises.push(batchSubstractThread(subBuff1, subBuff2));
}
const result = await Promise.all(opPromises);
const fullBuffOut = new Uint8Array(nPoints*sG);
let p =0;
for (let i=0; i<result.length; i++) {
fullBuffOut.set(result[i][0], p);
p+=result[i][0].byteLength;
}
return fullBuffOut;
}
async function batchSubstractThread(buff1, buff2) {
const sG1 = curve.G1.F.n8*2;
const sGmid = curve.G1.F.n8*3;
const nPoints = buff1.byteLength/sG1;
const task = [];
task.push({cmd: "ALLOCSET", var: 0, buff: buff1});
task.push({cmd: "ALLOCSET", var: 1, buff: buff2});
task.push({cmd: "ALLOC", var: 2, len: nPoints*sGmid});
for (let i=0; i<nPoints; i++) {
task.push({
cmd: "CALL",
fnName: "g1m_subAffine",
params: [
{var: 0, offset: i*sG1},
{var: 1, offset: i*sG1},
{var: 2, offset: i*sGmid},
]
});
}
task.push({cmd: "CALL", fnName: "g1m_batchToAffine", params: [
{var: 2},
{val: nPoints},
{var: 2},
]});
task.push({cmd: "GET", out: 0, var: 2, len: nPoints*sG1});
const res = await curve.tm.queueAction(task);
return res;
}
}

View File

@ -1,31 +0,0 @@
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
import newZKey from "./zkey_new.js";
import phase2verifyFromInit from "./zkey_verify_frominit.js";
export default async function phase2verifyFromR1cs(r1csFileName, pTauFileName, zkeyFileName, logger) {
// const initFileName = "~" + zkeyFileName + ".init";
const initFileName = {type: "bigMem"};
await newZKey(r1csFileName, pTauFileName, initFileName, logger);
return await phase2verifyFromInit(initFileName, pTauFileName, zkeyFileName, logger);
}

Some files were not shown because too many files have changed in this diff.