Mirror of https://github.com/tornadocash/snarkjs.git (synced 2024-11-01 07:45:43 +01:00)
deps
parent f26aa45946
commit 86ee180a81
build/cli.cjs (198 changed lines)
@@ -13,6 +13,85 @@ var crypto = _interopDefault(require('crypto'));
 var circomRuntime = _interopDefault(require('circom_runtime'));
 var Logger = _interopDefault(require('logplease'));
 
+const SUBARRAY_SIZE = 0x40000;
+
+const BigArrayHandler = {
+    get: function(obj, prop) {
+        if (!isNaN(prop)) {
+            return obj.getElement(prop);
+        } else return obj[prop];
+    },
+    set: function(obj, prop, value) {
+        if (!isNaN(prop)) {
+            return obj.setElement(prop, value);
+        } else {
+            obj[prop] = value;
+            return true;
+        }
+    }
+};
+
+class _BigArray {
+    constructor (initSize) {
+        this.length = initSize || 0;
+        this.arr = new Array(SUBARRAY_SIZE);
+
+        for (let i=0; i<initSize; i+=SUBARRAY_SIZE) {
+            this.arr[i/SUBARRAY_SIZE] = new Array(Math.min(SUBARRAY_SIZE, initSize - i));
+        }
+        return this;
+    }
+    push () {
+        for (let i=0; i<arguments.length; i++) {
+            this.setElement (this.length, arguments[i]);
+        }
+    }
+
+    slice (f, t) {
+        const arr = new Array(t-f);
+        for (let i=f; i< t; i++) arr[i-f] = this.getElement(i);
+        return arr;
+    }
+    getElement(idx) {
+        idx = parseInt(idx);
+        const idx1 = Math.floor(idx / SUBARRAY_SIZE);
+        const idx2 = idx % SUBARRAY_SIZE;
+        return this.arr[idx1] ? this.arr[idx1][idx2] : undefined;
+    }
+    setElement(idx, value) {
+        idx = parseInt(idx);
+        const idx1 = Math.floor(idx / SUBARRAY_SIZE);
+        if (!this.arr[idx1]) {
+            this.arr[idx1] = new Array(SUBARRAY_SIZE);
+        }
+        const idx2 = idx % SUBARRAY_SIZE;
+        this.arr[idx1][idx2] = value;
+        if (idx >= this.length) this.length = idx+1;
+        return true;
+    }
+    getKeys() {
+        const newA = new BigArray();
+        for (let i=0; i<this.arr.length; i++) {
+            if (this.arr[i]) {
+                for (let j=0; j<this.arr[i].length; j++) {
+                    if (typeof this.arr[i][j] !== "undefined") {
+                        newA.push(i*SUBARRAY_SIZE+j);
+                    }
+                }
+            }
+        }
+        return newA;
+    }
+}
+
+class BigArray {
+    constructor( initSize ) {
+        const obj = new _BigArray(initSize);
+        const extObj = new Proxy(obj, BigArrayHandler);
+        return extObj;
+    }
+}
+
 async function open(fileName, openFlags, cacheSize, pageSize) {
     cacheSize = cacheSize || 4096*64;
     if (["w+", "wx+", "r", "ax+", "a+"].indexOf(openFlags) <0)
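Note on the block added above: it implements a sparse, chunked array behind a Proxy, so huge constraint/witness sets can be indexed like a plain array without allocating one giant contiguous one. A minimal standalone sketch of the same pattern (identifiers mirror the diff except where marked hypothetical; the demo values are illustrative, not part of the commit):

    const SUBARRAY_SIZE = 0x40000; // 262144 slots per chunk, as in the diff

    class _SparseChunked {  // hypothetical name; the diff calls this _BigArray
        constructor() { this.length = 0; this.arr = []; }
        getElement(idx) {
            idx = parseInt(idx);
            const c = Math.floor(idx / SUBARRAY_SIZE);
            return this.arr[c] ? this.arr[c][idx % SUBARRAY_SIZE] : undefined;
        }
        setElement(idx, value) {
            idx = parseInt(idx);
            const c = Math.floor(idx / SUBARRAY_SIZE);
            if (!this.arr[c]) this.arr[c] = new Array(SUBARRAY_SIZE); // allocate chunks lazily
            this.arr[c][idx % SUBARRAY_SIZE] = value;
            if (idx >= this.length) this.length = idx + 1;
            return true;
        }
    }

    // Numeric property names route to the chunked storage; everything else
    // (length, method lookups) falls through to the target object.
    const handler = {
        get: (obj, prop) => !isNaN(prop) ? obj.getElement(prop) : obj[prop],
        set: (obj, prop, value) => !isNaN(prop) ? obj.setElement(prop, value) : ((obj[prop] = value), true)
    };

    const a = new Proxy(new _SparseChunked(), handler);
    a[1000000] = 42;                   // touches only the chunk holding index 1000000
    console.log(a[1000000], a.length); // 42 1000001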
@@ -827,88 +906,9 @@ async function readExisting$2(o, b, c) {
     }
 }
 
-const SUBARRAY_SIZE = 0x40000;
-
-const BigArrayHandler = {
-    get: function(obj, prop) {
-        if (!isNaN(prop)) {
-            return obj.getElement(prop);
-        } else return obj[prop];
-    },
-    set: function(obj, prop, value) {
-        if (!isNaN(prop)) {
-            return obj.setElement(prop, value);
-        } else {
-            obj[prop] = value;
-            return true;
-        }
-    }
-};
-
-class _BigArray {
-    constructor (initSize) {
-        this.length = initSize || 0;
-        this.arr = new Array(SUBARRAY_SIZE);
-
-        for (let i=0; i<initSize; i+=SUBARRAY_SIZE) {
-            this.arr[i/SUBARRAY_SIZE] = new Array(Math.min(SUBARRAY_SIZE, initSize - i));
-        }
-        return this;
-    }
-    push () {
-        for (let i=0; i<arguments.length; i++) {
-            this.setElement (this.length, arguments[i]);
-        }
-    }
-
-    slice (f, t) {
-        const arr = new Array(t-f);
-        for (let i=f; i< t; i++) arr[i-f] = this.getElement(i);
-        return arr;
-    }
-    getElement(idx) {
-        idx = parseInt(idx);
-        const idx1 = Math.floor(idx / SUBARRAY_SIZE);
-        const idx2 = idx % SUBARRAY_SIZE;
-        return this.arr[idx1] ? this.arr[idx1][idx2] : undefined;
-    }
-    setElement(idx, value) {
-        idx = parseInt(idx);
-        const idx1 = Math.floor(idx / SUBARRAY_SIZE);
-        if (!this.arr[idx1]) {
-            this.arr[idx1] = new Array(SUBARRAY_SIZE);
-        }
-        const idx2 = idx % SUBARRAY_SIZE;
-        this.arr[idx1][idx2] = value;
-        if (idx >= this.length) this.length = idx+1;
-        return true;
-    }
-    getKeys() {
-        const newA = new BigArray();
-        for (let i=0; i<this.arr.length; i++) {
-            if (this.arr[i]) {
-                for (let j=0; j<this.arr[i].length; j++) {
-                    if (typeof this.arr[i][j] !== "undefined") {
-                        newA.push(i*SUBARRAY_SIZE+j);
-                    }
-                }
-            }
-        }
-        return newA;
-    }
-}
-
-class BigArray {
-    constructor( initSize ) {
-        const obj = new _BigArray(initSize);
-        const extObj = new Proxy(obj, BigArrayHandler);
-        return extObj;
-    }
-}
-
-async function readBinFile(fileName, type, maxVersion) {
+async function readBinFile(fileName, type, maxVersion, cacheSize, pageSize) {
 
-    const fd = await readExisting$2(fileName, 1<<27, 1<<29);
+    const fd = await readExisting$2(fileName, cacheSize, pageSize);
 
     const b = await fd.read(4);
     let readedType = "";
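Note: the hunk above is the flip side of the block added at +13 — the duplicated BigArray definitions are deleted here, and readBinFile now takes the fastfile cache and page sizes as parameters instead of hard-coding 1<<27 and 1<<29 (128 MiB and 512 MiB, assuming byte units). A hedged illustration of the new call shape (file name is a placeholder; values are the ones readR1cs passes later in this diff):

    // Old behaviour: every caller paid for a 1<<27 cache and 1<<29 page limit.
    // New behaviour: the caller decides; readR1cs below opts for 4 MiB / 32 MiB.
    const {fd, sections} = await readBinFile("circuit.r1cs", "r1cs", 1, 1<<22, 1<<25);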
@@ -939,8 +939,7 @@ async function readBinFile(fileName, type, maxVersion) {
 }
 
 async function startReadUniqueSection(fd, sections, idSection) {
-    if (typeof fd.readingSection != "undefined")
-        throw new Error("Already reading a section");
+    if (typeof fd.readingSection !== "undefined") throw new Error("Already reading a section");
     if (!sections[idSection]) throw new Error(fd.fileName + ": Missing section "+ idSection );
     if (sections[idSection].length>1) throw new Error(fd.fileName +": Section Duplicated " +idSection);
 
@@ -950,22 +949,19 @@ async function startReadUniqueSection(fd, sections, idSection) {
 }
 
 async function endReadSection(fd, noCheck) {
-    if (typeof fd.readingSection == "undefined")
-        throw new Error("Not reading a section");
+    if (typeof fd.readingSection === "undefined") throw new Error("Not reading a section");
     if (!noCheck) {
-        if (fd.pos-fd.readingSection.p != fd.readingSection.size)
-            throw new Error("Invalid section size");
+        if (fd.pos-fd.readingSection.p != fd.readingSection.size) throw new Error("Invalid section size reading");
     }
     delete fd.readingSection;
 }
 
 
 async function readBigInt(fd, n8, pos) {
     const buff = await fd.read(n8, pos);
     return ffjavascript.Scalar.fromRprLE(buff, 0, n8);
 }
 
-async function loadHeader(fd,sections) {
+async function readR1csHeader(fd,sections) {
 
 
     const res = {};
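Note on the section guards above: `fd.readingSection` acts as a lock so nested section reads fail fast, and `endReadSection` verifies that the cursor consumed exactly the declared section size. A hedged sketch of the intended call pattern (the section id and the reads are illustrative, modeled on readR1csHeader below):

    await startReadUniqueSection(fd, sections, 1); // throws if missing, duplicated, or already reading
    const n8 = await fd.readULE32();               // header fields, as in readR1csHeader
    const prime = await readBigInt(fd, n8);
    await endReadSection(fd);                      // throws "Invalid section size reading" on a mismatch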
@@ -973,7 +969,8 @@ async function loadHeader(fd,sections) {
     // Read Header
     res.n8 = await fd.readULE32();
     res.prime = await readBigInt(fd, res.n8);
-    res.Fr = new ffjavascript.ZqField(res.prime);
+
+    res.curve = await ffjavascript.getCurveFromR(res.prime, true);
 
     res.nVars = await fd.readULE32();
     res.nOutputs = await fd.readULE32();
@@ -986,10 +983,10 @@ async function loadHeader(fd,sections) {
     return res;
 }
 
-async function load(fileName, loadConstraints, loadMap, logger) {
+async function readR1cs(fileName, loadConstraints, loadMap, logger, loggerCtx) {
 
-    const {fd, sections} = await readBinFile(fileName, "r1cs", 1);
-    const res = await loadHeader(fd, sections);
+    const {fd, sections} = await readBinFile(fileName, "r1cs", 1, 1<<22, 1<<25);
+    const res = await readR1csHeader(fd, sections);
 
 
     if (loadConstraints) {
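Note: `load` is renamed to `readR1cs`, gains a `loggerCtx` prefix for its progress lines, and passes explicit cache/page sizes (1<<22, 1<<25) to readBinFile. A hedged example of calling the renamed API (the file name and logger setup are illustrative; run inside an async context):

    import {readR1cs} from "r1csfile";
    import Logger from "logplease";

    const logger = Logger.create("snarkJS");
    // loadConstraints = true, loadMap = true; "r1cs" prefixes the progress messages.
    const cir = await readR1cs("circuit.r1cs", true, true, logger, "r1cs");
    console.log(cir.nVars, cir.nConstraints);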
@@ -1000,7 +997,7 @@ async function load(fileName, loadConstraints, loadMap, logger) {
         res.constraints = [];
     }
     for (let i=0; i<res.nConstraints; i++) {
-        if ((logger)&&(i%10000 == 0)) logger.info(`Loading constraints: ${i}/${res.nConstraints}`);
+        if ((logger)&&(i%100000 == 0)) logger.info(`${loggerCtx}: Loading constraints: ${i}/${res.nConstraints}`);
         const c = await readConstraint();
         res.constraints.push(c);
     }
@@ -1017,6 +1014,7 @@ async function load(fileName, loadConstraints, loadMap, logger) {
         res.map = [];
     }
     for (let i=0; i<res.nVars; i++) {
+        if ((logger)&&(i%10000 == 0)) logger.info(`${loggerCtx}: Loading map: ${i}/${res.nVars}`);
         const idx = await fd.readULE64();
         res.map.push(idx);
     }
@@ -1042,7 +1040,7 @@ async function load(fileName, loadConstraints, loadMap, logger) {
     const buffV = new DataView(buff.buffer);
     for (let i=0; i<nIdx; i++) {
         const idx = buffV.getUint32(i*(4+res.n8), true);
-        const val = res.Fr.fromRprLE(buff, i*(4+res.n8)+4);
+        const val = res.curve.Fr.fromRprLE(buff, i*(4+res.n8)+4);
         lc[idx] = val;
     }
     return lc;
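Note: the value decode now goes through `res.curve.Fr`, matching the header change from a bare ZqField to a curve resolved via getCurveFromR. Each linear-combination term is a 4-byte little-endian signal index followed by an n8-byte field element; a commented restatement of that record layout (names as in the hunk above):

    const recSize = 4 + res.n8;                                  // [idx: uint32 LE][value: n8 bytes LE]
    const buffV = new DataView(buff.buffer);
    for (let i=0; i<nIdx; i++) {
        const idx = buffV.getUint32(i*recSize, true);            // true = little-endian
        const val = res.curve.Fr.fromRprLE(buff, i*recSize + 4); // field element follows the index
        lc[idx] = val;
    }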
@@ -1116,7 +1114,7 @@ const bn128r = ffjavascript.Scalar.e("218882428718392752222464057452572750885483
 
 async function r1csInfo(r1csName, logger) {
 
-    const cir = await load(r1csName);
+    const cir = await readR1cs(r1csName);
 
     if (ffjavascript.Scalar.eq(cir.prime, bn128r)) {
         if (logger) logger.info("Curve: bn-128");
@@ -1136,7 +1134,7 @@ async function r1csInfo(r1csName, logger) {
 
 async function r1csExportJson(r1csFileName, logger) {
 
-    const cir = await load(r1csFileName, true, true);
+    const cir = await readR1cs(r1csFileName, true, true);
 
     return cir;
 }
@@ -3878,7 +3876,7 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
     const csHasher = Blake2b(64);
 
     const {fd: fdR1cs, sections: sectionsR1cs} = await readBinFile$1(r1csName, "r1cs", 1);
-    const r1cs = await loadHeader(fdR1cs, sectionsR1cs);
+    const r1cs = await readR1csHeader(fdR1cs, sectionsR1cs);
 
     const {fd: fdPTau, sections: sectionsPTau} = await readBinFile$1(ptauName, "ptau", 1);
     const {curve, power} = await readPTauHeader(fdPTau, sectionsPTau);
@@ -6635,7 +6633,7 @@ async function r1csPrint$1(params, options) {
 
     if (options.verbose) Logger.setLogLevel("DEBUG");
 
-    const cir = await load(r1csName, true, true);
+    const cir = await readR1cs(r1csName, true, true);
 
     const sym = await loadSymbols(symName);
 
@@ -6720,7 +6718,7 @@ async function zksnarkSetup(params, options) {
 
     const protocol = options.protocol || "groth16";
 
-    const cir = await loadR1cs(r1csName, true);
+    const cir = await readR1cs(r1csName, true);
 
     if (!zkSnark[protocol]) throw new Error("Invalid protocol");
     const setup = zkSnark[protocol].setup(cir, options.verbose);
@@ -4085,9 +4085,9 @@ class BigArray {
     }
 }
 
-async function readBinFile$1(fileName, type, maxVersion) {
+async function readBinFile$1(fileName, type, maxVersion, cacheSize, pageSize) {
 
-    const fd = await readExisting$2(fileName, 1<<27, 1<<29);
+    const fd = await readExisting$2(fileName, cacheSize, pageSize);
 
     const b = await fd.read(4);
     let readedType = "";
@@ -4118,8 +4118,7 @@ async function readBinFile$1(fileName, type, maxVersion) {
 }
 
 async function startReadUniqueSection$1(fd, sections, idSection) {
-    if (typeof fd.readingSection != "undefined")
-        throw new Error("Already reading a section");
+    if (typeof fd.readingSection !== "undefined") throw new Error("Already reading a section");
     if (!sections[idSection]) throw new Error(fd.fileName + ": Missing section "+ idSection );
     if (sections[idSection].length>1) throw new Error(fd.fileName +": Section Duplicated " +idSection);
 
@@ -4129,22 +4128,19 @@ async function startReadUniqueSection$1(fd, sections, idSection) {
 }
 
 async function endReadSection$1(fd, noCheck) {
-    if (typeof fd.readingSection == "undefined")
-        throw new Error("Not reading a section");
+    if (typeof fd.readingSection === "undefined") throw new Error("Not reading a section");
     if (!noCheck) {
-        if (fd.pos-fd.readingSection.p != fd.readingSection.size)
-            throw new Error("Invalid section size");
+        if (fd.pos-fd.readingSection.p != fd.readingSection.size) throw new Error("Invalid section size reading");
     }
     delete fd.readingSection;
 }
 
 
 async function readBigInt$1(fd, n8, pos) {
     const buff = await fd.read(n8, pos);
     return ffjavascript.Scalar.fromRprLE(buff, 0, n8);
 }
 
-async function loadHeader(fd,sections) {
+async function readR1csHeader(fd,sections) {
 
 
     const res = {};
@@ -4152,7 +4148,8 @@ async function loadHeader(fd,sections) {
     // Read Header
     res.n8 = await fd.readULE32();
    res.prime = await readBigInt$1(fd, res.n8);
-    res.Fr = new ffjavascript.ZqField(res.prime);
+
+    res.curve = await ffjavascript.getCurveFromR(res.prime, true);
 
     res.nVars = await fd.readULE32();
     res.nOutputs = await fd.readULE32();
@@ -4165,10 +4162,10 @@ async function loadHeader(fd,sections) {
     return res;
 }
 
-async function load(fileName, loadConstraints, loadMap, logger) {
+async function readR1cs(fileName, loadConstraints, loadMap, logger, loggerCtx) {
 
-    const {fd, sections} = await readBinFile$1(fileName, "r1cs", 1);
-    const res = await loadHeader(fd, sections);
+    const {fd, sections} = await readBinFile$1(fileName, "r1cs", 1, 1<<22, 1<<25);
+    const res = await readR1csHeader(fd, sections);
 
 
     if (loadConstraints) {
@@ -4179,7 +4176,7 @@ async function load(fileName, loadConstraints, loadMap, logger) {
         res.constraints = [];
     }
     for (let i=0; i<res.nConstraints; i++) {
-        if ((logger)&&(i%10000 == 0)) logger.info(`Loading constraints: ${i}/${res.nConstraints}`);
+        if ((logger)&&(i%100000 == 0)) logger.info(`${loggerCtx}: Loading constraints: ${i}/${res.nConstraints}`);
         const c = await readConstraint();
         res.constraints.push(c);
     }
@@ -4196,6 +4193,7 @@ async function load(fileName, loadConstraints, loadMap, logger) {
         res.map = [];
     }
     for (let i=0; i<res.nVars; i++) {
+        if ((logger)&&(i%10000 == 0)) logger.info(`${loggerCtx}: Loading map: ${i}/${res.nVars}`);
         const idx = await fd.readULE64();
         res.map.push(idx);
     }
@@ -4221,7 +4219,7 @@ async function load(fileName, loadConstraints, loadMap, logger) {
     const buffV = new DataView(buff.buffer);
     for (let i=0; i<nIdx; i++) {
         const idx = buffV.getUint32(i*(4+res.n8), true);
-        const val = res.Fr.fromRprLE(buff, i*(4+res.n8)+4);
+        const val = res.curve.Fr.fromRprLE(buff, i*(4+res.n8)+4);
         lc[idx] = val;
     }
     return lc;
@@ -4233,7 +4231,7 @@ const bn128r$1 = ffjavascript.Scalar.e("2188824287183927522224640574525727508854
 
 async function r1csInfo(r1csName, logger) {
 
-    const cir = await load(r1csName);
+    const cir = await readR1cs(r1csName);
 
     if (ffjavascript.Scalar.eq(cir.prime, bn128r$1)) {
         if (logger) logger.info("Curve: bn-128");
@@ -4253,7 +4251,7 @@ async function r1csInfo(r1csName, logger) {
 
 async function r1csExportJson(r1csFileName, logger) {
 
-    const cir = await load(r1csFileName, true, true);
+    const cir = await readR1cs(r1csFileName, true, true);
 
     return cir;
 }
@@ -4444,7 +4442,7 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
     const csHasher = Blake2b(64);
 
     const {fd: fdR1cs, sections: sectionsR1cs} = await readBinFile(r1csName, "r1cs", 1);
-    const r1cs = await loadHeader(fdR1cs, sectionsR1cs);
+    const r1cs = await readR1csHeader(fdR1cs, sectionsR1cs);
 
     const {fd: fdPTau, sections: sectionsPTau} = await readBinFile(ptauName, "ptau", 1);
     const {curve, power} = await readPTauHeader(fdPTau, sectionsPTau);
build/snarkjs.min.js (vendored, 2 changed lines)
File diff suppressed because one or more lines are too long
cli.js (6 changed lines)
@@ -21,7 +21,7 @@
 
 import fs from "fs";
 
-import {load as loadR1cs} from "r1csfile";
+import {readR1cs} from "r1csfile";
 
 import loadSyms from "./src/loadsyms.js";
 import * as r1cs from "./src/r1cs.js";
@@ -332,7 +332,7 @@ async function r1csPrint(params, options) {
 
     if (options.verbose) Logger.setLogLevel("DEBUG");
 
-    const cir = await loadR1cs(r1csName, true, true);
+    const cir = await readR1cs(r1csName, true, true);
 
     const sym = await loadSyms(symName);
 
@@ -417,7 +417,7 @@ async function zksnarkSetup(params, options) {
 
     const protocol = options.protocol || "groth16";
 
-    const cir = await loadR1cs(r1csName, true);
+    const cir = await readR1cs(r1csName, true);
 
     if (!zkSnark[protocol]) throw new Error("Invalid protocol");
     const setup = zkSnark[protocol].setup(cir, options.verbose);
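Note: the CLI call sites are unchanged apart from the binding; migrating downstream code is a one-line import change (hypothetical snippet):

    // before: import {load as loadR1cs} from "r1csfile";
    //         const cir = await loadR1cs(r1csName, true, true);
    import {readR1cs} from "r1csfile";
    const cir = await readR1cs(r1csName, true, true);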
package-lock.json (generated, 32 changed lines)
@@ -35,6 +35,15 @@
       "resolved": "https://registry.npmjs.org/@iden3/bigarray/-/bigarray-0.0.2.tgz",
       "integrity": "sha512-Xzdyxqm1bOFF6pdIsiHLLl3HkSLjbhqJHVyqaTxXt3RqXBEnmsUmEW47H7VOi/ak7TdkRpNkxjyK5Zbkm+y52g=="
     },
+    "@iden3/binfileutils": {
+      "version": "0.0.2",
+      "resolved": "https://registry.npmjs.org/@iden3/binfileutils/-/binfileutils-0.0.2.tgz",
+      "integrity": "sha512-hrIV3d3SfoQC0HT2oRILxVsxwXwrRFMIZsOW1Ag+pqESNUYYPs651sHPzveM9BN7PQOzMMBWpkk813pqbzFJ9A==",
+      "requires": {
+        "fastfile": "0.0.18",
+        "ffjavascript": "^0.2.23"
+      }
+    },
     "@types/color-name": {
       "version": "1.1.1",
       "resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz",
@@ -677,12 +686,12 @@
       "integrity": "sha512-q03PTKc+wptis4WmuFOwPNQx2p5myFUrl/dMgRlW9mymc1Egyc14JPHgiGnWK+sJ0+dBl2Vwtfh5GfSQltYOpw=="
     },
     "ffjavascript": {
-      "version": "0.2.22",
-      "resolved": "https://registry.npmjs.org/ffjavascript/-/ffjavascript-0.2.22.tgz",
-      "integrity": "sha512-EsVqap2Txm17bKW0z/jXCX3M7rQ++nQUAJY8alWDpyhjRj90xjl6GLeVSKZQ8rOFDQ/SFFXcEB8w9X8Boxid+w==",
+      "version": "0.2.24",
+      "resolved": "https://registry.npmjs.org/ffjavascript/-/ffjavascript-0.2.24.tgz",
+      "integrity": "sha512-XLW+U5rgkWnY6rfusrd3C/4hz/DrIU/VWC4jAQAvQFd5LXsejQ/Yudy+pGxY5dQqh6hzjADJlHshLUgoq/ZMIA==",
       "requires": {
         "big-integer": "^1.6.48",
-        "wasmcurves": "0.0.12",
+        "wasmcurves": "0.0.13",
         "worker-threads": "^1.0.0"
       }
     },
@@ -1519,13 +1528,14 @@
       "dev": true
     },
     "r1csfile": {
-      "version": "0.0.16",
-      "resolved": "https://registry.npmjs.org/r1csfile/-/r1csfile-0.0.16.tgz",
-      "integrity": "sha512-A2jRVWzGgmXeG2lVAc0H4suJmzt50it5UvBnycJgBCpMXM3tH/M6RguP7nvs6suY/yYnkN6jX6iTScSiDUF3FA==",
+      "version": "0.0.18",
+      "resolved": "https://registry.npmjs.org/r1csfile/-/r1csfile-0.0.18.tgz",
+      "integrity": "sha512-RSbyKXzn2sKCCm6i+xeYrkVGKbFKOTeC7sjyzYovJllYhS8r9zfyPRQRpx0u6knPQfHdiDEW0BA9rBTGJG9Wyw==",
       "requires": {
         "@iden3/bigarray": "0.0.2",
+        "@iden3/binfileutils": "0.0.2",
         "fastfile": "0.0.18",
-        "ffjavascript": "0.2.22"
+        "ffjavascript": "0.2.24"
       }
     },
     "randombytes": {
@@ -1990,9 +2000,9 @@
       "dev": true
     },
     "wasmcurves": {
-      "version": "0.0.12",
-      "resolved": "https://registry.npmjs.org/wasmcurves/-/wasmcurves-0.0.12.tgz",
-      "integrity": "sha512-1Jl9mkatyHSNj80ILjf85SZUNuZQBCkTjJlhzqHnZQXUmIimCIWkugaVaYNjozLs1Gun4h/keZe1MBeBN0sRpg==",
+      "version": "0.0.13",
+      "resolved": "https://registry.npmjs.org/wasmcurves/-/wasmcurves-0.0.13.tgz",
+      "integrity": "sha512-7z1nm3o92FPqlg10WYjr8hH8eMJj91oX/biVyWtJz4b3a1AvPDxYYbnEy6yWwcjDIg5zAuFTPugKpuMqIWg/2A==",
       "requires": {
         "big-integer": "^1.6.42",
         "blakejs": "^1.1.0"
package.json

@@ -41,9 +41,9 @@
     "blake2b-wasm": "https://github.com/jbaylina/blake2b-wasm.git",
     "circom_runtime": "0.1.5",
     "fastfile": "0.0.18",
-    "ffjavascript": "0.2.22",
+    "ffjavascript": "0.2.24",
     "logplease": "^1.2.15",
-    "r1csfile": "0.0.16"
+    "r1csfile": "0.0.18"
   },
   "devDependencies": {
     "chai": "^4.2.0",
@@ -1,8 +1,8 @@
-import {load as loadR1cs} from "r1csfile";
+import {readR1cs} from "r1csfile";
 
 export default async function r1csExportJson(r1csFileName, logger) {
 
-    const cir = await loadR1cs(r1csFileName, true, true);
+    const cir = await readR1cs(r1csFileName, true, true);
 
     return cir;
 }
@@ -1,12 +1,12 @@
 import { Scalar } from "ffjavascript";
-import {load as loadR1cs} from "r1csfile";
+import { readR1cs } from "r1csfile";
 
 const bls12381r = Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
 const bn128r = Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");
 
 export default async function r1csInfo(r1csName, logger) {
 
-    const cir = await loadR1cs(r1csName);
+    const cir = await readR1cs(r1csName);
 
     if (Scalar.eq(cir.prime, bn128r)) {
         if (logger) logger.info("Curve: bn-128");
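Note: r1csInfo identifies the curve by comparing the header prime against the subgroup orders declared at the top of the file. A hedged sketch of that check (the helper name is illustrative; the real function logs the result via the supplied logger):

    import { Scalar } from "ffjavascript";

    const bn128r = Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");
    const bls12381r = Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);

    function curveName(prime) {          // hypothetical helper
        if (Scalar.eq(prime, bn128r)) return "bn-128";
        if (Scalar.eq(prime, bls12381r)) return "bls12-381";
        return "unknown";
    }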
@@ -1,5 +1,5 @@
 
-import {loadHeader as loadR1csHeader} from "r1csfile";
+import {readR1csHeader} from "r1csfile";
 import * as utils from "./powersoftau_utils.js";
 import * as binFileUtils from "./binfileutils.js";
 import { log2, formatHash } from "./misc.js";
@@ -13,7 +13,7 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
 
     const csHasher = Blake2b(64);
 
     const {fd: fdR1cs, sections: sectionsR1cs} = await binFileUtils.readBinFile(r1csName, "r1cs", 1);
-    const r1cs = await loadR1csHeader(fdR1cs, sectionsR1cs);
+    const r1cs = await readR1csHeader(fdR1cs, sectionsR1cs);
 
     const {fd: fdPTau, sections: sectionsPTau} = await binFileUtils.readBinFile(ptauName, "ptau", 1);
     const {curve, power} = await utils.readPTauHeader(fdPTau, sectionsPTau);