mirror of https://github.com/tornadocash/snarkjs.git (synced 2024-11-01 07:45:43 +01:00)

Add lagrange op power plus one

This commit is contained in:
parent 975a410d63
commit 1fafad70ee

build/cli.cjs (329 lines changed)
@@ -2345,17 +2345,20 @@ async function readBigInt$1(fd, n8, pos) {
     return ffjavascript.Scalar.fromRprLE(buff, 0, n8);
 }
 
-async function copySection(fdFrom, sections, fdTo, sectionId) {
+async function copySection(fdFrom, sections, fdTo, sectionId, size) {
+    if (typeof size === "undefined") {
+        size = sections[sectionId][0].size;
+    }
     const chunkSize = fdFrom.pageSize;
     await startReadUniqueSection$1(fdFrom, sections, sectionId);
     await startWriteSection(fdTo, sectionId);
-    for (let p=0; p<sections[sectionId][0].size; p+=chunkSize) {
-        const l = Math.min(sections[sectionId][0].size -p, chunkSize);
+    for (let p=0; p<size; p+=chunkSize) {
+        const l = Math.min(size -p, chunkSize);
         const buff = await fdFrom.read(l);
         await fdTo.write(buff);
     }
     await endWriteSection(fdTo);
-    await endReadSection$1(fdFrom);
+    await endReadSection$1(fdFrom, size != sections[sectionId][0].size);
 
 }
 
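The new optional size argument lets a caller copy only a leading slice of a section instead of the whole thing; omitting it keeps the old behaviour. A minimal sketch of the calling pattern, using the same point counts and sizes that the truncate code further down uses (the file handles are assumed to come from readBinFile/createBinFile):

    // Copy only the first (2^p * 2 - 1) tauG1 points of section 2 into the truncated file.
    const sG1 = curve.G1.F.n8 * 2;              // bytes per affine G1 point
    const nTauG1 = (1 << p) * 2 - 1;            // points kept for the smaller power p
    await copySection(fdOld, sections, fdNew, 2, nTauG1 * sG1);
    // Without the extra argument the whole section is copied, as before:
    await copySection(fdOld, sections, fdNew, 7); // contributions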
@@ -3005,9 +3008,11 @@ async function verify(tauFilename, logger) {
     const nextContributionHash = nextContributionHasher.digest();
 
     // Check the nextChallengeHash
-    if (!hashIsEqual(nextContributionHash,curContr.nextChallenge)) {
-        if (logger) logger.error("Hash of the values does not match the next challenge of the last contributor in the contributions section");
-        return false;
+    if (power == ceremonyPower) {
+        if (!hashIsEqual(nextContributionHash,curContr.nextChallenge)) {
+            if (logger) logger.error("Hash of the values does not match the next challenge of the last contributor in the contributions section");
+            return false;
+        }
     }
 
     if (logger) logger.info(formatHash(nextContributionHash, "Next challenge hash: "));
@@ -3043,6 +3048,8 @@ async function verify(tauFilename, logger) {
 
     await fd.close();
 
+    if (logger) logger.info("Powers of Tau Ok!");
+
     return true;
 
 function printContribution(curContr, prevContr) {
@@ -3171,6 +3178,11 @@ async function verify(tauFilename, logger) {
             if (!res) return false;
         }
 
+        if (tauSection == 2) {
+            const res = await verifyPower(power+1);
+            if (!res) return false;
+        }
+
         return true;
 
         async function verifyPower(p) {
@@ -3192,7 +3204,12 @@ async function verify(tauFilename, logger) {
             if (logger) logger.debug(`reading points Powers${p}...`);
             await startReadUniqueSection$1(fd, sections, tauSection);
             buffG = new ffjavascript.BigBuffer(nPoints*sG);
-            await fd.readToBuffer(buffG, 0, nPoints*sG);
+            if (p == power+1) {
+                await fd.readToBuffer(buffG, 0, (nPoints-1)*sG);
+                buffG.set(curve.G1.zeroAffine, (nPoints-1)*sG);
+            } else {
+                await fd.readToBuffer(buffG, 0, nPoints*sG);
+            }
             await endReadSection$1(fd, true);
 
             const resTau = await G.multiExpAffine(buffG, buff_r, logger, sectionName + "_" + p);
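When p is power+1, the tauG1 section on disk holds only 2^(power+1)-1 points, one short of the full 2^(power+1)-point domain, so the buffer is padded with the group identity before the multiexponentiation. A small illustrative helper (hypothetical, not part of the patch) that captures the padding step:

    // Fill the last slot of an n-point affine buffer with the identity element,
    // which contributes nothing to the subsequent multiExpAffine.
    function padWithIdentity(G, buff, nPoints) {
        const sG = G.F.n8 * 2;                      // size of one affine point
        buff.set(G.zeroAffine, (nPoints - 1) * sG);
        return buff;
    }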
@@ -3733,6 +3750,10 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, logger) {
             await processSectionPower(p);
         }
 
+        if (oldSectionId == 2) {
+            await processSectionPower(power+1);
+        }
+
         await endWriteSection(fdNew);
 
         async function processSectionPower(p) {
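Section 2 (tauG1) is the only powers section that stores 2^(power+1)-1 points, which is why it alone gets an extra Lagrange pass at power+1. As a worked example, a power-10 ceremony keeps 2*2^10 - 1 = 2047 tauG1 points, and the added pass evaluates the Lagrange basis over the 2^11 = 2048-point domain, with the single missing point supplied as the group identity (see the chunk preparation below).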
@@ -3752,7 +3773,213 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, logger) {
             for (let i=0; i<nChunks; i++) {
                 let buff;
                 if (logger) logger.debug(`${sectionName} Prepare ${i+1}/${nChunks}`);
-                buff = await fdOld.read(pointsPerChunk*sGin);
+                if ((oldSectionId == 2)&&(p==power+1)) {
+                    buff = new Uint8Array(pointsPerChunk*sGin);
+                    await fdOld.readToBuffer(buff, 0,(pointsPerChunk-1)*sGin );
+                    buff.set(curve.G1.zeroAffine, (pointsPerChunk-1)*sGin );
+                } else {
+                    buff = await fdOld.read(pointsPerChunk*sGin);
+                }
+                buff = await G.batchToJacobian(buff);
+                for (let j=0; j<pointsPerChunk; j++) {
+                    fdTmp.pos = bitReverse(i*pointsPerChunk+j, p)*sGmid;
+                    await fdTmp.write(buff.slice(j*sGmid, (j+1)*sGmid ));
+                }
+            }
+            await endReadSection$1(fdOld, true);
+
+            for (let j=0; j<nChunks; j++) {
+                if (logger) logger.debug(`${sectionName} ${p} FFTMix ${j+1}/${nChunks}`);
+                let buff;
+                fdTmp.pos = (j*pointsPerChunk)*sGmid;
+                buff = await fdTmp.read(pointsPerChunk*sGmid);
+                buff = await G.fftMix(buff);
+                fdTmp.pos = (j*pointsPerChunk)*sGmid;
+                await fdTmp.write(buff);
+            }
+            for (let i=chunkPower+1; i<= p; i++) {
+                const nGroups = 1 << (p - i);
+                const nChunksPerGroup = nChunks / nGroups;
+                for (let j=0; j<nGroups; j++) {
+                    for (let k=0; k <nChunksPerGroup/2; k++) {
+                        if (logger) logger.debug(`${sectionName} ${i}/${p} FFTJoin ${j+1}/${nGroups} ${k+1}/${nChunksPerGroup/2}`);
+                        const first = Fr.exp( Fr.w[i], k*pointsPerChunk);
+                        const inc = Fr.w[i];
+                        const o1 = j*nChunksPerGroup + k;
+                        const o2 = j*nChunksPerGroup + k + nChunksPerGroup/2;
+
+                        let buff1, buff2;
+                        fdTmp.pos = o1*pointsPerChunk*sGmid;
+                        buff1 = await fdTmp.read(pointsPerChunk * sGmid);
+                        fdTmp.pos = o2*pointsPerChunk*sGmid;
+                        buff2 = await fdTmp.read(pointsPerChunk * sGmid);
+
+                        [buff1, buff2] = await G.fftJoin(buff1, buff2, first, inc);
+
+                        fdTmp.pos = o1*pointsPerChunk*sGmid;
+                        await fdTmp.write(buff1);
+                        fdTmp.pos = o2*pointsPerChunk*sGmid;
+                        await fdTmp.write(buff2);
+                    }
+                }
+            }
+            await finalInverse(p);
+        }
+        async function finalInverse(p) {
+            const G = curve[Gstr];
+            const Fr = curve.Fr;
+            const sGmid = G.F.n8*3;
+            const sGout = G.F.n8*2;
+
+            const chunkPower = p > CHUNKPOW ? CHUNKPOW : p;
+            const pointsPerChunk = 1<<chunkPower;
+            const nPoints = 1 << p;
+            const nChunks = nPoints / pointsPerChunk;
+
+            const o = fdNew.pos;
+            fdTmp.pos = 0;
+            const factor = Fr.inv( Fr.e( 1<< p));
+            for (let i=0; i<nChunks; i++) {
+                if (logger) logger.debug(`${sectionName} ${p} FFTFinal ${i+1}/${nChunks}`);
+                let buff;
+                buff = await fdTmp.read(pointsPerChunk * sGmid);
+                buff = await G.fftFinal(buff, factor);
+
+                if ( i == 0) {
+                    fdNew.pos = o;
+                    await fdNew.write(buff.slice((pointsPerChunk-1)*sGout));
+                    fdNew.pos = o + ((nChunks - 1)*pointsPerChunk + 1) * sGout;
+                    await fdNew.write(buff.slice(0, (pointsPerChunk-1)*sGout));
+                } else {
+                    fdNew.pos = o + ((nChunks - 1 - i)*pointsPerChunk + 1) * sGout;
+                    await fdNew.write(buff);
+                }
+            }
+            fdNew.pos = o + nChunks * pointsPerChunk * sGout;
+        }
+    }
+}
+
+async function truncate(ptauFilename, template, logger) {
+
+    const {fd: fdOld, sections} = await readBinFile$1(ptauFilename, "ptau", 1);
+    const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
+
+    const sG1 = curve.G1.F.n8*2;
+    const sG2 = curve.G2.F.n8*2;
+
+    for (let p=1; p<power; p++) {
+        await generateTruncate(p);
+    }
+
+    await fdOld.close();
+
+    return true;
+
+    async function generateTruncate(p) {
+
+        let sP = p.toString();
+        while (sP.length<2) sP = "0" + sP;
+
+        if (logger) logger.debug("Writing Power: "+sP);
+
+        const fdNew = await createBinFile(template + sP + ".ptau", "ptau", 1, 11);
+        await writePTauHeader(fdNew, curve, p, ceremonyPower);
+
+        await copySection(fdOld, sections, fdNew, 2, ((1<<p)*2-1) * sG1 ); // tagG1
+        await copySection(fdOld, sections, fdNew, 3, (1<<p) * sG2); // tauG2
+        await copySection(fdOld, sections, fdNew, 4, (1<<p) * sG1); // alfaTauG1
+        await copySection(fdOld, sections, fdNew, 5, (1<<p) * sG1); // betaTauG1
+        await copySection(fdOld, sections, fdNew, 6, sG2); // betaTauG2
+        await copySection(fdOld, sections, fdNew, 7); // contributions
+        await copySection(fdOld, sections, fdNew, 12, ((1<<p)*2 -1) * sG1); // L_tauG1
+        await copySection(fdOld, sections, fdNew, 13, ((1<<p)*2 -1) * sG2); // L_tauG2
+        await copySection(fdOld, sections, fdNew, 14, ((1<<p)*2 -1) * sG1); // L_alfaTauG1
+        await copySection(fdOld, sections, fdNew, 15, ((1<<p)*2 -1) * sG1); // L_betaTauG1
+
+        await fdNew.close();
+    }
+
+}
+
+async function convert(oldPtauFilename, newPTauFilename, logger) {
+
+    const {fd: fdOld, sections} = await readBinFile$1(oldPtauFilename, "ptau", 1);
+    const {curve, power} = await readPTauHeader(fdOld, sections);
+
+    const fdNew = await createBinFile(newPTauFilename, "ptau", 1, 11);
+    await writePTauHeader(fdNew, curve, power);
+
+    // const fdTmp = await fastFile.createOverride(newPTauFilename+ ".tmp");
+    const fdTmp = await createOverride({type: "bigMem"});
+
+    await copySection(fdOld, sections, fdNew, 2);
+    await copySection(fdOld, sections, fdNew, 3);
+    await copySection(fdOld, sections, fdNew, 4);
+    await copySection(fdOld, sections, fdNew, 5);
+    await copySection(fdOld, sections, fdNew, 6);
+    await copySection(fdOld, sections, fdNew, 7);
+
+    await processSection(2, 12, "G1", "tauG1" );
+    await copySection(fdOld, sections, fdNew, 13);
+    await copySection(fdOld, sections, fdNew, 14);
+    await copySection(fdOld, sections, fdNew, 15);
+
+    await fdOld.close();
+    await fdNew.close();
+    await fdTmp.close();
+
+    // await fs.promises.unlink(newPTauFilename+ ".tmp");
+
+    return;
+
+    async function processSection(oldSectionId, newSectionId, Gstr, sectionName) {
+        const CHUNKPOW = 16;
+        if (logger) logger.debug("Starting section: "+sectionName);
+
+        await startWriteSection(fdNew, newSectionId);
+
+        const size = sections[newSectionId][0].size;
+        const chunkSize = fdOld.pageSize;
+        await startReadUniqueSection$1(fdOld, sections, newSectionId);
+        for (let p=0; p<size; p+=chunkSize) {
+            const l = Math.min(size -p, chunkSize);
+            const buff = await fdOld.read(l);
+            await fdNew.write(buff);
+        }
+        await endReadSection$1(fdOld);
+
+        if (oldSectionId == 2) {
+            await processSectionPower(power+1);
+        }
+
+        await endWriteSection(fdNew);
+
+        async function processSectionPower(p) {
+            const chunkPower = p > CHUNKPOW ? CHUNKPOW : p;
+            const pointsPerChunk = 1<<chunkPower;
+            const nPoints = 1 << p;
+            const nChunks = nPoints / pointsPerChunk;
+
+            const G = curve[Gstr];
+            const Fr = curve.Fr;
+            const sGin = G.F.n8*2;
+            const sGmid = G.F.n8*3;
+
+            await startReadUniqueSection$1(fdOld, sections, oldSectionId);
+            // Build the initial tmp Buff
+            fdTmp.pos =0;
+            for (let i=0; i<nChunks; i++) {
+                let buff;
+                if (logger) logger.debug(`${sectionName} Prepare ${i+1}/${nChunks}`);
+                if ((oldSectionId == 2)&&(p==power+1)) {
+                    buff = new Uint8Array(pointsPerChunk*sGin);
+                    await fdOld.readToBuffer(buff, 0,(pointsPerChunk-1)*sGin );
+                    buff.set(curve.G1.zeroAffine, (pointsPerChunk-1)*sGin );
+                } else {
+                    buff = await fdOld.read(pointsPerChunk*sGin);
+                }
                 buff = await G.batchToJacobian(buff);
                 for (let j=0; j<pointsPerChunk; j++) {
                     fdTmp.pos = bitReverse(i*pointsPerChunk+j, p)*sGmid;
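Throughout the added FFT staging, each input point is first written to a bit-reversed position in the temporary file, after which fftMix, fftJoin and fftFinal run in place. An illustrative stand-in for the bitReverse(idx, bits) helper the code relies on (snarkjs imports it from its misc utilities; this loop version is only meant to show the behaviour):

    // Reverse the lowest `bits` bits of `idx`, so element idx lands where a
    // radix-2 decimation-in-time FFT expects to read it.
    function bitReverse(idx, bits) {
        let r = 0;
        for (let i = 0; i < bits; i++) {
            r = (r << 1) | (idx & 1);   // shift the result, pull in the next low bit
            idx >>= 1;
        }
        return r;
    }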
@@ -4000,7 +4227,7 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
 
     const cirPower = log2(r1cs.nConstraints + r1cs.nPubInputs + r1cs.nOutputs +1 -1) +1;
 
-    if (cirPower > power+1) {
+    if (cirPower > power) {
         if (logger) logger.error(`circuit too big for this power of tau ceremony. ${r1cs.nConstraints}*2 > 2**${power}`);
         return -1;
     }
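Assuming log2 here is snarkjs's integer (floor) log, cirPower works out to floor(log2(nConstraints + nPubInputs + nOutputs)) + 1, i.e. the smallest power whose 2^cirPower-point domain covers the constraint system. For example, 1000 constraints with 1 public input and 1 output give floor(log2(1002)) = 9, so cirPower = 10 and the ptau file must have power of at least 10 under the tightened check.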
@@ -6401,42 +6628,42 @@ const commands = [
         description: "Starts a powers of tau ceremony",
         alias: ["ptn"],
         options: "-verbose|v",
-        action: powersOfTawNew
+        action: powersOfTauNew
     },
     {
         cmd: "powersoftau contribute <powersoftau.ptau> <new_powersoftau.ptau>",
         description: "creates a ptau file with a new contribution",
         alias: ["ptc"],
         options: "-verbose|v -name|n -entropy|e",
-        action: powersOfTawContribute
+        action: powersOfTauContribute
     },
     {
         cmd: "powersoftau export challenge <powersoftau_0000.ptau> [challenge]",
         description: "Creates a challenge",
         alias: ["ptec"],
         options: "-verbose|v",
-        action: powersOfTawExportChallenge
+        action: powersOfTauExportChallenge
     },
     {
         cmd: "powersoftau challenge contribute <curve> <challenge> [response]",
         description: "Contribute to a challenge",
         alias: ["ptcc"],
         options: "-verbose|v -entropy|e",
-        action: powersOfTawChallengeContribute
+        action: powersOfTauChallengeContribute
     },
     {
         cmd: "powersoftau import response <powersoftau_old.ptau> <response> <<powersoftau_new.ptau>",
         description: "import a response to a ptau file",
         alias: ["ptir"],
         options: "-verbose|v -nopoints -nocheck -name|n",
-        action: powersOfTawImport
+        action: powersOfTauImport
     },
     {
         cmd: "powersoftau beacon <old_powersoftau.ptau> <new_powersoftau.ptau> <beaconHash(Hex)> <numIterationsExp>",
         description: "adds a beacon",
         alias: ["ptb"],
         options: "-verbose|v -name|n",
-        action: powersOfTawBeacon
+        action: powersOfTauBeacon
     },
     {
         cmd: "powersoftau prepare phase2 <powersoftau.ptau> <new_powersoftau.ptau>",
@@ -6444,21 +6671,37 @@ const commands = [
         longDescription: " This process calculates the evaluation of the Lagrange polinomials at tau for alpha*tau and beta tau",
         alias: ["pt2"],
         options: "-verbose|v",
-        action: powersOfTawPreparePhase2
+        action: powersOfTauPreparePhase2
+    },
+    {
+        cmd: "powersoftau convert <old_powersoftau.ptau> <new_powersoftau.ptau>",
+        description: "Convert ptau",
+        longDescription: " This process calculates the evaluation of the Lagrange polinomials at tau for alpha*tau and beta tau",
+        alias: ["ptcv"],
+        options: "-verbose|v",
+        action: powersOfTauConvert
+    },
+    {
+        cmd: "powersoftau truncate <powersoftau.ptau>",
+        description: "Generate diferent powers of tau with smoller sizes ",
+        longDescription: " This process generates smaller ptau files from a bigger power ptau",
+        alias: ["ptt"],
+        options: "-verbose|v",
+        action: powersOfTauTruncate
     },
     {
         cmd: "powersoftau verify <powersoftau.ptau>",
         description: "verifies a powers of tau file",
         alias: ["ptv"],
         options: "-verbose|v",
-        action: powersOfTawVerify
+        action: powersOfTauVerify
     },
     {
         cmd: "powersoftau export json <powersoftau_0000.ptau> <powersoftau_0000.json>",
         description: "Exports a power of tau file to a JSON",
         alias: ["ptej"],
         options: "-verbose|v",
-        action: powersOfTawExportJson
+        action: powersOfTauExportJson
     },
     {
         cmd: "r1cs info [circuit.r1cs]",
@@ -6941,7 +7184,7 @@ async function zkeyExportSolidityCalldata(params, options) {
 }
 
 // powersoftau new <curve> <power> [powersoftau_0000.ptau]",
-async function powersOfTawNew(params, options) {
+async function powersOfTauNew(params, options) {
     let curveName;
     let power;
     let ptauName;
@@ -6954,7 +7197,7 @@ async function powersOfTawNew(params, options) {
     }
 
     if (params.length < 3) {
-        ptauName = "powersOfTaw" + power + "_0000.ptau";
+        ptauName = "powersOfTau" + power + "_0000.ptau";
     } else {
         ptauName = params[2];
     }
@@ -6966,7 +7209,7 @@ async function powersOfTawNew(params, options) {
     return await newAccumulator(curve, power, ptauName, logger);
 }
 
-async function powersOfTawExportChallenge(params, options) {
+async function powersOfTauExportChallenge(params, options) {
     let ptauName;
     let challengeName;
 
@@ -6984,7 +7227,7 @@ async function powersOfTawExportChallenge(params, options) {
 }
 
 // powersoftau challenge contribute <curve> <challenge> [response]
-async function powersOfTawChallengeContribute(params, options) {
+async function powersOfTauChallengeContribute(params, options) {
     let challengeName;
     let responseName;
 
@@ -7004,7 +7247,7 @@ async function powersOfTawChallengeContribute(params, options) {
 }
 
 
-async function powersOfTawImport(params, options) {
+async function powersOfTauImport(params, options) {
     let oldPtauName;
     let response;
     let newPtauName;
@@ -7028,7 +7271,7 @@ async function powersOfTawImport(params, options) {
     // TODO Verify
 }
 
-async function powersOfTawVerify(params, options) {
+async function powersOfTauVerify(params, options) {
     let ptauName;
 
     ptauName = params[0];
@@ -7043,7 +7286,7 @@ async function powersOfTawVerify(params, options) {
     }
 }
 
-async function powersOfTawBeacon(params, options) {
+async function powersOfTauBeacon(params, options) {
     let oldPtauName;
     let newPtauName;
     let beaconHashStr;
@@ -7059,7 +7302,7 @@ async function powersOfTawBeacon(params, options) {
     return await beacon(oldPtauName, newPtauName, options.name ,beaconHashStr, numIterationsExp, logger);
 }
 
-async function powersOfTawContribute(params, options) {
+async function powersOfTauContribute(params, options) {
     let oldPtauName;
     let newPtauName;
 
@@ -7071,7 +7314,7 @@ async function powersOfTawContribute(params, options) {
     return await contribute(oldPtauName, newPtauName, options.name , options.entropy, logger);
 }
 
-async function powersOfTawPreparePhase2(params, options) {
+async function powersOfTauPreparePhase2(params, options) {
     let oldPtauName;
     let newPtauName;
 
@@ -7083,8 +7326,36 @@ async function powersOfTawPreparePhase2(params, options) {
     return await preparePhase2(oldPtauName, newPtauName, logger);
 }
 
+async function powersOfTauConvert(params, options) {
+    let oldPtauName;
+    let newPtauName;
+
+    oldPtauName = params[0];
+    newPtauName = params[1];
+
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    return await convert(oldPtauName, newPtauName, logger);
+}
+
+
+async function powersOfTauTruncate(params, options) {
+    let ptauName;
+
+    ptauName = params[0];
+
+    let template = ptauName;
+    while ((template.length>0) && (template[template.length-1] != ".")) template = template.slice(0, template.length-1);
+    template = template.slice(0, template.length-1);
+    template = template+"_";
+
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    return await truncate(ptauName, template, logger);
 }
 
 // powersoftau export json <powersoftau_0000.ptau> <powersoftau_0000.json>",
-async function powersOfTawExportJson(params, options) {
+async function powersOfTauExportJson(params, options) {
     let ptauName;
     let jsonName;

build/main.cjs (249 lines changed)
@@ -817,17 +817,20 @@ async function readBigInt(fd, n8, pos) {
     return ffjavascript.Scalar.fromRprLE(buff, 0, n8);
 }
 
-async function copySection(fdFrom, sections, fdTo, sectionId) {
+async function copySection(fdFrom, sections, fdTo, sectionId, size) {
+    if (typeof size === "undefined") {
+        size = sections[sectionId][0].size;
+    }
     const chunkSize = fdFrom.pageSize;
     await startReadUniqueSection(fdFrom, sections, sectionId);
     await startWriteSection(fdTo, sectionId);
-    for (let p=0; p<sections[sectionId][0].size; p+=chunkSize) {
-        const l = Math.min(sections[sectionId][0].size -p, chunkSize);
+    for (let p=0; p<size; p+=chunkSize) {
+        const l = Math.min(size -p, chunkSize);
         const buff = await fdFrom.read(l);
         await fdTo.write(buff);
     }
     await endWriteSection(fdTo);
-    await endReadSection(fdFrom);
+    await endReadSection(fdFrom, size != sections[sectionId][0].size);
 
 }
 
@@ -2803,9 +2806,11 @@ async function verify(tauFilename, logger) {
     const nextContributionHash = nextContributionHasher.digest();
 
     // Check the nextChallengeHash
-    if (!hashIsEqual(nextContributionHash,curContr.nextChallenge)) {
-        if (logger) logger.error("Hash of the values does not match the next challenge of the last contributor in the contributions section");
-        return false;
+    if (power == ceremonyPower) {
+        if (!hashIsEqual(nextContributionHash,curContr.nextChallenge)) {
+            if (logger) logger.error("Hash of the values does not match the next challenge of the last contributor in the contributions section");
+            return false;
+        }
     }
 
     if (logger) logger.info(formatHash(nextContributionHash, "Next challenge hash: "));
@@ -2841,6 +2846,8 @@ async function verify(tauFilename, logger) {
 
     await fd.close();
 
+    if (logger) logger.info("Powers of Tau Ok!");
+
     return true;
 
 function printContribution(curContr, prevContr) {
@@ -2969,6 +2976,11 @@ async function verify(tauFilename, logger) {
             if (!res) return false;
         }
 
+        if (tauSection == 2) {
+            const res = await verifyPower(power+1);
+            if (!res) return false;
+        }
+
         return true;
 
         async function verifyPower(p) {
@@ -2990,7 +3002,12 @@ async function verify(tauFilename, logger) {
             if (logger) logger.debug(`reading points Powers${p}...`);
             await startReadUniqueSection(fd, sections, tauSection);
             buffG = new ffjavascript.BigBuffer(nPoints*sG);
-            await fd.readToBuffer(buffG, 0, nPoints*sG);
+            if (p == power+1) {
+                await fd.readToBuffer(buffG, 0, (nPoints-1)*sG);
+                buffG.set(curve.G1.zeroAffine, (nPoints-1)*sG);
+            } else {
+                await fd.readToBuffer(buffG, 0, nPoints*sG);
+            }
             await endReadSection(fd, true);
 
             const resTau = await G.multiExpAffine(buffG, buff_r, logger, sectionName + "_" + p);
@@ -3531,6 +3548,10 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, logger) {
             await processSectionPower(p);
         }
 
+        if (oldSectionId == 2) {
+            await processSectionPower(power+1);
+        }
+
         await endWriteSection(fdNew);
 
         async function processSectionPower(p) {
@@ -3550,7 +3571,213 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, logger) {
             for (let i=0; i<nChunks; i++) {
                 let buff;
                 if (logger) logger.debug(`${sectionName} Prepare ${i+1}/${nChunks}`);
-                buff = await fdOld.read(pointsPerChunk*sGin);
+                if ((oldSectionId == 2)&&(p==power+1)) {
+                    buff = new Uint8Array(pointsPerChunk*sGin);
+                    await fdOld.readToBuffer(buff, 0,(pointsPerChunk-1)*sGin );
+                    buff.set(curve.G1.zeroAffine, (pointsPerChunk-1)*sGin );
+                } else {
+                    buff = await fdOld.read(pointsPerChunk*sGin);
+                }
+                buff = await G.batchToJacobian(buff);
+                for (let j=0; j<pointsPerChunk; j++) {
+                    fdTmp.pos = bitReverse(i*pointsPerChunk+j, p)*sGmid;
+                    await fdTmp.write(buff.slice(j*sGmid, (j+1)*sGmid ));
+                }
+            }
+            await endReadSection(fdOld, true);
+
+            for (let j=0; j<nChunks; j++) {
+                if (logger) logger.debug(`${sectionName} ${p} FFTMix ${j+1}/${nChunks}`);
+                let buff;
+                fdTmp.pos = (j*pointsPerChunk)*sGmid;
+                buff = await fdTmp.read(pointsPerChunk*sGmid);
+                buff = await G.fftMix(buff);
+                fdTmp.pos = (j*pointsPerChunk)*sGmid;
+                await fdTmp.write(buff);
+            }
+            for (let i=chunkPower+1; i<= p; i++) {
+                const nGroups = 1 << (p - i);
+                const nChunksPerGroup = nChunks / nGroups;
+                for (let j=0; j<nGroups; j++) {
+                    for (let k=0; k <nChunksPerGroup/2; k++) {
+                        if (logger) logger.debug(`${sectionName} ${i}/${p} FFTJoin ${j+1}/${nGroups} ${k+1}/${nChunksPerGroup/2}`);
+                        const first = Fr.exp( Fr.w[i], k*pointsPerChunk);
+                        const inc = Fr.w[i];
+                        const o1 = j*nChunksPerGroup + k;
+                        const o2 = j*nChunksPerGroup + k + nChunksPerGroup/2;
+
+                        let buff1, buff2;
+                        fdTmp.pos = o1*pointsPerChunk*sGmid;
+                        buff1 = await fdTmp.read(pointsPerChunk * sGmid);
+                        fdTmp.pos = o2*pointsPerChunk*sGmid;
+                        buff2 = await fdTmp.read(pointsPerChunk * sGmid);
+
+                        [buff1, buff2] = await G.fftJoin(buff1, buff2, first, inc);
+
+                        fdTmp.pos = o1*pointsPerChunk*sGmid;
+                        await fdTmp.write(buff1);
+                        fdTmp.pos = o2*pointsPerChunk*sGmid;
+                        await fdTmp.write(buff2);
+                    }
+                }
+            }
+            await finalInverse(p);
+        }
+        async function finalInverse(p) {
+            const G = curve[Gstr];
+            const Fr = curve.Fr;
+            const sGmid = G.F.n8*3;
+            const sGout = G.F.n8*2;
+
+            const chunkPower = p > CHUNKPOW ? CHUNKPOW : p;
+            const pointsPerChunk = 1<<chunkPower;
+            const nPoints = 1 << p;
+            const nChunks = nPoints / pointsPerChunk;
+
+            const o = fdNew.pos;
+            fdTmp.pos = 0;
+            const factor = Fr.inv( Fr.e( 1<< p));
+            for (let i=0; i<nChunks; i++) {
+                if (logger) logger.debug(`${sectionName} ${p} FFTFinal ${i+1}/${nChunks}`);
+                let buff;
+                buff = await fdTmp.read(pointsPerChunk * sGmid);
+                buff = await G.fftFinal(buff, factor);
+
+                if ( i == 0) {
+                    fdNew.pos = o;
+                    await fdNew.write(buff.slice((pointsPerChunk-1)*sGout));
+                    fdNew.pos = o + ((nChunks - 1)*pointsPerChunk + 1) * sGout;
+                    await fdNew.write(buff.slice(0, (pointsPerChunk-1)*sGout));
+                } else {
+                    fdNew.pos = o + ((nChunks - 1 - i)*pointsPerChunk + 1) * sGout;
+                    await fdNew.write(buff);
+                }
+            }
+            fdNew.pos = o + nChunks * pointsPerChunk * sGout;
+        }
+    }
+}
+
+async function truncate(ptauFilename, template, logger) {
+
+    const {fd: fdOld, sections} = await readBinFile(ptauFilename, "ptau", 1);
+    const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
+
+    const sG1 = curve.G1.F.n8*2;
+    const sG2 = curve.G2.F.n8*2;
+
+    for (let p=1; p<power; p++) {
+        await generateTruncate(p);
+    }
+
+    await fdOld.close();
+
+    return true;
+
+    async function generateTruncate(p) {
+
+        let sP = p.toString();
+        while (sP.length<2) sP = "0" + sP;
+
+        if (logger) logger.debug("Writing Power: "+sP);
+
+        const fdNew = await createBinFile(template + sP + ".ptau", "ptau", 1, 11);
+        await writePTauHeader(fdNew, curve, p, ceremonyPower);
+
+        await copySection(fdOld, sections, fdNew, 2, ((1<<p)*2-1) * sG1 ); // tagG1
+        await copySection(fdOld, sections, fdNew, 3, (1<<p) * sG2); // tauG2
+        await copySection(fdOld, sections, fdNew, 4, (1<<p) * sG1); // alfaTauG1
+        await copySection(fdOld, sections, fdNew, 5, (1<<p) * sG1); // betaTauG1
+        await copySection(fdOld, sections, fdNew, 6, sG2); // betaTauG2
+        await copySection(fdOld, sections, fdNew, 7); // contributions
+        await copySection(fdOld, sections, fdNew, 12, ((1<<p)*2 -1) * sG1); // L_tauG1
+        await copySection(fdOld, sections, fdNew, 13, ((1<<p)*2 -1) * sG2); // L_tauG2
+        await copySection(fdOld, sections, fdNew, 14, ((1<<p)*2 -1) * sG1); // L_alfaTauG1
+        await copySection(fdOld, sections, fdNew, 15, ((1<<p)*2 -1) * sG1); // L_betaTauG1
+
+        await fdNew.close();
+    }
+
+}
+
+async function convert(oldPtauFilename, newPTauFilename, logger) {
+
+    const {fd: fdOld, sections} = await readBinFile(oldPtauFilename, "ptau", 1);
+    const {curve, power} = await readPTauHeader(fdOld, sections);
+
+    const fdNew = await createBinFile(newPTauFilename, "ptau", 1, 11);
+    await writePTauHeader(fdNew, curve, power);
+
+    // const fdTmp = await fastFile.createOverride(newPTauFilename+ ".tmp");
+    const fdTmp = await createOverride({type: "bigMem"});
+
+    await copySection(fdOld, sections, fdNew, 2);
+    await copySection(fdOld, sections, fdNew, 3);
+    await copySection(fdOld, sections, fdNew, 4);
+    await copySection(fdOld, sections, fdNew, 5);
+    await copySection(fdOld, sections, fdNew, 6);
+    await copySection(fdOld, sections, fdNew, 7);
+
+    await processSection(2, 12, "G1", "tauG1" );
+    await copySection(fdOld, sections, fdNew, 13);
+    await copySection(fdOld, sections, fdNew, 14);
+    await copySection(fdOld, sections, fdNew, 15);
+
+    await fdOld.close();
+    await fdNew.close();
+    await fdTmp.close();
+
+    // await fs.promises.unlink(newPTauFilename+ ".tmp");
+
+    return;
+
+    async function processSection(oldSectionId, newSectionId, Gstr, sectionName) {
+        const CHUNKPOW = 16;
+        if (logger) logger.debug("Starting section: "+sectionName);
+
+        await startWriteSection(fdNew, newSectionId);
+
+        const size = sections[newSectionId][0].size;
+        const chunkSize = fdOld.pageSize;
+        await startReadUniqueSection(fdOld, sections, newSectionId);
+        for (let p=0; p<size; p+=chunkSize) {
+            const l = Math.min(size -p, chunkSize);
+            const buff = await fdOld.read(l);
+            await fdNew.write(buff);
+        }
+        await endReadSection(fdOld);
+
+        if (oldSectionId == 2) {
+            await processSectionPower(power+1);
+        }
+
+        await endWriteSection(fdNew);
+
+        async function processSectionPower(p) {
+            const chunkPower = p > CHUNKPOW ? CHUNKPOW : p;
+            const pointsPerChunk = 1<<chunkPower;
+            const nPoints = 1 << p;
+            const nChunks = nPoints / pointsPerChunk;
+
+            const G = curve[Gstr];
+            const Fr = curve.Fr;
+            const sGin = G.F.n8*2;
+            const sGmid = G.F.n8*3;
+
+            await startReadUniqueSection(fdOld, sections, oldSectionId);
+            // Build the initial tmp Buff
+            fdTmp.pos =0;
+            for (let i=0; i<nChunks; i++) {
+                let buff;
+                if (logger) logger.debug(`${sectionName} Prepare ${i+1}/${nChunks}`);
+                if ((oldSectionId == 2)&&(p==power+1)) {
+                    buff = new Uint8Array(pointsPerChunk*sGin);
+                    await fdOld.readToBuffer(buff, 0,(pointsPerChunk-1)*sGin );
+                    buff.set(curve.G1.zeroAffine, (pointsPerChunk-1)*sGin );
+                } else {
+                    buff = await fdOld.read(pointsPerChunk*sGin);
+                }
                 buff = await G.batchToJacobian(buff);
                 for (let j=0; j<pointsPerChunk; j++) {
                     fdTmp.pos = bitReverse(i*pointsPerChunk+j, p)*sGmid;
@@ -3707,6 +3934,8 @@ var powersoftau = /*#__PURE__*/Object.freeze({
     beacon: beacon,
     contribute: contribute,
     preparePhase2: preparePhase2,
+    truncate: truncate,
+    convert: convert,
     exportJson: exportJson
 });
 
@@ -4572,7 +4801,7 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
 
     const cirPower = log2(r1cs.nConstraints + r1cs.nPubInputs + r1cs.nOutputs +1 -1) +1;
 
-    if (cirPower > power+1) {
+    if (cirPower > power) {
         if (logger) logger.error(`circuit too big for this power of tau ceremony. ${r1cs.nConstraints}*2 > 2**${power}`);
         return -1;
     }

build/snarkjs.js (251 lines changed)
File diff suppressed because one or more lines are too long

build/snarkjs.min.js (vendored, 2 lines changed)
File diff suppressed because one or more lines are too long

cli.js (102 lines changed)
@@ -28,7 +28,7 @@ import * as r1cs from "./src/r1cs.js";
 
 import clProcessor from "./src/clprocessor.js";
 
-import * as powersOfTaw from "./src/powersoftau.js";
+import * as powersOfTau from "./src/powersoftau.js";
 
 import { utils } from "ffjavascript";
 const {stringifyBigInts, unstringifyBigInts} = utils;
@@ -51,42 +51,42 @@ const commands = [
         description: "Starts a powers of tau ceremony",
         alias: ["ptn"],
         options: "-verbose|v",
-        action: powersOfTawNew
+        action: powersOfTauNew
     },
     {
         cmd: "powersoftau contribute <powersoftau.ptau> <new_powersoftau.ptau>",
         description: "creates a ptau file with a new contribution",
         alias: ["ptc"],
         options: "-verbose|v -name|n -entropy|e",
-        action: powersOfTawContribute
+        action: powersOfTauContribute
     },
     {
         cmd: "powersoftau export challenge <powersoftau_0000.ptau> [challenge]",
         description: "Creates a challenge",
         alias: ["ptec"],
         options: "-verbose|v",
-        action: powersOfTawExportChallenge
+        action: powersOfTauExportChallenge
     },
     {
         cmd: "powersoftau challenge contribute <curve> <challenge> [response]",
         description: "Contribute to a challenge",
         alias: ["ptcc"],
         options: "-verbose|v -entropy|e",
-        action: powersOfTawChallengeContribute
+        action: powersOfTauChallengeContribute
     },
     {
         cmd: "powersoftau import response <powersoftau_old.ptau> <response> <<powersoftau_new.ptau>",
         description: "import a response to a ptau file",
         alias: ["ptir"],
         options: "-verbose|v -nopoints -nocheck -name|n",
-        action: powersOfTawImport
+        action: powersOfTauImport
     },
     {
         cmd: "powersoftau beacon <old_powersoftau.ptau> <new_powersoftau.ptau> <beaconHash(Hex)> <numIterationsExp>",
         description: "adds a beacon",
         alias: ["ptb"],
         options: "-verbose|v -name|n",
-        action: powersOfTawBeacon
+        action: powersOfTauBeacon
     },
     {
         cmd: "powersoftau prepare phase2 <powersoftau.ptau> <new_powersoftau.ptau>",
@@ -94,21 +94,37 @@ const commands = [
         longDescription: " This process calculates the evaluation of the Lagrange polinomials at tau for alpha*tau and beta tau",
         alias: ["pt2"],
         options: "-verbose|v",
-        action: powersOfTawPreparePhase2
+        action: powersOfTauPreparePhase2
+    },
+    {
+        cmd: "powersoftau convert <old_powersoftau.ptau> <new_powersoftau.ptau>",
+        description: "Convert ptau",
+        longDescription: " This process calculates the evaluation of the Lagrange polinomials at tau for alpha*tau and beta tau",
+        alias: ["ptcv"],
+        options: "-verbose|v",
+        action: powersOfTauConvert
+    },
+    {
+        cmd: "powersoftau truncate <powersoftau.ptau>",
+        description: "Generate diferent powers of tau with smoller sizes ",
+        longDescription: " This process generates smaller ptau files from a bigger power ptau",
+        alias: ["ptt"],
+        options: "-verbose|v",
+        action: powersOfTauTruncate
     },
     {
         cmd: "powersoftau verify <powersoftau.ptau>",
         description: "verifies a powers of tau file",
         alias: ["ptv"],
         options: "-verbose|v",
-        action: powersOfTawVerify
+        action: powersOfTauVerify
     },
     {
         cmd: "powersoftau export json <powersoftau_0000.ptau> <powersoftau_0000.json>",
         description: "Exports a power of tau file to a JSON",
         alias: ["ptej"],
         options: "-verbose|v",
-        action: powersOfTawExportJson
+        action: powersOfTauExportJson
     },
     {
         cmd: "r1cs info [circuit.r1cs]",
@@ -591,7 +607,7 @@ async function zkeyExportSolidityCalldata(params, options) {
 }
 
 // powersoftau new <curve> <power> [powersoftau_0000.ptau]",
-async function powersOfTawNew(params, options) {
+async function powersOfTauNew(params, options) {
     let curveName;
     let power;
     let ptauName;
@@ -604,7 +620,7 @@ async function powersOfTawNew(params, options) {
     }
 
     if (params.length < 3) {
-        ptauName = "powersOfTaw" + power + "_0000.ptau";
+        ptauName = "powersOfTau" + power + "_0000.ptau";
     } else {
         ptauName = params[2];
     }
@@ -613,10 +629,10 @@ async function powersOfTawNew(params, options) {
 
     if (options.verbose) Logger.setLogLevel("DEBUG");
 
-    return await powersOfTaw.newAccumulator(curve, power, ptauName, logger);
+    return await powersOfTau.newAccumulator(curve, power, ptauName, logger);
 }
 
-async function powersOfTawExportChallenge(params, options) {
+async function powersOfTauExportChallenge(params, options) {
     let ptauName;
     let challengeName;
 
@@ -630,11 +646,11 @@ async function powersOfTawExportChallenge(params, options) {
 
     if (options.verbose) Logger.setLogLevel("DEBUG");
 
-    return await powersOfTaw.exportChallenge(ptauName, challengeName, logger);
+    return await powersOfTau.exportChallenge(ptauName, challengeName, logger);
 }
 
 // powersoftau challenge contribute <curve> <challenge> [response]
-async function powersOfTawChallengeContribute(params, options) {
+async function powersOfTauChallengeContribute(params, options) {
     let challengeName;
     let responseName;
 
@@ -650,11 +666,11 @@ async function powersOfTawChallengeContribute(params, options) {
 
     if (options.verbose) Logger.setLogLevel("DEBUG");
 
-    return await powersOfTaw.challengeContribute(curve, challengeName, responseName, options.entropy, logger);
+    return await powersOfTau.challengeContribute(curve, challengeName, responseName, options.entropy, logger);
 }
 
 
-async function powersOfTawImport(params, options) {
+async function powersOfTauImport(params, options) {
     let oldPtauName;
     let response;
     let newPtauName;
@@ -670,7 +686,7 @@ async function powersOfTawImport(params, options) {
 
     if (options.verbose) Logger.setLogLevel("DEBUG");
 
-    const res = await powersOfTaw.importResponse(oldPtauName, response, newPtauName, options.name, importPoints, logger);
+    const res = await powersOfTau.importResponse(oldPtauName, response, newPtauName, options.name, importPoints, logger);
 
     if (res) return res;
     if (!doCheck) return;
@@ -678,14 +694,14 @@ async function powersOfTawImport(params, options) {
     // TODO Verify
 }
 
-async function powersOfTawVerify(params, options) {
+async function powersOfTauVerify(params, options) {
     let ptauName;
 
     ptauName = params[0];
 
     if (options.verbose) Logger.setLogLevel("DEBUG");
 
-    const res = await powersOfTaw.verify(ptauName, logger);
+    const res = await powersOfTau.verify(ptauName, logger);
     if (res === true) {
         return 0;
     } else {
@@ -693,7 +709,7 @@ async function powersOfTawVerify(params, options) {
     }
 }
 
-async function powersOfTawBeacon(params, options) {
+async function powersOfTauBeacon(params, options) {
     let oldPtauName;
     let newPtauName;
     let beaconHashStr;
@@ -706,10 +722,10 @@ async function powersOfTawBeacon(params, options) {
 
     if (options.verbose) Logger.setLogLevel("DEBUG");
 
-    return await powersOfTaw.beacon(oldPtauName, newPtauName, options.name ,beaconHashStr, numIterationsExp, logger);
+    return await powersOfTau.beacon(oldPtauName, newPtauName, options.name ,beaconHashStr, numIterationsExp, logger);
 }
 
-async function powersOfTawContribute(params, options) {
+async function powersOfTauContribute(params, options) {
     let oldPtauName;
     let newPtauName;
 
@@ -718,10 +734,10 @@ async function powersOfTawContribute(params, options) {
 
     if (options.verbose) Logger.setLogLevel("DEBUG");
 
-    return await powersOfTaw.contribute(oldPtauName, newPtauName, options.name , options.entropy, logger);
+    return await powersOfTau.contribute(oldPtauName, newPtauName, options.name , options.entropy, logger);
 }
 
-async function powersOfTawPreparePhase2(params, options) {
+async function powersOfTauPreparePhase2(params, options) {
     let oldPtauName;
     let newPtauName;
 
@@ -730,11 +746,39 @@ async function powersOfTawPreparePhase2(params, options) {
 
     if (options.verbose) Logger.setLogLevel("DEBUG");
 
-    return await powersOfTaw.preparePhase2(oldPtauName, newPtauName, logger);
+    return await powersOfTau.preparePhase2(oldPtauName, newPtauName, logger);
+}
+
+async function powersOfTauConvert(params, options) {
+    let oldPtauName;
+    let newPtauName;
+
+    oldPtauName = params[0];
+    newPtauName = params[1];
+
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    return await powersOfTau.convert(oldPtauName, newPtauName, logger);
+}
+
+
+async function powersOfTauTruncate(params, options) {
+    let ptauName;
+
+    ptauName = params[0];
+
+    let template = ptauName;
+    while ((template.length>0) && (template[template.length-1] != ".")) template = template.slice(0, template.length-1);
+    template = template.slice(0, template.length-1);
+    template = template+"_";
+
+    if (options.verbose) Logger.setLogLevel("DEBUG");
+
+    return await powersOfTau.truncate(ptauName, template, logger);
 }
 
 // powersoftau export json <powersoftau_0000.ptau> <powersoftau_0000.json>",
-async function powersOfTawExportJson(params, options) {
+async function powersOfTauExportJson(params, options) {
     let ptauName;
     let jsonName;
 
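The truncate command derives an output name template by stripping the input file's extension and appending an underscore; generateTruncate then writes one file per power with a two-digit suffix. A small illustration with a hypothetical input name:

    // ptauName = "pot18_final.ptau"   (hypothetical example)
    // template = "pot18_final_"       after the while/slice/append steps above
    // truncate() then produces pot18_final_01.ptau, pot18_final_02.ptau, ... up to power-1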
@@ -743,7 +787,7 @@ async function powersOfTawExportJson(params, options) {
 
     if (options.verbose) Logger.setLogLevel("DEBUG");
 
-    const pTau = await powersOfTaw.exportJson(ptauName, logger);
+    const pTau = await powersOfTau.exportJson(ptauName, logger);
 
     const S = JSON.stringify(stringifyBigInts(pTau), null, 1);
     await fs.promises.writeFile(jsonName, S);
src/binfileutils.js

@@ -97,17 +97,20 @@ export async function readBigInt(fd, n8, pos) {
     return Scalar.fromRprLE(buff, 0, n8);
 }
 
-export async function copySection(fdFrom, sections, fdTo, sectionId) {
+export async function copySection(fdFrom, sections, fdTo, sectionId, size) {
+    if (typeof size === "undefined") {
+        size = sections[sectionId][0].size;
+    }
     const chunkSize = fdFrom.pageSize;
     await startReadUniqueSection(fdFrom, sections, sectionId);
     await startWriteSection(fdTo, sectionId);
-    for (let p=0; p<sections[sectionId][0].size; p+=chunkSize) {
-        const l = Math.min(sections[sectionId][0].size -p, chunkSize);
+    for (let p=0; p<size; p+=chunkSize) {
+        const l = Math.min(size -p, chunkSize);
         const buff = await fdFrom.read(l);
         await fdTo.write(buff);
     }
     await endWriteSection(fdTo);
-    await endReadSection(fdFrom);
+    await endReadSection(fdFrom, size != sections[sectionId][0].size);
 
 }
 
@@ -7,4 +7,6 @@ export {default as challengeContribute} from "./powersoftau_challenge_contribute
export {default as beacon} from "./powersoftau_beacon.js";
export {default as contribute} from "./powersoftau_contribute.js";
export {default as preparePhase2} from "./powersoftau_preparephase2.js";
+export {default as truncate} from "./powersoftau_truncate.js";
+export {default as convert} from "./powersoftau_convert.js";
export {default as exportJson} from "./powersoftau_export_json.js";
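With these exports in place, the two new operations are reachable from the public powersOfTau API; a minimal sketch (file names hypothetical, logger omitted):

import * as snarkjs from "snarkjs";

// convert() rewrites an already prepared ptau, appending the power+1 Lagrange
// evaluations for tauG1 to section 12; truncate() then emits one smaller ptau
// file per power below the original ceremony power.
async function postProcess() {
    await snarkjs.powersOfTau.convert("pot12_final.ptau", "pot12_converted.ptau");
    await snarkjs.powersOfTau.truncate("pot12_converted.ptau", "pot12_converted_");
}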
162 src/powersoftau_convert.js (new file)
@@ -0,0 +1,162 @@
import * as binFileUtils from "./binfileutils.js";
import * as utils from "./powersoftau_utils.js";
import * as fastFile from "fastfile";
import { bitReverse } from "./misc.js";

export default async function convert(oldPtauFilename, newPTauFilename, logger) {

    const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
    const {curve, power} = await utils.readPTauHeader(fdOld, sections);

    const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 11);
    await utils.writePTauHeader(fdNew, curve, power);

    // const fdTmp = await fastFile.createOverride(newPTauFilename+ ".tmp");
    const fdTmp = await fastFile.createOverride({type: "bigMem"});

    await binFileUtils.copySection(fdOld, sections, fdNew, 2);
    await binFileUtils.copySection(fdOld, sections, fdNew, 3);
    await binFileUtils.copySection(fdOld, sections, fdNew, 4);
    await binFileUtils.copySection(fdOld, sections, fdNew, 5);
    await binFileUtils.copySection(fdOld, sections, fdNew, 6);
    await binFileUtils.copySection(fdOld, sections, fdNew, 7);

    await processSection(2, 12, "G1", "tauG1" );
    await binFileUtils.copySection(fdOld, sections, fdNew, 13);
    await binFileUtils.copySection(fdOld, sections, fdNew, 14);
    await binFileUtils.copySection(fdOld, sections, fdNew, 15);

    await fdOld.close();
    await fdNew.close();
    await fdTmp.close();

    // await fs.promises.unlink(newPTauFilename+ ".tmp");

    return;

    async function processSection(oldSectionId, newSectionId, Gstr, sectionName) {
        const CHUNKPOW = 16;
        if (logger) logger.debug("Starting section: "+sectionName);

        await binFileUtils.startWriteSection(fdNew, newSectionId);

        const size = sections[newSectionId][0].size;
        const chunkSize = fdOld.pageSize;
        await binFileUtils.startReadUniqueSection(fdOld, sections, newSectionId);
        for (let p=0; p<size; p+=chunkSize) {
            const l = Math.min(size -p, chunkSize);
            const buff = await fdOld.read(l);
            await fdNew.write(buff);
        }
        await binFileUtils.endReadSection(fdOld);

        if (oldSectionId == 2) {
            await processSectionPower(power+1);
        }

        await binFileUtils.endWriteSection(fdNew);

        async function processSectionPower(p) {
            const chunkPower = p > CHUNKPOW ? CHUNKPOW : p;
            const pointsPerChunk = 1<<chunkPower;
            const nPoints = 1 << p;
            const nChunks = nPoints / pointsPerChunk;

            const G = curve[Gstr];
            const Fr = curve.Fr;
            const sGin = G.F.n8*2;
            const sGmid = G.F.n8*3;

            await binFileUtils.startReadUniqueSection(fdOld, sections, oldSectionId);
            // Build the initial tmp Buff
            fdTmp.pos =0;
            for (let i=0; i<nChunks; i++) {
                let buff;
                if (logger) logger.debug(`${sectionName} Prepare ${i+1}/${nChunks}`);
                if ((oldSectionId == 2)&&(p==power+1)) {
                    buff = new Uint8Array(pointsPerChunk*sGin);
                    await fdOld.readToBuffer(buff, 0,(pointsPerChunk-1)*sGin );
                    buff.set(curve.G1.zeroAffine, (pointsPerChunk-1)*sGin );
                } else {
                    buff = await fdOld.read(pointsPerChunk*sGin);
                }
                buff = await G.batchToJacobian(buff);
                for (let j=0; j<pointsPerChunk; j++) {
                    fdTmp.pos = bitReverse(i*pointsPerChunk+j, p)*sGmid;
                    await fdTmp.write(buff.slice(j*sGmid, (j+1)*sGmid ));
                }
            }
            await binFileUtils.endReadSection(fdOld, true);

            for (let j=0; j<nChunks; j++) {
                if (logger) logger.debug(`${sectionName} ${p} FFTMix ${j+1}/${nChunks}`);
                let buff;
                fdTmp.pos = (j*pointsPerChunk)*sGmid;
                buff = await fdTmp.read(pointsPerChunk*sGmid);
                buff = await G.fftMix(buff);
                fdTmp.pos = (j*pointsPerChunk)*sGmid;
                await fdTmp.write(buff);
            }
            for (let i=chunkPower+1; i<= p; i++) {
                const nGroups = 1 << (p - i);
                const nChunksPerGroup = nChunks / nGroups;
                for (let j=0; j<nGroups; j++) {
                    for (let k=0; k <nChunksPerGroup/2; k++) {
                        if (logger) logger.debug(`${sectionName} ${i}/${p} FFTJoin ${j+1}/${nGroups} ${k+1}/${nChunksPerGroup/2}`);
                        const first = Fr.exp( Fr.w[i], k*pointsPerChunk);
                        const inc = Fr.w[i];
                        const o1 = j*nChunksPerGroup + k;
                        const o2 = j*nChunksPerGroup + k + nChunksPerGroup/2;

                        let buff1, buff2;
                        fdTmp.pos = o1*pointsPerChunk*sGmid;
                        buff1 = await fdTmp.read(pointsPerChunk * sGmid);
                        fdTmp.pos = o2*pointsPerChunk*sGmid;
                        buff2 = await fdTmp.read(pointsPerChunk * sGmid);

                        [buff1, buff2] = await G.fftJoin(buff1, buff2, first, inc);

                        fdTmp.pos = o1*pointsPerChunk*sGmid;
                        await fdTmp.write(buff1);
                        fdTmp.pos = o2*pointsPerChunk*sGmid;
                        await fdTmp.write(buff2);
                    }
                }
            }
            await finalInverse(p);
        }

        async function finalInverse(p) {
            const G = curve[Gstr];
            const Fr = curve.Fr;
            const sGmid = G.F.n8*3;
            const sGout = G.F.n8*2;

            const chunkPower = p > CHUNKPOW ? CHUNKPOW : p;
            const pointsPerChunk = 1<<chunkPower;
            const nPoints = 1 << p;
            const nChunks = nPoints / pointsPerChunk;

            const o = fdNew.pos;
            fdTmp.pos = 0;
            const factor = Fr.inv( Fr.e( 1<< p));
            for (let i=0; i<nChunks; i++) {
                if (logger) logger.debug(`${sectionName} ${p} FFTFinal ${i+1}/${nChunks}`);
                let buff;
                buff = await fdTmp.read(pointsPerChunk * sGmid);
                buff = await G.fftFinal(buff, factor);

                if ( i == 0) {
                    fdNew.pos = o;
                    await fdNew.write(buff.slice((pointsPerChunk-1)*sGout));
                    fdNew.pos = o + ((nChunks - 1)*pointsPerChunk + 1) * sGout;
                    await fdNew.write(buff.slice(0, (pointsPerChunk-1)*sGout));
                } else {
                    fdNew.pos = o + ((nChunks - 1 - i)*pointsPerChunk + 1) * sGout;
                    await fdNew.write(buff);
                }
            }
            fdNew.pos = o + nChunks * pointsPerChunk * sGout;
        }
    }
}
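The tmp-file layout above relies on a bit-reversal permutation before the iterative FFT passes; a standalone sketch of that ordering (illustrative only, not the misc.js implementation itself):

// Reverse the low `bits` bits of idx: the slot where convert() stores point idx.
function bitReverse(idx, bits) {
    let r = 0;
    for (let i = 0; i < bits; i++) {
        r = (r << 1) | ((idx >> i) & 1);
    }
    return r;
}

// For p = 3 the 8 points are written in the order an iterative radix-2 FFT expects.
console.log([...Array(8).keys()].map(i => bitReverse(i, 3))); // [0, 4, 2, 6, 1, 5, 3, 7]

After the per-chunk fftMix and the fftJoin passes, finalInverse scales every point by Fr.inv(2^p) before writing the section out.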
@@ -44,6 +44,10 @@ export default async function preparePhase2(oldPtauFilename, newPTauFilename, lo
            await processSectionPower(p);
        }

+        if (oldSectionId == 2) {
+            await processSectionPower(power+1);
+        }
+
        await binFileUtils.endWriteSection(fdNew);

        async function processSectionPower(p) {
@@ -63,7 +67,13 @@ export default async function preparePhase2(oldPtauFilename, newPTauFilename, lo
            for (let i=0; i<nChunks; i++) {
                let buff;
                if (logger) logger.debug(`${sectionName} Prepare ${i+1}/${nChunks}`);
-                buff = await fdOld.read(pointsPerChunk*sGin);
+                if ((oldSectionId == 2)&&(p==power+1)) {
+                    buff = new Uint8Array(pointsPerChunk*sGin);
+                    await fdOld.readToBuffer(buff, 0,(pointsPerChunk-1)*sGin );
+                    buff.set(curve.G1.zeroAffine, (pointsPerChunk-1)*sGin );
+                } else {
+                    buff = await fdOld.read(pointsPerChunk*sGin);
+                }
                buff = await G.batchToJacobian(buff);
                for (let j=0; j<pointsPerChunk; j++) {
                    fdTmp.pos = bitReverse(i*pointsPerChunk+j, p)*sGmid;
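The reason the extra power+1 pass can work at all is a counting argument: the tauG1 section of a power-`power` ceremony holds 2^(power+1) - 1 points, so only a single missing slot has to be filled with the identity point (zeroAffine) to reach a full power+1 FFT domain. A small sketch of that arithmetic, with a hypothetical ceremony power:

const power = 11;                        // hypothetical ceremony power
const stored = (1 << (power + 1)) - 1;   // tau^0 .. tau^(2^(power+1)-2) held in section 2 (tauG1)
const needed = 1 << (power + 1);         // domain size of the power+1 Lagrange basis
console.log(needed - stored);            // 1 -> pad one slot with curve.G1.zeroAffine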
46 src/powersoftau_truncate.js (new file)
@@ -0,0 +1,46 @@
import * as binFileUtils from "./binfileutils.js";
import * as utils from "./powersoftau_utils.js";

export default async function truncate(ptauFilename, template, logger) {

    const {fd: fdOld, sections} = await binFileUtils.readBinFile(ptauFilename, "ptau", 1);
    const {curve, power, ceremonyPower} = await utils.readPTauHeader(fdOld, sections);

    const sG1 = curve.G1.F.n8*2;
    const sG2 = curve.G2.F.n8*2;

    for (let p=1; p<power; p++) {
        await generateTruncate(p);
    }

    await fdOld.close();

    return true;

    async function generateTruncate(p) {

        let sP = p.toString();
        while (sP.length<2) sP = "0" + sP;

        if (logger) logger.debug("Writing Power: "+sP);

        const fdNew = await binFileUtils.createBinFile(template + sP + ".ptau", "ptau", 1, 11);
        await utils.writePTauHeader(fdNew, curve, p, ceremonyPower);

        await binFileUtils.copySection(fdOld, sections, fdNew, 2, ((1<<p)*2-1) * sG1 ); // tagG1
        await binFileUtils.copySection(fdOld, sections, fdNew, 3, (1<<p) * sG2); // tauG2
        await binFileUtils.copySection(fdOld, sections, fdNew, 4, (1<<p) * sG1); // alfaTauG1
        await binFileUtils.copySection(fdOld, sections, fdNew, 5, (1<<p) * sG1); // betaTauG1
        await binFileUtils.copySection(fdOld, sections, fdNew, 6, sG2); // betaTauG2
        await binFileUtils.copySection(fdOld, sections, fdNew, 7); // contributions
        await binFileUtils.copySection(fdOld, sections, fdNew, 12, ((1<<p)*2 -1) * sG1); // L_tauG1
        await binFileUtils.copySection(fdOld, sections, fdNew, 13, ((1<<p)*2 -1) * sG2); // L_tauG2
        await binFileUtils.copySection(fdOld, sections, fdNew, 14, ((1<<p)*2 -1) * sG1); // L_alfaTauG1
        await binFileUtils.copySection(fdOld, sections, fdNew, 15, ((1<<p)*2 -1) * sG1); // L_betaTauG1

        await fdNew.close();
    }

}
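For a feel of the sizes involved, the byte counts copied by generateTruncate for a given p follow directly from the calls above; a sketch assuming bn128, where affine G1 points take 64 bytes and G2 points 128:

// Bytes copied into a truncated power-p file (bn128 point sizes assumed).
function truncatedBytes(p, sG1 = 64, sG2 = 128) {
    const n = 1 << p;
    return {
        tauG1:     (n * 2 - 1) * sG1,  // section 2
        tauG2:     n * sG2,            // section 3
        alfaTauG1: n * sG1,            // section 4
        betaTauG1: n * sG1,            // section 5
        betaG2:    sG2,                // section 6
    };
}
console.log(truncatedBytes(10)); // tauG1: 131008, tauG2: 131072, alfaTauG1: 65536, betaTauG1: 65536, betaG2: 128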
@@ -227,9 +227,11 @@ export default async function verify(tauFilename, logger) {
    const nextContributionHash = nextContributionHasher.digest();

    // Check the nextChallengeHash
-    if (!misc.hashIsEqual(nextContributionHash,curContr.nextChallenge)) {
-        if (logger) logger.error("Hash of the values does not match the next challenge of the last contributor in the contributions section");
-        return false;
+    if (power == ceremonyPower) {
+        if (!misc.hashIsEqual(nextContributionHash,curContr.nextChallenge)) {
+            if (logger) logger.error("Hash of the values does not match the next challenge of the last contributor in the contributions section");
+            return false;
+        }
    }

    if (logger) logger.info(misc.formatHash(nextContributionHash, "Next challenge hash: "));
@@ -265,6 +267,8 @@ export default async function verify(tauFilename, logger) {

    await fd.close();

+    if (logger) logger.info("Powers of Tau Ok!");
+
    return true;

    function printContribution(curContr, prevContr) {
@@ -393,6 +397,11 @@ export default async function verify(tauFilename, logger) {
            if (!res) return false;
        }

+        if (tauSection == 2) {
+            const res = await verifyPower(power+1);
+            if (!res) return false;
+        }
+
        return true;

        async function verifyPower(p) {
@@ -414,7 +423,12 @@ export default async function verify(tauFilename, logger) {
            if (logger) logger.debug(`reading points Powers${p}...`);
            await binFileUtils.startReadUniqueSection(fd, sections, tauSection);
            buffG = new BigBuffer(nPoints*sG);
-            await fd.readToBuffer(buffG, 0, nPoints*sG);
+            if (p == power+1) {
+                await fd.readToBuffer(buffG, 0, (nPoints-1)*sG);
+                buffG.set(curve.G1.zeroAffine, (nPoints-1)*sG);
+            } else {
+                await fd.readToBuffer(buffG, 0, nPoints*sG);
+            }
            await binFileUtils.endReadSection(fd, true);

            const resTau = await G.multiExpAffine(buffG, buff_r, logger, sectionName + "_" + p);
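The new power == ceremonyPower guard matters because files produced by truncate keep the original ceremony power in their header while storing fewer points, so the nextChallenge hash computed over the truncated contents can no longer be expected to match. A sketch of how that shows up when reading a header (file name hypothetical):

import * as binFileUtils from "./binfileutils.js";
import * as utils from "./powersoftau_utils.js";

async function showPowers(fileName) {
    const {fd, sections} = await binFileUtils.readBinFile(fileName, "ptau", 1);
    const {power, ceremonyPower} = await utils.readPTauHeader(fd, sections);
    await fd.close();
    return {power, ceremonyPower}; // e.g. { power: 8, ceremonyPower: 11 } for a truncated file
}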
@@ -30,7 +30,7 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {

    const cirPower = log2(r1cs.nConstraints + r1cs.nPubInputs + r1cs.nOutputs +1 -1) +1;

-    if (cirPower > power+1) {
+    if (cirPower > power) {
        if (logger) logger.error(`circuit too big for this power of tau ceremony. ${r1cs.nConstraints}*2 > 2**${power}`);
        return -1;
    }
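A worked example of the size check above (constraint counts hypothetical; Math.floor(Math.log2(v)) stands in for the log2 helper newZKey imports):

const log2 = (v) => Math.floor(Math.log2(v));
const nConstraints = 1000, nPubInputs = 2, nOutputs = 1;
const cirPower = log2(nConstraints + nPubInputs + nOutputs + 1 - 1) + 1;
console.log(cirPower, 1 << cirPower); // 10 1024 -> with the new check, a ptau of power >= 10 is required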
@@ -33,7 +33,7 @@ describe("Full process", function () {
    });

    it ("powersoftau new", async () => {
-        await snarkjs.powersOfTau.newAccumulator(curve, 12, ptau_0);
+        await snarkjs.powersOfTau.newAccumulator(curve, 11, ptau_0);
    });

    it ("powersoftau contribute ", async () => {