preparephase2 with all intermediate values

Jordi Baylina 2020-06-02 23:13:20 +02:00
parent 4020d0b5b9
commit 8742ec0a38
No known key found for this signature in database
GPG Key ID: 7480C80C1BE43112
4 changed files with 247 additions and 50 deletions

src/misc.js (new file)

@@ -0,0 +1,34 @@
// Reverse the low `bits` bits of `idx`, one bit at a time.
function _revSlow(idx, bits) {
    let res = 0;
    let a = idx;
    for (let i=0; i<bits; i++) {
        res <<= 1;
        res = res | (a & 1);
        a >>= 1;
    }
    return res;
}

// Precomputed byte-reversal table: _revTable[b] is the byte b with its 8 bits reversed.
const _revTable = [];
for (let i=0; i<256; i++) {
    _revTable[i] = _revSlow(i, 8);
}
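// Reverse the low `bits` bits of a 32-bit index, using the byte table.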
function bitReverse(idx, bits) {
return (
_revTable[idx >>> 24] |
(_revTable[(idx >>> 16) & 0xFF] << 8) |
(_revTable[(idx >>> 8) & 0xFF] << 16) |
(_revTable[idx & 0xFF] << 24)
) >>> (32-bits);
}
// Integer log2 of a 32-bit value via parallel bit folding (assumes V > 0).
function log2(V) {
    return (
        ((V & 0xFFFF0000) !== 0 ? (V &= 0xFFFF0000, 16) : 0) |
        ((V & 0xFF00FF00) !== 0 ? (V &= 0xFF00FF00, 8) : 0) |
        ((V & 0xF0F0F0F0) !== 0 ? (V &= 0xF0F0F0F0, 4) : 0) |
        ((V & 0xCCCCCCCC) !== 0 ? (V &= 0xCCCCCCCC, 2) : 0) |
        ((V & 0xAAAAAAAA) !== 0 ? 1 : 0)
    );
}
module.exports.bitReverse = bitReverse;
module.exports.log2 = log2;
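
A quick sanity check of the two helpers (illustrative sketch, not part of the commit):

// bitReverse gives the index permutation used to reorder FFT inputs;
// log2 recovers the domain power from a power-of-two size.
const {bitReverse, log2} = require("./misc");

console.log(bitReverse(1, 3)); // 4  (binary 001 -> 100)
console.log(bitReverse(6, 3)); // 3  (binary 110 -> 011)
console.log(log2(1 << 10));    // 10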

(modified file)

@@ -1,5 +1,8 @@
const binFileUtils = require("./binfileutils");
const utils = require("./powersoftau_utils");
const fastFile = require("fastfile");
const {bitReverse} = require("./misc");
const fs = require("fs");
async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
@@ -9,6 +12,8 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 11);
await utils.writePTauHeader(fdNew, curve, power);
const fdTmp = await fastFile.createOverride(newPTauFilename + ".tmp");
await binFileUtils.copySection(fdOld, sections, fdNew, 2);
await binFileUtils.copySection(fdOld, sections, fdNew, 3);
await binFileUtils.copySection(fdOld, sections, fdNew, 4);
@@ -16,32 +21,129 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
await binFileUtils.copySection(fdOld, sections, fdNew, 6);
await binFileUtils.copySection(fdOld, sections, fdNew, 7);
await processSection(2, 12, "G1", (1<<power) , "tauG1" );
await processSection(3, 13, "G2", (1<<power) , "tauG2" );
await processSection(4, 14, "G1", (1<<power) , "alphaTauG1" );
await processSection(5, 15, "G1", (1<<power) , "betaTauG1" );
await processSection(2, 12, "G1", "tauG1" );
await processSection(3, 13, "G2", "tauG2" );
await processSection(4, 14, "G1", "alphaTauG1" );
await processSection(5, 15, "G1", "betaTauG1" );
await fdOld.close();
await fdNew.close();
await fdTmp.close();
await fs.promises.unlink(newPTauFilename + ".tmp");
return;
async function processSection(oldSectionId, newSectionId, Gstr, NPoints, sectionName) {
async function processSection(oldSectionId, newSectionId, Gstr, sectionName) {
const CHUNKPOW = 16;
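        // Process the section in chunks of 2^CHUNKPOW points so a full 2^power
        // section never has to fit in memory at once.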
if (verbose) console.log("Starting section: "+sectionName);
const G = curve[Gstr];
const sG = G.F.n8*2;
let buff;
await binFileUtils.startReadUniqueSection(fdOld, sections, oldSectionId);
buff = await fdOld.read(sG*NPoints);
await binFileUtils.endReadSection(fdOld, true);
buff = await G.ifft(buff, verbose ? console.log : null);
await binFileUtils.startWriteSection(fdNew, newSectionId);
await fdNew.write(buff);
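        // One pass per power p = 0..power: the new sections (12-15) concatenate
        // the Lagrange-basis points for every intermediate domain size 2^p.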
for (let p=0; p<=power; p++) {
await processSectionPower(p);
}
await binFileUtils.endWriteSection(fdNew);
async function processSectionPower(p) {
const chunkPower = p > CHUNKPOW ? CHUNKPOW : p;
const pointsPerChunk = 1<<chunkPower;
const nPoints = 1 << p;
const nChunks = nPoints / pointsPerChunk;
const G = curve[Gstr];
const Fr = curve.Fr;
const PFr = curve.PFr;
const sGin = G.F.n8*2;
const sGmid = G.F.n8*3;
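            // Intermediate values are kept in Jacobian coordinates (3 field
            // elements per point), hence n8*3 here versus n8*2 for affine I/O.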
await binFileUtils.startReadUniqueSection(fdOld, sections, oldSectionId);
            // Scatter the input points into bit-reversed order in the temp file.
            fdTmp.pos = 0;
for (let i=0; i<nChunks; i++) {
let buff;
if (verbose) console.log(`${sectionName} Prepare ${i+1}/${nChunks}`);
buff = await fdOld.read(pointsPerChunk*sGin);
buff = await G.batchToJacobian(buff);
for (let j=0; j<pointsPerChunk; j++) {
fdTmp.pos = bitReverse(i*pointsPerChunk+j, p)*sGmid;
await fdTmp.write(buff.slice(j*sGmid, (j+1)*sGmid ));
}
}
await binFileUtils.endReadSection(fdOld, true);
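            // Radix-2 FFT passes: levels that fit inside a chunk are mixed in
            // memory (fftMix); larger levels combine chunk pairs with the proper
            // roots of unity (fftJoin).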
for (let i=1; i<= p; i++) {
if (i<=chunkPower) {
for (let j=0; j<nChunks; j++) {
if (verbose) console.log(`${sectionName} ${i}/${p} FFTMix ${j+1}/${nChunks}`);
let buff;
fdTmp.pos = (j*pointsPerChunk)*sGmid;
buff = await fdTmp.read(pointsPerChunk*sGmid);
buff = await G.fftMix(buff, i);
fdTmp.pos = (j*pointsPerChunk)*sGmid;
await fdTmp.write(buff);
}
} else {
const nGroups = 1 << (p - i);
const nChunksPerGroup = nChunks / nGroups;
for (let j=0; j<nGroups; j++) {
for (let k=0; k <nChunksPerGroup/2; k++) {
if (verbose) console.log(`${sectionName} ${i}/${p} FFTJoin ${j+1}/${nGroups} ${k+1}/${nChunksPerGroup/2}`);
const first = Fr.pow( PFr.w[i], k*pointsPerChunk);
const inc = PFr.w[i];
const o1 = j*nChunksPerGroup + k;
const o2 = j*nChunksPerGroup + k + nChunksPerGroup/2;
let buff1, buff2;
fdTmp.pos = o1*sGmid;
buff1 = await fdTmp.read(pointsPerChunk * sGmid);
fdTmp.pos = o2*sGmid;
buff2 = await fdTmp.read(pointsPerChunk * sGmid);
[buff1, buff2] = await G.fftJoin(buff1, buff2, first, inc);
fdTmp.pos = o1*sGmid;
await fdTmp.write(buff1);
fdTmp.pos = o2*sGmid;
await fdTmp.write(buff2);
}
}
}
}
await finalInverse(p);
}
async function finalInverse(p) {
const G = curve[Gstr];
const Fr = curve.Fr;
const sGmid = G.F.n8*3;
const sGout = G.F.n8*2;
const chunkPower = p > CHUNKPOW ? CHUNKPOW : p;
const pointsPerChunk = 1<<chunkPower;
const nPoints = 1 << p;
const nChunks = nPoints / pointsPerChunk;
const o = fdNew.pos;
fdTmp.pos = 0;
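        // The forward passes above compute an FFT; an inverse FFT is the same
        // result scaled by 1/n and re-indexed k -> (n-k) mod n. fftFinal applies
        // the scaling, and the writes below place each chunk at its reversed
        // position (index 0 is the only fixed point).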
for (let i=0; i<nChunks; i++) {
if (verbose) console.log(`${sectionName} ${p} FFTFinal ${i+1}/${nChunks}`);
let buff;
buff = await fdTmp.read(pointsPerChunk * sGmid);
buff = await G.fftFinal(buff, Fr.inv(Fr.e(1 << p)));
if (i == 0) {
fdNew.pos = o;
await fdNew.write(buff.slice((pointsPerChunk-1)*sGout));
fdNew.pos = o + ((nChunks - 1)*pointsPerChunk + 1) * sGout;
await fdNew.write(buff.slice(0, (pointsPerChunk-1)*sGout));
} else {
fdNew.pos = o + ((nChunks - 1 - i)*pointsPerChunk + 1) * sGout;
await fdNew.write(buff);
}
}
fdNew.pos = o + nChunks * pointsPerChunk * sGout;
}
}
}

(new file)

@@ -0,0 +1,48 @@
const binFileUtils = require("./binfileutils");
const utils = require("./powersoftau_utils");
async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power} = await utils.readPTauHeader(fdOld, sections);
const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 11);
await utils.writePTauHeader(fdNew, curve, power);
await binFileUtils.copySection(fdOld, sections, fdNew, 2);
await binFileUtils.copySection(fdOld, sections, fdNew, 3);
await binFileUtils.copySection(fdOld, sections, fdNew, 4);
await binFileUtils.copySection(fdOld, sections, fdNew, 5);
await binFileUtils.copySection(fdOld, sections, fdNew, 6);
await binFileUtils.copySection(fdOld, sections, fdNew, 7);
await processSection(2, 12, "G1", (1<<power) , "tauG1" );
await processSection(3, 13, "G2", (1<<power) , "tauG2" );
await processSection(4, 14, "G1", (1<<power) , "alphaTauG1" );
await processSection(5, 15, "G1", (1<<power) , "betaTauG1" );
await fdOld.close();
await fdNew.close();
return;
async function processSection(oldSectionId, newSectionId, Gstr, NPoints, sectionName) {
if (verbose) console.log("Starting section: "+sectionName);
const G = curve[Gstr];
const sG = G.F.n8*2;
let buff;
await binFileUtils.startReadUniqueSection(fdOld, sections, oldSectionId);
buff = await fdOld.read(sG*NPoints);
await binFileUtils.endReadSection(fdOld, true);
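        // Whole-section iFFT in one call: simple, but it needs all 2^power
        // points of the section in memory at once.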
buff = await G.ifft(buff, verbose ? console.log : null);
await binFileUtils.startWriteSection(fdNew, newSectionId);
await fdNew.write(buff);
await binFileUtils.endWriteSection(fdNew);
}
}
module.exports = preparePhase2;

(modified file)

@@ -157,7 +157,6 @@ async function verify(tauFilename, verbose) {
const nextContributionHasher = Blake2b(64);
nextContributionHasher.update(curContr.responseHash);
const key = curContr.key;
// Verify powers and compute nextChallangeHash
@@ -252,7 +251,8 @@ async function verify(tauFilename, verbose) {
for (let i = contrs.length-2; i>=0; i--) {
const curContr = contrs[i];
const prevContr = (i>0) ? contrs[i-1] : initialContribution;
await verifyContribution(curve, curContr, prevContr);
const res = await verifyContribution(curve, curContr, prevContr);
if (!res) return false;
printContribution(curContr, prevContr);
}
console.log("-----------------------------------------------------");
@@ -262,13 +262,13 @@ async function verify(tauFilename, verbose) {
console.log(" snarkjs \"powersoftau preparephase2\" to prepare this file to be used in the phase2 ceremony." );
} else {
let res;
res = await verifyLagrangeEvaluations("G1", 1 << power, 2, 12, "tauG1");
res = await verifyLagrangeEvaluations("G1", 2, 12, "tauG1");
if (!res) return false;
res = await verifyLagrangeEvaluations("G2", 1 << power, 3, 13, "tauG2");
res = await verifyLagrangeEvaluations("G2", 3, 13, "tauG2");
if (!res) return false;
res = await verifyLagrangeEvaluations("G1", 1 << power, 4, 14, "alphaTauG1");
res = await verifyLagrangeEvaluations("G1", 4, 14, "alphaTauG1");
if (!res) return false;
res = await verifyLagrangeEvaluations("G1", 1 << power, 5, 15, "betaTauG1");
res = await verifyLagrangeEvaluations("G1", 5, 15, "betaTauG1");
if (!res) return false;
}
@@ -375,13 +375,9 @@ async function verify(tauFilename, verbose) {
}
async function verifyLagrangeEvaluations(gName, nPoints, tauSection, lagrangeSection, sectionName) {
async function verifyLagrangeEvaluations(gName, tauSection, lagrangeSection, sectionName) {
if (verbose) console.log(`Verifying phase2 calculated values ${sectionName}...`);
const n8r = curve.Fr.n8;
let buff_r = new Uint8Array(nPoints * n8r);
let buffG;
const G = curve[gName];
const sG = G.F.n8*2;
@@ -392,33 +388,50 @@ async function verify(tauFilename, verbose) {
const rng = new ChaCha(seed);
for (let i=0; i<nPoints; i++) {
const e = curve.Fr.fromRng(rng);
curve.Fr.toRprLE(buff_r, i*n8r, e);
}
await binFileUtils.startReadUniqueSection(fd, sections, tauSection);
buffG = await fd.read(nPoints*sG);
await binFileUtils.endReadSection(fd, true);
const resTau = await G.multiExpAffine(buffG, buff_r);
buff_r = await curve.Fr.batchToMontgomery(buff_r);
buff_r = await curve.Fr.fft(buff_r);
buff_r = await curve.Fr.batchFromMontgomery(buff_r);
await binFileUtils.startReadUniqueSection(fd, sections, lagrangeSection);
buffG = await fd.read(nPoints*sG);
await binFileUtils.endReadSection(fd, true);
const resLagrange = await G.multiExpAffine(buffG, buff_r);
if (!G.eq(resTau, resLagrange)) {
console.log("Phase2 caclutation does not match with powers of tau");
return false;
for (let p=0; p<= power; p ++) {
const res = await verifyPower(p);
if (!res) return false;
}
return true;
async function verifyPower(p) {
if (verbose) console.log(`Power ${p}...`);
const n8r = curve.Fr.n8;
const nPoints = 1<<p;
let buff_r = new Uint8Array(nPoints * n8r);
let buffG;
for (let i=0; i<nPoints; i++) {
const e = curve.Fr.fromRng(rng);
curve.Fr.toRprLE(buff_r, i*n8r, e);
}
await binFileUtils.startReadUniqueSection(fd, sections, tauSection);
buffG = await fd.read(nPoints*sG);
await binFileUtils.endReadSection(fd, true);
const resTau = await G.multiExpAffine(buffG, buff_r);
buff_r = await curve.Fr.batchToMontgomery(buff_r);
buff_r = await curve.Fr.fft(buff_r);
buff_r = await curve.Fr.batchFromMontgomery(buff_r);
await binFileUtils.startReadUniqueSection(fd, sections, lagrangeSection);
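        // Skip the sub-sections for powers 0..p-1, which hold 2^p - 1 points in total.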
fd.pos += sG*((1 << p)-1);
buffG = await fd.read(nPoints*sG);
await binFileUtils.endReadSection(fd, true);
const resLagrange = await G.multiExpAffine(buffG, buff_r);
if (!G.eq(resTau, resLagrange)) {
console.log("Phase2 caclutation does not match with powers of tau");
return false;
}
return true;
}
}
}
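
For reference, the identity behind verifyLagrangeEvaluations (a summary of the code above, with random scalars $r_i$ and $\hat r = \mathrm{FFT}(r)$): both multiexponentiations evaluate the same polynomial $P(x) = \sum_i r_i x^i$ at $\tau$, once over the power basis and once over the Lagrange basis at the $2^p$-th roots of unity $\omega$:

$$\sum_i r_i\,[\tau^i]G \;=\; [P(\tau)]G \;=\; \sum_j \hat r_j\,[L_j(\tau)]G, \qquad \hat r_j \;=\; \sum_i r_i\,\omega^{ij} \;=\; P(\omega^j).$$

Agreement for random $r$ means the Lagrange section matches the tau section with overwhelming probability.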