powers of tau validated

This commit is contained in:
Jordi Baylina 2020-05-11 20:23:04 +02:00
parent 8ecb2cfdf3
commit 9f72725b3d
GPG Key ID: 7480C80C1BE43112
13 changed files with 1127 additions and 280 deletions

50
cli.js
View File

@ -122,6 +122,20 @@ const commands = [
options: "-verbose|v -entropy|e",
action: powersOfTawContribute
},
{
cmd: "powersoftaw import <powersoftaw_old.ptaw> <response> <<powersoftaw_new.ptaw>",
description: "import a response to a ptaw file",
alias: ["pti"],
options: "-verbose|v -nopoints -nocheck",
action: powersOfTawImport
},
{
cmd: "powersoftaw verify <powersoftaw.ptaw>",
description: "verifies a powers of tau file",
alias: ["ptv"],
options: "-verbose|v",
action: powersOfTawVerify
},
];
@ -515,6 +529,42 @@ async function powersOfTawContribute(params, options) {
}
async function powersOfTawImport(params, options) {
let oldPtauName;
let response;
let newPtauName;
let importPoints = true;
let doCheck = true;
oldPtauName = params[0];
response = params[1];
newPtauName = params[2];
if (options.nopoints) importPoints = false;
if (options.nocheck) doCheck = false;
const res = await powersOfTaw.importResponse(oldPtauName, response, newPtauName, importPoints, options.verbose);
if (res) return res;
if (!doCheck) return;
// TODO Verify
}
async function powersOfTawVerify(params, options) {
let ptauName;
ptauName = params[0];
const res = await powersOfTaw.verify(ptauName, options.verbose);
if (res) {
console.log("Powers of tau OK!");
} else {
console.log("=======>INVALID Powers of tau<==========");
}
}
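These two commands wrap the new library entry points. A minimal sketch of driving the same flow programmatically (file names are hypothetical; the require path assumes the repo layout used by cli.js):
const powersOfTaw = require("./src/powersoftau");
async function importAndVerify() {
    // Merge a participant's response into a fresh accumulator file,
    // then re-check the whole file, exactly as `powersoftaw verify` does.
    await powersOfTaw.importResponse("pot_old.ptaw", "response_0001", "pot_new.ptaw", true /*importPoints*/, true /*verbose*/);
    const ok = await powersOfTaw.verify("pot_new.ptaw", true /*verbose*/);
    console.log(ok ? "Powers of tau OK!" : "INVALID");
}
importAndVerify();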
function generateVerifier_original(verificationKey) {
let template = fs.readFileSync(path.join( __dirname, "templates", "verifier_original.sol"), "utf-8");

17
package-lock.json generated
View File

@ -151,12 +151,23 @@
"requires": {
"blake2b-wasm": "^1.1.0",
"nanoassert": "^1.0.0"
},
"dependencies": {
"blake2b-wasm": {
"version": "1.1.7",
"resolved": "https://registry.npmjs.org/blake2b-wasm/-/blake2b-wasm-1.1.7.tgz",
"integrity": "sha512-oFIHvXhlz/DUgF0kq5B1CqxIDjIJwh9iDeUUGQUcvgiGz7Wdw03McEO7CfLBy7QKGdsydcMCgO9jFNBAFCtFcA==",
"requires": {
"nanoassert": "^1.0.0"
}
}
}
},
"blake2b-wasm": {
"version": "1.1.7",
"resolved": "https://registry.npmjs.org/blake2b-wasm/-/blake2b-wasm-1.1.7.tgz",
"integrity": "sha512-oFIHvXhlz/DUgF0kq5B1CqxIDjIJwh9iDeUUGQUcvgiGz7Wdw03McEO7CfLBy7QKGdsydcMCgO9jFNBAFCtFcA==",
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/blake2b-wasm/-/blake2b-wasm-2.1.0.tgz",
"integrity": "sha512-8zKXt9nk4cUCBU2jaUcSYcPA+UESwWOmb9Gsi8J35BifVb+tjVmbDhZbvmVmZEk6xZN1y35RNW6VqOwb0mkqsg==",
"dev": true,
"requires": {
"nanoassert": "^1.0.0"
}

View File

@ -39,6 +39,7 @@
"yargs": "^12.0.5"
},
"devDependencies": {
"blake2b-wasm": "^2.1.0",
"eslint": "^6.8.0",
"lodash": "^4.17.15",
"mocha": "^7.1.1"

View File

@ -40,5 +40,13 @@ function createKeyPair(curve, personalization, challangeHash, rng ) {
return k;
}
module.exports.create = createKeyPair;
function createPTauKey(curve, challangeHash, rng) {
const key = {};
key.tau = createKeyPair(curve, 0, challangeHash, rng);
key.alpha = createKeyPair(curve, 1, challangeHash, rng);
key.beta = createKeyPair(curve, 2, challangeHash, rng);
return key;
}
module.exports.createPTauKey = createPTauKey;
module.exports.getG2sp = getG2sp;
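createPTauKey binds the three sub-keys to the current challenge through the personalization bytes 0, 1 and 2. Each sub-key holds g1_sx = prvKey*g1_s and g2_spx = prvKey*g2_sp, where g2_sp itself is derived from the challenge hash by getG2sp, so knowledge of prvKey can be confirmed with a pairing test. A hedged sketch (assumes ffjavascript's bn128; the fixed seed and all-zero challenge hash are demo stand-ins):
const bn128 = require("ffjavascript").bn128;
const ChaCha = require("ffjavascript").ChaCha;
const Blake2b = require("blake2b-wasm");
const keyPair = require("./keypair");
// Same check as sameRatio in powersoftau_verify.js.
function sameRatio(curve, g1s, g1sx, g2s, g2sx) {
    return curve.F12.eq(curve.pairing(g1s, g2sx), curve.pairing(g1sx, g2s));
}
async function demo() {
    await Blake2b.ready();                       // readied in case getG2sp hashes with blake2b-wasm
    const challengeHash = new Uint8Array(64);    // stand-in challenge hash
    const rng = new ChaCha([1,2,3,4,5,6,7,8]);   // fixed seed, demo only
    const key = keyPair.createPTauKey(bn128, challengeHash, rng);
    // Each sub-key is a proof of knowledge of its private scalar:
    console.log(sameRatio(bn128, key.tau.g1_s, key.tau.g1_sx, key.tau.g2_sp, key.tau.g2_spx)); // true
}
demo();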

31
src/partialhash.js Normal file
View File

@ -0,0 +1,31 @@
const blake2wasm = require("blake2b-wasm");
async function run() {
await blake2wasm.ready();
const hasher1 = blake2wasm(64);
hasher1.update(Uint8Array.of(1,2,3,4));
const ph = hasher1.getPartialHash();
hasher1.update(Uint8Array.of(5,6,7,8));
console.log(hasher1.digest("hex"));
const hasher2 = blake2wasm(64);
hasher2.setPartialHash(ph);
hasher2.update(Uint8Array.of(5,6,7,8));
console.log(hasher2.digest("hex"));
}
run().then(() => {
process.exit();
});
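This scratch file exercises the snapshot feature the new contributions section is built on: getPartialHash freezes the hasher's 216-byte internal state and setPartialHash resumes from it. Storing that snapshot per contribution lets a verifier recompute a response hash from the stored public-key block alone, without the response body. A sketch of that use, with synthetic stand-ins for the body and key block:
const Blake2b = require("blake2b-wasm");
async function demo() {
    await Blake2b.ready();
    const body = new Uint8Array(1000);              // stands in for all the response points
    const keyBlock = new Uint8Array(64*6 + 128*3);  // pub-key block size for bn128 (6 G1 + 3 G2 points)
    // Import time: hash the body, snapshot, then finish with the key block.
    const h1 = Blake2b(64);
    h1.update(body);
    const partialHash = h1.getPartialHash();        // 216 bytes, stored per contribution
    h1.update(keyBlock);
    const responseHash = h1.digest("hex");
    // Verify time: resume from the snapshot; only the key block is re-hashed.
    const h2 = Blake2b(64);
    h2.setPartialHash(partialHash);
    h2.update(keyBlock);
    console.log(h2.digest("hex") === responseHash); // true, without touching the body
}
demo();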

View File

@ -18,11 +18,12 @@
const fastFile = require("fastfile");
const assert = require("assert");
const blake2b = require("blake2b");
const blake2b = require("blake2b-wasm");
const readline = require("readline");
const crypto = require("crypto");
const ChaCha = require("ffjavascript").ChaCha;
const fs = require("fs");
const utils = require("./powersoftau_utils");
const buildTaskManager = require("./taskmanager");
@ -42,6 +43,8 @@ function askEntropy() {
async function contribute(curve, challangeFilename, responesFileName, entropy, verbose) {
await blake2b.ready();
const MAX_CHUNK_SIZE = 1024;
let stats = await fs.promises.stat(challangeFilename);
@ -75,19 +78,24 @@ async function contribute(curve, challangeFilename, responesFileName, entropy, v
for (let i=0; i<stats.size; i+= fdFrom.pageSize) {
const s = Math.min(stats.size - i, fdFrom.pageSize);
const buff = await fdFrom.read(s);
challangeHasher.update(buff);
challangeHasher.update(new Uint8Array(buff));
}
const challangeHash = Buffer.from(challangeHasher.digest());
console.log("Challange Hash: " + challangeHash.toString("hex"));
const challangeHash = challangeHasher.digest();
console.log("Challange Hash: ");
console.log(utils.formatHash(challangeHash));
const claimedHash = await fdFrom.read(64, 0);
console.log("Claimed Hash: " + claimedHash.toString("hex"));
const claimedHash = new Uint8Array( await fdFrom.read(64, 0));
console.log("Claimed Hash: ");
console.log(utils.formatHash(claimedHash));
const hasher = blake2b(64);
hasher.update(crypto.randomBytes(64));
hasher.update(entropy);
const enc = new TextEncoder(); // always utf-8
hasher.update(enc.encode(entropy));
const hash = Buffer.from(hasher.digest());
@ -96,27 +104,26 @@ async function contribute(curve, challangeFilename, responesFileName, entropy, v
seed[i] = hash.readUInt32BE(i*4);
}
// const rng = new ChaCha(seed);
const rng = new ChaCha();
const rng = new ChaCha(seed);
const kTau = keyPair.create(curve, 0, challangeHash, rng);
const kAlpha = keyPair.create(curve, 1, challangeHash, rng);
const kBeta = keyPair.create(curve, 2, challangeHash, rng);
const key = keyPair.createPTauKey(curve, challangeHash, rng);
if (verbose) {
console.log("kTau.g1_s_x: " + kTau.g1_s[0].toString(16));
console.log("kTau.g1_s_y: " + kTau.g1_s[1].toString(16));
console.log("kTau.g1_sx_x: " + kTau.g1_sx[0].toString(16));
console.log("kTau.g1_sx_y: " + kTau.g1_sx[1].toString(16));
console.log("kTau.g2_sp_x_c0: " + kTau.g2_sp[0][0].toString(16));
console.log("kTau.g2_sp_x_c1: " + kTau.g2_sp[0][1].toString(16));
console.log("kTau.g2_sp_y_c0: " + kTau.g2_sp[1][0].toString(16));
console.log("kTau.g2_sp_y_c1: " + kTau.g2_sp[1][1].toString(16));
console.log("kTau.g2_spx_x_c0: " + kTau.g2_spx[0][0].toString(16));
console.log("kTau.g2_spx_x_c1: " + kTau.g2_spx[0][1].toString(16));
console.log("kTau.g2_spx_y_c0: " + kTau.g2_spx[1][0].toString(16));
console.log("kTau.g2_spx_y_c1: " + kTau.g2_spx[1][1].toString(16));
["tau", "alpha", "beta"].forEach( (k) => {
console.log(k, ".g1_s_x: " + key[k].g1_s[0].toString(16));
console.log(k, ".g1_s_y: " + key[k].g1_s[1].toString(16));
console.log(k, ".g1_sx_x: " + key[k].g1_sx[0].toString(16));
console.log(k, ".g1_sx_y: " + key[k].g1_sx[1].toString(16));
console.log(k, ".g2_sp_x_c0: " + key[k].g2_sp[0][0].toString(16));
console.log(k, ".g2_sp_x_c1: " + key[k].g2_sp[0][1].toString(16));
console.log(k, ".g2_sp_y_c0: " + key[k].g2_sp[1][0].toString(16));
console.log(k, ".g2_sp_y_c1: " + key[k].g2_sp[1][1].toString(16));
console.log(k, ".g2_spx_x_c0: " + key[k].g2_spx[0][0].toString(16));
console.log(k, ".g2_spx_x_c1: " + key[k].g2_spx[0][1].toString(16));
console.log(k, ".g2_spx_y_c0: " + key[k].g2_spx[1][0].toString(16));
console.log(k, ".g2_spx_y_c1: " + key[k].g2_spx[1][1].toString(16));
console.log("");
});
}
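Note the fix above: the old code built `new ChaCha()` with no arguments, so the contribution keys ignored the collected entropy; the new code seeds ChaCha with eight 32-bit words taken from a blake2b hash over system random bytes plus the typed entropy. A condensed sketch of that derivation (the helper name is local to the sketch):
const crypto = require("crypto");
const Blake2b = require("blake2b-wasm");
const ChaCha = require("ffjavascript").ChaCha;
async function rngFromEntropy(entropy) {
    await Blake2b.ready();
    const hasher = Blake2b(64);
    hasher.update(crypto.randomBytes(64));                    // system randomness
    hasher.update(new TextEncoder().encode(entropy));         // user-supplied entropy
    const hash = Buffer.from(hasher.digest());
    const seed = [];
    for (let i=0; i<8; i++) seed[i] = hash.readUInt32BE(i*4); // 8 x 32-bit words
    return new ChaCha(seed);                                  // deterministic from the seed
}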
@ -138,14 +145,14 @@ async function contribute(curve, challangeFilename, responesFileName, entropy, v
await taskManager.addTask({
cmd: "MULG1",
first: t,
inc: kTau.prvKey.toString(),
inc: key.tau.prvKey.toString(),
buff: buff,
n: n,
writePos: writePointer
}, async function(r) {
return await fdTo.write(r.buff, r.writePos);
});
t = curve.Fr.mul(t, curve.Fr.pow(kTau.prvKey, n));
t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
writePointer += n*scG1;
}
@ -158,19 +165,19 @@ async function contribute(curve, challangeFilename, responesFileName, entropy, v
await taskManager.addTask({
cmd: "MULG2",
first: t,
inc: kTau.prvKey.toString(),
inc: key.tau.prvKey.toString(),
buff: buff,
n: n,
writePos: writePointer
}, async function(r) {
return await fdTo.write(r.buff, r.writePos);
});
t = curve.Fr.mul(t, curve.Fr.pow(kTau.prvKey, n));
t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
writePointer += n*scG2;
}
// AlphaTauG1
t = curve.Fr.e(kAlpha.prvKey);
t = curve.Fr.e(key.alpha.prvKey);
for (let i=0; i<domainSize; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log("AlfaTauG1: " + i);
const n = Math.min(domainSize - i, MAX_CHUNK_SIZE);
@ -178,19 +185,19 @@ async function contribute(curve, challangeFilename, responesFileName, entropy, v
await taskManager.addTask({
cmd: "MULG1",
first: t,
inc: kTau.prvKey.toString(),
inc: key.tau.prvKey.toString(),
buff: buff,
n: n,
writePos: writePointer
}, async function(r) {
return await fdTo.write(r.buff, r.writePos);
});
t = curve.Fr.mul(t, curve.Fr.pow(kTau.prvKey, n));
t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
writePointer += n*scG1;
}
// BetaTauG1
t = curve.Fr.e(kBeta.prvKey);
t = curve.Fr.e(key.beta.prvKey);
for (let i=0; i<domainSize; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log("BetaTauG1: " + i);
const n = Math.min(domainSize - i, MAX_CHUNK_SIZE);
@ -198,48 +205,33 @@ async function contribute(curve, challangeFilename, responesFileName, entropy, v
await taskManager.addTask({
cmd: "MULG1",
first: t,
inc: kTau.prvKey.toString(),
inc: key.tau.prvKey.toString(),
buff: buff,
n: n,
writePos: writePointer
}, async function(r) {
return await fdTo.write(r.buff, r.writePos);
});
t = curve.Fr.mul(t, curve.Fr.pow(kTau.prvKey, n));
t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
writePointer += n*scG1;
}
// BetaG2
const buffOldBeta = await fdFrom.read(sG2);
const oldBeta = curve.G2.fromRprBE(buffOldBeta);
const newBeta = curve.G2.mulScalar(oldBeta, kBeta.prvKey);
const buffNewBeta = curve.G2.toRprCompressed(newBeta);
const newBeta = curve.G2.mulScalar(oldBeta, key.beta.prvKey);
const buffNewBeta = new ArrayBuffer(curve.F2.n8*2);
curve.G2.toRprCompressed(buffNewBeta, 0, newBeta);
await fdTo.write(buffNewBeta, writePointer);
writePointer += scG2;
//Write Key
await fdTo.write(curve.G1.toRprBE(kTau.g1_s), writePointer);
writePointer += sG1;
await fdTo.write(curve.G1.toRprBE(kTau.g1_sx), writePointer);
writePointer += sG1;
await fdTo.write(curve.G1.toRprBE(kAlpha.g1_s), writePointer);
writePointer += sG1;
await fdTo.write(curve.G1.toRprBE(kAlpha.g1_sx), writePointer);
writePointer += sG1;
await fdTo.write(curve.G1.toRprBE(kBeta.g1_s), writePointer);
writePointer += sG1;
await fdTo.write(curve.G1.toRprBE(kBeta.g1_sx), writePointer);
writePointer += sG1;
await fdTo.write(curve.G2.toRprBE(kTau.g2_spx), writePointer);
writePointer += sG2;
await fdTo.write(curve.G2.toRprBE(kAlpha.g2_spx), writePointer);
writePointer += sG2;
await fdTo.write(curve.G2.toRprBE(kBeta.g2_spx), writePointer);
writePointer += sG2;
await taskManager.finish();
//Write Key
fdTo.pos = writePointer;
await utils.writePtauPubKey(fdTo, curve, key);
await fdTo.close();
await fdFrom.close();
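All four MULG1/MULG2 loops above share one invariant: a chunk task multiplies its i-th point by first*inc^i, and between chunks the main thread advances first by inc^n (curve.Fr.pow(key.tau.prvKey, n)), so the section ends up scaled by consecutive powers of tau without ever materializing them all. The same idea with plain integers standing in for Fr and group elements (applyChunk is a hypothetical stand-in for a worker task):
function applyChunk(points, first, inc) {
    let t = first;
    return points.map((p) => {
        const r = p * t;        // stands in for G.mulScalar(P, t)
        t = t * inc;            // walks through first*inc^i inside the chunk
        return r;
    });
}
const inc = 3;                  // stands in for key.tau.prvKey
let first = 1;                  // tau^0 for the very first point
const out = [];
for (const chunk of [[1, 1, 1], [1, 1]]) {         // five "points", two chunks
    out.push(...applyChunk(chunk, first, inc));
    first = first * Math.pow(inc, chunk.length);   // same update as curve.Fr.pow
}
console.log(out);               // [1, 3, 9, 27, 81]: inc^i for i = 0..4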
@ -257,16 +249,13 @@ function contributeThread(ctx, task) {
} else if (task.cmd == "MULG1") {
const sG1 = ctx.curve.F1.n64*8*2;
const scG1 = ctx.curve.F1.n64*8; // Compressed size
const buffDest = Buffer.allocUnsafe(scG1*task.n);
const buffDest = new ArrayBuffer(scG1*task.n);
let t = ctx.curve.Fr.e(task.first);
let inc = ctx.curve.Fr.e(task.inc);
for (let i=0; i<task.n; i++) {
const slice = task.buff.slice(i*sG1, (i+1)*sG1);
const b = Buffer.from(slice);
const P = ctx.curve.G1.fromRprBE(b);
const P = ctx.curve.G1.fromRprBE(task.buff, i*sG1);
const R = ctx.curve.G1.mulScalar(P, t);
const bR = ctx.curve.G1.toRprCompressed(R);
bR.copy(buffDest, i*scG1);
ctx.curve.G1.toRprCompressed(buffDest, i*scG1, R);
t = ctx.curve.Fr.mul(t, inc);
}
return {
@ -276,16 +265,13 @@ function contributeThread(ctx, task) {
} else if (task.cmd == "MULG2") {
const sG2 = ctx.curve.F2.n64*8*2;
const scG2 = ctx.curve.F2.n64*8; // Compressed size
const buffDest = Buffer.allocUnsafe(scG2*task.n);
const buffDest = new ArrayBuffer(scG2*task.n);
let t = ctx.curve.Fr.e(task.first);
let inc = ctx.curve.Fr.e(task.inc);
for (let i=0; i<task.n; i++) {
const slice = task.buff.slice(i*sG2, (i+1)*sG2);
const b = Buffer.from(slice);
const P = ctx.curve.G2.fromRprBE(b);
const P = ctx.curve.G2.fromRprBE(task.buff, i*sG2);
const R = ctx.curve.G2.mulScalar(P, t);
const bR = ctx.curve.G2.toRprCompressed(R);
bR.copy(buffDest, i*scG2);
ctx.curve.G2.toRprCompressed(buffDest, i*scG2, R);
t = ctx.curve.Fr.mul(t, inc);
}
return {

View File

@ -7,72 +7,36 @@
// BetaG2 (uncompressed)
const fastFile = require("fastfile");
const Scalar = require("ffjavascript").Scalar;
const assert = require("assert");
const bn128 = require("ffjavascript").bn128;
const blake2b = require("blake2b");
const ptauUtils = require("./powersoftau_utils");
const Blake2b = require("blake2b-wasm");
const utils = require("./powersoftau_utils");
async function exportChallange(pTauFilename, challangeFilename, verbose) {
await Blake2b.ready();
const {fd: fdFrom, sections} = await utils.readBinFile(pTauFilename, "ptau", 1);
const sections = ptauUtils.
const fdFrom = await fastFile.readExisting(pTauFilename);
const b = await fdFrom.read(4);
if (b.toString() != "ptau") assert(false, "Invalid File format");
let v = await fdFrom.readULE32();
if (v>1) assert(false, "Version not supported");
const nSections = await fdFrom.readULE32();
// Scan sections
let sections = [];
for (let i=0; i<nSections; i++) {
let ht = await fdFrom.readULE32();
let hl = await fdFrom.readULE64();
if (typeof sections[ht] == "undefined") sections[ht] = [];
sections[ht].push({
p: fdFrom.pos,
size: hl
});
fdFrom.pos += hl;
}
if (!sections[1]) assert(false, "File has no header");
if (sections[1].length>1) assert(false, "File has more than one header");
fdFrom.pos = sections[1][0].p;
const n8 = await fdFrom.readULE32();
const qBuff = await fdFrom.read(n8);
const q = Scalar.fromRprLE(qBuff);
let curve;
if (Scalar.eq(q, bn128.q)) {
curve = bn128;
} else {
assert(false, "Curve not supported");
}
assert(curve.F1.n64*8 == n8, "Invalid size");
const power = await fdFrom.readULE32();
const nContributions = await fdFrom.readULE32();
const {curve, power} = await utils.readPTauHeader(fdFrom, sections);
const contributions = await utils.readContributions(fdFrom, curve, sections);
let challangeHash;
if (nContributions == 0) {
challangeHash = Buffer.from(blake2b(64).digest());
if (contributions.length == 0) {
challangeHash = Blake2b(64).digest();
} else {
assert(false, "Not implemented");
challangeHash = contributions[contributions.length-1].newChallange;
}
const fdTo = await fastFile.createOverride(challangeFilename);
const toHash = blake2b(64);
const toHash = Blake2b(64);
fdTo.write(challangeHash);
toHash.update(challangeHash);
const buffG1 = new ArrayBuffer(curve.F1.n8*2);
const buffG1v = new Uint8Array(buffG1);
const buffG2 = new ArrayBuffer(curve.F2.n8*2);
const buffG2v = new Uint8Array(buffG2);
// Process tauG1
if (!sections[2]) assert(false, "File has no tauG1 section");
if (sections[2].length>1) assert(false, "File has more than one tauG1 section");
@ -136,26 +100,27 @@ async function exportChallange(pTauFilename, challangeFilename, verbose) {
console.log("Challange Hash: " +newChallangeHash);
async function readG1() {
const pBuff = await fdFrom.read(curve.F1.n64*8*2);
const pBuff = await fdFrom.read(curve.F1.n8*2);
return curve.G1.fromRprLEM( pBuff );
}
async function readG2() {
const pBuff = await fdFrom.read(curve.F1.n64*8*2*2);
const pBuff = await fdFrom.read(curve.F2.n8*2);
return curve.G2.fromRprLEM( pBuff );
}
async function writeG1(p) {
const rpr = curve.G1.toRprBE(p);
await fdTo.write(rpr);
toHash.update(rpr);
curve.G1.toRprBE(buffG1, 0, p);
await fdTo.write(buffG1);
toHash.update(buffG1v);
}
async function writeG2(p) {
const rpr = curve.G2.toRprBE(p);
await fdTo.write(rpr);
toHash.update(rpr);
curve.G2.toRprBE(buffG2, 0, p);
await fdTo.write(buffG2);
toHash.update(buffG2v);
}
}

View File

@ -1,60 +1,22 @@
const assert = require("assert");
const fastFile = require("fastfile");
const Scalar = require("Scalar");
const bn128 = require("ffjavascript").bn128;
const Blake2 = require("blake2");
const Blake2b = require("blake2b-wasm");
const fs = require("fs");
const utils = require("./powersoftau_utils");
async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, importPoints, verbose) {
async function importResponse(oldPtauFilename, contributionFilename, newPotFilename, verbose) {
const fdOld = await fastFile.readExisting(oldPtauFilename);
await Blake2b.ready();
const b = await fdOld.read(4);
if (b.toString() != "ptau") assert(false, "Old ptau file: invalid format.");
let v = await fdOld.readULE32();
if (v>1) assert(false, "Old ptau file: Version not supported");
const nSections = await fdOld.readULE32();
// Scan sections
let sections = [];
for (let i=0; i<nSections; i++) {
let ht = await fdOld.readULE32();
let hl = await fdOld.readULE64();
if (typeof sections[ht] == "undefined") sections[ht] = [];
sections[ht].push({
p: fdOld.pos,
size: hl
});
fdOld.pos += hl;
}
if (!sections[1]) assert(false, "Old ptau file: File has no header");
if (sections[1].length>1) assert(false, "Old ptau file: File has more than one header");
fdOld.pos = sections[1][0].p;
const n8 = await fdOld.readULE32();
const qBuff = await fdOld.read(n8);
const q = Scalar.fromRprLE(qBuff);
let curve;
if (Scalar.eq(q, bn128.q)) {
curve = bn128;
} else {
assert(false, "Old ptau file: Curve not supported");
}
assert(curve.F1.n64*8 == n8, "Old ptau file: Invalid size");
const power = await fdOld.readULE32();
const nContributions = await fdOld.readULE32();
const sG1 = curve.F1.n64*8*2;
const scG1 = curve.F1.n64*8; // Compressed size
const sG2 = curve.F2.n64*8*2;
const scG2 = curve.F2.n64*8; // Compressed size
const {fd: fdOld, sections} = await utils.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power} = await utils.readPTauHeader(fdOld, sections);
const contributions = await utils.readContributions(fdOld, curve, sections);
const currentContribution = {};
const sG1 = curve.F1.n8*2;
const scG1 = curve.F1.n8; // Compressed size
const sG2 = curve.F2.n8*2;
const scG2 = curve.F2.n8; // Compressed size
let stats = await fs.promises.stat(contributionFilename);
assert.equal(stats.size,
@ -68,39 +30,83 @@ async function importResponse(oldPtauFilename, contributionFilename, newPotFilen
"Size of the contribution is invalid"
);
const fdNew = await fastFile.createOverride(newPotFilename);
await fdNew.write(Buffer.from("ptau"), 0); // Magic "r1cs"
await fd.writeULE32(1); // Version
await fd.writeULE32(7); // Number of Sections
// Write the header
///////////
await fd.writeULE32(1); // Header type
const pHeaderSize = fd.pos;
await fd.writeULE64(0); // Temporarily set to 0 length
const primeQ = curve.q;
await fd.writeULE32(curve.F1.n64*8);
await fd.write(Scalar.toRprLE(primeQ, curve.F1.n64*8));
await fd.writeULE32(power); // power
await fd.writeULE32(0); // Total number of public contributions
const headerSize = fd.pos - pHeaderSize - 8;
let lastChallangeHash;
if (contributions.length>0) {
lastChallangeHash = contributions[contributions.length-1].nextChallange;
} else {
lastChallangeHash = utils.calculateFirstChallangeHash(curve, power);
}
const fdNew = await utils.createBinFile(newPTauFilename, "ptau", 1, 7);
await utils.writePTauHeader(fdNew, curve, power);
const fdResponse = await fastFile.readExisting(contributionFilename);
const hasherResponse = new Blake2(64);
const contributionPreviousHash = await fdResponse.read(64);
hasherResponse.update(contributionPreviousHash);
assert(utils.hashIsEqual(contributionPreviousHash,lastChallangeHash),
"Wrong contribution. this contribution is not based on the previus hash");
const hasherResponse = new Blake2b(64);
hasherResponse.update(new Uint8Array(contributionPreviousHash));
const hasherNewChallange = new Blake2b(64);
hasherNewChallange.update(lastChallangeHash);
await processSection(fdResponse, fdNew, 2, (1 << power) * 2 -1, "G1", "tauG1", 1);
await processSection(fdResponse, fdNew, 3, (1 << power) , "G2", "tauG2", 1);
await processSection(fdResponse, fdNew, 4, (1 << power) , "G1", "alphaG1", 0);
await processSection(fdResponse, fdNew, 5, (1 << power) , "G1", "betaG1", 0);
await processSection(fdResponse, fdNew, 6, 1 , "G2", "betaG2", 0);
currentContribution.nextChallange = hasherNewChallange.digest();
currentContribution.partialHash = hasherResponse.getPartialHash();
const buffKey = await fdResponse.read(curve.F1.n8*2*6+curve.F2.n8*2*3);
currentContribution.key = utils.fromPtauPubKeyRpr(buffKey, 0, curve, false);
hasherResponse.update(new Uint8Array(buffKey));
const hashResponse = hasherResponse.digest();
if (verbose) {
console.log("Contribution Response Hash imported: ");
console.log(utils.formatHash(hashResponse));
}
contributions.push(currentContribution);
await utils.writeContributions(fdNew, curve, contributions);
await fdResponse.close();
await fdNew.close();
async function processSection(fdFrom, fdTo, sectionId, n, G, name, contributionId) {
const buffU = new ArrayBuffer(curve[G].F.n8*2);
const buffUv = new Uint8Array(buffU);
const scG = curve[G].F.n8;
await fdTo.writeULE32(sectionId); // section id
const pSection = fdTo.pos;
await fdTo.writeULE64(0); // Temporarily set to 0 length
for (let i=0; i< n; i++) {
const buffC = await fdFrom.read(scG);
hasherResponse.update(new Uint8Array(buffC));
const P = curve[G].fromRprCompressed(buffC);
if (i==contributionId) currentContribution[name] = P;
curve[G].toRprBE(buffU, 0, P);
hasherNewChallange.update(buffUv);
curve[G].toRprLEM(buffU, 0, P);
await fdTo.write(buffU);
if ((verbose)&&((i%100000) == 0)&&i) console.log(name +": " + i);
}
const sSize = fdTo.pos - pSection -8;
const lastPos = fdTo.pos;
await fdTo.writeULE64(sSize, pSection);
fdTo.pos = lastPos;
}
}
module.exports = importResponse;
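A single response point passes through three encodings in processSection above: it arrives compressed big-endian (scG bytes) and feeds hasherResponse, is re-serialized uncompressed big-endian for the next challenge hash, and is stored in the new file in uncompressed little-endian Montgomery form. A sketch of that pipeline for one G1 point (assumes ffjavascript's bn128):
const bn128 = require("ffjavascript").bn128;
function convertPoint(buffCompressed) {
    const G = bn128.G1;
    const P = G.fromRprCompressed(buffCompressed);  // response encoding (compressed BE)
    const buffBE = new ArrayBuffer(G.F.n8*2);
    G.toRprBE(buffBE, 0, P);                        // hashed into the next challenge
    const buffLEM = new ArrayBuffer(G.F.n8*2);
    G.toRprLEM(buffLEM, 0, P);                      // written to the .ptau file
    return {buffBE, buffLEM};
}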

View File

@ -1,67 +1,65 @@
/*
Header
Header(1)
n8
prime
power
nContributions
tauG1
[(1<<power)*2-1] G1
tauG2
[1<<power] G2
alfaTauG1
[1<<power] G1
betaTauG1
[1<<power] G1
betaG2
[1] G2
contributions
[NContributions]
tauG1
tauG2
alphaTauG1
betaTauG1
betaG2
partialHash
state
tau_g1s
tau_g1sx
tau_g2spx
alfa_g1s
alfa_g1sx
alfa_g1spx
beta_g1s
beta_g1sx
beta_g1spx
tauG1(2)
{(1<<power)*2-1} [
G1, tau*G1, tau^2 * G1, ....
]
tauG2(3)
{1<<power}[
G2, tau*G2, tau^2 * G2, ...
]
alfaTauG1(4)
{1<<power}[
alpha*G1, alpha*tau*G1, alpha*tau^2*G1,....
]
betaTauG1(5)
{1<<power} [
beta*G1, beta*tau*G1, beta*tau^2*G1, ....
]
betaG2(6)
{1}[
beta*G2
]
contributions(7)
NContributions
{NContributions}[
tau*G1
tau*G2
alpha*G1
beta*G1
beta*G2
pubKey
tau_g1s
tau_g1sx
tau_g2spx
alfa_g1s
alfa_g1sx
alfa_g1spx
beta_g1s
beta_g1sx
beta_g1spx
partialHash (216 bytes) See https://github.com/mafintosh/blake2b-wasm/blob/23bee06945806309977af802bc374727542617c7/blake2b.wat#L9
hashNewChallange
]
*/
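The section sizes implied by this layout follow directly from power. A quick sketch (byte counts assume bn128, where an uncompressed G1 point takes 64 bytes and a G2 point 128):
function ptauSectionPoints(power) {
    return {
        tauG1:      (1 << power) * 2 - 1,   // e.g. 2047 points for power = 10
        tauG2:      1 << power,
        alphaTauG1: 1 << power,
        betaTauG1:  1 << power,
        betaG2:     1
    };
}
const n = ptauSectionPoints(10);
console.log(n.tauG1 * 64 + n.tauG2 * 128);  // raw point bytes of the two tau sections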
const fastFile = require("fastfile");
const Scalar = require("ffjavascript").Scalar;
const ptauUtils = require("./powersoftau_utils");
async function newAccumulator(curve, power, fileName, verbose) {
const fd = await fastFile.createOverride(fileName);
await fd.write(Buffer.from("ptau"), 0); // Magic "r1cs"
const fd = await ptauUtils.createBinFile(fileName, "ptau", 1, 7);
await fd.writeULE32(1); // Version
await fd.writeULE32(7); // Number of Sections
// Write the header
///////////
await fd.writeULE32(1); // Header type
const pHeaderSize = fd.pos;
await fd.writeULE64(0); // Temporarily set to 0 length
const primeQ = curve.q;
await fd.writeULE32(curve.F1.n64*8);
await fd.write(Scalar.toRprLE(primeQ, curve.F1.n64*8));
await fd.writeULE32(power); // power
await fd.writeULE32(0); // Total number of public contributions
const headerSize = fd.pos - pHeaderSize - 8;
await ptauUtils.writePTauHeader(fd, curve, power, 0);
const buffG1 = new ArrayBuffer(curve.G1.F.n8*2);
const buffG2 = new ArrayBuffer(curve.G2.F.n8*2);
curve.G1.toRprLEM(buffG1, 0, curve.G1.g);
curve.G2.toRprLEM(buffG2, 0, curve.G2.g);
// Write tauG1
///////////
@ -70,7 +68,7 @@ async function newAccumulator(curve, power, fileName, verbose) {
await fd.writeULE64(0); // Temporarily set to 0 length
const nTauG1 = (1 << power) * 2 -1;
for (let i=0; i< nTauG1; i++) {
await fd.write(curve.G1.toRprLEM(curve.G1.g));
await fd.write(buffG1);
if ((verbose)&&((i%100000) == 0)&&i) console.log("tauG1: " + i);
}
const tauG1Size = fd.pos - pTauG1 -8;
@ -82,7 +80,7 @@ async function newAccumulator(curve, power, fileName, verbose) {
await fd.writeULE64(0); // Temporarily set to 0 length
const nTauG2 = (1 << power);
for (let i=0; i< nTauG2; i++) {
await fd.write(curve.G2.toRprLEM(curve.G2.g));
await fd.write(buffG2);
if ((verbose)&&((i%100000) == 0)&&i) console.log("tauG2: " + i);
}
const tauG2Size = fd.pos - pTauG2 -8;
@ -94,7 +92,7 @@ async function newAccumulator(curve, power, fileName, verbose) {
await fd.writeULE64(0); // Temporarily set to 0 length
const nAlfaTauG1 = (1 << power);
for (let i=0; i< nAlfaTauG1; i++) {
await fd.write(curve.G1.toRprLEM(curve.G1.g));
await fd.write(buffG1);
if ((verbose)&&((i%100000) == 0)&&i) console.log("alfaTauG1: " + i);
}
const alfaTauG1Size = fd.pos - pAlfaTauG1 -8;
@ -106,7 +104,7 @@ async function newAccumulator(curve, power, fileName, verbose) {
await fd.writeULE64(0); // Temporarily set to 0 length
const nBetaTauG1 = (1 << power);
for (let i=0; i< nBetaTauG1; i++) {
await fd.write(curve.G1.toRprLEM(curve.G1.g));
await fd.write(buffG1);
if ((verbose)&&((i%100000) == 0)&&i) console.log("betaTauG1: " + i);
}
const betaTauG1Size = fd.pos - pBetaTauG1 -8;
@ -116,18 +114,18 @@ async function newAccumulator(curve, power, fileName, verbose) {
await fd.writeULE32(6); // betaG2
const pBetaG2 = fd.pos;
await fd.writeULE64(0); // Temporarily set to 0 length
await fd.write(curve.G2.toRprLEM(curve.G2.g));
await fd.write(buffG2);
const betaG2Size = fd.pos - pBetaG2 -8;
// Contributions
///////////
await fd.writeULE32(7); // betaG2
await fd.writeULE32(7); // Contributions
const pContributions = fd.pos;
await fd.writeULE64(0); // Temporarily set to 0 length
await fd.writeULE64(4); // Temporarily set to 4 length
await fd.writeULE32(0); // 0 Contributions
const contributionsSize = fd.pos - pContributions -8;
// Write sizes
await fd.writeULE64(headerSize, pHeaderSize);
await fd.writeULE64(tauG1Size, pTauG1);
await fd.writeULE64(tauG2Size, pTauG2);
await fd.writeULE64(alfaTauG1Size, pAlfaTauG1);

View File

@ -1,13 +1,19 @@
const fastFile = require("fastfile");
const assert = require("assert");
const Scalar = require("ffjavascript").Scalar;
const bn128 = require("ffjavascript").bn128;
const Blake2b = require("blake2b-wasm");
async function readBinFile(fileName, type, maxVersion) {
const fd = await fastFile.readExisting(fileName);
const b = await fd.read(4);
const bv = new Uint8Array(b);
let readedType = "";
for (let i=0; i<4; i++) readedType += String.fromCharCode(bv[i]);
if (b.toString() != type) assert(false, fileName + ": Invalid File format");
if (readedType != type) assert(false, fileName + ": Invalid File format");
let v = await fd.readULE32();
@ -28,17 +34,377 @@ async function readBinFile(fileName, type, maxVersion) {
fd.pos += hl;
}
return sections;
return {fd, sections};
}
function writeBinFile(fileName, type, version, nSections) {
async function createBinFile(fileName, type, version, nSections) {
const fd = await fastFile.createOverride(fileName);
const buff = new Uint8Array(4);
for (let i=0; i<4; i++) buff[i] = type.charCodeAt(i);
await fd.write(buff.buffer, 0); // Magic: 4-byte file-type tag (e.g. "ptau")
await fd.writeULE32(version); // Version
await fd.writeULE32(nSections); // Number of Sections
return fd;
}
async function writePTauHeader(fd, curve, power) {
// Write the header
///////////
await fd.writeULE32(1); // Header type
const pHeaderSize = fd.pos;
await fd.writeULE64(0); // Temporarily set to 0 length
await fd.writeULE32(curve.F1.n64*8);
const buff = new ArrayBuffer(curve.F1.n8);
Scalar.toRprLE(buff, 0, curve.q, curve.F1.n8);
await fd.write(buff);
await fd.writeULE32(power); // power
const headerSize = fd.pos - pHeaderSize - 8;
const oldPos = fd.pos;
await fd.writeULE64(headerSize, pHeaderSize);
fd.pos = oldPos;
}
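createBinFile and writePTauHeader use the length-backpatching idiom that recurs through all these writers: reserve a 64-bit zero, emit the body, then seek back and patch in the real size. The idiom in isolation (writeSection is a hypothetical helper over the same fastfile-style fd):
async function writeSection(fd, sectionType, writeBody) {
    await fd.writeULE32(sectionType);   // section id
    const pSize = fd.pos;
    await fd.writeULE64(0);             // placeholder length
    await writeBody(fd);                // body advances fd.pos
    const size = fd.pos - pSize - 8;    // body length, excluding the 8-byte size field
    const oldPos = fd.pos;
    await fd.writeULE64(size, pSize);   // backpatch the real length
    fd.pos = oldPos;                    // restore the cursor for the next section
}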
async function readPTauHeader(fd, sections) {
if (!sections[1]) assert(false, fd.fileName + ": File has no header");
if (sections[1].length>1) assert(false, fd.fileName +": File has more than one header");
fd.pos = sections[1][0].p;
const n8 = await fd.readULE32();
const buff = await fd.read(n8);
const q = Scalar.fromRprLE(buff);
let curve;
if (Scalar.eq(q, bn128.q)) {
curve = bn128;
} else {
assert(false, fd.fileName +": Curve not supported");
}
assert(curve.F1.n64*8 == n8, fd.fileName +": Invalid size");
const power = await fd.readULE32();
assert.equal(fd.pos-sections[1][0].p, sections[1][0].size);
return {curve, power};
}
async function readPtauPubKey(fd, curve, montgomery) {
const buff = await fd.read(curve.F1.n8*2*6 + curve.F2.n8*2*3);
return fromPtauPubKeyRpr(buff, 0, curve, montgomery);
/*
const key = {
tau: {},
alpha: {},
beta: {}
};
key.tau.g1_s = await readG1();
key.tau.g1_sx = await readG1();
key.alpha.g1_s = await readG1();
key.alpha.g1_sx = await readG1();
key.beta.g1_s = await readG1();
key.beta.g1_sx = await readG1();
key.tau.g2_spx = await readG2();
key.alpha.g2_spx = await readG2();
key.beta.g2_spx = await readG2();
return key;
async function readG1() {
const pBuff = await fd.read(curve.F1.n8*2);
if (montgomery) {
return curve.G1.fromRprLEM( pBuff );
} else {
return curve.G1.fromRprBE( pBuff );
}
}
async function readG2() {
const pBuff = await fd.read(curve.F2.n8*2);
if (montgomery) {
return curve.G2.fromRprLEM( pBuff );
} else {
return curve.G2.fromRprBE( pBuff );
}
}
*/
}
function fromPtauPubKeyRpr(buff, pos, curve, montgomery) {
const key = {
tau: {},
alpha: {},
beta: {}
};
key.tau.g1_s = readG1();
key.tau.g1_sx = readG1();
key.alpha.g1_s = readG1();
key.alpha.g1_sx = readG1();
key.beta.g1_s = readG1();
key.beta.g1_sx = readG1();
key.tau.g2_spx = readG2();
key.alpha.g2_spx = readG2();
key.beta.g2_spx = readG2();
return key;
function readG1() {
let p;
if (montgomery) {
p = curve.G1.fromRprLEM( buff, pos );
} else {
p = curve.G1.fromRprBE( buff, pos );
}
pos += curve.G1.F.n8*2;
return p;
}
function readG2() {
let p;
if (montgomery) {
p = curve.G2.fromRprLEM( buff, pos );
} else {
p = curve.G2.fromRprBE( buff, pos );
}
pos += curve.G2.F.n8*2;
return p;
}
}
function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) {
writeG1(key.tau.g1_s);
writeG1(key.tau.g1_sx);
writeG1(key.alpha.g1_s);
writeG1(key.alpha.g1_sx);
writeG1(key.beta.g1_s);
writeG1(key.beta.g1_sx);
writeG2(key.tau.g2_spx);
writeG2(key.alpha.g2_spx);
writeG2(key.beta.g2_spx);
async function writeG1(p) {
if (montgomery) {
curve.G1.toRprLEM(buff, pos, p);
} else {
curve.G1.toRprBE(buff, pos, p);
}
pos += curve.F1.n8*2;
}
async function writeG2(p) {
if (montgomery) {
curve.G2.toRprLEM(buff, pos, p);
} else {
curve.G2.toRprBE(buff, pos, p);
}
pos += curve.F2.n8*2;
}
return buff;
}
async function writePtauPubKey(fd, curve, key, montgomery) {
const buff = new ArrayBuffer(curve.F1.n8*2*6 + curve.F2.n8*2*3);
toPtauPubKeyRpr(buff, 0, curve, key, montgomery);
await fd.write(buff);
/*
const buffG1 = new ArrayBuffer(curve.F1.n8*2);
const buffG2 = new ArrayBuffer(curve.F2.n8*2);
await writeG1(key.tau.g1_s);
await writeG1(key.tau.g1_sx);
await writeG1(key.alpha.g1_s);
await writeG1(key.alpha.g1_sx);
await writeG1(key.beta.g1_s);
await writeG1(key.beta.g1_sx);
await writeG2(key.tau.g2_spx);
await writeG2(key.alpha.g2_spx);
await writeG2(key.beta.g2_spx);
async function writeG1(p) {
if (montgomery) {
curve.G1.toRprLEM(buffG1, 0, p);
} else {
curve.G1.toRprBE(buffG1, 0, p);
}
await fd.write(buffG1);
}
async function writeG2(p) {
if (montgomery) {
curve.G2.toRprLEM(buffG2, 0, p);
} else {
curve.G2.toRprBE(buffG2, 0, p);
}
await fd.write(buffG2);
}
*/
}
async function readContribution(fd, curve) {
const c = {};
c.tauG1 = await readG1();
c.tauG2 = await readG2();
c.alphaG1 = await readG1();
c.betaG1 = await readG1();
c.betaG2 = await readG2();
c.key = await readPtauPubKey(fd, curve, true);
c.partialHash = new Uint8Array(await fd.read(216));
c.nextChallange = new Uint8Array(await fd.read(64));
return c;
async function readG1() {
const pBuff = await fd.read(curve.F1.n8*2);
return curve.G1.fromRprLEM( pBuff );
}
async function readG2() {
const pBuff = await fd.read(curve.F2.n8*2);
return curve.G2.fromRprLEM( pBuff );
}
}
async function readContributions(fd, curve, sections) {
if (!sections[7]) assert(false, fd.fileName + ": File has no contributions");
if (sections[7].length>1) assert(false, fd.fileName +": File has more than one contributions section");
fd.pos = sections[7][0].p;
const nContributions = await fd.readULE32();
const contributions = [];
for (let i=0; i<nContributions; i++) {
const c = await readContribution(fd, curve);
c.id = i+1;
contributions.push(c);
}
assert.equal(fd.pos-sections[7][0].p, sections[7][0].size);
return contributions;
}
async function writeContribution(fd, curve, contribution) {
const buffG1 = new ArrayBuffer(curve.F1.n8*2);
const buffG2 = new ArrayBuffer(curve.F2.n8*2);
await writeG1(contribution.tauG1);
await writeG2(contribution.tauG2);
await writeG1(contribution.alphaG1);
await writeG1(contribution.betaG1);
await writeG2(contribution.betaG2);
await writePtauPubKey(fd, curve, contribution.key, true);
await fd.write(contribution.partialHash);
await fd.write(contribution.nextChallange);
async function writeG1(p) {
curve.G1.toRprLEM(buffG1, 0, p);
await fd.write(buffG1);
}
async function writeG2(p) {
curve.G2.toRprLEM(buffG2, 0, p);
await fd.write(buffG2);
}
}
function writePTauHeader(fd, curve, power, nContributions) {
async function writeContributions(fd, curve, contributions) {
await fd.writeULE32(7); // Header type
const pContributionsSize = fd.pos;
await fd.writeULE64(0); // Temporarily set to 0 length
await fd.writeULE32(contributions.length);
for (let i=0; i< contributions.length; i++) {
await writeContribution(fd, curve, contributions[i]);
}
const contributionsSize = fd.pos - pContributionsSize - 8;
const oldPos = fd.pos;
await fd.writeULE64(contributionsSize, pContributionsSize);
fd.pos = oldPos;
}
function readPTauHeader(fd) {
function formatHash(b) {
const a = new DataView(b.buffer);
let S = "";
for (let i=0; i<4; i++) {
if (i>0) S += "\n";
S += "\t\t";
for (let j=0; j<4; j++) {
if (j>0) S += " ";
S += a.getUint32(i*4+j).toString(16).padStart(8, "0");
}
}
return S;
}
function hashIsEqual(h1, h2) {
if (h1.byteLength != h2.byteLength) return false;
var dv1 = new Int8Array(h1);
var dv2 = new Int8Array(h2);
for (var i = 0 ; i != h1.byteLength ; i++)
{
if (dv1[i] != dv2[i]) return false;
}
return true;
}
function calculateFirstChallangeHash(curve, power) {
const hasher = new Blake2b(64);
const buffG1 = new ArrayBuffer(curve.G1.F.n8*2);
const vG1 = new Uint8Array(buffG1);
const buffG2 = new ArrayBuffer(curve.G2.F.n8*2);
const vG2 = new Uint8Array(buffG2);
curve.G1.toRprBE(buffG1, 0, curve.G1.g);
curve.G2.toRprBE(buffG2, 0, curve.G2.g);
const blankHasher = new Blake2b(64);
hasher.update(blankHasher.digest());
let n;
n=(1 << power)*2 -1;
for (let i=0; i<n; i++) hasher.update(vG1);
n= 1 << power;
for (let i=0; i<n; i++) hasher.update(vG2);
for (let i=0; i<n; i++) hasher.update(vG1);
for (let i=0; i<n; i++) hasher.update(vG1);
hasher.update(vG2);
return hasher.digest();
}
module.exports.readBinFile = readBinFile;
module.exports.createBinFile = createBinFile;
module.exports.readPTauHeader = readPTauHeader;
module.exports.writePTauHeader = writePTauHeader;
module.exports.readPtauPubKey = readPtauPubKey;
module.exports.writePtauPubKey = writePtauPubKey;
module.exports.formatHash = formatHash;
module.exports.readContributions = readContributions;
module.exports.writeContributions = writeContributions;
module.exports.hashIsEqual = hashIsEqual;
module.exports.calculateFirstChallangeHash = calculateFirstChallangeHash;
module.exports.toPtauPubKeyRpr = toPtauPubKeyRpr;
module.exports.fromPtauPubKeyRpr = fromPtauPubKeyRpr;

View File

@ -0,0 +1,430 @@
const Blake2b = require("blake2b-wasm");
const utils = require("./powersoftau_utils");
const keyPair = require("./keypair");
const assert = require("assert");
const crypto = require("crypto");
const buildTaskManager = require("./taskmanager");
function sameRatio(curve, g1s, g1sx, g2s, g2sx) {
return curve.F12.eq(curve.pairing(g1s, g2sx), curve.pairing(g1sx, g2s));
}
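sameRatio is the only pairing check the verifier needs. By bilinearity of the pairing e, if g1sx = x*g1s and g2sx = y*g2s, then
    e(g1s, g2sx) = e(g1s, g2s)^y    and    e(g1sx, g2s) = e(g1s, g2s)^x,
so the equality tested above holds exactly when x = y mod r: both pairs encode the same secret ratio, which is what every check below reduces to.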
function verifyContribution(curve, cur, prev) {
// TODO
cur.key.tau.g2_sp = keyPair.getG2sp(0, prev.nextChallange, cur.key.tau.g1_s, cur.key.tau.g1_sx);
cur.key.alpha.g2_sp = keyPair.getG2sp(1, prev.nextChallange, cur.key.alpha.g1_s, cur.key.alpha.g1_sx);
cur.key.beta.g2_sp = keyPair.getG2sp(2, prev.nextChallange, cur.key.beta.g1_s, cur.key.beta.g1_sx);
if (!sameRatio(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, cur.key.tau.g2_sp, cur.key.tau.g2_spx)) {
console.log("INVALID key (tau) in challange #"+cur.id);
return false;
}
if (!sameRatio(curve, cur.key.alpha.g1_s, cur.key.alpha.g1_sx, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx)) {
console.log("INVALID key (alpha) in challange #"+cur.id);
return false;
}
if (!sameRatio(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, cur.key.beta.g2_sp, cur.key.beta.g2_spx)) {
console.log("INVALID key (beta) in challange #"+cur.id);
return false;
}
if (!sameRatio(curve, prev.tauG1, cur.tauG1, cur.key.tau.g2_sp, cur.key.tau.g2_spx)) {
console.log("INVALID tau*G1. challange #"+cur.id+"It does not follow the previous contribution");
return false;
}
if (!sameRatio(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, prev.tauG2, cur.tauG2)) {
console.log("INVALID tau*G2. challange #"+cur.id+"It does not follow the previous contribution");
return false;
}
if (!sameRatio(curve, prev.alphaG1, cur.alphaG1, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx)) {
console.log("INVALID alpha*G1. challange #"+cur.id+"It does not follow the previous contribution");
return false;
}
if (!sameRatio(curve, prev.betaG1, cur.betaG1, cur.key.beta.g2_sp, cur.key.beta.g2_spx)) {
console.log("INVALID beta*G1. challange #"+cur.id+"It does not follow the previous contribution");
return false;
}
if (!sameRatio(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, prev.betaG2, cur.betaG2)) {
console.log("INVALID beta*G2. challange #"+cur.id+"It does not follow the previous contribution");
return false;
}
return true;
}
async function verify(tauFilename, verbose) {
await Blake2b.ready();
const {fd, sections} = await utils.readBinFile(tauFilename, "ptau", 1);
const {curve, power} = await utils.readPTauHeader(fd, sections);
const contrs = await utils.readContributions(fd, curve, sections);
if (verbose) console.log("power: 2**" + power);
// Verify Last contribution
if (verbose) console.log("Computing initial contribution hash");
const initialContribution = {
tauG1: curve.G1.g,
tauG2: curve.G2.g,
alphaG1: curve.G1.g,
betaG1: curve.G1.g,
betaG2: curve.G2.g,
nextChallange: utils.calculateFirstChallangeHash(curve, power)
};
if (contrs.length == 0) {
console.log("This file has no contribution! It cannot be used in production");
return false;
}
let prevContr;
if (contrs.length>1) {
prevContr = contrs[contrs.length-2];
} else {
prevContr = initialContribution;
}
const curContr = contrs[contrs.length-1];
if (verbose) console.log("Validating contribution #"+contrs[contrs.length-1].id);
const res = verifyContribution(curve, curContr,prevContr, verbose);
if (!res) return false;
const nextContributionHasher = Blake2b(64);
nextContributionHasher.update(prevContr.nextChallange);
const key = curContr.key;
// Verify powers and compute nextChallangeHash
// await test();
// Verify Section tau*G1
if (verbose) console.log("Verifying powers in tau*G1 section");
const rTau1 = await processSection(2, "G1", "tauG1", (1 << power)*2-1, [0, 1]);
if (!sameRatio(curve, rTau1.R1, rTau1.R2, key.tau.g2_sp, key.tau.g2_spx)) {
console.log("tauG1 section. Powers do not match");
return false;
}
if (!curve.G1.eq(curve.G1.g, rTau1.singularPoints[0])) {
console.log("First element of tau*G1 section must be the generator");
return false;
}
if (!curve.G1.eq(curContr.tauG1, rTau1.singularPoints[1])) {
console.log("Second element of tau*G1 section does not match the one in the contribution section");
return false;
}
// await test();
// Verify Section tau*G2
if (verbose) console.log("Verifying powers in tau*G2 section");
const rTau2 = await processSection(3, "G2", "tauG2", 1 << power, [0, 1]);
if (!sameRatio(curve, key.tau.g1_s, key.tau.g1_sx, rTau2.R1, rTau2.R2)) {
console.log("tauG2 section. Powers do not match");
return false;
}
if (!curve.G2.eq(curve.G2.g, rTau2.singularPoints[0])) {
console.log("First element of tau*G2 section must be the generator");
return false;
}
if (!curve.G2.eq(curContr.tauG2, rTau2.singularPoints[1])) {
console.log("Second element of tau*G2 section does not match the one in the contribution section");
return false;
}
// Verify Section alpha*tau*G1
if (verbose) console.log("Verifying powers in alpha*tau*G1 section");
const rAlphaTauG1 = await processSection(4, "G1", "alphatauG1", 1 << power, [0]);
if (!sameRatio(curve, rAlphaTauG1.R1, rAlphaTauG1.R2, key.tau.g2_sp, key.tau.g2_spx)) {
console.log("alphaTauG1 section. Powers do not match");
return false;
}
if (!curve.G1.eq(curContr.alphaG1, rAlphaTauG1.singularPoints[0])) {
console.log("First element of alpha*tau*G1 section (alpha*G1) does not match the one in the contribution section");
return false;
}
// Verify Section beta*tau*G1
if (verbose) console.log("Verifying powers in beta*tau*G1 section");
const rBetaTauG1 = await processSection(5, "G1", "betatauG1", 1 << power, [0]);
if (!sameRatio(curve, rBetaTauG1.R1, rBetaTauG1.R2, key.tau.g2_sp, key.tau.g2_spx)) {
console.log("betaTauG1 section. Powers do not match");
return false;
}
if (!curve.G1.eq(curContr.betaG1, rBetaTauG1.singularPoints[0])) {
console.log("First element of beta*tau*G1 section (beta*G1) does not match the one in the contribution section");
return false;
}
//Verify Beta G2
const betaG2 = await processSectionBetaG2();
if (!curve.G2.eq(curContr.betaG2, betaG2)) {
console.log("betaG2 element in betaG2 section does not match the one in the contribution section");
return false;
}
await fd.close();
const nextContributionHash = nextContributionHasher.digest();
// Check the nextChallangeHash
if (!utils.hashIsEqual(nextContributionHash,curContr.nextChallange)) {
console.log("Hash of the values does not math the next challange of the last contributor in the contributions section");
return false;
}
if (verbose) {
console.log("Next challange hash: ");
console.log(utils.formatHash(nextContributionHash));
}
// Verify Previous contributions
printContribution(curContr, prevContr);
for (let i = contrs.length-2; i>=0; i--) {
const curContr = contrs[i];
const prevContr = (i>0) ? contrs[i-1] : initialContribution;
verifyContribution(curve, curContr, prevContr);
printContribution(curContr, prevContr);
}
console.log("-----------------------------------------------------");
return true;
function printContribution(curContr, prevContr) {
console.log("-----------------------------------------------------");
console.log(`Contribution #${curContr.id}:`);
console.log("\tNext Challange");
console.log(utils.formatHash(curContr.nextChallange));
const buff = new ArrayBuffer(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3);
const buffV = new Uint8Array(buff);
utils.toPtauPubKeyRpr(buff, 0, curve, curContr.key, false);
const responseHasher = Blake2b(64);
responseHasher.setPartialHash(curContr.partialHash);
responseHasher.update(buffV);
const responseHash = responseHasher.digest();
console.log("\tResponse Hash");
console.log(utils.formatHash(responseHash));
console.log("\tBased on challange");
console.log(utils.formatHash(prevContr.nextChallange));
}
async function processSectionBetaG2() {
const G = curve.G2;
const sG = G.F.n8*2;
const buffU = new ArrayBuffer(sG);
const buffUv = new Uint8Array(buffU);
if (!sections[6]) assert(false, "File has no BetaG2 section");
if (sections[6].length>1) assert(false, "File has more than one GetaG2 section");
fd.pos = sections[6][0].p;
const buff = await fd.read(sG);
const P = G.fromRprLEM(buff);
G.toRprBE(buffU, 0, P);
nextContributionHasher.update(buffUv);
return P;
}
async function processSection(idSection, gName, sectionName, nPoints, singularPointIds) {
const MAX_CHUNK_SIZE = 1024;
const G = curve[gName];
const sG = G.F.n8*2;
const buffU = new ArrayBuffer(G.F.n8*2);
const buffUv = new Uint8Array(buffU);
const singularPoints = [];
if (!sections[idSection]) assert(false, `File has no ${sectionName} section`);
if (sections[idSection].length>1) assert(false, `File has more than one ${sectionName} section`);
fd.pos = sections[idSection][0].p;
const seed= new Array(8);
for (let i=0; i<8; i++) {
seed[i] = crypto.randomBytes(4).readUInt32BE(0, true);
}
const taskManager = await buildTaskManager(verifyThread, {
ffjavascript: "ffjavascript"
},{
curve: curve.name,
seed: seed
});
let R1 = G.zero;
let R2 = G.zero;
for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log(`${sectionName}: ` + i);
const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
const buff = await fd.read(n*sG);
await taskManager.addTask({
cmd: "MUL",
G: gName,
n: n,
TotalPoints: nPoints,
buff: buff.slice(),
offset: i
}, async function(r) {
R1 = G.add(R1, r.R1);
R2 = G.add(R2, r.R2);
});
for (let j=i; j<i+n; j++) {
const P = G.fromRprLEM(buff, (j-i)*sG);
G.toRprBE(buffU, 0, P);
nextContributionHasher.update(buffUv);
if (singularPointIds.indexOf(j)>=0) singularPoints.push(P);
}
}
if (fd.pos != sections[idSection][0].p + sections[idSection][0].size) assert(false, `Invalid ${sectionName} section size`);
await taskManager.finish();
return {
R1: R1,
R2: R2,
singularPoints: singularPoints
};
}
async function test() {
const NN=2;
fd.pos = sections[3][0].p + curve.G2.F.n8*2*6;
const buff = await fd.read(curve.G2.F.n8*2*NN);
const ctx= {
modules: {
ffjavascript: require("ffjavascript"),
assert: require("assert")
}
};
verifyThread(ctx, {cmd: "INIT", curve: "bn128", seed: [0,0,0,0,0,0,0,0]});
const r = verifyThread(ctx, {
cmd: "MUL",
G: "G2",
n: NN,
TotalPoints: NN,
buff: buff.slice(),
offset: 0
});
if (!sameRatio(curve, key.tau.g1_s, key.tau.g1_sx, r.R1, r.R2)) {
console.log("Test does not match");
} else {
console.log("!!!!!!TEST OK!!!!!!!");
}
}
}
function verifyThread(ctx, task) {
const pow = 16;
const NSet = 1<<pow;
if (task.cmd == "INIT") {
ctx.assert = ctx.modules.assert;
if (task.curve == "bn128") {
ctx.curve = ctx.modules.ffjavascript.bn128;
} else {
ctx.assert(false, "curve not defined");
}
ctx.rndPerm = buildRndPerm(task.seed);
return {};
} else if (task.cmd == "MUL") {
const G = ctx.curve[task.G];
const sG = G.F.n8*2;
const acc1 = new Array(NSet);
const acc2 = new Array(NSet);
for (let i=0; i<NSet; i++) {
acc1[i] = G.zero;
acc2[i] = G.zero;
}
for (let i=0; i<task.n; i++) {
const P = G.fromRprLEM(task.buff, i*sG);
if (task.offset+i < task.TotalPoints-1) {
const r = ctx.rndPerm(task.offset + i);
acc1[r] = G.add(acc1[r], P);
}
if (task.offset+i > 0) {
const r = ctx.rndPerm(task.offset + i-1);
acc2[r] = G.add(acc2[r], P);
}
}
reduceExp(G, acc1, pow);
reduceExp(G, acc2, pow);
return {
R1: acc1[0],
R2: acc2[0]
};
} else {
ctx.assert(false, "Op not implemented");
}
function reduceExp(G, accs, p) {
if (p==1) return;
const half = 1 << (p-1);
for (let i=0; i<half-1; i++) {
accs[i] = G.add(accs[i], accs[half+i]);
accs[half-1] = G.add(accs[half-1], accs[half+i]);
}
reduceExp(G, accs, p-1);
for (let i=0; i<p-1;i++) accs[half-1] = G.double(accs[half-1]);
accs[0] = G.add(accs[0], accs[half-1] );
}
function buildRndPerm(aSeed) {
const seed = aSeed;
const nPages = 2;
const pageId = new Array(nPages);
const pages = new Array(nPages);
for (let i=0; i<nPages; i++) {
pageId[i] = -1;
pages[i] = new Array(16);
}
let nextLoad = 0;
function loadPage(p) {
seed[0] = p;
const rng = new ctx.modules.ffjavascript.ChaCha(seed);
for (let i=0; i<16; i++) {
pages[nextLoad][i] = rng.nextU32();
}
const c = nextLoad;
pageId[c] = p;
nextLoad = (nextLoad+1) % nPages;
return c;
}
return function(n) {
const page = n>>4;
let idx = pageId.indexOf(page);
if (idx < 0) idx = loadPage(page);
return pages[idx][n & 0xF] % (NSet-1);
};
}
}
module.exports = verify;
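The section checks above compress thousands of pairings into one sameRatio call per section. Conceptually, for points P_0..P_{n-1} the verifier forms R1 = sum(r_i * P_i) and R2 = sum(r_i * P_{i+1}) with random coefficients r_i; if every consecutive pair has ratio tau, so do R1 and R2, and a single bad pair survives the combination only with negligible probability. verifyThread realizes the random coefficients by ChaCha-permuting points into 2^16 buckets and folding them with reduceExp. The plain version of the idea, with integers standing in for group elements:
function randomCombinationCheck(points, tau) {
    let R1 = 0, R2 = 0;
    for (let i=0; i<points.length-1; i++) {
        const r = 1 + Math.floor(Math.random()*1000);  // random coefficient
        R1 += r * points[i];       // stands in for G.add / G.mulScalar
        R2 += r * points[i+1];
    }
    return R2 === tau * R1;        // stands in for the sameRatio check with the key
}
const tau = 5;
const powers = [1, 5, 25, 125, 625];               // tau^i for i = 0..4
console.log(randomCombinationCheck(powers, tau));  // true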

View File

@ -2,3 +2,5 @@
module.exports.newAccumulator = require("./powersoftau_new");
module.exports.exportChallange = require("./powersoftau_export");
module.exports.contribute = require("./powersoftau_contribute");
module.exports.importResponse = require("./powersoftau_import");
module.exports.verify = require("./powersoftau_verify");

View File

@ -49,8 +49,7 @@ function thread(self, fn, modules) {
if (res) {
if (res.buff) {
res.buff = new Uint8Array(res.buff);
self.postMessage(res, [res.buff.buffer]);
self.postMessage(res, [res.buff]);
} else {
self.postMessage(res);
}
@ -120,10 +119,6 @@ async function buildTaskManager(fn, mods, initTask) {
return;
}
if (data.buff) {
data.buff = Buffer.from(data.buff);
}
if (tm.workers[i].asyncCb) {
tm.workers[i].asyncCb(data).then(()=> {
finishTask();
@ -140,12 +135,10 @@ async function buildTaskManager(fn, mods, initTask) {
tm.workers[i].state = "WORKING";
if (task.buff) {
task.buff = new Uint8Array(task.buff);
tm.workers[i].worker.postMessage(task, [task.buff.buffer]);
tm.workers[i].worker.postMessage(task, [task.buff]);
} else {
tm.workers[i].worker.postMessage(task);
}
}
for (let i=0; i<concurrency; i++) {
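The changed postMessage call sites above now place the ArrayBuffer itself in the transfer list instead of a typed array's .buffer, matching the module-wide switch to raw ArrayBuffers: the buffer is moved to the receiving thread rather than copied. The semantics in miniature (shown with Node's worker_threads for concreteness; assumes an echoing ./worker.js):
const { Worker } = require("worker_threads");
const worker = new Worker("./worker.js");
const buff = new ArrayBuffer(1024);
worker.postMessage({ cmd: "MUL", buff: buff }, [buff]); // transferred, not copied
console.log(buff.byteLength); // 0: the sender's view is detached after transfer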