Commit 16e13b9a authored by John Doe
Recursion.
parent e862f20f
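--- hardhat.config.js ---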
require("@nomicfoundation/hardhat-toolbox");
/** @type import('hardhat/config').HardhatUserConfig */
module.exports = {
solidity: {
version: '0.8.10',
settings: {
evmVersion: 'london',
optimizer: { enabled: true, runs: 5000 },
},
},
networks: {
hardhat: {
blockGasLimit: 10000000,
gasPrice: 10,
hardfork: 'istanbul',
},
},
mocha: {
timeout: 200000
}
};
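--- package.json ---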
{
"name": "noir-starter",
"version": "1.0.0",
"description": "A template repository to get started with writing zero knowledge programs with Noir.",
"scripts": {
"dev": "next",
"build": "next build",
"start": "next start",
"bbjs": "node ./scripts/bbjs.mjs",
"test": "vitest"
},
"dependencies": {
"@aztec/bb.js": "0.7.2",
"@noir-lang/acvm_js": "0.26.1",
"@typechain/ethers-v5": "^10.1.0",
"axios": "^1.3.4",
"dotenv": "^16.0.3",
"ethers": "^5.7.2",
"fflate": "^0.8.0",
"next": "latest",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-toastify": "^9.1.1",
"ts-node": "^10.9.1",
"typechain": "^8.1.0",
"@chainlink/contracts": "^0.6.1",
"@openzeppelin/contracts": "^4.7.1",
"@uniswap/v3-core": "^1.0.1",
"bigint-buffer": "^1.1.5",
"circomlibjs": "^0.1.7",
"keccak256": "^1.0.6",
"sequelize": "^6.32.0",
"sqlite3": "^5.1.6"
},
"devDependencies": {
"@types/node": "^18.15.5",
"@types/react": "^18.0.26",
"@types/react-dom": "^18.0.9",
"vitest": "^0.31.4",
"@ethersproject/abi": "^5.6.4",
"@ethersproject/providers": "^5.6.8",
"@nomicfoundation/hardhat-chai-matchers": "^1.0.2",
"@nomicfoundation/hardhat-network-helpers": "^1.0.3",
"@nomicfoundation/hardhat-toolbox": "^1.0.2",
"@nomiclabs/hardhat-ethers": "^2.2.2",
"@nomiclabs/hardhat-etherscan": "^3.1.0",
"@openzeppelin/hardhat-upgrades": "^1.19.1",
"@typechain/hardhat": "^6.1.2",
"@types/memdown": "^3.0.0",
"@types/mocha": "^9.1.1",
"chai": "^4.3.6",
"ethers": "^5.6.9",
"hardhat": "^2.10.1",
"hardhat-gas-reporter": "^1.0.8",
"snarkjs": "^0.4.24",
"solidity-coverage": "^0.7.21",
"ts-node": "^10.9.1",
"typechain": "^8.1.0",
"typescript": "^4.7.4"
}
}
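--- scripts/bbjs.mjs ---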
// Patches the installed @aztec/bb.js package in place: sed rewrites the named
// `{ RawBuffer }` re-export into a wildcard export in index.d.ts and index.js,
// so additional symbols (Fr, Barretenberg, Crs, ...) can be imported from the package root.
import { execSync } from 'child_process'
execSync(`cd ./node_modules/@aztec/bb.js/dest/node/ && sed -i 's/{ RawBuffer }/*/g' index.d.ts && sed -i 's/{ RawBuffer }/*/g' index.js`)
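--- test file (original path not shown) ---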
import pkg from 'hardhat';
import { getSigners, getContractFactory } from '@nomiclabs/hardhat-ethers/src/internal/helpers'
import { expect } from 'chai';
import { BigNumber } from 'ethers';
import { Noir } from '../utils/noir';
import { execSync } from 'child_process';
import { MerkleTree } from '../utils/MerkleTree'
import {
generateUTXO,
generateTestTransaction,
generateTestPublish,
generateDataToml,
randomBytesFr,
generateTreeProof,
treeConfig,
dumpToml,
dumpTomlRecursive
} from '../utils/test_helpers';
import fs from "fs"
// @ts-ignore
import { Fr } from '@aztec/bb.js';
import mainCircuit from '../circuits/main/target/main.json';
import recursiveCircuit from '../circuits/recursion/target/recursion.json';
import { beforeAll, describe, it } from 'vitest';
import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/src/signers';
let recompile: boolean = true;
let preprocessing: boolean = false;
let signers: SignerWithAddress[], recipients: string[], recipients_new: string[]
beforeAll(async () => {
signers = await getSigners(pkg);
recipients = new Array(16).fill(`0x` + signers[1].address.slice(2).padStart(64, "0"))
recipients_new = new Array(16).fill(`0x` + signers[2].address.slice(2).padStart(64, "0"))
if (recompile) {
execSync(`cd ./circuits/main/ && nargo check && nargo compile && nargo codegen-verifier`)
execSync(`cd ./circuits/recursion/ && nargo check && nargo compile && nargo codegen-verifier && cp ./contract/recursion/plonk_vk.sol ../../contracts/plonk_vk.sol`)
execSync(`npx hardhat compile`)
}
});
describe('It compiles the Noir program code, receiving circuit bytes and an ABI object.', async () => {
const noirInstances: { main: Noir, recursive: Noir } = {
main: new Noir(mainCircuit),
recursive: new Noir(recursiveCircuit)
}
const { main: noir } = noirInstances;
await noir.init();
let recursiveInputs: string[] = [];
let trees = {
utxo_tree: new MerkleTree(treeConfig.utxoDepth, noir.api),
tx_tree: new MerkleTree(treeConfig.txDepth, noir.api),
historic_tree: new MerkleTree(treeConfig.stateDepth, noir.api),
newHistoricRoot: ""
}
await trees.utxo_tree.init();
await trees.tx_tree.init();
await trees.historic_tree.init();
let utxoIn: any[] = []
let utxoOut: any[] = []
let treeProof: any[] = []
let data = await generateDataToml("0", "0", trees, noir.api)
let amountPublic = {
amountIn: BigInt(0),
amountOut: new Array(16).fill(BigInt(0))
}
let contractInputs: any[]
let Verifier, verifierContract, TechnicalPreview, technicalPreviewContract, XFTMock, xftMockContract;
let dataToml
beforeAll(async () => {
Verifier = await getContractFactory(pkg, "UltraVerifier");
verifierContract = await Verifier.deploy();
let verifierAddr = await verifierContract.deployed();
console.log(`Verifier deployed to ${verifierAddr.address}`);
let vk_hash = await verifierContract.getVerificationKeyHash();
XFTMock = await getContractFactory(pkg, "XFTMock");
xftMockContract = await XFTMock.deploy(vk_hash);
let xftMockContractAddr = await xftMockContract.deployed();
console.log(`Token deployed to ${xftMockContractAddr.address}`);
TechnicalPreview = await getContractFactory(pkg, "TechnicalPreview");
technicalPreviewContract = await TechnicalPreview.deploy(verifierContract.address, xftMockContract.address, "0x2f51641a7c20eec5405aedc1309dccfd3841bfd54e87d32957daa0371904fb11");
await xftMockContract.grantRole(await xftMockContract.MINTER_ROLE(), technicalPreviewContract.address);
let merkle_root = await technicalPreviewContract.merkleRoot()
let utxo_leaves = await technicalPreviewContract.getUtxoFromRoot(merkle_root);
for (let i = 0; i < utxo_leaves.length; i++) {
await trees.utxo_tree.insert(utxo_leaves[i]);
}
let historic_leaves = await technicalPreviewContract.getValidRoots();
for (let i = 0; i < historic_leaves.length; i++) {
await trees.historic_tree.insert(historic_leaves[i]);
}
})
it('Should generate valid proof for correct input (deposit)', async () => {
console.log("** Generating Test Batch **")
let batchSize0 = 1
let secret0: Fr[] = []
for (let s = 0; s < batchSize0; s++) {
secret0.push(randomBytesFr(32))
}
let amountsOutUTXO = new Array(1).fill(BigInt(1e18))
utxoOut = await generateUTXO(batchSize0, amountsOutUTXO, secret0, noir.api);
amountPublic.amountIn = BigInt(1e18)
await generateTestTransaction(utxoIn, utxoOut, trees, treeProof, amountPublic, data, recipients, noir.api, technicalPreviewContract)
console.log("** Generating Transaction Proof #1 (Deposit) **")
await generateTestPublish(trees, data, noir.api)
dumpToml(data)
const input = [
data.current_root,
data.deposit,
...data.withdrawals,
...data.commitment_out,
...data.recipients,
data.oracle,
...data.old_root_proof,
...data.nullifier_hashes,
...data.secrets,
...data.utxo_in,
...data.utxo_out,
...data.indexes,
...data.hash_path
];
const public_input = [
data.current_root,
data.deposit,
...data.withdrawals,
...data.commitment_out,
...data.recipients,
data.oracle,
...data.nullifier_hashes
]
const witness = await noir.generateWitness(input);
const proof = await noir.generateInnerProof(witness);
expect(proof instanceof Uint8Array).to.be.true;
const verified = await noir.verifyInnerProof(proof);
expect(verified).to.be.true;
const numPublicInputs = public_input.length + 1; // +1 for the circuit's return value (the batch `id`), appended after the public inputs
const { proofAsFields, vkAsFields, vkHash } = await noir.generateInnerProofArtifacts(
proof,
numPublicInputs,
);
const publicInputs = proofAsFields.slice(0, numPublicInputs)
expect(vkAsFields).to.be.of.length(114);
expect(vkHash).to.be.a('string');
const aggregationObject = Array(16).fill(
'0x0000000000000000000000000000000000000000000000000000000000000000',
);
recursiveInputs = [
...vkAsFields.map(e => e.toString()),
...proofAsFields,
...publicInputs,
...aggregationObject,
vkHash.toString(),
...data.tx_in,
data.old_root,
data.new_root,
data.oracle,
];
dataToml = {
verification_key: vkAsFields.map(e => e.toString()),
proof: proofAsFields,
public_inputs: publicInputs,
input_aggregation_object: aggregationObject,
key_hash: vkHash.toString(),
tx_ids: data.tx_in,
old_root: data.old_root,
new_root: data.new_root,
oracle: data.oracle,
}
const deposit = publicInputs[1]
const withdrawals = publicInputs.slice(2, 18)
const commitment_out = publicInputs.slice(18, 34)
const recipientPI = publicInputs.slice(34, 50)
const nullifier_hashes = publicInputs.slice(51, 67)
const id = publicInputs[67]
contractInputs = [
id,
commitment_out,
nullifier_hashes,
recipientPI,
withdrawals,
deposit
]
const savedOutputs = {
data: data,
dataToml: dataToml,
recursiveInputs: recursiveInputs,
contractInputs: contractInputs
}
fs.writeFileSync('./outputs.json', JSON.stringify(savedOutputs)); // Caches outputs
await noir.destroy();
});
it('Should verify proof within a proof', async () => {
const { recursive: noir } = noirInstances;
await noir.init();
const savedOutputs = JSON.parse(fs.readFileSync('./outputs.json').toString())
data = savedOutputs.data
dataToml = savedOutputs.dataToml
recursiveInputs = savedOutputs.recursiveInputs
contractInputs = savedOutputs.contractInputs
const proofInput = recursiveInputs.slice(114, 275); // the 161 serialized proof fields that follow the 114 vk fields
const witness = await noir.generateWitness(recursiveInputs);
const proof = await noir.generateOuterProof(witness);
expect(proof instanceof Uint8Array).to.be.true;
const numPublicInputs = 36; // 20 batch public inputs (key_hash, oracle, old/new roots, 16 tx ids) + the 16-limb output aggregation object
const { proofAsFields } = await noir.generateInnerProofArtifacts(
proof,
numPublicInputs,
);
const verified = await noir.verifyOuterProof(proof);
expect(verified).to.be.true;
contractInputs.push(proofInput)
contractInputs.push(proofAsFields.slice(numPublicInputs - 16, numPublicInputs))
await technicalPreviewContract.enqueue(contractInputs)
const batch = await technicalPreviewContract.getCurrentBatch();
fs.writeFileSync('./batch.json', JSON.stringify(batch))
dumpTomlRecursive(dataToml)
execSync(`cd ./circuits/recursion/ && nargo prove`)
const proofString = `0x` + fs.readFileSync('./circuits/recursion/proofs/recursion.proof').toString()
const batchPublicInputs = [
dataToml.key_hash,
dataToml.oracle,
dataToml.old_root,
dataToml.new_root,
batch
]
await technicalPreviewContract.publish(proofString, batchPublicInputs)
utxoIn = utxoOut
await noir.destroy();
});
it('Should generate valid proof for correct input (withdrawal)', async () => {
const { main: noir } = noirInstances
await noir.init();
trees.utxo_tree = new MerkleTree(treeConfig.utxoDepth, noir.api)
trees.tx_tree = new MerkleTree(treeConfig.txDepth, noir.api)
trees.historic_tree = new MerkleTree(treeConfig.stateDepth, noir.api)
await trees.utxo_tree.init()
await trees.tx_tree.init()
await trees.historic_tree.init()
console.log("Populating Historic Tree")
const historicRoots = await technicalPreviewContract.getValidRoots()
for (let r = 0; r < historicRoots.length; r++) {
await trees.historic_tree.insert(historicRoots[r])
}
console.log("** Generating Transaction Proof #2 (Withdraw/Transfer) **")
let amountsOutUTXO = new Array(1).fill(BigInt(5e17))
treeProof = []
for (let i = 0; i < utxoIn.length; i++) {
let utxoProof = await generateTreeProof(utxoIn[i], noir.api, technicalPreviewContract)
treeProof.push(utxoProof);
}
console.log("Tree proof generated")
let batchSize0 = 1
let secret0: Fr[] = []
for (let s = 0; s < batchSize0; s++) {
secret0.push(randomBytesFr(32))
}
amountPublic.amountIn = BigInt(0);
amountPublic.amountOut = new Array(5).fill(BigInt(1e17)); // five 0.1-token withdrawals, 5e17 in total
utxoOut = await generateUTXO(batchSize0, amountsOutUTXO, secret0, noir.api);
let oldRoot = dataToml.old_root
let newRoot = dataToml.new_root
data = await generateDataToml(oldRoot, newRoot, trees, noir.api)
await generateTestTransaction(utxoIn, utxoOut, trees, treeProof, amountPublic, data, recipients_new, noir.api, technicalPreviewContract)
console.log("Test transaction generated")
await generateTestPublish(trees, data, noir.api)
console.log("Test publish generated")
dumpToml(data)
const input = [
data.current_root,
data.deposit,
...data.withdrawals,
...data.commitment_out,
...data.recipients,
data.oracle,
...data.old_root_proof,
...data.nullifier_hashes,
...data.secrets,
...data.utxo_in,
...data.utxo_out,
...data.indexes,
...data.hash_path
];
const public_input = [
data.current_root,
data.deposit,
...data.withdrawals,
...data.commitment_out,
...data.recipients,
data.oracle,
...data.nullifier_hashes
]
console.log("Input formatted")
const witness = await noir.generateWitness(input);
console.log("witness calculated")
const proof = await noir.generateInnerProof(witness);
console.log("Proof generated")
expect(proof instanceof Uint8Array).to.be.true;
const verified = await noir.verifyInnerProof(proof);
expect(verified).to.be.true;
const numPublicInputs = public_input.length + 1; // +1 for the circuit's return value (the batch `id`)
const { proofAsFields, vkAsFields, vkHash } = await noir.generateInnerProofArtifacts(
proof,
numPublicInputs,
);
const publicInputs = proofAsFields.slice(0, numPublicInputs)
expect(vkAsFields).to.be.of.length(114);
expect(vkHash).to.be.a('string');
const aggregationObject = Array(16).fill(
'0x0000000000000000000000000000000000000000000000000000000000000000',
);
recursiveInputs = [
...vkAsFields.map(e => e.toString()),
...proofAsFields,
...publicInputs,
...aggregationObject,
vkHash.toString(),
...data.tx_in,
data.old_root,
data.new_root,
data.oracle,
];
dataToml = {
verification_key: vkAsFields.map(e => e.toString()),
proof: proofAsFields,
public_inputs: publicInputs,
input_aggregation_object: aggregationObject,
key_hash: vkHash.toString(),
tx_ids: data.tx_in,
old_root: data.old_root,
new_root: data.new_root,
oracle: data.oracle,
}
const current_root = publicInputs[0]
const deposit = publicInputs[1]
const withdrawals = publicInputs.slice(2, 18)
const commitment_out = publicInputs.slice(18, 34)
const recipientPI = publicInputs.slice(34, 50)
const oracle = publicInputs[50]
const nullifier_hashes = publicInputs.slice(51, 67)
const id = publicInputs[67]
contractInputs = [
id,
commitment_out,
nullifier_hashes,
recipientPI,
withdrawals,
deposit
]
const savedOutputs = {
data: data,
dataToml: dataToml,
recursiveInputs: recursiveInputs,
contractInputs: contractInputs
}
fs.writeFileSync('./outputs.json', JSON.stringify(savedOutputs));
await noir.destroy();
});
it('Should verify proof within a proof (withdrawal)', async () => {
const { recursive: noir } = noirInstances;
await noir.init();
const savedOutputs = JSON.parse(fs.readFileSync('./outputs.json').toString())
data = savedOutputs.data
dataToml = savedOutputs.dataToml
recursiveInputs = savedOutputs.recursiveInputs
contractInputs = savedOutputs.contractInputs
const proofInput = recursiveInputs.slice(114, 275); // the 161 serialized proof fields that follow the 114 vk fields
const witness = await noir.generateWitness(recursiveInputs);
const proof = await noir.generateOuterProof(witness);
expect(proof instanceof Uint8Array).to.be.true;
const numPublicInputs = 36; // 20 batch public inputs (key_hash, oracle, old/new roots, 16 tx ids) + the 16-limb output aggregation object
const { proofAsFields } = await noir.generateInnerProofArtifacts(
proof,
numPublicInputs,
);
const verified = await noir.verifyOuterProof(proof);
expect(verified).to.be.true;
contractInputs.push(proofInput)
contractInputs.push(proofAsFields.slice(numPublicInputs - 16, numPublicInputs))
await technicalPreviewContract.enqueue(contractInputs)
const batch = await technicalPreviewContract.getCurrentBatch();
fs.writeFileSync('./batch.json', JSON.stringify(batch))
dumpTomlRecursive(dataToml)
execSync(`cd ./circuits/recursion/ && nargo prove`)
const proofString = `0x` + fs.readFileSync('./circuits/recursion/proofs/recursion.proof').toString()
const batchPublicInputs = [
dataToml.key_hash,
dataToml.oracle,
dataToml.old_root,
dataToml.new_root,
batch
]
await technicalPreviewContract.publish(proofString, batchPublicInputs)
utxoIn = utxoOut
const balanceOut = await xftMockContract.balanceOf(signers[2].address)
expect(balanceOut).to.be.equal(BigNumber.from(BigInt(5e17).toString()))
await noir.destroy();
});
});
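--- tsconfig.json ---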
{
"compilerOptions": {
"target": "es2020",
"useDefineForClassFields": true,
"lib": [
"DOM",
"DOM.Iterable",
"ESNext"
],
"allowJs": false,
"skipLibCheck": true,
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"module": "CommonJS",
"moduleResolution": "Node",
"resolveJsonModule": true,
"isolatedModules": true,
"noEmit": true,
"jsx": "preserve",
"noImplicitAny": false,
"strictNullChecks": true,
"incremental": true
},
"exclude": [
"dist",
"node_modules"
],
"include": [
"./test",
"./src",
"./scripts"
],
}
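--- utils/MerkleTree.ts ---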
// @ts-ignore -- @aztec/bb.js ships without type declarations
import { Fr } from '@aztec/bb.js';
export async function pedersenLeftRight(
barretenberg: any,
left: string,
right: string) {
let leftBuffer = Fr.fromBufferReduce(Buffer.from(left.slice(2), 'hex'));
let rightBuffer = Fr.fromBufferReduce(Buffer.from(right.slice(2), 'hex'));
let hashRes = await barretenberg.pedersenPlookupCompressFields(leftBuffer, rightBuffer);
return hashRes.toString()
}
export class MerkleTree {
zeroValue = "0xf35fcb490b7ea67c3ac26ed530fa5d8dfe8be344e7177ebb63fe02723fb6f725"; // sha256("Momiji")
levels: number;
defaultLeaves: string[]
hashLeftRight: any;
storage: Map<string, string>;
zeros: string[];
totalLeaves: number;
barretenberg: any;
constructor(
levels: number,
barretenberg: any,
defaultLeaves: string[] = [],
hashLeftRight = pedersenLeftRight) {
this.levels = levels;
this.hashLeftRight = hashLeftRight;
this.storage = new Map();
this.zeros = [];
this.totalLeaves = 0;
this.barretenberg = barretenberg;
this.defaultLeaves = defaultLeaves
}
async init() {
// precompute the zero-subtree hash for each level of the tree
let currentZero = this.zeroValue;
this.zeros.push(currentZero);
for (let i = 0; i < this.levels; i++) {
currentZero = await this.hashLeftRight(this.barretenberg, currentZero, currentZero);
this.zeros.push(currentZero);
}
if (this.defaultLeaves.length > 0) {
this.totalLeaves = this.defaultLeaves.length;
// store leaves with key value pair
let level = 0;
this.defaultLeaves.forEach((leaf, index) => {
this.storage.set(MerkleTree.indexToKey(level, index), leaf);
});
// build tree with initial leaves
level++;
let numberOfNodesInLevel = Math.ceil(this.totalLeaves / 2);
for (level; level <= this.levels; level++) {
for (let i = 0; i < numberOfNodesInLevel; i++) {
const leftKey = MerkleTree.indexToKey(level - 1, 2 * i);
const rightKey = MerkleTree.indexToKey(level - 1, 2 * i + 1);
const left = this.storage.get(leftKey);
const right = this.storage.get(rightKey) || this.zeros[level - 1];
if (!left) throw new Error("leftKey not found");
const node = await this.hashLeftRight(this.barretenberg, left, right);
this.storage.set(MerkleTree.indexToKey(level, i), node);
}
numberOfNodesInLevel = Math.ceil(numberOfNodesInLevel / 2);
}
}
}
static indexToKey(level, index) {
return `${level}-${index}`;
}
getIndex(leaf) {
for (const [key, value] of this.storage) {
if (value === leaf) {
return Number(key.split("-")[1]);
}
}
return -1;
}
root() {
return this.storage.get(MerkleTree.indexToKey(this.levels, 0)) || this.zeros[this.levels];
}
proof(indexOfLeaf) {
let pathElements: string[] = [];
let pathIndices: number[] = [];
const leaf = this.storage.get(MerkleTree.indexToKey(0, indexOfLeaf));
if (!leaf) throw new Error("leaf not found");
// store sibling into pathElements and target's indices into pathIndices
const handleIndex = (level, currentIndex, siblingIndex) => {
const siblingValue = this.storage.get(MerkleTree.indexToKey(level, siblingIndex)) || this.zeros[level];
pathElements.push(siblingValue);
pathIndices.push(currentIndex % 2);
};
this.traverse(indexOfLeaf, handleIndex);
return {
root: this.root(),
pathElements,
pathIndices,
leaf: leaf,
};
}
async insert(leaf) {
const index = this.totalLeaves;
await this.update(index, leaf, true);
this.totalLeaves++;
}
async update(index, newLeaf, isInsert = false) {
if (!isInsert && index >= this.totalLeaves) {
throw Error("Use insert method for new elements.");
} else if (isInsert && index < this.totalLeaves) {
throw Error("Use update method for existing elements.");
}
let keyValueToStore: any[] = [];
let currentElement = newLeaf;
const handleIndex = async (level, currentIndex, siblingIndex) => {
const siblingElement = this.storage.get(MerkleTree.indexToKey(level, siblingIndex)) || this.zeros[level];
let left;
let right;
if (currentIndex % 2 === 0) {
left = currentElement;
right = siblingElement;
} else {
left = siblingElement;
right = currentElement;
}
keyValueToStore.push({
key: MerkleTree.indexToKey(level, currentIndex),
value: currentElement,
});
currentElement = await this.hashLeftRight(this.barretenberg, left, right);
};
await this.traverse(index, handleIndex);
// push root to the end
keyValueToStore.push({
key: MerkleTree.indexToKey(this.levels, 0),
value: currentElement,
});
keyValueToStore.forEach(o => {
this.storage.set(o.key, o.value);
});
}
// traverse from leaf to root with handler for target node and sibling node
async traverse(indexOfLeaf, handler) {
let currentIndex = indexOfLeaf;
for (let i = 0; i < this.levels; i++) {
let siblingIndex;
if (currentIndex % 2 === 0) {
siblingIndex = currentIndex + 1;
} else {
siblingIndex = currentIndex - 1;
}
await handler(i, currentIndex, siblingIndex);
currentIndex = Math.floor(currentIndex / 2);
}
}
}
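A minimal usage sketch for the tree above, assuming `bb` is an initialized Barretenberg API handle (as produced by `Noir.init()` in utils/noir.ts) and that leaves are 0x-prefixed 32-byte field strings, as in the tests:

// Build a depth-4 tree, insert a leaf, and take a membership proof.
const tree = new MerkleTree(4, bb);
await tree.init();
const leaf = `0x` + `11`.padStart(64, '0');
await tree.insert(leaf);
const { root, pathElements, pathIndices } = tree.proof(tree.getIndex(leaf));
// pathElements / pathIndices feed the circuit's hash_path / indexes inputs.

--- utils/noir.ts ---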
import { decompressSync } from 'fflate';
// @ts-ignore
import { Barretenberg, Crs, RawBuffer, Fr } from '@aztec/bb.js';
import { executeCircuit, compressWitness } from '@noir-lang/acvm_js';
import { ethers } from 'ethers'; // I'm lazy so I'm using ethers to pad my input
import { Ptr } from '@aztec/bb.js/dest/browser/types';
export class Noir {
circuit: any;
acir: string = '';
acirBufferCompressed: Uint8Array = Uint8Array.from([]);
acirBufferUncompressed: Uint8Array = Uint8Array.from([]);
api = {} as Barretenberg;
acirComposer = {} as Ptr;
constructor(circuit: Object) {
this.circuit = circuit;
}
async init() {
const isBrowser = typeof window !== 'undefined';
if (isBrowser) {
const { default: initACVM } = await import('@noir-lang/acvm_js');
await initACVM();
}
this.acirBufferCompressed = Buffer.from(this.circuit.bytecode, 'base64');
this.acirBufferUncompressed = decompressSync(this.acirBufferCompressed);
this.api = await Barretenberg.new(4);
const [exact, total, subgroup] = await this.api.acirGetCircuitSizes(
this.acirBufferUncompressed,
);
const subgroupSize = Math.pow(2, Math.ceil(Math.log2(total)));
const crs = await Crs.new(subgroupSize + 1);
await this.api.commonInitSlabAllocator(subgroupSize);
await this.api.srsInitSrs(
new RawBuffer(crs.getG1Data()),
crs.numPoints,
new RawBuffer(crs.getG2Data()),
);
this.acirComposer = await this.api.acirNewAcirComposer(subgroupSize);
}
// Generates the intermediate witnesses by using `input`
// as the initial set of witnesses and executing these
// against the circuit.
async generateWitness(input: any): Promise<Uint8Array> {
const initialWitness = new Map<number, string>();
for (let i = 1; i <= input.length; i++) {
initialWitness.set(i, ethers.utils.hexZeroPad(input[i - 1], 32));
}
const witnessMap = await executeCircuit(this.acirBufferCompressed, initialWitness, () => {
throw Error('unexpected oracle');
});
const witnessBuff = compressWitness(witnessMap);
return witnessBuff;
}
// Generates an inner proof. This is the proof that will be verified
// in another circuit.
//
// We set isRecursive to true, which will tell the backend to
// generate the proof using components that will make the proof
// easier to verify in a circuit. (See the usage sketch after this class.)
async generateInnerProof(witness: Uint8Array) {
const makeEasyToVerifyInCircuit = true;
return this.generateProof(witness, makeEasyToVerifyInCircuit);
}
// Generates artifacts that will be passed to the circuit that will verify this proof.
//
// Instead of passing the proof and verification key as a byte array, we pass them
// as fields which makes it cheaper to verify in a circuit.
//
// The number of public inputs denotes how many public inputs are in the inner proof.
async generateInnerProofArtifacts(proof: Uint8Array, numOfPublicInputs: number = 0) {
console.log('serializing proof');
const proofAsFields = await this.api.acirSerializeProofIntoFields(
this.acirComposer,
proof,
numOfPublicInputs,
);
console.log('proof serialized');
console.log('serializing vk');
await this.api.acirInitVerificationKey(this.acirComposer);
// Note: if you don't init the verification key, `acirSerializeVerificationKeyIntoFields` will just hang during serialization
const vk = await this.api.acirSerializeVerificationKeyIntoFields(this.acirComposer);
console.log('vk serialized');
return {
proofAsFields: proofAsFields.map(p => p.toString()),
vkAsFields: vk[0].map(field => field.toString()),
vkHash: vk[1].toString(),
};
}
// Generate an outer proof. This is the proof for the circuit which will verify
// inner proofs.
//
// The settings for this proof are the same as the settings for a "normal" proof
// ie one that is not in the recursive setting.
async generateOuterProof(witness: Uint8Array) {
const makeEasyToVerifyInCircuit = false;
return this.generateProof(witness, makeEasyToVerifyInCircuit);
}
async generateProof(witness: Uint8Array, makeEasyToVerifyInCircuit: boolean) {
console.log('Creating proof');
const decompressedWitness = decompressSync(witness);
const proof = await this.api.acirCreateProof(
this.acirComposer,
this.acirBufferUncompressed,
decompressedWitness,
makeEasyToVerifyInCircuit,
);
return proof;
}
async verifyInnerProof(proof: Uint8Array) {
const makeEasyToVerifyInCircuit = true;
return this.verifyProof(proof, makeEasyToVerifyInCircuit);
}
async verifyOuterProof(proof: Uint8Array) {
const makeEasyToVerifyInCircuit = false;
console.log('verifying outer proof');
const verified = await this.verifyProof(proof, makeEasyToVerifyInCircuit);
console.log(verified);
return verified;
}
async verifyProof(proof: Uint8Array, makeEasyToVerifyInCircuit: boolean) {
await this.api.acirInitVerificationKey(this.acirComposer);
const verified = await this.api.acirVerifyProof(
this.acirComposer,
proof,
makeEasyToVerifyInCircuit,
);
return verified;
}
async destroy() {
await this.api.destroy();
}
}
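A minimal sketch of the inner-proof flow this class wraps, assuming a compiled circuit artifact (e.g. circuits/main/target/main.json) and hypothetical `inputs` / `numPublicInputs` values ordered as the circuit expects:

// Prove, verify, and serialize artifacts for the recursive circuit.
const noir = new Noir(mainCircuit);
await noir.init();
const witness = await noir.generateWitness(inputs);
const proof = await noir.generateInnerProof(witness); // recursion-friendly proof
console.log(await noir.verifyInnerProof(proof)); // true if the proof is valid
const { proofAsFields, vkAsFields, vkHash } =
  await noir.generateInnerProofArtifacts(proof, numPublicInputs);
// proofAsFields / vkAsFields / vkHash become witness inputs to the outer circuit.
await noir.destroy();

--- utils/test_helpers.ts ---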
import { randomBytes } from 'crypto'
import { readFileSync } from 'fs';
import { MerkleTree } from './MerkleTree';
import { keccak256 } from "@ethersproject/keccak256/lib/index.js";
import fs from 'fs'
// @ts-ignore
import { Fr } from '@aztec/bb.js';
const ZERO_VALUE = "0xf35fcb490b7ea67c3ac26ed530fa5d8dfe8be344e7177ebb63fe02723fb6f725";
const MAX_VALUE = Fr.MAX_VALUE;
export const treeConfig = {
utxoDepth: 4,
txDepth: 4,
stateDepth: 9
}
export const circuitConfig = {
main: "./src/main.nr"
}
const treeSum = [treeConfig.utxoDepth, treeConfig.txDepth, treeConfig.stateDepth].reduce((a, b) => a + b) // combined hash-path length per input UTXO
// Evaluates each non-empty line of `js` as an assignment onto a fresh object
// (e.g. `foo = "1"` becomes public_inputs.foo = "1"); test-only helper.
function evalWithScopePrefix(js) {
let public_inputs = {};
js.split('\n').forEach(line => {
const trimmedLine = line.trim();
if (trimmedLine.length > 0) {
eval(`public_inputs.` + trimmedLine);
}
});
return public_inputs;
}
export function randomBytesFr(numBytes) {
const bytes = randomBytes(numBytes)
const bytesFr = Fr.fromBufferReduce(bytes)
return bytesFr
}
export const format = (data) => {
if (typeof data === "string") return `"${data}"`;
if (data.length === 0) return "[]";
return `[\n "${data.join('",\n "')}"\n]`;
}
export const dumpToml = (data) => {
let toml: any = [];
Object.entries(data).forEach(([key, value]) => {
toml.push(`${key} = ${format(value)}`);
});
toml = toml.join('\n');
fs.writeFileSync('./circuits/main/Prover.toml', toml);
}
export const dumpTomlRecursive = (data) => {
let toml: any = [];
Object.entries(data).forEach(([key, value]) => {
toml.push(`${key} = ${format(value)}`);
});
toml = toml.join('\n');
fs.writeFileSync('./circuits/recursion/Prover.toml', toml);
}
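For reference, a small sketch of what format/dumpToml emit (hypothetical keys and values):

// dumpToml({ deposit: "0x00", recipients: ["0xaa", "0xbb"] }) writes to
// ./circuits/main/Prover.toml:
//
// deposit = "0x00"
// recipients = [
//  "0xaa",
//  "0xbb"
// ]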
export function path_to_uint8array(path) {
let buffer = readFileSync(path);
return new Uint8Array(buffer);
}
const toFixedHex = (number, pad0x, length = 32) => {
let hexString = number.toString(16).padStart(length * 2, '0');
return (pad0x ? `0x` + hexString : hexString);
}
export function getSolidityHash(asset) {
return keccak256(asset);
}
export function generateHashPathInput(hash_path) {
let hash_path_input: string[] = [];
for (var i = 0; i < hash_path.length; i++) {
hash_path_input.push(`0x` + hash_path[i]);
}
return hash_path_input;
}
export async function generateUTXO(batchSize, amounts, _secrets, BarretenbergApi) {
let utxos: any[] = []
for (let i = 0; i < batchSize; i++) {
let amountBN = amounts[i]
let utxo = {
secret: _secrets[i],
owner: await BarretenbergApi.pedersenPlookupCompress([_secrets[i]]),
amountBN: amountBN,
amount: Fr.fromString(toFixedHex(Number(amountBN.toString()), true)),
assetType: Fr.fromBufferReduce(Buffer.from(getSolidityHash(0), 'hex')),
}
utxos.push(utxo)
}
return utxos
}
// Rebuilds the utxo/tx/historic trees from on-chain state and returns membership
// proofs (leaf, index, root, hash_path) for `utxoIn` against each tree.
export async function generateTreeProof(utxoIn, BarretenbergApi, contract) {
let trees = {
utxo_tree: new MerkleTree(treeConfig.utxoDepth, BarretenbergApi),
tx_tree: new MerkleTree(treeConfig.txDepth, BarretenbergApi),
historic_tree: new MerkleTree(treeConfig.stateDepth, BarretenbergApi),
}
await trees.utxo_tree.init()
await trees.tx_tree.init()
await trees.historic_tree.init()
let commitment = await BarretenbergApi.pedersenPlookupCompress(
[utxoIn.owner, utxoIn.amount, utxoIn.assetType]
)
commitment = commitment.toString()
let old_root = await contract.getRootFromUtxo(commitment)
let utxo_list = await contract.getUtxoFromRoot(old_root)
let historic_roots = await contract.getValidRoots()
let oracle = await BarretenbergApi.pedersenPlookupCompress([new Fr(0n)])
for (let i = 0; i < utxo_list.length; i++) {
await trees.utxo_tree.insert(utxo_list[i]);
}
let utxo_root = trees.utxo_tree.root()
await trees.tx_tree.insert(utxo_root);
let tx_root_Fr = Fr.fromString(trees.tx_tree.root())
let batch = await BarretenbergApi.pedersenPlookupCompress([tx_root_Fr, oracle])
let new_root = await BarretenbergApi.pedersenPlookupCompress([batch, Fr.fromString(old_root)])
new_root = new_root.toString()
for (let i = 0; i < historic_roots.length; i++) {
await trees.historic_tree.insert(historic_roots[i]);
}
let proofs = {
utxo: {
leaf: commitment,
index: trees.utxo_tree.getIndex(commitment),
root: utxo_root,
hash_path: trees.utxo_tree.proof(
trees.utxo_tree.getIndex(commitment)
).pathElements
},
tx: {
leaf: utxo_root,
index: trees.tx_tree.getIndex(utxo_root),
root: trees.tx_tree.root(),
hash_path: trees.tx_tree.proof(
trees.tx_tree.getIndex(utxo_root)
).pathElements
},
historic: {
leaf: new_root,
index: trees.historic_tree.getIndex(new_root),
root: trees.historic_tree.root(),
hash_path: trees.historic_tree.proof(
trees.historic_tree.getIndex(new_root)
).pathElements
}
}
return proofs
}
// Builds the default all-zero Prover.toml input set for the main circuit.
export async function generateDataToml(oldRoot, newRoot, trees, api) {
let zeroHex = toFixedHex(0, true)
let zeroHash = await api.pedersenPlookupCompress([Fr.fromString(zeroHex)])
const data = {
tx_in: new Array(16).fill(ZERO_VALUE),
secrets: new Array(16).fill(zeroHex),
utxo_in: new Array(48).fill(zeroHex),
utxo_out: new Array(48).fill(zeroHex),
oracle: zeroHash.toString(),
old_root_proof: new Array(16).fill(zeroHex),
old_root: ZERO_VALUE,
new_root: ZERO_VALUE,
current_root: trees.historic_tree.root(),
indexes: new Array(48).fill(zeroHex),
hash_path: new Array(16 * treeSum).fill(zeroHex),
commitment_out: new Array(16).fill(ZERO_VALUE),
deposit: zeroHex,
withdrawals: new Array(16).fill(zeroHex),
nullifier_hashes: new Array(16).fill(ZERO_VALUE),
recipients: new Array(16).fill(zeroHex)
}
if (oldRoot !== "0") data.old_root = oldRoot;
if (newRoot !== "0") data.new_root = newRoot;
return data
}
// Fills `data` in place with one batch transaction: spends `utxoIn` (secrets,
// nullifiers, membership paths), inserts `utxoOut` commitments, and records the
// public deposit, withdrawal, and recipient values.
export async function generateTestTransaction(utxoIn, utxoOut, trees, treeProof, amountPublic, data, recipients, BarretenbergApi, contract) {
let utxoInLen = utxoIn.length
let utxoOutLen = utxoOut.length
for (let i = 0; i < utxoInLen; i++) {
let ownerHex = utxoIn[i].owner.toString();
let amountHex = utxoIn[i].amount.toString();
let assetTypeHex = utxoIn[i].assetType.toString();
let note_commitment = await BarretenbergApi.pedersenPlookupCompress([utxoIn[i].owner, utxoIn[i].amount, utxoIn[i].assetType]);
let utxoLeaf = note_commitment.toString()
data.secrets[i] = utxoIn[i].secret.toString()
data.nullifier_hashes[i] = (await BarretenbergApi.pedersenPlookupCompress([utxoIn[i].secret, utxoIn[i].secret])).toString()
data.old_root_proof[i] = await contract.utxoPrevRoots(utxoLeaf)
data.utxo_in[i * 3 + 0] = ownerHex
data.utxo_in[i * 3 + 1] = amountHex
data.utxo_in[i * 3 + 2] = assetTypeHex
// three tree indexes (utxo, tx, historic) per input UTXO, packed into the 48-entry array
data.indexes[i * 3 + 0] = treeProof[i].utxo.index
data.indexes[i * 3 + 1] = treeProof[i].tx.index
data.indexes[i * 3 + 2] = treeProof[i].historic.index
let utxoPath = treeProof[i].utxo.hash_path
let txPath = treeProof[i].tx.hash_path
let historicPath = treeProof[i].historic.hash_path
// pack the three membership paths back-to-back per input: [utxo | tx | historic]
for (let j = 0; j < utxoPath.length; j++) {
data.hash_path[i * treeSum + 0 + j] = utxoPath[j]
data.hash_path[i * treeSum + treeConfig.utxoDepth + j] = txPath[j]
}
for (let k = 0; k < historicPath.length; k++) {
data.hash_path[i * treeSum + treeConfig.utxoDepth + treeConfig.txDepth + k] = historicPath[k]
}
}
for (let i = 0; i < utxoOutLen; i++) {
let ownerHex = utxoOut[i].owner.toString();
let amountHex = utxoOut[i].amount.toString();
let assetTypeHex = utxoOut[i].assetType.toString();
let note_commitment = await BarretenbergApi.pedersenPlookupCompress([utxoOut[i].owner, utxoOut[i].amount, utxoOut[i].assetType]);
let utxoLeaf = note_commitment.toString()
await trees.utxo_tree.insert(utxoLeaf)
data.utxo_out[i * 3 + 0] = ownerHex
data.utxo_out[i * 3 + 1] = amountHex
data.utxo_out[i * 3 + 2] = assetTypeHex
data.commitment_out[i] = utxoLeaf
}
data.tx_in[0] = trees.utxo_tree.root()
data.deposit = toFixedHex(Number(amountPublic.amountIn.toString()), true)
for (let w = 0; w < amountPublic.amountOut.length; w++) {
data.withdrawals[w] = toFixedHex(Number(amountPublic.amountOut[w].toString()), true)
}
for (let r = 0; r < recipients.length; r++) {
data.recipients[r] = recipients[r];
}
data.current_root = trees.historic_tree.root()
}
export async function generateTestPublish(trees, data, api) {
let utxoTree = trees.utxo_tree
let txTree = trees.tx_tree
let historicTree = trees.historic_tree
let utxoRoot = utxoTree.root()
await txTree.insert(utxoRoot)
let txRoot = txTree.root()
let txRootFr = Fr.fromBufferReduce(Buffer.from(txRoot.slice(2), 'hex'))
let oracleFr = Fr.fromString(toFixedHex(0, true))
let oracleHash = await api.pedersenPlookupCompress([oracleFr])
let batch = await api.pedersenPlookupCompressFields(txRootFr, oracleHash)
let oldHistoricRoot = Fr.fromBufferReduce(Buffer.from(data.new_root.slice(2), 'hex'))
let newHistoricRoot = await api.pedersenPlookupCompress([batch, oldHistoricRoot])
let newHistoricRootHex = newHistoricRoot.toString()
await historicTree.insert(newHistoricRootHex)
data.old_root = oldHistoricRoot.toString()
data.new_root = newHistoricRootHex
trees.utxoTreeOld = trees.utxo_tree
trees.txTreeOld = trees.tx_tree
trees.newHistoricRoot = newHistoricRootHex
// Mirrors the main circuit's witness ordering (the same layout the tests assemble); not consumed here.
const input = [
data.current_root,
data.deposit,
...data.withdrawals,
...data.commitment_out,
...data.recipients,
data.oracle,
...data.old_root_proof,
...data.nullifier_hashes,
...data.secrets,
...data.utxo_in,
...data.utxo_out,
...data.indexes,
...data.hash_path
];
dumpToml(data)
}
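--- vitest.config.ts ---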
/// <reference types="vitest" />
import { defineConfig } from 'vitest/config';
export default defineConfig({
root: '.',
test: {
testTimeout: 600000000,
clearMocks: true,
globals: true,
setupFiles: ['dotenv/config'],
watchExclude: ['node_modules', 'artifacts', 'cache'],
forceRerunTriggers: ['circuits/**/*.sol'],
},
});