Commit 31471987 authored by John Doe's avatar John Doe
Browse files

Initial commit message

parents
node_modules
artifacts
cache
typechain-types
proofs
build
target
bin
package-lock.json
Protocol.md
yarn.lock
\ No newline at end of file
[package]
authors = [""]
compiler_version = "0.1"
[dependencies]
\ No newline at end of file
This diff is collapsed.
# Momiji testnet
Set membership/state transition using batched proofs in Noir
## Requirements
- Noir is based upon [Rust](https://www.rust-lang.org/tools/install), and we will need to use Noir's package manager `nargo` in order to compile our circuits. Further installation instructions can be found [here](https://noir-lang.org/getting_started/nargo_installation).
- Note: This version of the testnet was built using nargo 0.9.0; this is highly likely to change in future releases as Noir continues to grow.
- The Javascript tests and contracts live within a hardhat project, where we use [yarn](https://classic.yarnpkg.com/lang/en/docs/install/) as the package manager.
## Development
This repository contains an application that generates proofs of valid state transition and set membership using Pedersen-hash Merkle trees. This walkthrough will allow you to perform test transfers on a local testnet instance using a Solidity verifier.
Start by installing all the packages specified in the `package.json`
```shell
yarn install
```
For the initial Momiji testnet release, you should use nargo 0.9.0. Check which version you are using.
```shell
nargo --version
```
If not 0.9.0, install it.
```shell
noirup --version 0.9.0
```
After installing nargo it should be placed in our path and can be called from inside the folder. We will then compile our circuit and verifier. The contract tests will also run this step if needed.
```shell
npm run compile
```
### Solidity Verifier
Once we have compiled our program, we can generate a Solidity verifier rather than having to use the verifier provided by `nargo`.
This Solidity verifier can also be generated by calling `nargo codegen-verifier`. This will produce a Solidity verifier inside of the Noir project which you can then move into the correct `contracts` folder.
It is important to note that if you change your circuit and want to verify proofs in Solidity you must also regenerate your Solidity verifier. The verifier is based upon the circuit and if the circuit changes any previous verifier will be out of date.
### Running tests
The tests use the method of compiling the circuit using `nargo`. The tests also show how to complete proof verification using the Solidity verifier.
```
npm run test
```
You will also notice inputs in `Prover.toml` that satisfy the specified circuit constraints. This toml file will be automatically populated by the tests. You can run `nargo prove main` followed by `nargo verify main` to prove and verify a circuit natively. More info on the `nargo` commands can be found in the Noir documentation linked above or by running: `nargo --help` in your command line.
amount_public_in = "0x0000000000000000000000000000000000000000000000000000000000000000"
amount_public_out = "0x00000000000000000000000000000000000000000000000006f05b59d3b20000"
commitment_out = ["0x216acaebb8aad63b870a974e1f9e78ae404da0dd9f8a209c003546b4f1d4e309", "0x0b122c7b56a6e8e66be64e88f83ec2dc4a92038d1823622a0b3fdcf5bf026ba7", "0x0c919c72f9e1f4343745ed25a81d64eac9be1a985ed7bd76c522daa881bbe7f1", "0x0409604341f7a16a202f780e01f6e4b920e699c42091c3bba80bbde4badff707", "0x2790046cc3aef47a53fbfaf2afa9931a7ee7ac6d20e2e4345f97fcb82902f981", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720"]
new_root = "0x1868e63dbe4383339f1c07733bcbfeedfdc21eee06bd38b5caa9e3bba39e30b0"
nullifier_hashes = ["0x0c5a9ce50ae597c75d30e0e1f3b254ae94befc38c6f0645e4f16e4d04c7617e2", "0x2869a09c481fa55e08c7608e507cc4eba76109a65eb2d2f128c064cfcd93a793", "0x00939e1afd608c33a5034c8dece5d623fe4a5d006e30de9da22e605b40cc7a52", "0x0e4c5d00dae00ae4425d4cb525321543f1aff498675e6dece9480dff17cffe1f", "0x1f8723a5d7deb47d5a08e9b62fc58542d6e9f61c75d564b9be5513325b43c6f3", "0x13f6979d19548241f5a6a3e5ccd0ba6a22e8c77c92ff7f69adb516486de13140", "0x119dd9cd1dedb274808bef71a3d41c1897e393c038745eac249fe6f123b37a52", "0x10090f1824fdc4dfb613e2b768e5164a5b3940b754e5a7d8924028e9159bd51e", "0x00f23f383d68fd7f815ecb54d38bc2abb0806f5d75b803d6a07d6c27a4f9ea3b", "0x0c294f84ceb8efe846beadcd0c85318d3f2baa0d2414964a18f084b4f303a736", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000"]
old_root = "0x2a19cec67ca39baec7fe767faff4ccd1272d15272012cf7bcbd9f2efa1b13a91"
oracle = "0x03fdabb754f4f499c12406532fc924264db1b70702888a191683157056334d61"
recipient = "0x0000000000000000000000003c44cdddb6a900fa2b585dd299e03d12fa4293bc"
tx_in = ["0x14ce11999f658466af477da8d73f549c0bb83b2f4e3c5d020662d321c7be1235", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720", "0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720"]
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
pragma experimental ABIEncoderV2;
// import "./IElasticERC20.sol";
/*
Offshift PureL1Rollup State Contract
Technical Preview Testnet
*/
// Public inputs for one published batch. Field order must match the
// flattened bytes32[54] layout produced by flattenBatchPublicInputs and
// the Noir circuit's public-input ordering.
struct BatchPublicInputs {
bytes32[16] tx_in;
bytes32 amount_public_in;
bytes32 amount_public_out;
bytes32[16] commitment_out;
bytes32 recipient;
bytes32 oracle;
bytes32 old_root;
bytes32 new_root;
bytes32[16] nullifier_hashes;
}
// Minimal interface of the generated plonk verifier:
// verify(proof bytes, flattened public inputs) -> valid.
interface IVerifier {
function verify(
bytes calldata,
bytes32[] calldata
) external view returns (bool);
}
contract TechnicalPreview {
    // IElasticIERC20 public token;

    /// Plonk verifier used to check batch proofs.
    IVerifier public verifier;
    /// Append-only list of all published output commitments (UTXO set).
    bytes32[] public utxo;
    /// Every Merkle root this contract has accepted.
    bytes32[] public validRoots; // Valid
    mapping(bytes32 => bool) public nullifierHashes; // Spent UTXO hashes
    mapping(bytes32 => bool) public commitments; // Individual commitments
    /// Current historic Merkle root; initialised to the empty-tree root.
    bytes32 public merkleRoot =
        0x016a430aa58685aba1311244a973a3bc358859da86784be51094368e8fb6f720;

    /// @param _verifier Address of the deployed UltraVerifier.
    constructor(IVerifier _verifier) {
        // , address _token) {
        verifier = _verifier;
        // token = IElasticERC20(_token);
    }

    /// Publish a batch: verify the proof against the current root, mark the
    /// batch's nullifiers spent, append the new commitments, roll the root
    /// forward, and pay out the public output amount to the recipient.
    /// @param _proof Serialized plonk proof.
    /// @param _batch Public inputs the proof commits to.
    function publish(
        bytes calldata _proof,
        BatchPublicInputs calldata _batch
    ) public payable {
        require(uint256(_batch.old_root) == uint256(merkleRoot), "invalid root");
        require(verifier.verify(_proof, flattenBatchPublicInputs(_batch)), "invalid proof");
        for (uint256 i = 0; i < _batch.nullifier_hashes.length; i++) {
            if (_batch.nullifier_hashes[i] == 0) continue;
            // require (not assert): a replayed nullifier is invalid caller
            // input, not an internal invariant violation, so it should revert
            // with a reason string rather than a Panic.
            require(!nullifierHashes[_batch.nullifier_hashes[i]], "nullifier already spent");
            nullifierHashes[_batch.nullifier_hashes[i]] = true;
        }
        // Publish UTXO set
        for (uint256 i = 0; i < _batch.commitment_out.length; i++) {
            utxo.push(_batch.commitment_out[i]);
        }
        // token.mint(_batch.recipient, _batch.amount_public_out); // Process withdraw
        require(msg.value == uint256(_batch.amount_public_in), "Incorrect input amount");
        validRoots.push(_batch.new_root); // Add to valid roots
        merkleRoot = _batch.new_root; // Update the root
        // All state is updated before the external call
        // (checks-effects-interactions), so reentrancy cannot replay the batch.
        (bool sent, ) = address(uint160(uint256(_batch.recipient))).call{value: uint256(_batch.amount_public_out)}("");
        require(sent, "unbalanced reserves or gas griefed");
    }

    /// Read-only pass-through to the verifier.
    function verifyProof(
        bytes calldata _proof,
        bytes32[] memory _publicInputs
    ) public view returns (bool) {
        return verifier.verify(_proof, _publicInputs);
    }

    /// Flatten the struct into the bytes32[54] layout the verifier expects:
    /// tx_in[16] | amount_in | amount_out | commitment_out[16] | recipient |
    /// oracle | old_root | new_root | nullifier_hashes[16]  (16*3 + 6 = 54).
    function flattenBatchPublicInputs(
        BatchPublicInputs memory input
    ) public pure returns (bytes32[] memory) {
        bytes32[] memory flatArray = new bytes32[](54);
        uint256 idx = 0;
        for (uint i = 0; i < 16; i++) {
            flatArray[idx++] = input.tx_in[i];
        }
        flatArray[idx++] = input.amount_public_in;
        flatArray[idx++] = input.amount_public_out;
        for (uint i = 0; i < 16; i++) {
            flatArray[idx++] = input.commitment_out[i];
        }
        flatArray[idx++] = input.recipient;
        flatArray[idx++] = input.oracle;
        flatArray[idx++] = input.old_root;
        flatArray[idx++] = input.new_root;
        for (uint i = 0; i < 16; i++) {
            flatArray[idx++] = input.nullifier_hashes[i];
        }
        return flatArray;
    }
}
This diff is collapsed.
import { HardhatUserConfig } from "hardhat/config";
import "@nomicfoundation/hardhat-toolbox";
// Hardhat configuration for the Momiji testnet project.
const config: HardhatUserConfig = {
solidity: {
version: '0.8.10',
settings: {
evmVersion: 'london',
optimizer: { enabled: true, runs: 5000 },
},
},
networks: {
hardhat: {
blockGasLimit: 10000000,
// Low fixed gas price; the tests additionally pass gasPrice '0' on
// publish so balance deltas equal the transferred amounts exactly.
gasPrice: 10,
hardfork: 'istanbul',
},
},
mocha: {
// Proof generation shells out to nargo, which can take minutes.
timeout: 200000
}
};
export default config;
\ No newline at end of file
{
"name": "momiji",
"version": "1.0.0",
"description": "",
"scripts": {
"windows-compile": "nargo check && nargo compile p && nargo codegen-verifier && copy .\\contract\\plonk_vk.sol ..\\..\\contracts\\plonk_vk.sol",
"compile": "nargo check && nargo compile main && nargo codegen-verifier && cp ./contract/plonk_vk.sol ./contracts/plonk_vk.sol",
"test": "npx hardhat test ./test/transfer.test.mjs"
},
"repository": {
"type": "git",
"url": "git+https://open.offshift.io"
},
"author": "",
"license": "MIT",
"homepage": "https://open.offshift.io/",
"devDependencies": {
"@ethersproject/abi": "^5.6.4",
"@ethersproject/providers": "^5.6.8",
"@nomicfoundation/hardhat-chai-matchers": "^1.0.2",
"@nomicfoundation/hardhat-network-helpers": "^1.0.3",
"@nomicfoundation/hardhat-toolbox": "^1.0.2",
"@nomiclabs/hardhat-ethers": "^2.2.2",
"@nomiclabs/hardhat-etherscan": "^3.1.0",
"@openzeppelin/hardhat-upgrades": "^1.19.1",
"@typechain/hardhat": "^6.1.2",
"@types/memdown": "^3.0.0",
"@types/mocha": "^9.1.1",
"chai": "^4.3.6",
"ethers": "^5.6.9",
"hardhat": "^2.10.1",
"hardhat-gas-reporter": "^1.0.8",
"snarkjs": "^0.4.24",
"solidity-coverage": "^0.7.21",
"ts-node": "^10.9.1",
"typechain": "^8.1.0",
"typescript": "^4.7.4"
},
"dependencies": {
"@aztec/bb.js": "0.3.3",
"@chainlink/contracts": "^0.6.1",
"@openzeppelin/contracts": "^4.7.1",
"@typechain/ethers-v5": "^10.1.0",
"@uniswap/v3-core": "^1.0.1",
"bigint-buffer": "^1.1.5",
"circomlibjs": "^0.1.7",
"keccak256": "^1.0.6",
"sequelize": "^6.32.0",
"sqlite3": "^5.1.6"
}
}
use dep::std;
// Batched transfer circuit: proves that up to 16 input notes are spendable
// (Merkle membership + correct nullifiers) and that the batch conserves
// value against the public deposit/withdraw amounts. Public inputs mirror
// the Solidity BatchPublicInputs struct, in the same order.
fn main(
tx_in: pub [Field; 16],
amount_public_in: pub Field,
amount_public_out: pub Field,
commitment_out: pub [Field; 16],
recipient: pub Field,
oracle: pub Field,
old_root: pub Field,
new_root: pub Field,
nullifier_hashes: pub [Field; 16],
// Private witness: per-note secrets, raw note fields, and Merkle paths.
secrets: [Field; 16],
utxo_in: [Field; 48],
utxo_out: [Field; 48],
roots: [Field; 64],
leaves: [Field; 64],
indexes: [Field; 64],
hash_path: [Field; 288],
) {
// Four proofs per note (utxo/tx/batch/historic) — stride for the
// leaves/indexes/roots arrays.
let trees: Field = 4;
let mut sum_in: Field = amount_public_in;
let mut sum_out: Field = amount_public_out;
for i in 0..16 {
// A zero amount (note layout: [owner, amount, asset] at stride 3)
// marks an unused input slot.
if (utxo_in[i*3 + 1] != 0) {
// Ownership: the note's owner field must be pedersen(secret).
let owner = std::hash::pedersen([secrets[i]]);
assert(owner[0] == utxo_in[i*3 + 0]);
// Nullifier = pedersen(secret, secret); binds the spend to the secret.
assert(nullifier_hashes[i] == std::hash::pedersen([secrets[i], secrets[i]])[0]);
let commitment_in = std::hash::pedersen([utxo_in[i*3 + 0], utxo_in[i*3 + 1], utxo_in[i*3 + 2]])[0];
// hash_path stride per note is 18: 4 (utxo) + 4 (tx) + 5 (batch) + 5 (historic).
let mut hash_path_utxo: [Field; 4] = [0; 4];
let mut hash_path_tx: [Field; 4] = [0; 4];
let mut hash_path_batch: [Field; 5] = [0; 5];
let mut hash_path_historic: [Field; 5] = [0; 5];
for j in 0..4 {
hash_path_utxo[j] = hash_path[18*i + 0 + j];
hash_path_tx[j] = hash_path[18*i + 4 + j];
}
for l in 0..5 {
hash_path_batch[l] = hash_path[18*i + 8 + l];
hash_path_historic[l] = hash_path[18*i + 13 + l];
}
let leaf_tx = leaves[trees * i + 1];
let leaf_batch = leaves[trees * i + 2];
let leaf_historic = leaves[trees * i + 3];
let index_utxo = indexes[trees * i + 0];
let index_tx = indexes[trees * i + 1];
let index_batch = indexes[trees * i + 2];
let index_historic = indexes[trees * i + 3];
let root_tx = roots[trees * i + 1];
let root_batch = roots[trees * i + 2];
let root_historic = roots[trees * i + 3];
// Membership chain: commitment -> utxo-tree root (which is the tx leaf)
// -> tx-tree root.
let utxo_root = std::merkle::compute_merkle_root(
commitment_in,
index_utxo,
hash_path_utxo
);
assert(utxo_root == leaf_tx);
let tx_root = std::merkle::compute_merkle_root(
leaf_tx,
index_tx,
hash_path_tx
);
assert(tx_root == root_tx);
// Batch and historic membership checked against supplied leaves/roots.
let batch_root = std::merkle::compute_merkle_root(
leaf_batch,
index_batch,
hash_path_batch
);
assert(batch_root == root_batch);
let historic_root = std::merkle::compute_merkle_root(
leaf_historic,
index_historic,
hash_path_historic
);
assert(historic_root == root_historic);
// Accumulate the spent note's amount.
sum_in += utxo_in[i*3 + 1];
}
}
for k in 0..16 {
if (utxo_out[k*3 + 1] != 0) {
// Each output commitment must hash the claimed note fields.
let commitment_out_calc = std::hash::pedersen([utxo_out[k*3 + 0], utxo_out[k*3 + 1], utxo_out[k*3 + 2]]);
assert(commitment_out_calc[0] == commitment_out[k]);
sum_out += utxo_out[k*3 + 1];
}
else {
// Unused output slots must carry the canonical zero-leaf hash.
let zero_hash = 0xf35fcb490b7ea67c3ac26ed530fa5d8dfe8be344e7177ebb63fe02723fb6f725 as Field;
assert(commitment_out[k] == zero_hash);
}
}
// Recompute the batch's roots and bind them to the public old/new roots.
let utxo_root_calc: Field = pedersen_tree_four(commitment_out);
assert(tx_in[0] == utxo_root_calc);
let tx_root_calc: Field = pedersen_tree_four(tx_in);
// Oracle is fixed to pedersen(0) in this testnet release.
assert(oracle == std::hash::pedersen([0])[0]);
let batch_root_calc: Field = std::hash::pedersen([tx_root_calc, oracle])[0];
let new_root_calc: Field = std::hash::pedersen([batch_root_calc, old_root])[0];
assert(new_root == new_root_calc);
// Value conservation: public in + spent notes == public out + new notes.
assert(sum_in == sum_out);
// Tautology; presumably present so `recipient` is referenced and kept as a
// public input — TODO confirm against the verifier's input layout.
assert(recipient == recipient);
}
// Fold 16 leaves into the root of a depth-4 Pedersen Merkle tree.
// Levels are reduced in place: [16] -> [8] -> [4] -> [2] -> root.
// Loop bounds stay as compile-time constants (required by nargo 0.9).
fn pedersen_tree_four(leaves: [Field; 16]) -> Field {
    let mut nodes: [Field; 16] = leaves;
    // 16 leaves -> 8 nodes
    for i in 0..8 {
        nodes[i] = std::hash::pedersen([nodes[2*i], nodes[2*i + 1]])[0];
    }
    // 8 -> 4
    for i in 0..4 {
        nodes[i] = std::hash::pedersen([nodes[2*i], nodes[2*i + 1]])[0];
    }
    // 4 -> 2
    for i in 0..2 {
        nodes[i] = std::hash::pedersen([nodes[2*i], nodes[2*i + 1]])[0];
    }
    // Final pair -> root
    std::hash::pedersen([nodes[0], nodes[1]])[0]
}
import pkg from 'hardhat';
const { ethers } = pkg;
const { provider } = ethers;
import { fileURLToPath } from 'url';
import { dirname } from 'path';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
import { newBarretenbergApiSync } from '@aztec/bb.js/dest/index.js';
import { BigNumber } from "ethers";
import { expect } from "chai";
import path from 'path';
import { execSync } from 'child_process';
import { MerkleTree } from "../utils/MerkleTree.mjs";
import { generateUTXO, generateTestTransaction, generateTestPublish, generateDataToml, randomBytesFr, generateTreeProof } from '../utils/test_helpers.mjs';
import fs from "fs"
// Compilation smoke test: ensures the circuit compiles and the Solidity
// verifier artifact exists before the transfer tests run.
describe('It compiles circuit.', () => {
let verifierContract;
before(async () => {
// Re-check and compile the Noir circuit on every run.
execSync(`nargo check`)
execSync(`nargo compile main`)
// Generate the Solidity verifier only if missing; nargo emits it to
// ./contract, from where it is copied into ./contracts.
if (!fs.existsSync('./contracts/plonk_vk.sol')) {
execSync(`nargo codegen-verifier && cp ./contract/plonk_vk.sol ./contracts/plonk_vk.sol`)
}
});
before('Deploy contract', async () => {
console.log('Deploying verifier contract...')
execSync('npx hardhat compile', { cwd: path.join(__dirname, '../contracts') });
const Verifier = await ethers.getContractFactory('UltraVerifier');
verifierContract = await Verifier.deploy();
// .deployed() resolves to the contract instance once mined.
const verifierAddr = await verifierContract.deployed();
console.log(`Verifier deployed to ${verifierAddr.address}`);
});
});
// --- Module-scope fixtures shared (and mutated) by both transfer tests ---
let signers;
let recipient, recipient_new;
// Top-level await (ESM): a single Barretenberg backend shared by all trees.
const api = await newBarretenbergApiSync();
// Local mirror of the rollup's tree state. The *Old copies preserve the
// pre-publish trees so membership proofs can be generated against them.
let trees = {
utxo_tree: new MerkleTree(4, api),
tx_tree: new MerkleTree(4, api),
batch_tree: new MerkleTree(5, api),
historic_tree: new MerkleTree(5, api),
utxoTreeOld: new MerkleTree(4, api),
txTreeOld: new MerkleTree(4, api),
batchLeaf: "",
newHistoricRoot: ""
}
let utxoIn = []
let utxoOut = []
let treeProof = []
// Witness data object, eventually serialized to Prover.toml.
let data = generateDataToml("0", "0", api)
let amountPublic = {
amountIn: BigInt(0),
amountOut: BigInt(0)
}
// Root-level mocha hook: left-pad signer addresses to 32-byte hex fields.
before(async () => {
signers = await ethers.getSigners();
recipient = signers[1].address;
recipient = `0x`+ recipient.slice(2).padStart(64, "0")
recipient_new = signers[2].address;
recipient_new = `0x` + recipient_new.slice(2).padStart(64, "0")
});
// End-to-end flow: deposit into the rollup, then privately transfer and
// withdraw, verifying both batches through the on-chain Solidity verifier.
describe("Private Transfer works with Solidity verifier", () => {
let Verifier, verifierContract, TechnicalPreview, technicalPreviewContract;
before("Set up Verifier contract", async () => {
TechnicalPreview = await ethers.getContractFactory("TechnicalPreview");
Verifier = await ethers.getContractFactory("UltraVerifier");
verifierContract = await Verifier.deploy();
// The rollup state contract wraps the plonk verifier.
technicalPreviewContract = await TechnicalPreview.deploy(verifierContract.address);
})
it("Deposit works using Solidity verifier", async () => {
console.log("** Generating Test Batch **")
// 10 fresh notes of 0.1 ETH each, funded by a 1 ETH public deposit.
let batchSize0 = 10
let secret0 = []
for (let s = 0; s < batchSize0; s++) {
secret0.push(randomBytesFr(32))
}
let amountsOutUTXO = new Array(10).fill(BigInt(1e17))
utxoOut = generateUTXO(batchSize0, amountsOutUTXO, secret0, api);
amountPublic.amountIn = BigInt(1e18)
generateTestTransaction(utxoIn, utxoOut, trees, treeProof, amountPublic, data, recipient, api)
console.log("** Generating Transaction Proof #1 (Deposit) **")
generateTestPublish(trees, data, api)
// Shell out to nargo; Prover.toml was written by the helpers above.
execSync(`nargo prove main`)
let proof = fs.readFileSync('./proofs/main.proof').toString()
proof = `0x`+ proof
let amount_public_in,
amount_public_out,
commitment_out,
new_root,
nullifier_hashes,
old_root,
oracle,
tx_in;
// NOTE(review): eval() executes the generated Verifier.toml as JS to fill
// the lets above — trusted local output, but a TOML parser would be safer.
eval(fs.readFileSync('./Verifier.toml').toString());
let public_inputs = [
tx_in,
amount_public_in,
amount_public_out,
commitment_out,
recipient,
oracle,
old_root,
new_root,
nullifier_hashes
]
const before = await provider.getBalance(signers[1].address);
const technicalPreviewSigner = technicalPreviewContract.connect(signers[1]);
// gasPrice 0 so the sender's balance delta equals the deposit exactly.
await technicalPreviewSigner.publish(proof, public_inputs, {value: amount_public_in, gasPrice: '0'});
const after = await provider.getBalance(signers[1].address);
expect(before.sub(after)).to.equal(BigNumber.from(amount_public_in));
// The freshly created notes become the inputs of the next transfer.
utxoIn = utxoOut;
});
it("Private transfer works using Solidity verifier", async () => {
console.log("** Generating Transaction Proof #2 (Withdraw/Transfer) **")
// Spend the deposit notes: 5 new notes plus a 0.5 ETH public withdrawal.
let amountsOutUTXO = new Array(5).fill(BigInt(1e17))
treeProof = generateTreeProof(utxoIn, trees, api)
let batchSize1 = 5
let secret1 = []
for (let s = 0; s < batchSize1; s++) {
secret1.push(randomBytesFr(32))
}
utxoOut = generateUTXO(batchSize1, amountsOutUTXO, secret1, api);
amountPublic.amountIn = BigInt(0);
amountPublic.amountOut = BigInt(5e17);
// Carry the roots from the previous publish into a fresh witness object.
let oldRoot = data.old_root
let newRoot = data.new_root
data = generateDataToml(oldRoot, newRoot, api)
const before = await provider.getBalance(signers[2].address);
generateTestTransaction(utxoIn, utxoOut, trees, treeProof, amountPublic, data, recipient_new, api)
generateTestPublish(trees, data, api)
execSync(`nargo prove main`)
let proof = fs.readFileSync('./proofs/main.proof').toString()
proof = `0x`+ proof
let amount_public_in,
amount_public_out,
commitment_out,
new_root,
nullifier_hashes,
old_root,
oracle,
tx_in;
// NOTE(review): see the eval() caveat in the deposit test above.
eval(fs.readFileSync('./Verifier.toml').toString());
let public_inputs = [
tx_in,
amount_public_in,
amount_public_out,
commitment_out,
recipient_new,
oracle,
old_root,
new_root,
nullifier_hashes
]
await technicalPreviewContract.publish(proof, public_inputs)
const after = await provider.getBalance(signers[2].address);
// The recipient receives exactly the public output amount.
expect(after.sub(before)).to.equal(BigNumber.from(amount_public_out));
});
});
{
"compilerOptions": {
"target": "es2020",
"module": "commonjs",
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"strict": true,
"skipLibCheck": true,
"moduleResolution": "node"
},
"include": ["./test", "./utils"],
"files": ["hardhat.config.ts"]
}
\ No newline at end of file
// @ts-ignore -- no types
import { Fr } from '@aztec/bb.js/dest/types/index.js';
// @ts-ignore -- no types
// Pedersen-hash two hex-string nodes (0x-prefixed) into their parent node.
export function pedersenLeftRight(barretenberg, left, right) {
  // Strip the 0x prefix and reduce each operand into a field element.
  const toField = (hex) => Fr.fromBufferReduce(Buffer.from(hex.slice(2), 'hex'));
  const parent = barretenberg.pedersenPlookupCompressFields(toField(left), toField(right));
  return parent.toString('hex');
}
// Append-only Merkle tree over hex-string nodes, combined with a Pedersen
// hash. Nodes are stored sparsely in a Map keyed by "level-index"; missing
// siblings fall back to precomputed zero-subtree hashes.
export class MerkleTree {
zeroValue = "0xf35fcb490b7ea67c3ac26ed530fa5d8dfe8be344e7177ebb63fe02723fb6f725"; // sha256("Momiji")
levels;
hashLeftRight;
storage;
zeros;
totalLeaves;
barretenberg;
// levels: tree depth; barretenberg: hashing backend handed to hashLeftRight;
// defaultLeaves: optional initial leaves; hashLeftRight: node-combining fn.
constructor(
levels,
barretenberg,
defaultLeaves = [],
hashLeftRight = pedersenLeftRight) {
this.levels = levels;
this.hashLeftRight = hashLeftRight;
this.storage = new Map();
this.zeros = [];
this.totalLeaves = 0;
this.barretenberg = barretenberg;
// build zeros depends on tree levels
let currentZero = this.zeroValue;
this.zeros.push(currentZero);
for (let i = 0; i < levels; i++) {
currentZero = this.hashLeftRight(barretenberg, currentZero, currentZero);
this.zeros.push(currentZero);
}
if (defaultLeaves.length > 0) {
this.totalLeaves = defaultLeaves.length;
// store leaves with key value pair
let level = 0;
defaultLeaves.forEach((leaf, index) => {
this.storage.set(MerkleTree.indexToKey(level, index), leaf);
});
// build tree with initial leaves
level++;
let numberOfNodesInLevel = Math.ceil(this.totalLeaves / 2);
for (level; level <= this.levels; level++) {
for (let i = 0; i < numberOfNodesInLevel; i++) {
const leftKey = MerkleTree.indexToKey(level - 1, 2 * i);
const rightKey = MerkleTree.indexToKey(level - 1, 2 * i + 1);
const left = this.storage.get(leftKey);
// A missing right sibling is an empty subtree one level down.
const right = this.storage.get(rightKey) || this.zeros[level - 1];
if (!left) throw new Error("leftKey not found");
const node = this.hashLeftRight(barretenberg, left, right);
this.storage.set(MerkleTree.indexToKey(level, i), node);
}
numberOfNodesInLevel = Math.ceil(numberOfNodesInLevel / 2);
}
}
}
// Map key for the node at (level, index).
static indexToKey(level, index) {
return `${level}-${index}`;
}
// Linear scan for a node equal to `leaf`; returns its index or -1.
// NOTE(review): iterates every stored node, not only level-0 leaves —
// assumes node values are unique across levels; confirm before reuse.
getIndex(leaf) {
for (const [key, value] of this.storage) {
if (value === leaf) {
return Number(key.split("-")[1]);
}
}
return -1;
}
// Root of the tree; the all-zero-subtree root when the tree is empty.
root() {
return this.storage.get(MerkleTree.indexToKey(this.levels, 0)) || this.zeros[this.levels];
}
// Membership proof (siblings + parity bits) for the leaf at `indexOfLeaf`.
proof(indexOfLeaf) {
let pathElements = [];
let pathIndices = [];
const leaf = this.storage.get(MerkleTree.indexToKey(0, indexOfLeaf));
if (!leaf) throw new Error("leaf not found");
// store sibling into pathElements and target's indices into pathIndices
const handleIndex = (level, currentIndex, siblingIndex) => {
const siblingValue = this.storage.get(MerkleTree.indexToKey(level, siblingIndex)) || this.zeros[level];
pathElements.push(siblingValue);
pathIndices.push(currentIndex % 2);
};
this.traverse(indexOfLeaf, handleIndex);
return {
root: this.root(),
pathElements,
pathIndices,
leaf: leaf,
};
}
// Append a new leaf at the next free index.
insert(leaf) {
const index = this.totalLeaves;
this.update(index, leaf, true);
this.totalLeaves++;
}
// Replace the leaf at `index` and rehash its path up to the root.
update(index, newLeaf, isInsert = false) {
if (!isInsert && index >= this.totalLeaves) {
throw Error("Use insert method for new elements.");
} else if (isInsert && index < this.totalLeaves) {
throw Error("Use update method for existing elements.");
}
let keyValueToStore = [];
let currentElement = newLeaf;
const handleIndex = (level, currentIndex, siblingIndex) => {
const siblingElement = this.storage.get(MerkleTree.indexToKey(level, siblingIndex)) || this.zeros[level];
let left;
let right;
// Even index means the current node is the left child.
if (currentIndex % 2 === 0) {
left = currentElement;
right = siblingElement;
} else {
left = siblingElement;
right = currentElement;
}
keyValueToStore.push({
key: MerkleTree.indexToKey(level, currentIndex),
value: currentElement,
});
currentElement = this.hashLeftRight(this.barretenberg, left, right);
};
this.traverse(index, handleIndex);
// push root to the end
keyValueToStore.push({
key: MerkleTree.indexToKey(this.levels, 0),
value: currentElement,
});
// Commit all recomputed nodes at once.
keyValueToStore.forEach(o => {
this.storage.set(o.key, o.value);
});
}
// traverse from leaf to root with handler for target node and sibling node
traverse(indexOfLeaf, handler) {
let currentIndex = indexOfLeaf;
for (let i = 0; i < this.levels; i++) {
let siblingIndex;
if (currentIndex % 2 === 0) {
siblingIndex = currentIndex + 1;
} else {
siblingIndex = currentIndex - 1;
}
handler(i, currentIndex, siblingIndex);
currentIndex = Math.floor(currentIndex / 2);
}
}
}
\ No newline at end of file
import { randomBytes } from 'crypto'
import { readFileSync } from 'fs';
import { Fr } from '@aztec/bb.js/dest/types/index.js';
import { MerkleTree } from './MerkleTree.mjs';
import { keccak256 } from "@ethersproject/keccak256/lib/index.js";
import fs from 'fs'
// Draw `numBytes` of CSPRNG entropy and reduce it into a field element.
export function randomBytesFr(numBytes) {
  return Fr.fromBufferReduce(randomBytes(numBytes));
}
// Render an array as a multi-line TOML list literal, one quoted item per line.
export const format = (data) => {
  // Empty arrays become the literal empty list.
  if (data.length === 0) {
    return "[]";
  }
  const joined = data.join('",\n "');
  return `[\n "${joined}"\n]`;
};
// Serialize the witness data object into ./Prover.toml for nargo.
// Writes synchronously and overwrites any existing Prover.toml.
export const dumpToml = (data) => {
  // Build the TOML text in a local variable: the original reassigned the
  // `data` parameter to the output string, which obscured intent and made
  // the input object unusable after the call site read the code.
  const toml =
`
tx_in = ${format(data.tx_in)}
amount_public_in = "${data.amount_public_in}"
amount_public_out = "${data.amount_public_out}"
commitment_out = ${format(data.commitment_out)}
recipient = "${data.recipient}"
oracle = "${data.oracle}"
old_root = "${data.old_root}"
new_root = "${data.new_root}"
secrets = ${format(data.secrets)}
utxo_in = ${format(data.utxo_in)}
utxo_out = ${format(data.utxo_out)}
roots = ${format(data.roots)}
leaves = ${format(data.leaves)}
indexes = ${format(data.indexes)}
hash_path = ${format(data.hash_path)}
nullifier_hashes = ${format(data.nullifiers)}
`
  fs.writeFileSync('./Prover.toml', toml);
}
// Read the file at `path` and return its raw bytes as a Uint8Array.
export function path_to_uint8array(path) {
  const contents = readFileSync(path);
  return new Uint8Array(contents);
}
// Hex-encode a number, zero-padded to `length` bytes, optionally 0x-prefixed.
const toFixedHex = (number, pad0x, length = 32) => {
  const digits = number.toString(16).padStart(length * 2, '0');
  if (pad0x) {
    return `0x${digits}`;
  }
  return digits;
};
// Hash an asset identifier with keccak256.
// NOTE(review): the parameter is unconditionally overwritten with 0 before
// hashing, so every asset currently yields the same digest — presumably a
// single-asset placeholder for the testnet; confirm before adding support
// for multiple asset types.
export function getSolidityHash(asset) {
// Flatten the object
asset = 0;
return keccak256(asset);
}
// Prefix every hash-path element with 0x so it can be used as circuit input.
export function generateHashPathInput(hash_path) {
  return hash_path.map((element) => `0x` + element);
}
// Build `batchSize` note objects from parallel arrays of amounts and secrets.
// Each note carries its secret, pedersen-derived owner, amount (as BigInt
// and as field element) and the fixed asset-type field.
export function generateUTXO(batchSize, amounts, _secrets, BarretenbergApi) {
  const utxos = [];
  for (let i = 0; i < batchSize; i++) {
    const amountBN = amounts[i];
    utxos.push({
      secret: _secrets[i],
      // owner = pedersen(secret), mirroring the circuit's ownership check.
      owner: BarretenbergApi.pedersenPlookupCompress([_secrets[i]]),
      amountBN: amountBN,
      amount: Fr.fromString(toFixedHex(Number(amountBN.toString()), true)),
      assetType: Fr.fromBufferReduce(Buffer.from(getSolidityHash(0), 'hex')),
    });
  }
  return utxos;
}
// For every input note, assemble the four Merkle membership proofs the
// circuit consumes: note-in-utxo-tree, utxo-root-in-tx-tree,
// batch-leaf-in-batch-tree, and historic-root-in-historic-tree.
export function generateTreeProof(utxoIn, trees, BarretenbergApi) {
  // Membership bundle of `leafValue` in `tree`: leaf, index, root, siblings.
  const membership = (tree, leafValue) => {
    const index = tree.getIndex(leafValue);
    return {
      leaf: leafValue,
      index: index,
      root: tree.root(),
      hash_path: tree.proof(index).pathElements,
    };
  };
  const treeProof = [];
  for (const note of utxoIn) {
    // Recompute the note commitment = pedersen(owner, amount, assetType).
    const commitment = BarretenbergApi.pedersenPlookupCompress(
      [note.owner, note.amount, note.assetType]
    ).toString();
    treeProof.push({
      utxo: membership(trees.utxoTreeOld, commitment),
      tx: membership(trees.txTreeOld, trees.utxoTreeOld.root()),
      batch: membership(trees.batch_tree, trees.batchLeaf),
      historic: membership(trees.historic_tree, trees.newHistoricRoot),
    });
  }
  return treeProof;
}
// Produce a fresh all-zero witness data object, optionally seeded with the
// previous batch's old/new roots (pass "0" to keep the empty-tree default).
export function generateDataToml(oldRoot, newRoot, api) {
  // Canonical zero leaf, shared with the circuit and the Merkle trees.
  const ZERO_LEAF = "0xf35fcb490b7ea67c3ac26ed530fa5d8dfe8be344e7177ebb63fe02723fb6f725";
  const zeroHash = api.pedersenPlookupCompress([Fr.fromString(toFixedHex(0, true))]);
  return {
    tx_in: new Array(16).fill(ZERO_LEAF),
    secrets: new Array(16).fill('0'),
    utxo_in: new Array(48).fill('0'),
    utxo_out: new Array(48).fill('0'),
    // Oracle fixed to pedersen(0) for this testnet release.
    oracle: zeroHash.toString(),
    old_root: oldRoot !== "0" ? oldRoot : ZERO_LEAF,
    new_root: newRoot !== "0" ? newRoot : ZERO_LEAF,
    roots: new Array(64).fill('0'),
    leaves: new Array(64).fill('0'),
    indexes: new Array(64).fill('0'),
    hash_path: new Array(288).fill('0'),
    commitment_out: new Array(16).fill(ZERO_LEAF),
    amount_public_in: "0",
    amount_public_out: "0",
    nullifiers: new Array(16).fill('0'),
    recipient: Fr.fromString(toFixedHex(0, true)).toString()
  };
}
// Populate the witness object `data` for one batch: record the notes being
// spent (with their Merkle proofs from `treeProof`), insert the new output
// notes into the local utxo tree, set public amounts/recipient, and write
// Prover.toml. Mutates `data` and `trees.utxo_tree` in place.
export function generateTestTransaction(utxoIn, utxoOut, trees, treeProof, amountPublic, data, recipient, BarretenbergApi) {
let utxoInLen = utxoIn.length
let utxoOutLen = utxoOut.length
// UTXOs being spent
for (let i = 0; i < utxoInLen; i++) {
let ownerHex = utxoIn[i].owner.toString();
let amountHex = utxoIn[i].amount.toString();
let assetTypeHex = utxoIn[i].assetType.toString();
// Note commitment = pedersen(owner, amount, assetType).
let note_commitment = BarretenbergApi.pedersenPlookupCompress([utxoIn[i].owner, utxoIn[i].amount, utxoIn[i].assetType]);
let utxoLeaf = note_commitment.toString()
data.secrets[i] = utxoIn[i].secret
// Nullifier = pedersen(secret, secret), mirroring the circuit.
data.nullifiers[i] = BarretenbergApi.pedersenPlookupCompressFields(utxoIn[i].secret, utxoIn[i].secret)
// Witness strides: utxo_in packs 3 fields per note; leaves/indexes/roots
// pack 4 entries per note (utxo, tx, batch, historic).
data.utxo_in[i*3 + 0] = ownerHex
data.utxo_in[i*3 + 1] = amountHex
data.utxo_in[i*3 + 2] = assetTypeHex
data.leaves[i*4 + 0] = utxoLeaf
data.leaves[i*4 + 1] = treeProof[i].tx.leaf
data.leaves[i*4 + 2] = treeProof[i].batch.leaf
data.leaves[i*4 + 3] = treeProof[i].historic.leaf
data.indexes[i*4 + 0] = treeProof[i].utxo.index
data.indexes[i*4 + 1] = treeProof[i].tx.index
data.indexes[i*4 + 2] = treeProof[i].batch.index
data.indexes[i*4 + 3] = treeProof[i].historic.index
data.roots[i*4 + 0] = treeProof[i].utxo.root
data.roots[i*4 + 1] = treeProof[i].tx.root
data.roots[i*4 + 2] = treeProof[i].batch.root
data.roots[i*4 + 3] = treeProof[i].historic.root
let utxoPath = treeProof[i].utxo.hash_path
let txPath = treeProof[i].tx.hash_path
let batchPath = treeProof[i].batch.hash_path
let historicPath = treeProof[i].historic.hash_path
// hash_path stride is 18 per note: 4 utxo + 4 tx + 5 batch + 5 historic,
// matching the circuit's unpacking offsets.
for (let j = 0; j < utxoPath.length; j++) {
data.hash_path[i*18 + 0 + j] = utxoPath[j]
data.hash_path[i*18 + 4 + j] = txPath[j]
}
for (let k = 0; k < batchPath.length; k++) {
data.hash_path[i*18 + 8 + k] = batchPath[k]
data.hash_path[i*18 + 13 + k] = historicPath[k]
}
}
// UTXOs being generated
for (let i = 0; i < utxoOutLen; i++) {
let ownerHex = utxoOut[i].owner.toString();
let amountHex = utxoOut[i].amount.toString();
let assetTypeHex = utxoOut[i].assetType.toString();
let note_commitment = BarretenbergApi.pedersenPlookupCompress([utxoOut[i].owner, utxoOut[i].amount, utxoOut[i].assetType]);
let utxoLeaf = note_commitment.toString()
trees.utxo_tree.insert(utxoLeaf)
data.utxo_out[i*3 + 0] = ownerHex
data.utxo_out[i*3 + 1] = amountHex
data.utxo_out[i*3 + 2] = assetTypeHex
data.commitment_out[i] = utxoLeaf
}
// The batch's utxo-tree root is the first tx_in leaf, as the circuit checks.
data.tx_in[0] = trees.utxo_tree.root()
// NOTE(review): Number() on a bigint string loses precision above 2^53 —
// fine for these test values, confirm before using larger amounts.
data.amount_public_in = toFixedHex(Number(amountPublic.amountIn.toString()), true)
data.amount_public_out = toFixedHex(Number(amountPublic.amountOut.toString()), true)
data.recipient = recipient
dumpToml(data)
}
// Roll the local trees forward as if the batch were published on-chain:
// insert the new tx root, compute the batch leaf and new historic root the
// same way the circuit does, archive the old trees for proof generation,
// and start fresh utxo/tx trees. Mutates `trees` and `data`, then rewrites
// Prover.toml.
export function generateTestPublish(trees, data, api) {
// Publish our local set of test txs
let utxoTree = trees.utxo_tree
let txTree = trees.tx_tree
let batchTree = trees.batch_tree
let historicTree = trees.historic_tree
let utxoRoot = utxoTree.root()
txTree.insert(utxoRoot)
let txRoot = txTree.root()
let txRootFr = Fr.fromBufferReduce(Buffer.from(txRoot.slice(2), 'hex'))
// NOTE(review): fromBufferReduce receives a hex *string* here, unlike the
// Buffer used elsewhere — verify this is intentional.
let oracleFr = Fr.fromBufferReduce(toFixedHex(0, true))
let oracleHash = api.pedersenPlookupCompress([oracleFr])
// batch leaf = pedersen(txRoot, oracleHash), matching the circuit.
let batch = api.pedersenPlookupCompressFields(txRootFr, oracleHash)
let batchHex = batch.toString()
batchTree.insert(batchHex)
let oldHistoricRoot = Fr.fromBufferReduce(Buffer.from(data.new_root.slice(2), 'hex'))
// new historic root = pedersen(batch, oldRoot), matching the circuit.
let newHistoricRoot = api.pedersenPlookupCompress([batch, oldHistoricRoot])
let newHistoricRootHex = newHistoricRoot.toString()
historicTree.insert(newHistoricRootHex)
data.old_root = oldHistoricRoot.toString()
data.new_root = newHistoricRootHex
// Clearing the utxo tree for the next batch
// Saving the old tree for proofs
trees.utxoTreeOld = trees.utxo_tree
trees.txTreeOld = trees.tx_tree
trees.batchLeaf = batchHex
trees.newHistoricRoot = newHistoricRootHex
trees.utxo_tree = new MerkleTree(4, api)
trees.tx_tree = new MerkleTree(4, api)
dumpToml(data)
}
\ No newline at end of file
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment