diff --git a/README.md b/README.md
index 1ba6e71..eac7db8 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,16 @@
-
+
# Mezcal
-Mezcal (Nahuatl: mexcalli) - agave booze.
+Mezcal (Nahuatl: mexcalli - agave booze) - on-chain dark pool implementation using [Noir](https://noir-lang.org) and [Taceo coNoir](https://taceo.io). Hides EVERYTHING about orders and traders (tokens, amounts and addresses of traders are completely hidden). Trades are settled on an EVM chain using a very simplified version of [Aztec Protocol](https://aztec.network). The tradeoff is an O(N^2) order-matching engine.
+
+The code is highly experimental. The core code is located in `packages/contracts`.
+
+## Install coSnarks
+
+```sh
+cargo install --git https://github.com/TaceoLabs/co-snarks co-noir --rev 1b2db005ee550c028af824b3ec4e811d6e8a3705
+```
## TODO
@@ -21,22 +29,3 @@ Mezcal (Nahuatl: mexcalli) - agave booze.
- [ ] deploy as proxy
- [ ] test contracts with larger token amounts
- [ ] TODO(security): parse inputs to circuits instead of assuming they are correct. Same applies to types returned from `unconstrained` functions.
-
-### Backend
-
-- [x] prove using native bb
-- [ ] persist merkle trees
-- [ ] return pending tree roots
-
-### UI
-
-- [x] shield
-- [x] transfer
-- [ ] join (maybe behind the scenes, multicall)
-- [ ] unshield
-
-### compliance
-
-- [ ] unshield only mode
-- [ ] set shield limit to 10 USDC
-- [ ] disclaimer that the rollup is not audited
diff --git a/packages/contracts/contracts/PoolERC20.sol b/packages/contracts/contracts/PoolERC20.sol
index d86676c..ff934d7 100644
--- a/packages/contracts/contracts/PoolERC20.sol
+++ b/packages/contracts/contracts/PoolERC20.sol
@@ -20,6 +20,7 @@ contract PoolERC20 is PoolGeneric {
IVerifier unshieldVerifier;
IVerifier joinVerifier;
IVerifier transferVerifier;
+ IVerifier swapVerifier;
}
constructor(
@@ -27,12 +28,14 @@ contract PoolERC20 is PoolGeneric {
IVerifier unshieldVerifier,
IVerifier joinVerifier,
IVerifier transferVerifier,
+ IVerifier swapVerifier,
IVerifier rollupVerifier
) PoolGeneric(rollupVerifier) {
_poolErc20Storage().shieldVerifier = shieldVerifier;
_poolErc20Storage().unshieldVerifier = unshieldVerifier;
_poolErc20Storage().joinVerifier = joinVerifier;
_poolErc20Storage().transferVerifier = transferVerifier;
+ _poolErc20Storage().swapVerifier = swapVerifier;
}
function shield(
@@ -43,7 +46,7 @@ contract PoolERC20 is PoolGeneric {
) external {
token.safeTransferFrom(msg.sender, address(this), amount);
- PublicInputs.Type memory pi = PublicInputs.create(1 + 2 + U256_LIMBS);
+ PublicInputs.Type memory pi = PublicInputs.create(1 + 2 + 1);
pi.push(getNoteHashTree().root);
pi.push(address(token));
pi.pushUint256Limbs(amount);
@@ -73,7 +76,7 @@ contract PoolERC20 is PoolGeneric {
// TODO(security): bring back unshield. It was removed because nullifiers are no longer checked on tx level. Only when the tx is rolled up.
require(false, "not implemented");
- PublicInputs.Type memory pi = PublicInputs.create(6 + U256_LIMBS);
+ PublicInputs.Type memory pi = PublicInputs.create(6 + 1);
// params
pi.push(getNoteHashTree().root);
pi.push(getNullifierTree().root);
@@ -155,6 +158,39 @@ contract PoolERC20 is PoolGeneric {
}
}
+ // TODO: move to a separate contract
+ function swap(
+ bytes calldata proof,
+ NoteInput[4] calldata notes,
+ bytes32[2] calldata nullifiers
+ ) external {
+ PublicInputs.Type memory pi = PublicInputs.create(1 + 6);
+ pi.push(getNoteHashTree().root);
+ pi.push(notes[0].noteHash);
+ pi.push(notes[1].noteHash);
+ pi.push(notes[2].noteHash);
+ pi.push(notes[3].noteHash);
+ pi.push(nullifiers[0]);
+ pi.push(nullifiers[1]);
+ require(
+ _poolErc20Storage().swapVerifier.verify(proof, pi.finish()),
+ "Invalid swap proof"
+ );
+
+ {
+ NoteInput[] memory noteInputs = new NoteInput[](4);
+ noteInputs[0] = notes[0];
+ noteInputs[1] = notes[1];
+ noteInputs[2] = notes[2];
+ noteInputs[3] = notes[3];
+ bytes32[] memory nullifiersDyn = new bytes32[](nullifiers.length);
+ for (uint256 i = 0; i < nullifiers.length; i++) {
+ nullifiersDyn[i] = nullifiers[i];
+ }
+ _PoolGeneric_addPendingTx(noteInputs, nullifiersDyn);
+ }
+ }
+
function _poolErc20Storage()
private
pure
diff --git a/packages/contracts/contracts/Utils.sol b/packages/contracts/contracts/Utils.sol
index c95a819..92e5d13 100644
--- a/packages/contracts/contracts/Utils.sol
+++ b/packages/contracts/contracts/Utils.sol
@@ -89,10 +89,11 @@ library PublicInputs {
Type memory publicInputs,
uint256 value
) internal pure {
- uint256[U256_LIMBS] memory limbs = toNoirU256(value);
- for (uint256 i = 0; i < limbs.length; i++) {
- push(publicInputs, limbs[i]);
- }
+ push(publicInputs, value);
+ // uint256[U256_LIMBS] memory limbs = toNoirU256(value);
+ // for (uint256 i = 0; i < limbs.length; i++) {
+ // push(publicInputs, limbs[i]);
+ // }
}
function finish(
diff --git a/packages/contracts/deploy/00_deploy.ts b/packages/contracts/deploy/00_deploy.ts
index 840652c..2dde18a 100644
--- a/packages/contracts/deploy/00_deploy.ts
+++ b/packages/contracts/deploy/00_deploy.ts
@@ -34,6 +34,10 @@ const deploy: DeployFunction = async ({
"Erc20TransferVerifier",
"erc20_transfer",
);
+ const swapVerifier = await deployVerifier(
+ "LobRouterSwapVerifier",
+ "lob_router_swap",
+ );
const rollupVerifier = await deployVerifier("RollupVerifier", "rollup");
const pool = await typedDeployments.deploy("PoolERC20", {
@@ -44,6 +48,7 @@ const deploy: DeployFunction = async ({
unshieldVerifier.address,
joinVerifier.address,
transferVerifier.address,
+ swapVerifier.address,
rollupVerifier.address,
],
});
diff --git a/packages/contracts/noir/Nargo.toml b/packages/contracts/noir/Nargo.toml
index 7e79d22..c980995 100644
--- a/packages/contracts/noir/Nargo.toml
+++ b/packages/contracts/noir/Nargo.toml
@@ -5,6 +5,8 @@ members = [
"erc20_unshield",
"erc20_join",
"erc20_transfer",
+ "lob_router",
+ "lob_router_swap",
"rollup",
"common",
]
diff --git a/packages/contracts/noir/common/Nargo.toml b/packages/contracts/noir/common/Nargo.toml
index 30f1ff3..2f3b527 100644
--- a/packages/contracts/noir/common/Nargo.toml
+++ b/packages/contracts/noir/common/Nargo.toml
@@ -5,5 +5,4 @@ authors = ["Oleh Misarosh "]
[dependencies]
protocol_types = { git = "https://github.com/AztecProtocol/aztec-packages/", tag = "v0.86.0", directory = "noir-projects/noir-protocol-circuits/crates/types" }
-bignum = { tag = "v0.7.3", git = "https://github.com/noir-lang/noir-bignum/" }
nodash = { tag = "v0.41.2", git = "https://github.com/olehmisar/nodash/" }
diff --git a/packages/contracts/noir/common/src/erc20_note.nr b/packages/contracts/noir/common/src/erc20_note.nr
index 915da0b..df49f94 100644
--- a/packages/contracts/noir/common/src/erc20_note.nr
+++ b/packages/contracts/noir/common/src/erc20_note.nr
@@ -33,13 +33,13 @@ impl Erc20Note {
}
}
-impl crate::Serialize<6> for Erc20Note {
- fn serialize(self) -> [Field; 6] {
+impl crate::Serialize<4> for Erc20Note {
+ fn serialize(self) -> [Field; 4] {
self
.owner
.serialize()
.concat(self.amount.token.serialize())
- .concat(self.amount.amount.limbs.map(|x| x.into()))
+ .concat([self.amount.amount.to_integer()])
.concat([self.randomness])
}
}
diff --git a/packages/contracts/noir/common/src/lib.nr b/packages/contracts/noir/common/src/lib.nr
index 0e001ac..cc61519 100644
--- a/packages/contracts/noir/common/src/lib.nr
+++ b/packages/contracts/noir/common/src/lib.nr
@@ -1,7 +1,7 @@
-use bignum::BigNum;
use protocol_types::hash::poseidon2_hash_with_separator;
mod context;
+mod uint253;
mod erc20_note;
pub(crate) mod note;
mod owned_note;
@@ -46,7 +46,7 @@ pub global GENERATOR_INDEX__NOTE_HASH: Field = 3;
// Note: keep in sync with other languages
pub global U256_LIMBS: u32 = 3;
-pub type U256 = bignum::U256;
+pub type U256 = uint253::U253;
/// User address within the rollup
#[derive(Eq, Serialize)]
@@ -96,6 +96,13 @@ impl std::ops::Sub for TokenAmount {
}
}
+impl std::cmp::Ord for TokenAmount {
+ fn cmp(self, other: Self) -> std::cmp::Ordering {
+ self._check(other);
+ self.amount.cmp(other.amount)
+ }
+}
+
pub struct TreeRoots {
pub note_hash_root: Field,
}
diff --git a/packages/contracts/noir/common/src/uint253.nr b/packages/contracts/noir/common/src/uint253.nr
new file mode 100644
index 0000000..db17fb9
--- /dev/null
+++ b/packages/contracts/noir/common/src/uint253.nr
@@ -0,0 +1,191 @@
+// Copyright (c) 2025 Clarified Labs, Inc.
+// SPDX-License-Identifier: Apache-2.0
+
+use std::cmp::{Eq, Ord, Ordering};
+use std::ops::{Add, Div, Mul, Rem, Sub};
+
+// Maximum value for U253 (2^253 - 1), chosen to fit within Aztec's field arithmetic bounds
+pub global MAX_U253: Field = 0x1fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff;
+
+pub global U253_PACKED_LEN: u32 = 1;
+
+pub struct U253 {
+ value: Field,
+}
+
+impl U253 {
+ pub fn new(value: Field) -> Self {
+ value.assert_max_bit_size::<253>();
+ Self { value }
+ }
+
+ pub fn new_unchecked(value: Field) -> Self {
+ Self { value }
+ }
+
+ pub fn from_integer(value: Field) -> Self {
+ value.assert_max_bit_size::<253>();
+ Self { value }
+ }
+
+ pub fn to_integer(self) -> Field {
+ self.value
+ }
+
+ pub fn zero() -> Self {
+ Self { value: 0 }
+ }
+
+ pub fn one() -> Self {
+ Self { value: 1 }
+ }
+
+ pub fn max() -> Self {
+ Self { value: MAX_U253 }
+ }
+
+ pub fn is_zero(self) -> bool {
+ self.value == 0
+ }
+
+ // Performs division with remainder using binary long division algorithm
+ // Returns (quotient, remainder) tuple
+ pub unconstrained fn div_rem_unconstrained(self, other: Self) -> (Self, Self) {
+ assert(!(other.value == 0), "Division by zero");
+
+ self.value.assert_max_bit_size::<253>();
+ other.value.assert_max_bit_size::<253>();
+
+ let bits: [u1; 253] = self.value.to_be_bits();
+ let divisor = other.value;
+
+ let mut quotient: Field = 0;
+ let mut remainder: Field = 0;
+
+ // Process each bit from MSB to LSB, similar to paper-and-pencil division
+ for i in 0..253 {
+ // Shift remainder left by 1 bit and add next bit
+ remainder = remainder * 2 + (bits[i] as Field);
+
+ // Single comparison to determine if we should subtract divisor
+ // Changed to just !remainder.lt(divisor) which means remainder >= divisor
+ if !remainder.lt(divisor) {
+ remainder = remainder - divisor;
+ quotient = quotient * 2 + 1;
+ } else {
+ quotient = quotient * 2;
+ }
+ }
+ (Self { value: quotient }, Self { value: remainder })
+ }
+
+ // Performs division with remainder using unconstrained binary long division algorithm, then
+ // constrains the result via multiplicative properties
+ // Returns (quotient, remainder) tuple
+    pub fn div_rem(self, other: Self) -> (Self, Self) {
+        assert(!(other.value == 0), "Division by zero");
+
+        if self.value == other.value {
+            (Self::one(), Self::zero())
+        } else if self.is_zero() {
+            (Self::zero(), Self::zero())
+        } else if other.value == 1 {
+            (self, Self::zero())
+        } else if self.value.lt(other.value) {
+            (Self::zero(), self)
+        } else {
+            //Safety: constraining this immediately after by checking the division property
+            let (quotient, remainder) = unsafe { self.div_rem_unconstrained(other) };
+            // Soundness: require remainder < divisor, otherwise (quotient - k, remainder + k*other) also passes.
+            assert(remainder.value.lt(other.value), "Remainder must be less than divisor");
+            // Verify quotient * other + remainder == self
+            assert(
+                quotient * other + remainder == self,
+                "Unconstrained division result is incorrect",
+            );
+            (quotient, remainder)
+        }
+    }
+
+ // Adds two U253 values without overflow checks - use with caution
+ pub fn add_unchecked(self, other: Self) -> Self {
+ Self { value: self.value + other.value }
+ }
+
+ // Subtracts two U253 values without underflow checks - use with caution
+ pub fn sub_unchecked(self, other: Self) -> Self {
+ Self { value: self.value - other.value }
+ }
+}
+
+impl Add for U253 {
+ fn add(self, other: Self) -> Self {
+ let result = self.value + other.value;
+ result.assert_max_bit_size::<253>();
+
+ assert(!MAX_U253.lt(result), "U253 addition overflow");
+ assert(!result.lt(self.value), "U253 addition overflow");
+ assert(!result.lt(other.value), "U253 addition overflow");
+ Self { value: result }
+ }
+}
+
+impl Sub for U253 {
+ fn sub(self, other: Self) -> Self {
+ assert(
+ other.value.lt(self.value) | other.value.eq(self.value),
+ "U253 subtraction underflow",
+ );
+ let result = self.value - other.value;
+ result.assert_max_bit_size::<253>();
+ Self { value: result }
+ }
+}
+
+impl Mul for U253 {
+    fn mul(self, other: Self) -> Self {
+        let result = self.value * other.value;
+
+        result.assert_max_bit_size::<253>();
+        // Allow multiplication by 1 without additional checks, otherwise check for overflow. TODO(security): these heuristic checks may not exclude field wrap-around (e.g. self = other = 2^127 gives a product that wraps mod p yet still satisfies the bit-size and ordering checks) — confirm soundness or use a limb-based multiply.
+        assert(
+            (self.value == 1)
+                | (other.value == 1)
+                | (result.lt(MAX_U253 + 1) & !result.lt(self.value) & !result.lt(other.value)),
+            "U253 multiplication overflow",
+        );
+        Self { value: result }
+    }
+}
+
+impl Div for U253 {
+ fn div(self, other: Self) -> Self {
+ let (quotient, _) = self.div_rem(other);
+ quotient
+ }
+}
+
+impl Rem for U253 {
+ fn rem(self, other: Self) -> Self {
+ let (_, remainder) = self.div_rem(other);
+ remainder
+ }
+}
+
+impl Ord for U253 {
+ fn cmp(self, other: Self) -> Ordering {
+ if self.value.lt(other.value) {
+ Ordering::less()
+ } else if self.value.eq(other.value) {
+ Ordering::equal()
+ } else {
+ Ordering::greater()
+ }
+ }
+}
+
+impl Eq for U253 {
+ fn eq(self, other: Self) -> bool {
+ self.value.eq(other.value)
+ }
+}
diff --git a/packages/contracts/noir/erc20_transfer/Prover1.toml b/packages/contracts/noir/erc20_transfer/Prover1.toml
new file mode 100644
index 0000000..f37e4b8
--- /dev/null
+++ b/packages/contracts/noir/erc20_transfer/Prover1.toml
@@ -0,0 +1,116 @@
+from_secret_key = "0x118f09bc73ec486db2030077142f2bceba2a4d4c9e0f6147d776f8ca8ec02ff1"
+change_randomness = "0x0577601b056366fa61051149b73897c8dc91c8563d08d17c50d9322b7a5098ae"
+
+[tree_roots]
+note_hash_root = "0x12c082c76e5eb67cec5d7db4471729b5fb15d495fa6a2d4e5c2b3406946223a0"
+nullifier_root = "0x0aa63c509390ad66ecd821998aabb16a818bcc5db5cf4accc0ce1821745244e9"
+
+[from_note_inputs]
+note_sibling_path = [
+ "0x0000000000000000000000000000000000000000000000000000000000000000",
+ "0x0b63a53787021a4a962a452c2921b3663aff1ffd8d5510540f8e659e782956f1",
+ "0x0e34ac2c09f45a503d2908bcb12f1cbae5fa4065759c88d501c097506a8b2290",
+ "0x21f9172d72fdcdafc312eee05cf5092980dda821da5b760a9fb8dbdf607c8a20",
+ "0x2373ea368857ec7af97e7b470d705848e2bf93ed7bef142a490f2119bcf82d8e",
+ "0x120157cfaaa49ce3da30f8b47879114977c24b266d58b0ac18b325d878aafddf",
+ "0x01c28fe1059ae0237b72334700697bdf465e03df03986fe05200cadeda66bd76",
+ "0x2d78ed82f93b61ba718b17c2dfe5b52375b4d37cbbed6f1fc98b47614b0cf21b",
+ "0x067243231eddf4222f3911defbba7705aff06ed45960b27f6f91319196ef97e1",
+ "0x1849b85f3c693693e732dfc4577217acc18295193bede09ce8b97ad910310972",
+ "0x2a775ea761d20435b31fa2c33ff07663e24542ffb9e7b293dfce3042eb104686",
+ "0x0f320b0703439a8114f81593de99cd0b8f3b9bf854601abb5b2ea0e8a3dda4a7",
+ "0x0d07f6e7a8a0e9199d6d92801fff867002ff5b4808962f9da2ba5ce1bdd26a73",
+ "0x1c4954081e324939350febc2b918a293ebcdaead01be95ec02fcbe8d2c1635d1",
+ "0x0197f2171ef99c2d053ee1fb5ff5ab288d56b9b41b4716c9214a4d97facc4c4a",
+ "0x2b9cdd484c5ba1e4d6efcc3f18734b5ac4c4a0b9102e2aeb48521a661d3feee9",
+ "0x14f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3",
+ "0x071d7627ae3b2eabda8a810227bf04206370ac78dbf6c372380182dbd3711fe3",
+ "0x2fdc08d9fe075ac58cb8c00f98697861a13b3ab6f9d41a4e768f75e477475bf5",
+ "0x20165fe405652104dceaeeca92950aa5adc571b8cafe192878cba58ff1be49c5",
+ "0x1c8c3ca0b3a3d75850fcd4dc7bf1e3445cd0cfff3ca510630fd90b47e8a24755",
+ "0x1f0c1a8fb16b0d2ac9a146d7ae20d8d179695a92a79ed66fc45d9da4532459b3",
+ "0x038146ec5a2573e1c30d2fb32c66c8440f426fbd108082df41c7bebd1d521c30",
+ "0x17d3d12b17fe762de4b835b2180b012e808816a7f2ff69ecb9d65188235d8fd4",
+ "0x0e1a6b7d63a6e5a9e54e8f391dd4e9d49cdfedcbc87f02cd34d4641d2eb30491",
+ "0x09244eec34977ff795fc41036996ce974136377f521ac8eb9e04642d204783d2",
+ "0x1646d6f544ec36df9dc41f778a7ef1690a53c730b501471b6acd202194a7e8e9",
+ "0x064769603ba3f6c41f664d266ecb9a3a0f6567cd3e48b40f34d4894ee4c361b3",
+ "0x1595bb3cd19f84619dc2e368175a88d8627a7439eda9397202cdb1167531fd3f",
+ "0x2a529be462b81ca30265b558763b1498289c9d88277ab14f0838cb1fce4b472c",
+ "0x0c08da612363088ad0bbc78abd233e8ace4c05a56fdabdd5e5e9b05e428bdaee",
+ "0x14748d0241710ef47f54b931ac5a58082b1d56b0f0c30d55fb71a6e8c9a6be14",
+ "0x0b59baa35b9dc267744f0ccb4e3b0255c1fc512460d91130c6bc19fb2668568d",
+ "0x2c45bb0c3d5bc1dc98e0baef09ff46d18c1a451e724f41c2b675549bb5c80e59",
+ "0x121468e6710bf1ffec6d0f26743afe6f88ef55dab40b83ca0a39bc44b196374c",
+ "0x2042c32c823a7440ceb6c342f9125f1fe426b02c527cd8fb28c85d02b705e759",
+ "0x0d582c10ff8115413aa5b70564fdd2f3cefe1f33a1e43a47bc495081e91e73e5",
+ "0x0f55a0d491a9da093eb999fa0dffaf904620cbc78d07e63c6f795c5c7512b523",
+ "0x21849764e1aa64b83a69e39d27eedaec2a8f97066e5ddb74634ffdb11388dd9a",
+ "0x2e33ee2008411c04b99c24b313513d097a0d21a5040b6193d1f978b8226892d6",
+]
+note_index = "0x0"
+
+[from_note_inputs.nullifier_low_leaf_preimage]
+nullifier = "16803307459015040401852171866520504004549441014858337312433795419933165652271"
+next_nullifier = "16947817896211750602831357878879841536953337186959472835195250218326948105343"
+next_index = "38"
+
+[from_note_inputs.nullifier_low_leaf_membership_witness]
+leaf_index = "53"
+sibling_path = [
+ "89608827205400042212603164533674623076734752674846833106321039558861918097",
+ "11301393563187046258491469457343761693629010154592613576413167355835381418907",
+ "12390023533950167391600770646920656387356437563612710359669154167054707439741",
+ "6737950067533114345487670792281012677572324355958600670420132499294961450278",
+ "6708144597151111882649888870478814964992609871804461820795238719880019958165",
+ "7997675936567769706222583500490338185379747281415057124249902920940843577209",
+ "796074195456137668475057404256202455048248910468542119987582633322559749494",
+ "20567739078944838550556895816409602128127282297589578747131836752205066334747",
+ "2915761020738377646169465098196184536995852317462848975418156916828302972897",
+ "10985760690611977917867463287126968335324276333731556907069004868774077204850",
+ "19208047717975195819992968481289292904158208618635067144381052124352153142918",
+ "6873111190261103763395069460662520014470628472871405490586772273844549690535",
+ "5894139036143562089612233756205231544611692010506775540918923829608719739507",
+ "12794319561613039897672261721253788651586435024857268094532550402122135778769",
+ "720777601321551456724742356376872832235514487302799006897322578639686749258",
+ "19726607866286112953874979389205149577323021278529259017954198462517737418473",
+ "9477901871732605408863140319391985875503693577321165842544029785283526188723",
+ "3218243980816964110015535469652973420290887819006413761652914020854170460131",
+ "21647471328696313483506044180817939310547082363167430262013183074005768690677",
+ "14513543603428597604998785424833526732416414663942895493375066920249255152069",
+ "12912536786691007423957206067517486813236154886763950786309034005218474477397",
+ "14043083790220302639747777938344554939065181077244110063366429914379380349363",
+ "1585351311391412912983327123858240918248160277807437690996718569990466444336",
+ "10777443874874414095971316544407516389536700701770254896428522920996906045396",
+ "6379059771196981783531842116523729103253487220527074934863013362203865842833",
+ "4134966835882445041808556871336951519851774168539778125719567488662398010322",
+ "10076045549529902860501553551276683947306207322373274327817594791372714928361",
+ "2840050510901032730295917342517416830172237966947356225177945514569852215731",
+ "9763122299140113778865332540237465612993478463904652910937740892142312750399",
+ "19143097027756952285085354173215061221134962025215187577014441848927452612396",
+ "5443396159062520964690002960163216626227972885246029607890253069325844601582",
+ "9252184438226219647561583700014163671525787751930886881150609055515912093204",
+ "5133978852121333258945738158023312408330097040934952294511033236283137218189",
+ "20024968741681435992617962040670787662064588050911389666593896948755442699865",
+ "8177692210107365315198900131750690704270322702202776727756223630359548802892",
+ "14591970101429819852875418865351062544941910746983613947047278625268406019929",
+ "6035853708389102381677690046728119823868576653788717783661700952558999073765",
+ "6935984739519493649769258491382157191948193966139936850002438965559842485539",
+ "15160592699256931057355536169588270738517810359373901881744156960877034659226",
+ "20898143714352063775313258090973641367368749294647965931365904988797017821910",
+]
+
+[from_note_inputs.note]
+randomness = "0x2f6e3831b5312229f58e719b7ef458465987a7f8473c592fe56a4c4b1e2fdf48"
+
+[from_note_inputs.note.owner]
+inner = "0x28c7eef33d7e5d31b9d2cc09c783294d91c36e05b7b815d96549f91fe0d3b0d4"
+
+[from_note_inputs.note.amount.token]
+inner = "0x2279b7a0a67db372996a5fab50d91eaa73d2ebe6"
+
+[from_note_inputs.note.amount.amount]
+value = "500"
+
+[amount]
+value = "123"
diff --git a/packages/contracts/noir/erc20_transfer/Prover2.toml b/packages/contracts/noir/erc20_transfer/Prover2.toml
new file mode 100644
index 0000000..9f47ffa
--- /dev/null
+++ b/packages/contracts/noir/erc20_transfer/Prover2.toml
@@ -0,0 +1,3 @@
+to_randomness = "0x0d71211e0e89af974bf816f24ba78f9c99ab50ab9f19367490cedb993d0526fe"
+[to]
+inner = "0x2b7bd70beb13e310f4593dbc807332acba0f01c4586f17cf984eedd7e1437414"
diff --git a/packages/contracts/noir/lob_router/Nargo.toml b/packages/contracts/noir/lob_router/Nargo.toml
new file mode 100644
index 0000000..8e8b236
--- /dev/null
+++ b/packages/contracts/noir/lob_router/Nargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "lob_router"
+type = "lib"
+authors = ["Oleh Misarosh "]
+
+[dependencies]
+common = { path = "../common" }
+erc20 = { path = "../erc20" }
diff --git a/packages/contracts/noir/lob_router/src/lib.nr b/packages/contracts/noir/lob_router/src/lib.nr
new file mode 100644
index 0000000..5ac0ac3
--- /dev/null
+++ b/packages/contracts/noir/lob_router/src/lib.nr
@@ -0,0 +1,58 @@
+mod LobRouter {
+ use common::OwnedNote;
+
+ pub fn swap(
+ context: &mut common::Context,
+ seller_secret_key: Field,
+ seller_note: erc20::Erc20NoteConsumptionInputs,
+ seller_order: crate::Order,
+ seller_randomness: Field,
+ buyer_secret_key: Field,
+ buyer_note: erc20::Erc20NoteConsumptionInputs,
+ buyer_order: crate::Order,
+ buyer_randomness: Field,
+ ) {
+ // TODO(security): orders must be signed by parties
+
+ assert(
+ seller_order.sell_amount == buyer_order.buy_amount,
+ "seller order amount does not match buyer order amount",
+ );
+ assert(
+ seller_order.buy_amount == buyer_order.sell_amount,
+ "buyer order amount does not match seller order amount",
+ );
+ let seller_amount = seller_order.sell_amount;
+ let buyer_amount = seller_order.buy_amount;
+ assert(seller_amount.token == seller_note.note.amount.token, "invalid seller note token");
+ assert(buyer_amount.token == buyer_note.note.amount.token, "invalid buyer note token");
+
+ erc20::Token::transfer(
+ context,
+ seller_secret_key,
+ seller_note,
+ buyer_note.note.owner(),
+ seller_amount,
+ buyer_randomness,
+ seller_randomness,
+ );
+
+ erc20::Token::transfer(
+ context,
+ buyer_secret_key,
+ buyer_note,
+ seller_note.note.owner(),
+ buyer_amount,
+ seller_randomness,
+ buyer_randomness,
+ );
+ }
+}
+
+pub struct Order {
+ pub sell_amount: common::TokenAmount,
+ pub buy_amount: common::TokenAmount,
+ /// Hide order contents from other parties and outside world
+ // TODO(perf): not sure if this is needed because orders are secret shared in an MPC network
+ pub randomness: Field,
+}
diff --git a/packages/contracts/noir/lob_router_swap/Nargo.toml b/packages/contracts/noir/lob_router_swap/Nargo.toml
new file mode 100644
index 0000000..1e9d5a5
--- /dev/null
+++ b/packages/contracts/noir/lob_router_swap/Nargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "lob_router_swap"
+type = "bin"
+authors = ["Oleh Misarosh"]
+
+[dependencies]
+common = { path = "../common" }
+erc20 = { path = "../erc20" }
+lob_router = { path = "../lob_router" }
diff --git a/packages/contracts/noir/lob_router_swap/src/main.nr b/packages/contracts/noir/lob_router_swap/src/main.nr
new file mode 100644
--- /dev/null
+++ b/packages/contracts/noir/lob_router_swap/src/main.nr
@@ -0,0 +1,29 @@
+// NOTE(review): this hunk was garbled in extraction; the signature below is
+// reconstructed from the call on the following lines — confirm against the repository.
+fn main(
+    tree_roots: common::TreeRoots,
+    seller_secret_key: Field,
+    seller_note: erc20::Erc20NoteConsumptionInputs,
+    seller_order: lob_router::Order,
+    seller_randomness: Field,
+    buyer_secret_key: Field,
+    buyer_note: erc20::Erc20NoteConsumptionInputs,
+    buyer_order: lob_router::Order,
+    buyer_randomness: Field,
+) -> pub common::Result<4, 2> {
+ let mut context = common::Context::from(tree_roots);
+
+ lob_router::LobRouter::swap(
+ &mut context,
+ seller_secret_key,
+ seller_note,
+ seller_order,
+ seller_randomness,
+ buyer_secret_key,
+ buyer_note,
+ buyer_order,
+ buyer_randomness,
+ );
+
+ context.finish()
+}
diff --git a/packages/contracts/noir/run.sh b/packages/contracts/noir/run.sh
new file mode 100755
index 0000000..c2871dc
--- /dev/null
+++ b/packages/contracts/noir/run.sh
@@ -0,0 +1,86 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+
+source timer.sh
+
+nargo compile --silence-warnings
+echo "Compiled"
+
+CIRCUIT_NAME=lob_router_swap
+CIRCUIT=target/$CIRCUIT_NAME.json
+
+# merge Prover1.toml and Prover2.toml into Prover.toml
+# Convert TOML to JSON
+dasel -f "$CIRCUIT_NAME/Prover1.toml" -r toml -w json >prover1.json
+dasel -f "$CIRCUIT_NAME/Prover2.toml" -r toml -w json >prover2.json
+# Merge JSON with jq
+jq -s '.[0] * .[1]' prover1.json prover2.json >merged.json
+# Convert back to TOML
+dasel -f merged.json -r json -w toml >"$CIRCUIT_NAME/Prover.toml"
+rm prover1.json prover2.json merged.json
+
+# split input into shares
+co-noir split-input --circuit $CIRCUIT --input $CIRCUIT_NAME/Prover1.toml --protocol REP3 --out-dir target
+co-noir split-input --circuit $CIRCUIT --input $CIRCUIT_NAME/Prover2.toml --protocol REP3 --out-dir target
+echo "Inputs split"
+
+# merge inputs into single input file
+timeStart "merge-input-shares"
+co-noir merge-input-shares --inputs target/Prover1.toml.0.shared --inputs target/Prover2.toml.0.shared --protocol REP3 --out target/Prover.toml.0.shared
+co-noir merge-input-shares --inputs target/Prover1.toml.1.shared --inputs target/Prover2.toml.1.shared --protocol REP3 --out target/Prover.toml.1.shared
+co-noir merge-input-shares --inputs target/Prover1.toml.2.shared --inputs target/Prover2.toml.2.shared --protocol REP3 --out target/Prover.toml.2.shared
+timeEnd "merge-input-shares"
+
+# run witness extension in MPC
+timeStart "mpc-generate-witness"
+co-noir generate-witness --input target/Prover.toml.0.shared --circuit $CIRCUIT --protocol REP3 --config configs/party0.toml --out target/witness.gz.0.shared &
+co-noir generate-witness --input target/Prover.toml.1.shared --circuit $CIRCUIT --protocol REP3 --config configs/party1.toml --out target/witness.gz.1.shared &
+co-noir generate-witness --input target/Prover.toml.2.shared --circuit $CIRCUIT --protocol REP3 --config configs/party2.toml --out target/witness.gz.2.shared
+wait $(jobs -p)
+timeEnd "mpc-generate-witness"
+
+# run proving in MPC
+timeStart "mpc-build-proving-key"
+co-noir build-proving-key --witness target/witness.gz.0.shared --circuit $CIRCUIT --protocol REP3 --config configs/party0.toml --out target/proving_key.0 &
+co-noir build-proving-key --witness target/witness.gz.1.shared --circuit $CIRCUIT --protocol REP3 --config configs/party1.toml --out target/proving_key.1 &
+co-noir build-proving-key --witness target/witness.gz.2.shared --circuit $CIRCUIT --protocol REP3 --config configs/party2.toml --out target/proving_key.2
+wait $(jobs -p)
+timeEnd "mpc-build-proving-key"
+
+timeStart "mpc-generate-proof"
+co-noir generate-proof --proving-key target/proving_key.0 --protocol REP3 --hasher keccak --crs ~/.bb-crs/bn254_g1.dat --config configs/party0.toml --out target/proof.0.proof --public-input target/public_input.json &
+co-noir generate-proof --proving-key target/proving_key.1 --protocol REP3 --hasher keccak --crs ~/.bb-crs/bn254_g1.dat --config configs/party1.toml --out target/proof.1.proof &
+co-noir generate-proof --proving-key target/proving_key.2 --protocol REP3 --hasher keccak --crs ~/.bb-crs/bn254_g1.dat --config configs/party2.toml --out target/proof.2.proof
+wait $(jobs -p)
+timeEnd "mpc-generate-proof"
+
+timeStart "bb-generate-witness"
+nargo execute --package $CIRCUIT_NAME --silence-warnings
+timeEnd "bb-generate-witness"
+timeStart "bb-generate-proof"
+bb prove_ultra_keccak_honk -b $CIRCUIT -w target/$CIRCUIT_NAME.gz -o target/proof_bb.proof
+timeEnd "bb-generate-proof"
+
+# Create verification key
+co-noir create-vk --circuit $CIRCUIT --crs bn254_g1.dat --hasher keccak --vk target/verification_key
+echo "Verification key created"
+
+# verify proof
+co-noir verify --proof target/proof.0.proof --vk target/verification_key --hasher keccak --crs bn254_g2.dat
+echo "Proof verified"
+
+bb write_vk_ultra_keccak_honk -b $CIRCUIT -o target/verification_key_bb
+echo "Verification key created with bb"
+
+# check if verification keys are the same (yes/no)
+cmp -s target/verification_key target/verification_key_bb && echo "Verification keys are the same" || echo "Verification keys are different"
+cmp -s target/proof.0.proof target/proof_bb.proof && echo "Proofs are the same" || echo "Proofs are different"
+
+# Double check with bb
+echo "Verifying with bb"
+bb verify_ultra_keccak_honk -p target/proof.0.proof -k target/verification_key_bb
+echo "Proof verified with bb"
+
+# Check the bb proof
+bb verify_ultra_keccak_honk -p target/proof_bb.proof -k target/verification_key_bb
diff --git a/packages/contracts/noir/timer.sh b/packages/contracts/noir/timer.sh
new file mode 100644
index 0000000..84fa0c6
--- /dev/null
+++ b/packages/contracts/noir/timer.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+# Simple named timers: `timeStart NAME` ... `timeEnd NAME` prints elapsed wall-clock seconds.
+declare -A TIMERS
+
+timeStart() {
+    TIMERS["$1"]=$(date +%s) # wall-clock seconds
+}
+
+timeEnd() {
+    local start=${TIMERS["$1"]}
+    if [[ -z "$start" ]]; then
+        echo "Timer '$1' not found"
+        return 1
+    fi
+    local end=$(date +%s)
+    echo "$1 took $((end - start))s"
+    unset "TIMERS[$1]" # quoted so the array subscript is not glob-expanded
+}
diff --git a/packages/contracts/package.json b/packages/contracts/package.json
index 2d8c858..622da8c 100644
--- a/packages/contracts/package.json
+++ b/packages/contracts/package.json
@@ -57,6 +57,8 @@
"ky": "^1.7.2",
"lodash-es": "^4.17.21",
"ox": "^0.7.2",
+ "p-queue": "^8.1.0",
+ "smol-toml": "^1.3.1",
"ts-essentials": "^9.4.1",
"zod": "^3.23.8"
}
diff --git a/packages/contracts/sdk/LobService.ts b/packages/contracts/sdk/LobService.ts
new file mode 100644
index 0000000..481ec9c
--- /dev/null
+++ b/packages/contracts/sdk/LobService.ts
@@ -0,0 +1,228 @@
+import { uniq } from "lodash-es";
+import { assert, type AsyncOrSync } from "ts-essentials";
+import { type PoolERC20 } from "../typechain-types";
+import { NoteInputStruct } from "../typechain-types/contracts/PoolERC20";
+import { MpcProverService, type Side } from "./mpc/MpcNetworkService.js";
+import { splitInput } from "./mpc/utils.js";
+import {
+ CompleteWaAddress,
+ Erc20Note,
+ getRandomness,
+ TokenAmount,
+ type NoirAndBackend,
+ type PoolErc20Service,
+} from "./PoolErc20Service.js";
+import { type ITreesService } from "./RemoteTreesService.js";
+import { prove } from "./utils.js";
+
+export class LobService {
+ constructor(
+ private contract: PoolERC20,
+ private trees: ITreesService,
+ private poolErc20: PoolErc20Service,
+ private mpcProver: MpcProverService,
+ private circuits: AsyncOrSync<{
+ swap: NoirAndBackend;
+ }>,
+ ) {}
+
+ async swap(params: {
+ sellerSecretKey: string;
+ sellerNote: Erc20Note;
+ sellerAmount: TokenAmount;
+ buyerSecretKey: string;
+ buyerNote: Erc20Note;
+ buyerAmount: TokenAmount;
+ }) {
+ const swapCircuit = (await this.circuits).swap;
+ const sellerRandomness = await getRandomness();
+ const buyerRandomness = await getRandomness();
+
+ const sellerChangeNote = await Erc20Note.from({
+ owner: await CompleteWaAddress.fromSecretKey(params.sellerSecretKey),
+ amount: params.sellerNote.amount.sub(params.sellerAmount),
+ randomness: sellerRandomness,
+ });
+ const buyerChangeNote = await Erc20Note.from({
+ owner: await CompleteWaAddress.fromSecretKey(params.buyerSecretKey),
+ amount: params.buyerNote.amount.sub(params.buyerAmount),
+ randomness: buyerRandomness,
+ });
+ const sellerSwapNote = await Erc20Note.from({
+ owner: await CompleteWaAddress.fromSecretKey(params.sellerSecretKey),
+ amount: params.buyerAmount,
+ randomness: sellerRandomness,
+ });
+ const buyerSwapNote = await Erc20Note.from({
+ owner: await CompleteWaAddress.fromSecretKey(params.buyerSecretKey),
+ amount: params.sellerAmount,
+ randomness: buyerRandomness,
+ });
+
+ const seller_order = {
+ sell_amount: await params.sellerAmount.toNoir(),
+ buy_amount: await params.buyerAmount.toNoir(),
+ randomness: sellerRandomness,
+ };
+ const buyer_order = {
+ sell_amount: await params.buyerAmount.toNoir(),
+ buy_amount: await params.sellerAmount.toNoir(),
+ randomness: buyerRandomness,
+ };
+
+ const input = {
+ tree_roots: await this.trees.getTreeRoots(),
+ seller_secret_key: params.sellerSecretKey,
+ seller_note: await this.poolErc20.toNoteConsumptionInputs(
+ params.sellerSecretKey,
+ params.sellerNote,
+ ),
+ seller_order,
+ seller_randomness: sellerRandomness,
+ buyer_secret_key: params.buyerSecretKey,
+ buyer_note: await this.poolErc20.toNoteConsumptionInputs(
+ params.buyerSecretKey,
+ params.buyerNote,
+ ),
+ buyer_order,
+ buyer_randomness: buyerRandomness,
+ };
+ const { proof } = await prove("swap", swapCircuit, input);
+ const noteInputs: [
+ NoteInputStruct,
+ NoteInputStruct,
+ NoteInputStruct,
+ NoteInputStruct,
+ ] = [
+ await sellerChangeNote.toSolidityNoteInput(),
+ await buyerSwapNote.toSolidityNoteInput(),
+ await buyerChangeNote.toSolidityNoteInput(),
+ await sellerSwapNote.toSolidityNoteInput(),
+ ];
+ const nullifiers: [string, string] = [
+ (
+ await params.sellerNote.computeNullifier(params.sellerSecretKey)
+ ).toString(),
+ (
+ await params.buyerNote.computeNullifier(params.buyerSecretKey)
+ ).toString(),
+ ];
+ const tx = await this.contract.swap(proof, noteInputs, nullifiers);
+ const receipt = await tx.wait();
+ console.log("swap gas used", receipt?.gasUsed);
+ }
+
+ async requestSwap(params: {
+ secretKey: string;
+ note: Erc20Note;
+ sellAmount: TokenAmount;
+ buyAmount: TokenAmount;
+ }) {
+ const orderId = await getRandomness();
+ console.log(
+ "order ID",
+ orderId,
+ params.sellAmount.amount,
+ "->",
+ params.buyAmount.amount,
+ );
+
+ const swapCircuit = (await this.circuits).swap;
+ const randomness = await getRandomness();
+
+ const changeNote = await Erc20Note.from({
+ owner: await CompleteWaAddress.fromSecretKey(params.secretKey),
+ amount: params.note.amount.sub(params.sellAmount),
+ randomness,
+ });
+ const swapNote = await Erc20Note.from({
+ owner: await CompleteWaAddress.fromSecretKey(params.secretKey),
+ amount: params.buyAmount,
+ randomness,
+ });
+
+ const order = {
+ sell_amount: await params.sellAmount.toNoir(),
+ buy_amount: await params.buyAmount.toNoir(),
+ randomness,
+ };
+
+ // deterministic side
+ const side: Side =
+ params.sellAmount.token.toLowerCase() <
+ params.buyAmount.token.toLowerCase()
+ ? "seller"
+ : "buyer";
+ const input = {
+ [`${side}_secret_key`]: params.secretKey,
+ [`${side}_note`]: await this.poolErc20.toNoteConsumptionInputs(
+ params.secretKey,
+ params.note,
+ ),
+ [`${side}_order`]: order,
+ [`${side}_randomness`]: randomness,
+ };
+ // only one trading party need to provide public inputs
+ const inputPublic =
+ side === "seller"
+ ? {
+ tree_roots: await this.trees.getTreeRoots(),
+ }
+ : undefined;
+ const inputsShared = await splitInput(swapCircuit.circuit, {
+ // merge public inputs into first input because it does not matter how public inputs are passed
+ ...input,
+ ...inputPublic,
+ });
+ const proofs = await this.mpcProver.prove(inputsShared, {
+ orderId,
+ side,
+ circuit: swapCircuit.circuit,
+ });
+ assert(uniq(proofs).length === 1, "proofs mismatch");
+ const proof = proofs[0]!;
+ return {
+ orderId,
+ proof,
+ side,
+ changeNote: await changeNote.toSolidityNoteInput(),
+ swapNote: await swapNote.toSolidityNoteInput(),
+ nullifier: (
+ await params.note.computeNullifier(params.secretKey)
+ ).toString(),
+ };
+ }
+
+ async commitSwap(params: { swapA: SwapResult; swapB: SwapResult }) {
+ const [sellerSwap, buyerSwap] =
+ params.swapA.side === "seller"
+ ? [params.swapA, params.swapB]
+ : [params.swapB, params.swapA];
+
+ assert(
+ sellerSwap.orderId !== buyerSwap.orderId,
+ "order ids must be different",
+ ); // sanity check
+
+ assert(
+ sellerSwap.proof === buyerSwap.proof,
+ `seller & buyer proof mismatch: ${sellerSwap.orderId} ${buyerSwap.orderId}`,
+ );
+ const proof = sellerSwap.proof;
+
+ const tx = await this.contract.swap(
+ proof,
+ [
+ sellerSwap.changeNote,
+ buyerSwap.swapNote,
+ buyerSwap.changeNote,
+ sellerSwap.swapNote,
+ ],
+ [sellerSwap.nullifier, buyerSwap.nullifier],
+ );
+ const receipt = await tx.wait();
+ console.log("swap gas used", receipt?.gasUsed);
+ }
+}
+
+export type SwapResult = Awaited<ReturnType<LobService["requestSwap"]>>;
diff --git a/packages/contracts/sdk/PoolErc20Service.ts b/packages/contracts/sdk/PoolErc20Service.ts
index 7ed147f..f6d9955 100644
--- a/packages/contracts/sdk/PoolErc20Service.ts
+++ b/packages/contracts/sdk/PoolErc20Service.ts
@@ -1,6 +1,6 @@
import type { Fr } from "@aztec/aztec.js";
import type { UltraHonkBackend } from "@aztec/bb.js";
-import type { Noir } from "@noir-lang/noir_js";
+import type { CompiledCircuit, Noir } from "@noir-lang/noir_js";
import { utils } from "@repo/utils";
import { ethers } from "ethers";
import { compact, orderBy, times } from "lodash-es";
@@ -8,7 +8,7 @@ import { assert, type AsyncOrSync } from "ts-essentials";
import { type PoolERC20 } from "../typechain-types";
import { EncryptionService } from "./EncryptionService";
import type { ITreesService } from "./RemoteTreesService";
-import { fromNoirU256, prove, toNoirU256, U256_LIMBS } from "./utils.js";
+import { prove, toNoirU256 } from "./utils.js";
// Note: keep in sync with other languages
export const NOTE_HASH_TREE_HEIGHT = 40;
@@ -368,7 +368,8 @@ export class Erc20Note {
return [
BigInt(this.owner.address),
BigInt(this.amount.token),
- ...amount.amount.limbs.map((x) => BigInt(x)),
+ // ...amount.amount.limbs.map((x) => BigInt(x)),
+ BigInt(amount.amount.value),
BigInt(this.randomness),
];
}
@@ -385,9 +386,10 @@ export class Erc20Note {
),
amount: await TokenAmount.from({
token: ethers.zeroPadValue(fieldsStr[1]!, 20),
- amount: fromNoirU256({ limbs: fields.slice(2, 2 + U256_LIMBS) }),
+ // amount: fromNoirU256({ limbs: fields.slice(2, 2 + U256_LIMBS) }),
+ amount: ethers.toBigInt(fieldsStr[2]!),
}),
- randomness: ethers.zeroPadValue(fieldsStr[2 + U256_LIMBS]!, 32),
+ randomness: ethers.zeroPadValue(fieldsStr[3]!, 32),
});
}
@@ -523,6 +525,7 @@ export class CompleteWaAddress {
}
export type NoirAndBackend = {
+ circuit: CompiledCircuit;
noir: Noir;
backend: UltraHonkBackend;
};
diff --git a/packages/contracts/sdk/backendSdk.ts b/packages/contracts/sdk/backendSdk.ts
index c3bd5a0..32a180a 100644
--- a/packages/contracts/sdk/backendSdk.ts
+++ b/packages/contracts/sdk/backendSdk.ts
@@ -24,7 +24,7 @@ export function createBackendSdk(
`${process.env.HOME}/.bb/bb`,
circuit,
) as unknown as UltraHonkBackend);
- return { noir, backend };
+ return { circuit, noir, backend };
}),
});
return {
diff --git a/packages/contracts/sdk/mpc/.gitignore b/packages/contracts/sdk/mpc/.gitignore
new file mode 100644
index 0000000..3f38e0c
--- /dev/null
+++ b/packages/contracts/sdk/mpc/.gitignore
@@ -0,0 +1,2 @@
+work-dirs
+configs
diff --git a/packages/contracts/sdk/mpc/MpcNetworkService.ts b/packages/contracts/sdk/mpc/MpcNetworkService.ts
new file mode 100644
index 0000000..c85e346
--- /dev/null
+++ b/packages/contracts/sdk/mpc/MpcNetworkService.ts
@@ -0,0 +1,212 @@
+import { UltraHonkBackend } from "@aztec/bb.js";
+import type { CompiledCircuit } from "@noir-lang/noir_js";
+import { utils } from "@repo/utils";
+import { ethers } from "ethers";
+import fs from "node:fs";
+import os from "node:os";
+import path from "node:path";
+import PQueue, { type QueueAddOptions } from "p-queue";
+import { promiseWithResolvers } from "../utils.js";
+import { inWorkingDir, makeRunCommand, splitInput } from "./utils.js";
+
+export class MpcProverService {
+ readonly #parties = {
+ 0: new MpcProverPartyService(0),
+ 1: new MpcProverPartyService(1),
+ 2: new MpcProverPartyService(2),
+ };
+
+ async prove(
+    inputsShared: Awaited<ReturnType<typeof splitInput>>,
+ params: {
+ orderId: OrderId;
+ side: Side;
+ circuit: CompiledCircuit;
+ },
+ ) {
+ return await Promise.all(
+ inputsShared.map(async ({ partyIndex, inputShared }) => {
+ return await this.#parties[partyIndex].requestProveAsParty({
+ ...params,
+ inputShared,
+ });
+ }),
+ );
+ }
+}
+
+class MpcProverPartyService {
+  #storage: Map<OrderId, Order> = new Map();
+ #queue = new PQueue({ concurrency: 1 });
+
+ constructor(readonly partyIndex: PartyIndex) {}
+
+ async requestProveAsParty(params: {
+ orderId: OrderId;
+ side: Side;
+ inputShared: string;
+ circuit: CompiledCircuit;
+ }) {
+ // TODO(security): authorization
+ if (this.#storage.has(params.orderId)) {
+ throw new Error(`order already exists ${params.orderId}`);
+ }
+ const order: Order = {
+ id: params.orderId,
+ inputShared: params.inputShared,
+ side: params.side,
+      result: promiseWithResolvers<string>(),
+ };
+ this.#storage.set(params.orderId, order);
+
+ // add this order to other order's queue
+ // TODO(perf): this is O(N^2) but we should do better
+ for (const otherOrder of this.#storage.values()) {
+ this.#addOrdersToQueue({
+ orderAId: order.id,
+ orderBId: otherOrder.id,
+ circuit: params.circuit,
+ });
+ }
+
+ return await order.result.promise;
+ }
+
+ #addOrdersToQueue(params: {
+ orderAId: OrderId;
+ orderBId: OrderId;
+ circuit: CompiledCircuit;
+ }) {
+ const options: QueueAddOptions = {
+ throwOnTimeout: true,
+ // this is a hack to enforce the order of execution matches across all MPC parties
+ priority: Number(
+ ethers.getBigInt(
+ ethers.id([params.orderAId, params.orderBId].sort().join("")),
+ ) % BigInt(Number.MAX_SAFE_INTEGER),
+ ),
+ };
+ this.#queue.add(async () => {
+ await utils.sleep(500); // just to make sure all parties got the order over network
+ const orderA = this.#storage.get(params.orderAId);
+ const orderB = this.#storage.get(params.orderBId);
+ if (!orderA || !orderB) {
+ // one of the orders was already matched
+ return;
+ }
+ if (orderA.id === orderB.id) {
+ // can't match with itself
+ return;
+ }
+ if (orderA.side === orderB.side) {
+ // pre-check that orders are on opposite sides
+ return;
+ }
+
+ // deterministic ordering
+ const [order0, order1] =
+ orderA.side === "seller" ? [orderA, orderB] : [orderB, orderA];
+ console.log("executing orders", this.partyIndex, order0.id, order1.id);
+ try {
+ const { proof } = await proveAsParty({
+ circuit: params.circuit,
+ partyIndex: this.partyIndex,
+ input0Shared: order0.inputShared,
+ input1Shared: order1.inputShared,
+ });
+ const proofHex = ethers.hexlify(proof);
+ order0.result.resolve(proofHex);
+ order1.result.resolve(proofHex);
+ this.#storage.delete(order0.id);
+ this.#storage.delete(order1.id);
+ console.log(
+ `orders matched: ${this.partyIndex} ${order0.id} ${order1.id}`,
+ );
+ } catch (error) {
+ console.log(
+ `orders did not match: ${this.partyIndex} ${order0.id} ${order1.id}`,
+ );
+ }
+ }, options);
+ }
+}
+
+async function proveAsParty(params: {
+ partyIndex: number;
+ circuit: CompiledCircuit;
+ input0Shared: string;
+ input1Shared: string;
+}) {
+ return await inWorkingDir(async (workingDir) => {
+ for (const [traderIndex, inputShared] of [
+ params.input0Shared,
+ params.input1Shared,
+ ].entries()) {
+ fs.writeFileSync(
+ path.join(
+ workingDir,
+ `Prover${traderIndex}.toml.${params.partyIndex}.shared`,
+ ),
+ ethers.getBytes(inputShared),
+ );
+ }
+
+ const circuitPath = path.join(workingDir, "circuit.json");
+ fs.writeFileSync(circuitPath, JSON.stringify(params.circuit));
+
+ const runCommand = makeRunCommand(__dirname);
+ await runCommand("./run-party.sh", [
+ workingDir,
+ circuitPath,
+ params.partyIndex,
+ ]);
+
+ const proof = fs.readFileSync(
+ path.join(workingDir, `proof.${params.partyIndex}.proof`),
+ );
+ const publicInputs = JSON.parse(
+ fs.readFileSync(path.join(workingDir, "public-input.json"), "utf-8"),
+ );
+
+ // pre-verify proof
+ const backend = new UltraHonkBackend(params.circuit.bytecode, {
+ threads: os.cpus().length,
+ });
+ let verified: boolean;
+ try {
+ verified = await backend.verifyProof(
+ { proof, publicInputs },
+ { keccak: true },
+ );
+ } catch (e: any) {
+ if (e.message?.includes("unreachable")) {
+ throw new Error("mpc generated invalid proof: failed in runtime");
+ }
+ throw e;
+ } finally {
+ await backend.destroy();
+ }
+ if (!verified) {
+ throw new Error("mpc generated invalid proof: returned false");
+ }
+
+ return {
+ proof: proof.slice(4), // remove length
+ publicInputs,
+ };
+ });
+}
+
+export type OrderId = string & { __brand: "OrderId" };
+export type PartyIndex = 0 | 1 | 2;
+/**
+ * Deterministically determined based on the tokens being swapped
+ */
+export type Side = "seller" | "buyer";
+
+type Order = {
+ side: Side;
+ id: OrderId;
+ inputShared: string;
+  result: ReturnType<typeof promiseWithResolvers<string>>;
+};
diff --git a/packages/contracts/sdk/mpc/run-party.sh b/packages/contracts/sdk/mpc/run-party.sh
new file mode 100755
index 0000000..7107762
--- /dev/null
+++ b/packages/contracts/sdk/mpc/run-party.sh
@@ -0,0 +1,37 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+
+source ../../noir/timer.sh
+
+if [ $# -ne 3 ]; then
+  echo "Usage: $0 <work_dir> <circuit> <party_index>"
+ exit 1
+fi
+WORK_DIR=$1
+CIRCUIT=$2
+PARTY_INDEX=$3
+
+PROVER0_TOML=$WORK_DIR/Prover0.toml
+PROVER1_TOML=$WORK_DIR/Prover1.toml
+# copy from https://github.com/TaceoLabs/co-snarks/tree/e96a712dfa987fb39e17232ef11d067b29b62aef/co-noir/co-noir/examples/configs
+PARTY_CONFIGS_DIR=configs
+
+# merge inputs into single input file
+timeStart "merge-input-shares"
+co-noir merge-input-shares --inputs $PROVER0_TOML.$PARTY_INDEX.shared --inputs $PROVER1_TOML.$PARTY_INDEX.shared --protocol REP3 --out $WORK_DIR/Prover.toml.$PARTY_INDEX.shared
+timeEnd "merge-input-shares"
+
+# run witness extension in MPC
+timeStart "mpc-generate-witness"
+co-noir generate-witness --input $WORK_DIR/Prover.toml.$PARTY_INDEX.shared --circuit $CIRCUIT --protocol REP3 --config $PARTY_CONFIGS_DIR/party$PARTY_INDEX.toml --out $WORK_DIR/witness.gz.$PARTY_INDEX.shared
+timeEnd "mpc-generate-witness"
+
+# run proving in MPC
+timeStart "mpc-build-proving-key"
+co-noir build-proving-key --witness $WORK_DIR/witness.gz.$PARTY_INDEX.shared --circuit $CIRCUIT --protocol REP3 --config $PARTY_CONFIGS_DIR/party$PARTY_INDEX.toml --out $WORK_DIR/proving_key.$PARTY_INDEX
+timeEnd "mpc-build-proving-key"
+
+timeStart "mpc-generate-proof"
+co-noir generate-proof --proving-key $WORK_DIR/proving_key.$PARTY_INDEX --protocol REP3 --hasher keccak --crs ~/.bb-crs/bn254_g1.dat --config $PARTY_CONFIGS_DIR/party$PARTY_INDEX.toml --out $WORK_DIR/proof.$PARTY_INDEX.proof --public-input $WORK_DIR/public-input.json
+timeEnd "mpc-generate-proof"
diff --git a/packages/contracts/sdk/mpc/split-inputs.sh b/packages/contracts/sdk/mpc/split-inputs.sh
new file mode 100755
index 0000000..1846efb
--- /dev/null
+++ b/packages/contracts/sdk/mpc/split-inputs.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+
+if [ $# -ne 2 ]; then
+  echo "Usage: $0 <prover_toml> <circuit>"
+ exit 1
+fi
+
+PROVER_TOML=$1
+CIRCUIT=$2
+
+WORK_DIR=$(dirname $PROVER_TOML)
+
+co-noir split-input --circuit $CIRCUIT --input $PROVER_TOML --protocol REP3 --out-dir $WORK_DIR
diff --git a/packages/contracts/sdk/mpc/utils.ts b/packages/contracts/sdk/mpc/utils.ts
new file mode 100644
index 0000000..b532392
--- /dev/null
+++ b/packages/contracts/sdk/mpc/utils.ts
@@ -0,0 +1,77 @@
+import type { CompiledCircuit, InputMap } from "@noir-lang/noir_js";
+import { ethers } from "ethers";
+import { range } from "lodash-es";
+import fs from "node:fs";
+import path from "node:path";
+import toml from "smol-toml";
+import type { PartyIndex } from "./MpcNetworkService.js";
+
+export async function splitInput(circuit: CompiledCircuit, input: InputMap) {
+ return await inWorkingDir(async (workingDir) => {
+ const proverPath = path.join(workingDir, "ProverX.toml");
+ fs.writeFileSync(proverPath, toml.stringify(input));
+ const circuitPath = path.join(workingDir, "circuit.json");
+ fs.writeFileSync(circuitPath, JSON.stringify(circuit));
+ const runCommand = makeRunCommand(__dirname);
+ await runCommand("./split-inputs.sh", [proverPath, circuitPath]);
+ const shared = range(3).map((i) => {
+ const x = Uint8Array.from(fs.readFileSync(`${proverPath}.${i}.shared`));
+ return ethers.hexlify(x);
+ });
+ return Array.from(shared.entries()).map(([partyIndex, inputShared]) => ({
+ partyIndex: partyIndex as PartyIndex,
+ inputShared,
+ }));
+ });
+}
+
+export async function inWorkingDir<T>(f: (workingDir: string) => Promise<T>) {
+ const id = crypto.randomUUID();
+ const workingDir = path.join(__dirname, "work-dirs", id);
+ fs.mkdirSync(workingDir, { recursive: true });
+ try {
+ return await f(workingDir);
+ } finally {
+ fs.rmSync(workingDir, { recursive: true });
+ }
+}
+
+export const makeRunCommand =
+ (cwd?: string) =>
+ async (command: string, args: (string | number)[] = []) => {
+ const { spawn } = await import("node:child_process");
+
+ const spawned = spawn(
+ command,
+ args.map((arg) => arg.toString()),
+ { cwd },
+ );
+ spawned.stdout.on("data", (data) => {
+ process.stdout.write(data);
+ });
+
+ spawned.stderr.on("data", (data) => {
+ process.stderr.write(data);
+ });
+
+    return await new Promise<void>((resolve, reject) => {
+ spawned.on("close", (code: number) => {
+ if (code !== 0) {
+ reject(new Error(`Process exited with code ${code}`));
+ return;
+ }
+
+ resolve();
+ });
+
+ spawned.on("error", (err) => {
+ reject(
+ new Error(
+ `Error executing command \`${
+ command + " " + args.join(" ")
+ }\`: ${err.message}`,
+ ),
+ );
+ });
+ });
+ };
diff --git a/packages/contracts/sdk/sdk.ts b/packages/contracts/sdk/sdk.ts
index 714f42f..8c76bcc 100644
--- a/packages/contracts/sdk/sdk.ts
+++ b/packages/contracts/sdk/sdk.ts
@@ -4,6 +4,8 @@ import { mapValues } from "lodash-es";
import type { AsyncOrSync } from "ts-essentials";
import type { PoolERC20 } from "../typechain-types/index.js";
import { EncryptionService } from "./EncryptionService.js";
+import { LobService } from "./LobService.js";
+import { MpcProverService } from "./mpc/MpcNetworkService.js";
import { PoolErc20Service } from "./PoolErc20Service.js";
import { type ITreesService } from "./RemoteTreesService.js";
@@ -25,7 +27,7 @@ export function createInterfaceSdk(
coreSdk: ReturnType,
trees: ITreesService,
compiledCircuits: Record<
- "shield" | "unshield" | "join" | "transfer",
+ "shield" | "unshield" | "join" | "transfer" | "swap",
AsyncOrSync
>,
) {
@@ -38,9 +40,18 @@ export function createInterfaceSdk(
trees,
circuits,
);
+ const mpcProver = new MpcProverService();
+ const lob = new LobService(
+ coreSdk.contract,
+ trees,
+ poolErc20,
+ mpcProver,
+ circuits,
+ );
return {
poolErc20,
+ lob,
};
}
@@ -50,5 +61,5 @@ async function getCircuit(artifact: AsyncOrSync) {
artifact = await artifact;
const noir = new Noir(artifact);
const backend = new UltraHonkBackend(artifact.bytecode);
- return { noir, backend };
+ return { circuit: artifact, noir, backend };
}
diff --git a/packages/contracts/sdk/utils.ts b/packages/contracts/sdk/utils.ts
index 662c8d0..229bc97 100644
--- a/packages/contracts/sdk/utils.ts
+++ b/packages/contracts/sdk/utils.ts
@@ -52,11 +52,12 @@ export const U256_LIMBS = 3;
export const U256_LIMB_SIZE = 120;
export function toNoirU256(value: bigint) {
- assert(value >= 0n && value < 2n ** 256n, "invalid U256 value");
- const limbs = splitBigIntToLimbs(value, U256_LIMB_SIZE, U256_LIMBS).map(
- (x) => "0x" + x.toString(16),
- );
- return { limbs };
+ return { value: value.toString() };
+ // assert(value >= 0n && value < 2n ** 256n, "invalid U256 value");
+ // const limbs = splitBigIntToLimbs(value, U256_LIMB_SIZE, U256_LIMBS).map(
+ // (x) => "0x" + x.toString(16),
+ // );
+ // return { limbs };
}
export function fromNoirU256(value: { limbs: (bigint | string)[] }) {
@@ -81,6 +82,19 @@ export async function prove(
return { proof, witness, returnValue, publicInputs };
}
+export function promiseWithResolvers<T>(): {
+  promise: Promise<T>;
+ resolve: (value: T) => void;
+ reject: (reason: unknown) => void;
+} {
+ const ret: any = {};
+ ret.promise = new Promise((resolve, reject) => {
+ ret.resolve = resolve;
+ ret.reject = reject;
+ });
+ return ret;
+}
+
export function readNativeHonkProof(pathToProofDir: string) {
const proof = fs.readFileSync(path.join(pathToProofDir, "proof"));
const publicInputs = fs.readFileSync(
diff --git a/packages/contracts/test/PoolERC20.test.ts b/packages/contracts/test/PoolERC20.test.ts
index cf1f81a..b70edeb 100644
--- a/packages/contracts/test/PoolERC20.test.ts
+++ b/packages/contracts/test/PoolERC20.test.ts
@@ -3,6 +3,7 @@ import { expect } from "chai";
import { ethers, noir, typedDeployments } from "hardhat";
import type { sdk as interfaceSdk } from "../sdk";
import type { createBackendSdk } from "../sdk/backendSdk";
+import { SwapResult } from "../sdk/LobService";
import { parseUnits, snapshottedBeforeEach } from "../shared/utils";
import {
MockERC20,
@@ -42,6 +43,10 @@ describe("PoolERC20", () => {
await usdc.mintForTests(alice, await parseUnits(usdc, "1000000"));
await usdc.connect(alice).approve(pool, ethers.MaxUint256);
+ await btc.mintForTests(bob, await parseUnits(btc, "1000000"));
+ await btc.connect(bob).approve(pool, ethers.MaxUint256);
+ await btc.mintForTests(charlie, await parseUnits(btc, "1000000"));
+ await btc.connect(charlie).approve(pool, ethers.MaxUint256);
({ CompleteWaAddress, TokenAmount } = (
await tsImport("../sdk", __filename)
@@ -66,6 +71,7 @@ describe("PoolERC20", () => {
unshield: noir.getCircuitJson("erc20_unshield"),
join: noir.getCircuitJson("erc20_join"),
transfer: noir.getCircuitJson("erc20_transfer"),
+ swap: noir.getCircuitJson("lob_router_swap"),
});
backendSdk = createBackendSdk(coreSdk, trees, {
@@ -370,4 +376,257 @@ describe("PoolERC20", () => {
await sdk.poolErc20.getBalanceNotesOf(usdc, aliceSecretKey),
).to.deep.equal([changeNote]);
});
+
+ it("swaps", async () => {
+ const { note: aliceNote } = await sdk.poolErc20.shield({
+ account: alice,
+ token: usdc,
+ amount: 100n,
+ secretKey: aliceSecretKey,
+ });
+ const { note: bobNote } = await sdk.poolErc20.shield({
+ account: bob,
+ token: btc,
+ amount: 10n,
+ secretKey: bobSecretKey,
+ });
+
+ await backendSdk.rollup.rollup();
+
+ await sdk.lob.swap({
+ sellerSecretKey: aliceSecretKey,
+ sellerNote: aliceNote,
+ sellerAmount: await TokenAmount.from({
+ token: await usdc.getAddress(),
+ amount: 70n,
+ }),
+ buyerSecretKey: bobSecretKey,
+ buyerNote: bobNote,
+ buyerAmount: await TokenAmount.from({
+ token: await btc.getAddress(),
+ amount: 2n,
+ }),
+ });
+
+ await backendSdk.rollup.rollup();
+
+ expect(await sdk.poolErc20.balanceOf(usdc, aliceSecretKey)).to.equal(30n);
+ expect(await sdk.poolErc20.balanceOf(btc, aliceSecretKey)).to.equal(2n);
+ expect(await sdk.poolErc20.balanceOf(usdc, bobSecretKey)).to.equal(70n);
+ expect(await sdk.poolErc20.balanceOf(btc, bobSecretKey)).to.equal(8n);
+ });
+
+ it("swaps mpc", async () => {
+ if (process.env.CI) {
+ // TODO: install co-noir on github actions and remove this
+ console.log("skipping mpc swap test");
+ return;
+ }
+
+ const { note: aliceNote } = await sdk.poolErc20.shield({
+ account: alice,
+ token: usdc,
+ amount: 100n,
+ secretKey: aliceSecretKey,
+ });
+ const { note: bobNote } = await sdk.poolErc20.shield({
+ account: bob,
+ token: btc,
+ amount: 10n,
+ secretKey: bobSecretKey,
+ });
+
+ await backendSdk.rollup.rollup();
+
+ const sellerAmount = await TokenAmount.from({
+ token: await usdc.getAddress(),
+ amount: 70n,
+ });
+ const buyerAmount = await TokenAmount.from({
+ token: await btc.getAddress(),
+ amount: 2n,
+ });
+
+ const swapAlicePromise = sdk.lob.requestSwap({
+ secretKey: aliceSecretKey,
+ note: aliceNote,
+ sellAmount: sellerAmount,
+ buyAmount: buyerAmount,
+ });
+ const swapBobPromise = sdk.lob.requestSwap({
+ secretKey: bobSecretKey,
+ note: bobNote,
+ sellAmount: buyerAmount,
+ buyAmount: sellerAmount,
+ });
+ const [swapAlice, swapBob] = await Promise.all([
+ swapAlicePromise,
+ swapBobPromise,
+ ]);
+ await sdk.lob.commitSwap({ swapA: swapAlice, swapB: swapBob });
+
+ await backendSdk.rollup.rollup();
+
+ expect(await sdk.poolErc20.balanceOf(usdc, aliceSecretKey)).to.equal(30n);
+ expect(await sdk.poolErc20.balanceOf(btc, aliceSecretKey)).to.equal(2n);
+ expect(await sdk.poolErc20.balanceOf(usdc, bobSecretKey)).to.equal(70n);
+ expect(await sdk.poolErc20.balanceOf(btc, bobSecretKey)).to.equal(8n);
+ });
+
+ it("swaps 4 orders", async () => {
+ if (process.env.CI) {
+ // TODO: install co-noir on github actions and remove this
+ return;
+ }
+
+ const { note: aliceNote0 } = await sdk.poolErc20.shield({
+ account: alice,
+ token: usdc,
+ amount: 100n,
+ secretKey: aliceSecretKey,
+ });
+ const { note: aliceNote1 } = await sdk.poolErc20.shield({
+ account: alice,
+ token: usdc,
+ amount: 100n,
+ secretKey: aliceSecretKey,
+ });
+ const { note: bobNote } = await sdk.poolErc20.shield({
+ account: bob,
+ token: btc,
+ amount: 10n,
+ secretKey: bobSecretKey,
+ });
+ const { note: charlieNote } = await sdk.poolErc20.shield({
+ account: charlie,
+ token: btc,
+ amount: 20n,
+ secretKey: charlieSecretKey,
+ });
+ await backendSdk.rollup.rollup();
+
+ let swaps0Promise: Promise<[SwapResult, SwapResult]>;
+ {
+ // alice <-> bob
+ const sellerAmount = await TokenAmount.from({
+ token: await usdc.getAddress(),
+ amount: 70n,
+ });
+ const buyerAmount = await TokenAmount.from({
+ token: await btc.getAddress(),
+ amount: 2n,
+ });
+ swaps0Promise = Promise.all([
+ sdk.lob.requestSwap({
+ secretKey: aliceSecretKey,
+ note: aliceNote0,
+ sellAmount: sellerAmount,
+ buyAmount: buyerAmount,
+ }),
+ sdk.lob.requestSwap({
+ secretKey: bobSecretKey,
+ note: bobNote,
+ sellAmount: buyerAmount,
+ buyAmount: sellerAmount,
+ }),
+ ]);
+ }
+
+ let swaps1Promise: Promise<[SwapResult, SwapResult]>;
+ {
+ // alice <-> charlie
+ const sellerAmount = await TokenAmount.from({
+ token: await usdc.getAddress(),
+ amount: 30n,
+ });
+ const buyerAmount = await TokenAmount.from({
+ token: await btc.getAddress(),
+ amount: 1n,
+ });
+ swaps1Promise = Promise.all([
+ sdk.lob.requestSwap({
+ secretKey: aliceSecretKey,
+ note: aliceNote1,
+ sellAmount: sellerAmount,
+ buyAmount: buyerAmount,
+ }),
+ sdk.lob.requestSwap({
+ secretKey: charlieSecretKey,
+ note: charlieNote,
+ sellAmount: buyerAmount,
+ buyAmount: sellerAmount,
+ }),
+ ]);
+ }
+
+ const swaps0 = await swaps0Promise;
+ const swaps1 = await swaps1Promise;
+ await sdk.lob.commitSwap({ swapA: swaps0[0], swapB: swaps0[1] });
+ await sdk.lob.commitSwap({ swapA: swaps1[0], swapB: swaps1[1] });
+ await backendSdk.rollup.rollup();
+
+ expect(await sdk.poolErc20.balanceOf(usdc, aliceSecretKey)).to.equal(
+ 200n - 70n - 30n,
+ );
+ expect(await sdk.poolErc20.balanceOf(btc, aliceSecretKey)).to.equal(
+ 2n + 1n,
+ );
+
+ expect(await sdk.poolErc20.balanceOf(usdc, bobSecretKey)).to.equal(70n);
+ expect(await sdk.poolErc20.balanceOf(btc, bobSecretKey)).to.equal(8n);
+
+ expect(await sdk.poolErc20.balanceOf(usdc, charlieSecretKey)).to.equal(30n);
+ expect(await sdk.poolErc20.balanceOf(btc, charlieSecretKey)).to.equal(19n);
+ });
+
+ // TODO: fix this test and re-enable. It never finishes because it does not throw if orders do no match anymore.
+ it.skip("fails to swap if order amounts do not match", async () => {
+ if (process.env.CI) {
+ // TODO: install co-noir on github actions and remove this
+ return;
+ }
+
+ const { note: aliceNote } = await sdk.poolErc20.shield({
+ account: alice,
+ token: usdc,
+ amount: 100n,
+ secretKey: aliceSecretKey,
+ });
+ const { note: bobNote } = await sdk.poolErc20.shield({
+ account: bob,
+ token: btc,
+ amount: 10n,
+ secretKey: bobSecretKey,
+ });
+
+ await backendSdk.rollup.rollup();
+
+ const sellerAmount = await TokenAmount.from({
+ token: await usdc.getAddress(),
+ amount: 70n,
+ });
+ const buyerAmount = await TokenAmount.from({
+ token: await btc.getAddress(),
+ amount: 2n,
+ });
+
+ const swapAlicePromise = sdk.lob.requestSwap({
+ secretKey: aliceSecretKey,
+ note: aliceNote,
+ sellAmount: sellerAmount,
+ buyAmount: buyerAmount,
+ });
+ const swapBobPromise = sdk.lob.requestSwap({
+ secretKey: bobSecretKey,
+ note: bobNote,
+ sellAmount: buyerAmount,
+ buyAmount: await TokenAmount.from({
+ token: await usdc.getAddress(),
+ amount: 71n, // amount differs
+ }),
+ });
+ await expect(
+ Promise.all([swapAlicePromise, swapBobPromise]),
+ ).to.be.rejectedWith("mpc generated invalid proof");
+ });
});
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 221651a..27e401a 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -180,6 +180,12 @@ importers:
ox:
specifier: ^0.7.2
version: 0.7.2(typescript@5.6.3)(zod@3.23.8)
+ p-queue:
+ specifier: ^8.1.0
+ version: 8.1.0
+ smol-toml:
+ specifier: ^1.3.1
+ version: 1.3.1
ts-essentials:
specifier: ^9.4.1
version: 9.4.2(typescript@5.6.3)
@@ -4503,6 +4509,14 @@ packages:
resolution: {integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==}
engines: {node: '>=10'}
+ p-queue@8.1.0:
+ resolution: {integrity: sha512-mxLDbbGIBEXTJL0zEx8JIylaj3xQ7Z/7eEVjcF9fJX4DBiH9oqe+oahYnlKKxm0Ci9TlWTyhSHgygxMxjIB2jw==}
+ engines: {node: '>=18'}
+
+ p-timeout@6.1.4:
+ resolution: {integrity: sha512-MyIV3ZA/PmyBN/ud8vV9XzwTrNtR4jFrObymZYnZqMmW0zA8Z17vnT0rBgFE/TlohB+YCHqXMgZzb3Csp49vqg==}
+ engines: {node: '>=14.16'}
+
package-json-from-dist@1.0.1:
resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==}
@@ -5082,6 +5096,10 @@ packages:
resolution: {integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==}
engines: {node: '>=10'}
+ smol-toml@1.3.1:
+ resolution: {integrity: sha512-tEYNll18pPKHroYSmLLrksq233j021G0giwW7P3D24jC54pQ5W5BXMsQ/Mvw1OJCmEYDgY+lrzT+3nNUtoNfXQ==}
+ engines: {node: '>= 18'}
+
solc@0.8.26:
resolution: {integrity: sha512-yiPQNVf5rBFHwN6SIf3TUUvVAFKcQqmSUFeq+fb6pNRCo0ZCgpYOZDi3BVoezCPIAcKrVYd/qXlBLUP9wVrZ9g==}
engines: {node: '>=10.0.0'}
@@ -10837,6 +10855,13 @@ snapshots:
dependencies:
aggregate-error: 3.1.0
+ p-queue@8.1.0:
+ dependencies:
+ eventemitter3: 5.0.1
+ p-timeout: 6.1.4
+
+ p-timeout@6.1.4: {}
+
package-json-from-dist@1.0.1: {}
pako@1.0.11: {}
@@ -11396,6 +11421,8 @@ snapshots:
astral-regex: 2.0.0
is-fullwidth-code-point: 3.0.0
+ smol-toml@1.3.1: {}
+
solc@0.8.26(debug@4.3.7):
dependencies:
command-exists: 1.2.9