Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion crates/alerter/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ impl OrderBookApi {
// untouched.
fn convert_eth_to_weth(token: Address) -> Address {
const WETH: Address = address!("0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2");
if token == Address::from_slice(&BUY_ETH_ADDRESS.0) {
if token.as_slice() == BUY_ETH_ADDRESS.as_bytes() {
WETH
} else {
token
Expand Down
4 changes: 2 additions & 2 deletions crates/autopilot/src/arguments.rs
Original file line number Diff line number Diff line change
Expand Up @@ -81,11 +81,11 @@ pub struct Arguments {
/// bad token detector thinks they are bad. Base tokens are
/// automatically allowed.
#[clap(long, env, use_value_delimiter = true)]
pub allowed_tokens: Vec<H160>,
pub allowed_tokens: Vec<Address>,

/// List of token addresses to be ignored throughout service
#[clap(long, env, use_value_delimiter = true)]
pub unsupported_tokens: Vec<H160>,
pub unsupported_tokens: Vec<Address>,

/// Which estimators to use to estimate token prices in terms of the chain's
/// native token. Estimators with the same name need to also be specified as
Expand Down
4 changes: 2 additions & 2 deletions crates/autopilot/src/run.rs
Original file line number Diff line number Diff line change
Expand Up @@ -326,8 +326,8 @@ pub async fn run(args: Arguments, shutdown_controller: ShutdownController) {
&args.shared.base_tokens,
));
let mut allowed_tokens = args.allowed_tokens.clone();
allowed_tokens.extend(base_tokens.tokens().iter().copied());
allowed_tokens.push(model::order::BUY_ETH_ADDRESS);
allowed_tokens.extend(base_tokens.tokens().iter().map(|t| t.into_alloy()));
allowed_tokens.push(model::order::BUY_ETH_ADDRESS.into_alloy());
let unsupported_tokens = args.unsupported_tokens.clone();

let finder = token_owner_finder::init(
Expand Down
4 changes: 2 additions & 2 deletions crates/autopilot/src/solvable_orders.rs
Original file line number Diff line number Diff line change
Expand Up @@ -691,7 +691,7 @@ async fn find_unsupported_tokens(
.map(|token| {
let bad_token = bad_token.clone();
async move {
match bad_token.detect(token.into_legacy()).await {
match bad_token.detect(token).await {
Ok(quality) => (!quality.is_good()).then_some(token),
Err(err) => {
tracing::warn!(
Expand Down Expand Up @@ -1313,7 +1313,7 @@ mod tests {
let token0 = H160::from_low_u64_le(0);
let token1 = H160::from_low_u64_le(1);
let token2 = H160::from_low_u64_le(2);
let bad_token = Arc::new(ListBasedDetector::deny_list(vec![token0]));
let bad_token = Arc::new(ListBasedDetector::deny_list(vec![token0.into_alloy()]));
let orders = vec![
OrderBuilder::default()
.with_sell_token(token0)
Expand Down
8 changes: 4 additions & 4 deletions crates/driver/src/boundary/liquidity/balancer/v2/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ use {
BalancerV2WeightedPoolFactoryV3,
},
ethrpc::{
alloy::conversions::IntoAlloy,
alloy::conversions::{IntoAlloy, IntoLegacy},
block_stream::{BlockRetrieving, CurrentBlockWatcher},
},
shared::{
Expand Down Expand Up @@ -68,9 +68,9 @@ fn to_interaction(
let (target, value, call_data) = interaction.encode_swap();

eth::Interaction {
target: target.into(),
value: value.into(),
call_data: call_data.0.into(),
target: target.into_legacy().into(),
value: value.into_legacy().into(),
call_data: call_data.0.to_vec().into(),
}
}

Expand Down
6 changes: 3 additions & 3 deletions crates/driver/src/boundary/liquidity/uniswap/v2.rs
Original file line number Diff line number Diff line change
Expand Up @@ -111,9 +111,9 @@ pub fn to_interaction(
let (target, value, call_data) = interaction.encode_swap();

eth::Interaction {
target: target.into(),
value: value.into(),
call_data: call_data.0.into(),
target: target.into_legacy().into(),
value: value.into_legacy().into(),
call_data: call_data.0.to_vec().into(),
}
}

Expand Down
6 changes: 3 additions & 3 deletions crates/driver/src/boundary/liquidity/uniswap/v3.rs
Original file line number Diff line number Diff line change
Expand Up @@ -94,9 +94,9 @@ pub fn to_interaction(

let encoded = interaction.encode();
eth::Interaction {
target: eth::Address(encoded.0),
value: eth::Ether(encoded.1),
call_data: crate::util::Bytes(encoded.2.0),
target: eth::Address(encoded.0.into_legacy()),
value: eth::Ether(encoded.1.into_legacy()),
call_data: crate::util::Bytes(encoded.2.0.to_vec()),
}
}

Expand Down
15 changes: 7 additions & 8 deletions crates/e2e/tests/e2e/quote_verification.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use {
::alloy::primitives::address,
::alloy::primitives::{Address, U256, address},
bigdecimal::{BigDecimal, Zero},
e2e::setup::{eth, *},
ethcontract::H160,
Expand Down Expand Up @@ -184,16 +184,15 @@ async fn test_bypass_verification_for_rfq_quotes(web3: Web3) {
buy_token_destination: BuyTokenDestination::Erc20,
},
TradeKind::Legacy(LegacyTrade {
out_amount: 16380122291179526144u128.into(),
out_amount: U256::from(16380122291179526144u128),
gas_estimate: Some(225000),
interactions: vec![Interaction {
target: H160::from_str("0xdef1c0ded9bec7f1a1670819833240f027b25eff")
.unwrap(),
target: address!("0xdef1c0ded9bec7f1a1670819833240f027b25eff"),
data: const_hex::decode("aa77476c000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000002260fac5e5542a773aa44fbcfedf7c193bc2c599000000000000000000000000000000000000000000000000e357b42c3a9d8ccf0000000000000000000000000000000000000000000000000000000004d0e79e000000000000000000000000a69babef1ca67a37ffaf7a485dfff3382056e78c0000000000000000000000009008d19f58aabd9ed0d60971565aa8510560ab41000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000066360af101ffffffffffffffffffffffffffffffffffffff0f3f47f166360a8d0000003f0000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000001c66b3383f287dd9c85ad90e7c5a576ea4ba1bdf5a001d794a9afa379e6b2517b47e487a1aef32e75af432cbdbd301ada42754eaeac21ec4ca744afd92732f47540000000000000000000000000000000000000000000000000000000004d0c80f").unwrap(),
value: 0.into(),
value: U256::ZERO,
}],
solver: H160::from_str("0xe3067c7c27c1038de4e8ad95a83b927d23dfbd99")
.unwrap(),
solver: address!("0xe3067c7c27c1038de4e8ad95a83b927d23dfbd99"),
tx_origin,
}),
)
Expand All @@ -220,7 +219,7 @@ async fn test_bypass_verification_for_rfq_quotes(web3: Web3) {
// `tx_origin: 0x0000` is currently used to bypass quote verification due to an
// implementation detail of zeroex RFQ orders.
// TODO: remove with #2693
let verification = verify_trade(Some(H160::zero())).await;
let verification = verify_trade(Some(Address::ZERO)).await;
assert_eq!(&verification.unwrap(), &verified_quote);

// Trades using any other `tx_origin` can not bypass the verification.
Expand Down
12 changes: 12 additions & 0 deletions crates/number/src/conversions.rs
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,18 @@ pub mod alloy {
ensure!(!ratio.denom().is_zero(), "zero denominator");
big_int_to_u256(&(ratio.numer() / ratio.denom()))
}

/// Losslessly converts an alloy `U256` into a `num` `BigUint`.
pub fn u256_to_big_uint(input: &U256) -> BigUint {
    // Serialize the 256-bit value as little-endian bytes and let `num`
    // reassemble them; equivalent to the big-endian round trip.
    BigUint::from_bytes_le(&input.to_le_bytes::<32>())
}

/// Losslessly converts an alloy `U256` into a `num` `BigInt`.
///
/// `U256` is unsigned, so the result is always non-negative.
pub fn u256_to_big_int(input: &U256) -> BigInt {
    // `From<BigUint>` produces a non-negative integer (and normalizes
    // zero to `NoSign`), matching `from_biguint(Sign::Plus, …)`.
    BigInt::from(u256_to_big_uint(input))
}

/// Losslessly converts an alloy `U256` into a `BigRational`
/// (denominator 1).
pub fn u256_to_big_rational(input: &U256) -> BigRational {
    // `from_integer` builds the ratio `x / 1` directly, equivalent to
    // `BigRational::new(x, 1.into())`.
    BigRational::from_integer(u256_to_big_int(input))
}
}

pub fn rational_to_big_decimal<T>(value: &Ratio<T>) -> BigDecimal
Expand Down
5 changes: 2 additions & 3 deletions crates/orderbook/src/arguments.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
use {
alloy::primitives::Address,
primitive_types::H160,
reqwest::Url,
shared::{
arguments::{display_option, display_secret_option},
Expand Down Expand Up @@ -78,7 +77,7 @@ pub struct Arguments {

/// List of token addresses to be ignored throughout service
#[clap(long, env, use_value_delimiter = true)]
pub unsupported_tokens: Vec<H160>,
pub unsupported_tokens: Vec<Address>,

/// List of account addresses to be denied from order creation
#[clap(long, env, use_value_delimiter = true)]
Expand Down Expand Up @@ -106,7 +105,7 @@ pub struct Arguments {
/// bad token detector thinks they are bad. Base tokens are
/// automatically allowed.
#[clap(long, env, use_value_delimiter = true)]
pub allowed_tokens: Vec<H160>,
pub allowed_tokens: Vec<Address>,

/// Skip EIP-1271 order signature validation on creation.
#[clap(long, env, action = clap::ArgAction::Set, default_value = "false")]
Expand Down
4 changes: 2 additions & 2 deletions crates/orderbook/src/run.rs
Original file line number Diff line number Diff line change
Expand Up @@ -232,8 +232,8 @@ pub async fn run(args: Arguments) {
&args.shared.base_tokens,
));
let mut allowed_tokens = args.allowed_tokens.clone();
allowed_tokens.extend(base_tokens.tokens().iter().copied());
allowed_tokens.push(BUY_ETH_ADDRESS);
allowed_tokens.extend(base_tokens.tokens().iter().map(|t| t.into_alloy()));
allowed_tokens.push(BUY_ETH_ADDRESS.into_alloy());
let unsupported_tokens = args.unsupported_tokens.clone();

let uniswapv3_factory = IUniswapV3Factory::Instance::deployed(&web3.alloy)
Expand Down
20 changes: 10 additions & 10 deletions crates/shared/src/bad_token/cache.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
use {
super::{BadTokenDetecting, TokenQuality},
alloy::primitives::Address,
anyhow::Result,
dashmap::DashMap,
futures::future::join_all,
primitive_types::H160,
std::{
ops::Div,
sync::Arc,
Expand All @@ -14,15 +14,15 @@ use {

pub struct CachingDetector {
inner: Box<dyn BadTokenDetecting>,
cache: DashMap<H160, (Instant, TokenQuality)>,
cache: DashMap<Address, (Instant, TokenQuality)>,
cache_expiry: Duration,
prefetch_time: Duration,
}

#[async_trait::async_trait]
impl BadTokenDetecting for CachingDetector {
#[instrument(skip_all)]
async fn detect(&self, token: H160) -> Result<TokenQuality> {
async fn detect(&self, token: Address) -> Result<TokenQuality> {
if let Some(quality) = self.get_from_cache(&token, Instant::now()) {
return Ok(quality);
}
Expand Down Expand Up @@ -53,13 +53,13 @@ impl CachingDetector {
detector
}

fn get_from_cache(&self, token: &H160, now: Instant) -> Option<TokenQuality> {
fn get_from_cache(&self, token: &Address, now: Instant) -> Option<TokenQuality> {
let (instant, quality) = self.cache.get(token)?.value().clone();
let still_valid = now.saturating_duration_since(instant) < self.cache_expiry;
still_valid.then_some(quality)
}

fn insert_many_into_cache(&self, tokens: impl Iterator<Item = (H160, TokenQuality)>) {
fn insert_many_into_cache(&self, tokens: impl Iterator<Item = (Address, TokenQuality)>) {
let now = Instant::now();
tokens.into_iter().for_each(|(token, quality)| {
self.cache.insert(token, (now, quality));
Expand Down Expand Up @@ -134,7 +134,7 @@ mod tests {

for _ in 0..2 {
let result = detector
.detect(H160::from_low_u64_le(0))
.detect(Address::with_last_byte(0))
.now_or_never()
.unwrap();
assert!(result.unwrap().is_good());
Expand All @@ -144,7 +144,7 @@ mod tests {
#[tokio::test]
async fn cache_expires() {
let inner = MockBadTokenDetecting::new();
let token = H160::from_low_u64_le(0);
let token = Address::with_last_byte(0);
let detector = CachingDetector::new(
Box::new(inner),
Duration::from_secs(2),
Expand Down Expand Up @@ -193,22 +193,22 @@ mod tests {
);

let result = detector
.detect(H160::from_low_u64_le(0))
.detect(Address::with_last_byte(0))
.now_or_never()
.unwrap();
assert!(result.unwrap().is_good());
// Check that the result is the same because we haven't reached the prefetch
// time yet
tokio::time::sleep(Duration::from_millis(100)).await;
let result = detector
.detect(H160::from_low_u64_le(0))
.detect(Address::with_last_byte(0))
.now_or_never()
.unwrap();
assert!(result.unwrap().is_good());
// We wait so the prefetch fetches the data
tokio::time::sleep(Duration::from_millis(70)).await;
let result = detector
.detect(H160::from_low_u64_le(0))
.detect(Address::with_last_byte(0))
.now_or_never()
.unwrap();
assert!(!result.unwrap().is_good());
Expand Down
3 changes: 2 additions & 1 deletion crates/shared/src/bad_token/instrumented.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
use {
super::{BadTokenDetecting, TokenQuality},
alloy::primitives::Address,
anyhow::Result,
prometheus::IntCounterVec,
prometheus_metric_storage::MetricStorage,
Expand Down Expand Up @@ -33,7 +34,7 @@ pub struct InstrumentedBadTokenDetector {

#[async_trait::async_trait]
impl BadTokenDetecting for InstrumentedBadTokenDetector {
async fn detect(&self, token: ethcontract::H160) -> Result<TokenQuality> {
async fn detect(&self, token: Address) -> Result<TokenQuality> {
let result = self
.inner
.detect(token)
Expand Down
28 changes: 14 additions & 14 deletions crates/shared/src/bad_token/list_based.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use {
super::{BadTokenDetecting, TokenQuality},
alloy::primitives::Address,
anyhow::Result,
primitive_types::H160,
std::sync::Arc,
tracing::instrument,
};
Expand All @@ -15,16 +15,16 @@ pub enum UnknownTokenStrategy {

/// Classify tokens with explicit allow and deny lists.
pub struct ListBasedDetector {
allow_list: Vec<H160>,
deny_list: Vec<H160>,
allow_list: Vec<Address>,
deny_list: Vec<Address>,
strategy: UnknownTokenStrategy,
}

impl ListBasedDetector {
/// Panics if same token is both allowed and denied.
pub fn new(
allow_list: Vec<H160>,
deny_list: Vec<H160>,
allow_list: Vec<Address>,
deny_list: Vec<Address>,
strategy: UnknownTokenStrategy,
) -> Self {
assert!(
Expand All @@ -38,7 +38,7 @@ impl ListBasedDetector {
}
}

pub fn deny_list(list: Vec<H160>) -> Self {
pub fn deny_list(list: Vec<Address>) -> Self {
Self {
allow_list: Vec::new(),
deny_list: list,
Expand All @@ -50,7 +50,7 @@ impl ListBasedDetector {
#[async_trait::async_trait]
impl BadTokenDetecting for ListBasedDetector {
#[instrument(skip_all)]
async fn detect(&self, token: ethcontract::H160) -> Result<TokenQuality> {
async fn detect(&self, token: Address) -> Result<TokenQuality> {
if self.allow_list.contains(&token) {
return Ok(TokenQuality::Good);
}
Expand Down Expand Up @@ -80,19 +80,19 @@ mod tests {
// Would panic if used.
let inner = MockBadTokenDetecting::new();
let detector = ListBasedDetector {
allow_list: vec![H160::from_low_u64_le(0)],
deny_list: vec![H160::from_low_u64_le(1)],
allow_list: vec![Address::with_last_byte(0)],
deny_list: vec![Address::with_last_byte(1)],
strategy: UnknownTokenStrategy::Forward(Arc::new(inner)),
};

let result = detector
.detect(H160::from_low_u64_le(0))
.detect(Address::with_last_byte(0))
.now_or_never()
.unwrap();
assert!(result.unwrap().is_good());

let result = detector
.detect(H160::from_low_u64_le(1))
.detect(Address::with_last_byte(1))
.now_or_never()
.unwrap();
assert!(!result.unwrap().is_good());
Expand All @@ -106,7 +106,7 @@ mod tests {
strategy: UnknownTokenStrategy::Allow,
};
let result = detector
.detect(H160::from_low_u64_le(0))
.detect(Address::with_last_byte(0))
.now_or_never()
.unwrap();
assert!(result.unwrap().is_good());
Expand All @@ -117,7 +117,7 @@ mod tests {
strategy: UnknownTokenStrategy::Deny,
};
let result = detector
.detect(H160::from_low_u64_le(0))
.detect(Address::with_last_byte(0))
.now_or_never()
.unwrap();
assert!(!result.unwrap().is_good());
Expand All @@ -138,7 +138,7 @@ mod tests {
};

let result = detector
.detect(H160::from_low_u64_le(0))
.detect(Address::with_last_byte(0))
.now_or_never()
.unwrap();
assert!(result.unwrap().is_good());
Expand Down
Loading
Loading