From 454f70b4732f8689ca014e767e4fee8a90e288f4 Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Fri, 21 Feb 2025 22:49:39 +0000 Subject: [PATCH 01/10] improve(tasks): Auto deposit-and-burn for new LP tokens When adding new deposit routes, if the LP token for the given L1 token does not yet exist, create the LP token and atomically deposit and burn a single unit. This prevents the HubPool from ever having a 0 balance of the l1 token, protecting against a known issue. It's been on the todo list to automate this for a while, but it hasn't been prioritised because adding LP tokens is quite infrequent. Atomic deposit-and-burn was done manually last time (0x566087fbaa74a32ec94bf4ba58c1451bf916991b686c0895245f726339b1725d), but it's preferable to automate this as much as possible to remove the human element. --- tasks/enableL1TokenAcrossEcosystem.ts | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/tasks/enableL1TokenAcrossEcosystem.ts b/tasks/enableL1TokenAcrossEcosystem.ts index b6d414f78..d6ac7668e 100644 --- a/tasks/enableL1TokenAcrossEcosystem.ts +++ b/tasks/enableL1TokenAcrossEcosystem.ts @@ -62,6 +62,7 @@ task("enable-l1-token-across-ecosystem", "Enable a provided token across the ent console.log(`\nRunning task to enable L1 token over entire Across ecosystem 🌉. L1 token: ${l1Token}`); const { deployments, ethers } = hre; + const { AddressZero: ZERO_ADDRESS } = ethers.constants; const [signer] = await hre.ethers.getSigners(); // Remove chainIds that are in the ignore list. @@ -107,10 +108,25 @@ task("enable-l1-token-across-ecosystem", "Enable a provided token across the ent // Construct calldata to enable these tokens. const callData = []; - // If deposit route chains are defined then we don't want to add a new LP token: - if (depositRouteChains.length === 0) { + // If the l1 token is not yet enabled for LP, enable it. + let { lpTokenAddress } = await hubPool.pooledTokens(l1Token); + if (lpTokenAddress === ZERO_ADDRESS) { console.log(`\nAdding calldata to enable liquidity provision on ${l1Token}`); callData.push(hubPool.interface.encodeFunctionData("enableL1TokenForLiquidityProvision", [l1Token])); + + // Ensure to always seed the LP with at least 1 unit of the LP token. Burn the LP token to prevent 0 LP. 
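+ // The LP token does not exist yet, so its address is obtained by statically simulating
+ // createLpToken() on the LP token factory; the burn is then encoded as a transfer of the
+ // seeded amount to the zero address.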
+ if (hubChainId === CHAIN_IDs.MAINNET) { + console.log(`\nAdding calldata to enable ensure atomic deposit-and-burn of ${l1Token}`); + const _lpTokenFactory = await hubPool.lpTokenFactory(); + const lpTokenFactory = new ethers.Contract(_lpTokenFactory.address, _lpTokenFactory.abi, signer); + lpTokenAddress = await lpTokenFactory.callStatic.createLpToken(l1Token); + + const minDeposit = "1"; + callData.push(hubPool.interface.encodeFunctionData("addLiquidity", [l1Token, minDeposit])); + + const lpToken = (await ethers.getContractFactory("ExpandedERC20")).attach(lpTokenAddress); + callData.push(lpToken.interface.encodeFunctionData("transfer", [ZERO_ADDRESS, minDeposit])); + } } else { depositRouteChains.forEach((chainId) => assert(tokens[chainId].symbol !== NO_SYMBOL, `Token ${symbol} is not defined for chain ${chainId}`) From 9f477359635693ce2dc5d9530d9cc4c31ee7a59f Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Fri, 21 Feb 2025 23:20:55 +0000 Subject: [PATCH 02/10] reflow --- tasks/enableL1TokenAcrossEcosystem.ts | 28 +++++++++++++-------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/tasks/enableL1TokenAcrossEcosystem.ts b/tasks/enableL1TokenAcrossEcosystem.ts index d6ac7668e..114a44998 100644 --- a/tasks/enableL1TokenAcrossEcosystem.ts +++ b/tasks/enableL1TokenAcrossEcosystem.ts @@ -111,26 +111,24 @@ task("enable-l1-token-across-ecosystem", "Enable a provided token across the ent // If the l1 token is not yet enabled for LP, enable it. let { lpTokenAddress } = await hubPool.pooledTokens(l1Token); if (lpTokenAddress === ZERO_ADDRESS) { - console.log(`\nAdding calldata to enable liquidity provision on ${l1Token}`); + const [lpFactoryAddr, { abi: lpFactoryABI }] = await Promise.all([ + hubPool.lpTokenFactory(), + deployments.get("LpTokenFactory"), + ]); + const lpTokenFactory = new ethers.Contract(lpFactoryAddr, lpFactoryABI, signer); + lpTokenAddress = await lpTokenFactory.callStatic.createLpToken(l1Token); + console.log(`\nAdding calldata to enable liquidity provision on ${l1Token} (LP token ${lpTokenAddress})`); + callData.push(hubPool.interface.encodeFunctionData("enableL1TokenForLiquidityProvision", [l1Token])); // Ensure to always seed the LP with at least 1 unit of the LP token. Burn the LP token to prevent 0 LP. 
- if (hubChainId === CHAIN_IDs.MAINNET) { - console.log(`\nAdding calldata to enable ensure atomic deposit-and-burn of ${l1Token}`); - const _lpTokenFactory = await hubPool.lpTokenFactory(); - const lpTokenFactory = new ethers.Contract(_lpTokenFactory.address, _lpTokenFactory.abi, signer); - lpTokenAddress = await lpTokenFactory.callStatic.createLpToken(l1Token); + console.log(`\nAdding calldata to enable ensure atomic deposit-and-burn of LP token ${lpTokenAddress}`); - const minDeposit = "1"; - callData.push(hubPool.interface.encodeFunctionData("addLiquidity", [l1Token, minDeposit])); + const minDeposit = "1"; + callData.push(hubPool.interface.encodeFunctionData("addLiquidity", [l1Token, minDeposit])); - const lpToken = (await ethers.getContractFactory("ExpandedERC20")).attach(lpTokenAddress); - callData.push(lpToken.interface.encodeFunctionData("transfer", [ZERO_ADDRESS, minDeposit])); - } - } else { - depositRouteChains.forEach((chainId) => - assert(tokens[chainId].symbol !== NO_SYMBOL, `Token ${symbol} is not defined for chain ${chainId}`) - ); + const lpToken = (await ethers.getContractFactory("ExpandedERC20")).attach(lpTokenAddress); + callData.push(lpToken.interface.encodeFunctionData("transfer", [ZERO_ADDRESS, minDeposit])); } console.log("\nAdding calldata to enable routes between all chains and tokens:"); From 721946646f52b47a62216a96e1be5180cf417198 Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Sat, 22 Feb 2025 00:45:43 +0100 Subject: [PATCH 03/10] Apply suggestions from code review --- tasks/enableL1TokenAcrossEcosystem.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tasks/enableL1TokenAcrossEcosystem.ts b/tasks/enableL1TokenAcrossEcosystem.ts index 114a44998..b7ce7afe6 100644 --- a/tasks/enableL1TokenAcrossEcosystem.ts +++ b/tasks/enableL1TokenAcrossEcosystem.ts @@ -109,7 +109,7 @@ task("enable-l1-token-across-ecosystem", "Enable a provided token across the ent const callData = []; // If the l1 token is not yet enabled for LP, enable it. 
- let { lpTokenAddress } = await hubPool.pooledTokens(l1Token); + let { lpToken: lpTokenAddress } = await hubPool.pooledTokens(l1Token); if (lpTokenAddress === ZERO_ADDRESS) { const [lpFactoryAddr, { abi: lpFactoryABI }] = await Promise.all([ hubPool.lpTokenFactory(), From 038e97137a2ffa16a1faf34599c2a7e0453adadd Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Sat, 22 Feb 2025 00:46:23 +0100 Subject: [PATCH 04/10] Apply suggestions from code review --- tasks/enableL1TokenAcrossEcosystem.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tasks/enableL1TokenAcrossEcosystem.ts b/tasks/enableL1TokenAcrossEcosystem.ts index b7ce7afe6..5d37781cd 100644 --- a/tasks/enableL1TokenAcrossEcosystem.ts +++ b/tasks/enableL1TokenAcrossEcosystem.ts @@ -125,9 +125,8 @@ task("enable-l1-token-across-ecosystem", "Enable a provided token across the ent console.log(`\nAdding calldata to enable ensure atomic deposit-and-burn of LP token ${lpTokenAddress}`); const minDeposit = "1"; - callData.push(hubPool.interface.encodeFunctionData("addLiquidity", [l1Token, minDeposit])); - const lpToken = (await ethers.getContractFactory("ExpandedERC20")).attach(lpTokenAddress); + callData.push(hubPool.interface.encodeFunctionData("addLiquidity", [l1Token, minDeposit])); callData.push(lpToken.interface.encodeFunctionData("transfer", [ZERO_ADDRESS, minDeposit])); } From b1a3d4d978bc433c74a8693c4d76e9ac03d3d8aa Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Sat, 22 Feb 2025 00:36:46 +0000 Subject: [PATCH 05/10] Update & fix --- tasks/enableL1TokenAcrossEcosystem.ts | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/tasks/enableL1TokenAcrossEcosystem.ts b/tasks/enableL1TokenAcrossEcosystem.ts index 5d37781cd..5b8376598 100644 --- a/tasks/enableL1TokenAcrossEcosystem.ts +++ b/tasks/enableL1TokenAcrossEcosystem.ts @@ -109,25 +109,25 @@ task("enable-l1-token-across-ecosystem", "Enable a provided token across the ent const callData = []; // If the l1 token is not yet enabled for LP, enable it. - let { lpToken: lpTokenAddress } = await hubPool.pooledTokens(l1Token); - if (lpTokenAddress === ZERO_ADDRESS) { + let { lpToken } = await hubPool.pooledTokens(l1Token); + if (lpToken === ZERO_ADDRESS) { const [lpFactoryAddr, { abi: lpFactoryABI }] = await Promise.all([ hubPool.lpTokenFactory(), deployments.get("LpTokenFactory"), ]); const lpTokenFactory = new ethers.Contract(lpFactoryAddr, lpFactoryABI, signer); - lpTokenAddress = await lpTokenFactory.callStatic.createLpToken(l1Token); - console.log(`\nAdding calldata to enable liquidity provision on ${l1Token} (LP token ${lpTokenAddress})`); + lpToken = await lpTokenFactory.callStatic.createLpToken(l1Token); + console.log(`\nAdding calldata to enable liquidity provision on ${l1Token} (LP token ${lpToken})`); callData.push(hubPool.interface.encodeFunctionData("enableL1TokenForLiquidityProvision", [l1Token])); - // Ensure to always seed the LP with at least 1 unit of the LP token. Burn the LP token to prevent 0 LP. - console.log(`\nAdding calldata to enable ensure atomic deposit-and-burn of LP token ${lpTokenAddress}`); - + // Ensure to always seed the LP with at least 1 unit of the LP token. + console.log( + `\nAdding calldata to enable ensure atomic deposit of L1 token for LP token ${lpToken}` + + "\n\n\tNOTE: ENSURE TO BURN AT LEAST 1 UNIT OF THE LP TOKEN AFTER EXECUTING." 
+ ); const minDeposit = "1"; - const lpToken = (await ethers.getContractFactory("ExpandedERC20")).attach(lpTokenAddress); callData.push(hubPool.interface.encodeFunctionData("addLiquidity", [l1Token, minDeposit])); - callData.push(lpToken.interface.encodeFunctionData("transfer", [ZERO_ADDRESS, minDeposit])); } console.log("\nAdding calldata to enable routes between all chains and tokens:"); @@ -136,7 +136,7 @@ task("enable-l1-token-across-ecosystem", "Enable a provided token across the ent const routeChainIds = Object.keys(tokens).map(Number); routeChainIds.forEach((fromId) => { const formattedFromId = formatChainId(fromId); - const { symbol, address: inputToken } = tokens[fromId]; + const { address: inputToken } = tokens[fromId]; skipped[fromId] = []; routeChainIds.forEach((toId) => { if (fromId === toId || [fromId, toId].some((chainId) => tokens[chainId].symbol === NO_SYMBOL)) { From 44e71b1eb2b98fd0c07f4c18a5cca24c70a0d599 Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Mon, 24 Feb 2025 21:12:23 +0100 Subject: [PATCH 06/10] chore: Bump constants & update enable token script (#900) For testnet definitions. --- package.json | 2 +- tasks/enableL1TokenAcrossEcosystem.ts | 25 ++++++++++++++----------- utils/constants.ts | 1 + yarn.lock | 8 ++++---- 4 files changed, 20 insertions(+), 16 deletions(-) diff --git a/package.json b/package.json index 1441f018b..e3ed2a589 100644 --- a/package.json +++ b/package.json @@ -42,7 +42,7 @@ "pre-commit-hook": "sh scripts/preCommitHook.sh" }, "dependencies": { - "@across-protocol/constants": "^3.1.35", + "@across-protocol/constants": "^3.1.37", "@coral-xyz/anchor": "^0.30.1", "@defi-wonderland/smock": "^2.3.4", "@eth-optimism/contracts": "^0.5.40", diff --git a/tasks/enableL1TokenAcrossEcosystem.ts b/tasks/enableL1TokenAcrossEcosystem.ts index 5b8376598..b400fe362 100644 --- a/tasks/enableL1TokenAcrossEcosystem.ts +++ b/tasks/enableL1TokenAcrossEcosystem.ts @@ -1,6 +1,6 @@ import { task } from "hardhat/config"; import assert from "assert"; -import { CHAIN_IDs, MAINNET_CHAIN_IDs, TOKEN_SYMBOLS_MAP } from "../utils/constants"; +import { CHAIN_IDs, MAINNET_CHAIN_IDs, TESTNET_CHAIN_IDs, TOKEN_SYMBOLS_MAP } from "../utils/constants"; import { askYesNoQuestion, resolveTokenOnChain, isTokenSymbol, minimalSpokePoolInterface } from "./utils"; import { TokenSymbol } from "./types"; @@ -9,13 +9,13 @@ const NO_SYMBOL = "----"; const NO_ADDRESS = "------------------------------------------"; // Supported mainnet chain IDs. -const enabledChainIds = Object.values(MAINNET_CHAIN_IDs) - .map(Number) - .filter((chainId) => chainId !== CHAIN_IDs.BOBA) - .sort((x, y) => x - y); - -const chainPadding = enabledChainIds[enabledChainIds.length - 1].toString().length; -const formatChainId = (chainId: number): string => chainId.toString().padStart(chainPadding, " "); +const enabledChainIds = (hubChainId: number) => { + const chainIds = hubChainId === CHAIN_IDs.MAINNET ? MAINNET_CHAIN_IDs : TESTNET_CHAIN_IDs; + return Object.values(chainIds) + .map(Number) + .filter((chainId) => chainId !== CHAIN_IDs.BOBA) + .sort((x, y) => x - y); +}; const getChainsFromList = (taskArgInput: string): number[] => taskArgInput @@ -66,18 +66,19 @@ task("enable-l1-token-across-ecosystem", "Enable a provided token across the ent const [signer] = await hre.ethers.getSigners(); // Remove chainIds that are in the ignore list. + const _enabledChainIds = enabledChainIds(hubChainId); let inputChains: number[] = []; try { - inputChains = (chains?.split(",") ?? 
enabledChainIds).map(Number); + inputChains = (chains?.split(",") ?? _enabledChainIds).map(Number); console.log(`\nParsed 'chains' argument:`, inputChains); } catch (error) { throw new Error(`Failed to parse 'chains' argument ${chains} as a comma-separated list of numbers.`); } - if (inputChains.length === 0) inputChains = enabledChainIds; + if (inputChains.length === 0) inputChains = _enabledChainIds; else if (inputChains.some((chain) => isNaN(chain) || !Number.isInteger(chain) || chain < 0)) { throw new Error(`Invalid chains list: ${inputChains}`); } - const chainIds = enabledChainIds.filter((chainId) => inputChains.includes(chainId)); + const chainIds = _enabledChainIds.filter((chainId) => inputChains.includes(chainId)); console.log("\nLoading L2 companion token address for provided L1 token."); const tokens = Object.fromEntries( @@ -134,6 +135,8 @@ task("enable-l1-token-across-ecosystem", "Enable a provided token across the ent let i = 0; // counter for logging. const skipped: { [originChainId: number]: number[] } = {}; const routeChainIds = Object.keys(tokens).map(Number); + const chainPadding = _enabledChainIds[enabledChainIds.length - 1].toString().length; + const formatChainId = (chainId: number): string => chainId.toString().padStart(chainPadding, " "); routeChainIds.forEach((fromId) => { const formattedFromId = formatChainId(fromId); const { address: inputToken } = tokens[fromId]; diff --git a/utils/constants.ts b/utils/constants.ts index ea8d5e773..92f12210e 100644 --- a/utils/constants.ts +++ b/utils/constants.ts @@ -3,6 +3,7 @@ export { MAINNET_CHAIN_IDs, PRODUCTION_NETWORKS, PUBLIC_NETWORKS, + TESTNET_CHAIN_IDs, TOKEN_SYMBOLS_MAP, } from "@across-protocol/constants"; diff --git a/yarn.lock b/yarn.lock index 98be62f4a..bc681fc93 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,10 +2,10 @@ # yarn lockfile v1 -"@across-protocol/constants@^3.1.35": - version "3.1.35" - resolved "https://registry.yarnpkg.com/@across-protocol/constants/-/constants-3.1.35.tgz#80ee8e569bc5c1fc94b5087d357d9612fd782151" - integrity sha512-2Fj9mqBEVQu4Bsq6o7helUkhEjpce+uqni0pTV51y1QOEQdgAJU5U5BNQFXUMUDMQRaM3DqB4ys89GVJ4TuA/w== +"@across-protocol/constants@^3.1.37": + version "3.1.37" + resolved "https://registry.yarnpkg.com/@across-protocol/constants/-/constants-3.1.37.tgz#7f1da248abe809eb9fb621f1876fbde78ec3509b" + integrity sha512-qPRntmeIDxgOwfXhPl90Mf61Fb4k20ecCc0N9Cco+5fSJE9PM4oI6Jq8VYnewyMPglYHZ1kMcdCPWYGebUs0Aw== "@across-protocol/contracts@^0.1.4": version "0.1.4" From 36eaca3971ff8ae759f6b3cf29239325e86e58a2 Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Wed, 26 Mar 2025 08:58:52 +0000 Subject: [PATCH 07/10] Make lp deposit amount configurable --- tasks/enableL1TokenAcrossEcosystem.ts | 28 ++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/tasks/enableL1TokenAcrossEcosystem.ts b/tasks/enableL1TokenAcrossEcosystem.ts index b400fe362..b9d7dd191 100644 --- a/tasks/enableL1TokenAcrossEcosystem.ts +++ b/tasks/enableL1TokenAcrossEcosystem.ts @@ -1,6 +1,7 @@ import { task } from "hardhat/config"; import assert from "assert"; -import { CHAIN_IDs, MAINNET_CHAIN_IDs, TESTNET_CHAIN_IDs, TOKEN_SYMBOLS_MAP } from "../utils/constants"; +import { toBN } from "../utils/utils"; +import { CHAIN_IDs, MAINNET_CHAIN_IDs, TESTNET_CHAIN_IDs, TOKEN_SYMBOLS_MAP } from "../utils"; import { askYesNoQuestion, resolveTokenOnChain, isTokenSymbol, minimalSpokePoolInterface } from "./utils"; import { TokenSymbol } from "./types"; @@ -27,6 +28,7 @@ 
task("enable-l1-token-across-ecosystem", "Enable a provided token across the ent .addFlag("execute", "Provide this flag if you would like to actually execute the transaction from the EOA") .addParam("token", "Symbol of token to enable") .addOptionalParam("chains", "Comma-delimited list of chains to enable the token on. Defaults to all supported chains") + .addOptionalParam("lpAmount", "Amount of LP token to burn when enabling a new token", "1") .addOptionalParam( "customoptimismbridge", "Custom token bridge to set for optimism, for example used with SNX and DAI" @@ -45,8 +47,8 @@ task("enable-l1-token-across-ecosystem", "Enable a provided token across the ent assert(isTokenSymbol(_matchedSymbol)); const matchedSymbol = _matchedSymbol as TokenSymbol; - const l1Token = TOKEN_SYMBOLS_MAP[matchedSymbol].addresses[hubChainId]; - assert(l1Token !== undefined, `Could not find ${symbol} in TOKEN_SYMBOLS_MAP`); + const l1TokenAddr = TOKEN_SYMBOLS_MAP[matchedSymbol].addresses[hubChainId]; + assert(l1TokenAddr !== undefined, `Could not find ${symbol} in TOKEN_SYMBOLS_MAP`); // If deposit routes chains are provided then we'll only add routes involving these chains. This is used to add new // deposit routes to a new chain for an existing L1 token, so we also won't add a new LP token if this is defined. @@ -60,7 +62,7 @@ task("enable-l1-token-across-ecosystem", "Enable a provided token across the ent ); if (!hasSetConfigStore) process.exit(0); - console.log(`\nRunning task to enable L1 token over entire Across ecosystem 🌉. L1 token: ${l1Token}`); + console.log(`\nRunning task to enable L1 token over entire Across ecosystem 🌉. L1 token: ${l1TokenAddr}`); const { deployments, ethers } = hre; const { AddressZero: ZERO_ADDRESS } = ethers.constants; const [signer] = await hre.ethers.getSigners(); @@ -110,25 +112,29 @@ task("enable-l1-token-across-ecosystem", "Enable a provided token across the ent const callData = []; // If the l1 token is not yet enabled for LP, enable it. - let { lpToken } = await hubPool.pooledTokens(l1Token); + let { lpToken } = await hubPool.pooledTokens(l1TokenAddr); if (lpToken === ZERO_ADDRESS) { const [lpFactoryAddr, { abi: lpFactoryABI }] = await Promise.all([ hubPool.lpTokenFactory(), deployments.get("LpTokenFactory"), ]); const lpTokenFactory = new ethers.Contract(lpFactoryAddr, lpFactoryABI, signer); - lpToken = await lpTokenFactory.callStatic.createLpToken(l1Token); - console.log(`\nAdding calldata to enable liquidity provision on ${l1Token} (LP token ${lpToken})`); + lpToken = await lpTokenFactory.callStatic.createLpToken(l1TokenAddr); + console.log(`\nAdding calldata to enable liquidity provision on ${l1TokenAddr} (LP token ${lpToken})`); - callData.push(hubPool.interface.encodeFunctionData("enableL1TokenForLiquidityProvision", [l1Token])); + callData.push(hubPool.interface.encodeFunctionData("enableL1TokenForLiquidityProvision", [l1TokenAddr])); + + const l1Token = (await ethers.getContractFactory("ExpandedERC20")).attach(l1TokenAddr); + const decimals = await l1Token.symbol(); + const depositAmount = toBN(taskArguments.lpAmount).mul(toBN(10).pow(decimals)); // Ensure to always seed the LP with at least 1 unit of the LP token. console.log( `\nAdding calldata to enable ensure atomic deposit of L1 token for LP token ${lpToken}` + - "\n\n\tNOTE: ENSURE TO BURN AT LEAST 1 UNIT OF THE LP TOKEN AFTER EXECUTING." + "\n\n\tNOTE: ENSURE TO BURN ${depositAmount} UNITS OF THE LP TOKEN AFTER EXECUTING." 
); - const minDeposit = "1"; - callData.push(hubPool.interface.encodeFunctionData("addLiquidity", [l1Token, minDeposit])); + + callData.push(hubPool.interface.encodeFunctionData("addLiquidity", [l1Token, depositAmount])); } console.log("\nAdding calldata to enable routes between all chains and tokens:"); From 36fa204b9340cca9b8c7295d4e413b0a1cb73d34 Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Wed, 26 Mar 2025 08:59:48 +0000 Subject: [PATCH 08/10] Add type validation --- tasks/enableL1TokenAcrossEcosystem.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tasks/enableL1TokenAcrossEcosystem.ts b/tasks/enableL1TokenAcrossEcosystem.ts index b9d7dd191..bfeddf9ca 100644 --- a/tasks/enableL1TokenAcrossEcosystem.ts +++ b/tasks/enableL1TokenAcrossEcosystem.ts @@ -1,4 +1,4 @@ -import { task } from "hardhat/config"; +import { task, types } from "hardhat/config"; import assert from "assert"; import { toBN } from "../utils/utils"; import { CHAIN_IDs, MAINNET_CHAIN_IDs, TESTNET_CHAIN_IDs, TOKEN_SYMBOLS_MAP } from "../utils"; @@ -28,7 +28,7 @@ task("enable-l1-token-across-ecosystem", "Enable a provided token across the ent .addFlag("execute", "Provide this flag if you would like to actually execute the transaction from the EOA") .addParam("token", "Symbol of token to enable") .addOptionalParam("chains", "Comma-delimited list of chains to enable the token on. Defaults to all supported chains") - .addOptionalParam("lpAmount", "Amount of LP token to burn when enabling a new token", "1") + .addOptionalParam("lpAmount", "Amount of LP token to burn when enabling a new token", 1, types.int) .addOptionalParam( "customoptimismbridge", "Custom token bridge to set for optimism, for example used with SNX and DAI" From 9f72b2ee66e5b5cee48a72d37406b5f75a1ec233 Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Wed, 26 Mar 2025 09:06:56 +0000 Subject: [PATCH 09/10] Back out bad change --- programs/multicall-handler/src/lib.rs | 16 ++++++++-- programs/svm-spoke/src/lib.rs | 44 ++++++++++++++------------- 2 files changed, 36 insertions(+), 24 deletions(-) diff --git a/programs/multicall-handler/src/lib.rs b/programs/multicall-handler/src/lib.rs index ad934469a..6bc38b264 100644 --- a/programs/multicall-handler/src/lib.rs +++ b/programs/multicall-handler/src/lib.rs @@ -1,4 +1,10 @@ -use anchor_lang::{ prelude::*, solana_program::{ instruction::Instruction, program::{ invoke, invoke_signed } } }; +use anchor_lang::{ + prelude::*, + solana_program::{ + instruction::Instruction, + program::{invoke, invoke_signed}, + }, +}; #[cfg(not(feature = "no-entrypoint"))] use ::solana_security_txt::security_txt; @@ -37,13 +43,17 @@ pub mod multicall_handler { let mut accounts = Vec::with_capacity(compiled_ix.account_key_indexes.len()); let mut account_infos = Vec::with_capacity(compiled_ix.account_key_indexes.len()); - let target_program = ctx.remaining_accounts + let target_program = ctx + .remaining_accounts .get(compiled_ix.program_id_index as usize) .ok_or(ErrorCode::AccountNotEnoughKeys)?; // Resolve CPI accounts from indexed references to the remaining accounts. 
for index in compiled_ix.account_key_indexes { - let account_info = ctx.remaining_accounts.get(index as usize).ok_or(ErrorCode::AccountNotEnoughKeys)?; + let account_info = ctx + .remaining_accounts + .get(index as usize) + .ok_or(ErrorCode::AccountNotEnoughKeys)?; let is_handler_signer = account_info.key() == handler_signer; use_handler_signer |= is_handler_signer; diff --git a/programs/svm-spoke/src/lib.rs b/programs/svm-spoke/src/lib.rs index 23ebdf6f6..efcfc2edb 100644 --- a/programs/svm-spoke/src/lib.rs +++ b/programs/svm-spoke/src/lib.rs @@ -73,7 +73,7 @@ pub mod svm_spoke { remote_domain: u32, cross_domain_admin: Pubkey, deposit_quote_time_buffer: u32, - fill_deadline_buffer: u32 + fill_deadline_buffer: u32, ) -> Result<()> { instructions::initialize( ctx, @@ -83,7 +83,7 @@ pub mod svm_spoke { remote_domain, cross_domain_admin, deposit_quote_time_buffer, - fill_deadline_buffer + fill_deadline_buffer, ) } @@ -146,7 +146,7 @@ pub mod svm_spoke { ctx: Context, origin_token: Pubkey, destination_chain_id: u64, - enabled: bool + enabled: bool, ) -> Result<()> { instructions::set_enable_route(ctx, origin_token, destination_chain_id, enabled) } @@ -183,7 +183,7 @@ pub mod svm_spoke { pub fn relay_root_bundle( ctx: Context, relayer_refund_root: [u8; 32], - slow_relay_root: [u8; 32] + slow_relay_root: [u8; 32], ) -> Result<()> { instructions::relay_root_bundle(ctx, relayer_refund_root, slow_relay_root) } @@ -203,7 +203,7 @@ pub mod svm_spoke { /// - root_bundle_id: Index of the root bundle that needs to be deleted. pub fn emergency_delete_root_bundle( ctx: Context, - root_bundle_id: u32 + root_bundle_id: u32, ) -> Result<()> { instructions::emergency_delete_root_bundle(ctx, root_bundle_id) } @@ -274,7 +274,7 @@ pub mod svm_spoke { quote_timestamp: u32, fill_deadline: u32, exclusivity_parameter: u32, - message: Vec + message: Vec, ) -> Result<()> { instructions::deposit( ctx, @@ -289,7 +289,7 @@ pub mod svm_spoke { quote_timestamp, fill_deadline, exclusivity_parameter, - message + message, ) } @@ -307,7 +307,7 @@ pub mod svm_spoke { exclusive_relayer: Pubkey, fill_deadline_offset: u32, exclusivity_parameter: u32, - message: Vec + message: Vec, ) -> Result<()> { instructions::deposit_now( ctx, @@ -321,7 +321,7 @@ pub mod svm_spoke { exclusive_relayer, fill_deadline_offset, exclusivity_parameter, - message + message, ) } @@ -345,7 +345,7 @@ pub mod svm_spoke { quote_timestamp: u32, fill_deadline: u32, exclusivity_parameter: u32, - message: Vec + message: Vec, ) -> Result<()> { instructions::unsafe_deposit( ctx, @@ -361,7 +361,7 @@ pub mod svm_spoke { quote_timestamp, fill_deadline, exclusivity_parameter, - message + message, ) } @@ -376,7 +376,7 @@ pub mod svm_spoke { _ctx: Context, signer: Pubkey, depositor: Pubkey, - deposit_nonce: u64 + deposit_nonce: u64, ) -> Result<[u8; 32]> { Ok(utils::get_unsafe_deposit_id(signer, depositor, deposit_nonce)) } @@ -443,7 +443,7 @@ pub mod svm_spoke { _relay_hash: [u8; 32], relay_data: Option, repayment_chain_id: Option, - repayment_address: Option + repayment_address: Option, ) -> Result<()> { instructions::fill_relay(ctx, relay_data, repayment_chain_id, repayment_address) } @@ -538,18 +538,20 @@ pub mod svm_spoke { /// /// execute_relayer_refund_leaf executes in mode where refunds are sent to ATA directly. 
pub fn execute_relayer_refund_leaf<'c, 'info>( - ctx: Context<'_, '_, 'c, 'info, ExecuteRelayerRefundLeaf<'info>> + ctx: Context<'_, '_, 'c, 'info, ExecuteRelayerRefundLeaf<'info>>, ) -> Result<()> - where 'c: 'info + where + 'c: 'info, { instructions::execute_relayer_refund_leaf(ctx, false) } /// Similar to execute_relayer_refund_leaf, but executes in mode where refunds are allocated to claim_account PDAs. pub fn execute_relayer_refund_leaf_deferred<'c, 'info>( - ctx: Context<'_, '_, 'c, 'info, ExecuteRelayerRefundLeaf<'info>> + ctx: Context<'_, '_, 'c, 'info, ExecuteRelayerRefundLeaf<'info>>, ) -> Result<()> - where 'c: 'info + where + 'c: 'info, { instructions::execute_relayer_refund_leaf(ctx, true) } @@ -620,7 +622,7 @@ pub mod svm_spoke { pub fn write_instruction_params_fragment( ctx: Context>, offset: u32, - fragment: Vec + fragment: Vec, ) -> Result<()> { instructions::write_instruction_params_fragment(ctx, offset, fragment) } @@ -704,7 +706,7 @@ pub mod svm_spoke { pub fn request_slow_fill( ctx: Context, _relay_hash: [u8; 32], - relay_data: Option + relay_data: Option, ) -> Result<()> { instructions::request_slow_fill(ctx, relay_data) } @@ -750,7 +752,7 @@ pub mod svm_spoke { _relay_hash: [u8; 32], slow_fill_leaf: Option, _root_bundle_id: Option, - proof: Option> + proof: Option>, ) -> Result<()> { instructions::execute_slow_relay_leaf(ctx, slow_fill_leaf, proof) } @@ -783,7 +785,7 @@ pub mod svm_spoke { /// - authority_bump: The authority bump for the message transmitter. pub fn handle_receive_message<'info>( ctx: Context<'_, '_, '_, 'info, HandleReceiveMessage<'info>>, - params: HandleReceiveMessageParams + params: HandleReceiveMessageParams, ) -> Result<()> { instructions::handle_receive_message(ctx, params) } From 7b46d5e94bcce9bec8dd1c8b6f46fa2facd5b2dc Mon Sep 17 00:00:00 2001 From: Paul <108695806+pxrl@users.noreply.github.com> Date: Wed, 26 Mar 2025 10:06:27 +0000 Subject: [PATCH 10/10] fix --- tasks/enableL1TokenAcrossEcosystem.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tasks/enableL1TokenAcrossEcosystem.ts b/tasks/enableL1TokenAcrossEcosystem.ts index 20409b5b9..cf92201fe 100644 --- a/tasks/enableL1TokenAcrossEcosystem.ts +++ b/tasks/enableL1TokenAcrossEcosystem.ts @@ -1,6 +1,5 @@ import { task, types } from "hardhat/config"; import assert from "assert"; -import { toBN } from "../utils/utils"; import { CHAIN_IDs, MAINNET_CHAIN_IDs, TESTNET_CHAIN_IDs, TOKEN_SYMBOLS_MAP } from "../utils/constants"; import { askYesNoQuestion, resolveTokenOnChain, isTokenSymbol, minimalSpokePoolInterface } from "./utils"; import { TokenSymbol } from "./types"; @@ -68,6 +67,7 @@ task("enable-l1-token-across-ecosystem", "Enable a provided token across the ent const { deployments, ethers } = hre; const { AddressZero: ZERO_ADDRESS } = ethers.constants; const [signer] = await hre.ethers.getSigners(); + const { BigNumber } = ethers; // Remove chainIds that are in the ignore list. const _enabledChainIds = enabledChainIds(hubChainId); @@ -128,7 +128,7 @@ task("enable-l1-token-across-ecosystem", "Enable a provided token across the ent const l1Token = (await ethers.getContractFactory("ExpandedERC20")).attach(l1TokenAddr); const decimals = await l1Token.symbol(); - const depositAmount = toBN(taskArguments.lpAmount).mul(toBN(10).pow(decimals)); + const depositAmount = BigNumber.from(taskArguments.lpAmount).mul(BigNumber.from(10).pow(decimals)); // Ensure to always seed the LP with at least 1 unit of the LP token. console.log(
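
Condensed sketch of the LP-bootstrap flow that the series above converges on (ethers v5). The helper name buildNewLpTokenCalldata is illustrative; the HubPool, LpTokenFactory and ERC-20 contract instances are assumed to be constructed the same way the task builds them (HubPool ABI plus deployments.get("LpTokenFactory")), and the standard ERC-20 decimals() accessor is used here to scale the configurable lpAmount. This is a sketch of the approach, not the exact task code.

import { BigNumber, Contract, constants } from "ethers";

// Illustrative helper: returns the HubPool calldata needed when the L1 token has no LP token yet.
async function buildNewLpTokenCalldata(
  hubPool: Contract, // HubPool instance with the ABI used by the task.
  lpTokenFactory: Contract, // LpTokenFactory, resolved from hubPool.lpTokenFactory().
  l1Token: Contract, // ERC-20 contract for the L1 token being enabled.
  lpAmount = 1 // Whole units of the L1 token to deposit (the task's lpAmount option).
): Promise<string[]> {
  const callData: string[] = [];

  const { lpToken } = await hubPool.pooledTokens(l1Token.address);
  if (lpToken !== constants.AddressZero) return callData; // LP token already exists; nothing to add.

  // Pre-compute the LP token address by statically simulating its creation.
  const newLpToken = await lpTokenFactory.callStatic.createLpToken(l1Token.address);
  console.log(`Enabling liquidity provision on ${l1Token.address} (LP token ${newLpToken}).`);
  callData.push(hubPool.interface.encodeFunctionData("enableL1TokenForLiquidityProvision", [l1Token.address]));

  // Seed the pool so the HubPool never holds a zero balance of the L1 token.
  const decimals: number = await l1Token.decimals();
  const depositAmount = BigNumber.from(lpAmount).mul(BigNumber.from(10).pow(decimals));
  callData.push(hubPool.interface.encodeFunctionData("addLiquidity", [l1Token.address, depositAmount]));

  // The burn itself is left as a manual follow-up step, as in the final patch.
  console.log(`Remember to burn ${depositAmount} units of LP token ${newLpToken} after execution.`);
  return callData;
}

The returned calldata entries are intended to be bundled into the same atomic transaction that the task already constructs for enabling deposit routes, so the deposit lands in the same execution as the LP-token creation.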