diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2c28fce83404..11f9ff1941b3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -22,6 +22,8 @@ jobs: with: fetch-depth: 2 submodules: "recursive" + # FIXME: Temporarily needed for CI + fetch-additional-submodule-history: true - name: Get changed files id: changed-files @@ -44,6 +46,8 @@ jobs: core: - 'core/**' - '!core/CHANGELOG.md' + - 'contracts' + - 'contracts/**' - 'docker/contract-verifier/**' - 'docker/external-node/**' - 'docker/server/**' diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 000000000000..c45fa432297a --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,191 @@ +# ZK Stack Development Guidelines + +## Rebuilding After Changes + +### When to Rebuild zkstackup + +**IMPORTANT:** After making changes to Rust code in the `zkstack_cli` directory, you must rebuild zkstackup for the +changes to take effect: + +```bash +cd /path/to/zksync-working +zkstackup --local +``` + +This is necessary because: + +- The `zkstack` command is a a compiled binary installed in `~/.local/bin/` +- Changes to Rust source code won't take effect until the binary is rebuilt +- Without rebuilding, you'll be running the old version of the CLI + +**When to rebuild:** + +- After modifying any `.rs` files in `zkstack_cli/` +- After modifying Forge script parameters in `zkstack_cli/crates/config/src/forge_interface/script_params.rs` +- After making changes to upgrade command handlers + +## Solidity Upgrade Scripts + +### Ecosystem Upgrade Architecture + +The v31 ecosystem upgrade uses a unified approach that combines both core contract upgrades and CTM (Chain Type Manager) +upgrades: + +``` +EcosystemUpgrade_v31 + ↓ extends +DefaultEcosystemUpgrade + ↓ extends ↓ has instance of +DefaultCoreUpgrade CTMUpgrade_v31 + ↓ extends + DefaultCTMUpgrade +``` + +**Key Features:** + +- `DefaultEcosystemUpgrade` runs both core and CTM upgrades sequentially +- Combines governance calls from both upgrades 
into unified stage0/1/2 calls +- Copies diamond cut data from CTM upgrade to ecosystem output file +- Avoids diamond inheritance conflicts by using composition (CTM upgrade as state variable) + +**Files:** + +- `deploy-scripts/upgrade/default_upgrade/DefaultEcosystemUpgrade.s.sol` - Base class for unified upgrades +- `deploy-scripts/upgrade/v31/EcosystemUpgrade_v31.s.sol` - v31-specific implementation +- `deploy-scripts/upgrade/v31/CTMUpgrade_v31.s.sol` - v31 CTM upgrade +- `deploy-scripts/upgrade/v31/CoreUpgrade_v31.s.sol` - Standalone core upgrade (not used by ecosystem upgrade) + +**Environment Variables:** + +- `V31_UPGRADE_ECOSYSTEM_OUTPUT` - Main output file path (e.g., `/script-out/v31-upgrade-core.toml`) +- `V31_UPGRADE_CTM_OUTPUT` - CTM output file path (e.g., `/script-out/v31-upgrade-ctm.toml`) + +**Output Files:** + +- The ecosystem output file (`v31-upgrade-core.toml`) contains: + - Ecosystem contract addresses + - CTM contract addresses + - **Diamond cut data** for chain upgrades + - Combined governance calls for all upgrade stages + +## Debugging Forge Scripts + +### Common Issues + +1. **"call to non-contract address 0x0"** + + - Usually means a contract hasn't been deployed or registered yet + - Check if required contracts exist at the expected addresses + - For upgrades: ensure chains are registered before running upgrade scripts + +2. **"vm.writeToml: path not allowed"** + + - Check that paths are correctly constructed (relative vs absolute) + - Ensure `vm.projectRoot()` is only concatenated once + - Environment variable paths like `/script-out/...` are relative to project root + +3. 
**Missing diamond cut data** + - Ensure both core and CTM upgrades are running + - Verify `saveCombinedOutput()` is called after CTM upgrade completes + - Check that CTM output file is being read correctly + +### Debugging Failed Transactions with `cast run` + +When you encounter "missing revert data" or unclear transaction failures, use this method to get the full execution +trace: + +**Step 1: Extract transaction details from error** + +From an error like: + +``` +transaction={ "data": "0xd52471c1...", "from": "0x97D2A9...", "to": "0xfe3EE966..." } +``` + +**Step 2: Send the transaction manually with sufficient gas** + +```bash +TX_HASH=$(cast send \ + "" \ + --value \ + --private-key \ + --rpc-url http://127.0.0.1:8545 \ + --gas-price 50gwei \ + --gas-limit 10000000 2>&1 | grep "transactionHash" | awk '{print $2}') +``` + +**Important**: Use `--gas-limit 10000000` to ensure the transaction gets mined even if it reverts. This allows us to +trace it. + +**Step 3: Trace the transaction to see where it failed** + +```bash +cast run $TX_HASH --rpc-url http://127.0.0.1:8545 +``` + +This will show the full call trace with: + +- Every contract call in the execution path +- Function names and parameters +- Where exactly the revert occurred +- The revert reason (e.g., "call to non-contract address 0x0000...") + +**Example**: + +```bash +# From error message, extract: to=0xfe3EE966..., data=0xd52471c1..., value=1050000121535147500000 + +TX_HASH=$(cast send 0xfe3EE966E7790b427F7B078f304C7B4DDCd4bbfe \ + "0xd52471c10000000000000000000000000000000000000000000000000000000000000020..." 
\ + --value 1050000121535147500000 \ + --private-key 0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110 \ + --rpc-url http://127.0.0.1:8545 \ + --gas-price 50gwei \ + --gas-limit 10000000 2>&1 | grep "transactionHash" | awk '{print $2}') + +cast run $TX_HASH --rpc-url http://127.0.0.1:8545 +``` + +This will output: + +``` +Traces: + [99306] Bridgehub::requestL2TransactionDirect(...) + ├─ [92138] BridgehubImpl::requestL2TransactionDirect(...) [delegatecall] + │ ├─ [70552] L1AssetRouter::bridgehubDepositBaseToken(...) + │ │ ├─ [63432] L1AssetRouterImpl::bridgehubDepositBaseToken(...) [delegatecall] + │ │ │ ├─ [48684] NativeTokenVault::bridgeBurn(...) + │ │ │ │ └─ ← [Revert] call to non-contract address 0x0000000000000000000000000000000000000000 +``` + +**Key benefits**: + +- Shows the exact call path leading to the failure +- Reveals which contract call failed and why +- Makes it clear if a required contract is missing or uninitialized +- Much more informative than "missing revert data" errors + +## General Rules + +### NEVER USE try-catch OR staticcall in Upgrade Scripts + +**THIS IS AN ABSOLUTE RULE - NO EXCEPTIONS** + +❌ **FORBIDDEN PATTERNS:** + +- `try contract.someFunction() { ... } catch { ... }` +- `(bool ok, bytes memory data) = target.staticcall(...)` + +✅ **CORRECT APPROACH:** + +- If a function reverts, fix the root cause (missing deployment, wrong order, etc.) +- Check if contracts exist before calling them: `if (address != address(0)) { ... 
}` +- Query protocol version or initialization state +- Restructure when the script runs + +**WHY THIS RULE EXISTS:** + +- try-catch and staticcall hide real errors instead of fixing them +- These patterns make debugging extremely difficult +- They mask initialization issues and timing problems +- The codebase should fail fast and clearly, not silently return defaults diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 000000000000..43c994c2d361 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1 @@ +@AGENTS.md diff --git a/contracts b/contracts index 117c440d6648..e5ad7dc2a450 160000 --- a/contracts +++ b/contracts @@ -1 +1 @@ -Subproject commit 117c440d664836e17d77854033c6bd2d06a75c9e +Subproject commit e5ad7dc2a450db37211c55a67930eb39416baf0c diff --git a/core/lib/dal/.sqlx/query-1e6162df0f4c89e5ddafa76884009fa606de7cdd661842d4be983f8c8f345b92.json b/core/lib/dal/.sqlx/query-1e6162df0f4c89e5ddafa76884009fa606de7cdd661842d4be983f8c8f345b92.json new file mode 100644 index 000000000000..dfe87c92a532 --- /dev/null +++ b/core/lib/dal/.sqlx/query-1e6162df0f4c89e5ddafa76884009fa606de7cdd661842d4be983f8c8f345b92.json @@ -0,0 +1,23 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT COUNT(*)\n FROM l1_batches\n WHERE\n settlement_layer_type = $1\n AND settlement_layer_chain_id = $2\n AND (\n NOT is_sealed\n OR l1_batches.eth_commit_tx_id IS NULL\n OR NOT EXISTS (\n SELECT 1\n FROM eth_txs_history\n WHERE\n eth_tx_id = l1_batches.eth_commit_tx_id\n AND sent_successfully = TRUE\n AND finality_status = 'finalized'\n )\n )\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Text", + "Int8" + ] + }, + "nullable": [ + null + ] + }, + "hash": "1e6162df0f4c89e5ddafa76884009fa606de7cdd661842d4be983f8c8f345b92" +} diff --git a/core/lib/dal/src/blocks_dal.rs b/core/lib/dal/src/blocks_dal.rs index b22530de1cb0..383fca3f0fc2 100644 --- a/core/lib/dal/src/blocks_dal.rs +++ 
b/core/lib/dal/src/blocks_dal.rs @@ -24,6 +24,7 @@ use zksync_types::{ }, commitment::{L1BatchCommitmentArtifacts, L1BatchWithMetadata, PubdataParams}, l2_to_l1_log::{BatchAndChainMerklePath, UserL2ToL1Log}, + settlement::SettlementLayer, writes::TreeWrite, Address, Bloom, L1BatchNumber, L2BlockNumber, ProtocolVersionId, SLChainId, H256, U256, }; @@ -241,6 +242,16 @@ impl BlocksDal<'_, '_> { Ok(Some(header.into())) } + /// Returns latest sealed L1 batch header. Returns `None` if there are no sealed batches. + pub async fn get_latest_sealed_l1_batch_header( + &mut self, + ) -> DalResult> { + let Some(number) = self.get_sealed_l1_batch_number().await? else { + return Ok(None); + }; + self.get_common_l1_batch_header(number).await + } + pub async fn get_sealed_l2_block_number(&mut self) -> DalResult> { let row = sqlx::query!( r#" @@ -3207,6 +3218,47 @@ impl BlocksDal<'_, '_> { Ok(count != 0) } + /// Returns `true` if there is any batch on the provided settlement layer that isn't fully committed yet. + /// This includes unsealed batches and sealed batches without a finalized commit tx. 
+ pub async fn has_uncommitted_batches_on_settlement_layer( + &mut self, + settlement_layer: &SettlementLayer, + ) -> DalResult { + let (settlement_layer_type, settlement_layer_chain_id) = + from_settlement_layer(settlement_layer); + let count = sqlx::query_scalar!( + r#" + SELECT COUNT(*) + FROM l1_batches + WHERE + settlement_layer_type = $1 + AND settlement_layer_chain_id = $2 + AND ( + NOT is_sealed + OR l1_batches.eth_commit_tx_id IS NULL + OR NOT EXISTS ( + SELECT 1 + FROM eth_txs_history + WHERE + eth_tx_id = l1_batches.eth_commit_tx_id + AND sent_successfully = TRUE + AND finality_status = 'finalized' + ) + ) + "#, + settlement_layer_type.as_str(), + settlement_layer_chain_id + ) + .instrument("has_uncommitted_batches_on_settlement_layer") + .with_arg("settlement_layer_type", &settlement_layer_type) + .with_arg("settlement_layer_chain_id", &settlement_layer_chain_id) + .fetch_one(self.storage) + .await? + .unwrap_or(0); + + Ok(count != 0) + } + // methods used for measuring Eth tx stage transition latencies // and emitting metrics base on these measured data pub async fn oldest_uncommitted_batch_timestamp(&mut self) -> DalResult> { diff --git a/core/node/api_server/src/web3/namespaces/unstable/mod.rs b/core/node/api_server/src/web3/namespaces/unstable/mod.rs index ccf4267e833b..5eb1d7005b15 100644 --- a/core/node/api_server/src/web3/namespaces/unstable/mod.rs +++ b/core/node/api_server/src/web3/namespaces/unstable/mod.rs @@ -8,14 +8,12 @@ use zksync_dal::{Connection, Core, CoreDal, DalError}; use zksync_mini_merkle_tree::MiniMerkleTree; use zksync_multivm::{interface::VmEvent, zk_evm_latest::ethereum_types::U64}; use zksync_types::{ - aggregated_operations::L1BatchAggregatedActionType, api, api::{ ChainAggProof, DataAvailabilityDetails, GatewayMigrationStatus, L1ToL2TxsStatus, TeeProof, TransactionDetailedResult, TransactionExecutionInfo, }, - eth_sender::EthTxFinalityStatus, - server_notification::GatewayMigrationState, + 
server_notification::{GatewayMigrationNotification, GatewayMigrationState}, tee_types::TeeType, web3, web3::Bytes, @@ -250,27 +248,6 @@ impl UnstableNamespace { .await .map_err(DalError::generalize)?; - let all_batches_with_interop_roots_committed = match connection - .interop_root_dal() - .get_latest_processed_interop_root_l1_batch_number() - .await - .map_err(DalError::generalize)? - { - None => true, - Some(latest_processed_l1_batch_number) => { - match connection - .eth_sender_dal() - .get_last_sent_successfully_eth_tx_by_batch_and_op( - L1BatchNumber::from(latest_processed_l1_batch_number), - L1BatchAggregatedActionType::Commit, - ) - .await - { - Some(tx) => tx.eth_tx_finality_status == EthTxFinalityStatus::Finalized, - None => false, - } - } - }; let state = GatewayMigrationState::from_sl_and_notification( self.state .api_config @@ -278,6 +255,34 @@ impl UnstableNamespace { .settlement_layer_for_sending_txs(), latest_notification, ); + let settlement_layer = self.state.api_config.settlement_layer.settlement_layer(); + let has_uncommitted_batches = connection + .blocks_dal() + .has_uncommitted_batches_on_settlement_layer(&settlement_layer) + .await + .map_err(DalError::generalize)?; + let latest_sealed_matches_expected = match ( + latest_notification, + connection + .blocks_dal() + .get_latest_sealed_l1_batch_header() + .await + .map_err(DalError::generalize)?, + ) { + (Some(GatewayMigrationNotification::ToGateway), Some(header)) => { + header.settlement_layer.is_gateway() + } + (Some(GatewayMigrationNotification::FromGateway), Some(header)) => { + !header.settlement_layer.is_gateway() + } + (Some(_), None) => false, + (None, _) => true, + }; + let all_batches_committed = if state == GatewayMigrationState::InProgress { + !has_uncommitted_batches + } else { + !has_uncommitted_batches && latest_sealed_matches_expected + }; Ok(GatewayMigrationStatus { latest_notification, @@ -287,7 +292,7 @@ impl UnstableNamespace { .api_config .settlement_layer 
.settlement_layer_for_sending_txs(), - wait_for_batches_to_be_committed: !all_batches_with_interop_roots_committed, + wait_for_batches_to_be_committed: !all_batches_committed, }) } diff --git a/core/node/eth_sender/src/eth_tx_aggregator.rs b/core/node/eth_sender/src/eth_tx_aggregator.rs index 906b081557ce..3bc2080897eb 100644 --- a/core/node/eth_sender/src/eth_tx_aggregator.rs +++ b/core/node/eth_sender/src/eth_tx_aggregator.rs @@ -23,10 +23,7 @@ use zksync_types::{ AggregatedActionType, L1BatchAggregatedActionType, L2BlockAggregatedActionType, }, commitment::{L1BatchWithMetadata, L2DACommitmentScheme, SerializeCommitment}, - eth_sender::{ - EthTx, EthTxBlobSidecar, EthTxBlobSidecarV1, EthTxBlobSidecarV2, EthTxFinalityStatus, - SidecarBlobV1, - }, + eth_sender::{EthTx, EthTxBlobSidecar, EthTxBlobSidecarV1, EthTxBlobSidecarV2, SidecarBlobV1}, ethabi::{Function, Token}, l2_to_l1_log::UserL2ToL1Log, protocol_version::{L1VerifierConfig, PACKED_SEMVER_MINOR_MASK}, @@ -34,7 +31,7 @@ use zksync_types::{ server_notification::GatewayMigrationState, settlement::SettlementLayer, web3::{contract::Error as Web3ContractError, BlockId, BlockNumber, CallRequest}, - Address, L1BatchNumber, L2ChainId, ProtocolVersionId, SLChainId, H256, U256, + Address, L2ChainId, ProtocolVersionId, SLChainId, H256, U256, }; use super::aggregated_operations::{ @@ -859,11 +856,11 @@ impl EthTxAggregator { // so there is no restriction for prove and execute operations if matches!(self.settlement_layer, Some(SettlementLayer::Gateway(_))) { if self - .is_waiting_for_batches_with_interop_roots_to_be_committed(storage) + .is_waiting_for_batches_with_current_settlement_layer_to_be_committed(storage) .await? { - // For the migration from gateway to L1, we need to ensure all batches containing interop roots - // get committed and executed. Once this happens, we can re-enable commit & precommit. 
+ // For migration from gateway to L1, keep commits/precommits flowing + // once old settlement layer batches are finalized. op_restrictions.commit_restriction = None; op_restrictions.precommit_restriction = None; } @@ -1380,34 +1377,21 @@ impl EthTxAggregator { GatewayMigrationState::from_sl_and_notification(self.settlement_layer, notification) } - async fn is_waiting_for_batches_with_interop_roots_to_be_committed( + /// Returns `true` if there are batches on the current settlement layer not yet committed. + /// Used to block gateway migration until all batches are finalized. + async fn is_waiting_for_batches_with_current_settlement_layer_to_be_committed( &self, storage: &mut Connection<'_, Core>, ) -> Result { - let latest_processed_l1_batch_number = storage - .interop_root_dal() - .get_latest_processed_interop_root_l1_batch_number() + let settlement_layer = self + .settlement_layer + .expect("settlement layer should be known"); + let has_uncommitted_batches = storage + .blocks_dal() + .has_uncommitted_batches_on_settlement_layer(&settlement_layer) .await?; - if latest_processed_l1_batch_number.is_none() { - return Ok(false); - } - - let last_sent_successfully_eth_tx = storage - .eth_sender_dal() - .get_last_sent_successfully_eth_tx_by_batch_and_op( - L1BatchNumber::from(latest_processed_l1_batch_number.unwrap()), - L1BatchAggregatedActionType::Commit, - ) - .await; - - if last_sent_successfully_eth_tx - .is_some_and(|tx| tx.eth_tx_finality_status == EthTxFinalityStatus::Finalized) - { - return Ok(false); - } - - Ok(true) + Ok(has_uncommitted_batches) } } diff --git a/core/tests/gateway-migration-test/tests/migration.test.ts b/core/tests/gateway-migration-test/tests/migration.test.ts index dcb1c95b2d58..8716dcec2303 100644 --- a/core/tests/gateway-migration-test/tests/migration.test.ts +++ b/core/tests/gateway-migration-test/tests/migration.test.ts @@ -11,6 +11,12 @@ import path from 'path'; import { logsTestPath } from 'utils/build/logs'; import { 
getEcosystemContracts } from 'utils/build/tokens'; import { getMainWalletPk } from 'highlevel-test-tools/src/wallets'; +import { + waitForAllBatchesToBeExecuted, + waitForMigrationReadyForFinalize, + RpcHealthGuard +} from 'highlevel-test-tools/src'; +import { FileMutex } from 'highlevel-test-tools/src/file-mutex'; async function logsPath(name: string): Promise { return await logsTestPath(fileConfig.chain, 'logs/migration/', name); @@ -24,6 +30,7 @@ const ZK_CHAIN_INTERFACE = JSON.parse( ).abi; const depositAmount = ethers.parseEther('0.001'); +const migrationMutex = new FileMutex(); interface GatewayInfo { gatewayChainId: string; @@ -50,6 +57,7 @@ describe('Migration from gateway test', function () { let web3JsonRpc: string | undefined; let mainNodeSpawner: utils.NodeSpawner; + let gatewayRpcUrl: string; before('Create test wallet', async () => { direction = process.env.DIRECTION || 'TO'; @@ -93,7 +101,7 @@ describe('Migration from gateway test', function () { await mainNodeSpawner.killAndSpawnMainNode(); - const gatewayRpcUrl = secretsConfig.l1.gateway_rpc_url; + gatewayRpcUrl = secretsConfig.l1.gateway_rpc_url; tester = await Tester.init(ethProviderAddress!, web3JsonRpc!, gatewayRpcUrl!); alice = tester.emptyWallet(); @@ -132,9 +140,7 @@ describe('Migration from gateway test', function () { to: alice.address }); await firstDepositHandle.wait(); - while ((await tester.web3Provider.getL1BatchNumber()) <= initialL1BatchNumber) { - await utils.sleep(1); - } + await waitForBatchAdvance(tester, initialL1BatchNumber, 'first deposit'); const secondDepositHandle = await tester.syncWallet.deposit({ token: baseToken, @@ -142,9 +148,7 @@ describe('Migration from gateway test', function () { to: alice.address }); await secondDepositHandle.wait(); - while ((await tester.web3Provider.getL1BatchNumber()) <= initialL1BatchNumber + 1) { - await utils.sleep(1); - } + await waitForBatchAdvance(tester, initialL1BatchNumber + 1, 'second deposit'); const balance = await 
alice.getBalance(); expect(balance === depositAmount * 2n, 'Incorrect balance after deposits').to.be.true; @@ -162,9 +166,7 @@ describe('Migration from gateway test', function () { to: alice.address }); await thirdDepositHandle.wait(); - while ((await tester.web3Provider.getL1BatchNumber()) <= initialL1BatchNumber) { - await utils.sleep(1); - } + await waitForBatchAdvance(tester, initialL1BatchNumber, 'third deposit'); // kl todo add an L2 token and withdrawal here, to check token balance migration properly. @@ -179,7 +181,10 @@ describe('Migration from gateway test', function () { }); step('Pause deposits before initiating migration', async () => { - await utils.spawn(`zkstack chain pause-deposits --chain ${fileConfig.chain}`); + await zkstackExecWithMutex( + `zkstack chain pause-deposits --chain ${fileConfig.chain}`, + 'pausing deposits before initiating migration' + ); // Wait until the priority queue is empty let tryCount = 0; @@ -187,15 +192,21 @@ describe('Migration from gateway test', function () { tryCount += 1; await utils.sleep(1); } - console.error('tryCount', tryCount); }); step('Migrate to/from gateway', async () => { if (direction == 'TO') { - await utils.spawn(`zkstack chain gateway notify-about-to-gateway-update --chain ${fileConfig.chain}`); + await zkstackExecWithMutex( + `zkstack chain gateway notify-about-to-gateway-update --chain ${fileConfig.chain}`, + 'notifying about to gateway update' + ); } else { - await utils.spawn(`zkstack chain gateway notify-about-from-gateway-update --chain ${fileConfig.chain}`); + await zkstackExecWithMutex( + `zkstack chain gateway notify-about-from-gateway-update --chain ${fileConfig.chain}`, + 'notifying about from gateway update' + ); } + // Trying to send a transaction from the same address again await checkedRandomTransfer(alice, 1n); @@ -203,40 +214,76 @@ describe('Migration from gateway test', function () { // where there is an inflight transaction before the migration is complete. 
// If you encounter an error, such as a failed transaction, after the migration, // this area might be worth revisiting to wait for unconfirmed transactions on the server. - await utils.sleep(30); - - // Wait for all batches to be executed - await waitForAllBatchesToBeExecuted(); + await waitForAllBatchesToBeExecuted(fileConfig.chain!); if (direction == 'TO') { - const maxRetries = 3; - for (let i = 0; i < maxRetries; i++) { - try { - // We use utils.exec instead of utils.spawn to capture stdout/stderr - await utils.exec( - `zkstack chain gateway migrate-to-gateway --chain ${fileConfig.chain} --gateway-chain-name ${gatewayChain}` - ); - break; - } catch (error) { - if (i === maxRetries - 1) { - console.error(`Gateway migration failed after ${maxRetries} attempts.`); - throw error; - } - console.log(`Gateway migration failed (attempt ${i + 1}/${maxRetries}). Retrying...`); - } - } + await zkstackExecWithMutex( + `zkstack chain gateway migrate-to-gateway --chain ${fileConfig.chain} --gateway-chain-name ${gatewayChain}`, + 'gateway migration' + ); + + // Wait until the migration is ready to finalize without holding the mutex. + await waitForMigrationReadyForFinalize(fileConfig.chain!); + + await zkstackExecWithMutex( + `zkstack chain gateway finalize-chain-migration-to-gateway --chain ${fileConfig.chain} --gateway-chain-name ${gatewayChain} --deploy-paymaster`, + 'finalizing gateway migration' + ); } else { let migrationSucceeded = false; - for (let i = 0; i < 60; i++) { + let tryCount = 0; + // Health guards detect dead servers early instead of burning through all 60 retries. + // Chain server: dies when migration is initiated (expected) → treat as success. + // Gateway: dies independently (unexpected) → abort immediately. 
+ const chainHealth = new RpcHealthGuard(web3JsonRpc!, 3, 'chain server'); + const gwHealth = new RpcHealthGuard(gatewayRpcUrl, 3, 'gateway'); + + while (!migrationSucceeded && tryCount < 60) { + if (tryCount > 0) { + const chainStatus = await chainHealth.check(); + if (chainStatus === 'dead') { + console.log( + `Migration was likely already initiated and chain server shut down as expected. Proceeding to restart.` + ); + migrationSucceeded = true; + break; + } + if (chainStatus === 'failing') { + await utils.sleep(10); + tryCount++; + continue; + } + + const gwStatus = await gwHealth.check(); + if (gwStatus === 'dead') { + throw new Error(`Gateway server unreachable at ${gatewayRpcUrl}. Aborting migration retries.`); + } + if (gwStatus === 'failing') { + await utils.sleep(10); + tryCount++; + continue; + } + } + try { - await utils.spawn( - `zkstack chain gateway migrate-from-gateway --chain ${fileConfig.chain} --gateway-chain-name ${gatewayChain}` - ); - migrationSucceeded = true; - break; + // Acquire mutex for migration attempt + console.log(`🔒 Acquiring mutex for migration attempt ${tryCount}...`); + await migrationMutex.acquire(); + console.log(`✅ Mutex acquired for migration attempt ${tryCount}`); + + try { + await utils.spawn( + `zkstack chain gateway migrate-from-gateway --chain ${fileConfig.chain} --gateway-chain-name ${gatewayChain}` + ); + migrationSucceeded = true; + } finally { + // Always release the mutex + migrationMutex.release(); + } } catch (e) { - console.log(`Migration attempt ${i} failed with error: ${e}`); - await utils.sleep(2); + tryCount++; + console.log(`Migration attempt ${tryCount}/60 failed with error: ${e}`); + await utils.sleep(10); } } @@ -269,7 +316,6 @@ describe('Migration from gateway test', function () { }); step('Wait for block finalization', async () => { - await utils.spawn(`zkstack server wait --ignore-prerequisites --verbose --chain ${fileConfig.chain}`); // Execute an L2 transaction const txHandle = await 
checkedRandomTransfer(alice, 1n); await txHandle.waitFinalize(); @@ -282,7 +328,10 @@ describe('Migration from gateway test', function () { const chainId = (await tester.syncWallet.provider!.getNetwork()).chainId; const migrationNumberL1 = await ecosystemContracts.assetTracker.assetMigrationNumber(chainId, assetId); - await utils.spawn(`zkstack dev init-test-wallet --chain gateway`); + await zkstackExecWithMutex( + `zkstack dev init-test-wallet --chain gateway`, + 'initializing test wallet for gateway' + ); const gatewayInfo = getGatewayInfo(pathToHome, fileConfig.chain!); const gatewayEcosystemContracts = await getEcosystemContracts( @@ -318,10 +367,18 @@ describe('Migration from gateway test', function () { /// Verify that the precommits are enabled on the gateway. This check is enough for making sure // precommits are working correctly. The rest of the checks are done by contract. step('Verify precommits', async () => { + const precommitStart = Date.now(); + const precommitTimeoutMs = 5 * 60 * 1000; let gwCommittedBatches = await gwMainContract.getTotalBatchesCommitted(); while (gwCommittedBatches === 1) { + if (Date.now() - precommitStart > precommitTimeoutMs) { + throw new Error( + `Verify precommits: timed out after ${(precommitTimeoutMs / 1000).toFixed(0)}s waiting for gateway batch commits (stuck at ${gwCommittedBatches})` + ); + } console.log(`Waiting for at least one batch committed batch on gateway... ${gwCommittedBatches}`); await utils.sleep(1); + gwCommittedBatches = await gwMainContract.getTotalBatchesCommitted(); } // Now we sure that we have at least one batch was committed from the gateway @@ -338,12 +395,23 @@ describe('Migration from gateway test', function () { // TODO: When support is restored in future versions, remove this negative test. 
step('Migrating back to gateway fails', async () => { // Pause deposits before trying migration back to gateway - await utils.spawn(`zkstack chain pause-deposits --chain ${fileConfig.chain}`); + await zkstackExecWithMutex( + `zkstack chain pause-deposits --chain ${fileConfig.chain}`, + 'pausing deposits before migrating back to gateway' + ); + + // Wait until the priority queue is empty + let tryCount = 0; + while ((await getPriorityQueueSize()) > 0 && tryCount < 100) { + tryCount += 1; + await utils.sleep(1); + } try { // We use utils.exec instead of utils.spawn to capture stdout/stderr for assertion - await utils.exec( - `zkstack chain gateway migrate-to-gateway --chain ${fileConfig.chain} --gateway-chain-name ${gatewayChain}` + await zkstackExecWithMutex( + `zkstack chain gateway migrate-to-gateway --chain ${fileConfig.chain} --gateway-chain-name ${gatewayChain}`, + 'migrating back to gateway' ); expect.fail('Migrating back to gateway should have failed'); } catch (e: any) { @@ -366,34 +434,6 @@ describe('Migration from gateway test', function () { return await chainGatewayContract.getPriorityQueueSize(); } } - - async function waitForAllBatchesToBeExecuted() { - let tryCount = 0; - let totalBatchesCommitted = await getTotalBatchesCommitted(); - let totalBatchesExecuted = await getTotalBatchesExecuted(); - while (totalBatchesCommitted !== totalBatchesExecuted && tryCount < 100) { - tryCount += 1; - await utils.sleep(1); - totalBatchesCommitted = await getTotalBatchesCommitted(); - totalBatchesExecuted = await getTotalBatchesExecuted(); - } - } - - async function getTotalBatchesCommitted() { - if (direction == 'TO') { - return await l1MainContract.getTotalBatchesCommitted(); - } else { - return await chainGatewayContract.getTotalBatchesCommitted(); - } - } - - async function getTotalBatchesExecuted() { - if (direction == 'TO') { - return await l1MainContract.getTotalBatchesExecuted(); - } else { - return await chainGatewayContract.getTotalBatchesExecuted(); - } - 
} }); async function checkedRandomTransfer(sender: zksync.Wallet, amount: bigint): Promise { @@ -438,6 +478,45 @@ async function mintToAddress( await (await l1Erc20Contract.mint(addressToMintTo, amountToMint)).wait(); } +async function zkstackExecWithMutex(command: string, name: string) { + try { + // Acquire mutex for zkstack exec + console.log(`🔒 Acquiring mutex for ${name} of ${fileConfig.chain}...`); + await migrationMutex.acquire(); + console.log(`✅ Mutex acquired for ${name} of ${fileConfig.chain}`); + + try { + await utils.exec(command); + + console.log(`✅ Successfully executed ${name} for chain ${fileConfig.chain}`); + } finally { + // Always release the mutex + migrationMutex.release(); + } + } catch (e) { + console.error(`❌ Failed to execute ${name} for chain ${fileConfig.chain}:`, e); + throw e; + } +} + +async function waitForBatchAdvance( + tester: Tester, + pastBatchNumber: number, + label: string, + timeoutMs: number = 5 * 60 * 1000 +): Promise { + const start = Date.now(); + while ((await tester.web3Provider.getL1BatchNumber()) <= pastBatchNumber) { + if (Date.now() - start > timeoutMs) { + const current = await tester.web3Provider.getL1BatchNumber(); + throw new Error( + `waitForBatchAdvance(${label}): timed out after ${(timeoutMs / 1000).toFixed(0)}s waiting for batch > ${pastBatchNumber} (current: ${current})` + ); + } + await utils.sleep(1); + } +} + export function getGatewayInfo(pathToHome: string, chain: string): GatewayInfo | null { const gatewayChainConfig = loadConfig({ pathToHome, diff --git a/core/tests/highlevel-test-tools/src/deadline.ts b/core/tests/highlevel-test-tools/src/deadline.ts new file mode 100644 index 000000000000..a85031c221d5 --- /dev/null +++ b/core/tests/highlevel-test-tools/src/deadline.ts @@ -0,0 +1,115 @@ +/** + * Returns true if the error looks like a TCP connection failure (server is dead). 
+ */ +export function isConnectionError(error: unknown): boolean { + const msg = String(error).toLowerCase(); + return ( + msg.includes('connection refused') || + msg.includes('econnrefused') || + msg.includes('socket hang up') || + msg.includes('econnreset') + ); +} + +/** + * Quick health check: sends a trivial RPC call to verify the server is reachable. + * Returns true if the server responds, false if connection is refused / unreachable. + */ +export async function checkRpcHealth(rpcUrl: string): Promise { + try { + const resp = await fetch(rpcUrl, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ jsonrpc: '2.0', id: 1, method: 'eth_blockNumber', params: [] }), + signal: AbortSignal.timeout(5000) + }); + return resp.ok; + } catch { + return false; + } +} + +export type HealthStatus = 'healthy' | 'failing' | 'dead'; + +/** + * Tracks consecutive RPC health-check failures for a single endpoint. + * + * const guard = new RpcHealthGuard(url, 3, 'gateway'); + * const status = await guard.check(); // 'healthy' | 'failing' | 'dead' + */ +export class RpcHealthGuard { + private consecutiveFailures = 0; + + constructor( + private rpcUrl: string, + private maxFailures: number = 3, + private label: string = rpcUrl + ) {} + + async check(): Promise { + const healthy = await checkRpcHealth(this.rpcUrl); + if (healthy) { + this.consecutiveFailures = 0; + return 'healthy'; + } + this.consecutiveFailures++; + console.log(`⚠️ Health check failed for ${this.label} (${this.consecutiveFailures}/${this.maxFailures})`); + return this.consecutiveFailures >= this.maxFailures ? 'dead' : 'failing'; + } +} + +/** + * Polls `fn` until it returns a non-null/non-undefined value, or the deadline expires. + * + * @param fn - Async function that returns `T` when the condition is met, or `null`/`undefined` to keep waiting. + * If `fn` throws, the error is logged and treated as "not ready yet" (retried). 
+ * However, connection errors (ECONNREFUSED etc.) are treated as fatal after 3 consecutive occurrences. + * @param opts.timeoutMs - Maximum wall-clock time to wait before throwing. + * @param opts.intervalMs - Sleep between polls (default 2000). + * @param opts.label - Human-readable context included in the timeout error. + * @param opts.failOnConnectionError - If true (default), connection errors abort after 3 consecutive occurrences. + * @returns The first non-null/non-undefined value returned by `fn`. + */ +export async function withDeadline( + fn: () => Promise, + opts: { timeoutMs: number; intervalMs?: number; label: string; failOnConnectionError?: boolean } +): Promise { + const { timeoutMs, intervalMs = 2000, label, failOnConnectionError = true } = opts; + const start = Date.now(); + let consecutiveConnectionErrors = 0; + + while (true) { + const elapsed = Date.now() - start; + if (elapsed >= timeoutMs) { + throw new Error( + `[withDeadline] ${label}: timed out after ${(elapsed / 1000).toFixed(1)}s (limit: ${(timeoutMs / 1000).toFixed(0)}s)` + ); + } + + try { + const result = await fn(); + if (result !== null && result !== undefined) { + return result; + } + consecutiveConnectionErrors = 0; + } catch (error) { + if (failOnConnectionError && isConnectionError(error)) { + consecutiveConnectionErrors++; + if (consecutiveConnectionErrors >= 3) { + throw new Error( + `[withDeadline] ${label}: server unreachable (${consecutiveConnectionErrors} consecutive connection errors). 
Last error: ${error}` + ); + } + console.warn( + `[withDeadline] ${label}: connection error (${consecutiveConnectionErrors}/3 before abort): ${error}` + ); + } else { + consecutiveConnectionErrors = 0; + const remaining = ((timeoutMs - elapsed) / 1000).toFixed(0); + console.warn(`[withDeadline] ${label}: poll error (${remaining}s remaining): ${error}`); + } + } + + await new Promise((resolve) => setTimeout(resolve, intervalMs)); + } +} diff --git a/core/tests/highlevel-test-tools/src/file-mutex.ts b/core/tests/highlevel-test-tools/src/file-mutex.ts index 7d99cc4e5cef..28d2a9265321 100644 --- a/core/tests/highlevel-test-tools/src/file-mutex.ts +++ b/core/tests/highlevel-test-tools/src/file-mutex.ts @@ -1,5 +1,17 @@ import * as fs from 'fs'; import * as path from 'path'; +import { findHome } from './zksync-home'; + +/** + * Maximum age (in ms) before a lock is considered stale regardless of PID liveness. + * Guards against PID reuse: even if the PID is alive, a very old lock was almost + * certainly left by a dead process whose PID was recycled. + * + * Must be longer than the longest critical section. The heaviest holder is + * create-chain.ts which runs `chain create` + `chain init` under the lock + * and can take 5–10 minutes under parallel load. 15 minutes gives safe headroom. 
+ */ +const STALE_LOCK_AGE_MS = 15 * 60 * 1000; /** * Simple file-based mutex implementation for Node.js @@ -9,12 +21,16 @@ export class FileMutex { private lockDir: string; constructor() { - this.lockDir = '.'; + try { + this.lockDir = findHome(); + } catch (_) { + this.lockDir = '.'; + } this.lockFile = path.join(this.lockDir, 'highlevel_tests.lock'); } async acquire(): Promise { - const maxRetries = 600 * 10; // 10 minutes with 100ms intervals + const maxRetries = 600 * 12; // 12 minutes with 100ms intervals const retryDelay = 100; // 100ms for (let i = 0; i < maxRetries; i++) { @@ -35,6 +51,11 @@ export class FileMutex { return; } catch (error: any) { if (error.code === 'EEXIST') { + // Check if the existing lock is stale (dead owner or too old) + if (this.tryRecoverStaleLock()) { + // Lock was stale and removed — retry immediately + continue; + } await new Promise((resolve) => setTimeout(resolve, retryDelay)); } else { throw error; @@ -53,6 +74,50 @@ export class FileMutex { console.warn(`Warning: Failed to release mutex lock: ${this.lockFile}`, error); } } + + /** + * Checks whether the current lock holder is still alive. If the owning + * process is dead or the lock is older than STALE_LOCK_AGE_MS, removes + * the lock file and returns true so the caller can retry acquisition. + */ + private tryRecoverStaleLock(): boolean { + try { + const content = fs.readFileSync(this.lockFile, 'utf8'); + const info = JSON.parse(content) as { pid?: number; timestamp?: string; command?: string }; + + const lockAge = info.timestamp ? Date.now() - new Date(info.timestamp).getTime() : Infinity; + + // If the lock is older than the threshold, treat as stale regardless of PID. + // This guards against PID reuse on long-running CI hosts. 
+ if (lockAge > STALE_LOCK_AGE_MS) { + console.warn( + `Removing stale mutex lock (age: ${(lockAge / 1000).toFixed(0)}s, ` + + `pid: ${info.pid}, command: ${info.command}): ${this.lockFile}` + ); + fs.unlinkSync(this.lockFile); + return true; + } + + // If PID is present, check whether the process is still alive. + if (info.pid) { + try { + process.kill(info.pid, 0); // signal 0 = existence check, no actual signal sent + } catch { + // process.kill throws if the PID doesn't exist — lock is stale. + console.warn( + `Removing stale mutex lock (owner pid ${info.pid} is dead, ` + + `age: ${(lockAge / 1000).toFixed(0)}s): ${this.lockFile}` + ); + fs.unlinkSync(this.lockFile); + return true; + } + } + } catch { + // If we can't read/parse the lock file, leave it alone and let the + // normal retry loop handle it. + } + return false; + } } /** @@ -92,7 +157,10 @@ export function cleanTestChains(chainsDir: string = './chains'): void { * Cleans up any leftover mutex lock files from previous test runs */ export function cleanMutexLockFiles(): void { - const mutexLockFile = 'highlevel_tests.lock'; + let mutexLockFile = 'highlevel_tests.lock'; + try { + mutexLockFile = path.join(findHome(), 'highlevel_tests.lock'); + } catch (_) {} if (fs.existsSync(mutexLockFile)) { try { fs.unlinkSync(mutexLockFile); diff --git a/core/tests/highlevel-test-tools/src/gateway.ts b/core/tests/highlevel-test-tools/src/gateway.ts index 07727519586c..af810c8d9741 100644 --- a/core/tests/highlevel-test-tools/src/gateway.ts +++ b/core/tests/highlevel-test-tools/src/gateway.ts @@ -1,11 +1,22 @@ import { executeCommand } from './execute-command'; import { FileMutex } from './file-mutex'; -import { startServer } from './start-server'; +import { findHome } from './zksync-home'; +import { withDeadline } from './deadline'; +import * as utils from 'utils'; +import { loadConfig } from 'utils/build/file-configs'; +import * as ethers from 'ethers'; +import * as zksync from 'zksync-ethers'; /** * Global mutex 
for gateway migration to prevent concurrent migrations */ const gatewayMutex = new FileMutex(); +/** + * Constants for migration readiness check + */ +const MIGRATION_STARTED_TOPIC = ethers.id('MigrationStarted(uint256,uint256,bytes32,uint256)'); +const BLOCK_SEARCH_RANGE = 5000; +const MAX_BLOCK_LOOKBACK = 200000; /** * Migrates a chain to gateway if the USE_GATEWAY_CHAIN environment variable is set to 'WITH_GATEWAY' @@ -30,33 +41,33 @@ export async function migrateToGatewayIfNeeded(chainName: string): Promise console.log(`✅ Mutex acquired for gateway migration of ${chainName}`); try { - const maxRetries = 3; - for (let i = 0; i < maxRetries; i++) { - try { - await executeCommand( - 'zkstack', - [ - 'chain', - 'gateway', - 'migrate-to-gateway', - '--chain', - chainName, - '--gateway-chain-name', - 'gateway' - ], - chainName, - 'gateway_migration' - ); - break; - } catch (error) { - if (i === maxRetries - 1) { - console.error(`❌ Gateway migration failed after ${maxRetries} attempts.`); - throw error; - } - console.log(`⚠️ Gateway migration failed (attempt ${i + 1}/${maxRetries}). Retrying...`); - } - } + await executeCommand( + 'zkstack', + ['chain', 'gateway', 'migrate-to-gateway', '--chain', chainName, '--gateway-chain-name', 'gateway'], + chainName, + 'gateway_migration' + ); + + console.log(`✅ Successfully migrated chain ${chainName} to gateway`); + } finally { + // Always release the mutex + gatewayMutex.release(); + } + } catch (error) { + console.error(`❌ Failed to migrate chain ${chainName} to gateway:`, error); + throw error; + } + + // Wait until the migration is ready to finalize without holding the mutex. 
+ await waitForMigrationReadyForFinalize(chainName); + try { + // Acquire mutex for finalizing gateway migration + console.log(`🔒 Acquiring mutex for finalizing gateway migration of ${chainName}...`); + await gatewayMutex.acquire(); + console.log(`✅ Mutex acquired for finalizing gateway migration of ${chainName}`); + + try { await executeCommand( 'zkstack', [ @@ -73,13 +84,152 @@ export async function migrateToGatewayIfNeeded(chainName: string): Promise 'gateway_migration' ); - console.log(`✅ Successfully migrated chain ${chainName} to gateway`); + console.log(`✅ Successfully finalized migration of chain ${chainName} to gateway`); } finally { // Always release the mutex gatewayMutex.release(); } } catch (error) { - console.error(`❌ Failed to migrate chain ${chainName} to gateway:`, error); + console.error(`❌ Failed to finalize migration of chain ${chainName} to gateway:`, error); throw error; } } + +function loadMigrationFinalizeCheckConfig(chainName: string) { + const pathToHome = findHome(); + const contractsConfig = loadConfig({ + pathToHome, + chain: chainName, + config: 'contracts.yaml' + }); + const secretsConfig = loadConfig({ + pathToHome, + chain: chainName, + config: 'secrets.yaml' + }); + const genesisConfig = loadConfig({ + pathToHome, + chain: chainName, + config: 'genesis.yaml' + }); + const gatewayContractsConfig = loadConfig({ + pathToHome, + chain: 'gateway', + config: 'contracts.yaml' + }); + + const l1RpcUrl = secretsConfig?.l1?.l1_rpc_url; + const gatewayRpcUrl = secretsConfig?.l1?.gateway_rpc_url; + const bridgehubProxyAddr = contractsConfig?.ecosystem_contracts?.bridgehub_proxy_addr; + const gatewayDiamondProxyAddr = gatewayContractsConfig?.l1?.diamond_proxy_addr; + const l2ChainId = Number(genesisConfig?.l2_chain_id); + + if (!l1RpcUrl || !gatewayRpcUrl || !bridgehubProxyAddr || !gatewayDiamondProxyAddr || !Number.isFinite(l2ChainId)) { + throw new Error( + `Missing gateway migration config for chain ${chainName} ` + + 
`(l1RpcUrl=${!!l1RpcUrl}, gatewayRpcUrl=${!!gatewayRpcUrl}, bridgehubProxyAddr=${!!bridgehubProxyAddr}, ` + + `gatewayDiamondProxyAddr=${!!gatewayDiamondProxyAddr}, l2ChainId=${l2ChainId})` + ); + } + + return { + l1RpcUrl, + gatewayRpcUrl, + l2ChainId, + bridgehubProxyAddr, + gatewayDiamondProxyAddr + }; +} + +async function findLatestMigrationTxHash( + l1Provider: ethers.JsonRpcProvider, + chainAssetHandlerAddr: string, + l2ChainId: number +): Promise { + const latestBlock = await l1Provider.getBlockNumber(); + const chainIdTopic = ethers.zeroPadValue(ethers.toBeHex(l2ChainId), 32); + + for ( + let toBlock = latestBlock; + toBlock >= 0 && latestBlock - toBlock <= MAX_BLOCK_LOOKBACK; + toBlock -= BLOCK_SEARCH_RANGE + ) { + const fromBlock = Math.max(0, toBlock - BLOCK_SEARCH_RANGE + 1); + const logs = await l1Provider.getLogs({ + address: chainAssetHandlerAddr, + topics: [MIGRATION_STARTED_TOPIC, chainIdTopic], + fromBlock, + toBlock + }); + + if (logs.length > 0) { + return logs[logs.length - 1].transactionHash; + } + } + + return null; +} + +async function isMigrationReadyForFinalize(chainName: string): Promise { + const config = loadMigrationFinalizeCheckConfig(chainName); + const l1Provider = new ethers.JsonRpcProvider(config.l1RpcUrl); + const gatewayProvider = new zksync.Provider(config.gatewayRpcUrl); + const bridgehub = new ethers.Contract( + config.bridgehubProxyAddr, + ['function chainAssetHandler() view returns (address)'], + l1Provider + ); + const chainAssetHandlerAddr = await bridgehub.chainAssetHandler(); + + const migrationTxHash = await findLatestMigrationTxHash(l1Provider, chainAssetHandlerAddr, config.l2ChainId); + if (!migrationTxHash) { + console.log( + `[${chainName}] MigrationStarted event not found on chainAssetHandler=${chainAssetHandlerAddr} ` + + `for chainId=${config.l2ChainId}, topic=${MIGRATION_STARTED_TOPIC}` + ); + return false; + } + const receipt = await l1Provider.getTransactionReceipt(migrationTxHash); + if (!receipt) { + 
console.log(`[${chainName}] No receipt for migrationTxHash=${migrationTxHash}`); + return false; + } + + const gatewayMainContract = await gatewayProvider.getMainContractAddress(); + const priorityOpHash = zksync.utils.getL2HashFromPriorityOp(receipt, gatewayMainContract); + const l2Receipt = await gatewayProvider.getTransactionReceipt(priorityOpHash); + if (!l2Receipt?.l1BatchNumber) { + console.log( + `[${chainName}] L2 receipt not ready: priorityOpHash=${priorityOpHash}, ` + + `l1BatchNumber=${l2Receipt?.l1BatchNumber ?? 'null'}` + ); + return false; + } + + const gatewayDiamondProxy = new ethers.Contract( + config.gatewayDiamondProxyAddr, + ['function getTotalBatchesExecuted() view returns (uint256)'], + l1Provider + ); + + const totalExecuted = BigInt(await gatewayDiamondProxy.getTotalBatchesExecuted()); + const batchNumber = BigInt(l2Receipt.l1BatchNumber); + if (totalExecuted < batchNumber) { + console.log(`[${chainName}] Batch not yet executed: totalExecuted=${totalExecuted}, needed=${batchNumber}`); + } + return totalExecuted >= batchNumber; +} + +export async function waitForMigrationReadyForFinalize(chainName: string): Promise { + await withDeadline( + async () => { + if (await isMigrationReadyForFinalize(chainName)) { + console.log(`✅ Migration is ready to finalize for ${chainName}`); + return true; + } + console.log(`⏳ Migration not ready to finalize for ${chainName}, retrying...`); + return null; + }, + { timeoutMs: 10 * 60 * 1000, intervalMs: 2000, label: `waitForMigrationReadyForFinalize(${chainName})` } + ); +} diff --git a/core/tests/highlevel-test-tools/src/index.ts b/core/tests/highlevel-test-tools/src/index.ts index 6af25ea6a9b6..773cdec0448c 100644 --- a/core/tests/highlevel-test-tools/src/index.ts +++ b/core/tests/highlevel-test-tools/src/index.ts @@ -18,5 +18,6 @@ export { generateLoad } from './generate-load'; export { getRpcUrl, queryJsonRpc, getL1BatchNumber, getL1BatchDetails } from './rpc-utils'; export { waitForAllBatchesToBeExecuted, 
generateRealisticLoad } from './wait-for-batches'; export { TESTED_CHAIN_TYPE } from './chain-types'; -export { migrateToGatewayIfNeeded } from './gateway'; +export { migrateToGatewayIfNeeded, waitForMigrationReadyForFinalize } from './gateway'; export { getMainWalletPk } from './wallets'; +export { withDeadline, checkRpcHealth, isConnectionError, RpcHealthGuard, type HealthStatus } from './deadline'; diff --git a/core/tests/highlevel-test-tools/src/wait-for-batches.ts b/core/tests/highlevel-test-tools/src/wait-for-batches.ts index f479fc36f340..f16565c0fda0 100644 --- a/core/tests/highlevel-test-tools/src/wait-for-batches.ts +++ b/core/tests/highlevel-test-tools/src/wait-for-batches.ts @@ -23,10 +23,10 @@ export async function waitForAllBatchesToBeExecuted(chainName: string, timeoutMs return l1BatchDetails; } - await new Promise((resolve) => setTimeout(resolve, 5000)); + await new Promise((resolve) => setTimeout(resolve, 2000)); } catch (error) { console.log(`⚠️ Error checking L1 batch execution status, retrying... 
Error: ${error}`); - await new Promise((resolve) => setTimeout(resolve, 5000)); + await new Promise((resolve) => setTimeout(resolve, 2000)); } } diff --git a/core/tests/highlevel-test-tools/tests/en-integration-test.test.ts b/core/tests/highlevel-test-tools/tests/en-integration-test.test.ts index c1840284cce7..fe0073cee12a 100644 --- a/core/tests/highlevel-test-tools/tests/en-integration-test.test.ts +++ b/core/tests/highlevel-test-tools/tests/en-integration-test.test.ts @@ -1,14 +1,14 @@ import { describe, it } from 'vitest'; -import { TESTED_CHAIN_TYPE } from '../src'; -// import { enIntegrationTests } from '../src/run-integration-tests'; +import { createChainAndStartServer, TESTED_CHAIN_TYPE } from '../src'; +import { enIntegrationTests } from '../src/run-integration-tests'; describe('External Node Integration tests Test', () => { it(`for ${TESTED_CHAIN_TYPE} chain`, async () => { - // const testChain = await createChainAndStartServer(TESTED_CHAIN_TYPE, 'External Node Integration tests Test'); - // Define some chain B used for interop tests - // const testSecondChain = await createChainAndStartServer('era', 'External Node Integration tests'); - // await testChain.generateRealisticLoad(); - // await testChain.waitForAllBatchesToBeExecuted(); + const testChain = await createChainAndStartServer(TESTED_CHAIN_TYPE, 'External Node Integration tests Test'); + //Define some chain B used for interop tests + const testSecondChain = await createChainAndStartServer('era', 'External Node Integration tests'); + await testChain.generateRealisticLoad(); + await testChain.waitForAllBatchesToBeExecuted(); // await testChain.initExternalNode(); // await testChain.runExternalNode(); // await enIntegrationTests(testChain.chainName, testSecondChain.chainName); diff --git a/core/tests/highlevel-test-tools/tests/revert-test.test.ts b/core/tests/highlevel-test-tools/tests/revert-test.test.ts index c843d26d8a9c..67cacea2eb87 100644 --- 
a/core/tests/highlevel-test-tools/tests/revert-test.test.ts +++ b/core/tests/highlevel-test-tools/tests/revert-test.test.ts @@ -4,7 +4,7 @@ // main_contract.getTotalBatchesCommitted actually checks the number of batches committed. // main_contract.getTotalBatchesExecuted actually checks the number of batches executed. import { beforeAll, describe, it } from 'vitest'; -import { createChainAndStartServer, TESTED_CHAIN_TYPE, TestChain, getMainWalletPk } from '../src'; +import { createChainAndStartServer, TESTED_CHAIN_TYPE, TestChain, getMainWalletPk, queryJsonRpc } from '../src'; import * as utils from 'utils'; import { checkRandomTransfer, @@ -81,13 +81,6 @@ describe('Block reverting test', function () { await testChain.runExternalNode(); - console.log(`😴 Sleeping for 60 seconds before killing external node to wait for it to sync..`); - await new Promise((resolve) => setTimeout(resolve, 60000)); - - let ethClientWeb3Url: string; - let apiWeb3JsonRpcHttpUrl: string; - let enEthClientUrl: string; - const secretsConfig = loadConfig({ pathToHome, chain: chainName, config: 'secrets.yaml' }); const generalConfig = loadConfig({ pathToHome, chain: chainName, config: 'general.yaml' }); const contractsConfig = loadConfig({ pathToHome, chain: chainName, config: 'contracts.yaml' }); @@ -99,9 +92,33 @@ describe('Block reverting test', function () { }); const walletsConfig = loadConfig({ pathToHome, chain: chainName, config: 'wallets.yaml' }); + // Wait for EN to sync to the same batch as main node instead of hardcoded sleep + const mainNodeRpcUrl = generalConfig.api.web3_json_rpc.http_url; + const enRpcUrl = externalNodeGeneralConfig.api.web3_json_rpc.http_url; + const syncTimeoutMs = 120_000; + const syncPollIntervalMs = 2_000; + const syncStart = Date.now(); + console.log(`Waiting for external node to sync (timeout: ${syncTimeoutMs / 1000}s)...`); + while (Date.now() - syncStart < syncTimeoutMs) { + const mainBatchHex = await queryJsonRpc(mainNodeRpcUrl, 'zks_L1BatchNumber'); + 
const enBatchHex = await queryJsonRpc(enRpcUrl, 'zks_L1BatchNumber'); + const mainBatch = parseInt(mainBatchHex, 16); + const enBatch = parseInt(enBatchHex, 16); + if (enBatch >= mainBatch) { + console.log(`External node synced: EN batch ${enBatch} >= main node batch ${mainBatch}`); + break; + } + console.log(`EN batch ${enBatch} < main node batch ${mainBatch}, waiting...`); + await new Promise((resolve) => setTimeout(resolve, syncPollIntervalMs)); + } + + let ethClientWeb3Url: string; + let apiWeb3JsonRpcHttpUrl: string; + let enEthClientUrl: string; + ethClientWeb3Url = secretsConfig.l1.l1_rpc_url; - apiWeb3JsonRpcHttpUrl = generalConfig.api.web3_json_rpc.http_url; - enEthClientUrl = externalNodeGeneralConfig.api.web3_json_rpc.http_url; + apiWeb3JsonRpcHttpUrl = mainNodeRpcUrl; + enEthClientUrl = enRpcUrl; operatorAddress = walletsConfig.operator.address; mainNodeTester = createTester(ethClientWeb3Url, apiWeb3JsonRpcHttpUrl); diff --git a/core/tests/highlevel-test-tools/tests/token-balance-migration-from.test.ts b/core/tests/highlevel-test-tools/tests/token-balance-migration-from.test.ts deleted file mode 100644 index 4de179575796..000000000000 --- a/core/tests/highlevel-test-tools/tests/token-balance-migration-from.test.ts +++ /dev/null @@ -1,375 +0,0 @@ -import { afterAll, beforeAll, describe, it } from 'vitest'; -import { createChainAndStartServer, TESTED_CHAIN_TYPE, tokenBalanceMigrationTest } from '../src'; - -import * as utils from 'utils'; -import { - WithdrawalHandler, - ChainHandler, - generateChainRichWallet, - ERC20Handler, - RICH_WALLET_L1_BALANCE, - RICH_WALLET_L2_BALANCE -} from './token-balance-migration-tester'; -import * as zksync from 'zksync-ethers'; -import * as ethers from 'ethers'; -import { expect } from 'vitest'; -import fs from 'node:fs/promises'; -import { existsSync, readFileSync } from 'node:fs'; -import { BytesLike } from '@ethersproject/bytes'; -import { BigNumberish } from 'ethers'; -import { loadConfig, shouldLoadConfigFromFile } 
from 'utils/build/file-configs'; -import path from 'path'; -import { CONTRACT_DEPLOYER, CONTRACT_DEPLOYER_ADDRESS, hashBytecode, ZKSYNC_MAIN_ABI } from 'zksync-ethers/build/utils'; -import { utils as zksync_utils } from 'zksync-ethers'; -import { logsTestPath } from 'utils/build/logs'; -import { waitForNewL1Batch } from 'utils'; -import { getMainWalletPk } from 'highlevel-test-tools/src/wallets'; -import { initTestWallet } from '../src/run-integration-tests'; -import { GATEWAY_CHAIN_ID } from 'utils/src/constants'; - -async function logsPath(name: string): Promise { - return await logsTestPath(fileConfig.chain, 'logs/upgrade/', name); -} - -const L2_BRIDGEHUB_ADDRESS = '0x0000000000000000000000000000000000010002'; -const pathToHome = path.join(__dirname, '../../../..'); -const fileConfig = shouldLoadConfigFromFile(); - -// const contracts: Contracts = initContracts(pathToHome, fileConfig.loadFromFile); - -const ZK_CHAIN_INTERFACE = JSON.parse( - readFileSync(pathToHome + '/contracts/l1-contracts/out/IZKChain.sol/IZKChain.json').toString() -).abi; - -const depositAmount = ethers.parseEther('0.001'); - -interface GatewayInfo { - gatewayChainId: string; - gatewayProvider: zksync.Provider; - gatewayCTM: string; - l2ChainAdmin: string; - l2DiamondProxyAddress: string; -} - -interface Call { - target: string; - value: BigNumberish; - data: BytesLike; -} - -// This test requires interop and so it requires Gateway chain. -// This is the name of the chain. -const GATEWAY_CHAIN_NAME = 'gateway'; - -const useGatewayChain = process.env.USE_GATEWAY_CHAIN; -const shouldSkip = useGatewayChain !== 'WITH_GATEWAY'; - -if (shouldSkip) { - console.log( - `⏭️ Skipping asset migration test for ${TESTED_CHAIN_TYPE} chain (USE_GATEWAY_CHAIN=${useGatewayChain})` - ); -} - -/// There are the following kinds of tokens' states that we test: -/// At the moment of migration the token can be: -/// - Native to chain, already present on L1/other L2s. 
-/// - Native to chain, not present on L1 at all (can have unfinalized withdrawal). -/// - Native to L1, never been on the chain. -/// - Native to L1, already present on the chain. -/// - Native to another L2, never present on the chain. -/// - Native to another L2, already present on the chain. -/// After the chain migrates to GW, we can classify the states of the tokens the following way: -/// - Migrated the balance to GW. May be done after the token already received some deposits. -/// - Never migrated the balance to GW (but the token is known to the chain). May be done after the token. -/// - Never migrated the balance to GW (but the token is bridged for the first time). No migration should be needed at all. -/// After the chain migrates from GW, we need to test that all the tokens can be withdrawn in sufficient amounts to move -/// the entire balance to L1. It should not be possible to finalize all old interops. -(shouldSkip ? describe.skip : describe)('Token balance migration FROM GW tests', function () { - let chainHandler: ChainHandler; - let customTokenChainHandler: ChainHandler; - - let l1RichWallet: ethers.Wallet; - let gwRichWallet: zksync.Wallet; - let chainRichWallet: zksync.Wallet; - let customTokenChainRichWallet: zksync.Wallet; - - // Stored token data for cross-test assertions - const tokens: Record = {}; - // Stored withdrawal data for cross-test assertions - const withdrawalsBeforeTBM: Record = {}; - const withdrawalsAfterTBM: Record = {}; - - beforeAll(async () => { - // Initialize gateway chain - console.log('Initializing rich wallet for gateway chain...'); - await initTestWallet(GATEWAY_CHAIN_NAME); - gwRichWallet = await generateChainRichWallet(GATEWAY_CHAIN_NAME); - l1RichWallet = gwRichWallet.ethWallet(); - console.log('Gateway rich wallet private key:', gwRichWallet.privateKey); - - // Initialize tested chain - console.log(`Creating a new ${TESTED_CHAIN_TYPE} chain...`); - chainHandler = await 
ChainHandler.createNewChain(TESTED_CHAIN_TYPE); - await chainHandler.initEcosystemContracts(gwRichWallet); - chainRichWallet = chainHandler.l2RichWallet; - console.log('Chain rich wallet private key:', chainRichWallet.privateKey); - - // Initialize auxiliary chain - console.log('Creating a secondary chain...'); - customTokenChainHandler = await ChainHandler.createNewChain('era'); - customTokenChainRichWallet = customTokenChainHandler.l2RichWallet; - - const withdrawalsToBeFinalized: Record = {}; - // DEPLOY TOKENS THAT WILL BE TESTED - // We first deploy all tokens that will need to be withdrawn from L2 to make testing faster - // Token native to L1, deposited to L2, fully withdrawn from L2 - tokens.L1NativeWithdrawnFromL2 = await ERC20Handler.deployTokenOnL1(chainRichWallet); - const L1NativeWithdrawnFromL2Amount = await tokens.L1NativeWithdrawnFromL2.deposit(chainHandler); - withdrawalsToBeFinalized.L1NativeWithdrawnFromL2 = await tokens.L1NativeWithdrawnFromL2.withdraw( - chainHandler, - true, - L1NativeWithdrawnFromL2Amount - ); - // Token native to L1, deposited to L2, partially withdrawn from L2 - tokens.L1NativePartiallyWithdrawnFromL2 = await ERC20Handler.deployTokenOnL1(chainRichWallet); - await tokens.L1NativePartiallyWithdrawnFromL2.deposit(chainHandler); - withdrawalsToBeFinalized.L1NativePartiallyWithdrawnFromL2 = - await tokens.L1NativePartiallyWithdrawnFromL2.withdraw(chainHandler); - // Token native to L1, deposited to L2, fully withdrawn from L2 but not finalized yet - tokens.L1NativeUnfinalizedWithdrawalBeforeTBMToGW = await ERC20Handler.deployTokenOnL1(chainRichWallet); - await tokens.L1NativeUnfinalizedWithdrawalBeforeTBMToGW.deposit(chainHandler); - withdrawalsBeforeTBM.L1NativeUnfinalizedWithdrawalBeforeTBMToGW = - await tokens.L1NativeUnfinalizedWithdrawalBeforeTBMToGW.withdraw(chainHandler, false); - tokens.L1NativeUnfinalizedWithdrawalAfterTBMToGW = await ERC20Handler.deployTokenOnL1(chainRichWallet); - await 
tokens.L1NativeUnfinalizedWithdrawalAfterTBMToGW.deposit(chainHandler); - withdrawalsAfterTBM.L1NativeUnfinalizedWithdrawalAfterTBMToGW = - await tokens.L1NativeUnfinalizedWithdrawalAfterTBMToGW.withdraw(chainHandler, false); - // Token native to L1, deposited to L2 - tokens.L1NativeDepositedToL2 = await ERC20Handler.deployTokenOnL1(chainRichWallet); - await tokens.L1NativeDepositedToL2.deposit(chainHandler); - // Token native to L1, not deposited to L2 yet - tokens.L1NativeNotDepositedToL2 = await ERC20Handler.deployTokenOnL1(chainRichWallet); - - // Token native to L2-A, fully withdrawn to L1 - tokens.L2NativeWithdrawnToL1 = await ERC20Handler.deployTokenOnL2(chainHandler); - const L2NativeWithdrawnToL1Amount = await tokens.L2NativeWithdrawnToL1.getL2Balance(); - withdrawalsToBeFinalized.L2NativeWithdrawnToL1 = await tokens.L2NativeWithdrawnToL1.withdraw( - chainHandler, - L2NativeWithdrawnToL1Amount - ); - // Token native to L2-A, partially withdrawn to L1 - tokens.L2NativePartiallyWithdrawnToL1 = await ERC20Handler.deployTokenOnL2(chainHandler); - withdrawalsToBeFinalized.L2NativePartiallyWithdrawnToL1 = - await tokens.L2NativePartiallyWithdrawnToL1.withdraw(chainHandler); - // Token native to L2-A, fully withdrawn to L1 but not finalized yet - tokens.L2NativeUnfinalizedWithdrawalBeforeTBMToGW = await ERC20Handler.deployTokenOnL2(chainHandler); - withdrawalsBeforeTBM.L2NativeUnfinalizedWithdrawalBeforeTBMToGW = - await tokens.L2NativeUnfinalizedWithdrawalBeforeTBMToGW.withdraw(chainHandler, false); - tokens.L2NativeUnfinalizedWithdrawalAfterTBMToGW = await ERC20Handler.deployTokenOnL2(chainHandler); - withdrawalsAfterTBM.L2NativeUnfinalizedWithdrawalAfterTBMToGW = - await tokens.L2NativeUnfinalizedWithdrawalAfterTBMToGW.withdraw(chainHandler, false); - // Token native to L2-A, not withdrawn to L1 yet - tokens.L2NativeNotWithdrawnToL1 = await ERC20Handler.deployTokenOnL2(chainHandler); - - // Token native to L2-B, withdrawn from L2-B, and deposited to L2-A - 
const L2BToken = await ERC20Handler.deployTokenOnL2(customTokenChainHandler, RICH_WALLET_L1_BALANCE); - const L2BTokenWithdrawal = await L2BToken.withdraw(customTokenChainHandler, true, RICH_WALLET_L1_BALANCE); - - // Finalize all needed withdrawals - for (const tokenName of Object.keys(withdrawalsToBeFinalized)) { - await withdrawalsToBeFinalized[tokenName].finalizeWithdrawal(chainRichWallet.ethWallet()); - // We can now define the L1 contracts for the tokens - await tokens[tokenName].setL1Contract(chainHandler); - } - // Get the L1 contract for the L2-B token - await L2BTokenWithdrawal.finalizeWithdrawal(chainRichWallet.ethWallet()); - const L2BTokenL1Contract = await L2BToken.getL1Contract(customTokenChainHandler); - - // Deposit L2-B token to L2-A - tokens.L2BToken = await ERC20Handler.fromL2BL1Token( - L2BTokenL1Contract, - chainRichWallet, - customTokenChainRichWallet - ); - await tokens.L2BToken.deposit(chainHandler); - - // Add the base token to the list - tokens.baseToken = new ERC20Handler(chainHandler.l2RichWallet, chainHandler.l1BaseTokenContract, undefined); - const baseTokenAssetId = await tokens.baseToken.assetId(chainHandler); - // Get the current balance of the base token on the chain for accounting purposes - chainHandler.chainBalances[baseTokenAssetId] = await chainHandler.l1AssetTracker.chainBalance( - chainHandler.inner.chainId, - baseTokenAssetId - ); - - for (const token of Object.keys(tokens)) { - console.log(`Token ${token} Asset ID: ${await tokens[token].assetId(chainHandler)}`); - } - }); - - it('Can migrate token balances to GW', async () => { - // Migrate the chain to gateway - await chainHandler.migrateToGateway(); - // Migrate token balances to gateway -- will just migrate the base token - await chainHandler.migrateTokenBalancesToGateway(); - }); - - it('Can migrate the chain from Gateway', async () => { - await chainHandler.migrateFromGateway(); - }); - - it('Can finalize pending withdrawals', async () => { - for (const tokenName of 
Object.keys(withdrawalsBeforeTBM)) { - await withdrawalsBeforeTBM[tokenName].finalizeWithdrawal(chainRichWallet.ethWallet()); - const assetId = await tokens[tokenName].assetId(chainHandler); - - if (tokens[tokenName].isL2Token) { - chainHandler.chainBalances[assetId] = ethers.MaxUint256 - withdrawalsBeforeTBM[tokenName].amount; - } else { - chainHandler.chainBalances[assetId] -= withdrawalsBeforeTBM[tokenName].amount; - } - - delete withdrawalsBeforeTBM[tokenName]; - } - }); - - it('Can deposit a token to the chain after migrating from gateway', async () => { - tokens.L1NativeDepositedToL2AfterMigrationFromGW = await ERC20Handler.deployTokenOnL1(chainRichWallet); - // Fresh deposit after the chain migrated to gateway marks the asset ID as effectively migrated - await tokens.L1NativeDepositedToL2AfterMigrationFromGW.deposit(chainHandler); - const assetIdA = await tokens.L1NativeDepositedToL2AfterMigrationFromGW.assetId(chainHandler); - await expect( - chainHandler.assertAssetTrackersState(assetIdA, { - balances: { - L1AT_GW: 0n, - GWAT: 0n - }, - migrations: { - L1AT: 2n, - L1AT_GW: 0n, - GWAT: 0n - } - }) - ).resolves.toBe(true); - // Deposit existing token - await tokens.L1NativeNotDepositedToL2.deposit(chainHandler); - const assetIdB = await tokens.L1NativeNotDepositedToL2.assetId(chainHandler); - await expect( - chainHandler.assertAssetTrackersState(assetIdB, { - balances: { - L1AT_GW: 0n, - GWAT: 0n - }, - migrations: { - L1AT: 2n, - L1AT_GW: 0n, - GWAT: 0n - } - }) - ).resolves.toBe(true); - }); - - it('Can migrate token balances to L1', async () => { - for (const token of Object.keys(tokens)) { - console.log(`Token ${token} Asset ID: ${await tokens[token].assetId(chainHandler)}`); - } - // Migrate token balances to gateway - // This also tests repeated migrations, as `L1NativeDepositedToL2AfterMigrationFromGW` was already effectively migrated - // This command tries to migrate it again, which will succeed, but later balance check will show it stays the same - 
await chainHandler.migrateTokenBalancesToL1(); - // We need to wait for a bit for L1AT's `_sendConfirmationToChains` to propagate to GW and the tested L2 chain - await utils.sleep(5); - // After migration, update the existing balance to exclude this chain's balance - chainHandler.existingBaseTokenL1ATBalanceForGW = await chainHandler.l1AssetTracker.chainBalance( - GATEWAY_CHAIN_ID, - chainHandler.baseTokenAssetId - ); - }); - - it('Correctly assigns chain token balances after migrating token balances to L1', async () => { - // Chain balances are accounted correctly on L1AT - for (const tokenName of Object.keys(tokens)) { - const assetId = await tokens[tokenName].assetId(chainHandler); - if (assetId === ethers.ZeroHash) continue; - - const isL2Token = tokens[tokenName].isL2Token; - const baseBalance = chainHandler.chainBalances[assetId] ?? (isL2Token ? ethers.MaxUint256 : 0n); - const l1Expected = baseBalance; - - // Tokens deposited AFTER migrating from gateway won't have a GWAT migration number set - const depositedAfterFromGW = - tokenName === 'L1NativeNotDepositedToL2' || tokenName === 'L1NativeDepositedToL2AfterMigrationFromGW'; - - await expect( - chainHandler.assertAssetTrackersState(assetId, { - balances: { - L1AT: l1Expected, - L1AT_GW: 0n, - GWAT: 0n - }, - migrations: { - L1AT: 2n, - L1AT_GW: 0n, - GWAT: depositedAfterFromGW ? 
0n : 2n - } - }) - ).resolves.toBe(true); - } - }); - - it('Can finalize pending withdrawals', async () => { - for (const tokenName of Object.keys(withdrawalsAfterTBM)) { - await withdrawalsAfterTBM[tokenName].finalizeWithdrawal(chainRichWallet.ethWallet()); - const assetId = await tokens[tokenName].assetId(chainHandler); - - if (tokens[tokenName].isL2Token) { - chainHandler.chainBalances[assetId] = ethers.MaxUint256 - withdrawalsAfterTBM[tokenName].amount; - } else { - chainHandler.chainBalances[assetId] -= withdrawalsAfterTBM[tokenName].amount; - } - - delete withdrawalsAfterTBM[tokenName]; - } - }); - - it('Can withdraw migrated tokens from the chain', async () => { - // Can fully withdraw existing tokens - const withdrawalA = await tokens.L1NativeDepositedToL2.withdraw(chainHandler); - const withdrawalB = await tokens.L1NativeNotDepositedToL2.withdraw(chainHandler); - const withdrawalC = await tokens.L2BToken.withdraw(chainHandler); - await withdrawalA.finalizeWithdrawal(chainRichWallet.ethWallet()); - await withdrawalB.finalizeWithdrawal(chainRichWallet.ethWallet()); - await withdrawalC.finalizeWithdrawal(chainRichWallet.ethWallet()); - }); - - it('Can deposit a token to the chain after migrating balances from gateway', async () => { - tokens.L1NativeDepositedToL2AfterTBMToGW = await ERC20Handler.deployTokenOnL1(chainRichWallet); - // Fresh deposit after the chain migrated its token balances to gateway marks the asset ID as effectively migrated - await tokens.L1NativeDepositedToL2AfterTBMToGW.deposit(chainHandler); - const assetId = await tokens.L1NativeDepositedToL2AfterTBMToGW.assetId(chainHandler); - await expect( - chainHandler.assertAssetTrackersState(assetId, { - balances: { - L1AT_GW: 0n, - GWAT: 0n - }, - migrations: { - L1AT: 2n, - L1AT_GW: 0n, - GWAT: 0n - } - }) - ).resolves.toBe(true); - }); - - afterAll(async () => { - console.log('Tearing down chains...'); - if (chainHandler) { - await chainHandler.stopServer(); - } - if (customTokenChainHandler) 
{ - await customTokenChainHandler.stopServer(); - } - console.log('Complete'); - }); -}); diff --git a/core/tests/highlevel-test-tools/tests/token-balance-migration-tester.ts b/core/tests/highlevel-test-tools/tests/token-balance-migration-tester.ts index 2530072f79b7..5911eca90210 100644 --- a/core/tests/highlevel-test-tools/tests/token-balance-migration-tester.ts +++ b/core/tests/highlevel-test-tools/tests/token-balance-migration-tester.ts @@ -4,6 +4,7 @@ import * as utils from 'utils'; import * as yaml from 'js-yaml'; import * as fs from 'fs'; import path from 'path'; +import { expect } from 'vitest'; import { loadConfig, loadEcosystemConfig } from 'utils/build/file-configs'; import { sleep } from 'zksync-ethers/build/utils'; import { getMainWalletPk } from 'highlevel-test-tools/src/wallets'; @@ -14,15 +15,39 @@ import { GW_ASSET_TRACKER_ADDRESS, GATEWAY_CHAIN_ID } from 'utils/src/constants'; -import { executeCommand, migrateToGatewayIfNeeded, startServer } from '../src'; +import { executeCommand, FileMutex, migrateToGatewayIfNeeded, startServer } from '../src'; import { removeErrorListeners } from '../src/execute-command'; import { initTestWallet } from '../src/run-integration-tests'; -export const RICH_WALLET_L1_BALANCE = ethers.parseEther('10.0'); -export const RICH_WALLET_L2_BALANCE = RICH_WALLET_L1_BALANCE; +const tbmMutex = new FileMutex(); +export const RICH_WALLET_L2_BALANCE = ethers.parseEther('10.0'); +export const TOKEN_MINT_AMOUNT = ethers.parseEther('1.0'); +const MAX_WITHDRAW_AMOUNT = ethers.parseEther('0.1'); const TEST_SUITE_NAME = 'Token Balance Migration Test'; const pathToHome = path.join(__dirname, '../../../..'); +export async function expectRevertWithSelector( + action: Promise, + selector: string, + failureMessage = 'Expected transaction to revert with selector' +): Promise { + try { + await action; + expect.fail(`${failureMessage} ${selector}`); + } catch (err) { + const errorText = [ + (err as any)?.data, + (err as any)?.error?.data, + 
(err as any)?.info?.error?.data, + (err as any)?.shortMessage, + (err as any)?.message + ] + .filter(Boolean) + .join(' '); + expect(errorText).toContain(selector); + } +} + function readArtifact(contractName: string, outFolder: string = 'out', fileName: string = contractName) { return JSON.parse( fs @@ -39,9 +64,6 @@ const ERC20_ABI = ERC20_EVM_ARTIFACT.abi; const ERC20_ZKEVM_BYTECODE = readArtifact('TestnetERC20Token', 'zkout').bytecode.object; -const AMOUNT_FLOOR = ethers.parseEther('0.01'); -const AMOUNT_CEILING = ethers.parseEther('1'); - type AssetTrackerLocation = 'L1AT' | 'L1AT_GW' | 'GWAT'; const ASSET_TRACKERS: readonly AssetTrackerLocation[] = ['L1AT', 'L1AT_GW', 'GWAT'] as const; @@ -170,27 +192,49 @@ export class ChainHandler { migrations?: Record; } ): Promise { + const failures: string[] = []; + const recordFailure = (where: AssetTrackerLocation, err: unknown) => { + const reason = err instanceof Error ? err.message : String(err); + failures.push(`[${where}] ${reason}`); + }; + for (const where of ASSET_TRACKERS) { + // Since we have several chains settling on the same gateway in parallel, accounting + // for the base token on L1AT_GW can be very tricky, so we just skip it. 
+ if (where === 'L1AT_GW' && assetId === this.baseTokenAssetId) continue; const expected = balances?.[where]; - if (expected !== undefined) { - await this.assertChainBalance(assetId, where, expected); - } else { - await this.assertChainBalance(assetId, where); + try { + if (expected !== undefined) { + await this.assertChainBalance(assetId, where, expected); + } else { + await this.assertChainBalance(assetId, where); + } + } catch (err) { + recordFailure(where, err); } } if (migrations) { for (const where of ASSET_TRACKERS) { - await this.assertAssetMigrationNumber(assetId, where, migrations[where]); + try { + await this.assertAssetMigrationNumber(assetId, where, migrations[where]); + } catch (err) { + recordFailure(where, err); + } } } + if (failures.length > 0) { + const message = `Asset tracker assertion failures:\n${failures.map((f) => `- ${f}`).join('\n')}`; + console.error(message); + throw new Error(message); + } + return true; } async stopServer() { await this.inner.mainNode.kill(); - await this.waitForShutdown(); } async startServer() { @@ -222,10 +266,9 @@ export class ChainHandler { async migrateToGateway() { // Pause deposits before initiating migration - await executeCommand( - 'zkstack', + await this.zkstackExecWithMutex( ['chain', 'pause-deposits', '--chain', this.inner.chainName], - this.inner.chainName, + 'pausing deposits before initiating migration', 'gateway_migration' ); // Wait for priority queue to be empty @@ -256,10 +299,9 @@ export class ChainHandler { async migrateFromGateway() { // Pause deposits before initiating migration - await executeCommand( - 'zkstack', + await this.zkstackExecWithMutex( ['chain', 'pause-deposits', '--chain', this.inner.chainName], - this.inner.chainName, + 'pausing deposits before initiating migration', 'gateway_migration' ); // Wait for priority queue to be empty @@ -267,16 +309,14 @@ export class ChainHandler { // Wait for all batches to be executed await this.inner.waitForAllBatchesToBeExecuted(); // Notify 
server - await executeCommand( - 'zkstack', + await this.zkstackExecWithMutex( ['chain', 'gateway', 'notify-about-from-gateway-update', '--chain', this.inner.chainName], - this.inner.chainName, + 'notifying about from gateway update', 'gateway_migration' ); // We can now reliably migrate from gateway - await this.stopServer(); - await executeCommand( - 'zkstack', + removeErrorListeners(this.inner.mainNode.process!); + await this.zkstackExecWithMutex( [ 'chain', 'gateway', @@ -286,22 +326,22 @@ export class ChainHandler { '--chain', this.inner.chainName ], - this.inner.chainName, + 'migrating from gateway', 'gateway_migration' ); await this.waitForShutdown(); await this.startServer(); } - async migrateTokenBalancesToGateway() { + async initiateTokenBalanceMigration(direction: 'to-gateway' | 'from-gateway') { await executeCommand( 'zkstack', [ 'chain', 'gateway', - 'migrate-token-balances', + 'initiate-token-balance-migration', '--to-gateway', - 'true', + String(direction === 'to-gateway'), '--gateway-chain-name', 'gateway', '--chain', @@ -312,15 +352,15 @@ export class ChainHandler { ); } - async migrateTokenBalancesToL1() { + async finalizeTokenBalanceMigration(direction: 'to-gateway' | 'from-gateway') { await executeCommand( 'zkstack', [ 'chain', 'gateway', - 'migrate-token-balances', + 'finalize-token-balance-migration', '--to-gateway', - 'false', + String(direction === 'to-gateway'), '--gateway-chain-name', 'gateway', '--chain', @@ -356,6 +396,27 @@ export class ChainHandler { return new ChainHandler(testChain, await generateChainRichWallet(testChain.chainName)); } + private async zkstackExecWithMutex(command: string[], name: string, logFileName: string) { + try { + // Acquire mutex for zkstack exec + console.log(`🔒 Acquiring mutex for ${name} of ${this.inner.chainName}...`); + await tbmMutex.acquire(); + console.log(`✅ Mutex acquired for ${name} of ${this.inner.chainName}`); + + try { + await executeCommand('zkstack', command, this.inner.chainName, 
logFileName); + + console.log(`✅ Successfully executed ${name} for chain ${this.inner.chainName}`); + } finally { + // Always release the mutex + tbmMutex.release(); + } + } catch (e) { + console.error(`❌ Failed to execute ${name} for chain ${this.inner.chainName}:`, e); + throw e; + } + } + private async assertChainBalance( assetId: string, where: 'L1AT' | 'L1AT_GW' | 'GWAT', @@ -375,7 +436,7 @@ export class ChainHandler { if (assetId === this.baseTokenAssetId && (where === 'GWAT' || where === 'L1AT')) { // Here we have to account for some balance drift from the migrate_token_balances.rs script - const tolerance = ethers.parseEther('0.0015'); + const tolerance = ethers.parseEther('0.005'); const diff = actualBalance > balance ? actualBalance - balance : balance - actualBalance; if (diff > tolerance) { throw new Error(`Balance mismatch for ${where} ${assetId}: expected ${balance}, got ${actualBalance}`); @@ -425,17 +486,20 @@ export class ERC20Handler { public l1Contract: ethers.Contract | undefined; public l2Contract: zksync.Contract | undefined; public isL2Token: boolean; + public isBaseToken: boolean; cachedAssetId: string | null = null; constructor( wallet: zksync.Wallet, l1Contract: ethers.Contract | undefined, - l2Contract: zksync.Contract | undefined + l2Contract: zksync.Contract | undefined, + isBaseToken = false ) { this.wallet = wallet; this.l1Contract = l1Contract; this.l2Contract = l2Contract; this.isL2Token = !!l2Contract; + this.isBaseToken = isBaseToken; } async assetId(chainHandler: ChainHandler): Promise { @@ -451,11 +515,10 @@ export class ERC20Handler { return assetId; } - async deposit(chainHandler: ChainHandler, amount?: bigint): Promise { - const depositAmount = amount ?? 
getRandomDepositAmount(); + async deposit(chainHandler: ChainHandler) { const depositTx = await this.wallet.deposit({ token: await this.l1Contract!.getAddress(), - amount: depositAmount, + amount: TOKEN_MINT_AMOUNT, approveERC20: true, approveBaseERC20: true }); @@ -465,35 +528,44 @@ export class ERC20Handler { await waitForBalanceNonZero(this.l2Contract!, this.wallet); const assetId = await this.assetId(chainHandler); - chainHandler.chainBalances[assetId] = (chainHandler.chainBalances[assetId] ?? 0n) + depositAmount; - - return depositAmount; + chainHandler.chainBalances[assetId] = (chainHandler.chainBalances[assetId] ?? 0n) + TOKEN_MINT_AMOUNT; } - async withdraw( - chainHandler: ChainHandler, - decreaseChainBalance = true, - amount?: bigint - ): Promise { + async withdraw(amount?: bigint): Promise { const withdrawAmount = amount ?? getRandomWithdrawAmount(); - if ((await this.l2Contract!.allowance(this.wallet.address, L2_NATIVE_TOKEN_VAULT_ADDRESS)) < withdrawAmount) { + let isETHBaseToken = false; + let token; + if (this.isBaseToken) { + const baseToken = await this.wallet.provider.getBaseTokenContractAddress(); + isETHBaseToken = zksync.utils.isAddressEq(baseToken, zksync.utils.ETH_ADDRESS_IN_CONTRACTS); + if (isETHBaseToken) { + token = zksync.utils.ETH_ADDRESS; + } else { + const l2BaseTokenAddress = zksync.utils.L2_BASE_TOKEN_ADDRESS; + token = l2BaseTokenAddress; + if (!this.l2Contract || (await this.l2Contract.getAddress()) !== l2BaseTokenAddress) { + this.l2Contract = new zksync.Contract(l2BaseTokenAddress, ERC20_ABI, this.wallet); + } + } + } else { + token = await this.l2Contract!.getAddress(); + } + + if ( + !this.isBaseToken && + (await this.l2Contract!.allowance(this.wallet.address, L2_NATIVE_TOKEN_VAULT_ADDRESS)) < withdrawAmount + ) { await (await this.l2Contract!.approve(L2_NATIVE_TOKEN_VAULT_ADDRESS, 0)).wait(); await (await this.l2Contract!.approve(L2_NATIVE_TOKEN_VAULT_ADDRESS, withdrawAmount)).wait(); } const withdrawTx = await 
this.wallet.withdraw({ - token: await this.l2Contract!.getAddress(), + token, amount: withdrawAmount }); await withdrawTx.wait(); - const assetId = await this.assetId(chainHandler); - if (decreaseChainBalance) { - if (this.isL2Token) chainHandler.chainBalances[assetId] = ethers.MaxUint256; - chainHandler.chainBalances[assetId] -= withdrawAmount; - } - return new WithdrawalHandler(withdrawTx.hash, this.wallet.provider, withdrawAmount); } @@ -529,7 +601,7 @@ export class ERC20Handler { // L2-B wallet must hold some balance of the L2-B token on L1 const balance = await secondChainWallet.getBalanceL1(await l1Contract.getAddress()); if (balance === 0n) throw new Error('L2-B wallet must hold some balance of the L2-B token on L1'); - // We need to provide the chain rich wallet with some balance of the L2-B token on L1, to + // Transfer the L2-B token balance on L1 to the target wallet. const l1Erc20 = new ethers.Contract(await l1Contract.getAddress(), ERC20_ABI, secondChainWallet.ethWallet()); await (await l1Erc20.transfer(wallet.address, balance)).wait(); return new ERC20Handler(wallet, l1Contract, undefined); @@ -543,13 +615,12 @@ export class ERC20Handler { const newToken = await factory.deploy(props.name, props.symbol, props.decimals); await newToken.waitForDeployment(); const l1Contract = new ethers.Contract(await newToken.getAddress(), ERC20_ABI, l1Wallet); - await (await l1Contract.mint(l1Wallet.address, RICH_WALLET_L1_BALANCE)).wait(); + await (await l1Contract.mint(l1Wallet.address, TOKEN_MINT_AMOUNT)).wait(); return new ERC20Handler(wallet, l1Contract, undefined); } - static async deployTokenOnL2(chainHandler: ChainHandler, _mintAmount?: bigint) { - const mintAmount = _mintAmount ?? 
getRandomDepositAmount(); + static async deployTokenOnL2(chainHandler: ChainHandler) { const factory = new zksync.ContractFactory( ERC20_ABI, ERC20_ZKEVM_BYTECODE, @@ -561,7 +632,7 @@ export class ERC20Handler { const newToken = await factory.deploy(props.name, props.symbol, props.decimals); await newToken.waitForDeployment(); const l2Contract = new zksync.Contract(await newToken.getAddress(), ERC20_ABI, chainHandler.l2RichWallet); - await (await l2Contract.mint(chainHandler.l2RichWallet.address, mintAmount)).wait(); + await (await l2Contract.mint(chainHandler.l2RichWallet.address, TOKEN_MINT_AMOUNT)).wait(); await (await chainHandler.l2Ntv.registerToken(await l2Contract.getAddress())).wait(); @@ -603,12 +674,8 @@ export class WithdrawalHandler { } } -function getRandomDepositAmount(): bigint { - return AMOUNT_FLOOR + BigInt(Math.floor(Math.random() * Number(AMOUNT_CEILING - AMOUNT_FLOOR + 1n))); -} - function getRandomWithdrawAmount(): bigint { - return 1n + BigInt(Math.floor(Math.random() * Number(AMOUNT_FLOOR / 2n - 1n))); + return BigInt(Math.floor(Math.random() * Number(MAX_WITHDRAW_AMOUNT))); } async function waitForBalanceNonZero(contract: ethers.Contract | zksync.Contract, wallet: zksync.Wallet) { @@ -630,6 +697,13 @@ async function waitUntilBlockFinalized(wallet: zksync.Wallet, blockNumber: numbe if (printedBlockNumber < block.number) { printedBlockNumber = block.number; } + // We make repeated transactions to force the L2 to update. 
+ await ( + await wallet.transfer({ + to: wallet.address, + amount: 1 + }) + ).wait(); await zksync.utils.sleep(wallet.provider.pollingInterval); } } diff --git a/core/tests/highlevel-test-tools/tests/token-balance-migration-to.test.ts b/core/tests/highlevel-test-tools/tests/token-balance-migration-to.test.ts deleted file mode 100644 index e94cf8ebb428..000000000000 --- a/core/tests/highlevel-test-tools/tests/token-balance-migration-to.test.ts +++ /dev/null @@ -1,405 +0,0 @@ -import { afterAll, beforeAll, describe, it } from 'vitest'; -import { createChainAndStartServer, TESTED_CHAIN_TYPE, tokenBalanceMigrationTest } from '../src'; - -import * as utils from 'utils'; -import { - WithdrawalHandler, - ChainHandler, - generateChainRichWallet, - ERC20Handler, - RICH_WALLET_L1_BALANCE, - RICH_WALLET_L2_BALANCE -} from './token-balance-migration-tester'; -import * as zksync from 'zksync-ethers'; -import * as ethers from 'ethers'; -import { expect } from 'vitest'; -import fs from 'node:fs/promises'; -import { existsSync, readFileSync } from 'node:fs'; -import { BytesLike } from '@ethersproject/bytes'; -import { BigNumberish } from 'ethers'; -import { loadConfig, shouldLoadConfigFromFile } from 'utils/build/file-configs'; -import path from 'path'; -import { CONTRACT_DEPLOYER, CONTRACT_DEPLOYER_ADDRESS, hashBytecode, ZKSYNC_MAIN_ABI } from 'zksync-ethers/build/utils'; -import { utils as zksync_utils } from 'zksync-ethers'; -import { logsTestPath } from 'utils/build/logs'; -import { waitForNewL1Batch } from 'utils'; -import { getMainWalletPk } from 'highlevel-test-tools/src/wallets'; -import { initTestWallet } from '../src/run-integration-tests'; -import { GATEWAY_CHAIN_ID } from 'utils/src/constants'; - -async function logsPath(name: string): Promise { - return await logsTestPath(fileConfig.chain, 'logs/upgrade/', name); -} - -const L2_BRIDGEHUB_ADDRESS = '0x0000000000000000000000000000000000010002'; -const pathToHome = path.join(__dirname, '../../../..'); -const fileConfig 
= shouldLoadConfigFromFile(); - -// const contracts: Contracts = initContracts(pathToHome, fileConfig.loadFromFile); - -const ZK_CHAIN_INTERFACE = JSON.parse( - readFileSync(pathToHome + '/contracts/l1-contracts/out/IZKChain.sol/IZKChain.json').toString() -).abi; - -const depositAmount = ethers.parseEther('0.001'); - -interface GatewayInfo { - gatewayChainId: string; - gatewayProvider: zksync.Provider; - gatewayCTM: string; - l2ChainAdmin: string; - l2DiamondProxyAddress: string; -} - -interface Call { - target: string; - value: BigNumberish; - data: BytesLike; -} - -// This test requires interop and so it requires Gateway chain. -// This is the name of the chain. -const GATEWAY_CHAIN_NAME = 'gateway'; - -const useGatewayChain = process.env.USE_GATEWAY_CHAIN; -const shouldSkip = useGatewayChain !== 'WITH_GATEWAY'; - -if (shouldSkip) { - console.log( - `⏭️ Skipping asset migration test for ${TESTED_CHAIN_TYPE} chain (USE_GATEWAY_CHAIN=${useGatewayChain})` - ); -} - -/// There are the following kinds of tokens' states that we test: -/// At the moment of migration the token can be: -/// - Native to chain, already present on L1/other L2s. -/// - Native to chain, not present on L1 at all (can have unfinalized withdrawal). -/// - Native to L1, never been on the chain. -/// - Native to L1, already present on the chain. -/// - Native to another L2, never present on the chain. -/// - Native to another L2, already present on the chain. -/// After the chain migrates to GW, we can classify the states of the tokens the following way: -/// - Migrated the balance to GW. May be done after the token already received some deposits. -/// - Never migrated the balance to GW (but the token is known to the chain). May be done after the token. -/// - Never migrated the balance to GW (but the token is bridged for the first time). No migration should be needed at all. 
-/// After the chain migrates from GW, we need to test that all the tokens can be withdrawn in sufficient amounts to move -/// the entire balance to L1. It should not be possible to finalize all old interops. -(shouldSkip ? describe.skip : describe)('Token balance migration TO GW tests', function () { - let chainHandler: ChainHandler; - let customTokenChainHandler: ChainHandler; - - let l1RichWallet: ethers.Wallet; - let gwRichWallet: zksync.Wallet; - let chainRichWallet: zksync.Wallet; - let customTokenChainRichWallet: zksync.Wallet; - - // Stored token data for cross-test assertions - const tokens: Record = {}; - // Stored withdrawal data for cross-test assertions - const withdrawalsBeforeTBM: Record = {}; - const withdrawalsAfterTBM: Record = {}; - - beforeAll(async () => { - // Initialize gateway chain - console.log('Initializing rich wallet for gateway chain...'); - await initTestWallet(GATEWAY_CHAIN_NAME); - gwRichWallet = await generateChainRichWallet(GATEWAY_CHAIN_NAME); - l1RichWallet = gwRichWallet.ethWallet(); - console.log('Gateway rich wallet private key:', gwRichWallet.privateKey); - - // Initialize tested chain - console.log(`Creating a new ${TESTED_CHAIN_TYPE} chain...`); - chainHandler = await ChainHandler.createNewChain(TESTED_CHAIN_TYPE); - await chainHandler.initEcosystemContracts(gwRichWallet); - chainRichWallet = chainHandler.l2RichWallet; - console.log('Chain rich wallet private key:', chainRichWallet.privateKey); - // Initialize auxiliary chain - console.log('Creating a secondary chain...'); - customTokenChainHandler = await ChainHandler.createNewChain('era'); - customTokenChainRichWallet = customTokenChainHandler.l2RichWallet; - - const withdrawalsToBeFinalized: Record = {}; - // DEPLOY TOKENS THAT WILL BE TESTED - // We first deploy all tokens that will need to be withdrawn from L2 to make testing faster - // Token native to L1, deposited to L2, fully withdrawn from L2 - tokens.L1NativeWithdrawnFromL2 = await 
ERC20Handler.deployTokenOnL1(chainRichWallet); - const L1NativeWithdrawnFromL2Amount = await tokens.L1NativeWithdrawnFromL2.deposit(chainHandler); - withdrawalsToBeFinalized.L1NativeWithdrawnFromL2 = await tokens.L1NativeWithdrawnFromL2.withdraw( - chainHandler, - true, - L1NativeWithdrawnFromL2Amount - ); - // Token native to L1, deposited to L2, partially withdrawn from L2 - tokens.L1NativePartiallyWithdrawnFromL2 = await ERC20Handler.deployTokenOnL1(chainRichWallet); - await tokens.L1NativePartiallyWithdrawnFromL2.deposit(chainHandler); - withdrawalsToBeFinalized.L1NativePartiallyWithdrawnFromL2 = - await tokens.L1NativePartiallyWithdrawnFromL2.withdraw(chainHandler); - // Token native to L1, deposited to L2, fully withdrawn from L2 but not finalized yet - tokens.L1NativeUnfinalizedWithdrawalBeforeTBMToGW = await ERC20Handler.deployTokenOnL1(chainRichWallet); - await tokens.L1NativeUnfinalizedWithdrawalBeforeTBMToGW.deposit(chainHandler); - withdrawalsBeforeTBM.L1NativeUnfinalizedWithdrawalBeforeTBMToGW = - await tokens.L1NativeUnfinalizedWithdrawalBeforeTBMToGW.withdraw(chainHandler, false); - tokens.L1NativeUnfinalizedWithdrawalAfterTBMToGW = await ERC20Handler.deployTokenOnL1(chainRichWallet); - await tokens.L1NativeUnfinalizedWithdrawalAfterTBMToGW.deposit(chainHandler); - withdrawalsAfterTBM.L1NativeUnfinalizedWithdrawalAfterTBMToGW = - await tokens.L1NativeUnfinalizedWithdrawalAfterTBMToGW.withdraw(chainHandler, false); - // Token native to L1, deposited to L2 - tokens.L1NativeDepositedToL2 = await ERC20Handler.deployTokenOnL1(chainRichWallet); - await tokens.L1NativeDepositedToL2.deposit(chainHandler); - // Token native to L1, not deposited to L2 yet - tokens.L1NativeNotDepositedToL2 = await ERC20Handler.deployTokenOnL1(chainRichWallet); - - // Token native to L2-A, fully withdrawn to L1 - tokens.L2NativeWithdrawnToL1 = await ERC20Handler.deployTokenOnL2(chainHandler); - const L2NativeWithdrawnToL1Amount = await 
tokens.L2NativeWithdrawnToL1.getL2Balance(); - withdrawalsToBeFinalized.L2NativeWithdrawnToL1 = await tokens.L2NativeWithdrawnToL1.withdraw( - chainHandler, - L2NativeWithdrawnToL1Amount - ); - // Token native to L2-A, partially withdrawn to L1 - tokens.L2NativePartiallyWithdrawnToL1 = await ERC20Handler.deployTokenOnL2(chainHandler); - withdrawalsToBeFinalized.L2NativePartiallyWithdrawnToL1 = - await tokens.L2NativePartiallyWithdrawnToL1.withdraw(chainHandler); - // Token native to L2-A, fully withdrawn to L1 but not finalized yet - tokens.L2NativeUnfinalizedWithdrawalBeforeTBMToGW = await ERC20Handler.deployTokenOnL2(chainHandler); - withdrawalsBeforeTBM.L2NativeUnfinalizedWithdrawalBeforeTBMToGW = - await tokens.L2NativeUnfinalizedWithdrawalBeforeTBMToGW.withdraw(chainHandler, false); - tokens.L2NativeUnfinalizedWithdrawalAfterTBMToGW = await ERC20Handler.deployTokenOnL2(chainHandler); - withdrawalsAfterTBM.L2NativeUnfinalizedWithdrawalAfterTBMToGW = - await tokens.L2NativeUnfinalizedWithdrawalAfterTBMToGW.withdraw(chainHandler, false); - // Token native to L2-A, not withdrawn to L1 yet - tokens.L2NativeNotWithdrawnToL1 = await ERC20Handler.deployTokenOnL2(chainHandler); - - // Token native to L2-B, withdrawn from L2-B, and deposited to L2-A - const L2BToken = await ERC20Handler.deployTokenOnL2(customTokenChainHandler, RICH_WALLET_L1_BALANCE); - const L2BTokenWithdrawal = await L2BToken.withdraw(customTokenChainHandler, true, RICH_WALLET_L1_BALANCE); - - // Finalize all needed withdrawals - for (const tokenName of Object.keys(withdrawalsToBeFinalized)) { - await withdrawalsToBeFinalized[tokenName].finalizeWithdrawal(chainRichWallet.ethWallet()); - // We can now define the L1 contracts for the tokens - await tokens[tokenName].setL1Contract(chainHandler); - } - // Get the L1 contract for the L2-B token - await L2BTokenWithdrawal.finalizeWithdrawal(chainRichWallet.ethWallet()); - const L2BTokenL1Contract = await L2BToken.getL1Contract(customTokenChainHandler); - - 
// Deposit L2-B token to L2-A - tokens.L2BToken = await ERC20Handler.fromL2BL1Token( - L2BTokenL1Contract, - chainRichWallet, - customTokenChainRichWallet - ); - await tokens.L2BToken.deposit(chainHandler); - - // Add the base token to the list - tokens.baseToken = new ERC20Handler(chainHandler.l2RichWallet, chainHandler.l1BaseTokenContract, undefined); - const baseTokenAssetId = await tokens.baseToken.assetId(chainHandler); - // Get the current balance of the base token on the chain for accounting purposes - chainHandler.chainBalances[baseTokenAssetId] = await chainHandler.l1AssetTracker.chainBalance( - chainHandler.inner.chainId, - baseTokenAssetId - ); - - for (const tokenName of Object.keys(tokens)) { - if (tokenName === 'L1NativeNotDepositedToL2') continue; - console.log(`Token ${tokenName} Asset ID: ${await tokens[tokenName].assetId(chainHandler)}`); - } - }); - - it('Correctly assigns chain token balances', async () => { - // Chain balances are accounted correctly on L1AT - for (const token of Object.keys(tokens)) { - const assetId = await tokens[token].assetId(chainHandler); - if (assetId === ethers.ZeroHash) continue; - await expect( - chainHandler.assertAssetTrackersState(assetId, { - balances: { - L1AT_GW: 0n, - GWAT: 0n - }, - migrations: { - L1AT: 0n, - L1AT_GW: 0n, - GWAT: 0n - } - }) - ).resolves.toBe(true); - } - }); - - it('Can migrate the chain to Gateway', async () => { - await chainHandler.migrateToGateway(); - }); - - it('Can finalize pending withdrawals', async () => { - for (const tokenName of Object.keys(withdrawalsBeforeTBM)) { - await withdrawalsBeforeTBM[tokenName].finalizeWithdrawal(chainRichWallet.ethWallet()); - const assetId = await tokens[tokenName].assetId(chainHandler); - - if (tokens[tokenName].isL2Token) { - chainHandler.chainBalances[assetId] = ethers.MaxUint256 - withdrawalsBeforeTBM[tokenName].amount; - } else { - chainHandler.chainBalances[assetId] -= withdrawalsBeforeTBM[tokenName].amount; - } - - delete 
withdrawalsBeforeTBM[tokenName]; - } - }); - - it('Can deposit a token to the chain after migrating to gateway', async () => { - tokens.L1NativeDepositedToL2AfterMigrationToGW = await ERC20Handler.deployTokenOnL1(chainRichWallet); - // Fresh deposit after the chain migrated to gateway marks the asset ID as effectively migrated - await tokens.L1NativeDepositedToL2AfterMigrationToGW.deposit(chainHandler); - const assetIdA = await tokens.L1NativeDepositedToL2AfterMigrationToGW.assetId(chainHandler); - await expect( - chainHandler.assertAssetTrackersState(assetIdA, { - balances: { - L1AT: 0n - }, - migrations: { - L1AT: 1n, - L1AT_GW: 0n, - GWAT: 1n - } - }) - ).resolves.toBe(true); - // Deposit existing tokens has the same result - await tokens.L1NativeWithdrawnFromL2.deposit(chainHandler); - const assetIdB = await tokens.L1NativeWithdrawnFromL2.assetId(chainHandler); - await expect( - chainHandler.assertAssetTrackersState(assetIdB, { - balances: { - L1AT: 0n - }, - migrations: { - L1AT: 1n, - L1AT_GW: 0n, - GWAT: 1n - } - }) - ).resolves.toBe(true); - await tokens.L1NativeNotDepositedToL2.deposit(chainHandler); - const assetIdC = await tokens.L1NativeNotDepositedToL2.assetId(chainHandler); - await expect( - chainHandler.assertAssetTrackersState(assetIdC, { - balances: { - L1AT: 0n - }, - migrations: { - L1AT: 1n, - L1AT_GW: 0n, - GWAT: 1n - } - }) - ).resolves.toBe(true); - }); - - it('Cannot initiate migration for a false assetId', async () => { - const bogusAssetId = ethers.randomBytes(32); - await expect(chainHandler.l2AssetTracker.initiateL1ToGatewayMigrationOnL2(bogusAssetId)).rejects.toThrow(); - }); - - it('Can migrate token balances to GW', async () => { - for (const token of Object.keys(tokens)) { - console.log(`Token ${token} Asset ID: ${await tokens[token].assetId(chainHandler)}`); - } - // Take snapshot right before migration - // Base token balance increases slighly due to previous token deposits, here we account for that - const 
existingBaseTokenL1ATBalanceForGW = await chainHandler.l1AssetTracker.chainBalance( - GATEWAY_CHAIN_ID, - chainHandler.baseTokenAssetId - ); - chainHandler.existingBaseTokenL1ATBalanceForGW = existingBaseTokenL1ATBalanceForGW; - // Migrate token balances to gateway - // This also tests repeated migrations, as `L1NativeDepositedToL2AfterMigrationToGW` was already effectively migrated - // This command tries to migrate it again, which will succeed, but later balance check will show it stays the same - await chainHandler.migrateTokenBalancesToGateway(); - // We need to wait for a bit for L1AT's `_sendConfirmationToChains` to propagate to GW and the tested L2 chain - await utils.sleep(5); - }); - - it('Correctly assigns chain token balances after migrating token balances to gateway', async () => { - // Chain balances are accounted correctly on L1AT - for (const tokenName of Object.keys(tokens)) { - const assetId = await tokens[tokenName].assetId(chainHandler); - if (assetId === ethers.ZeroHash) continue; - - const isL2Token = tokens[tokenName].isL2Token; - const baseBalance = chainHandler.chainBalances[assetId] ?? (isL2Token ? ethers.MaxUint256 : 0n); - const pending = withdrawalsAfterTBM[tokenName]; - const pendingAmount = pending?.amount ?? 0n; - const gwExpected = pendingAmount > 0n ? 
baseBalance - pendingAmount : baseBalance; - - await expect( - chainHandler.assertAssetTrackersState(assetId, { - balances: { - L1AT: pendingAmount, - L1AT_GW: gwExpected, - GWAT: gwExpected - }, - migrations: { - L1AT: 1n, - L1AT_GW: 0n, - GWAT: 1n - } - }) - ).resolves.toBe(true); - } - }); - - it('Can finalize pending withdrawals', async () => { - for (const tokenName of Object.keys(withdrawalsAfterTBM)) { - await withdrawalsAfterTBM[tokenName].finalizeWithdrawal(chainRichWallet.ethWallet()); - const assetId = await tokens[tokenName].assetId(chainHandler); - - if (tokens[tokenName].isL2Token) { - chainHandler.chainBalances[assetId] = ethers.MaxUint256 - withdrawalsAfterTBM[tokenName].amount; - } else { - chainHandler.chainBalances[assetId] -= withdrawalsAfterTBM[tokenName].amount; - } - - delete withdrawalsAfterTBM[tokenName]; - } - }); - - it('Can withdraw tokens from the chain', async () => { - // Can fully withdraw existing tokens - const withdrawalA = await tokens.L1NativeDepositedToL2.withdraw(chainHandler); - const withdrawalB = await tokens.L1NativeNotDepositedToL2.withdraw(chainHandler); - const withdrawalC = await tokens.L2BToken.withdraw(chainHandler); - await withdrawalA.finalizeWithdrawal(chainRichWallet.ethWallet()); - await withdrawalB.finalizeWithdrawal(chainRichWallet.ethWallet()); - await withdrawalC.finalizeWithdrawal(chainRichWallet.ethWallet()); - }); - - it('Can deposit a token to the chain after migrating balances to gateway', async () => { - tokens.L1NativeDepositedToL2AfterTBMToGW = await ERC20Handler.deployTokenOnL1(chainRichWallet); - // Fresh deposit after the chain migrated its token balances to gateway marks the asset ID as effectively migrated - await tokens.L1NativeDepositedToL2AfterTBMToGW.deposit(chainHandler); - const assetId = await tokens.L1NativeDepositedToL2AfterTBMToGW.assetId(chainHandler); - await expect( - chainHandler.assertAssetTrackersState(assetId, { - balances: { - L1AT: 0n - }, - migrations: { - L1AT: 1n, - 
L1AT_GW: 0n, - GWAT: 1n - } - }) - ).resolves.toBe(true); - }); - - afterAll(async () => { - console.log('Tearing down chains...'); - if (chainHandler) { - await chainHandler.stopServer(); - } - if (customTokenChainHandler) { - await customTokenChainHandler.stopServer(); - } - console.log('Complete'); - }); -}); diff --git a/core/tests/highlevel-test-tools/tests/token-balance-migration.test.ts b/core/tests/highlevel-test-tools/tests/token-balance-migration.test.ts new file mode 100644 index 000000000000..3b25a1ab1b04 --- /dev/null +++ b/core/tests/highlevel-test-tools/tests/token-balance-migration.test.ts @@ -0,0 +1,386 @@ +import { afterAll, beforeAll, describe, it } from 'vitest'; +import { TESTED_CHAIN_TYPE } from '../src'; + +import * as utils from 'utils'; +import { + WithdrawalHandler, + ChainHandler, + generateChainRichWallet, + ERC20Handler, + expectRevertWithSelector, + TOKEN_MINT_AMOUNT +} from './token-balance-migration-tester'; +import * as zksync from 'zksync-ethers'; +import * as ethers from 'ethers'; +import { expect } from 'vitest'; +import { initTestWallet } from '../src/run-integration-tests'; +import { GATEWAY_CHAIN_ID } from 'utils/src/constants'; + +// This test requires the Gateway chain to be present. +const GATEWAY_CHAIN_NAME = 'gateway'; +const useGatewayChain = process.env.USE_GATEWAY_CHAIN; +const shouldSkip = useGatewayChain !== 'WITH_GATEWAY'; + +if (shouldSkip) { + console.log( + `⏭️ Skipping asset migration test for ${TESTED_CHAIN_TYPE} chain (USE_GATEWAY_CHAIN=${useGatewayChain})` + ); +} + +/// There are the following kinds of tokens' states that we test: +/// At the moment of migration the token can be: +/// - Native to chain, already present on L1/other L2s. +/// - Native to chain, not present on L1 at all (can have unfinalized withdrawal). +/// - Native to L1, never been on the chain. +/// - Native to L1, already present on the chain. +/// - Native to another L2, never present on the chain. 
+/// - Native to another L2, already present on the chain. +/// After the chain migrates to GW, we can classify the states of the tokens the following way: +/// - Migrated the balance to GW. May be done after the token already received some deposits. +/// - Never migrated the balance to GW (but the token is known to the chain). May be done after the token. +/// - Never migrated the balance to GW (but the token is bridged for the first time). No migration should be needed at all. +/// After the chain migrates from GW, we need to test that all the tokens can be withdrawn in sufficient amounts to move +/// the entire balance to L1. It should not be possible to finalize all old interops. +(shouldSkip ? describe.skip : describe)('Token balance migration tests', function () { + let chainHandler: ChainHandler; + let customTokenChainHandler: ChainHandler; + + let l1RichWallet: ethers.Wallet; + let gwRichWallet: zksync.Wallet; + let chainRichWallet: zksync.Wallet; + let customTokenChainRichWallet: zksync.Wallet; + + // Stored token data for cross-test assertions + const tokens: Record = {}; + const tokensSecondChain: Record = {}; + // Unfinalized withdrawal data for cross-test assertions + const unfinalizedWithdrawals: Record = {}; + const unfinalizedWithdrawalsSecondChain: Record = {}; + // Withdrawals initiated while the chain is on Gateway + const gatewayEraWithdrawals: Record = {}; + + beforeAll(async () => { + // Initialize gateway chain + console.log('Initializing rich wallet for gateway chain...'); + await initTestWallet(GATEWAY_CHAIN_NAME); + gwRichWallet = await generateChainRichWallet(GATEWAY_CHAIN_NAME); + l1RichWallet = gwRichWallet.ethWallet(); + console.log('Gateway rich wallet private key:', gwRichWallet.privateKey); + + // Initialize tested chain + console.log(`Creating a new ${TESTED_CHAIN_TYPE} chain...`); + chainHandler = await ChainHandler.createNewChain(TESTED_CHAIN_TYPE); + await chainHandler.initEcosystemContracts(gwRichWallet); + chainRichWallet = 
chainHandler.l2RichWallet; + console.log('Chain rich wallet private key:', chainRichWallet.privateKey); + // Initialize auxiliary chain + console.log('Creating a secondary chain...'); + customTokenChainHandler = await ChainHandler.createNewChain('era'); + customTokenChainRichWallet = customTokenChainHandler.l2RichWallet; + + // DEPLOY TOKENS THAT WILL BE TESTED + // Token native to L1, deposited to L2 + tokens.L1NativeDepositedToL2 = await ERC20Handler.deployTokenOnL1(chainRichWallet); + await tokens.L1NativeDepositedToL2.deposit(chainHandler); + unfinalizedWithdrawals.L1NativeDepositedToL2 = await tokens.L1NativeDepositedToL2.withdraw(); + // Token native to L2-A, withdrawn to L1 + tokens.L2NativeWithdrawnToL1 = await ERC20Handler.deployTokenOnL2(chainHandler); + unfinalizedWithdrawals.L2NativeWithdrawnToL1 = await tokens.L2NativeWithdrawnToL1.withdraw(); + + // Token native to L2-B, withdrawn from L2-B, and deposited to L2-A + tokensSecondChain.L2BToken = await ERC20Handler.deployTokenOnL2(customTokenChainHandler); + unfinalizedWithdrawalsSecondChain.L2BToken = await tokensSecondChain.L2BToken.withdraw(TOKEN_MINT_AMOUNT); + // Token native to L2-B, withdrawn from L2-B, not yet deposited to L2-A + tokensSecondChain.L2BTokenNotDepositedToL2A = await ERC20Handler.deployTokenOnL2(customTokenChainHandler); + unfinalizedWithdrawalsSecondChain.L2BTokenNotDepositedToL2A = + await tokensSecondChain.L2BTokenNotDepositedToL2A.withdraw(TOKEN_MINT_AMOUNT); + + // Token native to L1, not deposited to L2 yet + tokens.L1NativeNotDepositedToL2 = await ERC20Handler.deployTokenOnL1(chainRichWallet); + // Token native to L2-A, not withdrawn to L1 yet + tokens.L2NativeNotWithdrawnToL1 = await ERC20Handler.deployTokenOnL2(chainHandler); + + // Add the base token to the list + tokens.baseToken = new ERC20Handler( + chainHandler.l2RichWallet, + chainHandler.l1BaseTokenContract, + undefined, + true + ); + const baseTokenAssetId = await tokens.baseToken.assetId(chainHandler); + // Get the 
current balance of the base token on the chain for accounting purposes + chainHandler.chainBalances[baseTokenAssetId] = await chainHandler.l1AssetTracker.chainBalance( + chainHandler.inner.chainId, + baseTokenAssetId + ); + }); + + it('Correctly assigns chain token balances', async () => { + // Chain balances are accounted correctly on L1AT + for (const token of Object.keys(tokens)) { + const assetId = await tokens[token].assetId(chainHandler); + if (assetId === ethers.ZeroHash) continue; + await expect( + chainHandler.assertAssetTrackersState(assetId, { + balances: { + L1AT_GW: 0n, + GWAT: 0n + }, + migrations: { + L1AT: 0n, + L1AT_GW: 0n, + GWAT: 0n + } + }) + ).resolves.toBe(true); + } + }); + + it('Can migrate the chain to Gateway', async () => { + await chainHandler.migrateToGateway(); + }); + + it('Can deposit a token to the chain after migrating to gateway', async () => { + // Deposit L1 token that was not deposited to L2 yet + await tokens.L1NativeNotDepositedToL2.deposit(chainHandler); + await expect( + chainHandler.assertAssetTrackersState(await tokens.L1NativeNotDepositedToL2.assetId(chainHandler), { + balances: { + L1AT: 0n + }, + migrations: { + L1AT: 1n, + L1AT_GW: 0n, + GWAT: 1n + } + }) + ).resolves.toBe(true); + + // Finalize withdrawal of L2-B token + await unfinalizedWithdrawalsSecondChain.L2BToken.finalizeWithdrawal(chainRichWallet.ethWallet()); + delete unfinalizedWithdrawalsSecondChain.L2BToken; + // Define the L2-B token for L2-A use + const L2BTokenL1Contract = await tokensSecondChain.L2BToken.getL1Contract(customTokenChainHandler); + tokens.L2BToken = await ERC20Handler.fromL2BL1Token( + L2BTokenL1Contract, + chainRichWallet, + customTokenChainRichWallet + ); + // Deposit L2-B token to L2-A + await tokens.L2BToken.deposit(chainHandler); + await expect( + chainHandler.assertAssetTrackersState(await tokens.L2BToken.assetId(chainHandler), { + balances: { + L1AT: 0n + }, + migrations: { + L1AT: 1n, + L1AT_GW: 0n, + GWAT: 1n + } + }) + 
).resolves.toBe(true); + }); + + it('Can initiate token balance migration to Gateway', async () => { + await chainHandler.initiateTokenBalanceMigration('to-gateway'); + }); + + it('Cannot withdraw tokens that have not been migrated', async () => { + await expectRevertWithSelector( + tokens.L1NativeDepositedToL2.withdraw(), + '0x90ed63bb', + 'Withdrawal before finalizing token balance migration to gateway should revert' + ); + }); + + it('Can finalize pending withdrawals after migrating to gateway', async () => { + // Finalize all pending withdrawals for L2-B + for (const tokenName of Object.keys(unfinalizedWithdrawalsSecondChain)) { + await unfinalizedWithdrawalsSecondChain[tokenName].finalizeWithdrawal(chainRichWallet.ethWallet()); + delete unfinalizedWithdrawalsSecondChain[tokenName]; + } + + // Finalize all pending withdrawals for L2-A + for (const tokenName of Object.keys(unfinalizedWithdrawals)) { + await unfinalizedWithdrawals[tokenName].finalizeWithdrawal(chainRichWallet.ethWallet()); + + // Ensure accounting is correct + const assetId = await tokens[tokenName].assetId(chainHandler); + if (tokens[tokenName].isL2Token) chainHandler.chainBalances[assetId] = ethers.MaxUint256; + chainHandler.chainBalances[assetId] -= unfinalizedWithdrawals[tokenName].amount; + + // We can now define the L1 contracts for the tokens + await tokens[tokenName]?.setL1Contract(chainHandler); + + delete unfinalizedWithdrawals[tokenName]; + } + + // Define the L2-B token for L2-A use + const L2BTokenNotDepositedToL2AL1Contract = + await tokensSecondChain.L2BTokenNotDepositedToL2A.getL1Contract(customTokenChainHandler); + tokens.L2BTokenNotDepositedToL2A = await ERC20Handler.fromL2BL1Token( + L2BTokenNotDepositedToL2AL1Contract, + chainRichWallet, + customTokenChainRichWallet + ); + }); + + it('Cannot initiate migration for a false assetId', async () => { + const bogusAssetId = ethers.randomBytes(32); + await expectRevertWithSelector( + 
chainHandler.l2AssetTracker.initiateL1ToGatewayMigrationOnL2(bogusAssetId), + '0xda72d995', + 'Initiate migration for false assetId should revert' + ); + }); + + it('Can migrate token balances to gateway', async () => { + // Take snapshot right before migration + // Base token balance increases slightly due to previous token deposits, here we account for that + const existingBaseTokenL1ATBalanceForGW = await chainHandler.l1AssetTracker.chainBalance( + GATEWAY_CHAIN_ID, + chainHandler.baseTokenAssetId + ); + chainHandler.existingBaseTokenL1ATBalanceForGW = existingBaseTokenL1ATBalanceForGW; + // Finalize migrating token balances to Gateway + // This also tests repeated migrations, as `L1NativeNotDepositedToL2` was already effectively migrated + // This command tries to migrate it again, which will succeed, but later balance check will show it stays the same + await chainHandler.finalizeTokenBalanceMigration('to-gateway'); + // We need to wait for a bit for L1AT's `_sendConfirmationToChains` to propagate to GW and the tested L2 chain + await utils.sleep(1); + }); + + it('Can withdraw tokens after migrating token balances to gateway', async () => { + gatewayEraWithdrawals.L1NativeDepositedToL2 = await tokens.L1NativeDepositedToL2.withdraw(); + }); + + it('Correctly assigns chain token balances after migrating token balances to gateway', async () => { + for (const tokenName of Object.keys(tokens)) { + if (tokenName === 'L2BTokenNotDepositedToL2A') continue; + const assetId = await tokens[tokenName].assetId(chainHandler); + if (assetId === ethers.ZeroHash) continue; + + const isL2Token = tokens[tokenName].isL2Token; + const baseBalance = chainHandler.chainBalances[assetId] ?? (isL2Token ? 
ethers.MaxUint256 : 0n); + + await expect( + chainHandler.assertAssetTrackersState(assetId, { + balances: { + L1AT: 0n, + L1AT_GW: baseBalance, + GWAT: baseBalance + }, + migrations: { + L1AT: 1n, + L1AT_GW: 0n, + GWAT: 1n + } + }) + ).resolves.toBe(true); + } + }); + + it('Can migrate the chain from gateway', async () => { + await chainHandler.migrateFromGateway(); + }); + + it('Can withdraw tokens from the chain', async () => { + unfinalizedWithdrawals.L1NativeDepositedToL2 = await tokens.L1NativeDepositedToL2.withdraw(); + unfinalizedWithdrawals.baseToken = await tokens.baseToken.withdraw(); + }); + + it('Can initiate token balance migration from Gateway', async () => { + await chainHandler.initiateTokenBalanceMigration('from-gateway'); + }); + + it('Can deposit a token to the chain after migrating from gateway', async () => { + // Deposit L2-B token that was not deposited to L2-A yet effectively marks it as migrated + await tokens.L2BTokenNotDepositedToL2A.deposit(chainHandler); + await expect( + chainHandler.assertAssetTrackersState(await tokens.L2BTokenNotDepositedToL2A.assetId(chainHandler), { + balances: { + L1AT_GW: 0n, + GWAT: 0n + }, + migrations: { + L1AT: 2n, + L1AT_GW: 0n, + GWAT: 0n + } + }) + ).resolves.toBe(true); + }); + + it('Cannot finalize pending withdrawals before finalizing token balance migration to L1', async () => { + for (const tokenName of Object.keys(unfinalizedWithdrawals)) { + await expectRevertWithSelector( + unfinalizedWithdrawals[tokenName].finalizeWithdrawal(chainRichWallet.ethWallet()), + '0x07859b3b', // InsufficientChainBalance + 'Withdrawal before finalizing token balance migration to L1 should revert' + ); + } + }); + + it('Can migrate token balances to L1', async () => { + // Migrate token balances from gateway + // This also tests repeated migrations, as `L2BTokenNotDepositedToL2A` was already effectively migrated + // This command tries to migrate it again, which will succeed, but later balance check will show it stays 
the same + await chainHandler.finalizeTokenBalanceMigration('from-gateway'); + // We need to wait for a bit for L1AT's `_sendConfirmationToChains` to propagate to GW and the tested L2 chain + await utils.sleep(5); + // After migration, update the existing balance to exclude this chain's balance + chainHandler.existingBaseTokenL1ATBalanceForGW = await chainHandler.l1AssetTracker.chainBalance( + GATEWAY_CHAIN_ID, + chainHandler.baseTokenAssetId + ); + }); + + it('Correctly assigns chain token balances after migrating token balances to L1', async () => { + for (const tokenName of Object.keys(tokens)) { + const assetId = await tokens[tokenName].assetId(chainHandler); + if (assetId === ethers.ZeroHash) continue; + + const isL2Token = tokens[tokenName].isL2Token; + const baseBalance = chainHandler.chainBalances[assetId] ?? (isL2Token ? ethers.MaxUint256 : 0n); + const l1GatewayExpected = gatewayEraWithdrawals[tokenName]?.amount ?? 0n; + const l1Expected = baseBalance - l1GatewayExpected; + + // Tokens deposited AFTER migrating from gateway won't have a GWAT migration number set + const depositedAfterFromGW = tokenName === 'L2BTokenNotDepositedToL2A'; + + await expect( + chainHandler.assertAssetTrackersState(assetId, { + balances: { + L1AT: l1Expected, + L1AT_GW: l1GatewayExpected, + GWAT: 0n + }, + migrations: { + L1AT: 2n, + L1AT_GW: 0n, + GWAT: depositedAfterFromGW ? 
0n : 2n + } + }) + ).resolves.toBe(true); + } + }); + + it('Can finalize pending withdrawals after migrating token balances from gateway', async () => { + for (const tokenName of Object.keys(unfinalizedWithdrawals)) { + await unfinalizedWithdrawals[tokenName].finalizeWithdrawal(chainRichWallet.ethWallet()); + delete unfinalizedWithdrawals[tokenName]; + } + }); + + afterAll(async () => { + console.log('Tearing down chains...'); + if (chainHandler) { + await chainHandler.stopServer(); + } + if (customTokenChainHandler) { + await customTokenChainHandler.stopServer(); + } + console.log('Complete'); + }); +}); diff --git a/core/tests/highlevel-test-tools/vitest.config.ts b/core/tests/highlevel-test-tools/vitest.config.ts index dbfb8d5b5dbd..e16e19cce2fc 100644 --- a/core/tests/highlevel-test-tools/vitest.config.ts +++ b/core/tests/highlevel-test-tools/vitest.config.ts @@ -11,8 +11,8 @@ export default defineConfig({ reporter: ['text', 'json', 'html'], exclude: ['node_modules/', 'dist/', '**/*.d.ts', '**/*.test.ts', '**/*.spec.ts', 'tests/setup.ts'] }, - testTimeout: 15 * 60 * 1000, // 15 minutes - hookTimeout: 15 * 60 * 1000, + testTimeout: 20 * 60 * 1000, // 20 minutes + hookTimeout: 25 * 60 * 1000, // 25 minutes - revert test beforeAll needs ~20 min globalSetup: './global-setup.ts' }, resolve: { diff --git a/core/tests/ts-integration/src/constants.ts b/core/tests/ts-integration/src/constants.ts index 98ab5c4ce08c..ad24dc16e706 100644 --- a/core/tests/ts-integration/src/constants.ts +++ b/core/tests/ts-integration/src/constants.ts @@ -60,4 +60,8 @@ export const ArtifactL2AssetTracker = readContract(`${ARTIFACTS_PATH}`, 'L2Asset export const ArtifactDummyInteropRecipient = readContract(`${L1_ZK_ARTIFACTS_PATH}`, 'DummyInteropRecipient'); export const ArtifactIBridgehubBase = readContract(`${ARTIFACTS_PATH}`, 'IBridgehubBase'); export const ArtifactIGetters = readContract(`${ARTIFACTS_PATH}`, 'IGetters'); -export const ArtifactIChainAssetHandler = 
readContract(`${ARTIFACTS_PATH}`, 'IChainAssetHandler'); +export const ArtifactIChainAssetHandler = readContract( + `${ARTIFACTS_PATH}`, + 'IChainAssetHandler', + 'IChainAssetHandlerBase' +); diff --git a/core/tests/ts-integration/src/helpers.ts b/core/tests/ts-integration/src/helpers.ts index aa58de6d2740..89cf19db142c 100644 --- a/core/tests/ts-integration/src/helpers.ts +++ b/core/tests/ts-integration/src/helpers.ts @@ -82,10 +82,21 @@ export async function anyTransaction(wallet: zksync.Wallet): Promise timeoutMs) { + throw new Error( + `waitUntilBlockFinalized: timed out after ${(timeoutMs / 1000).toFixed(0)}s waiting for block ${blockNumber} to be finalized (last finalized: ${printedBlockNumber})` + ); + } const block = await wallet.provider.getBlock('finalized'); if (blockNumber <= block.number) { break; @@ -99,16 +110,20 @@ export async function waitUntilBlockFinalized(wallet: zksync.Wallet, blockNumber } /** - * Waits until the requested block is finalized. + * Waits until the requested block is executed on the gateway. * * @param wallet Wallet to use to poll the server. + * @param gwWallet Gateway wallet. * @param blockNumber Number of block. + * @param timeoutMs Maximum time to wait (default 10 min). 
*/ export async function waitUntilBlockExecutedOnGateway( wallet: zksync.Wallet, gwWallet: zksync.Wallet, - blockNumber: number + blockNumber: number, + timeoutMs: number = 10 * 60 * 1000 ) { + const start = Date.now(); const bridgehub = new ethers.Contract(L2_BRIDGEHUB_ADDRESS, ArtifactIBridgehubBase.abi, gwWallet); const zkChainAddr = await bridgehub.getZKChain(await wallet.provider.getNetwork().then((net: any) => net.chainId)); const gettersFacet = new ethers.Contract(zkChainAddr, ArtifactIGetters.abi, gwWallet); @@ -116,6 +131,11 @@ export async function waitUntilBlockExecutedOnGateway( let batchNumber = (await wallet.provider.getBlockDetails(blockNumber)).l1BatchNumber; let currentExecutedBatchNumber = 0; while (currentExecutedBatchNumber < batchNumber) { + if (Date.now() - start > timeoutMs) { + throw new Error( + `waitUntilBlockExecutedOnGateway: timed out after ${(timeoutMs / 1000).toFixed(0)}s waiting for block ${blockNumber} (batch ${batchNumber}, current executed: ${currentExecutedBatchNumber})` + ); + } currentExecutedBatchNumber = await gettersFacet.getTotalBatchesExecuted(); if (currentExecutedBatchNumber >= batchNumber) { break; @@ -125,9 +145,19 @@ export async function waitUntilBlockExecutedOnGateway( } } -export async function waitUntilBlockCommitted(wallet: zksync.Wallet, blockNumber: number) { +export async function waitUntilBlockCommitted( + wallet: zksync.Wallet, + blockNumber: number, + timeoutMs: number = 10 * 60 * 1000 +) { + const start = Date.now(); console.log('Waiting for block to be committed...', blockNumber); while (true) { + if (Date.now() - start > timeoutMs) { + throw new Error( + `waitUntilBlockCommitted: timed out after ${(timeoutMs / 1000).toFixed(0)}s waiting for block ${blockNumber} to be committed` + ); + } const block = await wallet.provider.getBlock('committed'); if (blockNumber <= block.number) { break; @@ -152,9 +182,14 @@ async function getL1BatchFinalizationStatus(provider: zksync.Provider, number: n return null; } 
-export async function waitForBlockToBeFinalizedOnL1(wallet: zksync.Wallet, blockNumber: number) { +export async function waitForBlockToBeFinalizedOnL1( + wallet: zksync.Wallet, + blockNumber: number, + timeoutMs: number = 10 * 60 * 1000 +) { + const start = Date.now(); // Waiting for the block to be finalized on the immediate settlement layer. - await waitUntilBlockFinalized(wallet, blockNumber); + await waitUntilBlockFinalized(wallet, blockNumber, timeoutMs); const provider = wallet.provider; @@ -163,27 +198,67 @@ export async function waitForBlockToBeFinalizedOnL1(wallet: zksync.Wallet, block let result = await getL1BatchFinalizationStatus(provider, batchNumber); while (result == null) { + if (Date.now() - start > timeoutMs) { + throw new Error( + `waitForBlockToBeFinalizedOnL1: timed out after ${(timeoutMs / 1000).toFixed(0)}s waiting for batch ${batchNumber} (block ${blockNumber}) to be finalized on L1` + ); + } await zksync.utils.sleep(provider.pollingInterval); result = await getL1BatchFinalizationStatus(provider, batchNumber); } } -export async function waitForL2ToL1LogProof(wallet: zksync.Wallet, blockNumber: number, txHash: string) { +export async function waitForL2ToL1LogProof( + wallet: zksync.Wallet, + blockNumber: number, + txHash: string, + timeoutMs: number = 10 * 60 * 1000 +) { + const start = Date.now(); log('waiting for block to be finalized'); // First, we wait for block to be finalized. - await waitUntilBlockFinalized(wallet, blockNumber); + await waitUntilBlockFinalized(wallet, blockNumber, timeoutMs); log('waiting for log proof'); // Second, we wait for the log proof. let i = 0; while ((await wallet.provider.getLogProof(txHash)) == null) { + if (Date.now() - start > timeoutMs) { + throw new Error( + `waitForL2ToL1LogProof: timed out after ${(timeoutMs / 1000).toFixed(0)}s waiting for log proof of tx ${txHash} (block ${blockNumber})` + ); + } log(`Waiting for log proof... 
${i}`); await zksync.utils.sleep(wallet.provider.pollingInterval); i++; } } +export async function waitForPriorityOp( + wallet: zksync.Wallet, + l1Receipt: ethers.TransactionReceipt, + timeoutMs: number = 10 * 60 * 1000 +) { + const start = Date.now(); + const mainContractAddress = await wallet.provider.getMainContractAddress(); + const l2Hash = zksync.utils.getL2HashFromPriorityOp(l1Receipt, mainContractAddress); + let l2Receipt: ethers.TransactionReceipt | null = null; + while (!l2Receipt) { + if (Date.now() - start > timeoutMs) { + throw new Error( + `waitForPriorityOp: timed out after ${(timeoutMs / 1000).toFixed(0)}s waiting for L2 receipt of priority op ${l2Hash}` + ); + } + l2Receipt = await wallet.provider.getTransactionReceipt(l2Hash); + if (!l2Receipt) { + await zksync.utils.sleep(wallet.provider.pollingInterval); + } + } + await waitUntilBlockFinalized(wallet, l2Receipt.blockNumber, timeoutMs - (Date.now() - start)); + return l2Receipt; +} + export async function waitForInteropRootNonZero( provider: zksync.Provider, wallet: zksync.Wallet, diff --git a/core/tests/ts-integration/src/interop-setup.ts b/core/tests/ts-integration/src/interop-setup.ts index 11dc91d4c024..ba153145ff72 100644 --- a/core/tests/ts-integration/src/interop-setup.ts +++ b/core/tests/ts-integration/src/interop-setup.ts @@ -4,15 +4,16 @@ import * as path from 'path'; import { TestMaster } from './test-master'; import { Token } from './types'; import * as utils from 'utils'; -import { shouldLoadConfigFromFile } from 'utils/build/file-configs'; import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; +import { encodeNTVAssetId } from 'zksync-ethers/build/utils'; import { RetryableWallet } from './retry-provider'; import { scaledGasPrice, deployContract, + readContract, waitUntilBlockFinalized, waitForInteropRootNonZero, getGWBlockNumber, @@ -27,6 +28,7 @@ import { L2_INTEROP_HANDLER_ADDRESS, L2_INTEROP_CENTER_ADDRESS, ETH_ADDRESS_IN_CONTRACTS, + ARTIFACTS_PATH, 
ArtifactInteropCenter, ArtifactInteropHandler, ArtifactNativeTokenVault, @@ -330,6 +332,7 @@ export class InteropTestContext { const state = JSON.parse(fs.readFileSync(SHARED_STATE_FILE, 'utf-8')); this.loadState(state); + await this.fundInterop1TokenAForSuite(); return; } catch (e) { // File might be half-written, continue waiting @@ -380,36 +383,35 @@ export class InteropTestContext { } private async performSetup() { - const tokenADeploy = await deployContract(this.interop1Wallet, ArtifactMintableERC20, [ + const l1TokenArtifact = readContract(ARTIFACTS_PATH, 'TestnetERC20Token'); + const l1TokenBytecode = l1TokenArtifact.bytecode.object ?? l1TokenArtifact.bytecode; + const l1TokenFactory = new ethers.ContractFactory( + l1TokenArtifact.abi, + l1TokenBytecode, + this.interop1RichWallet.ethWallet() + ); + const l1TokenDeploy = await l1TokenFactory.deploy( this.tokenA.name, this.tokenA.symbol, - this.tokenA.decimals - ]); - this.tokenA.l2Address = await tokenADeploy.getAddress(); - - // Register tokens on Interop1 - await (await this.interop1NativeTokenVault.registerToken(this.tokenA.l2Address)).wait(); - this.tokenA.assetId = await this.interop1NativeTokenVault.assetId(this.tokenA.l2Address); + Number(this.tokenA.decimals) + ); + await l1TokenDeploy.waitForDeployment(); + this.tokenA.l1Address = await l1TokenDeploy.getAddress(); + await this.fundInterop1TokenAForSuite(); + + const l1ChainId = (await this.l1Provider.getNetwork()).chainId; + this.tokenA.assetId = encodeNTVAssetId(l1ChainId, this.tokenA.l1Address); + while (true) { + this.tokenA.l2Address = await this.interop1NativeTokenVault.tokenAddress(this.tokenA.assetId); + if (this.tokenA.l2Address !== ethers.ZeroAddress) break; + await utils.sleep(1); + } this.interop1TokenA = new zksync.Contract( this.tokenA.l2Address, ArtifactMintableERC20.abi, this.interop1Wallet ); - const fileConfig = shouldLoadConfigFromFile(); - const migrationCmd = `zkstack chain gateway migrate-token-balances --to-gateway true 
--gateway-chain-name gateway --chain ${fileConfig.chain}`; - - // Migration might sometimes fail, so we retry a few times. - for (let attempt = 1; attempt <= 3; attempt++) { - try { - await utils.spawn(migrationCmd); - break; - } catch (e) { - if (attempt === 3) throw e; - await utils.sleep(2 * attempt); - } - } - // Save State const newState = { tokenA: { @@ -426,6 +428,25 @@ export class InteropTestContext { fs.writeFileSync(SHARED_STATE_FILE, JSON.stringify(newState, null, 2)); } + private async fundInterop1TokenAForSuite() { + const l1Token = new ethers.Contract( + this.tokenA.l1Address, + readContract(ARTIFACTS_PATH, 'TestnetERC20Token').abi, + this.interop1RichWallet.ethWallet() + ); + const initialL1Amount = ethers.parseEther('1000'); + await (await l1Token.mint(this.interop1RichWallet.address, initialL1Amount)).wait(); + await ( + await this.interop1RichWallet.deposit({ + token: this.tokenA.l1Address, + amount: initialL1Amount, + to: this.interop1Wallet.address, + approveERC20: true, + approveBaseERC20: true + }) + ).wait(); + } + private loadState(state: any) { this.tokenA = { ...state.tokenA, @@ -610,17 +631,13 @@ export class InteropTestContext { } /** - * Approves and mints a random amount of test tokens and returns the amount. + * Approves a random amount of test tokens and returns the amount. 
*/ async getAndApproveTokenTransferAmount(): Promise<bigint> { const transferAmount = BigInt(Math.floor(Math.random() * 900) + 100); - await Promise.all([ - // Approve token transfer on Interop1 - (await this.interop1TokenA.approve(L2_NATIVE_TOKEN_VAULT_ADDRESS, transferAmount)).wait(), - // Mint tokens for the test wallet on Interop1 for the transfer - (await this.interop1TokenA.mint(this.interop1Wallet.address, transferAmount)).wait() - ]); + // Approve token transfer on Interop1 + await (await this.interop1TokenA.approve(L2_NATIVE_TOKEN_VAULT_ADDRESS, transferAmount)).wait(); return transferAmount; } diff --git a/core/tests/ts-integration/src/modifiers/balance-checker.ts b/core/tests/ts-integration/src/modifiers/balance-checker.ts index dabfe3124a9d..7259de8c6b5c 100644 --- a/core/tests/ts-integration/src/modifiers/balance-checker.ts +++ b/core/tests/ts-integration/src/modifiers/balance-checker.ts @@ -275,33 +275,52 @@ function extractRefundForL1ToL2(receipt: zksync.types.TransactionReceipt, refund refundRecipient = refundRecipient ?? receipt.from; const mintTopic = ethers.keccak256(ethers.toUtf8Bytes('Mint(address,uint256)')); + const formattedRefundRecipient = ethers.hexlify(ethers.zeroPadValue(refundRecipient, 32)); + const bootloaderAddress = '0x0000000000000000000000000000000000008001'; + const formattedBootloader = ethers.hexlify(ethers.zeroPadValue(bootloaderAddress, 32)); + + // With the new bootloader behavior, there may be multiple Mint events: + // 1. toMint (deposit) to `from` - BEFORE operator fee mint + // 2. payToOperator to bootloader - operator fee + // 3. toRefundRecipient (gas refund) to refund recipient - AFTER operator fee mint + // + // For failed transactions: + // 1. (toMint reverted) + // 2. payToOperator to bootloader + // 3. toRefundRecipient (full deposit - gas) to refund recipient + // + // We only want to subtract the gas refund (mints AFTER the bootloader mint), + // not the deposit (mints BEFORE the bootloader mint). 
+ + // Find the bootloader mint (operator fee) - this separates deposit from refund + const bootloaderMintIndex = receipt.logs.findIndex((log) => { + return ( + log.topics.length == 2 && + log.topics[0] == mintTopic && + log.topics[1].toLowerCase() === formattedBootloader.toLowerCase() + ); + }); - const refundLogs = receipt.logs.filter((log) => { - return log.topics.length == 2 && log.topics[0] == mintTopic; + // Find Mint events to the refund recipient AFTER the bootloader mint + const refundLogs = receipt.logs.filter((log, index) => { + // Only consider mints after the bootloader mint (these are gas refunds) + // If no bootloader mint found, fall back to considering all mints as refunds + const isAfterBootloaderMint = bootloaderMintIndex === -1 || index > bootloaderMintIndex; + return ( + isAfterBootloaderMint && + log.topics.length == 2 && + log.topics[0] == mintTopic && + log.topics[1].toLowerCase() === formattedRefundRecipient.toLowerCase() + ); }); if (refundLogs.length === 0) { - throw { - message: `No refund log was found in the following transaction receipt`, - receipt - }; - } - - // Note, that it is important that the refund log is the last log in the receipt, because - // there are multiple `Mint` events during a single L1->L2 transaction, so this one covers the - // final refund. 
- const refundLog = refundLogs[refundLogs.length - 1]; - - const formattedRefundRecipient = ethers.hexlify(ethers.zeroPadValue(refundRecipient, 32)); - - if (refundLog.topics[1].toLowerCase() !== formattedRefundRecipient.toLowerCase()) { - throw { - message: `The last ETH minted is not the refund recipient in the following transaction receipt`, - receipt - }; + // No refund after bootloader mint - this is valid (e.g., all gas was consumed) + return 0n; } - return BigInt(refundLog.data); + // Sum all refunds to this recipient (in case there are multiple after bootloader mint) + return refundLogs.reduce((total, log) => total + BigInt(log.data), 0n); } /** diff --git a/core/tests/ts-integration/tests/fees.test.ts b/core/tests/ts-integration/tests/fees.test.ts index 4b80fd75b809..eb94c64963c3 100644 --- a/core/tests/ts-integration/tests/fees.test.ts +++ b/core/tests/ts-integration/tests/fees.test.ts @@ -289,7 +289,7 @@ testFees('Test fees', function () { expect(receipt.gasPrice).toBe(BigInt(expectedConvertedGasPrice)); }); - test('Test base token ratio fluctuations', async () => { + test.skip('Test base token ratio fluctuations', async () => { const l1GasPrice = 2_000_000_000n; /// set to 2 gwei if (isETHBasedChain) return; diff --git a/core/tests/ts-integration/tests/interop-b-bundles.test.ts b/core/tests/ts-integration/tests/interop-b-bundles.test.ts index 012cb2730f5d..43a04352cd78 100644 --- a/core/tests/ts-integration/tests/interop-b-bundles.test.ts +++ b/core/tests/ts-integration/tests/interop-b-bundles.test.ts @@ -289,7 +289,7 @@ describe('Interop-B Bundles behavior checks', () => { expect((otherRecipientBalance - otherRecipientBalanceBefore).toString()).toBe(bundles.twoIndirect.amounts[1]); }); - test('Can received a mixed call bundle', async () => { + test('Can receive a mixed call bundle', async () => { if (ctx.skipInteropTests) return; const recipientBalanceBefore = await ctx.getInterop2Balance(ctx.dummyInteropRecipient); diff --git 
a/core/tests/ts-integration/tests/l1.test.ts b/core/tests/ts-integration/tests/l1.test.ts index 5d5d087c51aa..80660531193f 100644 --- a/core/tests/ts-integration/tests/l1.test.ts +++ b/core/tests/ts-integration/tests/l1.test.ts @@ -37,7 +37,16 @@ describe('Tests for L1 behavior', () => { let errorContract: zksync.Contract; let isETHBasedChain: boolean; - let expectedL2Costs: bigint; + + const mintValueForRequestExecute = async (gasPrice: bigint) => { + if (isETHBasedChain) return 0n; + const baseCost = await alice.getBaseCost({ + gasLimit: maxL2GasLimitForPriorityTxs(testMaster.environment().priorityTxMaxGasLimit), + gasPerPubdataByte: REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_LIMIT, + gasPrice + }); + return (baseCost * 140n) / 100n; + }; beforeAll(() => { testMaster = TestMaster.getInstance(__filename); @@ -63,28 +72,23 @@ describe('Tests for L1 behavior', () => { }); test('Should calculate l2 base cost, if base token is not ETH', async () => { + if (isETHBasedChain) return; + const gasPrice = await scaledGasPrice(alice); - if (!isETHBasedChain) { - expectedL2Costs = - ((await alice.getBaseCost({ - gasLimit: maxL2GasLimitForPriorityTxs(testMaster.environment().priorityTxMaxGasLimit), - gasPerPubdataByte: REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_LIMIT, - gasPrice - })) * - 140n) / - 100n; - } + const mintValue = await mintValueForRequestExecute(gasPrice); + expect(mintValue).toBeGreaterThan(0n); }); test('Should request L1 execute', async () => { const calldata = counterContract.interface.encodeFunctionData('increment', ['1']); const gasPrice = await scaledGasPrice(alice); + const mintValue = await mintValueForRequestExecute(gasPrice); await expect( alice.requestExecute({ contractAddress: await counterContract.getAddress(), calldata, - mintValue: isETHBasedChain ? 
0n : expectedL2Costs, + mintValue, overrides: { gasPrice } @@ -96,13 +100,14 @@ describe('Tests for L1 behavior', () => { const l2Value = 10; const calldata = contextContract.interface.encodeFunctionData('requireMsgValue', [l2Value]); const gasPrice = await scaledGasPrice(alice); + const mintValue = await mintValueForRequestExecute(gasPrice); await expect( alice.requestExecute({ contractAddress: await contextContract.getAddress(), calldata, l2Value, - mintValue: isETHBasedChain ? 0n : expectedL2Costs, + mintValue, overrides: { gasPrice } @@ -113,13 +118,14 @@ describe('Tests for L1 behavior', () => { test('Should fail requested L1 execute', async () => { const calldata = errorContract.interface.encodeFunctionData('require_short', []); const gasPrice = await scaledGasPrice(alice); + const mintValue = await mintValueForRequestExecute(gasPrice); await expect( alice.requestExecute({ contractAddress: await errorContract.getAddress(), calldata, l2GasLimit: DEFAULT_L2_GAS_LIMIT, - mintValue: isETHBasedChain ? 0n : expectedL2Costs, + mintValue, overrides: { gasPrice } @@ -169,13 +175,14 @@ describe('Tests for L1 behavior', () => { test('Should check max L2 gas limit for priority txs', async () => { const gasPrice = await scaledGasPrice(alice); const l2GasLimit = maxL2GasLimitForPriorityTxs(testMaster.environment().priorityTxMaxGasLimit); + const mintValue = await mintValueForRequestExecute(gasPrice); // Check that the request with higher `gasLimit` fails. let priorityOpHandle = await alice.requestExecute({ contractAddress: alice.address, calldata: '0x', l2GasLimit: l2GasLimit + 1n, - mintValue: isETHBasedChain ? 0n : expectedL2Costs, + mintValue, overrides: { gasPrice, gasLimit: 600_000 @@ -194,7 +201,7 @@ describe('Tests for L1 behavior', () => { contractAddress: alice.address, calldata: '0x', l2GasLimit, - mintValue: isETHBasedChain ? 
0n : expectedL2Costs, + mintValue, overrides: { gasPrice } @@ -215,6 +222,7 @@ describe('Tests for L1 behavior', () => { // We check that we will run out of gas if we do a bit smaller amount of writes. const calldata = contract.interface.encodeFunctionData('writes', [0, 4500, 1]); const gasPrice = await scaledGasPrice(alice); + const mintValue = await mintValueForRequestExecute(gasPrice); const l2GasLimit = maxL2GasLimitForPriorityTxs(testMaster.environment().priorityTxMaxGasLimit); @@ -222,7 +230,7 @@ describe('Tests for L1 behavior', () => { contractAddress: await contract.getAddress(), calldata, l2GasLimit, - mintValue: isETHBasedChain ? 0n : expectedL2Costs, + mintValue, overrides: { gasPrice } @@ -271,13 +279,14 @@ describe('Tests for L1 behavior', () => { const calldata = contract.interface.encodeFunctionData('writes', [0, repeatedWritesInOneTx, 2]); const gasPrice = await scaledGasPrice(alice); + const mintValue = await mintValueForRequestExecute(gasPrice); const l2GasLimit = maxL2GasLimitForPriorityTxs(testMaster.environment().priorityTxMaxGasLimit); const priorityOpHandle = await alice.requestExecute({ contractAddress: await contract.getAddress(), calldata, l2GasLimit, - mintValue: isETHBasedChain ? 0n : expectedL2Costs, + mintValue, overrides: { gasPrice } @@ -305,6 +314,7 @@ describe('Tests for L1 behavior', () => { // We check that we will run out of gas if we send a bit smaller amount of L2->L1 logs. const calldata = contract.interface.encodeFunctionData('l2_l1_messages', [1000]); const gasPrice = await scaledGasPrice(alice); + const mintValue = await mintValueForRequestExecute(gasPrice); const l2GasLimit = maxL2GasLimitForPriorityTxs(testMaster.environment().priorityTxMaxGasLimit); @@ -312,7 +322,7 @@ describe('Tests for L1 behavior', () => { contractAddress: await contract.getAddress(), calldata, l2GasLimit, - mintValue: isETHBasedChain ? 
0n : expectedL2Costs, + mintValue, overrides: { gasPrice } @@ -345,6 +355,7 @@ describe('Tests for L1 behavior', () => { (MAX_PUBDATA_PER_BATCH * 9n) / 10n ]); const gasPrice = await scaledGasPrice(alice); + const mintValue = await mintValueForRequestExecute(gasPrice); const l2GasLimit = maxL2GasLimitForPriorityTxs(testMaster.environment().priorityTxMaxGasLimit); @@ -352,7 +363,7 @@ describe('Tests for L1 behavior', () => { contractAddress: await contract.getAddress(), calldata, l2GasLimit, - mintValue: isETHBasedChain ? 0n : expectedL2Costs, + mintValue, overrides: { gasPrice } diff --git a/core/tests/ts-integration/tests/l2-erc20.test.ts b/core/tests/ts-integration/tests/l2-erc20.test.ts index 82b36b57b930..615b7b8e0b67 100644 --- a/core/tests/ts-integration/tests/l2-erc20.test.ts +++ b/core/tests/ts-integration/tests/l2-erc20.test.ts @@ -9,10 +9,9 @@ import { shouldChangeTokenBalances, shouldOnlyTakeFee } from '../src/modifiers/b import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; import { Provider, Wallet } from 'ethers'; -import { scaledGasPrice, deployContract, readContract, waitForL2ToL1LogProof } from '../src/helpers'; +import { scaledGasPrice, deployContract, readContract, waitForL2ToL1LogProof, waitForPriorityOp } from '../src/helpers'; import { encodeNTVAssetId } from 'zksync-ethers/build/utils'; import { ARTIFACTS_PATH, L2_ASSET_TRACKER_ADDRESS } from '../src/constants'; -import { sleep } from 'utils/src'; async function migrateTokenBalanceFromL1ToGateway( alice: zksync.Wallet, @@ -50,10 +49,12 @@ async function migrateTokenBalanceFromL1ToGateway( }; // Finalize the migration on L1. 
- await expect(l1AssetTracker.receiveMigrationOnL1(finalizeDepositParams)).toBeAccepted(); + const l1ReceiveTx = await l1AssetTracker.receiveMigrationOnL1(finalizeDepositParams); + await expect(l1ReceiveTx).toBeAccepted(); + const l1Receipt = await l1ReceiveTx.wait(); - // TODO: the above tx has created some priority ops, we should wait for them - await sleep(5); + // Wait for priority ops created by the migration to execute. + await waitForPriorityOp(alice, l1Receipt); } describe('L2 native ERC20 contract checks', () => { diff --git a/core/tests/upgrade-test/tests/upgrade.test.ts b/core/tests/upgrade-test/tests/upgrade.test.ts index e4fc53fb2d98..760405147d07 100644 --- a/core/tests/upgrade-test/tests/upgrade.test.ts +++ b/core/tests/upgrade-test/tests/upgrade.test.ts @@ -52,6 +52,58 @@ interface Call { data: BytesLike; } +/** + * TypeScript interface matching the Solidity L2CanonicalTransaction struct from Messaging.sol. + * Field names must match exactly for ethers.js to encode correctly. + */ +interface L2CanonicalTransaction { + txType: BigNumberish; + from: BigNumberish; + to: BigNumberish; + gasLimit: BigNumberish; + gasPerPubdataByteLimit: BigNumberish; + maxFeePerGas: BigNumberish; + maxPriorityFeePerGas: BigNumberish; + paymaster: BigNumberish; + nonce: BigNumberish; + value: BigNumberish; + reserved: [BigNumberish, BigNumberish, BigNumberish, BigNumberish]; + data: BytesLike; + signature: BytesLike; + factoryDeps: BigNumberish[]; + paymasterInput: BytesLike; + reservedDynamic: BytesLike; +} + +/** + * TypeScript interface matching the Solidity VerifierParams struct from IVerifier.sol. + */ +interface VerifierParams { + recursionNodeLevelVkHash: BytesLike; + recursionLeafLevelVkHash: BytesLike; + recursionCircuitsSetVksHash: BytesLike; +} + +/** + * TypeScript interface matching the Solidity ProposedUpgrade struct from BaseZkSyncUpgrade.sol. + * Field names must match exactly for ethers.js to encode correctly. 
+ * + * The DefaultUpgradeAbi import from zkstack-out ensures that if the Solidity struct changes, + * ethers.js will fail at runtime with a clear mismatch error. + */ +interface ProposedUpgrade { + l2ProtocolUpgradeTx: L2CanonicalTransaction; + bootloaderHash: BytesLike; + defaultAccountHash: BytesLike; + evmEmulatorHash: BytesLike; + verifier: string; + verifierParams: VerifierParams; + l1ContractsUpgradeCalldata: BytesLike; + postUpgradeCalldata: BytesLike; + upgradeTimestamp: BigNumberish; + newProtocolVersion: BigNumberish; +} + describe('Upgrade test', function () { // Utility wallets for facilitating testing let tester: Tester; @@ -650,33 +702,10 @@ async function prepareUpgradeCalldata( l2Provider: zksync.Provider, upgradeAddress: string, params: { - l2ProtocolUpgradeTx: { - txType: BigNumberish; - from: BigNumberish; - to: BigNumberish; - gasLimit: BigNumberish; - gasPerPubdataByteLimit: BigNumberish; - maxFeePerGas: BigNumberish; - maxPriorityFeePerGas: BigNumberish; - paymaster: BigNumberish; - nonce?: BigNumberish; - value: BigNumberish; - reserved: [BigNumberish, BigNumberish, BigNumberish, BigNumberish]; - data: BytesLike; - signature: BytesLike; - factoryDeps: BigNumberish[]; - paymasterInput: BytesLike; - reservedDynamic: BytesLike; - }; + l2ProtocolUpgradeTx: Omit<L2CanonicalTransaction, 'nonce'> & { nonce?: BigNumberish }; bootloaderHash?: BytesLike; - defaultAAHash?: BytesLike; + defaultAccountHash?: BytesLike; evmEmulatorHash?: BytesLike; - verifier?: string; - verifierParams?: { - recursionNodeLevelVkHash: BytesLike; - recursionLeafLevelVkHash: BytesLike; - recursionCircuitsSetVksHash: BytesLike; - }; l1ContractsUpgradeCalldata?: BytesLike; postUpgradeCalldata?: BytesLike; upgradeTimestamp: BigNumberish; @@ -698,22 +727,31 @@ async function prepareUpgradeCalldata( const oldProtocolVersion = Number(await settlementLayerDiamondProxy.getProtocolVersion()); const newProtocolVersion = addToProtocolVersion(oldProtocolVersion, 1, 1); + const verifierAddress = await 
settlementLayerDiamondProxy.getVerifier(); params.l2ProtocolUpgradeTx.nonce ??= BigInt(unpackNumberSemVer(newProtocolVersion)[1]); - const upgradeInitData = contracts.l1DefaultUpgradeAbi.encodeFunctionData('upgrade', [ - [ - params.l2ProtocolUpgradeTx, - params.bootloaderHash ?? ethers.ZeroHash, - params.defaultAAHash ?? ethers.ZeroHash, - params.evmEmulatorHash ?? ethers.ZeroHash, - params.verifier ?? ethers.ZeroAddress, - params.verifierParams ?? [ethers.ZeroHash, ethers.ZeroHash, ethers.ZeroHash], - params.l1ContractsUpgradeCalldata ?? '0x', - params.postUpgradeCalldata ?? '0x', - params.upgradeTimestamp, - newProtocolVersion - ] - ]); + + // Construct ProposedUpgrade struct using named fields to match the Solidity struct. + // This ensures TypeScript and ethers.js will catch any field name mismatches. + // Note: verifier and verifierParams are set to zero - the verifier is now set via setNewVersionUpgrade on CTM. + const proposedUpgrade: ProposedUpgrade = { + l2ProtocolUpgradeTx: params.l2ProtocolUpgradeTx as L2CanonicalTransaction, + bootloaderHash: params.bootloaderHash ?? ethers.ZeroHash, + defaultAccountHash: params.defaultAccountHash ?? ethers.ZeroHash, + evmEmulatorHash: params.evmEmulatorHash ?? ethers.ZeroHash, + verifier: ethers.ZeroAddress, + verifierParams: { + recursionNodeLevelVkHash: ethers.ZeroHash, + recursionLeafLevelVkHash: ethers.ZeroHash, + recursionCircuitsSetVksHash: ethers.ZeroHash + }, + l1ContractsUpgradeCalldata: params.l1ContractsUpgradeCalldata ?? '0x', + postUpgradeCalldata: params.postUpgradeCalldata ?? 
'0x', + upgradeTimestamp: params.upgradeTimestamp, + newProtocolVersion: newProtocolVersion + }; + + const upgradeInitData = contracts.l1DefaultUpgradeAbi.encodeFunctionData('upgrade', [proposedUpgrade]); // Prepare the diamond cut data const upgradeParam = { @@ -728,7 +766,8 @@ async function prepareUpgradeCalldata( oldProtocolVersion, // The protocol version will not have any deadline in this upgrade ethers.MaxUint256, - newProtocolVersion + newProtocolVersion, + verifierAddress ]); // Execute this upgrade on a specific chain under this STM. diff --git a/core/tests/upgrade-test/tests/utils.ts b/core/tests/upgrade-test/tests/utils.ts index 915c53f04708..cbf26a38deca 100644 --- a/core/tests/upgrade-test/tests/utils.ts +++ b/core/tests/upgrade-test/tests/utils.ts @@ -81,7 +81,7 @@ export function initContracts(pathToHome: string, zkStack: boolean): Contracts { require(`${CONTRACTS_FOLDER}/l1-contracts/out/IBridgehubBase.sol/IBridgehubBase.json`).abi ), chainAssetHandlerAbi: new ethers.Interface( - require(`${CONTRACTS_FOLDER}/l1-contracts/out/IChainAssetHandler.sol/IChainAssetHandler.json`).abi + require(`${CONTRACTS_FOLDER}/l1-contracts/out/IChainAssetHandler.sol/IChainAssetHandlerBase.json`).abi ) }; } else { @@ -120,7 +120,7 @@ export function initContracts(pathToHome: string, zkStack: boolean): Contracts { ), chainAssetHandlerAbi: new ethers.Interface( require( - `${L1_CONTRACTS_FOLDER}/core/chain-asset-handler/IChainAssetHandler.sol/IChainAssetHandler.json` + `${L1_CONTRACTS_FOLDER}/core/chain-asset-handler/IChainAssetHandler.sol/IChainAssetHandlerBase.json` ).abi ) }; diff --git a/infrastructure/local-upgrade-testing/era-cacher/do-no-server-upgrade.sh b/infrastructure/local-upgrade-testing/era-cacher/do-no-server-upgrade.sh index 276770c10772..9274bdcd32ba 100755 --- a/infrastructure/local-upgrade-testing/era-cacher/do-no-server-upgrade.sh +++ b/infrastructure/local-upgrade-testing/era-cacher/do-no-server-upgrade.sh @@ -1,6 +1,7 @@ # era-cacher/reset.sh -# 
era-cacher/use-new-era.sh && cd zksync-working +# era-cacher/use-new-era.sh && +cd zksync-working upgrade_version="v31-interop-b" # "v28-1-vk" @@ -17,18 +18,18 @@ zkstack ecosystem init --deploy-paymaster --deploy-erc20 \ --server-db-name=zksync_server_localhost_era \ --ignore-prerequisites --verbose \ --observability=false \ - --validium-type no-da \ + --chain era \ --update-submodules false # Server should be started in a different window for consistency zkstack server --ignore-prerequisites --chain era &> ../rollup.log & echo "Server started" -zkstack dev run-ecosystem-upgrade --upgrade-version $upgrade_version --ecosystem-upgrade-stage no-governance-prepare --update-submodules false +# zkstack dev run-ecosystem-upgrade --upgrade-version $upgrade_version --ecosystem-upgrade-stage no-governance-prepare --update-submodules false -zkstack dev run-ecosystem-upgrade --upgrade-version $upgrade_version --ecosystem-upgrade-stage governance-stage0 --update-submodules false +# zkstack dev run-ecosystem-upgrade --upgrade-version $upgrade_version --ecosystem-upgrade-stage governance-stage0 --update-submodules false -zkstack dev run-ecosystem-upgrade --upgrade-version $upgrade_version --ecosystem-upgrade-stage governance-stage1 --update-submodules false +# zkstack dev run-ecosystem-upgrade --upgrade-version $upgrade_version --ecosystem-upgrade-stage governance-stage1 --update-submodules false cd contracts/l1-contracts UPGRADE_ECOSYSTEM_OUTPUT=script-out/$upgrade_file_extension-upgrade-ecosystem.toml \ @@ -36,13 +37,13 @@ UPGRADE_ECOSYSTEM_OUTPUT_TRANSACTIONS=broadcast/EcosystemUpgrade_$upgrade_file_e YAML_OUTPUT_FILE=script-out/$upgrade_file_extension-local-output.yaml yarn upgrade-yaml-output-generator cd ../../ -zkstack dev run-chain-upgrade --upgrade-version $upgrade_version +# zkstack dev run-chain-upgrade --upgrade-version $upgrade_version -zkstack dev run-ecosystem-upgrade --upgrade-version $upgrade_version --ecosystem-upgrade-stage governance-stage2 +# zkstack dev 
run-ecosystem-upgrade --upgrade-version $upgrade_version --ecosystem-upgrade-stage governance-stage2 -pkill -9 zksync_server -zkstack server --ignore-prerequisites --chain era &> ../rollup2.log & +# pkill -9 zksync_server +# zkstack server --ignore-prerequisites --chain era &> ../rollup2.log & -sleep 10 +# sleep 10 -zkstack dev test integration --no-deps --ignore-prerequisites --chain era \ No newline at end of file +# zkstack dev test integration --no-deps --ignore-prerequisites --chain era \ No newline at end of file diff --git a/infrastructure/local-upgrade-testing/era-cacher/do-upgrade.sh b/infrastructure/local-upgrade-testing/era-cacher/do-upgrade.sh index e70cc0570f2d..a5bc766fdd8e 100644 --- a/infrastructure/local-upgrade-testing/era-cacher/do-upgrade.sh +++ b/infrastructure/local-upgrade-testing/era-cacher/do-upgrade.sh @@ -2,50 +2,72 @@ era-cacher/reset.sh era-cacher/use-old-era.sh && cd zksync-working +upgrade_version="v31-interop-b" +upgrade_file_extension="v31" + # We delete information about the gateway chain as presence of old chain configs # changes the beavior of zkstack ecosystem init. 
[ -d "./chains/gateway" ] && rm -rf "./chains/gateway" zkstackup --local && zkstack dev clean containers && zkstack up --observability false +# zkstack ecosystem init --deploy-paymaster --deploy-erc20 \ +# --deploy-ecosystem --l1-rpc-url=http://127.0.0.1:8545 \ +# --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ +# --server-db-name=zksync_server_localhost_era \ +# --ignore-prerequisites --verbose \ +# --chain era \ +# --observability=false + +# zkstack dev generate-genesis + zkstack ecosystem init --deploy-paymaster --deploy-erc20 \ --deploy-ecosystem --l1-rpc-url=http://127.0.0.1:8545 \ --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ --server-db-name=zksync_server_localhost_era \ --ignore-prerequisites --verbose \ + --chain era \ --observability=false -zkstack chain create \ - --chain-name gateway \ - --chain-id 506 \ - --prover-mode no-proofs \ - --wallet-creation localhost \ - --l1-batch-commit-data-generator-mode rollup \ - --base-token-address 0x0000000000000000000000000000000000000001 \ - --base-token-price-nominator 1 \ - --base-token-price-denominator 1 \ - --set-as-default false \ - --evm-emulator false \ - --ignore-prerequisites - -zkstack chain init \ - --deploy-paymaster \ - --l1-rpc-url=http://127.0.0.1:8545 \ - --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --server-db-name=zksync_server_localhost_gateway \ - --chain gateway +zkstack server --chain era &> ../rollup3.log & -zkstack chain gateway convert-to-gateway --chain gateway -# When running locally, it makes sense to not redirect to file, but start a new terminal window here. -zkstack server --chain gateway &> ../gateway.log & +zkstack dev rich-account 0x36615Cf349d7F6344891B1e7CA7C72883F5dc049 --chain era -# When running locally, open a new terminal window here. 
-zkstack chain gateway migrate-to-gateway --chain era --gateway-chain-name gateway -# When running locally, it makes sense to not redirect to file, but start a new terminal window during the next operation. -zkstack server --chain era &> ../rollup.log & +sleep 10 pkill -9 zksync_server +# zkstack dev generate-genesis + +# zkstack chain create \ +# --chain-name gateway \ +# --chain-id 505 \ +# --prover-mode no-proofs \ +# --wallet-creation localhost \ +# --l1-batch-commit-data-generator-mode rollup \ +# --base-token-address 0x0000000000000000000000000000000000000001 \ +# --base-token-price-nominator 1 \ +# --base-token-price-denominator 1 \ +# --set-as-default false \ +# --evm-emulator false \ +# --ignore-prerequisites + +# zkstack chain init \ +# --deploy-paymaster \ +# --l1-rpc-url=http://127.0.0.1:8545 \ +# --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ +# --server-db-name=zksync_server_localhost_gateway \ +# --chain gateway + +# zkstack chain gateway convert-to-gateway --chain gateway +# # When running locally, it makes sense to not redirect to file, but start a new terminal window here. +# zkstack server --chain gateway &> ../gateway.log & + +# # When running locally, open a new terminal window here. +# zkstack chain gateway migrate-to-gateway --chain era --gateway-chain-name gateway +# # When running locally, it makes sense to not redirect to file, but start a new terminal window during the next operation. +# zkstack server --chain era &> ../rollup3.log & + # When running locally, open a new terminal window here. cd .. && era-cacher/use-new-era.sh && cd zksync-working @@ -56,35 +78,67 @@ cd .. && era-cacher/use-new-era.sh && cd zksync-working zkstackup --local zkstack dev contracts -cd contracts -git checkout sb-v29-upgrade-testing -cd .. +# cd contracts +# git checkout +# cd .. 
zkstack dev database migrate --prover false --core true --chain era -zkstack dev database migrate --prover false --core true --chain gateway +# zkstack dev database migrate --prover false --core true --chain gateway # All the actions below may be performed in a different window. -zkstack server --ignore-prerequisites --chain gateway &> ../gateway.log & -zkstack server --ignore-prerequisites --chain era &> ../rollup.log & +# zkstack server --ignore-prerequisites --chain gateway &> ../gateway.log & +RUST_BACKTRACE=1 zkstack server --ignore-prerequisites --chain era &> ../rollup.log & echo "Server started" -zkstack dev run-ecosystem-upgrade --upgrade-version v29-interop-a-ff --ecosystem-upgrade-stage no-governance-prepare -zkstack dev run-ecosystem-upgrade --upgrade-version v29-interop-a-ff --ecosystem-upgrade-stage ecosystem-admin -zkstack dev run-ecosystem-upgrade --upgrade-version v29-interop-a-ff --ecosystem-upgrade-stage governance-stage0 -zkstack dev run-ecosystem-upgrade --upgrade-version v29-interop-a-ff --ecosystem-upgrade-stage governance-stage1 +# Wait for server to be ready +sleep 5 + +# Update permanent-values.toml with addresses from running deployment +../era-cacher/update-permanent-values.sh + +zkstack dev run-ecosystem-upgrade --upgrade-version $upgrade_version --ecosystem-upgrade-stage no-governance-prepare +zkstack dev run-ecosystem-upgrade --upgrade-version $upgrade_version --ecosystem-upgrade-stage ecosystem-admin +zkstack dev run-ecosystem-upgrade --upgrade-version $upgrade_version --ecosystem-upgrade-stage governance-stage0 +zkstack dev run-ecosystem-upgrade --upgrade-version $upgrade_version --ecosystem-upgrade-stage governance-stage1 cd contracts/l1-contracts -UPGRADE_ECOSYSTEM_OUTPUT=script-out/v29-upgrade-ecosystem.toml UPGRADE_ECOSYSTEM_OUTPUT_TRANSACTIONS=broadcast/EcosystemUpgrade_v29.s.sol/9/run-latest.json YAML_OUTPUT_FILE=script-out/v29-local-output.yaml yarn upgrade-yaml-output-generator 
+UPGRADE_ECOSYSTEM_OUTPUT=script-out/v31-upgrade-ecosystem.toml UPGRADE_ECOSYSTEM_OUTPUT_TRANSACTIONS=broadcast/EcosystemUpgrade_v31.s.sol/9/run-latest.json YAML_OUTPUT_FILE=script-out/v31-local-output.yaml yarn upgrade-yaml-output-generator cd ../../ -zkstack dev run-v29-chain-upgrade --force-display-finalization-params=true --dangerous-local-default-overrides=true --chain era -zkstack dev run-v29-chain-upgrade --force-display-finalization-params=true --dangerous-local-default-overrides=true --chain gateway -zkstack dev run-ecosystem-upgrade --upgrade-version v29-interop-a-ff --ecosystem-upgrade-stage governance-stage2 +zkstack dev run-chain-upgrade --upgrade-version $upgrade_version --force-display-finalization-params=true --dangerous-local-default-overrides=true --chain era +# zkstack dev run-chain-upgrade --upgrade-version $upgrade_version --force-display-finalization-params=true --dangerous-local-default-overrides=true --chain gateway +zkstack dev run-ecosystem-upgrade --upgrade-version $upgrade_version --ecosystem-upgrade-stage governance-stage2 + +# Stage 3: Migrate token balances from NTV to AssetTracker +# This can be done with any private key (deployer is used here) +cd contracts/l1-contracts +forge script deploy-scripts/upgrade/v31/EcosystemUpgrade_v31.s.sol:EcosystemUpgrade_v31 \ + --sig "stage3()" \ + --rpc-url http://localhost:8545 \ + --broadcast \ + --private-key 0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110 \ + --legacy \ + --slow \ + --gas-price 50000000000 +cd ../../ pkill -9 zksync_server zkstack server --ignore-prerequisites --chain era &> ../rollup2.log & sleep 10 +# Fund the main wallet (test_mnemonic index 0) with L1 ETH +# This wallet is used by init-test-wallet to distribute to the actual test wallet (index 101) +# init-test-wallet sends 10k ETH, so we need at least that much plus gas +# Using the rich L1 account (RETH pre-funded account) to send ETH +cast send 0x36615Cf349d7F6344891B1e7CA7C72883F5dc049 \ + --value 
10001ether \ + --rpc-url http://127.0.0.1:8545 \ + --private-key 0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110 \ + --gas-price 50gwei + +# Initialize test wallet - this will distribute 10k ETH from main wallet to test wallet +zkstack dev init-test-wallet --chain era + zkstack dev test integration --no-deps --ignore-prerequisites --chain era diff --git a/infrastructure/local-upgrade-testing/era-cacher/update-permanent-values.sh b/infrastructure/local-upgrade-testing/era-cacher/update-permanent-values.sh new file mode 100755 index 000000000000..f2fb25767d4c --- /dev/null +++ b/infrastructure/local-upgrade-testing/era-cacher/update-permanent-values.sh @@ -0,0 +1,102 @@ +#!/bin/bash +# Update permanent-values.toml with addresses from running server and zkstack config + +set -e + +echo "Updating permanent-values.toml with deployed contract addresses..." + +# Change to zksync-working directory +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +WORKING_DIR="$SCRIPT_DIR/../zksync-working" + +if [ ! 
-d "$WORKING_DIR" ]; then + echo "Error: zksync-working directory not found at $WORKING_DIR" + exit 1 +fi + +cd "$WORKING_DIR" +echo "Working directory: $(pwd)" + +# Get RPC URL from zkstack config +RPC_URL=$(awk '/http_url:/ {print $2; exit}' chains/era/configs/general.yaml) + +if [ -z "$RPC_URL" ]; then + echo "Error: Failed to read RPC URL from chains/era/configs/general.yaml" + exit 1 +fi + +echo "Using RPC URL: $RPC_URL" + +# Get bridgehub address from contracts.yaml (ecosystem_contracts section) +BRIDGEHUB_ADDR=$(grep -A 20 "^ecosystem_contracts:" chains/era/configs/contracts.yaml | grep "bridgehub_proxy_addr:" | awk '{print $2}' | head -1) + +if [ -z "$BRIDGEHUB_ADDR" ]; then + echo "Error: Failed to read bridgehub_proxy_addr from ecosystem_contracts in chains/era/configs/contracts.yaml" + exit 1 +fi + +echo "Bridgehub address (from config): $BRIDGEHUB_ADDR" + +# Read other addresses from zkstack config +ERA_CHAIN_ID=$(awk '/^ l2_chain_id:/ {print $2; exit}' chains/era/configs/general.yaml) +CTM_ADDR=$(awk '/state_transition_proxy_addr:/ {print $2; exit}' chains/era/configs/contracts.yaml) +BYTECODES_SUPPLIER=$(awk '/l1_bytecodes_supplier_addr:/ {print $2; exit}' chains/era/configs/contracts.yaml) +CREATE2_FACTORY=$(awk '/^create2_factory_addr:/ {print $2; exit}' chains/era/configs/contracts.yaml) +CREATE2_SALT=$(awk '/^create2_factory_salt:/ {print $2; exit}' chains/era/configs/contracts.yaml) + +# Validate all required values were read +if [ -z "$ERA_CHAIN_ID" ]; then + echo "Error: Failed to read l2_chain_id from chains/era/configs/general.yaml" + exit 1 +fi + +if [ -z "$CTM_ADDR" ]; then + echo "Error: Failed to read state_transition_proxy_addr from chains/era/configs/contracts.yaml" + exit 1 +fi + +if [ -z "$BYTECODES_SUPPLIER" ]; then + echo "Error: Failed to read l1_bytecodes_supplier_addr from chains/era/configs/contracts.yaml" + exit 1 +fi + +echo "ERA Chain ID: $ERA_CHAIN_ID" +echo "CTM Address: $CTM_ADDR" +echo "Bytecodes Supplier: 
$BYTECODES_SUPPLIER" + +# Update permanent-values.toml in both locations +OUTPUT_FILE1="contracts/l1-contracts/script-config/permanent-values.toml" +OUTPUT_FILE2="contracts/l1-contracts/upgrade-envs/permanent-values/local.toml" + +# Create the directory if it doesn't exist +mkdir -p "$(dirname "$OUTPUT_FILE2")" + +# Generate the content +CONTENT=$(cat < "$OUTPUT_FILE1" +echo "$CONTENT" > "$OUTPUT_FILE2" + +echo "" +echo "✓ Updated $OUTPUT_FILE1" +echo "✓ Updated $OUTPUT_FILE2" +echo "" +echo " - ERA chain ID: $ERA_CHAIN_ID" +echo " - Bridgehub: $BRIDGEHUB_ADDR" +echo " - CTM: $CTM_ADDR" +echo " - Bytecodes Supplier: $BYTECODES_SUPPLIER" diff --git a/infrastructure/local-upgrade-testing/era-cacher/use-old-era.sh b/infrastructure/local-upgrade-testing/era-cacher/use-old-era.sh index c06a076e8ab1..1939d91a81d0 100755 --- a/infrastructure/local-upgrade-testing/era-cacher/use-old-era.sh +++ b/infrastructure/local-upgrade-testing/era-cacher/use-old-era.sh @@ -5,6 +5,14 @@ NEW_REPO=./zksync-new WORKING_DIRECTORY=./zksync-working +# If zksync-working exists and zksync-new is empty/doesn't exist, move it to zksync-new +if [ -d "$WORKING_DIRECTORY" ]; then + if [ ! -d "$NEW_REPO" ] || [ -z "$(ls -A "$NEW_REPO" 2>/dev/null)" ]; then + echo "Moving existing zksync-working to zksync-new..." + mv "$WORKING_DIRECTORY" "$NEW_REPO" + fi +fi + # Check if the folder exists if [ ! -d "$OLD_REPO" ]; then echo "Error: The folder '$OLD_REPO' does not exist." 
diff --git a/infrastructure/scripts/debug-tx.sh b/infrastructure/scripts/debug-tx.sh new file mode 100755 index 000000000000..b28ef101f5a4 --- /dev/null +++ b/infrastructure/scripts/debug-tx.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# Debug a failed transaction by sending it with high gas and getting the trace +# Usage: ./debug-tx.sh <to_address> <calldata> [value_in_wei] + +set -e + +TO_ADDRESS="$1" +CALLDATA="$2" +VALUE="${3:-0}" + +if [ -z "$TO_ADDRESS" ] || [ -z "$CALLDATA" ]; then + echo "Usage: $0 <to_address> <calldata> [value_in_wei]" + echo "Example: $0 0xfe3EE966E7790b427F7B078f304C7B4DDCd4bbfe 0xd52471c1... 1050000121535147500000" + exit 1 +fi + +RPC_URL="${RPC_URL:-http://127.0.0.1:8545}" +PRIVATE_KEY="${PRIVATE_KEY:-0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110}" + +echo "===================================" +echo "Sending transaction..." +echo "To: $TO_ADDRESS" +echo "Value: $VALUE wei" +echo "Calldata: ${CALLDATA:0:66}..." +echo "===================================" + +# Send transaction with high gas limit +TX_HASH=$(cast send "$TO_ADDRESS" \ + --data "$CALLDATA" \ + --value "$VALUE" \ + --private-key "$PRIVATE_KEY" \ + --rpc-url "$RPC_URL" \ + --gas-limit 10000000 \ + --gas-price 50gwei 2>&1 | grep "transactionHash" | awk '{print $2}') + +echo "" +echo "Transaction hash: $TX_HASH" +echo "" +echo "===================================" +echo "Getting trace..." 
+echo "===================================" +echo "" + +# Get the trace +cast run "$TX_HASH" --rpc-url "$RPC_URL" diff --git a/infrastructure/scripts/token_balance_migration.sh b/infrastructure/scripts/token_balance_migration.sh index bdc470c0d382..53c03f522c85 100644 --- a/infrastructure/scripts/token_balance_migration.sh +++ b/infrastructure/scripts/token_balance_migration.sh @@ -47,5 +47,4 @@ zkstack server wait --ignore-prerequisites --verbose --chain gateway export CI=1 # Needed to avoid killing the server in core/tests/highlevel-test-tools/global-setup.ts#48 export USE_GATEWAY_CHAIN=WITH_GATEWAY export TESTED_CHAIN_TYPE=era -yarn --cwd core/tests/highlevel-test-tools test -t "Token balance migration TO GW tests" -# yarn --cwd core/tests/highlevel-test-tools test -t "Token balance migration FROM GW tests" +yarn --cwd core/tests/highlevel-test-tools test -t "Token balance migration tests" diff --git a/zkstack_cli/crates/config/src/forge_interface/script_params.rs b/zkstack_cli/crates/config/src/forge_interface/script_params.rs index 956bcde38f28..0e9bdf798c4c 100644 --- a/zkstack_cli/crates/config/src/forge_interface/script_params.rs +++ b/zkstack_cli/crates/config/src/forge_interface/script_params.rs @@ -125,24 +125,18 @@ pub const V29_UPGRADE_CHAIN_PARAMS: ForgeScriptParams = ForgeScriptParams { script_path: "deploy-scripts/upgrade/v29/ChainUpgrade_v29.s.sol", }; -pub const V31_UPGRADE_CORE_CONTRACTS_PARAMS: ForgeScriptParams = ForgeScriptParams { - input: "script-config/v31-upgrade-core.toml", - output: "script-out/v31-upgrade-core.toml", - script_path: "deploy-scripts/upgrade/v31/EcosystemUpgrade_v31.s.sol", -}; - -pub const V31_UPGRADE_CTM_CONTRACTS_PARAMS: ForgeScriptParams = ForgeScriptParams { - input: "script-config/v31-upgrade-ctm.toml", - output: "script-out/v31-upgrade-ctm.toml", - script_path: "deploy-scripts/upgrade/v31/EcosystemUpgrade_v31.s.sol", -}; - pub const V31_UPGRADE_CHAIN_PARAMS: ForgeScriptParams = ForgeScriptParams { - input: 
"script-config/v31-upgrade-chain.toml", + input: "script-config/should-not-exists-3.toml", output: "script-out/v31-upgrade-chain.toml", script_path: "deploy-scripts/upgrade/v31/ChainUpgrade_v31.s.sol", }; +pub const V31_UPGRADE_ECOSYSTEM_PARAMS: ForgeScriptParams = ForgeScriptParams { + input: "script-config/should-not-exists-0.toml", + output: "script-out/v31-upgrade-ecosystem.toml", + script_path: "deploy-scripts/upgrade/v31/EcosystemUpgrade_v31.s.sol", +}; + pub const FINALIZE_UPGRADE_SCRIPT_PARAMS: ForgeScriptParams = ForgeScriptParams { input: "script-config/gateway-finalize-upgrade.toml", output: "script-out/gateway-finalize-upgrade.toml", diff --git a/zkstack_cli/crates/config/src/forge_interface/upgrade_ecosystem/output.rs b/zkstack_cli/crates/config/src/forge_interface/upgrade_ecosystem/output.rs index 65283f9358bc..7447789123ab 100644 --- a/zkstack_cli/crates/config/src/forge_interface/upgrade_ecosystem/output.rs +++ b/zkstack_cli/crates/config/src/forge_interface/upgrade_ecosystem/output.rs @@ -6,18 +6,20 @@ use crate::traits::{FileConfigTrait, FileConfigWithDefaultName}; #[derive(Debug, Deserialize, Serialize, Clone)] pub struct EcosystemUpgradeOutput { - pub create2_factory_addr: Address, - pub create2_factory_salt: H256, - pub deployer_addr: Address, - pub era_chain_id: u32, - pub l1_chain_id: u32, - pub owner_address: Address, + // pub create2_factory_addr: Address, + // pub create2_factory_salt: H256, + // pub deployer_addr: Address, + // pub era_chain_id: u32, + // pub l1_chain_id: u32, + // pub owner_address: Address, + #[serde(default)] pub chain_upgrade_diamond_cut: Bytes, pub governance_calls: GovernanceCalls, - pub ecosystem_admin_calls: EcosystemAdminCalls, - pub contracts_config: EcosystemUpgradeContractsOutput, - pub deployed_addresses: EcosystemUpgradeDeployedAddresses, + // pub ecosystem_admin_calls: EcosystemAdminCalls, + // pub contracts_config: EcosystemUpgradeContractsOutput, + #[serde(default)] + pub state_transition: Option, /// 
List of transactions that were executed during the upgrade. /// This is added later by the zkstack and not present in the toml file that solidity creates. #[serde(default)] @@ -25,7 +27,7 @@ pub struct EcosystemUpgradeOutput { } impl FileConfigWithDefaultName for EcosystemUpgradeOutput { - const FILE_NAME: &'static str = "../contracts/l1-contracts/script-out/v29_local_output.yaml"; + const FILE_NAME: &'static str = "../contracts/l1-contracts/script-out/v31-local-output.yaml"; } #[derive(Debug, Deserialize, Serialize, Clone)] @@ -101,6 +103,8 @@ pub struct EcosystemUpgradeStateTransition { pub mailbox_facet_addr: Address, pub state_transition_implementation_addr: Address, pub verifier_addr: Address, + #[serde(default)] + pub bytecodes_supplier_addr: Address, } #[derive(Debug, Deserialize, Serialize, Clone)] diff --git a/zkstack_cli/crates/zkstack/src/abi.rs b/zkstack_cli/crates/zkstack/src/abi.rs index 62f5f670c271..48ac40e7eec9 100644 --- a/zkstack_cli/crates/zkstack/src/abi.rs +++ b/zkstack_cli/crates/zkstack/src/abi.rs @@ -13,14 +13,14 @@ abigen!( ); abigen!( - ZkChainAbi, - "../../../contracts/l1-contracts/zkstack-out/IZKChain.sol/IZKChain.json", + IChainTypeManagerAbi, + "../../../contracts/l1-contracts/zkstack-out/IChainTypeManager.sol/IChainTypeManager.json", event_derives(serde::Deserialize, serde::Serialize) ); abigen!( - IChainTypeManagerAbi, - "../../../contracts/l1-contracts/zkstack-out/IChainTypeManager.sol/IChainTypeManager.json", + ZkChainAbi, + "../../../contracts/l1-contracts/zkstack-out/IZKChain.sol/IZKChain.json", event_derives(serde::Deserialize, serde::Serialize) ); @@ -32,7 +32,7 @@ abigen!( abigen!( IChainAssetHandlerAbi, - "../../../contracts/l1-contracts/zkstack-out/IChainAssetHandler.sol/IChainAssetHandler.json", + "../../../contracts/l1-contracts/zkstack-out/IChainAssetHandler.sol/IChainAssetHandlerBase.json", event_derives(serde::Deserialize, serde::Serialize) ); @@ -43,19 +43,30 @@ abigen!( event_derives(serde::Deserialize, 
serde::Serialize) ); -// Using IAdmin for the diamondCut function abigen!( - DiamondCutAbi, + AdminAbi, "../../../contracts/l1-contracts/zkstack-out/IAdmin.sol/IAdmin.json", event_derives(serde::Deserialize, serde::Serialize) ); +abigen!( + DiamondCutAbi, + "../../../contracts/l1-contracts/zkstack-out/IDiamondCut.sol/IDiamondCut.json", + event_derives(serde::Deserialize, serde::Serialize) +); + abigen!( ChainAdminOwnableAbi, "../../../contracts/l1-contracts/zkstack-out/IChainAdminOwnable.sol/IChainAdminOwnable.json", event_derives(serde::Deserialize, serde::Serialize) ); +abigen!( + IChainAdminAbi, + "../../../contracts/l1-contracts/zkstack-out/IChainAdmin.sol/IChainAdmin.json", + event_derives(serde::Deserialize, serde::Serialize) +); + abigen!( IRegisterZKChainAbi, "../../../contracts/l1-contracts/zkstack-out/IRegisterZKChain.sol/IRegisterZKChain.json", @@ -134,12 +145,24 @@ abigen!( event_derives(serde::Deserialize, serde::Serialize) ); +abigen!( + IL1NativeTokenVaultAbi, + "../../../contracts/l1-contracts/zkstack-out/IL1NativeTokenVault.sol/IL1NativeTokenVault.json", + event_derives(serde::Deserialize, serde::Serialize) +); + abigen!( IL2NativeTokenVaultAbi, "../../../contracts/l1-contracts/zkstack-out/IL2NativeTokenVault.sol/IL2NativeTokenVault.json", event_derives(serde::Deserialize, serde::Serialize) ); +abigen!( + IL1AssetRouterAbi, + "../../../contracts/l1-contracts/zkstack-out/IL1AssetRouter.sol/IL1AssetRouter.json", + event_derives(serde::Deserialize, serde::Serialize) +); + abigen!( IL2AssetRouterAbi, "../../../contracts/l1-contracts/zkstack-out/IL2AssetRouter.sol/IL2AssetRouter.json", @@ -153,20 +176,20 @@ abigen!( ); abigen!( - IL2AssetTrackerAbi, - "../../../contracts/l1-contracts/zkstack-out/IL2AssetTracker.sol/IL2AssetTracker.json", + IL1AssetTrackerAbi, + "../../../contracts/l1-contracts/zkstack-out/IL1AssetTracker.sol/IL1AssetTracker.json", event_derives(serde::Deserialize, serde::Serialize) ); abigen!( - IGWAssetTrackerAbi, - 
"../../../contracts/l1-contracts/zkstack-out/IGWAssetTracker.sol/IGWAssetTracker.json", + IL2AssetTrackerAbi, + "../../../contracts/l1-contracts/zkstack-out/IL2AssetTracker.sol/IL2AssetTracker.json", event_derives(serde::Deserialize, serde::Serialize) ); abigen!( - IChainAdminAbi, - "../../../contracts/l1-contracts/zkstack-out/IChainAdmin.sol/IChainAdmin.json", + IGWAssetTrackerAbi, + "../../../contracts/l1-contracts/zkstack-out/IGWAssetTracker.sol/IGWAssetTracker.json", event_derives(serde::Deserialize, serde::Serialize) ); diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/admin_call_builder.rs b/zkstack_cli/crates/zkstack/src/commands/chain/admin_call_builder.rs index 5b5c9703728b..c4838ce9b3ea 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/admin_call_builder.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/admin_call_builder.rs @@ -11,8 +11,7 @@ use zkstack_cli_common::forge::ForgeScriptArgs; use zksync_types::{Address, U256}; use crate::abi::{ - CHAINADMINOWNABLEABI_ABI as CHAIN_ADMIN_OWNABLE_ABI, - CHAINTYPEMANAGERUPGRADEFNABI_ABI as CHAIN_TYPE_MANAGER_UPGRADE_ABI, + ADMINABI_ABI as ADMIN_ABI, CHAINADMINOWNABLEABI_ABI as CHAIN_ADMIN_OWNABLE_ABI, DIAMONDCUTABI_ABI as DIAMOND_CUT_ABI, }; @@ -103,6 +102,10 @@ impl AdminCallBuilder { self.calls.extend(calls); } + pub fn is_empty(&self) -> bool { + self.calls.is_empty() + } + #[allow(clippy::too_many_arguments)] pub async fn prepare_upgrade_chain_on_gateway_calls( &mut self, @@ -158,22 +161,28 @@ impl AdminCallBuilder { .function("diamondCut") .expect("diamondCut ABI not found"); - let upgrade_fn = CHAIN_TYPE_MANAGER_UPGRADE_ABI + let upgrade_fn = ADMIN_ABI .function("upgradeChainFromVersion") .expect("upgradeChainFromVersion ABI not found"); - let decoded = diamond_cut_fn - .decode_input(diamond_cut_data.0.get(4..).unwrap_or(&diamond_cut_data.0)) - .or_else(|_| diamond_cut_fn.decode_input(&diamond_cut_data.0)) - .expect("invalid diamondCut calldata"); - - let cfg_tuple = decoded + // Get 
the parameter type for DiamondCutData from the diamondCut function + let diamond_cut_param_type = diamond_cut_fn + .inputs + .get(0) + .expect("diamondCut function has no parameters") + .kind + .clone(); + + // Decode the raw diamond_cut_data bytes directly as DiamondCutData struct + let diamond_cut_token = decode(&[diamond_cut_param_type], &diamond_cut_data.0) + .expect("Failed to decode diamond_cut_data") .into_iter() .next() - .expect("diamondCut expects 1 argument (tuple)"); + .expect("Failed to extract DiamondCutData token"); + // Admin.upgradeChainFromVersion expects: (oldProtocolVersion, DiamondCutData) let data = upgrade_fn - .encode_input(&[Token::Uint(U256::from(protocol_version)), cfg_tuple]) + .encode_input(&[Token::Uint(U256::from(protocol_version)), diamond_cut_token]) .expect("encode upgradeChainFromVersion failed"); let description = "Executing upgrade:".to_string(); diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/gateway/migrate_token_balances.rs b/zkstack_cli/crates/zkstack/src/commands/chain/gateway/migrate_token_balances.rs index 11a7ccd9f2eb..06e224268ea2 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/gateway/migrate_token_balances.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/gateway/migrate_token_balances.rs @@ -1,18 +1,15 @@ -use std::{ - path::{Path, PathBuf}, - sync::Arc, -}; +use std::{collections::HashMap, sync::Arc}; -use anyhow::Context; +use anyhow::{bail, Context}; use clap::Parser; use ethers::{ - abi::Address, + abi::{Address, ParamType, Token}, contract::{BaseContract, Contract}, middleware::SignerMiddleware, providers::{Http, Middleware, Provider}, signers::Signer, - types::{BlockId, BlockNumber}, - utils::hex, + types::{BlockId, BlockNumber, Filter, H256 as EthersH256, U64}, + utils::{hex, keccak256}, }; use futures::stream::{FuturesUnordered, StreamExt}; use lazy_static::lazy_static; @@ -21,15 +18,12 @@ use xshell::Shell; use zkstack_cli_common::{ config::global_config, 
ethereum::{get_ethers_provider, get_zk_client}, - forge::{Forge, ForgeScriptArgs}, + forge::ForgeScriptArgs, logger, wallets::Wallet, - zks_provider::ZKSProvider, -}; -use zkstack_cli_config::{ - forge_interface::script_params::GATEWAY_MIGRATE_TOKEN_BALANCES_SCRIPT_PATH, ZkStackConfig, - ZkStackConfigTrait, + zks_provider::{FinalizeWithdrawalParams, ZKSProvider}, }; +use zkstack_cli_config::ZkStackConfig; use zksync_basic_types::U256; use zksync_system_constants::{ GW_ASSET_TRACKER_ADDRESS, L2_ASSET_ROUTER_ADDRESS, L2_ASSET_TRACKER_ADDRESS, @@ -39,17 +33,14 @@ use zksync_types::{L2ChainId, H256}; use crate::{ abi::{ - BridgehubAbi, MessageRootAbi, ZkChainAbi, IGATEWAYMIGRATETOKENBALANCESABI_ABI, - IL2ASSETROUTERABI_ABI, IL2NATIVETOKENVAULTABI_ABI, + BridgehubAbi, MessageRootAbi, ZkChainAbi, IL1ASSETROUTERABI_ABI, IL1ASSETTRACKERABI_ABI, + IL1NATIVETOKENVAULTABI_ABI, IL2ASSETROUTERABI_ABI, IL2NATIVETOKENVAULTABI_ABI, }, commands::dev::commands::{rich_account, rich_account::args::RichAccountArgs}, messages::MSG_CHAIN_NOT_INITIALIZED, - utils::forge::{fill_forge_private_key, WalletOwner}, }; lazy_static! { - static ref GATEWAY_MIGRATE_TOKEN_BALANCES_FUNCTIONS: BaseContract = - BaseContract::from(IGATEWAYMIGRATETOKENBALANCESABI_ABI.clone()); static ref L2_NTV_FUNCTIONS: BaseContract = BaseContract::from(IL2NATIVETOKENVAULTABI_ABI.clone()); static ref L2_ASSET_ROUTER_FUNCTIONS: BaseContract = @@ -57,7 +48,7 @@ lazy_static! 
{ } #[derive(Debug, Serialize, Deserialize, Parser)] -pub struct MigrateTokenBalancesArgs { +pub struct InitiateTokenBalanceMigrationArgs { /// All ethereum environment related arguments #[clap(flatten)] #[serde(flatten)] @@ -73,53 +64,113 @@ pub struct MigrateTokenBalancesArgs { pub to_gateway: Option, } -// sma todo: this script should be broken down into multiple steps -pub async fn run(args: MigrateTokenBalancesArgs, shell: &Shell) -> anyhow::Result<()> { - let ecosystem_config = ZkStackConfig::ecosystem(shell)?; +#[derive(Debug, Serialize, Deserialize, Parser)] +pub struct FinalizeTokenBalanceMigrationArgs { + /// All ethereum environment related arguments + #[clap(flatten)] + #[serde(flatten)] + pub forge_args: ForgeScriptArgs, - let chain_name = global_config().chain_name.clone(); - let chain_config = ecosystem_config - .load_chain(chain_name) - .context(MSG_CHAIN_NOT_INITIALIZED)?; + #[clap(long)] + pub gateway_chain_name: String, - let gateway_chain_config = ecosystem_config - .load_chain(Some(args.gateway_chain_name.clone())) - .context("Gateway not present")?; - // let gateway_chain_id = gateway_chain_config.chain_id.as_u64(); - // let gateway_gateway_config = gateway_chain_config - // .get_gateway_config() - // .context("Gateway config not present")?; + #[clap(long, default_missing_value = "true", num_args = 0..=1)] + pub to_gateway: Option, - let l1_url = chain_config.get_secrets_config().await?.l1_rpc_url()?; + /// Start block for reading migration initiation events + #[clap(long)] + pub from_block: Option, + + /// End block for reading migration initiation events + #[clap(long)] + pub to_block: Option, +} + +pub async fn run_initiate( + args: InitiateTokenBalanceMigrationArgs, + shell: &Shell, +) -> anyhow::Result<()> { + let (wallet, _l1_bridgehub_addr, l2_chain_id, gw_chain_id, l1_url, l2_url, gw_rpc_url) = + load_migration_context(shell, args.gateway_chain_name).await?; + + let to_gateway = args.to_gateway.unwrap_or(true); + logger::info(format!( 
+ "Initiating the token balance migration {} the Gateway...", + if to_gateway { "to" } else { "from" } + )); - let general_chain_config = chain_config.get_general_config().await?; - let l2_url = general_chain_config.l2_http_url()?; + initiate_token_balance_migration( + shell, + args.skip_funding.unwrap_or(false), + to_gateway, + wallet, + l2_chain_id, + gw_chain_id, + l1_url.clone(), + gw_rpc_url.clone(), + l2_url.clone(), + ) + .await?; - // let genesis_config = chain_config.get_genesis_config().await?; - // let gateway_contract_config = gateway_chain_config.get_contracts_config()?; + Ok(()) +} - // let chain_contracts_config = chain_config.get_contracts_config().unwrap(); +pub async fn run_finalize( + args: FinalizeTokenBalanceMigrationArgs, + shell: &Shell, +) -> anyhow::Result<()> { + let (wallet, l1_bridgehub_addr, l2_chain_id, gw_chain_id, l1_url, l2_url, gw_rpc_url) = + load_migration_context(shell, args.gateway_chain_name).await?; + let to_gateway = args.to_gateway.unwrap_or(true); logger::info(format!( - "Migrating the token balances {} the Gateway...", - if args.to_gateway.unwrap_or(true) { - "to" - } else { - "from" - } + "Finalizing the token balance migration {} the Gateway...", + if to_gateway { "to" } else { "from" } )); - let general_config = gateway_chain_config.get_general_config().await?; - let gw_rpc_url = general_config.l2_http_url()?; + finalize_token_balance_migration( + wallet, + l1_bridgehub_addr, + l2_chain_id, + gw_chain_id, + l1_url.clone(), + gw_rpc_url.clone(), + l2_url.clone(), + to_gateway, + args.from_block, + args.to_block, + ) + .await?; - // let chain_secrets_config = chain_config.get_wallets_config().unwrap(); + Ok(()) +} - migrate_token_balances_from_gateway( - shell, - args.skip_funding.unwrap_or(false), - &args.forge_args.clone(), - args.to_gateway.unwrap_or(true), - &chain_config.path_to_foundry_scripts(), +const LOOK_WAITING_TIME_MS: u64 = 1600; + +#[allow(clippy::too_many_arguments)] +async fn load_migration_context( + 
shell: &Shell, + gateway_chain_name: String, +) -> anyhow::Result<(Wallet, Address, u64, u64, String, String, String)> { + let ecosystem_config = ZkStackConfig::ecosystem(shell)?; + + let chain_name = global_config().chain_name.clone(); + let chain_config = ecosystem_config + .load_chain(chain_name) + .context(MSG_CHAIN_NOT_INITIALIZED)?; + + let gateway_chain_config = ecosystem_config + .load_chain(Some(gateway_chain_name)) + .context("Gateway not present")?; + + let l1_url = chain_config.get_secrets_config().await?.l1_rpc_url()?; + let l2_url = chain_config.get_general_config().await?.l2_http_url()?; + let gw_rpc_url = gateway_chain_config + .get_general_config() + .await? + .l2_http_url()?; + + Ok(( ecosystem_config .get_wallets()? .deployer @@ -130,26 +181,17 @@ pub async fn run(args: MigrateTokenBalancesArgs, shell: &Shell) -> anyhow::Resul .bridgehub_proxy_addr, chain_config.chain_id.as_u64(), gateway_chain_config.chain_id.as_u64(), - l1_url.clone(), - gw_rpc_url.clone(), - l2_url.clone(), - ) - .await?; - - Ok(()) + l1_url, + l2_url, + gw_rpc_url, + )) } -const LOOK_WAITING_TIME_MS: u64 = 1600; - -#[allow(clippy::too_many_arguments)] -pub async fn migrate_token_balances_from_gateway( +async fn initiate_token_balance_migration( shell: &Shell, skip_funding: bool, - forge_args: &ForgeScriptArgs, to_gateway: bool, - foundry_scripts_path: &Path, wallet: Wallet, - l1_bridgehub_addr: Address, l2_chain_id: u64, gw_chain_id: u64, l1_rpc_url: String, @@ -200,48 +242,11 @@ pub async fn migrate_token_balances_from_gateway( } let mut tx_hashes = Vec::new(); - let mut asset_ids = Vec::new(); + let asset_ids = get_asset_ids(&l2_rpc_url).await?; + let l2_provider = Provider::::try_from(l2_rpc_url.as_str())?; let l2_chain_id = l2_provider.get_chainid().await?.as_u64(); - let l2_signer = wallet - .private_key - .clone() - .unwrap() - .with_chain_id(l2_chain_id); - let l2_client = Arc::new(SignerMiddleware::new(l2_provider.clone(), l2_signer)); - - // Get bridged token count 
and asset IDs - let ntv = Contract::new( - L2_NATIVE_TOKEN_VAULT_ADDRESS, - L2_NTV_FUNCTIONS.abi().clone(), - l2_client.clone(), - ); - let count: U256 = ntv - .method::<_, U256>("bridgedTokensCount", ())? - .call() - .await?; - - for i in 0..count.as_u64() { - asset_ids.push( - ntv.method::<_, [u8; 32]>("bridgedTokens", U256::from(i))? - .call() - .await?, - ); - } - - // Add base token asset ID - let router = Contract::new( - L2_ASSET_ROUTER_ADDRESS, - L2_ASSET_ROUTER_FUNCTIONS.abi().clone(), - Arc::new(l2_provider), - ); - let base_token_asset_id = router - .method::<_, [u8; 32]>("BASE_TOKEN_ASSET_ID", ())? - .call() - .await?; - asset_ids.push(base_token_asset_id); - // Migrate each token let (tracker_addr, tracker_abi) = if to_gateway { ( @@ -326,14 +331,53 @@ pub async fn migrate_token_balances_from_gateway( } println!("Token migration started"); + if tx_hashes.is_empty() { + println!("No migration transactions were sent."); + } - let (migration_rpc_url, source_chain_id) = if to_gateway { - (l2_rpc_url.as_str(), l2_chain_id) + Ok(()) +} + +async fn finalize_token_balance_migration( + wallet: Wallet, + l1_bridgehub_addr: Address, + l2_chain_id: u64, + gw_chain_id: u64, + l1_rpc_url: String, + gw_rpc_url: String, + l2_rpc_url: String, + to_gateway: bool, + from_block: Option, + to_block: Option, +) -> anyhow::Result<()> { + let (tracker_addr, event_signature, migration_rpc_url, source_chain_id) = if to_gateway { + ( + L2_ASSET_TRACKER_ADDRESS, + "L1ToGatewayMigrationInitiated(bytes32,uint256,uint256)", + l2_rpc_url.as_str(), + l2_chain_id, + ) } else { - (gw_rpc_url.as_str(), gw_chain_id) + ( + GW_ASSET_TRACKER_ADDRESS, + "GatewayToL1MigrationInitiated(bytes32,uint256,uint256)", + gw_rpc_url.as_str(), + gw_chain_id, + ) }; + let expected_data_chain_id = l2_chain_id; - wait_for_migration_ready( + let (_log_asset_ids, tx_hashes) = fetch_migration_events( + migration_rpc_url, + tracker_addr, + event_signature, + if to_gateway { None } else { Some(l2_chain_id) }, + 
from_block, + to_block, + ) + .await?; + + let finalize_params = wait_for_migration_ready( l1_rpc_url.clone(), l1_bridgehub_addr, migration_rpc_url, @@ -342,45 +386,187 @@ pub async fn migrate_token_balances_from_gateway( ) .await?; - let calldata = GATEWAY_MIGRATE_TOKEN_BALANCES_FUNCTIONS - .encode( - "finishMigrationOnL1", - ( - to_gateway, - l1_bridgehub_addr, - U256::from(l2_chain_id), - U256::from(gw_chain_id), - l2_rpc_url.clone(), - gw_rpc_url.clone(), - false, - tx_hashes, - ), - ) - .unwrap(); + let mut migrated_asset_ids = Vec::new(); + if finalize_params.is_empty() { + logger::info("No migration params found; skipping L1 finalize calls."); + } else { + let l1_provider = Arc::new(Provider::::try_from(l1_rpc_url.as_str())?); + let l1_chain_id = l1_provider.get_chainid().await?.as_u64(); + let l1_signer = wallet + .private_key + .clone() + .unwrap() + .with_chain_id(l1_chain_id); + let l1_client = Arc::new(SignerMiddleware::new(l1_provider.clone(), l1_signer)); + + let bridgehub = BridgehubAbi::new(l1_bridgehub_addr, l1_provider.clone()); + let l1_asset_router_addr = bridgehub.asset_router().call().await?; + let l1_asset_router = Contract::new( + l1_asset_router_addr, + IL1ASSETROUTERABI_ABI.clone(), + l1_provider.clone(), + ); + let l1_native_token_vault_addr: Address = l1_asset_router + .method::<_, Address>("nativeTokenVault", ())? + .call() + .await?; + let l1_native_token_vault = Contract::new( + l1_native_token_vault_addr, + IL1NATIVETOKENVAULTABI_ABI.clone(), + l1_provider.clone(), + ); + let l1_asset_tracker_addr: Address = l1_native_token_vault + .method::<_, Address>("l1AssetTracker", ())? 
+ .call() + .await?; - let mut forge = Forge::new(foundry_scripts_path) - .script( - &PathBuf::from(GATEWAY_MIGRATE_TOKEN_BALANCES_SCRIPT_PATH), - forge_args.clone(), - ) - .with_ffi() - .with_rpc_url(l1_rpc_url.clone()) - .with_broadcast() - .with_gas_per_pubdata(8000) - .with_calldata(&calldata); + let l1_asset_tracker = Contract::new( + l1_asset_tracker_addr, + IL1ASSETTRACKERABI_ABI.clone(), + l1_client.clone(), + ); + let l1_asset_tracker_base = Contract::new( + l1_asset_tracker_addr, + crate::abi::IASSETTRACKERBASEABI_ABI.clone(), + l1_provider.clone(), + ); + + let expected_selector: [u8; 4] = keccak256( + "receiveMigrationOnL1((bytes1,bool,address,uint256,bytes32,uint256,uint256,uint256,uint256))", + )[0..4] + .try_into() + .expect("selector length is always 4 bytes"); + + let mut next_nonce = l1_client + .get_transaction_count(wallet.address, Some(BlockId::Number(BlockNumber::Pending))) + .await?; + + let mut pending_txs = FuturesUnordered::new(); + for (tx_hash, maybe_params) in tx_hashes.iter().zip(finalize_params.iter()) { + let Some(params) = maybe_params else { + println!("No finalize params for tx hash: 0x{}", hex::encode(tx_hash)); + continue; + }; + + if params.proof.proof.is_empty() { + println!( + "No withdrawal proof found for tx hash: 0x{}", + hex::encode(tx_hash) + ); + continue; + } + + let (data_chain_id, asset_id, selector) = + decode_token_balance_migration_message(¶ms.message.0)?; + if data_chain_id.as_u64() != expected_data_chain_id { + println!( + "Skipping tx hash from different chain: 0x{}", + hex::encode(tx_hash) + ); + continue; + } + if selector != expected_selector { + println!( + "Unexpected function selector for tx hash: 0x{}", + hex::encode(tx_hash) + ); + continue; + } - // Governor private key is required for this script - forge = fill_forge_private_key(forge, Some(&wallet), WalletOwner::Deployer)?; - forge.run(shell)?; + let already_migrated: bool = l1_asset_tracker_base + .method::<_, bool>("tokenMigrated", (data_chain_id, 
asset_id))? + .call() + .await?; + if already_migrated { + println!( + "Token already migrated for assetId: 0x{}", + hex::encode(asset_id.as_bytes()) + ); + continue; + } + + let l2_tx_number_in_batch: u16 = params + .l2_tx_number_in_block + .as_u64() + .try_into() + .context("l2_tx_number_in_block does not fit into u16")?; + let finalize_param = ( + U256::from(source_chain_id), + U256::from(params.l2_batch_number.as_u64()), + U256::from(params.l2_message_index.as_u64()), + params.sender, + l2_tx_number_in_batch, + params.message.clone(), + params.proof.proof.clone(), + ); + + let call_result = + l1_asset_tracker.method::<_, ()>("receiveMigrationOnL1", (finalize_param,)); + + match call_result { + Ok(mut call) => { + migrated_asset_ids.push(asset_id); + let gas_estimate = call + .estimate_gas() + .await + .unwrap_or_else(|_| U256::from(1_500_000u64)); + let gas_limit = + std::cmp::max(gas_estimate * U256::from(2u64), U256::from(1_500_000u64)); + call.tx.set_gas(gas_limit); + call.tx.set_nonce(next_nonce); + next_nonce = next_nonce + U256::from(1u64); + pending_txs.push(async move { + match call.send().await { + Ok(pending_tx) => (asset_id, pending_tx.await), + Err(e) => { + println!("Warning: Failed to migrate asset: {}", e); + (asset_id, Ok(None)) + } + } + }); + } + Err(e) => println!("Warning: Failed to create L1 call: {}", e), + } + } + + while let Some((asset_id, receipt_res)) = pending_txs.next().await { + match receipt_res { + Ok(Some(receipt)) => { + println!( + "L1 tx hash for assetId 0x{}: 0x{}", + hex::encode(asset_id.as_bytes()), + hex::encode(receipt.transaction_hash) + ); + } + Ok(None) => println!( + "Warning: L1 transaction dropped for assetId 0x{}", + hex::encode(asset_id.as_bytes()) + ), + Err(e) => println!( + "Warning: Failed to get L1 receipt for assetId 0x{}: {}", + hex::encode(asset_id.as_bytes()), + e + ), + } + } + } // Wait for all tokens to be migrated println!("Waiting for all tokens to be migrated..."); + let l2_provider = 
Provider::::try_from(l2_rpc_url.as_str())?; + let l2_chain_id = l2_provider.get_chainid().await?.as_u64(); + let l2_signer = wallet + .private_key + .clone() + .unwrap() + .with_chain_id(l2_chain_id); + let l2_client = Arc::new(SignerMiddleware::new(l2_provider.clone(), l2_signer)); let tracker = Contract::new( L2_ASSET_TRACKER_ADDRESS, crate::abi::IASSETTRACKERBASEABI_ABI.clone(), l2_client.clone(), ); - for asset_id in asset_ids.iter().copied() { + for asset_id in migrated_asset_ids.iter().copied() { loop { let asset_is_migrated = tracker .method::<_, bool>("tokenMigratedThisChain", asset_id)? @@ -395,47 +581,142 @@ pub async fn migrate_token_balances_from_gateway( println!("Token migration finished"); - // let calldata = GATEWAY_MIGRATE_TOKEN_BALANCES_FUNCTIONS - // .encode( - // "checkAllMigrated", - // (U256::from(l2_chain_id), l2_rpc_url.clone()), - // ) - // .unwrap(); - - // let mut forge = Forge::new(foundry_scripts_path) - // .script( - // &PathBuf::from(GATEWAY_MIGRATE_TOKEN_BALANCES_SCRIPT_PATH), - // forge_args.clone(), - // ) - // .with_ffi() - // .with_rpc_url(l2_rpc_url.clone()) - // .with_broadcast() - // .with_zksync() - // .with_slow() - // .with_gas_per_pubdata(8000) - // .with_calldata(&calldata); - - // // Governor private key is required for this script - // if run_initial { - // forge = fill_forge_private_key(forge, Some(&wallet), WalletOwner::Deployer)?; - // forge.run(shell)?; - // } - - // println!("Token migration checked"); - Ok(()) } +async fn get_asset_ids(l2_rpc_url: &str) -> anyhow::Result> { + let mut asset_ids = Vec::new(); + let l2_provider = Provider::::try_from(l2_rpc_url)?; + + let ntv = Contract::new( + L2_NATIVE_TOKEN_VAULT_ADDRESS, + L2_NTV_FUNCTIONS.abi().clone(), + Arc::new(l2_provider.clone()), + ); + let count: U256 = ntv + .method::<_, U256>("bridgedTokensCount", ())? + .call() + .await?; + + for i in 0..count.as_u64() { + asset_ids.push( + ntv.method::<_, [u8; 32]>("bridgedTokens", U256::from(i))? 
+ .call() + .await?, + ); + } + + let router = Contract::new( + L2_ASSET_ROUTER_ADDRESS, + L2_ASSET_ROUTER_FUNCTIONS.abi().clone(), + Arc::new(l2_provider), + ); + let base_token_asset_id = router + .method::<_, [u8; 32]>("BASE_TOKEN_ASSET_ID", ())? + .call() + .await?; + asset_ids.push(base_token_asset_id); + + Ok(asset_ids) +} + +async fn fetch_migration_events( + rpc_url: &str, + tracker_addr: Address, + event_signature: &str, + chain_id_topic: Option, + from_block: Option, + to_block: Option, +) -> anyhow::Result<(Vec<[u8; 32]>, Vec)> { + let provider = Provider::::try_from(rpc_url)?; + let event_topic = EthersH256::from_slice(&keccak256(event_signature)); + + let mut logs = Vec::new(); + let mut attempts = 0u32; + let max_attempts = 10u32; + while attempts < max_attempts { + let resolved_from = from_block.unwrap_or(0); + let resolved_to = match to_block { + Some(to_block) => BlockNumber::Number(U64::from(to_block)), + None => BlockNumber::Latest, + }; + + let mut filter = Filter::new().address(tracker_addr).topic0(event_topic); + filter = filter.from_block(BlockNumber::Number(U64::from(resolved_from))); + filter = filter.to_block(resolved_to); + if let Some(chain_id) = chain_id_topic { + filter = filter.topic2(EthersH256::from_low_u64_be(chain_id)); + } + logs = provider.get_logs(&filter).await?; + if !logs.is_empty() { + break; + } + + attempts += 1; + tokio::time::sleep(std::time::Duration::from_millis(LOOK_WAITING_TIME_MS)).await; + } + let mut latest_logs: HashMap = HashMap::new(); + + for log in logs { + let Some(asset_topic) = log.topics.get(1).copied() else { + continue; + }; + let Some(tx_hash) = log.transaction_hash else { + continue; + }; + let block_number = log.block_number.map(|b| b.as_u64()).unwrap_or(0); + let log_index = log.log_index.map(|i| i.as_u64()).unwrap_or(0); + + match latest_logs.get(&asset_topic) { + Some((prev_block, _, _, _)) if *prev_block > block_number => continue, + Some((prev_block, prev_index, _, _)) + if *prev_block == 
block_number && *prev_index >= log_index => + { + continue + } + _ => { + let asset_id = asset_topic.as_bytes(); + let mut asset_bytes = [0u8; 32]; + asset_bytes.copy_from_slice(asset_id); + latest_logs.insert( + asset_topic, + ( + block_number, + log_index, + H256::from_slice(tx_hash.as_bytes()), + asset_bytes, + ), + ); + } + } + } + + if latest_logs.is_empty() { + logger::info("No migration events found; skipping L1 finalize calls."); + return Ok((Vec::new(), Vec::new())); + } + + let mut entries: Vec<([u8; 32], H256)> = latest_logs + .values() + .map(|(_, _, tx_hash, asset_id)| (*asset_id, *tx_hash)) + .collect(); + entries.sort_by(|(a, _), (b, _)| a.cmp(b)); + + let (asset_ids, tx_hashes): (Vec<[u8; 32]>, Vec) = entries.into_iter().unzip(); + + Ok((asset_ids, tx_hashes)) +} + async fn wait_for_migration_ready( l1_rpc_url: String, l1_bridgehub_addr: Address, l2_or_gw_rpc: &str, source_chain_id: u64, tx_hashes: &[H256], -) -> anyhow::Result<()> { +) -> anyhow::Result>> { if tx_hashes.is_empty() { logger::info("No migration transactions found; skipping L1 wait."); - return Ok(()); + return Ok(Vec::new()); } println!("Waiting for migration to be ready..."); @@ -516,5 +797,49 @@ async fn wait_for_migration_ready( } } - Ok(()) + Ok(finalize_params) +} + +fn decode_token_balance_migration_message(message: &[u8]) -> anyhow::Result<(U256, H256, [u8; 4])> { + if message.len() < 4 { + bail!("L2->L1 message is too short"); + } + + let selector: [u8; 4] = message[0..4] + .try_into() + .context("Failed to read function selector")?; + let tokens = ethers::abi::decode( + &[ParamType::Tuple(vec![ + ParamType::FixedBytes(1), + ParamType::Bool, + ParamType::Address, + ParamType::Uint(256), + ParamType::FixedBytes(32), + ParamType::Uint(256), + ParamType::Uint(256), + ParamType::Uint(256), + ParamType::Uint(256), + ])], + &message[4..], + ) + .context("Failed to decode token balance migration data")?; + + let Token::Tuple(values) = tokens + .into_iter() + .next() + 
.context("Missing token balance migration data")? + else { + bail!("Invalid token balance migration data"); + }; + + let chain_id = values + .get(3) + .and_then(|token| token.clone().into_uint()) + .context("Missing chainId")?; + let asset_id_bytes = values + .get(4) + .and_then(|token| token.clone().into_fixed_bytes()) + .context("Missing assetId")?; + + Ok((chain_id, H256::from_slice(&asset_id_bytes), selector)) } diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/gateway/mod.rs b/zkstack_cli/crates/zkstack/src/commands/chain/gateway/mod.rs index 49664ae18c6c..bde3b084f2b1 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/gateway/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/gateway/mod.rs @@ -42,7 +42,8 @@ pub enum GatewayComamnds { MigrateFromGateway(migrate_from_gateway::MigrateFromGatewayArgs), NotifyAboutToGatewayUpdate(NotifyServerArgs), NotifyAboutFromGatewayUpdate(NotifyServerArgs), - MigrateTokenBalances(migrate_token_balances::MigrateTokenBalancesArgs), + InitiateTokenBalanceMigration(migrate_token_balances::InitiateTokenBalanceMigrationArgs), + FinalizeTokenBalanceMigration(migrate_token_balances::FinalizeTokenBalanceMigrationArgs), } pub async fn run(shell: &Shell, args: GatewayComamnds) -> anyhow::Result<()> { @@ -78,8 +79,11 @@ pub async fn run(shell: &Shell, args: GatewayComamnds) -> anyhow::Result<()> { GatewayComamnds::NotifyAboutFromGatewayUpdate(args) => { gateway_common::notify_server(args, shell, MigrationDirection::FromGateway).await } - GatewayComamnds::MigrateTokenBalances(args) => { - migrate_token_balances::run(args, shell).await + GatewayComamnds::InitiateTokenBalanceMigration(args) => { + migrate_token_balances::run_initiate(args, shell).await + } + GatewayComamnds::FinalizeTokenBalanceMigration(args) => { + migrate_token_balances::run_finalize(args, shell).await } } } diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/args/chain.rs 
b/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/args/chain.rs index 74e0fea188cd..3505df4c0925 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/args/chain.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/args/chain.rs @@ -41,10 +41,6 @@ impl DefaultChainUpgradeArgs { impl ChainUpgradeParams { pub async fn fill_if_empty(mut self, shell: &Shell) -> anyhow::Result { - if !self.dangerous_local_default_overrides.unwrap_or_default() { - return Ok(self); - } - let chain_config = ZkStackConfig::current_chain(shell)?; self.chain_id = Some(self.chain_id.unwrap_or(chain_config.chain_id.as_u64())); @@ -61,6 +57,11 @@ impl ChainUpgradeParams { .unwrap_or("http://localhost:3250".to_string()), ); + self.l1_rpc_url = Some( + self.l1_rpc_url + .unwrap_or("http://localhost:8545".to_string()), + ); + self.gw_rpc_url = if let Some(url) = self.gw_rpc_url { Some(url) } else { @@ -71,12 +72,6 @@ impl ChainUpgradeParams { .ok() }; - self.l1_rpc_url = if let Some(url) = self.l1_rpc_url { - Some(url) - } else { - chain_config.get_secrets_config().await?.l1_rpc_url().ok() - }; - self.gw_chain_id = Some(self.gw_chain_id.unwrap_or(506)); self.l1_gas_price = Some(self.l1_gas_price.unwrap_or(100000)); self.l2_rpc_url = if let Some(url) = self.l2_rpc_url { diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/default_chain_upgrade.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/default_chain_upgrade.rs index bb7dd7a482f7..261491906393 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/default_chain_upgrade.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/default_chain_upgrade.rs @@ -1,18 +1,22 @@ -use anyhow::{bail, ensure, Context}; -use ethers::{providers::Middleware, utils::hex}; +use std::path::PathBuf; + +use anyhow::{bail, Context}; +use ethers::{contract::BaseContract, providers::Middleware, utils::hex}; +use lazy_static::lazy_static; use 
serde::{Deserialize, Serialize}; use xshell::Shell; use zkstack_cli_common::{ ethereum::{get_ethers_provider, get_zk_client}, + forge::{Forge, ForgeScriptArgs}, logger, + wallets::Wallet, }; use zkstack_cli_config::{ + forge_interface::script_params::ACCEPT_GOVERNANCE_SCRIPT_PARAMS, traits::{FileConfigTrait, ReadConfig}, ZkStackConfig, ZkStackConfigTrait, }; -use zksync_basic_types::{ - protocol_version::ProtocolVersionId, web3::Bytes, Address, L1BatchNumber, L2BlockNumber, U256, -}; +use zksync_basic_types::{web3::Bytes, Address, L1BatchNumber, H256, U256}; use zksync_types::L2_BRIDGEHUB_ADDRESS; use zksync_web3_decl::{ client::{DynClient, L2}, @@ -20,7 +24,7 @@ use zksync_web3_decl::{ }; use crate::{ - abi::{BridgehubAbi, ZkChainAbi}, + abi::{BridgehubAbi, ZkChainAbi, ADMINFUNCTIONSABI_ABI}, commands::{ chain::{ admin_call_builder::{AdminCall, AdminCallBuilder}, @@ -32,9 +36,16 @@ use crate::{ utils::{print_error, set_upgrade_timestamp_calldata}, }, }, - utils::addresses::apply_l1_to_l2_alias, + utils::{ + addresses::apply_l1_to_l2_alias, + forge::{check_the_balance, fill_forge_private_key, WalletOwner}, + }, }; +lazy_static! { + static ref ADMIN_FUNCTIONS: BaseContract = BaseContract::from(ADMINFUNCTIONSABI_ABI.clone()); +} + #[derive(Debug, Default)] pub struct FetchedChainInfo { pub hyperchain_addr: Address, @@ -56,27 +67,27 @@ async fn verify_next_batch_new_version( let next_l2_block = right_bound + 1; - let block_details = main_node_client - .get_block_details(L2BlockNumber(next_l2_block.as_u32())) - .await? 
- .with_context(|| format!("No L2 block is present after the batch {}", batch_number))?; - - let protocol_version = block_details.protocol_version.with_context(|| { - format!( - "Protocol version not present for block {}", - next_l2_block.as_u64() - ) - })?; - match upgrade_versions { - UpgradeVersion::V29InteropAFf => ensure!( - protocol_version >= ProtocolVersionId::Version29, - "THe block does not yet contain the v29 upgrade" - ), - UpgradeVersion::V31InteropB => ensure!( - protocol_version >= ProtocolVersionId::Version31, - "The block does not yet contain the v31 upgrade" - ), - } + // let block_details = main_node_client + // .get_block_details(L2BlockNumber(next_l2_block.as_u32())) + // .await? + // .with_context(|| format!("No L2 block is present after the batch {}", batch_number))?; + + // let protocol_version = block_details.protocol_version.with_context(|| { + // format!( + // "Protocol version not present for block {}", + // next_l2_block.as_u64() + // ) + // })?; + // match upgrade_versions { + // UpgradeVersion::V29InteropAFf => ensure!( + // protocol_version >= ProtocolVersionId::Version29, + // "THe block does not yet contain the v29 upgrade" + // ), + // UpgradeVersion::V31InteropB => ensure!( + // protocol_version >= ProtocolVersionId::Version31, + // "The block does not yet contain the v31 upgrade" + // ), + // } Ok(()) } @@ -132,10 +143,7 @@ pub async fn fetch_chain_info( let chain_id = U256::from(args.chain_id); let bridgehub = BridgehubAbi::new( - upgrade_info - .deployed_addresses - .bridgehub - .bridgehub_proxy_addr, + upgrade_info.core_contracts.bridgehub_proxy_addr, l1_provider.clone(), ); let zkchain_addr = bridgehub.get_zk_chain(chain_id).await?; @@ -186,34 +194,50 @@ pub async fn fetch_chain_info( #[derive(Debug, Serialize, Deserialize, Clone)] pub struct UpgradeInfo { - // Information about pre-upgrade contracts. 
- pub(crate) l1_chain_id: u32, - pub(crate) gateway_chain_id: u32, - pub(crate) deployed_addresses: DeployedAddresses, + // Simplified: just read the specific addresses we need + pub(crate) core_contracts: CoreContracts, + pub(crate) state_transition: StateTransition, + pub(crate) contracts_config: ContractsConfig, - pub(crate) gateway: Gateway, // Information from upgrade + #[serde(skip)] pub(crate) chain_upgrade_diamond_cut: Bytes, + #[serde(default)] + pub(crate) chain_upgrade_diamond_cut_file: Option, +} + +impl UpgradeInfo { + /// Load the diamond cut data from the file if it hasn't been loaded yet + pub fn load_diamond_cut(&mut self) -> anyhow::Result<()> { + if self.chain_upgrade_diamond_cut.0.is_empty() { + if let Some(ref file_path) = self.chain_upgrade_diamond_cut_file { + let hex_string = std::fs::read_to_string(file_path)?; + let hex_trimmed = hex_string.trim().trim_start_matches("0x"); + let bytes = hex::decode(hex_trimmed)?; + self.chain_upgrade_diamond_cut = Bytes(bytes); + } + } + Ok(()) + } } impl FileConfigTrait for UpgradeInfo {} #[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ContractsConfig { - pub(crate) new_protocol_version: u64, - pub(crate) old_protocol_version: u64, +pub struct CoreContracts { + pub(crate) bridgehub_proxy_addr: Address, } #[derive(Debug, Serialize, Deserialize, Clone)] -pub struct DeployedAddresses { - pub(crate) bridgehub: BridgehubAddresses, +pub struct StateTransition { pub(crate) validator_timelock_addr: Address, } #[derive(Debug, Serialize, Deserialize, Clone)] -pub struct BridgehubAddresses { - pub(crate) bridgehub_proxy_addr: Address, +pub struct ContractsConfig { + pub(crate) new_protocol_version: u64, + pub(crate) old_protocol_version: u64, } #[derive(Debug, Serialize, Deserialize, Clone)] @@ -245,7 +269,7 @@ pub(crate) async fn run_chain_upgrade( run_upgrade: bool, upgrade_version: UpgradeVersion, ) -> anyhow::Result<()> { - let forge_args = &Default::default(); + let forge_args: &ForgeScriptArgs = 
&Default::default(); let contracts_foundry_path = ZkStackConfig::from_file(shell)?.path_to_foundry_scripts(); let chain_config = ZkStackConfig::current_chain(shell)?; @@ -261,12 +285,16 @@ pub(crate) async fn run_chain_upgrade( } // 0. Read the GatewayUpgradeInfo - let upgrade_info = UpgradeInfo::read( + let mut upgrade_info = UpgradeInfo::read( shell, args.clone() .upgrade_description_path .expect("upgrade_description_path is required"), )?; + + // Load the diamond cut data from file + upgrade_info.load_diamond_cut()?; + logger::info("upgrade_info: "); // 1. Update all the configs @@ -309,73 +337,72 @@ pub(crate) async fn run_chain_upgrade( let (calldata, total_value) = if chain_info.settlement_layer == args.gw_chain_id.unwrap() { let mut admin_calls_gw = AdminCallBuilder::new(vec![]); - admin_calls_gw.append_execute_upgrade( - chain_info.hyperchain_addr, - upgrade_info.contracts_config.old_protocol_version, - upgrade_info.chain_upgrade_diamond_cut.clone(), - ); - - admin_calls_gw - .prepare_upgrade_chain_on_gateway_calls( - shell, - forge_args, - &contracts_foundry_path, - args.chain_id.expect("chain_id is required"), - args.gw_chain_id.expect("gw_chain_id is required"), - upgrade_info - .deployed_addresses - .bridgehub - .bridgehub_proxy_addr, - args.l1_gas_price.expect("l1_gas_price is required"), - upgrade_info.contracts_config.old_protocol_version, - chain_info.gw_hyperchain_addr, - chain_info.l1_asset_router_proxy, - args_input - .refund_recipient - .context("refund_recipient is required")? 
- .parse() - .context("refund recipient is not a valid address")?, - upgrade_info.gateway.upgrade_cut_data.0.into(), - args.l1_rpc_url.clone().expect("l1_rpc_url is required"), - ) - .await; + // admin_calls_gw.append_execute_upgrade( + // chain_info.hyperchain_addr, + // upgrade_info.contracts_config.old_protocol_version, + // upgrade_info.chain_upgrade_diamond_cut.clone(), + // ); + + // admin_calls_gw + // .prepare_upgrade_chain_on_gateway_calls( + // shell, + // forge_args, + // &contracts_foundry_path, + // args.chain_id.expect("chain_id is required"), + // args.gw_chain_id.expect("gw_chain_id is required"), + // upgrade_info.core_contracts.bridgehub_proxy_addr, + // args.l1_gas_price.expect("l1_gas_price is required"), + // upgrade_info.contracts_config.old_protocol_version, + // chain_info.gw_hyperchain_addr, + // chain_info.l1_asset_router_proxy, + // args_input + // .refund_recipient + // .context("refund_recipient is required")? + // .parse() + // .context("refund recipient is not a valid address")?, + // upgrade_info.gateway.upgrade_cut_data.0.into(), + // args.l1_rpc_url.clone().expect("l1_rpc_url is required"), + // ) + // .await; // v29: enable_validator_via_gateway for operator - if let Some(validators) = &additional.updated_validators { - let operator = validators.operator.context("operator is required")?; - let enable_validator_calls = crate::admin_functions::enable_validator_via_gateway( - shell, - forge_args, - &contracts_foundry_path, - crate::admin_functions::AdminScriptMode::OnlySave, - upgrade_info - .deployed_addresses - .bridgehub - .bridgehub_proxy_addr, - args.l1_gas_price.expect("l1_gas_price is required").into(), - args.chain_id.expect("chain_id is required"), - args.gw_chain_id.expect("gw_chain_id is required"), - operator, - upgrade_info - .gateway - .gateway_state_transition - .validator_timelock_addr, - operator, - args.l1_rpc_url.clone().expect("l1_rpc_url is required"), - ) - .await?; - 
admin_calls_gw.extend_with_calls(enable_validator_calls.calls); - } + // if let Some(validators) = &additional.updated_validators { + // let operator = validators.operator.context("operator is required")?; + // let enable_validator_calls = crate::admin_functions::enable_validator_via_gateway( + // shell, + // forge_args, + // &contracts_foundry_path, + // crate::admin_functions::AdminScriptMode::OnlySave, + // upgrade_info.core_contracts.bridgehub_proxy_addr, + // args.l1_gas_price.expect("l1_gas_price is required").into(), + // args.chain_id.expect("chain_id is required"), + // args.gw_chain_id.expect("gw_chain_id is required"), + // operator, + // upgrade_info + // .gateway + // .gateway_state_transition + // .validator_timelock_addr, + // operator, + // args.l1_rpc_url.clone().expect("l1_rpc_url is required"), + // ) + // .await?; + // admin_calls_gw.extend_with_calls(enable_validator_calls.calls); + // } admin_calls_gw.display(); - let (gw_chain_admin_calldata, total_value) = admin_calls_gw.compile_full_calldata(); - - logger::info(format!( - "Full calldata to call `ChainAdmin` with : {}\nTotal value: {}", - hex::encode(&gw_chain_admin_calldata), - total_value, - )); + let (gw_chain_admin_calldata, total_value) = if admin_calls_gw.is_empty() { + logger::info("No calls to execute for gateway upgrade"); + (vec![], U256::zero()) + } else { + let (data, value) = admin_calls_gw.compile_full_calldata(); + logger::info(format!( + "Full calldata to call `ChainAdmin` with : {}\nTotal value: {}", + hex::encode(&data), + value, + )); + (data, value) + }; (gw_chain_admin_calldata, total_value) } else { let mut admin_calls_finalize = AdminCallBuilder::new(vec![]); @@ -387,41 +414,43 @@ pub(crate) async fn run_chain_upgrade( ); // v29: enable_validator for operator and blob_operator - if let Some(validators) = &additional.updated_validators { - for validator in [ - validators.operator.context("operator is required")?, - validators - .blob_operator - .context("blob_operator 
is required")?, - ] { - let enable_validator_calls = crate::admin_functions::enable_validator( - shell, - forge_args, - &contracts_foundry_path, - crate::admin_functions::AdminScriptMode::OnlySave, - upgrade_info - .deployed_addresses - .bridgehub - .bridgehub_proxy_addr, - args.chain_id.expect("chain_id is required"), - validator, - upgrade_info.deployed_addresses.validator_timelock_addr, - args.l1_rpc_url.clone().expect("l1_rpc_url is required"), - ) - .await?; - admin_calls_finalize.extend_with_calls(enable_validator_calls.calls); - } - } + // if let Some(validators) = &additional.updated_validators { + // for validator in [ + // validators.operator.context("operator is required")?, + // validators + // .blob_operator + // .context("blob_operator is required")?, + // ] { + // let enable_validator_calls = crate::admin_functions::enable_validator( + // shell, + // forge_args, + // &contracts_foundry_path, + // crate::admin_functions::AdminScriptMode::OnlySave, + // upgrade_info.core_contracts.bridgehub_proxy_addr, + // args.chain_id.expect("chain_id is required"), + // validator, + // upgrade_info.state_transition.validator_timelock_addr, + // args.l1_rpc_url.clone().expect("l1_rpc_url is required"), + // ) + // .await?; + // admin_calls_finalize.extend_with_calls(enable_validator_calls.calls); + // } + // } admin_calls_finalize.display(); - let (chain_admin_calldata, total_value) = admin_calls_finalize.compile_full_calldata(); - - logger::info(format!( - "Full calldata to call `ChainAdmin` with : {}\nTotal value: {}", - hex::encode(&chain_admin_calldata), - total_value, - )); + let (chain_admin_calldata, total_value) = if admin_calls_finalize.is_empty() { + logger::info("No calls to execute for direct upgrade"); + (vec![], U256::zero()) + } else { + let (data, value) = admin_calls_finalize.compile_full_calldata(); + logger::info(format!( + "Full calldata to call `ChainAdmin` with : {}\nTotal value: {}", + hex::encode(&data), + value, + )); + (data, value) + }; 
(chain_admin_calldata, total_value) }; @@ -445,22 +474,27 @@ pub(crate) async fn run_chain_upgrade( logger::info("Set upgrade timestamp successfully!"); logger::info(format!("receipt: {:#?}", receipt1)); - logger::info("Starting the migration!"); - let receipt = send_tx( - chain_info.chain_admin_addr, - calldata, - total_value, - args.l1_rpc_url.clone().unwrap(), - chain_config - .get_wallets_config()? - .governor - .private_key_h256() - .unwrap(), - "finalize upgrade", - ) - .await?; - logger::info("Upgrade completed successfully!"); - logger::info(format!("receipt: {:#?}", receipt)); + // Only run migration if there are calls to execute + if !calldata.is_empty() { + logger::info("Starting the migration!"); + let receipt = send_tx( + chain_info.chain_admin_addr, + calldata, + total_value, + args.l1_rpc_url.clone().unwrap(), + chain_config + .get_wallets_config()? + .governor + .private_key_h256() + .unwrap(), + "finalize upgrade", + ) + .await?; + logger::info("Upgrade completed successfully!"); + logger::info(format!("receipt: {:#?}", receipt)); + } else { + logger::info("Skipping migration (no calls to execute)"); + } } Ok(()) @@ -471,12 +505,110 @@ pub(crate) async fn run( args_input: DefaultChainUpgradeArgs, run_upgrade: bool, ) -> anyhow::Result<()> { - run_chain_upgrade( - shell, - args_input.params.clone(), - AdditionalUpgradeParams::default(), - run_upgrade, - args_input.upgrade_version, - ) - .await + if run_upgrade { + // Use simplified approach: call Forge script that reads from CTM + let chain_config = ZkStackConfig::current_chain(shell)?; + let contracts_config = chain_config.get_contracts_config()?; + let wallets_config = chain_config.get_wallets_config()?; + + // Fill in default parameters + let args = args_input.params.fill_if_empty(shell).await?; + + let chain_address = contracts_config.l1.diamond_proxy_addr; + let ctm_address = contracts_config + .ecosystem_contracts + .ctm + .state_transition_proxy_addr; + let l1_rpc_url = 
args.l1_rpc_url.clone().context("l1_rpc_url is required")?; + let governor_private_key = wallets_config + .governor + .private_key_h256() + .context("governor private key is required")?; + + run_chain_upgrade_from_ctm( + shell, + chain_address, + ctm_address, + l1_rpc_url, + governor_private_key, + ) + .await + } else { + // Dry run - use old approach + run_chain_upgrade( + shell, + args_input.params.clone(), + AdditionalUpgradeParams::default(), + run_upgrade, + args_input.upgrade_version, + ) + .await + } +} + +/// Run chain upgrade using Forge script that reads diamond cut from CTM +/// This bypasses TOML parsing issues with large hex strings +pub(crate) async fn run_chain_upgrade_from_ctm( + shell: &Shell, + chain_address: Address, + ctm_address: Address, + l1_rpc_url: String, + _governor_private_key: H256, +) -> anyhow::Result<()> { + let contracts_foundry_path = ZkStackConfig::from_file(shell)?.path_to_foundry_scripts(); + let chain_config = ZkStackConfig::current_chain(shell)?; + let wallets_config = chain_config.get_wallets_config()?; + let contracts_config = chain_config.get_contracts_config()?; + + logger::info(format!( + "Running chain upgrade from CTM: chain={:?}, ctm={:?}", + chain_address, ctm_address + )); + + let mut forge_args = ForgeScriptArgs::default(); + forge_args.resume = false; + + // Get admin addresses + let admin_address = contracts_config.l1.chain_admin_addr; + let access_control_restriction = contracts_config + .l1 + .access_control_restriction_addr + .context("access_control_restriction_addr is required")?; + + // Encode the function call + let calldata = ADMIN_FUNCTIONS + .encode( + "upgradeChainFromCTM", + ( + chain_address, + ctm_address, + admin_address, + access_control_restriction, + ), + ) + .context("Failed to encode upgradeChainFromCTM call")?; + + // Set up the Forge script + let forge = Forge::new(&contracts_foundry_path) + .script( + &ACCEPT_GOVERNANCE_SCRIPT_PARAMS.script(), + forge_args.clone(), + ) + .with_ffi() + 
.with_rpc_url(l1_rpc_url.clone()) + .with_broadcast() + .with_calldata(&calldata); + + // Fill in the private key for the governor wallet + let forge = + fill_forge_private_key(forge, Some(&wallets_config.governor), WalletOwner::Governor)?; + + // Check balance + check_the_balance(&forge).await?; + + // Run the script + forge.run(shell)?; + + logger::success("Chain upgrade from CTM completed successfully"); + Ok(()) } diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/default_ecosystem_upgrade.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/default_ecosystem_upgrade.rs index 3427007377b8..31a4452c3ecb 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/default_ecosystem_upgrade.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/default_ecosystem_upgrade.rs @@ -1,39 +1,26 @@ use anyhow::Context; -use ethers::{ - abi::{encode, Token}, - contract::BaseContract, - providers::Middleware, - utils::hex, -}; +use ethers::{contract::BaseContract, types::Address}; use lazy_static::lazy_static; use serde::Deserialize; -use xshell::{cmd, Shell}; -use zkstack_cli_common::{ethereum::get_ethers_provider, forge::Forge, logger, spinner::Spinner}; +use xshell::Shell; +use zkstack_cli_common::{forge::Forge, logger, spinner::Spinner}; use zkstack_cli_config::{ forge_interface::{ script_params::{ ForgeScriptParams, FINALIZE_UPGRADE_SCRIPT_PARAMS, V29_UPGRADE_ECOSYSTEM_PARAMS, - V31_UPGRADE_CORE_CONTRACTS_PARAMS, V31_UPGRADE_CTM_CONTRACTS_PARAMS, - }, - upgrade_ecosystem::{ - input::{ - EcosystemUpgradeInput, EcosystemUpgradeSpecificConfig, - GatewayStateTransitionConfig, GatewayUpgradeContractsConfig, V29UpgradeParams, - V31UpgradeParams, - }, - output::EcosystemUpgradeOutput, + V31_UPGRADE_ECOSYSTEM_PARAMS, }, + upgrade_ecosystem::output::EcosystemUpgradeOutput, }, - traits::{ReadConfig, ReadConfigWithBasePath, SaveConfig, SaveConfigWithBasePath}, - ChainConfig, CoreContractsConfig, EcosystemConfig, 
ZkStackConfig, + traits::{ReadConfig, ReadConfigWithBasePath, SaveConfigWithBasePath}, + CoreContractsConfig, EcosystemConfig, ZkStackConfig, }; -use zkstack_cli_types::{ProverMode, VMOption}; -use zksync_basic_types::Address; -use zksync_types::{h256_to_address, H256, SHARED_BRIDGE_ETHER_TOKEN_ADDRESS, U256}; +use zkstack_cli_types::VMOption; +use zksync_types::{SHARED_BRIDGE_ETHER_TOKEN_ADDRESS, U256}; use crate::{ abi::IFINALIZEUPGRADEABI_ABI, - admin_functions::{ecosystem_admin_execute_calls, governance_execute_calls, AdminScriptMode}, + admin_functions::{governance_execute_calls, AdminScriptMode}, commands::dev::commands::upgrades::{ args::ecosystem::{EcosystemUpgradeArgs, EcosystemUpgradeArgsFinal, EcosystemUpgradeStage}, types::UpgradeVersion, @@ -42,8 +29,7 @@ use crate::{ utils::forge::{fill_forge_private_key, WalletOwner}, }; -// TODO: make it non-constant -pub const LOCAL_GATEWAY_CHAIN_NAME: &str = "gateway"; +// Removed: LOCAL_GATEWAY_CHAIN_NAME - no longer needed with env var approach pub async fn run( shell: &Shell, @@ -142,112 +128,8 @@ async fn no_governance_prepare( .l1_rpc_url()? }; - let current_contracts_config = ecosystem_config.get_contracts_config()?; - let bridgehub_proxy_address = current_contracts_config - .core_ecosystem_contracts - .bridgehub_proxy_addr; - - let bridgehub_proxy_address_str = format!("{:#x}", bridgehub_proxy_address); - - logger::info(format!( - "Executing: cast call {} \"messageRoot()(address)\" to get the current messageRoot address from BridgeHub.", - bridgehub_proxy_address_str - )); - - // Execute the cast call command. - // The command is: cast call "messageRoot()(address)" - // This retrieves the address of the messageRoot contract associated with the BridgeHub. 
- let cast_output_stdout = cmd!( - shell, - "cast call {bridgehub_proxy_address_str} messageRoot()(address) -r {l1_rpc_url}" - ) - .read() - .context("Failed to execute 'cast call' to retrieve messageRoot address from BridgeHub.")?; - - // The output from `cast call` is typically the address followed by a newline. - // Trim whitespace and store it. - let message_root_address_from_cast = cast_output_stdout.trim().to_string(); - - if message_root_address_from_cast.is_empty() - || message_root_address_from_cast == "0x0000000000000000000000000000000000000000" - { - anyhow::bail!( - "Retrieved messageRoot address from BridgeHub is empty or zero: '{}'. This indicates an issue.", - message_root_address_from_cast - ); - } - - logger::info(format!( - "Successfully retrieved messageRoot address from BridgeHub: {}", - message_root_address_from_cast - )); - - let initial_deployment_config = ecosystem_config.get_initial_deployment_config()?; - - let ecosystem_upgrade_config_path = get_ecosystem_upgrade_params(upgrade_version) - .input(&ecosystem_config.path_to_foundry_scripts_for_ctm(vm_option)); - - let ctm_upgrade_config_path = get_ctm_upgrade_params(upgrade_version) - .input(&ecosystem_config.path_to_foundry_scripts_for_ctm(vm_option)); - - let gateway_upgrade_config = GatewayUpgradeContractsConfig { - gateway_state_transition: GatewayStateTransitionConfig { - chain_type_manager_proxy_addr: Address::zero(), - chain_type_manager_proxy_admin: Address::zero(), - rollup_da_manager: Address::zero(), - rollup_sl_da_validator: Address::zero(), - }, - chain_id: 0, - }; - if *upgrade_version != UpgradeVersion::V31InteropB { - let _gateway_upgrade_config = get_gateway_state_transition_config(ecosystem_config).await?; - } - - let upgrade_specific_config = match upgrade_version { - UpgradeVersion::V29InteropAFf => { - let gateway_chain_config = get_local_gateway_chain_config(ecosystem_config)?; - let gateway_validator_timelock_addr = gateway_chain_config - .get_gateway_config() - 
.unwrap() - .validator_timelock_addr; - EcosystemUpgradeSpecificConfig::V29(V29UpgradeParams { - encoded_old_validator_timelocks: hex::encode(encode(&[Token::Array(vec![ - Token::Address( - current_contracts_config - .ctm(vm_option) - .validator_timelock_addr, - ), - ])])), - encoded_old_gateway_validator_timelocks: hex::encode(encode(&[Token::Array( - vec![Token::Address(gateway_validator_timelock_addr)], - )])), - }) - } - UpgradeVersion::V31InteropB => EcosystemUpgradeSpecificConfig::V31(V31UpgradeParams { - some_value_for_serialization: "".to_string(), - }), - }; - - let ecosystem_upgrade = EcosystemUpgradeInput::new( - ¤t_contracts_config, - &gateway_upgrade_config, - &initial_deployment_config, - ecosystem_config.era_chain_id, - // TODO NEED TO USE ERA DIAMOND PROXY - Address::zero(), - ecosystem_config.prover_version == ProverMode::NoProofs, - upgrade_specific_config, - vm_option, - ); - - logger::info(format!("ecosystem_upgrade: {:?}", ecosystem_upgrade)); - logger::info(format!( - "ecosystem_upgrade_config_path: {:?}", - ecosystem_upgrade_config_path - )); - - ecosystem_upgrade.save(shell, ecosystem_upgrade_config_path.clone())?; - ecosystem_upgrade.save(shell, ctm_upgrade_config_path.clone())?; + // Note: The forge script now reads configuration from environment variables + // instead of input TOML files, so we no longer need to create EcosystemUpgradeInput let mut forge = Forge::new(&ecosystem_config.path_to_foundry_scripts_for_ctm(vm_option)) .script( &get_ecosystem_upgrade_params(upgrade_version).script(), @@ -287,16 +169,21 @@ async fn no_governance_prepare( serde_json::from_str(&file_content).context("Failed to parse broadcast file")? }; + logger::info("done! 1"); + let mut output = EcosystemUpgradeOutput::read( shell, get_ecosystem_upgrade_params(upgrade_version) .output(&ecosystem_config.path_to_foundry_scripts_for_ctm(vm_option)), )?; + logger::info("done! 2"); + // Add all the transaction hashes. 
for tx in broadcast_file.transactions { output.transactions.push(tx.hash); } + logger::info("done! 3"); output.save_with_base_path(shell, &ecosystem_config.config)?; @@ -304,7 +191,7 @@ async fn no_governance_prepare( } async fn ecosystem_admin( - init_args: &EcosystemUpgradeArgsFinal, + _init_args: &EcosystemUpgradeArgsFinal, shell: &Shell, ecosystem_config: &EcosystemConfig, upgrade_version: &UpgradeVersion, @@ -318,30 +205,32 @@ async fn ecosystem_admin( .output(&ecosystem_config.path_to_foundry_scripts_for_ctm(vm_option)), )?; previous_output.save_with_base_path(shell, &ecosystem_config.config)?; - let l1_rpc_url = if let Some(url) = init_args.l1_rpc_url.clone() { - url - } else { - ecosystem_config - .load_current_chain()? - .get_secrets_config() - .await? - .l1_rpc_url()? - }; - - // These are ABI-encoded - let ecosystem_admin_calls = previous_output.ecosystem_admin_calls; - - ecosystem_admin_execute_calls( - shell, - // Note, that ecosystem admin and governor use the same wallet. - &ecosystem_config.get_wallets()?.governor, - ecosystem_config.get_contracts_config()?.l1.chain_admin_addr, - ecosystem_config.path_to_foundry_scripts_for_ctm(vm_option), - ecosystem_admin_calls.server_notifier_upgrade.0, - &init_args.forge_args.clone(), - l1_rpc_url, - ) - .await?; + // let l1_rpc_url = if let Some(url) = init_args.l1_rpc_url.clone() { + // url + // } else { + // ecosystem_config + // .load_current_chain()? + // .get_secrets_config() + // .await? + // .l1_rpc_url()? + // }; + + // // These are ABI-encoded + + // Note: ecosystem_admin calls are not currently used + // let ecosystem_admin_calls = previous_output.ecosystem_admin_calls + + // ecosystem_admin_execute_calls( + // shell, + // // Note, that ecosystem admin and governor use the same wallet. 
+ // &ecosystem_config.get_wallets()?.governor, + // ecosystem_config.get_contracts_config()?.l1.chain_admin_addr, + // ecosystem_config.path_to_foundry_scripts_for_ctm(vm_option), + // ecosystem_admin_calls.server_notifier_upgrade.0, + // &init_args.forge_args.clone(), + // l1_rpc_url, + // ) + // .await?; spinner.finish(); Ok(()) @@ -451,17 +340,23 @@ fn update_contracts_config_from_output( output: &EcosystemUpgradeOutput, vm_option: VMOption, ) { - let ctm = match vm_option { - VMOption::EraVM => contracts_config.era_ctm.as_mut().unwrap(), - VMOption::ZKSyncOsVM => contracts_config.zksync_os_ctm.as_mut().unwrap(), - }; - - // This is force deployment data for creating new contracts, not really relevant here tbh, - ctm.force_deployments_data = Some(hex::encode( - &output.contracts_config.force_deployments_data.0, - )); - ctm.rollup_l1_da_validator_addr = output.deployed_addresses.rollup_l1_da_validator_addr; - ctm.no_da_validium_l1_validator_addr = output.deployed_addresses.validium_l1_da_validator_addr; + // Update the BytecodesSupplier address in the CTM config if it's present in the upgrade output + if let Some(ref state_transition) = output.state_transition { + if state_transition.bytecodes_supplier_addr != Address::zero() { + let ctm = match vm_option { + VMOption::EraVM => contracts_config.era_ctm.as_mut(), + VMOption::ZKSyncOsVM => contracts_config.zksync_os_ctm.as_mut(), + }; + + if let Some(ctm) = ctm { + ctm.l1_bytecodes_supplier_addr = state_transition.bytecodes_supplier_addr; + logger::info(format!( + "Updated BytecodesSupplier address in CTM config to: {:?}", + state_transition.bytecodes_supplier_addr + )); + } + } + } } // Governance has approved the proposal, now it will insert the new protocol version into our STM (CTM) @@ -627,59 +522,59 @@ async fn no_governance_stage_2( fn get_ecosystem_upgrade_params(upgrade_version: &UpgradeVersion) -> ForgeScriptParams { match upgrade_version { UpgradeVersion::V29InteropAFf => V29_UPGRADE_ECOSYSTEM_PARAMS, - 
UpgradeVersion::V31InteropB => V31_UPGRADE_CORE_CONTRACTS_PARAMS, - } -} - -fn get_ctm_upgrade_params(upgrade_version: &UpgradeVersion) -> ForgeScriptParams { - match upgrade_version { - UpgradeVersion::V31InteropB => V31_UPGRADE_CTM_CONTRACTS_PARAMS, - _ => panic!( - "Upgrade version {} is not supported for CTM upgrade", - upgrade_version - ), + UpgradeVersion::V31InteropB => V31_UPGRADE_ECOSYSTEM_PARAMS, } } -const PROXY_ADMIN_SLOT: H256 = H256([ - 0xb5, 0x31, 0x27, 0x68, 0x4a, 0x56, 0x8b, 0x31, 0x73, 0xae, 0x13, 0xb9, 0xf8, 0xa6, 0x01, 0x6e, - 0x24, 0x3e, 0x63, 0xb6, 0xe8, 0xee, 0x11, 0x78, 0xd6, 0xa7, 0x17, 0x85, 0x0b, 0x5d, 0x61, 0x03, -]); - -fn get_local_gateway_chain_config( - ecosystem_config: &EcosystemConfig, -) -> anyhow::Result { - let chain_config = ecosystem_config.load_chain(Some(LOCAL_GATEWAY_CHAIN_NAME.to_string()))?; - Ok(chain_config) -} - -async fn get_gateway_state_transition_config( - ecosystem_config: &EcosystemConfig, -) -> anyhow::Result { - // Firstly, we obtain the gateway config - let chain_config = get_local_gateway_chain_config(ecosystem_config)?; - let gw_config = chain_config.get_gateway_config()?; - let general_config = chain_config.get_general_config().await?; - - let provider = get_ethers_provider(&general_config.l2_http_url()?)?; - let proxy_admin_addr = provider - .get_storage_at( - gw_config.state_transition_proxy_addr, - PROXY_ADMIN_SLOT, - None, - ) - .await?; - let proxy_admin_addr = h256_to_address(&proxy_admin_addr); - - let chain_id = chain_config.chain_id.as_u64(); - - Ok(GatewayUpgradeContractsConfig { - gateway_state_transition: GatewayStateTransitionConfig { - chain_type_manager_proxy_addr: gw_config.state_transition_proxy_addr, - chain_type_manager_proxy_admin: proxy_admin_addr, - rollup_da_manager: gw_config.rollup_da_manager, - rollup_sl_da_validator: gw_config.relayed_sl_da_validator, - }, - chain_id, - }) -} +// fn get_ctm_upgrade_params(upgrade_version: &UpgradeVersion) -> ForgeScriptParams { +// match 
upgrade_version { +// UpgradeVersion::V31InteropB => V31_UPGRADE_CTM_CONTRACTS_PARAMS, +// _ => panic!( +// "Upgrade version {} is not supported for CTM upgrade", +// upgrade_version +// ), +// } +// } + +// const PROXY_ADMIN_SLOT: H256 = H256([ +// 0xb5, 0x31, 0x27, 0x68, 0x4a, 0x56, 0x8b, 0x31, 0x73, 0xae, 0x13, 0xb9, 0xf8, 0xa6, 0x01, 0x6e, +// 0x24, 0x3e, 0x63, 0xb6, 0xe8, 0xee, 0x11, 0x78, 0xd6, 0xa7, 0x17, 0x85, 0x0b, 0x5d, 0x61, 0x03, +// ]); + +// fn get_local_gateway_chain_config( +// ecosystem_config: &EcosystemConfig, +// ) -> anyhow::Result { +// let chain_config = ecosystem_config.load_chain(Some(LOCAL_GATEWAY_CHAIN_NAME.to_string()))?; +// Ok(chain_config) +// } + +// async fn get_gateway_state_transition_config( +// ecosystem_config: &EcosystemConfig, +// ) -> anyhow::Result { +// // Firstly, we obtain the gateway config +// let chain_config = get_local_gateway_chain_config(ecosystem_config)?; +// let gw_config = chain_config.get_gateway_config()?; +// let general_config = chain_config.get_general_config().await?; + +// let provider = get_ethers_provider(&general_config.l2_http_url()?)?; +// let proxy_admin_addr = provider +// .get_storage_at( +// gw_config.state_transition_proxy_addr, +// PROXY_ADMIN_SLOT, +// None, +// ) +// .await?; +// let proxy_admin_addr = h256_to_address(&proxy_admin_addr); + +// let chain_id = chain_config.chain_id.as_u64(); + +// Ok(GatewayUpgradeContractsConfig { +// gateway_state_transition: GatewayStateTransitionConfig { +// chain_type_manager_proxy_addr: gw_config.state_transition_proxy_addr, +// chain_type_manager_proxy_admin: proxy_admin_addr, +// rollup_da_manager: gw_config.rollup_da_manager, +// rollup_sl_da_validator: gw_config.relayed_sl_da_validator, +// }, +// chain_id, +// }) +// } diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/types.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/types.rs index 06b2a5561ec8..ea867d96a155 100644 --- 
a/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/types.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/types.rs @@ -15,7 +15,7 @@ impl UpgradeVersion { pub const fn get_default_upgrade_description_path(&self) -> &'static str { match self { UpgradeVersion::V29InteropAFf => "./l1-contracts/script-out/v29-upgrade-ecosystem.toml", - UpgradeVersion::V31InteropB => "./l1-contracts/script-out/v31-upgrade-core.toml", + UpgradeVersion::V31InteropB => "./l1-contracts/script-out/v31-upgrade-ecosystem.toml", } } } diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/utils.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/utils.rs index f5257618730a..6b326dee8e05 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/utils.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/upgrades/utils.rs @@ -2,10 +2,10 @@ use ethers::contract::BaseContract; use lazy_static::lazy_static; use zkstack_cli_common::logger; -use crate::abi::ICHAINADMINABI_ABI; +use crate::abi::CHAINADMINOWNABLEABI_ABI; lazy_static! { - static ref CHAIN_ADMIN_ABI: BaseContract = BaseContract::from(ICHAINADMINABI_ABI.clone()); + static ref CHAIN_ADMIN_ABI: BaseContract = BaseContract::from(CHAINADMINOWNABLEABI_ABI.clone()); } pub(crate) fn print_error(err: anyhow::Error) {