diff --git a/.github/workflows/ci-core-reusable.yml b/.github/workflows/ci-core-reusable.yml index d08ed1b0fb64..1cf96716cb63 100644 --- a/.github/workflows/ci-core-reusable.yml +++ b/.github/workflows/ci-core-reusable.yml @@ -175,6 +175,18 @@ jobs: - name: Setup Environment uses: ./.github/actions/setup-env + # TODO: Remove once merged into main + - name: (TEMPORARY) Update foundry + run: | + ci_run curl -LO https://github.com/matter-labs/foundry-zksync/releases/download/foundry-zksync-v0.0.26/foundry_zksync_v0.0.26_linux_amd64.tar.gz + ci_run mkdir ./foundry-temp + ci_run tar zxf foundry_zksync_v0.0.26_linux_amd64.tar.gz -C ./foundry-temp + ci_run cp ./foundry-temp/forge /usr/local/cargo/bin/forge + ci_run cp ./foundry-temp/cast /usr/local/cargo/bin/cast + echo "Foundry version after update:" + ci_run forge --version + ci_run rm -rf ./foundry-temp foundry_zksync_v0.0.26_linux_amd64.tar.gz + - name: Build test dependencies run: | ci_run zkstack dev test build @@ -262,6 +274,13 @@ jobs: ci_run zkstack contract-verifier run --chain era &> ${{ env.SERVER_LOGS_DIR }}/contract-verifier-rollup.log & ci_run zkstack contract-verifier wait --chain era --verbose + - name: Check permissions + run: | + whoami + pwd + touch testfile + ls -l testfile + - name: Run tests run: | ci_run yarn highlevel-test-tools test diff --git a/.gitignore b/.gitignore index 94224a8a01c1..e2edae9bc96f 100644 --- a/.gitignore +++ b/.gitignore @@ -119,6 +119,7 @@ transactions/ # foundry-zksync install +out .format_sql_snapshot diff --git a/bin/build_and_init_ecosystem b/bin/build_and_init_ecosystem index 59abeaca1860..c0ae52129ca1 100755 --- a/bin/build_and_init_ecosystem +++ b/bin/build_and_init_ecosystem @@ -23,7 +23,16 @@ initialize_ecosystem() { --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ --server-db-name=zksync_server_localhost_era \ --ignore-prerequisites --verbose \ + --observability=false || true + zkstack dev generate-genesis + zkstack ecosystem init \ + --deploy-paymaster --deploy-erc20 --deploy-ecosystem \ + --l1-rpc-url=http://localhost:8545 \ + --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ + --server-db-name=zksync_server_localhost_era \ + --ignore-prerequisites --verbose \ --observability=false + echo "[initialize_ecosystem] finished" } diff --git a/contracts b/contracts index e06fb30c3d73..a391100b9e98 160000 --- a/contracts +++ b/contracts @@ -1 +1 @@ -Subproject commit e06fb30c3d73dcde103db9e6098042a4eb0d08b7 +Subproject commit a391100b9e98ad03d1251cf39398ad658c2eecb1 diff --git a/core/bin/snapshots_creator/src/tests.rs b/core/bin/snapshots_creator/src/tests.rs index e01c27bdf0cb..bf96425aed6b 100644 --- a/core/bin/snapshots_creator/src/tests.rs +++ b/core/bin/snapshots_creator/src/tests.rs @@ -197,7 +197,13 @@ async fn create_l1_batch( l1_batch_number: L1BatchNumber, logs_for_initial_writes: &[StorageLog], ) { - let header = L1BatchHeader::new(l1_batch_number, 0, Default::default(), Default::default()); + let header = L1BatchHeader::new( + l1_batch_number, + 0, + Default::default(), + Default::default(), + SettlementLayer::for_tests(), + ); conn.blocks_dal() .insert_mock_l1_batch(&header) .await diff --git a/core/bin/system-constants-generator/src/utils.rs b/core/bin/system-constants-generator/src/utils.rs index 0585b112e6d4..631e9c05d6c4 100644 --- a/core/bin/system-constants-generator/src/utils.rs +++ b/core/bin/system-constants-generator/src/utils.rs @@ -22,11 +22,12 @@ use zksync_multivm::{ }; use zksync_types::{ block::L2BlockHasher, 
bytecode::BytecodeHash, ethabi::Token, fee::Fee, - fee_model::BatchFeeInput, l1::L1Tx, l2::L2Tx, u256_to_h256, utils::storage_key_for_eth_balance, - AccountTreeId, Address, Execute, K256PrivateKey, L1BatchNumber, L1TxCommonData, L2BlockNumber, - L2ChainId, Nonce, ProtocolVersionId, StorageKey, Transaction, BOOTLOADER_ADDRESS, - SYSTEM_CONTEXT_ADDRESS, SYSTEM_CONTEXT_GAS_PRICE_POSITION, SYSTEM_CONTEXT_TX_ORIGIN_POSITION, - U256, ZKPORTER_IS_AVAILABLE, + fee_model::BatchFeeInput, l1::L1Tx, l2::L2Tx, settlement::SettlementLayer, u256_to_h256, + utils::storage_key_for_eth_balance, AccountTreeId, Address, Execute, K256PrivateKey, + L1BatchNumber, L1TxCommonData, L2BlockNumber, L2ChainId, Nonce, ProtocolVersionId, SLChainId, + StorageKey, Transaction, BOOTLOADER_ADDRESS, SYSTEM_CONTEXT_ADDRESS, + SYSTEM_CONTEXT_GAS_PRICE_POSITION, SYSTEM_CONTEXT_TX_ORIGIN_POSITION, U256, + ZKPORTER_IS_AVAILABLE, }; use crate::intrinsic_costs::VmSpentResourcesResult; @@ -192,6 +193,7 @@ fn default_l1_batch() -> L1BatchEnv { max_virtual_blocks_to_create: 100, interop_roots: vec![], }, + settlement_layer: SettlementLayer::L1(SLChainId(1)), } } diff --git a/core/lib/basic_types/src/protocol_version.rs b/core/lib/basic_types/src/protocol_version.rs index 37ce62163cf5..700f8509ab2d 100644 --- a/core/lib/basic_types/src/protocol_version.rs +++ b/core/lib/basic_types/src/protocol_version.rs @@ -131,9 +131,10 @@ impl ProtocolVersionId { ProtocolVersionId::Version27 => VmVersion::VmEvmEmulator, ProtocolVersionId::Version28 => VmVersion::VmEcPrecompiles, ProtocolVersionId::Version29 => VmVersion::VmInterop, - ProtocolVersionId::Version30 => VmVersion::VmInterop, + ProtocolVersionId::Version30 => VmVersion::VmMediumInterop, + // Speculative VM version for the next protocol version to be used in the upgrade integration test etc. - ProtocolVersionId::Version31 => VmVersion::VmInterop, + ProtocolVersionId::Version31 => VmVersion::VmMediumInterop, } } @@ -171,6 +172,10 @@ impl ProtocolVersionId { self < &Self::Version29 } + pub fn is_pre_medium_interop(&self) -> bool { + self < &Self::Version30 + } + pub fn is_1_4_0(&self) -> bool { self >= &ProtocolVersionId::Version18 && self < &ProtocolVersionId::Version20 } @@ -207,10 +212,6 @@ impl ProtocolVersionId { self >= &ProtocolVersionId::Version23 } - pub fn is_pre_medium_interop(&self) -> bool { - self < &ProtocolVersionId::Version30 - } - pub const fn gateway_upgrade() -> Self { ProtocolVersionId::Version26 } @@ -322,9 +323,9 @@ impl From for VmVersion { ProtocolVersionId::Version27 => VmVersion::VmEvmEmulator, ProtocolVersionId::Version28 => VmVersion::VmEcPrecompiles, ProtocolVersionId::Version29 => VmVersion::VmInterop, - ProtocolVersionId::Version30 => VmVersion::VmInterop, + ProtocolVersionId::Version30 => VmVersion::VmMediumInterop, // Speculative VM version for the next protocol version to be used in the upgrade integration test etc. 
- ProtocolVersionId::Version31 => VmVersion::VmInterop, + ProtocolVersionId::Version31 => VmVersion::VmMediumInterop, } } } diff --git a/core/lib/basic_types/src/vm.rs b/core/lib/basic_types/src/vm.rs index eb36dbbe8887..e0b75b9dc89c 100644 --- a/core/lib/basic_types/src/vm.rs +++ b/core/lib/basic_types/src/vm.rs @@ -20,6 +20,7 @@ pub enum VmVersion { VmEvmEmulator, VmEcPrecompiles, VmInterop, + VmMediumInterop, } impl VmVersion { diff --git a/core/lib/constants/src/contracts.rs b/core/lib/constants/src/contracts.rs index a12fb0730140..cdb8d27b9366 100644 --- a/core/lib/constants/src/contracts.rs +++ b/core/lib/constants/src/contracts.rs @@ -206,6 +206,26 @@ pub const L2_CHAIN_ASSET_HANDLER_ADDRESS: Address = H160([ 0x00, 0x01, 0x00, 0x0a, ]); +pub const L2_INTEROP_CENTER_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x01, 0x00, 0x0b, +]); + +pub const L2_INTEROP_HANDLER_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x01, 0x00, 0x0c, +]); + +pub const L2_ASSET_TRACKER_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x01, 0x00, 0x0d, +]); + +pub const GW_ASSET_TRACKER_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x01, 0x00, 0x0e, +]); + pub const ERC20_TRANSFER_TOPIC: H256 = H256([ 221, 242, 82, 173, 27, 226, 200, 155, 105, 194, 176, 104, 252, 55, 141, 170, 149, 43, 167, 241, 99, 196, 161, 22, 40, 245, 90, 77, 245, 35, 179, 239, diff --git a/core/lib/contracts/src/lib.rs b/core/lib/contracts/src/lib.rs index b47047aea0ea..30ab9d794977 100644 --- a/core/lib/contracts/src/lib.rs +++ b/core/lib/contracts/src/lib.rs @@ -653,6 +653,12 @@ impl BaseSystemContracts { BaseSystemContracts::load_with_bootloader(bootloader_bytecode, true) } + pub fn playground_medium_interop() -> Self { + let bootloader_bytecode: Vec<u8> = read_bootloader_code("playground_batch"); + // kl todo once contracts are stabilized move to etc/multivm + BaseSystemContracts::load_with_bootloader(bootloader_bytecode, true) + } + pub fn estimate_gas_pre_virtual_blocks() -> Self { let bootloader_bytecode = read_zbin_bytecode( "etc/multivm_bootloaders/vm_1_3_2/fee_estimate.yul/fee_estimate.yul.zbin", @@ -751,6 +757,11 @@ impl BaseSystemContracts { BaseSystemContracts::load_with_bootloader(bootloader_bytecode, true) } + pub fn estimate_gas_medium_interop() -> Self { + let bootloader_bytecode = read_bootloader_code("fee_estimate"); + BaseSystemContracts::load_with_bootloader(bootloader_bytecode, true) + } + pub fn hashes(&self) -> BaseSystemContractsHashes { BaseSystemContractsHashes { bootloader: self.bootloader.hash, diff --git a/core/lib/dal/.sqlx/query-2ab4fcd10071629228c5a2e88f764bbb0cd3ac314bb882052511518c2e2d1c60.json b/core/lib/dal/.sqlx/query-027c922ca751891efce706bfaeb367d6cc539776df7e48c573ed9882bc7f813e.json similarity index 80% rename from core/lib/dal/.sqlx/query-2ab4fcd10071629228c5a2e88f764bbb0cd3ac314bb882052511518c2e2d1c60.json rename to core/lib/dal/.sqlx/query-027c922ca751891efce706bfaeb367d6cc539776df7e48c573ed9882bc7f813e.json index 572be9f4a5e2..19ccbf18ef0b 100644 --- a/core/lib/dal/.sqlx/query-2ab4fcd10071629228c5a2e88f764bbb0cd3ac314bb882052511518c2e2d1c60.json +++ b/core/lib/dal/.sqlx/query-027c922ca751891efce706bfaeb367d6cc539776df7e48c573ed9882bc7f813e.json @@ -1,6 +1,6 @@
{ "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n system_logs,\n compressed_state_diffs,\n protocol_version,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit\n FROM\n (\n SELECT\n l1_batches.*,\n ROW_NUMBER() OVER (\n ORDER BY\n number ASC\n ) AS row_number\n FROM\n l1_batches\n WHERE\n eth_commit_tx_id IS NOT NULL\n AND l1_batches.skip_proof = TRUE\n AND l1_batches.number > $1\n ORDER BY\n number\n LIMIT\n $2\n ) inn\n LEFT JOIN commitments ON commitments.l1_batch_number = inn.number\n LEFT JOIN data_availability ON data_availability.l1_batch_number = inn.number\n WHERE\n number - row_number = $1\n ", + "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n system_logs,\n compressed_state_diffs,\n protocol_version,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit,\n settlement_layer_chain_id,\n settlement_layer_type\n \n FROM\n (\n SELECT\n l1_batches.*,\n ROW_NUMBER() OVER (\n ORDER BY\n number ASC\n ) AS row_number\n FROM\n l1_batches\n WHERE\n eth_commit_tx_id IS NOT NULL\n AND l1_batches.skip_proof = TRUE\n AND l1_batches.number > $1\n ORDER BY\n number\n LIMIT\n $2\n ) inn\n LEFT JOIN commitments ON commitments.l1_batch_number = inn.number\n LEFT JOIN data_availability ON data_availability.l1_batch_number = inn.number\n WHERE\n number - row_number = $1\n ", "describe": { "columns": [ { @@ -182,6 +182,16 @@ "ordinal": 35, "name": "pubdata_limit", "type_info": "Int8" + }, + { + "ordinal": 36, + "name": "settlement_layer_chain_id", + "type_info": "Int8" + }, + { + "ordinal": 37, + "name": "settlement_layer_type", + "type_info": "Text" } ], "parameters": { @@ -226,8 +236,10 @@ false, false, false, + true, + true, true ] }, - "hash": "2ab4fcd10071629228c5a2e88f764bbb0cd3ac314bb882052511518c2e2d1c60" + "hash": "027c922ca751891efce706bfaeb367d6cc539776df7e48c573ed9882bc7f813e" } diff --git a/core/lib/dal/.sqlx/query-1a06bc41b885bc57fe2ec5cae0f0c5b89d2ab12fbe624be833c163209c480ba3.json b/core/lib/dal/.sqlx/query-31e8acd75ef5198db6a4c0100940761a5f1f6e0689b10634be01d1e3ce9f6cf4.json similarity index 86% rename from core/lib/dal/.sqlx/query-1a06bc41b885bc57fe2ec5cae0f0c5b89d2ab12fbe624be833c163209c480ba3.json rename to core/lib/dal/.sqlx/query-31e8acd75ef5198db6a4c0100940761a5f1f6e0689b10634be01d1e3ce9f6cf4.json index 92bc35acb303..5a8f57aaa7ae 100644 --- 
a/core/lib/dal/.sqlx/query-1a06bc41b885bc57fe2ec5cae0f0c5b89d2ab12fbe624be833c163209c480ba3.json +++ b/core/lib/dal/.sqlx/query-31e8acd75ef5198db6a4c0100940761a5f1f6e0689b10634be01d1e3ce9f6cf4.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n number BETWEEN $1 AND $2\n ORDER BY\n number\n LIMIT\n $3\n ", + "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit,\n settlement_layer_chain_id,\n settlement_layer_type\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n number BETWEEN $1 AND $2\n ORDER BY\n number\n LIMIT\n $3\n ", "describe": { "columns": [ { @@ -182,6 +182,16 @@ "ordinal": 35, "name": "pubdata_limit", "type_info": "Int8" + }, + { + "ordinal": 36, + "name": "settlement_layer_chain_id", + "type_info": "Int8" + }, + { + "ordinal": 37, + "name": "settlement_layer_type", + "type_info": "Text" } ], "parameters": { @@ -227,8 +237,10 @@ false, false, false, + true, + true, true ] }, - "hash": "1a06bc41b885bc57fe2ec5cae0f0c5b89d2ab12fbe624be833c163209c480ba3" + "hash": "31e8acd75ef5198db6a4c0100940761a5f1f6e0689b10634be01d1e3ce9f6cf4" } diff --git a/core/lib/dal/.sqlx/query-653ede3029966b6b2e66c341adb43def1f0043efd4fd086261c940a7405bbf93.json b/core/lib/dal/.sqlx/query-45e9184145f631d90d569d468494adda476573312021803ba6a46be0f8479f9e.json similarity index 83% rename from core/lib/dal/.sqlx/query-653ede3029966b6b2e66c341adb43def1f0043efd4fd086261c940a7405bbf93.json rename to core/lib/dal/.sqlx/query-45e9184145f631d90d569d468494adda476573312021803ba6a46be0f8479f9e.json index 0cbede8eb271..c8aa379800bc 100644 --- a/core/lib/dal/.sqlx/query-653ede3029966b6b2e66c341adb43def1f0043efd4fd086261c940a7405bbf93.json +++ 
b/core/lib/dal/.sqlx/query-45e9184145f631d90d569d468494adda476573312021803ba6a46be0f8479f9e.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n l1_tx_count,\n l2_tx_count,\n timestamp,\n l2_to_l1_messages,\n bloom,\n priority_ops_onchain_data,\n used_contract_hashes,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n protocol_version,\n system_logs,\n pubdata_input,\n fee_address,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit\n FROM\n l1_batches\n WHERE\n is_sealed\n AND number = $1\n ", + "query": "\n SELECT\n number,\n l1_tx_count,\n l2_tx_count,\n timestamp,\n l2_to_l1_messages,\n bloom,\n priority_ops_onchain_data,\n used_contract_hashes,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n protocol_version,\n system_logs,\n pubdata_input,\n fee_address,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit,\n settlement_layer_type,\n settlement_layer_chain_id\n FROM\n l1_batches\n WHERE\n is_sealed\n AND number = $1\n ", "describe": { "columns": [ { @@ -97,6 +97,16 @@ "ordinal": 18, "name": "pubdata_limit", "type_info": "Int8" + }, + { + "ordinal": 19, + "name": "settlement_layer_type", + "type_info": "Text" + }, + { + "ordinal": 20, + "name": "settlement_layer_chain_id", + "type_info": "Int8" } ], "parameters": { @@ -123,8 +133,10 @@ false, false, false, + true, + true, true ] }, - "hash": "653ede3029966b6b2e66c341adb43def1f0043efd4fd086261c940a7405bbf93" + "hash": "45e9184145f631d90d569d468494adda476573312021803ba6a46be0f8479f9e" } diff --git a/core/lib/dal/.sqlx/query-ade4c448ee4807c1fe2d749fe5c8ddefa7adf6ee85f1963b3ee4dce3a7c7f204.json b/core/lib/dal/.sqlx/query-4b05d6656a6f455343de619802f6920013e790e6a3cb209baf40a4c5d6fd060e.json similarity index 85% rename from core/lib/dal/.sqlx/query-ade4c448ee4807c1fe2d749fe5c8ddefa7adf6ee85f1963b3ee4dce3a7c7f204.json rename to core/lib/dal/.sqlx/query-4b05d6656a6f455343de619802f6920013e790e6a3cb209baf40a4c5d6fd060e.json index e1ace0d0430b..03ff7b60a666 100644 --- a/core/lib/dal/.sqlx/query-ade4c448ee4807c1fe2d749fe5c8ddefa7adf6ee85f1963b3ee4dce3a7c7f204.json +++ b/core/lib/dal/.sqlx/query-4b05d6656a6f455343de619802f6920013e790e6a3cb209baf40a4c5d6fd060e.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_prove_tx_id IS NOT NULL\n AND eth_execute_tx_id IS NULL\n ORDER BY\n number\n LIMIT\n $1\n ", + "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n 
used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit,\n settlement_layer_chain_id,\n settlement_layer_type\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_prove_tx_id IS NOT NULL\n AND eth_execute_tx_id IS NULL\n ORDER BY\n number\n LIMIT\n $1\n ", "describe": { "columns": [ { @@ -182,6 +182,16 @@ "ordinal": 35, "name": "pubdata_limit", "type_info": "Int8" + }, + { + "ordinal": 36, + "name": "settlement_layer_chain_id", + "type_info": "Int8" + }, + { + "ordinal": 37, + "name": "settlement_layer_type", + "type_info": "Text" } ], "parameters": { @@ -225,8 +235,10 @@ false, false, false, + true, + true, true ] }, - "hash": "ade4c448ee4807c1fe2d749fe5c8ddefa7adf6ee85f1963b3ee4dce3a7c7f204" + "hash": "4b05d6656a6f455343de619802f6920013e790e6a3cb209baf40a4c5d6fd060e" } diff --git a/core/lib/dal/.sqlx/query-5110364fcaf8a803bafe641c77d64902d0156fc69b77a2e6f527fcc7686358f2.json b/core/lib/dal/.sqlx/query-5110364fcaf8a803bafe641c77d64902d0156fc69b77a2e6f527fcc7686358f2.json new file mode 100644 index 000000000000..73818d9cc2e5 --- /dev/null +++ b/core/lib/dal/.sqlx/query-5110364fcaf8a803bafe641c77d64902d0156fc69b77a2e6f527fcc7686358f2.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n l2_to_l1_messages\n FROM\n l1_batches\n WHERE\n number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l2_to_l1_messages", + "type_info": "ByteaArray" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false + ] + }, + "hash": "5110364fcaf8a803bafe641c77d64902d0156fc69b77a2e6f527fcc7686358f2" +} diff --git a/core/lib/dal/.sqlx/query-6299e20b3582b152972d6bce74fc3704016fc50a2838cd2394456908434bbd27.json b/core/lib/dal/.sqlx/query-6299e20b3582b152972d6bce74fc3704016fc50a2838cd2394456908434bbd27.json index 56ff860f3579..2fab6fb1d9f0 100644 --- a/core/lib/dal/.sqlx/query-6299e20b3582b152972d6bce74fc3704016fc50a2838cd2394456908434bbd27.json +++ b/core/lib/dal/.sqlx/query-6299e20b3582b152972d6bce74fc3704016fc50a2838cd2394456908434bbd27.json @@ -1,6 +1,10 @@ { "db_name": "PostgreSQL", +<<<<<<<< HEAD:core/lib/dal/.sqlx/query-960843a7c6e7729d359c66305923d3dec0ce7bcc6d930da11a77586cd26aff8e.json + "query": "\n WITH l1_batch AS (\n SELECT COALESCE(\n (\n SELECT miniblocks.l1_batch_number\n FROM miniblocks\n WHERE number = $1\n ),\n (\n SELECT\n (MAX(number) + 1)\n FROM\n l1_batches\n WHERE\n is_sealed\n ),\n (\n SELECT\n MAX(l1_batch_number) + 1\n FROM\n snapshot_recovery\n )\n ) AS number\n )\n \n SELECT\n miniblocks.number,\n l1_batch.number AS \"l1_batch_number!\",\n (miniblocks.l1_tx_count + miniblocks.l2_tx_count) AS \"tx_count!\",\n miniblocks.timestamp,\n miniblocks.l1_gas_price,\n miniblocks.l2_fair_gas_price,\n miniblocks.fair_pubdata_price,\n miniblocks.bootloader_code_hash,\n miniblocks.default_aa_code_hash,\n 
miniblocks.evm_emulator_code_hash,\n miniblocks.virtual_blocks,\n miniblocks.hash,\n miniblocks.protocol_version AS \"protocol_version!\",\n miniblocks.fee_account_address AS \"fee_account_address!\",\n miniblocks.l2_da_validator_address AS \"l2_da_validator_address!\",\n miniblocks.pubdata_type AS \"pubdata_type!\",\n l1_batches.pubdata_limit,\n l1_batches.settlement_layer_type,\n l1_batches.settlement_layer_chain_id\n FROM\n miniblocks\n INNER JOIN l1_batch ON true\n INNER JOIN l1_batches ON l1_batches.number = l1_batch.number\n WHERE\n miniblocks.number BETWEEN $1 AND $2\n ", +======== "query": "\n WITH l1_batch AS (\n SELECT COALESCE(\n (\n SELECT miniblocks.l1_batch_number\n FROM miniblocks\n WHERE number = $1\n ),\n (\n SELECT\n (MAX(number) + 1)\n FROM\n l1_batches\n WHERE\n is_sealed\n ),\n (\n SELECT\n MAX(l1_batch_number) + 1\n FROM\n snapshot_recovery\n )\n ) AS number\n )\n \n SELECT\n miniblocks.number,\n l1_batch.number AS \"l1_batch_number!\",\n (miniblocks.l1_tx_count + miniblocks.l2_tx_count) AS \"tx_count!\",\n miniblocks.timestamp,\n miniblocks.l1_gas_price,\n miniblocks.l2_fair_gas_price,\n miniblocks.fair_pubdata_price,\n miniblocks.bootloader_code_hash,\n miniblocks.default_aa_code_hash,\n miniblocks.evm_emulator_code_hash,\n miniblocks.virtual_blocks,\n miniblocks.hash,\n miniblocks.protocol_version AS \"protocol_version!\",\n miniblocks.fee_account_address AS \"fee_account_address!\",\n miniblocks.l2_da_validator_address,\n miniblocks.l2_da_commitment_scheme,\n miniblocks.pubdata_type AS \"pubdata_type!\",\n l1_batches.pubdata_limit\n FROM\n miniblocks\n INNER JOIN l1_batch ON true\n INNER JOIN l1_batches ON l1_batches.number = l1_batch.number\n WHERE\n miniblocks.number BETWEEN $1 AND $2\n ", +>>>>>>>> d013f677c77e2ca9bf0f58273d26cb8784571985:core/lib/dal/.sqlx/query-6299e20b3582b152972d6bce74fc3704016fc50a2838cd2394456908434bbd27.json "describe": { "columns": [ { @@ -92,6 +96,16 @@ "ordinal": 17, "name": "pubdata_limit", "type_info": "Int8" + }, + { + "ordinal": 17, + "name": "settlement_layer_type", + "type_info": "Text" + }, + { + "ordinal": 18, + "name": "settlement_layer_chain_id", + "type_info": "Int8" } ], "parameters": { @@ -118,8 +132,14 @@ true, true, false, + true, + true, true ] }, +<<<<<<<< HEAD:core/lib/dal/.sqlx/query-960843a7c6e7729d359c66305923d3dec0ce7bcc6d930da11a77586cd26aff8e.json + "hash": "960843a7c6e7729d359c66305923d3dec0ce7bcc6d930da11a77586cd26aff8e" +======== "hash": "6299e20b3582b152972d6bce74fc3704016fc50a2838cd2394456908434bbd27" +>>>>>>>> d013f677c77e2ca9bf0f58273d26cb8784571985:core/lib/dal/.sqlx/query-6299e20b3582b152972d6bce74fc3704016fc50a2838cd2394456908434bbd27.json } diff --git a/core/lib/dal/.sqlx/query-1e60884cad9c962836f62407982acfe31bbefc0695c4621c6bdfdcd772af11eb.json b/core/lib/dal/.sqlx/query-6c44127e6f44214a938d47572d08d3ec8d167708ab5095f21fd56edfbeeacdd2.json similarity index 85% rename from core/lib/dal/.sqlx/query-1e60884cad9c962836f62407982acfe31bbefc0695c4621c6bdfdcd772af11eb.json rename to core/lib/dal/.sqlx/query-6c44127e6f44214a938d47572d08d3ec8d167708ab5095f21fd56edfbeeacdd2.json index 27893f6a93e1..54483257d8e5 100644 --- a/core/lib/dal/.sqlx/query-1e60884cad9c962836f62407982acfe31bbefc0695c4621c6bdfdcd772af11eb.json +++ b/core/lib/dal/.sqlx/query-6c44127e6f44214a938d47572d08d3ec8d167708ab5095f21fd56edfbeeacdd2.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n 
l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n number = 0\n OR eth_commit_tx_id IS NOT NULL\n AND commitment IS NOT NULL\n ORDER BY\n number DESC\n LIMIT\n 1\n ", + "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit,\n settlement_layer_chain_id,\n settlement_layer_type\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n number = 0\n OR eth_commit_tx_id IS NOT NULL\n AND commitment IS NOT NULL\n ORDER BY\n number DESC\n LIMIT\n 1\n ", "describe": { "columns": [ { @@ -182,6 +182,16 @@ "ordinal": 35, "name": "pubdata_limit", "type_info": "Int8" + }, + { + "ordinal": 36, + "name": "settlement_layer_chain_id", + "type_info": "Int8" + }, + { + "ordinal": 37, + "name": "settlement_layer_type", + "type_info": "Text" } ], "parameters": { @@ -223,8 +233,10 @@ false, false, false, + true, + true, true ] }, - "hash": "1e60884cad9c962836f62407982acfe31bbefc0695c4621c6bdfdcd772af11eb" + "hash": "6c44127e6f44214a938d47572d08d3ec8d167708ab5095f21fd56edfbeeacdd2" } diff --git a/core/lib/dal/.sqlx/query-6f473a42a0a3fe32ca38b754b28ae395797805af58a023322b275c55a4a7d568.json b/core/lib/dal/.sqlx/query-6f473a42a0a3fe32ca38b754b28ae395797805af58a023322b275c55a4a7d568.json new file mode 100644 index 000000000000..509c19c7817c --- /dev/null +++ b/core/lib/dal/.sqlx/query-6f473a42a0a3fe32ca38b754b28ae395797805af58a023322b275c55a4a7d568.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n aggregation_root\n FROM\n l1_batches\n WHERE\n number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "aggregation_root", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + true + ] + }, + "hash": "6f473a42a0a3fe32ca38b754b28ae395797805af58a023322b275c55a4a7d568" +} diff --git a/core/lib/dal/.sqlx/query-fbe986e7094d94bad095511a6f1ed14379e9a8fec03d36514815fd94ab0d13c8.json 
b/core/lib/dal/.sqlx/query-78272e3b131e1ca2042095dc6ab5cff65c1524bf34dc322f5d1fa14c453ea5c4.json similarity index 77% rename from core/lib/dal/.sqlx/query-fbe986e7094d94bad095511a6f1ed14379e9a8fec03d36514815fd94ab0d13c8.json rename to core/lib/dal/.sqlx/query-78272e3b131e1ca2042095dc6ab5cff65c1524bf34dc322f5d1fa14c453ea5c4.json index cb9235eab58f..e03a86f66bdf 100644 --- a/core/lib/dal/.sqlx/query-fbe986e7094d94bad095511a6f1ed14379e9a8fec03d36514815fd94ab0d13c8.json +++ b/core/lib/dal/.sqlx/query-78272e3b131e1ca2042095dc6ab5cff65c1524bf34dc322f5d1fa14c453ea5c4.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n l1_batches.timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n l1_batches.bootloader_code_hash,\n l1_batches.default_aa_code_hash,\n l1_batches.evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n JOIN protocol_versions ON protocol_versions.id = l1_batches.protocol_version\n WHERE\n eth_commit_tx_id IS NULL\n AND number != 0\n AND protocol_versions.bootloader_code_hash = $1\n AND protocol_versions.default_account_code_hash = $2\n AND commitment IS NOT NULL\n AND (\n protocol_versions.id = $3\n OR protocol_versions.upgrade_tx_hash IS NULL\n )\n AND events_queue_commitment IS NOT NULL\n AND bootloader_initial_content_commitment IS NOT NULL\n AND (\n data_availability.inclusion_data IS NOT NULL\n OR $4 IS FALSE\n )\n AND (\n final_precommit_eth_tx_id IS NOT NULL\n OR $5 IS FALSE\n )\n ORDER BY\n number\n LIMIT\n $6\n ", + "query": "\n SELECT\n number,\n l1_batches.timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n l1_batches.bootloader_code_hash,\n l1_batches.default_aa_code_hash,\n l1_batches.evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit,\n settlement_layer_chain_id,\n settlement_layer_type\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n JOIN protocol_versions ON protocol_versions.id = l1_batches.protocol_version\n WHERE\n eth_commit_tx_id IS NULL\n AND number != 0\n AND protocol_versions.bootloader_code_hash = $1\n AND protocol_versions.default_account_code_hash = 
$2\n AND commitment IS NOT NULL\n AND (\n protocol_versions.id = $3\n OR protocol_versions.upgrade_tx_hash IS NULL\n )\n AND events_queue_commitment IS NOT NULL\n AND bootloader_initial_content_commitment IS NOT NULL\n AND (\n data_availability.inclusion_data IS NOT NULL\n OR $4 IS FALSE\n )\n AND (\n final_precommit_eth_tx_id IS NOT NULL\n OR $5 IS FALSE\n )\n ORDER BY\n number\n LIMIT\n $6\n ", "describe": { "columns": [ { @@ -182,6 +182,16 @@ "ordinal": 35, "name": "pubdata_limit", "type_info": "Int8" + }, + { + "ordinal": 36, + "name": "settlement_layer_chain_id", + "type_info": "Int8" + }, + { + "ordinal": 37, + "name": "settlement_layer_type", + "type_info": "Text" } ], "parameters": { @@ -230,8 +240,10 @@ false, false, false, + true, + true, true ] }, - "hash": "fbe986e7094d94bad095511a6f1ed14379e9a8fec03d36514815fd94ab0d13c8" + "hash": "78272e3b131e1ca2042095dc6ab5cff65c1524bf34dc322f5d1fa14c453ea5c4" } diff --git a/core/lib/dal/.sqlx/query-d56d12ba1802fe09e9255717ad488d251b95974b504653ebdb8c4a567e1ccdb9.json b/core/lib/dal/.sqlx/query-906826202492b1665414a7d7a730a7d19d4ae976f16ff10ee157b805c3b6d013.json similarity index 69% rename from core/lib/dal/.sqlx/query-d56d12ba1802fe09e9255717ad488d251b95974b504653ebdb8c4a567e1ccdb9.json rename to core/lib/dal/.sqlx/query-906826202492b1665414a7d7a730a7d19d4ae976f16ff10ee157b805c3b6d013.json index 04ad39638a76..88cacac9fe58 100644 --- a/core/lib/dal/.sqlx/query-d56d12ba1802fe09e9255717ad488d251b95974b504653ebdb8c4a567e1ccdb9.json +++ b/core/lib/dal/.sqlx/query-906826202492b1665414a7d7a730a7d19d4ae976f16ff10ee157b805c3b6d013.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n is_sealed,\n timestamp,\n protocol_version,\n fee_address,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit\n FROM\n l1_batches\n ORDER BY\n number DESC\n LIMIT\n 1\n ", + "query": "\n SELECT\n number,\n is_sealed,\n timestamp,\n protocol_version,\n fee_address,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit,\n settlement_layer_type,\n settlement_layer_chain_id\n \n FROM\n l1_batches\n ORDER BY\n number DESC\n LIMIT\n 1\n ", "describe": { "columns": [ { @@ -47,6 +47,16 @@ "ordinal": 8, "name": "pubdata_limit", "type_info": "Int8" + }, + { + "ordinal": 9, + "name": "settlement_layer_type", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "settlement_layer_chain_id", + "type_info": "Int8" } ], "parameters": { @@ -61,8 +71,10 @@ false, false, false, + true, + true, true ] }, - "hash": "d56d12ba1802fe09e9255717ad488d251b95974b504653ebdb8c4a567e1ccdb9" + "hash": "906826202492b1665414a7d7a730a7d19d4ae976f16ff10ee157b805c3b6d013" } diff --git a/core/lib/dal/.sqlx/query-960843a7c6e7729d359c66305923d3dec0ce7bcc6d930da11a77586cd26aff8e.json b/core/lib/dal/.sqlx/query-960843a7c6e7729d359c66305923d3dec0ce7bcc6d930da11a77586cd26aff8e.json new file mode 100644 index 000000000000..2fab6fb1d9f0 --- /dev/null +++ b/core/lib/dal/.sqlx/query-960843a7c6e7729d359c66305923d3dec0ce7bcc6d930da11a77586cd26aff8e.json @@ -0,0 +1,145 @@ +{ + "db_name": "PostgreSQL", +<<<<<<<< HEAD:core/lib/dal/.sqlx/query-960843a7c6e7729d359c66305923d3dec0ce7bcc6d930da11a77586cd26aff8e.json + "query": "\n WITH l1_batch AS (\n SELECT COALESCE(\n (\n SELECT miniblocks.l1_batch_number\n FROM miniblocks\n WHERE number = $1\n ),\n (\n SELECT\n (MAX(number) + 1)\n FROM\n l1_batches\n WHERE\n is_sealed\n ),\n (\n SELECT\n MAX(l1_batch_number) + 1\n FROM\n snapshot_recovery\n )\n ) AS number\n )\n \n SELECT\n miniblocks.number,\n 
l1_batch.number AS \"l1_batch_number!\",\n (miniblocks.l1_tx_count + miniblocks.l2_tx_count) AS \"tx_count!\",\n miniblocks.timestamp,\n miniblocks.l1_gas_price,\n miniblocks.l2_fair_gas_price,\n miniblocks.fair_pubdata_price,\n miniblocks.bootloader_code_hash,\n miniblocks.default_aa_code_hash,\n miniblocks.evm_emulator_code_hash,\n miniblocks.virtual_blocks,\n miniblocks.hash,\n miniblocks.protocol_version AS \"protocol_version!\",\n miniblocks.fee_account_address AS \"fee_account_address!\",\n miniblocks.l2_da_validator_address AS \"l2_da_validator_address!\",\n miniblocks.pubdata_type AS \"pubdata_type!\",\n l1_batches.pubdata_limit,\n l1_batches.settlement_layer_type,\n l1_batches.settlement_layer_chain_id\n FROM\n miniblocks\n INNER JOIN l1_batch ON true\n INNER JOIN l1_batches ON l1_batches.number = l1_batch.number\n WHERE\n miniblocks.number BETWEEN $1 AND $2\n ", +======== + "query": "\n WITH l1_batch AS (\n SELECT COALESCE(\n (\n SELECT miniblocks.l1_batch_number\n FROM miniblocks\n WHERE number = $1\n ),\n (\n SELECT\n (MAX(number) + 1)\n FROM\n l1_batches\n WHERE\n is_sealed\n ),\n (\n SELECT\n MAX(l1_batch_number) + 1\n FROM\n snapshot_recovery\n )\n ) AS number\n )\n \n SELECT\n miniblocks.number,\n l1_batch.number AS \"l1_batch_number!\",\n (miniblocks.l1_tx_count + miniblocks.l2_tx_count) AS \"tx_count!\",\n miniblocks.timestamp,\n miniblocks.l1_gas_price,\n miniblocks.l2_fair_gas_price,\n miniblocks.fair_pubdata_price,\n miniblocks.bootloader_code_hash,\n miniblocks.default_aa_code_hash,\n miniblocks.evm_emulator_code_hash,\n miniblocks.virtual_blocks,\n miniblocks.hash,\n miniblocks.protocol_version AS \"protocol_version!\",\n miniblocks.fee_account_address AS \"fee_account_address!\",\n miniblocks.l2_da_validator_address,\n miniblocks.l2_da_commitment_scheme,\n miniblocks.pubdata_type AS \"pubdata_type!\",\n l1_batches.pubdata_limit\n FROM\n miniblocks\n INNER JOIN l1_batch ON true\n INNER JOIN l1_batches ON l1_batches.number = l1_batch.number\n WHERE\n miniblocks.number BETWEEN $1 AND $2\n ", +>>>>>>>> d013f677c77e2ca9bf0f58273d26cb8784571985:core/lib/dal/.sqlx/query-6299e20b3582b152972d6bce74fc3704016fc50a2838cd2394456908434bbd27.json + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "l1_batch_number!", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "tx_count!", + "type_info": "Int4" + }, + { + "ordinal": 3, + "name": "timestamp", + "type_info": "Int8" + }, + { + "ordinal": 4, + "name": "l1_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 5, + "name": "l2_fair_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 6, + "name": "fair_pubdata_price", + "type_info": "Int8" + }, + { + "ordinal": 7, + "name": "bootloader_code_hash", + "type_info": "Bytea" + }, + { + "ordinal": 8, + "name": "default_aa_code_hash", + "type_info": "Bytea" + }, + { + "ordinal": 9, + "name": "evm_emulator_code_hash", + "type_info": "Bytea" + }, + { + "ordinal": 10, + "name": "virtual_blocks", + "type_info": "Int8" + }, + { + "ordinal": 11, + "name": "hash", + "type_info": "Bytea" + }, + { + "ordinal": 12, + "name": "protocol_version!", + "type_info": "Int4" + }, + { + "ordinal": 13, + "name": "fee_account_address!", + "type_info": "Bytea" + }, + { + "ordinal": 14, + "name": "l2_da_validator_address", + "type_info": "Bytea" + }, + { + "ordinal": 15, + "name": "l2_da_commitment_scheme", + "type_info": "Int4" + }, + { + "ordinal": 16, + "name": "pubdata_type!", + "type_info": "Text" + }, + { + "ordinal": 
17, + "name": "pubdata_limit", + "type_info": "Int8" + }, + { + "ordinal": 17, + "name": "settlement_layer_type", + "type_info": "Text" + }, + { + "ordinal": 18, + "name": "settlement_layer_chain_id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + }, + "nullable": [ + false, + null, + null, + false, + false, + false, + true, + true, + true, + true, + false, + false, + true, + false, + true, + true, + false, + true, + true, + true + ] + }, +<<<<<<<< HEAD:core/lib/dal/.sqlx/query-960843a7c6e7729d359c66305923d3dec0ce7bcc6d930da11a77586cd26aff8e.json + "hash": "960843a7c6e7729d359c66305923d3dec0ce7bcc6d930da11a77586cd26aff8e" +======== + "hash": "6299e20b3582b152972d6bce74fc3704016fc50a2838cd2394456908434bbd27" +>>>>>>>> d013f677c77e2ca9bf0f58273d26cb8784571985:core/lib/dal/.sqlx/query-6299e20b3582b152972d6bce74fc3704016fc50a2838cd2394456908434bbd27.json +} diff --git a/core/lib/dal/.sqlx/query-3dd9bd06b456b1955e5343fbf1722412eacd3f8c9ac6b3a76bd4bf5badf0714c.json b/core/lib/dal/.sqlx/query-9afd479a8f47efc12e06b786d13e527645e13d35c0ec3b9cab9badb9440bacc0.json similarity index 52% rename from core/lib/dal/.sqlx/query-3dd9bd06b456b1955e5343fbf1722412eacd3f8c9ac6b3a76bd4bf5badf0714c.json rename to core/lib/dal/.sqlx/query-9afd479a8f47efc12e06b786d13e527645e13d35c0ec3b9cab9badb9440bacc0.json index e252ea3218d1..65c752dcf7ff 100644 --- a/core/lib/dal/.sqlx/query-3dd9bd06b456b1955e5343fbf1722412eacd3f8c9ac6b3a76bd4bf5badf0714c.json +++ b/core/lib/dal/.sqlx/query-9afd479a8f47efc12e06b786d13e527645e13d35c0ec3b9cab9badb9440bacc0.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n l1_batches (\n number,\n timestamp,\n protocol_version,\n fee_address,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n initial_bootloader_heap_content,\n used_contract_hashes,\n created_at,\n updated_at,\n is_sealed\n )\n VALUES\n (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n 0,\n 0,\n ''::bytea,\n '{}'::bytea [],\n '{}'::jsonb,\n '{}'::jsonb,\n NOW(),\n NOW(),\n FALSE\n )\n ", + "query": "\n INSERT INTO\n l1_batches (\n number,\n timestamp,\n protocol_version,\n fee_address,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n initial_bootloader_heap_content,\n used_contract_hashes,\n created_at,\n updated_at,\n is_sealed,\n settlement_layer_type,\n settlement_layer_chain_id\n )\n VALUES\n (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n 0,\n 0,\n ''::bytea,\n '{}'::bytea [],\n '{}'::jsonb,\n '{}'::jsonb,\n NOW(),\n NOW(),\n FALSE,\n $9,\n $10\n )\n ", "describe": { "columns": [], "parameters": { @@ -12,10 +12,12 @@ "Int8", "Int8", "Int8", + "Int8", + "Text", "Int8" ] }, "nullable": [] }, - "hash": "3dd9bd06b456b1955e5343fbf1722412eacd3f8c9ac6b3a76bd4bf5badf0714c" + "hash": "9afd479a8f47efc12e06b786d13e527645e13d35c0ec3b9cab9badb9440bacc0" } diff --git a/core/lib/dal/.sqlx/query-4c000edb672f90761a73394aea02b3e3f4920ea7e3f68d7ae9d4892cc679e5eb.json b/core/lib/dal/.sqlx/query-a0ef8d1ce5e631a214b9c286e0c4707531e95e15c30e617e30e4a72ceeac50ad.json similarity index 55% rename from core/lib/dal/.sqlx/query-4c000edb672f90761a73394aea02b3e3f4920ea7e3f68d7ae9d4892cc679e5eb.json rename to core/lib/dal/.sqlx/query-a0ef8d1ce5e631a214b9c286e0c4707531e95e15c30e617e30e4a72ceeac50ad.json index a4e273c09c08..366314c724d5 100644 --- 
a/core/lib/dal/.sqlx/query-4c000edb672f90761a73394aea02b3e3f4920ea7e3f68d7ae9d4892cc679e5eb.json +++ b/core/lib/dal/.sqlx/query-a0ef8d1ce5e631a214b9c286e0c4707531e95e15c30e617e30e4a72ceeac50ad.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n protocol_version,\n fee_address,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit\n FROM (\n SELECT\n number,\n timestamp,\n protocol_version,\n fee_address,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit,\n is_sealed\n FROM l1_batches\n ORDER BY number DESC\n LIMIT 1\n ) AS u\n WHERE NOT is_sealed\n ", + "query": "\n SELECT\n number,\n timestamp,\n protocol_version,\n fee_address,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit,\n settlement_layer_type,\n settlement_layer_chain_id\n FROM (\n SELECT\n number,\n timestamp,\n protocol_version,\n fee_address,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit,\n is_sealed,\n settlement_layer_type,\n settlement_layer_chain_id\n FROM l1_batches\n ORDER BY number DESC\n LIMIT 1\n ) AS u\n WHERE NOT is_sealed\n ", "describe": { "columns": [ { @@ -42,6 +42,16 @@ "ordinal": 7, "name": "pubdata_limit", "type_info": "Int8" + }, + { + "ordinal": 8, + "name": "settlement_layer_type", + "type_info": "Text" + }, + { + "ordinal": 9, + "name": "settlement_layer_chain_id", + "type_info": "Int8" } ], "parameters": { @@ -55,8 +65,10 @@ false, false, false, + true, + true, true ] }, - "hash": "4c000edb672f90761a73394aea02b3e3f4920ea7e3f68d7ae9d4892cc679e5eb" + "hash": "a0ef8d1ce5e631a214b9c286e0c4707531e95e15c30e617e30e4a72ceeac50ad" } diff --git a/core/lib/dal/.sqlx/query-3a1bec48e03d4d898725187b17a125d08ff2a48c26b1144e88c764aefaf9cecf.json b/core/lib/dal/.sqlx/query-a751fa4d363a722ff8f4fb0992d0e06bde23af1df22a23d9a84cd1adb20fb247.json similarity index 71% rename from core/lib/dal/.sqlx/query-3a1bec48e03d4d898725187b17a125d08ff2a48c26b1144e88c764aefaf9cecf.json rename to core/lib/dal/.sqlx/query-a751fa4d363a722ff8f4fb0992d0e06bde23af1df22a23d9a84cd1adb20fb247.json index 8095040556f6..627a064bf8b6 100644 --- a/core/lib/dal/.sqlx/query-3a1bec48e03d4d898725187b17a125d08ff2a48c26b1144e88c764aefaf9cecf.json +++ b/core/lib/dal/.sqlx/query-a751fa4d363a722ff8f4fb0992d0e06bde23af1df22a23d9a84cd1adb20fb247.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n is_sealed,\n timestamp,\n protocol_version,\n fee_address,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit\n FROM\n l1_batches\n WHERE number = $1\n ", + "query": "\n SELECT\n number,\n is_sealed,\n timestamp,\n protocol_version,\n fee_address,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit,\n settlement_layer_type,\n settlement_layer_chain_id\n \n FROM\n l1_batches\n WHERE number = $1\n ", "describe": { "columns": [ { @@ -47,6 +47,16 @@ "ordinal": 8, "name": "pubdata_limit", "type_info": "Int8" + }, + { + "ordinal": 9, + "name": "settlement_layer_type", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "settlement_layer_chain_id", + "type_info": "Int8" } ], "parameters": { @@ -63,8 +73,10 @@ false, false, false, + true, + true, true ] }, - "hash": "3a1bec48e03d4d898725187b17a125d08ff2a48c26b1144e88c764aefaf9cecf" + "hash": "a751fa4d363a722ff8f4fb0992d0e06bde23af1df22a23d9a84cd1adb20fb247" } diff --git a/core/lib/dal/.sqlx/query-05bce0189aa53b121d31cc937ba459e07081269d1714411234271d9cdf5889f3.json 
b/core/lib/dal/.sqlx/query-ce266faf5b6668265eae7922ad1c5301b1bf9c0aaf75da486d2432b14bb8bc3f.json similarity index 87% rename from core/lib/dal/.sqlx/query-05bce0189aa53b121d31cc937ba459e07081269d1714411234271d9cdf5889f3.json rename to core/lib/dal/.sqlx/query-ce266faf5b6668265eae7922ad1c5301b1bf9c0aaf75da486d2432b14bb8bc3f.json index ab78135169ba..99bc8b6bafcf 100644 --- a/core/lib/dal/.sqlx/query-05bce0189aa53b121d31cc937ba459e07081269d1714411234271d9cdf5889f3.json +++ b/core/lib/dal/.sqlx/query-ce266faf5b6668265eae7922ad1c5301b1bf9c0aaf75da486d2432b14bb8bc3f.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n system_logs,\n compressed_state_diffs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n is_sealed\n AND number = $1\n ", + "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n system_logs,\n compressed_state_diffs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit,\n settlement_layer_type,\n settlement_layer_chain_id\n \n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n is_sealed\n AND number = $1\n ", "describe": { "columns": [ { @@ -182,6 +182,16 @@ "ordinal": 35, "name": "pubdata_limit", "type_info": "Int8" + }, + { + "ordinal": 36, + "name": "settlement_layer_type", + "type_info": "Text" + }, + { + "ordinal": 37, + "name": "settlement_layer_chain_id", + "type_info": "Int8" } ], "parameters": { @@ -225,8 +235,10 @@ false, false, false, + true, + true, true ] }, - "hash": "05bce0189aa53b121d31cc937ba459e07081269d1714411234271d9cdf5889f3" + "hash": "ce266faf5b6668265eae7922ad1c5301b1bf9c0aaf75da486d2432b14bb8bc3f" } diff --git a/core/lib/dal/.sqlx/query-da681d57590f2bf4ff6f94c8349249fef9782f45139440d99ac55e9acf1173a7.json b/core/lib/dal/.sqlx/query-da681d57590f2bf4ff6f94c8349249fef9782f45139440d99ac55e9acf1173a7.json new file mode 100644 index 000000000000..cc8721a0e3f2 --- /dev/null +++ 
b/core/lib/dal/.sqlx/query-da681d57590f2bf4ff6f94c8349249fef9782f45139440d99ac55e9acf1173a7.json @@ -0,0 +1,29 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n settlement_layer_type,\n settlement_layer_chain_id\n FROM\n l1_batches\n WHERE\n ($1 AND is_sealed = false) OR (number = $2)\n ORDER BY number DESC\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "settlement_layer_type", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "settlement_layer_chain_id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Bool", + "Int8" + ] + }, + "nullable": [ + true, + true + ] + }, + "hash": "da681d57590f2bf4ff6f94c8349249fef9782f45139440d99ac55e9acf1173a7" +} diff --git a/core/lib/dal/.sqlx/query-48b472c184997167cd1b741d55054f2f22e151d269e94dfbecab60c9dbcac68e.json b/core/lib/dal/.sqlx/query-f3587c99a3c9ab2c71854eae6cf775d9e317852258841b467ff3b4c1de88300d.json similarity index 85% rename from core/lib/dal/.sqlx/query-48b472c184997167cd1b741d55054f2f22e151d269e94dfbecab60c9dbcac68e.json rename to core/lib/dal/.sqlx/query-f3587c99a3c9ab2c71854eae6cf775d9e317852258841b467ff3b4c1de88300d.json index f2daa9ff5cc2..13b74ab90064 100644 --- a/core/lib/dal/.sqlx/query-48b472c184997167cd1b741d55054f2f22e151d269e94dfbecab60c9dbcac68e.json +++ b/core/lib/dal/.sqlx/query-f3587c99a3c9ab2c71854eae6cf775d9e317852258841b467ff3b4c1de88300d.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_commit_tx_id IS NOT NULL\n AND eth_prove_tx_id IS NULL\n ORDER BY\n number\n LIMIT\n $1\n ", + "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit,\n settlement_layer_chain_id,\n settlement_layer_type\n \n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_commit_tx_id IS 
NOT NULL\n AND eth_prove_tx_id IS NULL\n ORDER BY\n number\n LIMIT\n $1\n ", "describe": { "columns": [ { @@ -182,6 +182,16 @@ "ordinal": 35, "name": "pubdata_limit", "type_info": "Int8" + }, + { + "ordinal": 36, + "name": "settlement_layer_chain_id", + "type_info": "Int8" + }, + { + "ordinal": 37, + "name": "settlement_layer_type", + "type_info": "Text" } ], "parameters": { @@ -225,8 +235,10 @@ false, false, false, + true, + true, true ] }, - "hash": "48b472c184997167cd1b741d55054f2f22e151d269e94dfbecab60c9dbcac68e" + "hash": "f3587c99a3c9ab2c71854eae6cf775d9e317852258841b467ff3b4c1de88300d" } diff --git a/core/lib/dal/.sqlx/query-62b20688d2a7175c1a277626cc9795fed317746a0828cb09c7d2a9b0f7e934f0.json b/core/lib/dal/.sqlx/query-fa8599d4d024b8c9abca3cf0305aebab6c4a1b1884d5c292414562dbee9a95ba.json similarity index 81% rename from core/lib/dal/.sqlx/query-62b20688d2a7175c1a277626cc9795fed317746a0828cb09c7d2a9b0f7e934f0.json rename to core/lib/dal/.sqlx/query-fa8599d4d024b8c9abca3cf0305aebab6c4a1b1884d5c292414562dbee9a95ba.json index 1ee67702987d..f2519ba5d3d9 100644 --- a/core/lib/dal/.sqlx/query-62b20688d2a7175c1a277626cc9795fed317746a0828cb09c7d2a9b0f7e934f0.json +++ b/core/lib/dal/.sqlx/query-fa8599d4d024b8c9abca3cf0305aebab6c4a1b1884d5c292414562dbee9a95ba.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n l1_batches.timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n l1_batches.bootloader_code_hash,\n l1_batches.default_aa_code_hash,\n l1_batches.evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n JOIN protocol_versions ON protocol_versions.id = l1_batches.protocol_version\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_commit_tx_id IS NULL\n AND number != 0\n AND protocol_versions.bootloader_code_hash = $1\n AND protocol_versions.default_account_code_hash = $2\n AND commitment IS NOT NULL\n AND (\n protocol_versions.id = $3\n OR protocol_versions.upgrade_tx_hash IS NULL\n )\n ORDER BY\n number\n LIMIT\n $4\n ", + "query": "\n SELECT\n number,\n l1_batches.timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n l1_batches.bootloader_code_hash,\n l1_batches.default_aa_code_hash,\n l1_batches.evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n pubdata_limit,\n 
settlement_layer_chain_id,\n settlement_layer_type\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n JOIN protocol_versions ON protocol_versions.id = l1_batches.protocol_version\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_commit_tx_id IS NULL\n AND number != 0\n AND protocol_versions.bootloader_code_hash = $1\n AND protocol_versions.default_account_code_hash = $2\n AND commitment IS NOT NULL\n AND (\n protocol_versions.id = $3\n OR protocol_versions.upgrade_tx_hash IS NULL\n )\n ORDER BY\n number\n LIMIT\n $4\n ", "describe": { "columns": [ { @@ -182,6 +182,16 @@ "ordinal": 35, "name": "pubdata_limit", "type_info": "Int8" + }, + { + "ordinal": 36, + "name": "settlement_layer_chain_id", + "type_info": "Int8" + }, + { + "ordinal": 37, + "name": "settlement_layer_type", + "type_info": "Text" } ], "parameters": { @@ -228,8 +238,10 @@ false, false, false, + true, + true, true ] }, - "hash": "62b20688d2a7175c1a277626cc9795fed317746a0828cb09c7d2a9b0f7e934f0" + "hash": "fa8599d4d024b8c9abca3cf0305aebab6c4a1b1884d5c292414562dbee9a95ba" } diff --git a/core/lib/dal/migrations/20250904115232_settlement_layer_for_batch.down.sql b/core/lib/dal/migrations/20250904115232_settlement_layer_for_batch.down.sql new file mode 100644 index 000000000000..939e6b5e5b1e --- /dev/null +++ b/core/lib/dal/migrations/20250904115232_settlement_layer_for_batch.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE l1_batches DROP COLUMN settlement_layer_type; +ALTER TABLE l1_batches DROP COLUMN settlement_layer_chain_id; diff --git a/core/lib/dal/migrations/20250904115232_settlement_layer_for_batch.up.sql b/core/lib/dal/migrations/20250904115232_settlement_layer_for_batch.up.sql new file mode 100644 index 000000000000..4896ede45a66 --- /dev/null +++ b/core/lib/dal/migrations/20250904115232_settlement_layer_for_batch.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE l1_batches ADD COLUMN settlement_layer_type TEXT DEFAULT 'L1'; +ALTER TABLE l1_batches ADD COLUMN settlement_layer_chain_id BIGINT; diff --git a/core/lib/dal/src/blocks_dal.rs b/core/lib/dal/src/blocks_dal.rs index 60cacc5f5900..b22530de1cb0 100644 --- a/core/lib/dal/src/blocks_dal.rs +++ b/core/lib/dal/src/blocks_dal.rs @@ -34,8 +34,9 @@ use crate::{ models::{ parse_protocol_version, storage_block::{ - CommonStorageL1BatchHeader, StorageL1Batch, StorageL1BatchHeader, StorageL2BlockHeader, - StoragePubdataParams, UnsealedStorageL1Batch, + from_settlement_layer, CommonStorageL1BatchHeader, StorageL1Batch, + StorageL1BatchHeader, StorageL2BlockHeader, StoragePubdataParams, + UnsealedStorageL1Batch, }, storage_eth_tx::L2BlockWithEthTx, storage_event::StorageL2ToL1Log, @@ -49,6 +50,10 @@ pub struct BlocksDal<'a, 'c> { pub(crate) storage: &'a mut Connection<'c, Core>, } +pub struct L2ToL1Messages { + l2_to_l1_messages: Vec>, +} // + #[derive(Debug, Clone, Default)] pub struct TxForPrecommit { pub l1_batch_number: Option, @@ -176,7 +181,10 @@ impl BlocksDal<'_, '_> { l1_gas_price, l2_fair_gas_price, fair_pubdata_price, - pubdata_limit + pubdata_limit, + settlement_layer_type, + settlement_layer_chain_id + FROM l1_batches ORDER BY @@ -213,7 +221,10 @@ impl BlocksDal<'_, '_> { l1_gas_price, l2_fair_gas_price, fair_pubdata_price, - pubdata_limit + pubdata_limit, + settlement_layer_type, + settlement_layer_chain_id + FROM l1_batches WHERE number = $1 @@ -601,7 +612,10 @@ impl BlocksDal<'_, '_> { l1_gas_price, l2_fair_gas_price, fair_pubdata_price, - pubdata_limit + pubdata_limit, + 
settlement_layer_type, + settlement_layer_chain_id + FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -646,7 +660,9 @@ impl BlocksDal<'_, '_> { l1_gas_price, l2_fair_gas_price, fair_pubdata_price, - pubdata_limit + pubdata_limit, + settlement_layer_type, + settlement_layer_chain_id FROM l1_batches WHERE @@ -992,6 +1008,8 @@ impl BlocksDal<'_, '_> { unsealed_batch_header: UnsealedL1BatchHeader, conn: &mut Connection<'_, Core>, ) -> DalResult<()> { + let (settlement_layer_type, settlement_layer_chain_id) = + from_settlement_layer(&unsealed_batch_header.settlement_layer); sqlx::query!( r#" INSERT INTO @@ -1012,7 +1030,9 @@ impl BlocksDal<'_, '_> { used_contract_hashes, created_at, updated_at, - is_sealed + is_sealed, + settlement_layer_type, + settlement_layer_chain_id ) VALUES ( @@ -1032,7 +1052,9 @@ impl BlocksDal<'_, '_> { '{}'::jsonb, NOW(), NOW(), - FALSE + FALSE, + $9, + $10 ) "#, i64::from(unsealed_batch_header.number.0), @@ -1043,6 +1065,8 @@ impl BlocksDal<'_, '_> { unsealed_batch_header.fee_input.fair_l2_gas_price() as i64, unsealed_batch_header.fee_input.fair_pubdata_price() as i64, unsealed_batch_header.pubdata_limit.map(|l| l as i64), + settlement_layer_type, + settlement_layer_chain_id as i32 ) .instrument("insert_l1_batch") .with_arg("number", &unsealed_batch_header.number) @@ -1303,7 +1327,9 @@ impl BlocksDal<'_, '_> { l1_gas_price, l2_fair_gas_price, fair_pubdata_price, - pubdata_limit + pubdata_limit, + settlement_layer_type, + settlement_layer_chain_id FROM ( SELECT number, @@ -1314,7 +1340,9 @@ impl BlocksDal<'_, '_> { l2_fair_gas_price, fair_pubdata_price, pubdata_limit, - is_sealed + is_sealed, + settlement_layer_type, + settlement_layer_chain_id FROM l1_batches ORDER BY number DESC LIMIT 1 @@ -1755,7 +1783,9 @@ impl BlocksDal<'_, '_> { l1_gas_price, l2_fair_gas_price, fair_pubdata_price, - pubdata_limit + pubdata_limit, + settlement_layer_chain_id, + settlement_layer_type FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -2016,7 +2046,10 @@ impl BlocksDal<'_, '_> { l1_gas_price, l2_fair_gas_price, fair_pubdata_price, - pubdata_limit + pubdata_limit, + settlement_layer_chain_id, + settlement_layer_type + FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -2109,7 +2142,10 @@ impl BlocksDal<'_, '_> { l1_gas_price, l2_fair_gas_price, fair_pubdata_price, - pubdata_limit + pubdata_limit, + settlement_layer_chain_id, + settlement_layer_type + FROM ( SELECT @@ -2193,7 +2229,9 @@ impl BlocksDal<'_, '_> { l1_gas_price, l2_fair_gas_price, fair_pubdata_price, - pubdata_limit + pubdata_limit, + settlement_layer_chain_id, + settlement_layer_type FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -2366,7 +2404,9 @@ impl BlocksDal<'_, '_> { l1_gas_price, l2_fair_gas_price, fair_pubdata_price, - pubdata_limit + pubdata_limit, + settlement_layer_chain_id, + settlement_layer_type FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -2443,7 +2483,9 @@ impl BlocksDal<'_, '_> { l1_gas_price, l2_fair_gas_price, fair_pubdata_price, - pubdata_limit + pubdata_limit, + settlement_layer_chain_id, + settlement_layer_type FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -2534,7 +2576,9 @@ impl BlocksDal<'_, '_> { l1_gas_price, l2_fair_gas_price, fair_pubdata_price, - pubdata_limit + pubdata_limit, + settlement_layer_chain_id, + settlement_layer_type FROM 
l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -3529,6 +3573,58 @@ impl BlocksDal<'_, '_> { Ok(results.into_iter().map(L::from).collect()) } + pub(crate) async fn get_l2_to_l1_messages_for_batch( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> DalResult>> { + let results = sqlx::query_as!( + L2ToL1Messages, + r#" + SELECT + l2_to_l1_messages + FROM + l1_batches + WHERE + number = $1 + "#, + i64::from(l1_batch_number.0) + ) + .instrument("get_l2_to_l1_messages_by_number") + .with_arg("l1_batch_number", &l1_batch_number) + .fetch_all(self.storage) + .await?; + + let messages = results + .into_iter() + .flat_map(|record| record.l2_to_l1_messages) + .collect::>>(); + + Ok(messages) + } // + + pub async fn get_message_root(&mut self, l1_batch_number: L1BatchNumber) -> DalResult { + let row = sqlx::query!( + r#" + SELECT + aggregation_root + FROM + l1_batches + WHERE + number = $1 + "#, + i64::from(l1_batch_number.0) + ) + .instrument("get_aggregation_root") + .with_arg("l1_batch_number", &l1_batch_number) + .fetch_optional(self.storage) + .await?; + + Ok(row + .and_then(|row| row.aggregation_root) + .map(|root| H256::from_slice(&root)) + .unwrap_or_default()) + } + pub async fn has_l2_block_bloom(&mut self, l2_block_number: L2BlockNumber) -> DalResult { let row = sqlx::query!( r#" diff --git a/core/lib/dal/src/blocks_web3_dal.rs b/core/lib/dal/src/blocks_web3_dal.rs index 0010fc9d0b86..8d1d6cc7cf74 100644 --- a/core/lib/dal/src/blocks_web3_dal.rs +++ b/core/lib/dal/src/blocks_web3_dal.rs @@ -8,6 +8,7 @@ use zksync_types::{ debug_flat_call::CallTraceMeta, fee_model::BatchFeeInput, l2_to_l1_log::L2ToL1Log, + settlement::SettlementLayer, web3::{BlockHeader, Bytes}, Bloom, L1BatchNumber, L2BlockNumber, ProtocolVersionId, H160, H256, U256, U64, }; @@ -17,8 +18,8 @@ use crate::{ models::{ bigdecimal_to_u256, parse_protocol_version, storage_block::{ - ResolvedL1BatchForL2Block, StorageBlockDetails, StorageL1BatchDetails, - LEGACY_BLOCK_GAS_LIMIT, + to_settlement_layer, ResolvedL1BatchForL2Block, StorageBlockDetails, + StorageL1BatchDetails, LEGACY_BLOCK_GAS_LIMIT, }, storage_transaction::CallTrace, }, @@ -463,6 +464,40 @@ impl BlocksWeb3Dal<'_, '_> { } } + pub async fn get_expected_settlement_layer( + &mut self, + resolved_l1batch_for_l2block: &ResolvedL1BatchForL2Block, + ) -> DalResult { + let pending = resolved_l1batch_for_l2block.block_l1_batch.is_none(); + let l1_batch = resolved_l1batch_for_l2block + .block_l1_batch + .unwrap_or_default(); + let row = sqlx::query!( + r#" + SELECT + settlement_layer_type, + settlement_layer_chain_id + FROM + l1_batches + WHERE + ($1 AND is_sealed = false) OR (number = $2) + ORDER BY number DESC + LIMIT 1 + "#, + pending, + i64::from(l1_batch.0) + ) + .instrument("get_expected_settlement_layer") + .with_arg("block_number", &l1_batch) + .fetch_one(self.storage) + .await?; + + Ok(to_settlement_layer( + row.settlement_layer_type, + row.settlement_layer_chain_id, + )) + } + pub async fn get_l2_block_hash( &mut self, block_number: L2BlockNumber, @@ -496,6 +531,23 @@ impl BlocksWeb3Dal<'_, '_> { .await } + pub async fn get_l2_to_l1_messages( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> DalResult>> { + self.storage + .blocks_dal() + .get_l2_to_l1_messages_for_batch(l1_batch_number) + .await + } // + + pub async fn get_message_root(&mut self, l1_batch_number: L1BatchNumber) -> DalResult { + self.storage + .blocks_dal() + .get_message_root(l1_batch_number) + .await + } + pub async fn get_l1_batch_number_of_l2_block( 
&mut self, l2_block_number: L2BlockNumber, diff --git a/core/lib/dal/src/consensus/conv.rs b/core/lib/dal/src/consensus/conv.rs index 2bf276735268..069f939a8231 100644 --- a/core/lib/dal/src/consensus/conv.rs +++ b/core/lib/dal/src/consensus/conv.rs @@ -15,7 +15,8 @@ use zksync_types::{ protocol_upgrade::ProtocolUpgradeTxCommonData, transaction_request::PaymasterParams, u256_to_h256, Execute, ExecuteTransactionCommon, InputData, L1BatchNumber, L1TxCommonData, - L2ChainId, L2TxCommonData, Nonce, PriorityOpId, ProtocolVersionId, Transaction, H256, + L2ChainId, L2TxCommonData, Nonce, PriorityOpId, ProtocolVersionId, SLChainId, Transaction, + H256, }; use super::*; @@ -242,6 +243,8 @@ impl ProtoFmt for Payload { .unwrap_or_else(PubdataParams::pre_gateway), pubdata_limit: r.pubdata_limit, interop_roots, + settlement_layer: read_optional_repr(&r.settlement_layer) + .context("settlement_layer")?, }; if this.protocol_version.is_pre_gateway() { anyhow::ensure!( @@ -308,6 +311,7 @@ impl ProtoFmt for Payload { }, pubdata_limit: self.pubdata_limit, interop_roots: self.interop_roots.iter().map(ProtoRepr::build).collect(), + settlement_layer: self.settlement_layer.as_ref().map(ProtoRepr::build), }; match self.protocol_version { v if v >= ProtocolVersionId::Version25 => { @@ -646,3 +650,32 @@ impl proto::PubdataType { } } } + +impl ProtoRepr for proto::SettlementLayer { + type Type = SettlementLayer; + + fn read(&self) -> anyhow::Result { + match *required(&self.settlement_layer_type).context("settlement_layer_type")? { + 0 => Ok(SettlementLayer::L1(SLChainId( + *required(&self.chain_id).context("chain_id")?, + ))), + 1 => Ok(SettlementLayer::Gateway(SLChainId( + *required(&self.chain_id).context("chain_id")?, + ))), + _ => unreachable!(), + } + } + + fn build(this: &Self::Type) -> Self { + match this { + SettlementLayer::L1(chain_id) => Self { + settlement_layer_type: Some(0), + chain_id: Some(chain_id.0), + }, + SettlementLayer::Gateway(chain_id) => Self { + settlement_layer_type: Some(1), + chain_id: Some(chain_id.0), + }, + } + } +} diff --git a/core/lib/dal/src/consensus/mod.rs b/core/lib/dal/src/consensus/mod.rs index 6790fc53b820..3387dbc43b00 100644 --- a/core/lib/dal/src/consensus/mod.rs +++ b/core/lib/dal/src/consensus/mod.rs @@ -4,8 +4,8 @@ use zksync_concurrency::net; use zksync_consensus_engine::Last; use zksync_consensus_roles::{node, validator}; use zksync_types::{ - commitment::PubdataParams, ethabi, Address, InteropRoot, L1BatchNumber, ProtocolVersionId, - Transaction, H256, + commitment::PubdataParams, ethabi, settlement::SettlementLayer, Address, InteropRoot, + L1BatchNumber, ProtocolVersionId, Transaction, H256, }; mod conv; @@ -80,6 +80,7 @@ pub struct Payload { pub pubdata_params: PubdataParams, pub pubdata_limit: Option, pub interop_roots: Vec, + pub settlement_layer: Option, } impl Payload { diff --git a/core/lib/dal/src/consensus/proto/mod.proto b/core/lib/dal/src/consensus/proto/mod.proto index a37068a45949..e05c9826a777 100644 --- a/core/lib/dal/src/consensus/proto/mod.proto +++ b/core/lib/dal/src/consensus/proto/mod.proto @@ -23,6 +23,16 @@ message InteropRoot { repeated bytes sides = 3; // required; Vec } +enum SettlementLayerType { + L1 = 0; + GATEWAY = 1; +} + +message SettlementLayer { + optional uint64 chain_id = 1; // required; u32 + optional SettlementLayerType settlement_layer_type = 2; // required; +} + message Payload { // zksync-era ProtocolVersionId optional uint32 protocol_version = 9; // required; u16 @@ -42,6 +52,7 @@ message Payload { optional PubdataParams 
pubdata_params = 13; // optional optional uint64 pubdata_limit = 14; // required since v29 repeated InteropRoot interop_roots = 15; // optional; set for protocol_version >= 29 + optional SettlementLayer settlement_layer = 16; // optional; set for protocol_version >= 29 } message PubdataParams { diff --git a/core/lib/dal/src/models/mod.rs b/core/lib/dal/src/models/mod.rs index b892a5c279cb..fed94bf40832 100644 --- a/core/lib/dal/src/models/mod.rs +++ b/core/lib/dal/src/models/mod.rs @@ -1,11 +1,10 @@ -pub mod storage_block; - use bigdecimal::{num_bigint::BigUint, BigDecimal}; use zksync_db_connection::error::SqlxContext; use zksync_types::{ProtocolVersionId, U256}; mod call; pub mod storage_base_token_ratio; +pub mod storage_block; pub(crate) mod storage_data_availability; pub mod storage_eth_tx; pub mod storage_event; diff --git a/core/lib/dal/src/models/storage_block.rs b/core/lib/dal/src/models/storage_block.rs index 76b71fc88504..505d4dd469ad 100644 --- a/core/lib/dal/src/models/storage_block.rs +++ b/core/lib/dal/src/models/storage_block.rs @@ -13,6 +13,7 @@ use zksync_types::{ eth_sender::EthTxFinalityStatus, fee_model::BatchFeeInput, l2_to_l1_log::{L2ToL1Log, SystemL2ToL1Log, UserL2ToL1Log}, + settlement::SettlementLayer, Address, Bloom, L1BatchNumber, L2BlockNumber, ProtocolVersionId, SLChainId, H256, }; @@ -63,6 +64,8 @@ pub(crate) struct StorageL1BatchHeader { pub fair_pubdata_price: Option, pub pubdata_limit: Option, + pub settlement_layer_chain_id: Option, + pub settlement_layer_type: Option, } impl StorageL1BatchHeader { @@ -86,6 +89,8 @@ impl StorageL1BatchHeader { self.fair_pubdata_price.map(|p| p as u64), ); + let settlement_layer = + to_settlement_layer(self.settlement_layer_type, self.settlement_layer_chain_id); L1BatchHeader { number: L1BatchNumber(self.number as u32), timestamp: self.timestamp as u64, @@ -111,6 +116,7 @@ impl StorageL1BatchHeader { fee_address: Address::from_slice(&self.fee_address), batch_fee_input, pubdata_limit: self.pubdata_limit.map(|l| l as u64), + settlement_layer, } } } @@ -184,6 +190,8 @@ pub(crate) struct StorageL1Batch { pub fair_pubdata_price: Option, pub pubdata_limit: Option, + pub settlement_layer_chain_id: Option, + pub settlement_layer_type: Option, } impl StorageL1Batch { @@ -206,6 +214,8 @@ impl StorageL1Batch { self.l2_fair_gas_price as u64, self.fair_pubdata_price.map(|p| p as u64), ); + let settlement_layer = + to_settlement_layer(self.settlement_layer_type, self.settlement_layer_chain_id); L1BatchHeader { number: L1BatchNumber(self.number as u32), @@ -232,6 +242,7 @@ impl StorageL1Batch { fee_address: Address::from_slice(&self.fee_address), batch_fee_input, pubdata_limit: self.pubdata_limit.map(|l| l as u64), + settlement_layer, } } } @@ -321,6 +332,8 @@ pub(crate) struct UnsealedStorageL1Batch { pub l2_fair_gas_price: i64, pub fair_pubdata_price: Option, pub pubdata_limit: Option, + pub settlement_layer_chain_id: Option, + pub settlement_layer_type: Option, } impl From for UnsealedL1BatchHeader { @@ -328,6 +341,9 @@ impl From for UnsealedL1BatchHeader { let protocol_version: Option = batch .protocol_version .map(|v| (v as u16).try_into().unwrap()); + let settlement_layer = + to_settlement_layer(batch.settlement_layer_type, batch.settlement_layer_chain_id); + Self { number: L1BatchNumber(batch.number as u32), timestamp: batch.timestamp as u64, @@ -340,6 +356,7 @@ impl From for UnsealedL1BatchHeader { batch.l1_gas_price as u64, ), pubdata_limit: batch.pubdata_limit.map(|l| l as u64), + settlement_layer, } } } @@ -355,6 +372,8 @@ 
pub(crate) struct CommonStorageL1BatchHeader { pub l2_fair_gas_price: i64, pub fair_pubdata_price: Option, pub pubdata_limit: Option, + pub settlement_layer_chain_id: Option, + pub settlement_layer_type: Option, } impl From for CommonL1BatchHeader { @@ -362,6 +381,9 @@ impl From for CommonL1BatchHeader { let protocol_version: Option = batch .protocol_version .map(|v| (v as u16).try_into().unwrap()); + let settlement_layer = + to_settlement_layer(batch.settlement_layer_type, batch.settlement_layer_chain_id); + Self { number: L1BatchNumber(batch.number as u32), is_sealed: batch.is_sealed, @@ -375,6 +397,7 @@ impl From for CommonL1BatchHeader { batch.l1_gas_price as u64, ), pubdata_limit: batch.pubdata_limit.map(|l| l as u64), + settlement_layer, } } } @@ -734,3 +757,25 @@ impl From for PubdataParams { .unwrap() } } + +pub(crate) fn to_settlement_layer( + settlement_layer_type: Option, + settlement_layer_chain_id: Option, +) -> SettlementLayer { + match settlement_layer_type.as_deref() { + Some("L1") => { + SettlementLayer::L1(SLChainId(settlement_layer_chain_id.unwrap_or(29) as u64)) + } + Some("Gateway") => { + SettlementLayer::Gateway(SLChainId(settlement_layer_chain_id.unwrap_or(506) as u64)) + } + _ => SettlementLayer::L1(SLChainId(settlement_layer_chain_id.unwrap_or(19) as u64)), + } +} + +pub(crate) fn from_settlement_layer(settlement_layer: &SettlementLayer) -> (String, i64) { + match settlement_layer { + SettlementLayer::L1(SLChainId(id)) => ("L1".to_string(), *id as i64), + SettlementLayer::Gateway(SLChainId(id)) => ("Gateway".to_string(), *id as i64), + } +} diff --git a/core/lib/dal/src/models/storage_sync.rs b/core/lib/dal/src/models/storage_sync.rs index 1c13909ad105..a81469417ab8 100644 --- a/core/lib/dal/src/models/storage_sync.rs +++ b/core/lib/dal/src/models/storage_sync.rs @@ -5,11 +5,15 @@ use zksync_db_connection::error::SqlxContext; use zksync_types::{ api::en, commitment::{L2DACommitmentScheme, PubdataParams, PubdataType}, - parse_h160, parse_h256, parse_h256_opt, Address, InteropRoot, L1BatchNumber, L2BlockNumber, - ProtocolVersionId, Transaction, H256, + parse_h160, parse_h256, parse_h256_opt, + settlement::SettlementLayer, + Address, InteropRoot, L1BatchNumber, L2BlockNumber, ProtocolVersionId, Transaction, H256, }; -use crate::{consensus_dal::Payload, models::parse_protocol_version}; +use crate::{ + consensus_dal::Payload, + models::{parse_protocol_version, storage_block::to_settlement_layer}, +}; #[derive(Debug, Clone, sqlx::FromRow)] pub(crate) struct StorageSyncBlock { @@ -33,6 +37,8 @@ pub(crate) struct StorageSyncBlock { pub l2_da_commitment_scheme: Option, pub pubdata_type: String, pub pubdata_limit: Option, + pub settlement_layer_type: Option, + pub settlement_layer_chain_id: Option, } pub(crate) struct SyncBlock { @@ -51,6 +57,7 @@ pub(crate) struct SyncBlock { pub pubdata_params: PubdataParams, pub pubdata_limit: Option, pub interop_roots: Vec, + pub settlement_layer: SettlementLayer, } impl SyncBlock { @@ -122,6 +129,10 @@ impl SyncBlock { .decode_column("pubdata_params")?, pubdata_limit: block.pubdata_limit.map(|l| l as u64), interop_roots, + settlement_layer: to_settlement_layer( + block.settlement_layer_type, + block.settlement_layer_chain_id, + ), }) } } @@ -145,6 +156,7 @@ impl SyncBlock { pubdata_params: Some(self.pubdata_params), pubdata_limit: self.pubdata_limit, interop_roots: Some(self.interop_roots), + settlement_layer: Some(self.settlement_layer), } } @@ -164,6 +176,7 @@ impl SyncBlock { pubdata_params: self.pubdata_params, pubdata_limit: 
self.pubdata_limit, interop_roots: self.interop_roots, + settlement_layer: Some(self.settlement_layer), } } } diff --git a/core/lib/dal/src/storage_logs_dal.rs b/core/lib/dal/src/storage_logs_dal.rs index 7822bfbc7e3f..5f608ebd0f30 100644 --- a/core/lib/dal/src/storage_logs_dal.rs +++ b/core/lib/dal/src/storage_logs_dal.rs @@ -826,7 +826,8 @@ impl StorageLogsDal<'_, '_> { mod tests { use zksync_contracts::BaseSystemContractsHashes; use zksync_types::{ - block::L1BatchHeader, AccountTreeId, ProtocolVersion, ProtocolVersionId, StorageKey, + block::L1BatchHeader, settlement::SettlementLayer, AccountTreeId, ProtocolVersion, + ProtocolVersionId, StorageKey, }; use super::*; @@ -838,6 +839,7 @@ mod tests { 0, BaseSystemContractsHashes::default(), ProtocolVersionId::default(), + SettlementLayer::for_tests(), ); conn.blocks_dal() .insert_mock_l1_batch(&header) diff --git a/core/lib/dal/src/storage_web3_dal.rs b/core/lib/dal/src/storage_web3_dal.rs index c4c93e0ab4b7..4c5d9e826d41 100644 --- a/core/lib/dal/src/storage_web3_dal.rs +++ b/core/lib/dal/src/storage_web3_dal.rs @@ -309,7 +309,9 @@ impl StorageWeb3Dal<'_, '_> { #[cfg(test)] mod tests { - use zksync_types::{block::L1BatchHeader, ProtocolVersion, ProtocolVersionId}; + use zksync_types::{ + block::L1BatchHeader, settlement::SettlementLayer, ProtocolVersion, ProtocolVersionId, + }; use super::*; use crate::{ @@ -334,6 +336,7 @@ mod tests { 0, Default::default(), ProtocolVersionId::latest(), + SettlementLayer::for_tests(), ); conn.blocks_dal() .insert_mock_l1_batch(&l1_batch_header) @@ -432,6 +435,7 @@ mod tests { 100, Default::default(), ProtocolVersionId::latest(), + SettlementLayer::for_tests(), ); conn.blocks_dal() .insert_mock_l1_batch(&l1_batch_header) diff --git a/core/lib/dal/src/sync_dal.rs b/core/lib/dal/src/sync_dal.rs index 49778d88797a..218f3295f6c5 100644 --- a/core/lib/dal/src/sync_dal.rs +++ b/core/lib/dal/src/sync_dal.rs @@ -68,7 +68,9 @@ impl SyncDal<'_, '_> { miniblocks.l2_da_validator_address, miniblocks.l2_da_commitment_scheme, miniblocks.pubdata_type AS "pubdata_type!", - l1_batches.pubdata_limit + l1_batches.pubdata_limit, + l1_batches.settlement_layer_type, + l1_batches.settlement_layer_chain_id FROM miniblocks INNER JOIN l1_batch ON true @@ -139,6 +141,7 @@ impl SyncDal<'_, '_> { mod tests { use zksync_types::{ block::{L1BatchHeader, L2BlockHeader}, + settlement::SettlementLayer, Address, L1BatchNumber, ProtocolVersion, ProtocolVersionId, Transaction, }; use zksync_vm_interface::{tracer::ValidationTraces, TransactionExecutionMetrics}; @@ -171,6 +174,7 @@ mod tests { 0, Default::default(), ProtocolVersionId::latest(), + SettlementLayer::for_tests(), ); conn.blocks_dal() .insert_mock_l1_batch(&l1_batch_header) @@ -319,6 +323,7 @@ mod tests { 100, Default::default(), ProtocolVersionId::latest(), + SettlementLayer::for_tests(), ); conn.blocks_dal() .insert_l1_batch(l1_batch_header.to_unsealed_header()) diff --git a/core/lib/dal/src/tests/mod.rs b/core/lib/dal/src/tests/mod.rs index c6819dd57851..d2d58d4cb494 100644 --- a/core/lib/dal/src/tests/mod.rs +++ b/core/lib/dal/src/tests/mod.rs @@ -13,6 +13,7 @@ use zksync_types::{ l2::L2Tx, l2_to_l1_log::{L2ToL1Log, UserL2ToL1Log}, protocol_upgrade::{ProtocolUpgradeTx, ProtocolUpgradeTxCommonData}, + settlement::SettlementLayer, snapshots::SnapshotRecoveryStatus, Address, Execute, K256PrivateKey, L1BatchNumber, L1BlockNumber, L1TxCommonData, L2BlockNumber, L2ChainId, PriorityOpId, ProtocolVersion, ProtocolVersionId, H160, H256, U256, @@ -69,6 +70,7 @@ pub(crate) fn 
create_l1_batch_header(number: u32) -> L1BatchHeader { evm_emulator: Some(H256::repeat_byte(43)), }, ProtocolVersionId::latest(), + SettlementLayer::for_tests(), ) } diff --git a/core/lib/l1_contract_interface/src/i_executor/methods/execute_batches.rs b/core/lib/l1_contract_interface/src/i_executor/methods/execute_batches.rs index 87d17c3ec5e1..477a33a76eaf 100644 --- a/core/lib/l1_contract_interface/src/i_executor/methods/execute_batches.rs +++ b/core/lib/l1_contract_interface/src/i_executor/methods/execute_batches.rs @@ -1,7 +1,9 @@ use zksync_types::{ commitment::{L1BatchWithMetadata, PriorityOpsMerkleProof}, ethabi::{encode, Token}, - InteropRoot, ProtocolVersionId, + l2_to_l1_log::UserL2ToL1Log, + web3::contract::Tokenizable, + InteropRoot, ProtocolVersionId, H256, }; use crate::i_executor::structures::{get_encoding_version, StoredBatchInfo}; @@ -12,6 +14,9 @@ pub struct ExecuteBatches { pub l1_batches: Vec, pub priority_ops_proofs: Vec, pub dependency_roots: Vec>, + pub logs: Vec>, + pub messages: Vec>>, + pub message_roots: Vec, // } impl ExecuteBatches { @@ -59,6 +64,50 @@ impl ExecuteBatches { .concat() .to_vec(); + vec![ + Token::Uint(self.l1_batches[0].header.number.0.into()), + Token::Uint(self.l1_batches.last().unwrap().header.number.0.into()), + Token::Bytes(execute_data), + ] + } else if internal_protocol_version.is_pre_medium_interop() + && chain_protocol_version.is_pre_medium_interop() + { + let encoded_data = encode(&[ + Token::Array( + self.l1_batches + .iter() + .map(|batch| { + StoredBatchInfo::from(batch) + .into_token_with_protocol_version(internal_protocol_version) + }) + .collect(), + ), + Token::Array( + self.priority_ops_proofs + .iter() + .map(|proof| proof.into_token()) + .collect(), + ), + Token::Array( + self.dependency_roots + .iter() + .map(|batch_roots| { + Token::Array( + batch_roots + .iter() + .map(|root| root.clone().into_token()) + .collect(), + ) + }) + .collect(), + ), + ]); + let execute_data = [ + [get_encoding_version(internal_protocol_version)].to_vec(), + encoded_data, + ] + .concat() + .to_vec(); vec![ Token::Uint(self.l1_batches[0].header.number.0.into()), Token::Uint(self.l1_batches.last().unwrap().header.number.0.into()), @@ -94,6 +143,33 @@ impl ExecuteBatches { }) .collect(), ), + Token::Array( + self.logs + .iter() + .map(|log| { + Token::Array(log.iter().map(|log| log.clone().0.into_token()).collect()) + }) + .collect(), + ), + Token::Array( + self.messages + .iter() + .map(|message| { + Token::Array( + message + .iter() + .map(|message| message.clone().into_token()) + .collect(), + ) + }) + .collect(), + ), + Token::Array( + self.message_roots + .iter() + .map(|root| Token::FixedBytes(root.0.as_slice().into())) + .collect(), + ), ]); let execute_data = [ [get_encoding_version(internal_protocol_version)].to_vec(), diff --git a/core/lib/multivm/src/utils/mod.rs b/core/lib/multivm/src/utils/mod.rs index d131263ee25f..8a7dfd08243a 100644 --- a/core/lib/multivm/src/utils/mod.rs +++ b/core/lib/multivm/src/utils/mod.rs @@ -74,7 +74,8 @@ pub fn derive_base_fee_and_gas_per_pubdata( | VmVersion::VmGateway | VmVersion::VmEvmEmulator | VmVersion::VmEcPrecompiles - | VmVersion::VmInterop => { + | VmVersion::VmInterop + | VmVersion::VmMediumInterop => { crate::vm_latest::utils::fee::derive_base_fee_and_gas_per_pubdata( batch_fee_input.into_pubdata_independent(), ) @@ -107,7 +108,10 @@ pub fn get_batch_base_fee(l1_batch_env: &L1BatchEnv, vm_version: VmVersion) -> u | VmVersion::VmGateway | VmVersion::VmEvmEmulator | VmVersion::VmEcPrecompiles - | 
VmVersion::VmInterop => crate::vm_latest::utils::fee::get_batch_base_fee(l1_batch_env), + | VmVersion::VmInterop + | VmVersion::VmMediumInterop => { + crate::vm_latest::utils::fee::get_batch_base_fee(l1_batch_env) + } } } @@ -242,7 +246,10 @@ pub fn derive_overhead( | VmVersion::VmGateway | VmVersion::VmEvmEmulator | VmVersion::VmEcPrecompiles - | VmVersion::VmInterop => crate::vm_latest::utils::overhead::derive_overhead(encoded_len), + | VmVersion::VmInterop + | VmVersion::VmMediumInterop => { + crate::vm_latest::utils::overhead::derive_overhead(encoded_len) + } } } @@ -290,6 +297,11 @@ pub fn get_bootloader_encoding_space(version: VmVersion) -> u32 { VmVersion::VmInterop => crate::vm_latest::constants::get_bootloader_tx_encoding_space( crate::vm_latest::MultiVmSubversion::Interop, ), + VmVersion::VmMediumInterop => { + crate::vm_latest::constants::get_bootloader_tx_encoding_space( + crate::vm_latest::MultiVmSubversion::MediumInterop, + ) + } } } @@ -314,7 +326,8 @@ pub fn get_bootloader_max_txs_in_batch(version: VmVersion) -> usize { | VmVersion::VmGateway | VmVersion::VmEvmEmulator | VmVersion::VmEcPrecompiles - | VmVersion::VmInterop => crate::vm_latest::constants::MAX_TXS_IN_BATCH, + | VmVersion::VmInterop + | VmVersion::VmMediumInterop => crate::vm_latest::constants::MAX_TXS_IN_BATCH, } } @@ -335,7 +348,9 @@ pub fn get_bootloader_max_interop_roots_in_batch(version: VmVersion) -> usize { | VmVersion::VmGateway | VmVersion::VmEvmEmulator | VmVersion::VmEcPrecompiles => 0, - VmVersion::VmInterop => crate::vm_latest::constants::MAX_MSG_ROOTS_IN_BATCH, + VmVersion::VmInterop | VmVersion::VmMediumInterop => { + crate::vm_latest::constants::MAX_MSG_ROOTS_IN_BATCH + } } } @@ -361,7 +376,8 @@ pub fn gas_bootloader_batch_tip_overhead(version: VmVersion) -> u32 { | VmVersion::VmGateway | VmVersion::VmEvmEmulator | VmVersion::VmEcPrecompiles - | VmVersion::VmInterop => crate::vm_latest::constants::BOOTLOADER_BATCH_TIP_OVERHEAD, + | VmVersion::VmInterop + | VmVersion::VmMediumInterop => crate::vm_latest::constants::BOOTLOADER_BATCH_TIP_OVERHEAD, } } @@ -387,7 +403,8 @@ pub fn circuit_statistics_bootloader_batch_tip_overhead(version: VmVersion) -> u | VmVersion::VmGateway | VmVersion::VmEvmEmulator | VmVersion::VmEcPrecompiles - | VmVersion::VmInterop => { + | VmVersion::VmInterop + | VmVersion::VmMediumInterop => { crate::vm_latest::constants::BOOTLOADER_BATCH_TIP_CIRCUIT_STATISTICS_OVERHEAD as usize } } @@ -415,7 +432,8 @@ pub fn execution_metrics_bootloader_batch_tip_overhead(version: VmVersion) -> us | VmVersion::VmGateway | VmVersion::VmEvmEmulator | VmVersion::VmEcPrecompiles - | VmVersion::VmInterop => { + | VmVersion::VmInterop + | VmVersion::VmMediumInterop => { crate::vm_latest::constants::BOOTLOADER_BATCH_TIP_METRICS_SIZE_OVERHEAD as usize } } @@ -444,7 +462,8 @@ pub fn get_max_gas_per_pubdata_byte(version: VmVersion) -> u64 { | VmVersion::VmGateway | VmVersion::VmEvmEmulator | VmVersion::VmEcPrecompiles - | VmVersion::VmInterop => crate::vm_latest::constants::MAX_GAS_PER_PUBDATA_BYTE, + | VmVersion::VmInterop + | VmVersion::VmMediumInterop => crate::vm_latest::constants::MAX_GAS_PER_PUBDATA_BYTE, } } @@ -492,6 +511,11 @@ pub fn get_used_bootloader_memory_bytes(version: VmVersion) -> usize { VmVersion::VmInterop => crate::vm_latest::constants::get_used_bootloader_memory_bytes( crate::vm_latest::MultiVmSubversion::Interop, ), + VmVersion::VmMediumInterop => { + crate::vm_latest::constants::get_used_bootloader_memory_bytes( + crate::vm_latest::MultiVmSubversion::MediumInterop, + ) + } } } 
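The hunks in core/lib/multivm/src/utils/mod.rs above and below extend every exhaustive `match` over `VmVersion` with the new `VmVersion::VmMediumInterop` arm, grouping it with `VmInterop` so both resolve to the `vm_latest` constants. A minimal, self-contained sketch of that dispatch pattern follows; the enum and the gas-limit value are simplified stand-ins, not the real multivm definitions.

```rust
// Simplified illustration of the exhaustive-match dispatch used throughout
// utils/mod.rs. The variant names mirror the diff; the constant value is a
// placeholder, not the crate's actual BATCH_GAS_LIMIT.
#[derive(Clone, Copy)]
enum VmVersion {
    Interop,
    MediumInterop, // newly added variant
}

fn get_max_batch_gas_limit(version: VmVersion) -> u64 {
    match version {
        // Both interop flavours share the `vm_latest` constants, so they are
        // grouped into a single arm, exactly as in the diff.
        VmVersion::Interop | VmVersion::MediumInterop => 4_000_000_000,
    }
}

fn main() {
    assert_eq!(get_max_batch_gas_limit(VmVersion::MediumInterop), 4_000_000_000);
}
```

Because the matches are exhaustive, any helper that forgets the new variant fails to compile, which is why this one enum addition fans out into so many mechanical arm updates.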
@@ -539,6 +563,11 @@ pub fn get_used_bootloader_memory_words(version: VmVersion) -> usize { VmVersion::VmInterop => crate::vm_latest::constants::get_used_bootloader_memory_words( crate::vm_latest::MultiVmSubversion::Interop, ), + VmVersion::VmMediumInterop => { + crate::vm_latest::constants::get_used_bootloader_memory_words( + crate::vm_latest::MultiVmSubversion::MediumInterop, + ) + } } } @@ -565,7 +594,8 @@ pub fn get_max_batch_gas_limit(version: VmVersion) -> u64 { | VmVersion::VmGateway | VmVersion::VmEvmEmulator | VmVersion::VmEcPrecompiles - | VmVersion::VmInterop => crate::vm_latest::constants::BATCH_GAS_LIMIT, + | VmVersion::VmInterop + | VmVersion::VmMediumInterop => crate::vm_latest::constants::BATCH_GAS_LIMIT, } } @@ -594,7 +624,8 @@ pub fn get_eth_call_gas_limit(version: VmVersion) -> u64 { | VmVersion::VmGateway | VmVersion::VmEvmEmulator | VmVersion::VmEcPrecompiles - | VmVersion::VmInterop => crate::vm_latest::constants::ETH_CALL_GAS_LIMIT, + | VmVersion::VmInterop + | VmVersion::VmMediumInterop => crate::vm_latest::constants::ETH_CALL_GAS_LIMIT, } } @@ -620,7 +651,8 @@ pub fn get_max_batch_base_layer_circuits(version: VmVersion) -> usize { | VmVersion::VmGateway | VmVersion::VmEvmEmulator | VmVersion::VmEcPrecompiles - | VmVersion::VmInterop => crate::vm_latest::constants::MAX_BASE_LAYER_CIRCUITS, + | VmVersion::VmInterop + | VmVersion::VmMediumInterop => crate::vm_latest::constants::MAX_BASE_LAYER_CIRCUITS, } } @@ -647,7 +679,8 @@ pub fn get_max_new_factory_deps(version: VmVersion) -> usize { | VmVersion::VmGateway | VmVersion::VmEvmEmulator | VmVersion::VmEcPrecompiles - | VmVersion::VmInterop) => { + | VmVersion::VmInterop + | VmVersion::VmMediumInterop) => { crate::vm_latest::constants::get_max_new_factory_deps(version.try_into().unwrap()) } } @@ -670,7 +703,8 @@ pub fn get_max_vm_pubdata_per_batch(version: VmVersion) -> usize { | VmVersion::VmGateway | VmVersion::VmEvmEmulator | VmVersion::VmEcPrecompiles - | VmVersion::VmInterop => crate::vm_latest::constants::MAX_VM_PUBDATA_PER_BATCH, + | VmVersion::VmInterop + | VmVersion::VmMediumInterop => crate::vm_latest::constants::MAX_VM_PUBDATA_PER_BATCH, } } diff --git a/core/lib/multivm/src/versions/testonly/mod.rs b/core/lib/multivm/src/versions/testonly/mod.rs index f4efa6961727..5ce4c5e651ac 100644 --- a/core/lib/multivm/src/versions/testonly/mod.rs +++ b/core/lib/multivm/src/versions/testonly/mod.rs @@ -24,7 +24,9 @@ use zksync_types::{ commitment::{L2DACommitmentScheme, L2PubdataValidator}, fee_model::BatchFeeInput, get_code_key, get_evm_code_hash_key, get_is_account_key, get_known_code_key, h256_to_address, - h256_to_u256, u256_to_h256, + h256_to_u256, + settlement::SettlementLayer, + u256_to_h256, utils::storage_key_for_eth_balance, web3, Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, Transaction, H256, U256, @@ -197,6 +199,7 @@ pub(super) fn default_l1_batch(number: L1BatchNumber) -> L1BatchEnv { max_virtual_blocks_to_create: 100, interop_roots: vec![], }, + settlement_layer: SettlementLayer::for_tests(), } } diff --git a/core/lib/multivm/src/versions/vm_fast/vm.rs b/core/lib/multivm/src/versions/vm_fast/vm.rs index 40970716aeab..0b47f3bbc176 100644 --- a/core/lib/multivm/src/versions/vm_fast/vm.rs +++ b/core/lib/multivm/src/versions/vm_fast/vm.rs @@ -127,7 +127,7 @@ impl Vm { &system_env.base_system_smart_contracts.bootloader, true, ); - let bootloader_memory = BootloaderState::initial_memory(&batch_env); + let bootloader_memory = BootloaderState::initial_memory(vm_version.into(), &batch_env); 
let mut inner = VirtualMachine::new( BOOTLOADER_ADDRESS, diff --git a/core/lib/multivm/src/versions/vm_latest/bootloader/init.rs b/core/lib/multivm/src/versions/vm_latest/bootloader/init.rs index 7897ada6ad23..ed1387b3ed2b 100644 --- a/core/lib/multivm/src/versions/vm_latest/bootloader/init.rs +++ b/core/lib/multivm/src/versions/vm_latest/bootloader/init.rs @@ -1,7 +1,12 @@ use zksync_types::{address_to_u256, h256_to_u256, U256}; use super::BootloaderState; -use crate::{interface::L1BatchEnv, vm_latest::utils::fee::get_batch_base_fee}; +use crate::{ + interface::L1BatchEnv, + vm_latest::{ + constants::get_settlement_layer_offset, utils::fee::get_batch_base_fee, MultiVmSubversion, + }, +}; const OPERATOR_ADDRESS_SLOT: usize = 0; const PREV_BLOCK_HASH_SLOT: usize = 1; @@ -14,7 +19,10 @@ const SHOULD_SET_NEW_BLOCK_SLOT: usize = 7; impl BootloaderState { /// Returns the initial memory for the bootloader based on the current batch environment. - pub(crate) fn initial_memory(l1_batch: &L1BatchEnv) -> Vec<(usize, U256)> { + pub(crate) fn initial_memory( + vm_version: MultiVmSubversion, + l1_batch: &L1BatchEnv, + ) -> Vec<(usize, U256)> { let (prev_block_hash, should_set_new_block) = l1_batch .previous_batch_hash .map(|prev_block_hash| (h256_to_u256(prev_block_hash), U256::one())) @@ -41,6 +49,10 @@ impl BootloaderState { U256::from(get_batch_base_fee(l1_batch)), ), (SHOULD_SET_NEW_BLOCK_SLOT, should_set_new_block), + ( + get_settlement_layer_offset(vm_version), + U256::from(l1_batch.settlement_layer.chain_id().0), + ), ] } } diff --git a/core/lib/multivm/src/versions/vm_latest/bootloader/utils.rs b/core/lib/multivm/src/versions/vm_latest/bootloader/utils.rs index ec1ee999f94c..bb67c0435087 100644 --- a/core/lib/multivm/src/versions/vm_latest/bootloader/utils.rs +++ b/core/lib/multivm/src/versions/vm_latest/bootloader/utils.rs @@ -277,7 +277,8 @@ pub(crate) fn apply_pubdata_to_memory( MultiVmSubversion::Gateway | MultiVmSubversion::EvmEmulator | MultiVmSubversion::EcPrecompiles - | MultiVmSubversion::Interop => { + | MultiVmSubversion::Interop + | MultiVmSubversion::MediumInterop => { // Skipping the first slot as it will be filled by the bootloader itself: // It is for the selector of the call to the L1Messenger. let l1_messenger_pubdata_start_slot = diff --git a/core/lib/multivm/src/versions/vm_latest/constants.rs b/core/lib/multivm/src/versions/vm_latest/constants.rs index 5a1c6f16ac41..eb189539094a 100644 --- a/core/lib/multivm/src/versions/vm_latest/constants.rs +++ b/core/lib/multivm/src/versions/vm_latest/constants.rs @@ -28,7 +28,8 @@ pub(crate) const fn get_used_bootloader_memory_bytes(subversion: MultiVmSubversi | MultiVmSubversion::Gateway | MultiVmSubversion::EvmEmulator | MultiVmSubversion::EcPrecompiles - | MultiVmSubversion::Interop => 63_800_000, + | MultiVmSubversion::Interop + | MultiVmSubversion::MediumInterop => 63_800_000, } } @@ -74,7 +75,8 @@ pub(crate) const fn get_max_new_factory_deps(subversion: MultiVmSubversion) -> u MultiVmSubversion::Gateway | MultiVmSubversion::EvmEmulator | MultiVmSubversion::EcPrecompiles - | MultiVmSubversion::Interop => 64, + | MultiVmSubversion::Interop + | MultiVmSubversion::MediumInterop => 64, } } @@ -149,11 +151,23 @@ pub(crate) const INTEROP_ROOT_SLOTS_SIZE: usize = 5; pub(crate) const INTEROP_ROOT_SLOTS: usize = (MAX_MSG_ROOTS_IN_BATCH + 1) * INTEROP_ROOT_SLOTS_SIZE; +pub(crate) const fn get_settlement_layer_offset(subversion: MultiVmSubversion) -> usize { + // The additional slot comes from INTEROP_ROOT_ROLLING_HASH_SLOT. 
+ get_interop_root_offset(subversion) + INTEROP_ROOT_SLOTS + 1 +} + pub(crate) const fn get_compressed_bytecodes_offset(subversion: MultiVmSubversion) -> usize { match subversion { // The additional slot comes from INTEROP_ROOT_ROLLING_HASH_SLOT. MultiVmSubversion::Interop => get_interop_root_offset(subversion) + INTEROP_ROOT_SLOTS + 1, - _ => get_tx_operator_l2_block_info_offset(subversion) + TX_OPERATOR_L2_BLOCK_INFO_SLOTS, + MultiVmSubversion::MediumInterop => get_settlement_layer_offset(subversion) + 1, + MultiVmSubversion::EvmEmulator + | MultiVmSubversion::EcPrecompiles + | MultiVmSubversion::Gateway + | MultiVmSubversion::SmallBootloaderMemory + | MultiVmSubversion::IncreasedBootloaderMemory => { + get_tx_operator_l2_block_info_offset(subversion) + TX_OPERATOR_L2_BLOCK_INFO_SLOTS + } } } @@ -172,12 +186,18 @@ pub(crate) const fn get_operator_provided_l1_messenger_pubdata_offset( subversion: MultiVmSubversion, ) -> usize { match subversion { - MultiVmSubversion::Interop => { + MultiVmSubversion::Interop | MultiVmSubversion::MediumInterop => { get_priority_txs_l1_data_offset(subversion) + PRIORITY_TXS_L1_DATA_SLOTS + TXS_STATUS_ROLLING_HASH_SLOTS } - _ => get_priority_txs_l1_data_offset(subversion) + PRIORITY_TXS_L1_DATA_SLOTS, + MultiVmSubversion::EvmEmulator + | MultiVmSubversion::EcPrecompiles + | MultiVmSubversion::Gateway + | MultiVmSubversion::SmallBootloaderMemory + | MultiVmSubversion::IncreasedBootloaderMemory => { + get_priority_txs_l1_data_offset(subversion) + PRIORITY_TXS_L1_DATA_SLOTS + } } } diff --git a/core/lib/multivm/src/versions/vm_latest/types/vm_state.rs b/core/lib/multivm/src/versions/vm_latest/types/vm_state.rs index d9979cb15886..de3c797db52a 100644 --- a/core/lib/multivm/src/versions/vm_latest/types/vm_state.rs +++ b/core/lib/multivm/src/versions/vm_latest/types/vm_state.rs @@ -111,7 +111,7 @@ pub(crate) fn new_vm_state( Timestamp(0), ); - let bootloader_initial_memory = BootloaderState::initial_memory(l1_batch_env); + let bootloader_initial_memory = BootloaderState::initial_memory(subversion, l1_batch_env); memory.populate_page( BOOTLOADER_HEAP_PAGE as usize, bootloader_initial_memory.clone(), diff --git a/core/lib/multivm/src/versions/vm_latest/vm.rs b/core/lib/multivm/src/versions/vm_latest/vm.rs index 655e9dd56736..746339c7b977 100644 --- a/core/lib/multivm/src/versions/vm_latest/vm.rs +++ b/core/lib/multivm/src/versions/vm_latest/vm.rs @@ -48,6 +48,7 @@ pub(crate) enum MultiVmSubversion { EvmEmulator, EcPrecompiles, Interop, + MediumInterop, } #[cfg(test)] @@ -71,6 +72,7 @@ impl TryFrom for MultiVmSubversion { VmVersion::VmEvmEmulator => Ok(Self::EvmEmulator), VmVersion::VmEcPrecompiles => Ok(Self::EcPrecompiles), VmVersion::VmInterop => Ok(Self::Interop), + VmVersion::VmMediumInterop => Ok(Self::MediumInterop), _ => Err(VmVersionIsNotVm150Error), } } diff --git a/core/lib/multivm/src/vm_instance.rs b/core/lib/multivm/src/vm_instance.rs index 609837010874..a37a0caef239 100644 --- a/core/lib/multivm/src/vm_instance.rs +++ b/core/lib/multivm/src/vm_instance.rs @@ -248,6 +248,15 @@ impl LegacyVmInstance { ); Self::Vm1_5_2(vm) } + VmVersion::VmMediumInterop => { + let vm = vm_latest::Vm::new_with_subversion( + l1_batch_env, + system_env, + storage_view, + vm_latest::MultiVmSubversion::MediumInterop, + ); + Self::Vm1_5_2(vm) + } } } diff --git a/core/lib/snapshots_applier/src/tests/mod.rs b/core/lib/snapshots_applier/src/tests/mod.rs index 3a99516dc136..115e56486232 100644 --- a/core/lib/snapshots_applier/src/tests/mod.rs +++ 
b/core/lib/snapshots_applier/src/tests/mod.rs @@ -13,7 +13,9 @@ use zksync_object_store::MockObjectStore; use zksync_types::{ api::{BlockDetails, L1BatchDetails}, block::L1BatchHeader, - get_code_key, L1BatchNumber, ProtocolVersion, ProtocolVersionId, + get_code_key, + settlement::SettlementLayer, + L1BatchNumber, ProtocolVersion, ProtocolVersionId, }; use self::utils::{ @@ -521,6 +523,7 @@ async fn applier_errors_after_genesis() { 0, Default::default(), ProtocolVersionId::latest(), + SettlementLayer::for_tests(), ); storage .blocks_dal() diff --git a/core/lib/state/src/test_utils.rs b/core/lib/state/src/test_utils.rs index 6e3412560276..b2d713c5aceb 100644 --- a/core/lib/state/src/test_utils.rs +++ b/core/lib/state/src/test_utils.rs @@ -100,7 +100,13 @@ pub(crate) async fn create_l1_batch( l1_batch_number: L1BatchNumber, logs_for_initial_writes: &[StorageLog], ) { - let header = L1BatchHeader::new(l1_batch_number, 0, Default::default(), Default::default()); + let header = L1BatchHeader::new( + l1_batch_number, + 0, + Default::default(), + Default::default(), + SettlementLayer::for_tests(), + ); conn.blocks_dal() .insert_mock_l1_batch(&header) .await diff --git a/core/lib/tee_verifier/src/lib.rs b/core/lib/tee_verifier/src/lib.rs index 17b517a4a8ad..b120f82f91a3 100644 --- a/core/lib/tee_verifier/src/lib.rs +++ b/core/lib/tee_verifier/src/lib.rs @@ -345,6 +345,7 @@ mod tests { max_virtual_blocks_to_create: 0, interop_roots: vec![], }, + settlement_layer: SettlementLayer::for_tests(), }, SystemEnv { zk_porter_available: false, diff --git a/core/lib/types/src/api/en.rs b/core/lib/types/src/api/en.rs index 0ae493a5406e..419c36b9129e 100644 --- a/core/lib/types/src/api/en.rs +++ b/core/lib/types/src/api/en.rs @@ -1,7 +1,10 @@ //! API types related to the External Node specific methods. use serde::{Deserialize, Serialize}; -use zksync_basic_types::{commitment::PubdataParams, Address, L1BatchNumber, L2BlockNumber, H256}; +use zksync_basic_types::{ + commitment::PubdataParams, settlement::SettlementLayer, Address, L1BatchNumber, L2BlockNumber, + H256, +}; use zksync_contracts::BaseSystemContractsHashes; use crate::{InteropRoot, ProtocolVersionId}; @@ -49,6 +52,7 @@ pub struct SyncBlock { pub pubdata_limit: Option, /// Interop roots for this block pub interop_roots: Option>, + pub settlement_layer: Option, } /// Global configuration of the consensus served by the main node to the external nodes. 
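The core/lib/types/src/api/en.rs hunk above adds `settlement_layer` to the external-node `SyncBlock` API type as an optional field. The sketch below (hypothetical simplified types, with a serde JSON encoding assumed purely for illustration rather than the crate's actual wire format) shows why making the field an `Option` keeps old and new nodes wire-compatible: payloads from a main node that predates this change simply omit the field and deserialize to `None`.

```rust
// Requires the `serde` (with derive) and `serde_json` crates.
use serde::{Deserialize, Serialize};

// Stand-in for zksync_types::settlement::SettlementLayer.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
enum SettlementLayer {
    L1(u64),
    Gateway(u64),
}

// Heavily trimmed stand-in for api::en::SyncBlock.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct SyncBlock {
    number: u32,
    #[serde(default)]
    settlement_layer: Option<SettlementLayer>,
}

fn main() {
    // A payload from an older main node omits the field entirely.
    let old: SyncBlock = serde_json::from_str(r#"{ "number": 1 }"#).unwrap();
    assert_eq!(old.settlement_layer, None);

    // A newer payload carries the settlement layer explicitly.
    let new: SyncBlock =
        serde_json::from_str(r#"{ "number": 2, "settlement_layer": { "Gateway": 506 } }"#).unwrap();
    assert_eq!(new.settlement_layer, Some(SettlementLayer::Gateway(506)));
}
```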
diff --git a/core/lib/types/src/block.rs b/core/lib/types/src/block.rs index a025f8094229..2738852b37d3 100644 --- a/core/lib/types/src/block.rs +++ b/core/lib/types/src/block.rs @@ -7,6 +7,7 @@ use crate::{ fee_model::BatchFeeInput, l2_to_l1_log::{SystemL2ToL1Log, UserL2ToL1Log}, priority_op_onchain_data::PriorityOpOnchainData, + settlement::SettlementLayer, web3::{keccak256, keccak256_concat}, AccountTreeId, InteropRoot, L1BatchNumber, L2BlockNumber, ProtocolVersionId, Transaction, }; @@ -87,6 +88,7 @@ pub struct L1BatchHeader { pub fee_address: Address, pub batch_fee_input: BatchFeeInput, pub pubdata_limit: Option, + pub settlement_layer: SettlementLayer, } impl L1BatchHeader { @@ -98,6 +100,7 @@ impl L1BatchHeader { fee_address: self.fee_address, fee_input: self.batch_fee_input, pubdata_limit: self.pubdata_limit, + settlement_layer: self.settlement_layer, } } } @@ -111,6 +114,7 @@ pub struct UnsealedL1BatchHeader { pub fee_address: Address, pub fee_input: BatchFeeInput, pub pubdata_limit: Option, + pub settlement_layer: SettlementLayer, } /// Holder for the metadata that is relevant for both sealed and unsealed batches. @@ -122,6 +126,7 @@ pub struct CommonL1BatchHeader { pub fee_address: Address, pub fee_input: BatchFeeInput, pub pubdata_limit: Option, + pub settlement_layer: SettlementLayer, } /// Holder for the L2 block metadata that is not available from transactions themselves. @@ -177,10 +182,12 @@ impl L1BatchHeader { timestamp: u64, base_system_contracts_hashes: BaseSystemContractsHashes, protocol_version: ProtocolVersionId, + settlement_layer: SettlementLayer, ) -> L1BatchHeader { Self { number, timestamp, + settlement_layer, l1_tx_count: 0, l2_tx_count: 0, priority_ops_onchain_data: vec![], diff --git a/core/lib/types/src/l2_to_l1_log.rs b/core/lib/types/src/l2_to_l1_log.rs index be4275ac5fb5..d059c2f48faa 100644 --- a/core/lib/types/src/l2_to_l1_log.rs +++ b/core/lib/types/src/l2_to_l1_log.rs @@ -4,6 +4,7 @@ use zksync_system_constants::{BLOB1_LINEAR_HASH_KEY_PRE_GATEWAY, PUBDATA_CHUNK_P use crate::{ blob::{num_blobs_created, num_blobs_required}, commitment::SerializeCommitment, + ethabi::Token, Address, ProtocolVersionId, H256, }; @@ -58,6 +59,17 @@ impl L2ToL1Log { res.extend(self.value.as_bytes()); res } + + pub fn into_token(self) -> Token { + Token::Tuple(vec![ + Token::Uint(self.shard_id.into()), + Token::Bool(self.is_service), + Token::Uint(self.tx_number_in_block.into()), + Token::Address(self.sender), + Token::FixedBytes(self.key.as_bytes().to_vec()), + Token::FixedBytes(self.value.as_bytes().to_vec()), + ]) // + } } /// Returns the number of items in the Merkle tree built from L2-to-L1 logs diff --git a/core/lib/types/src/system_contracts.rs b/core/lib/types/src/system_contracts.rs index bd763e3ba87f..d5e48362a0a6 100644 --- a/core/lib/types/src/system_contracts.rs +++ b/core/lib/types/src/system_contracts.rs @@ -5,11 +5,13 @@ use zksync_contracts::{read_sys_contract_bytecode, ContractLanguage, SystemContr use zksync_system_constants::{ BOOTLOADER_UTILITIES_ADDRESS, CODE_ORACLE_ADDRESS, COMPRESSOR_ADDRESS, CREATE2_FACTORY_ADDRESS, EVENT_WRITER_ADDRESS, EVM_GAS_MANAGER_ADDRESS, EVM_HASHES_STORAGE_ADDRESS, - EVM_PREDEPLOYS_MANAGER_ADDRESS, IDENTITY_ADDRESS, L2_ASSET_ROUTER_ADDRESS, - L2_BRIDGEHUB_ADDRESS, L2_CHAIN_ASSET_HANDLER_ADDRESS, L2_GENESIS_UPGRADE_ADDRESS, - L2_INTEROP_ROOT_STORAGE_ADDRESS, L2_MESSAGE_ROOT_ADDRESS, L2_MESSAGE_VERIFICATION_ADDRESS, - L2_NATIVE_TOKEN_VAULT_ADDRESS, L2_WRAPPED_BASE_TOKEN_IMPL, MODEXP_PRECOMPILE_ADDRESS, - 
PUBDATA_CHUNK_PUBLISHER_ADDRESS, SECP256R1_VERIFY_PRECOMPILE_ADDRESS, SLOAD_CONTRACT_ADDRESS, + EVM_PREDEPLOYS_MANAGER_ADDRESS, GW_ASSET_TRACKER_ADDRESS, IDENTITY_ADDRESS, + L2_ASSET_ROUTER_ADDRESS, L2_ASSET_TRACKER_ADDRESS, L2_BRIDGEHUB_ADDRESS, + L2_CHAIN_ASSET_HANDLER_ADDRESS, L2_GENESIS_UPGRADE_ADDRESS, L2_INTEROP_CENTER_ADDRESS, + L2_INTEROP_HANDLER_ADDRESS, L2_INTEROP_ROOT_STORAGE_ADDRESS, L2_MESSAGE_ROOT_ADDRESS, + L2_MESSAGE_VERIFICATION_ADDRESS, L2_NATIVE_TOKEN_VAULT_ADDRESS, L2_WRAPPED_BASE_TOKEN_IMPL, + MODEXP_PRECOMPILE_ADDRESS, PUBDATA_CHUNK_PUBLISHER_ADDRESS, + SECP256R1_VERIFY_PRECOMPILE_ADDRESS, SLOAD_CONTRACT_ADDRESS, }; use crate::{ @@ -29,7 +31,7 @@ use crate::{ pub const TX_NONCE_INCREMENT: U256 = U256([1, 0, 0, 0]); // 1 pub const DEPLOYMENT_NONCE_INCREMENT: U256 = U256([0, 0, 1, 0]); // 2^128 -static SYSTEM_CONTRACT_LIST: [(&str, &str, Address, ContractLanguage); 40] = [ +static SYSTEM_CONTRACT_LIST: [(&str, &str, Address, ContractLanguage); 44] = [ ( "", "AccountCodeStorage", @@ -262,6 +264,30 @@ static SYSTEM_CONTRACT_LIST: [(&str, &str, Address, ContractLanguage); 40] = [ L2_CHAIN_ASSET_HANDLER_ADDRESS, ContractLanguage::Sol, ), + ( + "../../l1-contracts/zkout/", + "InteropCenter", + L2_INTEROP_CENTER_ADDRESS, + ContractLanguage::Sol, + ), + ( + "../../l1-contracts/zkout/", + "InteropHandler", + L2_INTEROP_HANDLER_ADDRESS, + ContractLanguage::Sol, + ), + ( + "../../l1-contracts/zkout/", + "L2AssetTracker", + L2_ASSET_TRACKER_ADDRESS, + ContractLanguage::Sol, + ), + ( + "../../l1-contracts/zkout/", + "GWAssetTracker", + GW_ASSET_TRACKER_ADDRESS, + ContractLanguage::Sol, + ), ]; /// Gets default set of system contracts, based on Cargo workspace location. diff --git a/core/lib/vm_executor/src/oneshot/block.rs b/core/lib/vm_executor/src/oneshot/block.rs index d2465bc6c6c3..c81378f9c235 100644 --- a/core/lib/vm_executor/src/oneshot/block.rs +++ b/core/lib/vm_executor/src/oneshot/block.rs @@ -10,8 +10,10 @@ use zksync_types::{ api, block::{unpack_block_info, L2BlockHasher}, fee_model::BatchFeeInput, - get_deployer_key, h256_to_u256, AccountTreeId, L1BatchNumber, L2BlockNumber, ProtocolVersionId, - StorageKey, H256, SYSTEM_CONTEXT_ADDRESS, SYSTEM_CONTEXT_CURRENT_L2_BLOCK_INFO_POSITION, + get_deployer_key, h256_to_u256, + settlement::SettlementLayer, + AccountTreeId, L1BatchNumber, L2BlockNumber, ProtocolVersionId, StorageKey, H256, + SYSTEM_CONTEXT_ADDRESS, SYSTEM_CONTEXT_CURRENT_L2_BLOCK_INFO_POSITION, SYSTEM_CONTEXT_CURRENT_TX_ROLLING_HASH_POSITION, ZKPORTER_IS_AVAILABLE, }; @@ -23,11 +25,15 @@ use super::{env::OneshotEnvParameters, ContractsKind}; pub struct BlockInfo { resolved_block_number: L2BlockNumber, l1_batch_timestamp_s: Option, + settlement_layer: SettlementLayer, } impl BlockInfo { /// Fetches information for a pending block. - pub async fn pending(connection: &mut Connection<'_, Core>) -> anyhow::Result { + pub async fn pending( + connection: &mut Connection<'_, Core>, + settlement_layer: SettlementLayer, + ) -> anyhow::Result { let resolved_block_number = connection .blocks_web3_dal() .resolve_block_id(api::BlockId::Number(api::BlockNumber::Pending)) @@ -37,6 +43,7 @@ impl BlockInfo { Ok(Self { resolved_block_number, l1_batch_timestamp_s: None, + settlement_layer, }) } @@ -56,9 +63,14 @@ impl BlockInfo { .await .map_err(DalError::generalize)? 
.context("missing timestamp for non-pending block")?; + let settlement_layer = connection + .blocks_web3_dal() + .get_expected_settlement_layer(&l1_batch) + .await?; Ok(Self { resolved_block_number: number, l1_batch_timestamp_s: Some(l1_batch_timestamp), + settlement_layer, }) } @@ -149,6 +161,7 @@ impl BlockInfo { protocol_version, use_evm_emulator, is_pending: self.is_pending_l2_block(), + settlement_layer: self.settlement_layer, }) } @@ -189,6 +202,7 @@ pub struct ResolvedBlockInfo { protocol_version: ProtocolVersionId, use_evm_emulator: bool, is_pending: bool, + settlement_layer: SettlementLayer, } impl ResolvedBlockInfo { @@ -275,6 +289,7 @@ impl OneshotEnvParameters { fee_account: *operator_account.address(), enforced_base_fee, first_l2_block: next_block, + settlement_layer: resolved_block_info.settlement_layer, }; Ok((system_env, l1_batch_env)) } diff --git a/core/lib/vm_executor/src/oneshot/contracts.rs b/core/lib/vm_executor/src/oneshot/contracts.rs index 4d116320be55..66195cfdeb8c 100644 --- a/core/lib/vm_executor/src/oneshot/contracts.rs +++ b/core/lib/vm_executor/src/oneshot/contracts.rs @@ -77,6 +77,8 @@ pub struct MultiVmBaseSystemContracts { vm_precompiles: BaseSystemContracts, /// Contracts to be used after the interop upgrade interop: BaseSystemContracts, + /// Contracts to be used after the full interop upgrade + medium_interop: BaseSystemContracts, // We use `fn() -> C` marker so that the `MultiVmBaseSystemContracts` unconditionally implements `Send + Sync`. _contracts_kind: PhantomData C>, } @@ -113,9 +115,9 @@ impl MultiVmBaseSystemContracts { ProtocolVersionId::Version27 => &self.vm_evm_emulator, ProtocolVersionId::Version28 => &self.vm_precompiles, ProtocolVersionId::Version29 => &self.interop, - ProtocolVersionId::Version30 => &self.interop, + ProtocolVersionId::Version30 => &self.medium_interop, // Speculative base system contracts for the next protocol version to be used in the upgrade integration test etc. 
- ProtocolVersionId::Version31 => &self.interop, + ProtocolVersionId::Version31 => &self.medium_interop, }; base.clone() } @@ -141,6 +143,7 @@ impl MultiVmBaseSystemContracts { vm_evm_emulator: BaseSystemContracts::estimate_gas_evm_emulator(), vm_precompiles: BaseSystemContracts::estimate_gas_precompiles(), interop: BaseSystemContracts::estimate_gas_interop(), + medium_interop: BaseSystemContracts::estimate_gas_medium_interop(), _contracts_kind: PhantomData, } } @@ -166,6 +169,7 @@ impl MultiVmBaseSystemContracts { vm_evm_emulator: BaseSystemContracts::playground_evm_emulator(), vm_precompiles: BaseSystemContracts::playground_precompiles(), interop: BaseSystemContracts::playground_interop(), + medium_interop: BaseSystemContracts::playground_medium_interop(), _contracts_kind: PhantomData, } } diff --git a/core/lib/vm_executor/src/storage.rs b/core/lib/vm_executor/src/storage.rs index 3aef97780536..3d883a7feb7e 100644 --- a/core/lib/vm_executor/src/storage.rs +++ b/core/lib/vm_executor/src/storage.rs @@ -11,8 +11,9 @@ use zksync_dal::{Connection, Core, CoreDal, DalError}; use zksync_multivm::interface::{L1BatchEnv, L2BlockEnv, SystemEnv, TxExecutionMode}; use zksync_types::{ block::L2BlockHeader, bytecode::BytecodeHash, commitment::PubdataParams, - fee_model::BatchFeeInput, snapshots::SnapshotRecoveryStatus, Address, InteropRoot, - L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, H256, ZKPORTER_IS_AVAILABLE, + fee_model::BatchFeeInput, settlement::SettlementLayer, snapshots::SnapshotRecoveryStatus, + Address, InteropRoot, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, H256, + ZKPORTER_IS_AVAILABLE, }; const BATCH_COMPUTATIONAL_GAS_LIMIT: u32 = u32::MAX; @@ -54,6 +55,7 @@ pub struct RestoredL1BatchEnv { } /// Returns the parameters required to initialize the VM for the next L1 batch. 
+/// TODO pass first_l2_block as a struct #[allow(clippy::too_many_arguments)] pub fn l1_batch_params( current_l1_batch_number: L1BatchNumber, @@ -68,6 +70,7 @@ pub fn l1_batch_params( protocol_version: ProtocolVersionId, virtual_blocks: u32, chain_id: L2ChainId, + settlement_layer: SettlementLayer, interop_roots: Vec, ) -> (SystemEnv, L1BatchEnv) { ( @@ -94,6 +97,7 @@ pub fn l1_batch_params( max_virtual_blocks_to_create: virtual_blocks, interop_roots, }, + settlement_layer, }, ) } @@ -366,6 +370,7 @@ impl L1BatchParamsProvider { .context("`protocol_version` must be set for L2 block")?, first_l2_block_in_batch.header.virtual_blocks, chain_id, + l1_batch_header.settlement_layer, first_l2_block_in_batch.interop_roots.clone(), ); diff --git a/core/lib/vm_executor/src/testonly.rs b/core/lib/vm_executor/src/testonly.rs index 3e19f845473c..d5b03b4487e4 100644 --- a/core/lib/vm_executor/src/testonly.rs +++ b/core/lib/vm_executor/src/testonly.rs @@ -8,8 +8,9 @@ use zksync_multivm::{ }; use zksync_types::{ block::L2BlockHasher, fee::Fee, fee_model::BatchFeeInput, l2::L2Tx, - transaction_request::PaymasterParams, vm::FastVmMode, Address, K256PrivateKey, L1BatchNumber, - L2BlockNumber, L2ChainId, Nonce, ProtocolVersionId, H256, ZKPORTER_IS_AVAILABLE, + settlement::SettlementLayer, transaction_request::PaymasterParams, vm::FastVmMode, Address, + K256PrivateKey, L1BatchNumber, L2BlockNumber, L2ChainId, Nonce, ProtocolVersionId, H256, + ZKPORTER_IS_AVAILABLE, }; static BASE_SYSTEM_CONTRACTS: Lazy = @@ -45,6 +46,7 @@ pub(crate) fn default_l1_batch_env(number: u32) -> L1BatchEnv { interop_roots: vec![], }, fee_input: BatchFeeInput::sensible_l1_pegged_default(), + settlement_layer: SettlementLayer::for_tests(), } } diff --git a/core/lib/vm_interface/src/types/inputs/l1_batch_env.rs b/core/lib/vm_interface/src/types/inputs/l1_batch_env.rs index 04aced4e9e03..b7d90bb23376 100644 --- a/core/lib/vm_interface/src/types/inputs/l1_batch_env.rs +++ b/core/lib/vm_interface/src/types/inputs/l1_batch_env.rs @@ -1,7 +1,7 @@ use serde::{Deserialize, Serialize}; use zksync_types::{ - block::UnsealedL1BatchHeader, fee_model::BatchFeeInput, Address, L1BatchNumber, - ProtocolVersionId, H256, + block::UnsealedL1BatchHeader, fee_model::BatchFeeInput, settlement::SettlementLayer, Address, + L1BatchNumber, ProtocolVersionId, H256, }; use super::L2BlockEnv; @@ -23,6 +23,7 @@ pub struct L1BatchEnv { pub fee_account: Address, pub enforced_base_fee: Option, pub first_l2_block: L2BlockEnv, + pub settlement_layer: SettlementLayer, } impl L1BatchEnv { @@ -38,6 +39,7 @@ impl L1BatchEnv { fee_address: self.fee_account, fee_input: self.fee_input, pubdata_limit, + settlement_layer: self.settlement_layer, } } } diff --git a/core/lib/web3_decl/src/node/resources.rs b/core/lib/web3_decl/src/node/resources.rs index 7e2916e900da..375ff0338fe1 100644 --- a/core/lib/web3_decl/src/node/resources.rs +++ b/core/lib/web3_decl/src/node/resources.rs @@ -4,7 +4,7 @@ use zksync_types::settlement::{SettlementLayer, WorkingSettlementLayer}; use crate::client::{DynClient, L1, L2}; #[derive(Debug, Clone)] -pub struct SettlementModeResource(WorkingSettlementLayer); +pub struct SettlementModeResource(pub WorkingSettlementLayer); impl Resource for SettlementModeResource { fn name() -> String { diff --git a/core/node/api_server/src/execution_sandbox/mod.rs b/core/node/api_server/src/execution_sandbox/mod.rs index c3ee679d275d..b1edddee78c0 100644 --- a/core/node/api_server/src/execution_sandbox/mod.rs +++ b/core/node/api_server/src/execution_sandbox/mod.rs @@ 
-8,7 +8,8 @@ use rand::{thread_rng, Rng}; use zksync_dal::{pruning_dal::PruningInfo, Connection, Core, CoreDal, DalError}; use zksync_multivm::utils::get_eth_call_gas_limit; use zksync_types::{ - api, fee_model::BatchFeeInput, L1BatchNumber, L2BlockNumber, ProtocolVersionId, U256, + api, fee_model::BatchFeeInput, settlement::SettlementLayer, L1BatchNumber, L2BlockNumber, + ProtocolVersionId, U256, }; use zksync_vm_executor::oneshot::{BlockInfo, ResolvedBlockInfo}; @@ -293,8 +294,11 @@ pub struct BlockArgs { } impl BlockArgs { - pub async fn pending(connection: &mut Connection<'_, Core>) -> anyhow::Result { - let inner = BlockInfo::pending(connection).await?; + pub async fn pending( + connection: &mut Connection<'_, Core>, + settlement_layer: SettlementLayer, + ) -> anyhow::Result { + let inner = BlockInfo::pending(connection, settlement_layer).await?; let resolved = inner.resolve(connection).await?; Ok(Self { inner, @@ -316,6 +320,7 @@ impl BlockArgs { connection: &mut Connection<'_, Core>, block_id: api::BlockId, start_info: &BlockStartInfo, + settlement_layer: SettlementLayer, ) -> Result { // We need to check that `block_id` is present in Postgres or can be present in the future // (i.e., it does not refer to a pruned block). If called for a pruned block, the returned value @@ -325,7 +330,7 @@ impl BlockArgs { .await?; if block_id == api::BlockId::Number(api::BlockNumber::Pending) { - return Ok(Self::pending(connection).await?); + return Ok(Self::pending(connection, settlement_layer).await?); } let resolved_block_number = connection diff --git a/core/node/api_server/src/node/server/mod.rs b/core/node/api_server/src/node/server/mod.rs index baf41d42f946..6f95bb2d0bc1 100644 --- a/core/node/api_server/src/node/server/mod.rs +++ b/core/node/api_server/src/node/server/mod.rs @@ -193,9 +193,7 @@ impl WiringLayer for Web3ServerLayer { &l1_contracts, &input.l1_ecosystem_contracts.0, &input.l2_contracts.0, - input - .initial_settlement_mode - .settlement_layer_for_sending_txs(), + input.initial_settlement_mode.0, input.dummy_verifier.0, input.l1batch_commitment_mode.0, ); diff --git a/core/node/api_server/src/web3/namespaces/eth.rs b/core/node/api_server/src/web3/namespaces/eth.rs index 722907e169d4..5177d5bfb207 100644 --- a/core/node/api_server/src/web3/namespaces/eth.rs +++ b/core/node/api_server/src/web3/namespaces/eth.rs @@ -138,7 +138,11 @@ impl EthNamespace { .eip712_meta .is_some(); let mut connection = self.state.acquire_connection().await?; - let block_args = BlockArgs::pending(&mut connection).await?; + let block_args = BlockArgs::pending( + &mut connection, + self.state.api_config.settlement_layer.settlement_layer(), + ) + .await?; drop(connection); let mut tx: L2Tx = L2Tx::from_request( request_with_gas_per_pubdata_overridden.into(), @@ -682,7 +686,11 @@ impl EthNamespace { pub async fn send_raw_transaction_impl(&self, tx_bytes: Bytes) -> Result { let mut connection = self.state.acquire_connection().await?; - let block_args = BlockArgs::pending(&mut connection).await?; + let block_args = BlockArgs::pending( + &mut connection, + self.state.api_config.settlement_layer.settlement_layer(), + ) + .await?; drop(connection); let (mut tx, hash) = self .state diff --git a/core/node/api_server/src/web3/namespaces/unstable/mod.rs b/core/node/api_server/src/web3/namespaces/unstable/mod.rs index 005e57797cc2..24435e647360 100644 --- a/core/node/api_server/src/web3/namespaces/unstable/mod.rs +++ b/core/node/api_server/src/web3/namespaces/unstable/mod.rs @@ -249,14 +249,21 @@ impl 
UnstableNamespace { .map_err(DalError::generalize)?; let state = GatewayMigrationState::from_sl_and_notification( - self.state.api_config.settlement_layer, + self.state + .api_config + .settlement_layer + .settlement_layer_for_sending_txs(), latest_notification, ); Ok(GatewayMigrationStatus { latest_notification, state, - settlement_layer: self.state.api_config.settlement_layer, + settlement_layer: self + .state + .api_config + .settlement_layer + .settlement_layer_for_sending_txs(), }) } @@ -266,7 +273,11 @@ impl UnstableNamespace { tx_bytes: Bytes, ) -> Result { let mut connection = self.state.acquire_connection().await?; - let block_args = BlockArgs::pending(&mut connection).await?; + let block_args = BlockArgs::pending( + &mut connection, + self.state.api_config.settlement_layer.settlement_layer(), + ) + .await?; drop(connection); let (mut tx, tx_hash) = self .state diff --git a/core/node/api_server/src/web3/namespaces/zks.rs b/core/node/api_server/src/web3/namespaces/zks.rs index 7e2e2d9ffed5..7783c0bc9154 100644 --- a/core/node/api_server/src/web3/namespaces/zks.rs +++ b/core/node/api_server/src/web3/namespaces/zks.rs @@ -62,7 +62,11 @@ impl ZksNamespace { } let mut connection = self.state.acquire_connection().await?; - let block_args = BlockArgs::pending(&mut connection).await?; + let block_args = BlockArgs::pending( + &mut connection, + self.state.api_config.settlement_layer.settlement_layer(), + ) + .await?; drop(connection); let mut tx = L2Tx::from_request( request_with_gas_per_pubdata_overridden.into(), @@ -96,7 +100,11 @@ impl ZksNamespace { } let mut connection = self.state.acquire_connection().await?; - let block_args = BlockArgs::pending(&mut connection).await?; + let block_args = BlockArgs::pending( + &mut connection, + self.state.api_config.settlement_layer.settlement_layer(), + ) + .await?; drop(connection); let tx = L1Tx::from_request( request_with_gas_per_pubdata_overridden, diff --git a/core/node/api_server/src/web3/state.rs b/core/node/api_server/src/web3/state.rs index 11dd0c33ad07..1722da92e010 100644 --- a/core/node/api_server/src/web3/state.rs +++ b/core/node/api_server/src/web3/state.rs @@ -29,7 +29,7 @@ use zksync_shared_resources::{ tree::TreeApiClient, }; use zksync_types::{ - api, commitment::L1BatchCommitmentMode, l2::L2Tx, settlement::SettlementLayer, + api, commitment::L1BatchCommitmentMode, l2::L2Tx, settlement::WorkingSettlementLayer, transaction_request::CallRequest, Address, L1BatchNumber, L1ChainId, L2BlockNumber, L2ChainId, H256, U256, U64, }; @@ -178,7 +178,7 @@ pub struct InternalApiConfig { pub timestamp_asserter_address: Option
, pub l2_multicall3: Option<Address>
, pub l1_to_l2_txs_paused: bool, - pub settlement_layer: Option, + pub settlement_layer: WorkingSettlementLayer, pub eth_call_gas_cap: Option, } @@ -188,7 +188,7 @@ impl InternalApiConfig { l1_contracts_config: &SettlementLayerSpecificContracts, l1_ecosystem_contracts: &L1SpecificContracts, l2_contracts: &L2Contracts, - settlement_layer: Option, + settlement_layer: WorkingSettlementLayer, dummy_verifier: bool, l1_batch_commit_data_generator_mode: L1BatchCommitmentMode, ) -> Self { @@ -238,7 +238,7 @@ impl InternalApiConfig { l2_contracts: &L2Contracts, genesis_config: &GenesisConfig, l1_to_l2_txs_paused: bool, - settlement_layer: SettlementLayer, + settlement_layer: WorkingSettlementLayer, ) -> Self { let base = InternalApiConfigBase::new(genesis_config, web3_config) .with_l1_to_l2_txs_paused(l1_to_l2_txs_paused); @@ -247,7 +247,7 @@ impl InternalApiConfig { l1_contracts_config, l1_ecosystem_contracts, l2_contracts, - Some(settlement_layer), + settlement_layer, genesis_config.dummy_verifier, genesis_config.l1_batch_commit_data_generator_mode, ) @@ -400,13 +400,18 @@ impl RpcState { connection: &mut Connection<'_, Core>, block: api::BlockId, ) -> Result { - BlockArgs::new(connection, block, &self.start_info) - .await - .map_err(|err| match err { - BlockArgsError::Pruned(number) => Web3Error::PrunedBlock(number), - BlockArgsError::Missing => Web3Error::NoBlock, - BlockArgsError::Database(err) => Web3Error::InternalError(err), - }) + BlockArgs::new( + connection, + block, + &self.start_info, + self.api_config.settlement_layer.settlement_layer(), + ) + .await + .map_err(|err| match err { + BlockArgsError::Pruned(number) => Web3Error::PrunedBlock(number), + BlockArgsError::Missing => Web3Error::NoBlock, + BlockArgsError::Database(err) => Web3Error::InternalError(err), + }) } pub async fn resolve_filter_block_number( diff --git a/core/node/block_reverter/src/tests.rs b/core/node/block_reverter/src/tests.rs index 6f24790a6328..070f530d8509 100644 --- a/core/node/block_reverter/src/tests.rs +++ b/core/node/block_reverter/src/tests.rs @@ -14,6 +14,7 @@ use zksync_types::{ block::{L1BatchHeader, L2BlockHeader}, commitment::PubdataParams, fee_model::BatchFeeInput, + settlement::SettlementLayer, snapshots::SnapshotVersion, AccountTreeId, L2BlockNumber, ProtocolVersion, ProtocolVersionId, StorageKey, StorageLog, }; @@ -94,6 +95,7 @@ async fn setup_storage(storage: &mut Connection<'_, Core>, storage_logs: &[Stora fee_address: Default::default(), batch_fee_input: BatchFeeInput::pubdata_independent(0, 0, 0), pubdata_limit: Some(100_000), + settlement_layer: SettlementLayer::for_tests(), }; storage .blocks_dal() diff --git a/core/node/consensus/src/storage/store.rs b/core/node/consensus/src/storage/store.rs index 65c6ad940b46..44be64f5841f 100644 --- a/core/node/consensus/src/storage/store.rs +++ b/core/node/consensus/src/storage/store.rs @@ -54,6 +54,7 @@ fn to_fetched_block( .collect(), pubdata_limit: payload.pubdata_limit, interop_roots: payload.interop_roots.clone(), + settlement_layer: payload.settlement_layer, }) } diff --git a/core/node/consensus/src/testonly.rs b/core/node/consensus/src/testonly.rs index 720222e05aec..84251fc63014 100644 --- a/core/node/consensus/src/testonly.rs +++ b/core/node/consensus/src/testonly.rs @@ -546,6 +546,7 @@ impl StateKeeperRunner { Arc::new(NoopSealer), Arc::new(async_cache), None, + SettlementLayer::for_tests(), ) .build(&stop_recv) .await @@ -640,6 +641,7 @@ impl StateKeeperRunner { Arc::new(NoopSealer), Arc::new(MockReadStorageFactory), None, + 
SettlementLayer::for_tests(), ) .build(&stop_recv) .await diff --git a/core/node/eth_proof_manager/src/tests/mod.rs b/core/node/eth_proof_manager/src/tests/mod.rs index afe7b04dd0b7..e51e6ea694fd 100644 --- a/core/node/eth_proof_manager/src/tests/mod.rs +++ b/core/node/eth_proof_manager/src/tests/mod.rs @@ -14,6 +14,7 @@ use zksync_proof_data_handler::{Locking, Processor}; use zksync_types::{ block::{L1BatchHeader, L1BatchTreeData}, commitment::L1BatchCommitmentArtifacts, + settlement::SettlementLayer, L1BatchNumber, L2ChainId, ProtocolVersion, ProtocolVersionId, H256, }; @@ -97,6 +98,7 @@ impl TestContext { evm_emulator: Some(H256::repeat_byte(43)), }, ProtocolVersionId::latest(), + SettlementLayer::for_tests(), ) } diff --git a/core/node/eth_sender/src/aggregator.rs b/core/node/eth_sender/src/aggregator.rs index ebbfc660207b..17e5cfe55219 100644 --- a/core/node/eth_sender/src/aggregator.rs +++ b/core/node/eth_sender/src/aggregator.rs @@ -14,6 +14,7 @@ use zksync_types::{ hasher::keccak::KeccakHasher, helpers::unix_timestamp_ms, l1::L1Tx, + l2_to_l1_log::UserL2ToL1Log, protocol_version::{L1VerifierConfig, ProtocolSemanticVersion}, pubdata_da::PubdataSendingMode, settlement::SettlementLayer, @@ -234,6 +235,7 @@ impl Aggregator { priority_tree_start_index: Option, precommit_params: Option<&PrecommitParams>, execution_delay: Duration, + is_gateway: bool, // ) -> Result, EthSenderError> { let Some(last_sealed_l1_batch_number) = storage .blocks_dal() @@ -250,6 +252,7 @@ impl Aggregator { self.config.max_aggregated_blocks_to_execute as usize, last_sealed_l1_batch_number, priority_tree_start_index, + is_gateway, // execution_delay, ) .await?, @@ -409,6 +412,7 @@ impl Aggregator { limit: usize, last_sealed_l1_batch: L1BatchNumber, priority_tree_start_index: Option, + is_gateway: bool, execution_delay: Duration, ) -> Result, EthSenderError> { let mut max_l1_batch_timestamp_millis = @@ -460,12 +464,18 @@ impl Aggregator { l1_batches, priority_ops_proofs: vec![Default::default(); length], dependency_roots, + logs: vec![], + messages: vec![], + message_roots: vec![], // })); }; let priority_merkle_tree = self.get_or_init_tree(priority_tree_start_index).await; let mut priority_ops_proofs = vec![]; + let mut all_logs = vec![]; + let mut all_messages = vec![]; + let mut all_message_roots = vec![]; // for batch in &l1_batches { let first_priority_op_id_option = storage .blocks_dal() @@ -505,12 +515,35 @@ impl Aggregator { } else { priority_ops_proofs.push(Default::default()); } - } - + if is_gateway { + let logs = storage + .blocks_web3_dal() + .get_l2_to_l1_logs(batch.header.number) + .await + .map_err(EthSenderError::Dal)?; + let messages = storage + .blocks_web3_dal() + .get_l2_to_l1_messages(batch.header.number) + .await + .map_err(EthSenderError::Dal)?; + // let filtered_logs = logs.into_iter().filter(|log| !log.is_service).map(|log| UserL2ToL1Log(log)).collect(); + let message_root = storage + .blocks_dal() + .get_message_root(batch.header.number) + .await + .unwrap(); + all_logs.push(logs.clone().into_iter().map(UserL2ToL1Log).collect()); + all_messages.push(messages); + all_message_roots.push(message_root); + } + } // Ok(Some(ExecuteBatches { l1_batches, priority_ops_proofs, dependency_roots, + logs: all_logs, + messages: all_messages, + message_roots: all_message_roots, // })) } diff --git a/core/node/eth_sender/src/eth_tx_aggregator.rs b/core/node/eth_sender/src/eth_tx_aggregator.rs index 7b6536136b8b..46a18c249ffa 100644 --- a/core/node/eth_sender/src/eth_tx_aggregator.rs +++ 
b/core/node/eth_sender/src/eth_tx_aggregator.rs @@ -775,15 +775,15 @@ impl EthTxAggregator { op_restrictions.precommit_restriction = reason; } + let is_gateway = self.is_gateway(); + if gateway_migration_state == GatewayMigrationState::InProgress { let reason = Some("Gateway migration started"); op_restrictions.commit_restriction = reason; op_restrictions.precommit_restriction = reason; - // For the migration from gateway to L1, we need to wait for all blocks to be executed - if let None | Some(SettlementLayer::L1(_)) = self.settlement_layer { - op_restrictions.prove_restriction = reason; - op_restrictions.execute_restriction = reason; - } else { + // From V30, when migrating to or from gateway, we need to wait for all blocks to be executed, + // so there is no restriction on prove and execute operations + if let Some(SettlementLayer::Gateway(_)) = self.settlement_layer { // For the migration from gateway to L1, we need to ensure all batches containing interop roots get committed and executed. if !self .is_waiting_for_batches_with_interop_roots_to_be_committed(storage) @@ -810,10 +810,10 @@ impl EthTxAggregator { priority_tree_start_index, precommit_params.as_ref(), execution_delay, + is_gateway, // ) .await? { - let is_gateway = self.is_gateway(); let tx = self .save_eth_tx( storage, diff --git a/core/node/eth_sender/src/tester.rs b/core/node/eth_sender/src/tester.rs index 93083e9e1ab5..dbdd26627b9b 100644 --- a/core/node/eth_sender/src/tester.rs +++ b/core/node/eth_sender/src/tester.rs @@ -471,6 +471,9 @@ impl EthSenderTester { .map(l1_batch_with_metadata) .collect(), dependency_roots: vec![vec![], vec![]], + logs: vec![vec![], vec![]], + messages: vec![vec![vec![], vec![]]], + message_roots: vec![], })); self.next_l1_batch_number_to_execute += 1; self.save_operation(operation).await diff --git a/core/node/eth_sender/src/tests.rs b/core/node/eth_sender/src/tests.rs index 7b727930c508..694c4d1edeae 100644 --- a/core/node/eth_sender/src/tests.rs +++ b/core/node/eth_sender/src/tests.rs @@ -50,6 +50,9 @@ fn get_dummy_operation(number: u32) -> AggregatedOperation { }], priority_ops_proofs: Vec::new(), dependency_roots: vec![vec![], vec![]], + logs: vec![vec![], vec![]], + messages: vec![vec![vec![], vec![]]], + message_roots: vec![], })) } diff --git a/core/node/eth_watch/src/tests/mod.rs b/core/node/eth_watch/src/tests/mod.rs index 417675e4d5b8..8b4164a72382 100644 --- a/core/node/eth_watch/src/tests/mod.rs +++ b/core/node/eth_watch/src/tests/mod.rs @@ -912,6 +912,7 @@ async fn setup_batch_roots( i as u64, Default::default(), (ProtocolVersionId::latest() as u16 - 1).try_into().unwrap(), + SettlementLayer::for_tests(), ); connection .blocks_dal() diff --git a/core/node/genesis/src/lib.rs b/core/node/genesis/src/lib.rs index 175b2a3f96cb..3a258c928ff8 100644 --- a/core/node/genesis/src/lib.rs +++ b/core/node/genesis/src/lib.rs @@ -22,12 +22,13 @@ use zksync_types::{ fee_model::BatchFeeInput, protocol_upgrade::decode_genesis_upgrade_event, protocol_version::{L1VerifierConfig, ProtocolSemanticVersion}, + settlement::SettlementLayer, system_contracts::get_system_smart_contracts, u256_to_h256, web3::{BlockNumber, FilterBuilder}, zk_evm_types::LogQuery, AccountTreeId, Address, Bloom, L1BatchNumber, L1ChainId, L2BlockNumber, L2ChainId, - ProtocolVersion, ProtocolVersionId, StorageKey, StorageLog, H256, U256, + ProtocolVersion, ProtocolVersionId, SLChainId, StorageKey, StorageLog, H256, U256, }; use crate::utils::{ @@ -487,6 +488,8 @@ pub(crate) async fn
create_genesis_l1_batch_from_storage_logs_and_factory_deps( 0, base_system_contracts.hashes(), protocol_version.minor, + // TODO get proper chain id from config + SettlementLayer::L1(SLChainId(19)), ); let batch_fee_input = BatchFeeInput::pubdata_independent(0, 0, 0); diff --git a/core/node/node_sync/src/external_io.rs b/core/node/node_sync/src/external_io.rs index f90c8dbdc798..023cce1c79c8 100644 --- a/core/node/node_sync/src/external_io.rs +++ b/core/node/node_sync/src/external_io.rs @@ -322,6 +322,7 @@ impl StateKeeperIO for ExternalIO { fee_address: params.operator_address, fee_input: params.fee_input, pubdata_limit: params.pubdata_limit, + settlement_layer: params.settlement_layer, }) .await?; Ok(Some(params)) diff --git a/core/node/node_sync/src/fetcher.rs b/core/node/node_sync/src/fetcher.rs index a042e0deb7db..e9840c978fe7 100644 --- a/core/node/node_sync/src/fetcher.rs +++ b/core/node/node_sync/src/fetcher.rs @@ -4,8 +4,8 @@ use zksync_shared_metrics::{TxStage, APP_METRICS}; use zksync_state_keeper::io::{common::IoCursor, L1BatchParams, L2BlockParams}; use zksync_types::{ api::en::SyncBlock, block::L2BlockHasher, commitment::PubdataParams, fee_model::BatchFeeInput, - helpers::unix_timestamp_ms, Address, InteropRoot, L1BatchNumber, L2BlockNumber, - ProtocolVersionId, H256, + helpers::unix_timestamp_ms, settlement::SettlementLayer, Address, InteropRoot, L1BatchNumber, + L2BlockNumber, ProtocolVersionId, SLChainId, H256, }; use super::{ @@ -57,6 +57,7 @@ pub struct FetchedBlock { pub pubdata_params: PubdataParams, pub pubdata_limit: Option, pub interop_roots: Vec, + pub settlement_layer: Option, } impl FetchedBlock { @@ -112,6 +113,7 @@ impl TryFrom for FetchedBlock { pubdata_params, pubdata_limit: block.pubdata_limit, interop_roots: block.interop_roots.clone().unwrap_or_default(), + settlement_layer: block.settlement_layer, }) } } @@ -188,6 +190,10 @@ impl IoCursorExt for IoCursor { ), pubdata_params: block.pubdata_params, pubdata_limit: block.pubdata_limit, + // TODO check this default + settlement_layer: block + .settlement_layer + .unwrap_or(SettlementLayer::L1(SLChainId(10))), }, number: block.l1_batch_number, first_l2_block_number: block.number, diff --git a/core/node/node_sync/src/tests.rs b/core/node/node_sync/src/tests.rs index a84700e9ca98..6faa6bbcda1a 100644 --- a/core/node/node_sync/src/tests.rs +++ b/core/node/node_sync/src/tests.rs @@ -23,6 +23,7 @@ use zksync_types::{ block::{L2BlockHasher, UnsealedL1BatchHeader}, commitment::PubdataParams, fee_model::{BatchFeeInput, PubdataIndependentBatchFeeModelInput}, + settlement::SettlementLayer, snapshots::SnapshotRecoveryStatus, Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, Transaction, H256, }; @@ -140,6 +141,7 @@ impl StateKeeperHandles { Arc::new(NoopSealer), Arc::new(MockReadStorageFactory), None, + SettlementLayer::for_tests(), ); let state_keeper = builder.build(&stop_receiver).await.unwrap(); diff --git a/core/node/state_keeper/Cargo.toml b/core/node/state_keeper/Cargo.toml index 34e6b3b7943b..046cbbb44578 100644 --- a/core/node/state_keeper/Cargo.toml +++ b/core/node/state_keeper/Cargo.toml @@ -16,6 +16,7 @@ zksync_multivm.workspace = true zksync_types.workspace = true zksync_dal = { workspace = true, features = ["node_framework"] } zksync_health_check = { workspace = true, features = ["node_framework"] } +zksync_eth_client = { workspace = true, features = ["node_framework"] } zksync_state.workspace = true zksync_storage.workspace = true zksync_mempool.workspace = true @@ -23,7 +24,6 @@ 
zksync_shared_metrics.workspace = true zksync_config.workspace = true zksync_node_fee_model.workspace = true zksync_contracts.workspace = true -zksync_eth_client = { workspace = true, features = ["node_framework"] } zksync_protobuf.workspace = true zksync_node_genesis.workspace = true zksync_node_test_utils.workspace = true diff --git a/core/node/state_keeper/src/io/mempool.rs b/core/node/state_keeper/src/io/mempool.rs index b85280d39a73..1f143c69b7a9 100644 --- a/core/node/state_keeper/src/io/mempool.rs +++ b/core/node/state_keeper/src/io/mempool.rs @@ -72,7 +72,7 @@ pub struct MempoolIO { pubdata_type: PubdataType, pubdata_limit: u64, last_batch_protocol_version: Option, - settlement_layer: Option, + settlement_layer: SettlementLayer, } #[async_trait] @@ -231,7 +231,7 @@ impl StateKeeperIO for MempoolIO { let gateway_migration_state = self.gateway_status(&mut storage).await; // We only import interop roots when settling on gateway, but stop doing so when migration is in progress. - let interop_roots = if matches!(self.settlement_layer, Some(SettlementLayer::Gateway(_))) + let interop_roots = if matches!(self.settlement_layer, SettlementLayer::Gateway(_)) && gateway_migration_state == GatewayMigrationState::NotInProgress { storage @@ -508,7 +508,7 @@ impl MempoolIO { l2_da_validator_address: Option
, l2_da_commitment_scheme: Option, pubdata_type: PubdataType, - settlement_layer: Option, + settlement_layer: SettlementLayer, ) -> anyhow::Result { Ok(Self { mempool, @@ -591,6 +591,7 @@ impl MempoolIO { ), pubdata_params: self.pubdata_params(protocol_version)?, pubdata_limit: unsealed_storage_batch.pubdata_limit, + settlement_layer: self.settlement_layer, })); } @@ -685,6 +686,7 @@ impl MempoolIO { fee_address: self.fee_account, fee_input: self.filter.fee_input, pubdata_limit, + settlement_layer: self.settlement_layer, }) .await?; @@ -697,17 +699,16 @@ impl MempoolIO { let gateway_migration_state = self.gateway_status(&mut storage).await; let limit = get_bootloader_max_interop_roots_in_batch(protocol_version.into()); // We only import interop roots when settling on gateway, but stop doing so when migration is in progress. - let interop_roots = - if matches!(self.settlement_layer, Some(SettlementLayer::Gateway(_))) - && gateway_migration_state == GatewayMigrationState::NotInProgress - { - storage - .interop_root_dal() - .get_new_interop_roots(limit) - .await? - } else { - vec![] - }; + let interop_roots = if matches!(self.settlement_layer, SettlementLayer::Gateway(_)) + && gateway_migration_state == GatewayMigrationState::NotInProgress + { + storage + .interop_root_dal() + .get_new_interop_roots(limit) + .await? + } else { + vec![] + }; L2BlockParams::new_raw(timestamp_ms, 1, interop_roots) }; @@ -720,6 +721,7 @@ impl MempoolIO { first_l2_block, pubdata_params: self.pubdata_params(protocol_version)?, pubdata_limit, + settlement_layer: self.settlement_layer, })); } Ok(None) @@ -732,7 +734,7 @@ impl MempoolIO { .await .unwrap(); - GatewayMigrationState::from_sl_and_notification(self.settlement_layer, notification) + GatewayMigrationState::from_sl_and_notification(Some(self.settlement_layer), notification) } #[cfg(test)] diff --git a/core/node/state_keeper/src/io/mod.rs b/core/node/state_keeper/src/io/mod.rs index efe1bf8ad27c..97a0ff0406a7 100644 --- a/core/node/state_keeper/src/io/mod.rs +++ b/core/node/state_keeper/src/io/mod.rs @@ -5,8 +5,8 @@ use zksync_contracts::BaseSystemContracts; use zksync_multivm::interface::{L1BatchEnv, SystemEnv}; use zksync_types::{ block::L2BlockExecutionData, commitment::PubdataParams, fee_model::BatchFeeInput, - protocol_upgrade::ProtocolUpgradeTx, Address, InteropRoot, L1BatchNumber, L2ChainId, - ProtocolVersionId, Transaction, H256, + protocol_upgrade::ProtocolUpgradeTx, settlement::SettlementLayer, Address, InteropRoot, + L1BatchNumber, L2ChainId, ProtocolVersionId, Transaction, H256, }; use zksync_vm_executor::storage::l1_batch_params; @@ -127,6 +127,7 @@ pub struct L1BatchParams { pub pubdata_params: PubdataParams, /// Pubdata limit for the batch. It's set only if protocol version >= v29. 
pub pubdata_limit: Option, + pub settlement_layer: SettlementLayer, } #[derive(Debug)] @@ -159,6 +160,7 @@ impl L1BatchParams { self.protocol_version, self.first_l2_block.virtual_blocks, chain_id, + self.settlement_layer, self.first_l2_block.interop_roots.clone(), ); diff --git a/core/node/state_keeper/src/io/seal_logic/l2_block_seal_subtasks.rs b/core/node/state_keeper/src/io/seal_logic/l2_block_seal_subtasks.rs index be927204b673..a1a62d4cc2c0 100644 --- a/core/node/state_keeper/src/io/seal_logic/l2_block_seal_subtasks.rs +++ b/core/node/state_keeper/src/io/seal_logic/l2_block_seal_subtasks.rs @@ -514,6 +514,7 @@ mod tests { commitment::PubdataParams, h256_to_u256, l2_to_l1_log::{L2ToL1Log, UserL2ToL1Log}, + settlement::SettlementLayer, AccountTreeId, Address, L1BatchNumber, ProtocolVersionId, StorageKey, StorageLog, StorageLogKind, StorageLogWithPreviousValue, }; @@ -595,6 +596,7 @@ mod tests { pubdata_params: PubdataParams::genesis(), insert_header: false, // Doesn't matter for this test. rolling_txs_hash: Default::default(), + settlement_layer: SettlementLayer::for_tests(), }; // Run. diff --git a/core/node/state_keeper/src/io/seal_logic/mod.rs b/core/node/state_keeper/src/io/seal_logic/mod.rs index 18eac2f0350a..f2b290911f2f 100644 --- a/core/node/state_keeper/src/io/seal_logic/mod.rs +++ b/core/node/state_keeper/src/io/seal_logic/mod.rs @@ -134,6 +134,7 @@ impl UpdatesManager { fee_address: self.fee_account_address(), batch_fee_input: self.batch_fee_input(), pubdata_limit: self.pubdata_limit(), + settlement_layer: self.settlement_layer(), }; let final_bootloader_memory = finished_batch diff --git a/core/node/state_keeper/src/io/tests/mod.rs b/core/node/state_keeper/src/io/tests/mod.rs index 1f905e901bb2..27b117dd15be 100644 --- a/core/node/state_keeper/src/io/tests/mod.rs +++ b/core/node/state_keeper/src/io/tests/mod.rs @@ -20,6 +20,7 @@ use zksync_types::{ l2::L2Tx, protocol_upgrade::ProtocolUpgradeTx, protocol_version::ProtocolSemanticVersion, + settlement::SettlementLayer, AccountTreeId, Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersion, ProtocolVersionId, StorageKey, TransactionTimeRangeConstraint, H256, U256, }; @@ -286,6 +287,7 @@ fn create_block_seal_command( pubdata_params: PubdataParams::genesis(), insert_header: true, rolling_txs_hash: Default::default(), + settlement_layer: SettlementLayer::for_tests(), } } @@ -597,6 +599,7 @@ async fn l2_block_processing_after_snapshot_recovery(commitment_mode: L1BatchCom BASE_SYSTEM_CONTRACTS.clone(), &cursor, previous_batch_hash, + SettlementLayer::for_tests(), ); let version = batch_init_params.system_env.version; let mut updates = UpdatesManager::new( diff --git a/core/node/state_keeper/src/keeper.rs b/core/node/state_keeper/src/keeper.rs index 7e259d28c0d5..d730020e8bb7 100644 --- a/core/node/state_keeper/src/keeper.rs +++ b/core/node/state_keeper/src/keeper.rs @@ -15,8 +15,9 @@ use zksync_shared_metrics::{TxStage, APP_METRICS}; use zksync_state::{OwnedStorage, ReadStorageFactory}; use zksync_types::{ block::L2BlockExecutionData, commitment::PubdataParams, l2::TransactionType, - protocol_upgrade::ProtocolUpgradeTx, protocol_version::ProtocolVersionId, try_stoppable, - utils::display_timestamp, L1BatchNumber, L2BlockNumber, OrStopped, StopContext, Transaction, + protocol_upgrade::ProtocolUpgradeTx, protocol_version::ProtocolVersionId, + settlement::SettlementLayer, try_stoppable, utils::display_timestamp, L1BatchNumber, + L2BlockNumber, OrStopped, StopContext, Transaction, }; use 
zksync_vm_executor::whitelist::DeploymentTxFilter; @@ -40,6 +41,7 @@ struct InitializedBatchState { batch_executor: Box>, protocol_upgrade_tx: Option, next_block_should_be_fictive: bool, + _settlement_layer: SettlementLayer, } #[derive(Debug)] @@ -127,6 +129,7 @@ pub struct StateKeeperBuilder { health_updater: HealthUpdater, deployment_tx_filter: Option, leader_rotation: bool, + settlement_layer: SettlementLayer, } /// Helper struct that encapsulates some private state keeper methods. @@ -140,6 +143,7 @@ pub(super) struct StateKeeperInner { health_updater: HealthUpdater, deployment_tx_filter: Option, leader_rotation: bool, + settlement_layer: SettlementLayer, } impl From for StateKeeperInner { @@ -153,6 +157,7 @@ impl From for StateKeeperInner { health_updater: b.health_updater, deployment_tx_filter: b.deployment_tx_filter, leader_rotation: b.leader_rotation, + settlement_layer: b.settlement_layer, } } } @@ -165,6 +170,7 @@ impl StateKeeperBuilder { sealer: Arc, storage_factory: Arc, deployment_tx_filter: Option, + settlement_layer: SettlementLayer, ) -> Self { Self { io: sequencer, @@ -175,6 +181,7 @@ impl StateKeeperBuilder { health_updater: ReactiveHealthCheck::new("state_keeper").1, deployment_tx_filter, leader_rotation: false, + settlement_layer, } } @@ -267,6 +274,7 @@ impl StateKeeperBuilder { ) .await?; + let settlement_layer = inner.settlement_layer; Ok(StateKeeper { inner, batch_state: BatchState::Init(Box::new(InitializedBatchState { @@ -274,6 +282,7 @@ impl StateKeeperBuilder { batch_executor, protocol_upgrade_tx, next_block_should_be_fictive: false, + _settlement_layer: settlement_layer, })), }) } @@ -330,6 +339,7 @@ impl StateKeeperInner { batch_executor, protocol_upgrade_tx, next_block_should_be_fictive: false, + _settlement_layer: self.settlement_layer, }) } diff --git a/core/node/state_keeper/src/node/mempool_io.rs b/core/node/state_keeper/src/node/mempool_io.rs index baaf6e5c5ff5..385a623fd366 100644 --- a/core/node/state_keeper/src/node/mempool_io.rs +++ b/core/node/state_keeper/src/node/mempool_io.rs @@ -151,7 +151,7 @@ impl WiringLayer for MempoolIOLayer { input.l2_contracts.0.da_validator_addr, input.zk_chain_on_chain_config.0.l2_da_commitment_scheme, self.pubdata_type, - input.settlement_mode.settlement_layer_for_sending_txs(), + input.settlement_mode.settlement_layer(), )?; // Create sealer. 
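Note (not part of the diff): with `settlement_layer` on `MempoolIO` now a required `SettlementLayer` instead of an `Option<SettlementLayer>`, call sites drop the `Some(..)` wrapper when matching on the variant, as the mempool IO hunks above do. A minimal sketch under that assumption; the helper name and the `migration_in_progress` flag are hypothetical, only the `SettlementLayer` import path and its `Gateway` variant come from the changes above.

use zksync_types::settlement::SettlementLayer;

/// Hypothetical helper mirroring the interop-root gating in `MempoolIO` above:
/// interop roots are imported only when settling on Gateway and no gateway
/// migration is currently in progress.
fn should_import_interop_roots(settlement_layer: SettlementLayer, migration_in_progress: bool) -> bool {
    matches!(settlement_layer, SettlementLayer::Gateway(_)) && !migration_in_progress
}

The same simplification applies to `GatewayMigrationState::from_sl_and_notification`, which still expects an `Option` and is therefore called with `Some(self.settlement_layer)` in the hunk above.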
diff --git a/core/node/state_keeper/src/node/state_keeper.rs b/core/node/state_keeper/src/node/state_keeper.rs index c7403ef8b32e..3aafd83fd599 100644 --- a/core/node/state_keeper/src/node/state_keeper.rs +++ b/core/node/state_keeper/src/node/state_keeper.rs @@ -2,6 +2,7 @@ use std::{path::PathBuf, sync::Arc}; use anyhow::Context; use zksync_dal::node::{MasterPool, PoolResource, ReplicaPool}; +use zksync_eth_client::web3_decl::node::SettlementModeResource; use zksync_health_check::AppHealthCheck; use zksync_node_framework::{ service::ShutdownHook, task::TaskKind, FromContext, IntoContext, StopReceiver, Task, TaskId, @@ -33,6 +34,7 @@ pub struct Input { shared_allow_list: Option, #[context(default)] app_health: Arc, + settlement_mode: SettlementModeResource, } #[derive(Debug, IntoContext)] @@ -103,6 +105,7 @@ impl WiringLayer for StateKeeperLayer { sealer, Arc::new(storage_factory), input.shared_allow_list.map(DeploymentTxFilter::new), + input.settlement_mode.settlement_layer(), ); input diff --git a/core/node/state_keeper/src/testonly/test_batch_executor.rs b/core/node/state_keeper/src/testonly/test_batch_executor.rs index a4b581f9e156..6833edcb1149 100644 --- a/core/node/state_keeper/src/testonly/test_batch_executor.rs +++ b/core/node/state_keeper/src/testonly/test_batch_executor.rs @@ -28,8 +28,8 @@ use zksync_node_test_utils::create_l2_transaction; use zksync_state::{interface::StorageView, OwnedStorage, ReadStorageFactory}; use zksync_types::{ commitment::PubdataParams, fee_model::BatchFeeInput, l2_to_l1_log::UserL2ToL1Log, - protocol_upgrade::ProtocolUpgradeTx, Address, L1BatchNumber, L2BlockNumber, L2ChainId, - OrStopped, ProtocolVersionId, Transaction, H256, + protocol_upgrade::ProtocolUpgradeTx, settlement::SettlementLayer, Address, L1BatchNumber, + L2BlockNumber, L2ChainId, OrStopped, ProtocolVersionId, Transaction, H256, }; use crate::{ @@ -246,6 +246,7 @@ impl TestScenario { sealer, Arc::new(MockReadStorageFactory), None, + SettlementLayer::L1(zksync_types::SLChainId(69)), ); if !block_numbers_to_rollback.is_empty() { builder = builder.with_leader_rotation(true); @@ -819,6 +820,7 @@ impl StateKeeperIO for TestIO { first_l2_block: L2BlockParams::new(self.timestamp * 1000), pubdata_params: PubdataParams::genesis(), pubdata_limit: Some(100_000), + settlement_layer: SettlementLayer::for_tests(), }; self.l2_block_number += 1; self.timestamp += 1; diff --git a/core/node/state_keeper/src/tests/mod.rs b/core/node/state_keeper/src/tests/mod.rs index 01e3efaaadcd..3c8be5480491 100644 --- a/core/node/state_keeper/src/tests/mod.rs +++ b/core/node/state_keeper/src/tests/mod.rs @@ -19,6 +19,7 @@ use zksync_node_test_utils::{create_l2_transaction, default_l1_batch_env, defaul use zksync_types::{ block::{L2BlockExecutionData, L2BlockHasher}, commitment::PubdataParams, + settlement::SettlementLayer, u256_to_h256, AccountTreeId, Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, StorageKey, StorageLog, StorageLogKind, StorageLogWithPreviousValue, Transaction, H256, U256, @@ -421,6 +422,7 @@ async fn load_upgrade_tx() { Arc::new(sealer), Arc::new(MockReadStorageFactory), None, + SettlementLayer::for_tests(), )); // Since the version hasn't changed, and we are not using shared bridge, we should not load any diff --git a/core/node/state_keeper/src/updates/mod.rs b/core/node/state_keeper/src/updates/mod.rs index 481285ae2a6c..df75b11e3617 100644 --- a/core/node/state_keeper/src/updates/mod.rs +++ b/core/node/state_keeper/src/updates/mod.rs @@ -12,6 +12,7 @@ use zksync_types::{ 
block::{build_bloom, L2BlockHeader}, commitment::PubdataParams, fee_model::BatchFeeInput, + settlement::SettlementLayer, Address, BloomInput, L1BatchNumber, L2BlockNumber, ProtocolVersionId, Transaction, H256, }; @@ -51,6 +52,7 @@ pub struct UpdatesManager { previous_batch_protocol_version: ProtocolVersionId, previous_batch_timestamp: u64, sync_block_data_and_header_persistence: bool, + settlement_layer: SettlementLayer, // committed state committed_updates: CommittedUpdates, @@ -94,6 +96,7 @@ impl UpdatesManager { previous_batch_protocol_version, previous_batch_timestamp, sync_block_data_and_header_persistence, + settlement_layer: batch_init_params.l1_batch_env.settlement_layer, committed_updates: CommittedUpdates::new(), last_committed_l2_block_number: L2BlockNumber( batch_init_params.l1_batch_env.first_l2_block.number, @@ -189,6 +192,7 @@ impl UpdatesManager { insert_header: self.sync_block_data_and_header_persistence || (tx_count_in_last_block == 0), rolling_txs_hash: self.rolling_tx_hash_updates.rolling_hash, + settlement_layer: self.settlement_layer, } } @@ -434,6 +438,10 @@ impl UpdatesManager { self.protocol_version } + pub fn settlement_layer(&self) -> SettlementLayer { + self.settlement_layer + } + pub fn previous_batch_protocol_version(&self) -> ProtocolVersionId { self.previous_batch_protocol_version } @@ -502,6 +510,7 @@ pub struct L2BlockSealCommand { pub pubdata_params: PubdataParams, pub insert_header: bool, pub rolling_txs_hash: H256, + pub settlement_layer: SettlementLayer, } #[cfg(test)] diff --git a/core/node/test_utils/src/lib.rs b/core/node/test_utils/src/lib.rs index caa59a4c8807..fe5aa3ed2b33 100644 --- a/core/node/test_utils/src/lib.rs +++ b/core/node/test_utils/src/lib.rs @@ -17,6 +17,7 @@ use zksync_types::{ l2::L2Tx, l2_to_l1_log::{L2ToL1Log, UserL2ToL1Log}, protocol_version::ProtocolSemanticVersion, + settlement::SettlementLayer, snapshots::{SnapshotRecoveryStatus, SnapshotStorageLog}, transaction_request::PaymasterParams, AccountTreeId, Address, K256PrivateKey, L1BatchNumber, L2BlockNumber, L2ChainId, Nonce, @@ -63,6 +64,7 @@ pub fn default_l1_batch_env(number: u32, timestamp: u64, fee_account: Address) - fair_pubdata_price: 1, l1_gas_price: 1, }), + settlement_layer: SettlementLayer::L1(zksync_types::SLChainId(79)), } } @@ -103,6 +105,7 @@ pub fn create_l1_batch(number: u32) -> L1BatchHeader { evm_emulator: None, }, ProtocolVersionId::latest(), + SettlementLayer::for_tests(), ); header.l1_tx_count = 3; header.l2_tx_count = 5; @@ -244,6 +247,7 @@ impl Snapshot { l1_batch.0.into(), contracts.hashes(), protocol_version, + SettlementLayer::for_tests(), ); let l2_block = L2BlockHeader { number: l2_block, diff --git a/core/node/vm_runner/src/tests/mod.rs b/core/node/vm_runner/src/tests/mod.rs index ee8187bfb540..f2688d16c4f0 100644 --- a/core/node/vm_runner/src/tests/mod.rs +++ b/core/node/vm_runner/src/tests/mod.rs @@ -16,6 +16,7 @@ use zksync_types::{ fee::Fee, get_intrinsic_constants, h256_to_u256, l2::L2Tx, + settlement::SettlementLayer, u256_to_h256, utils::storage_key_for_standard_token_balance, AccountTreeId, Address, Execute, L1BatchNumber, L2BlockNumber, ProtocolVersionId, StorageKey, @@ -248,6 +249,7 @@ async fn store_l1_batches( l2_block_number.0 as u64, // Matches the first L2 block in the batch genesis_params.base_system_contracts().hashes(), ProtocolVersionId::default(), + SettlementLayer::for_tests(), ); conn.blocks_dal() .insert_l1_batch(header.to_unsealed_header()) diff --git a/core/node/vm_runner/src/tests/output_handler.rs 
b/core/node/vm_runner/src/tests/output_handler.rs index ed5b85d2a82c..463d9be9c1a8 100644 --- a/core/node/vm_runner/src/tests/output_handler.rs +++ b/core/node/vm_runner/src/tests/output_handler.rs @@ -7,7 +7,7 @@ use tokio::{ use zksync_contracts::{BaseSystemContracts, SystemContractCode}; use zksync_dal::{ConnectionPool, Core}; use zksync_state::interface::StorageViewCache; -use zksync_types::L1BatchNumber; +use zksync_types::{settlement::SettlementLayer, L1BatchNumber}; use zksync_vm_interface::{FinishedL1Batch, L1BatchEnv, L2BlockEnv, SystemEnv, TxExecutionMode}; use crate::{ @@ -54,6 +54,7 @@ impl OutputHandlerTester { max_virtual_blocks_to_create: 0, interop_roots: vec![], }, + settlement_layer: SettlementLayer::for_tests(), }; let system_env = SystemEnv { zk_porter_available: false, diff --git a/core/tests/gateway-migration-test/hardhat.config.ts b/core/tests/gateway-migration-test/hardhat.config.ts new file mode 100644 index 000000000000..20f3ecd4f4f7 --- /dev/null +++ b/core/tests/gateway-migration-test/hardhat.config.ts @@ -0,0 +1,32 @@ +import '@matterlabs/hardhat-zksync-solc'; +import '@nomiclabs/hardhat-vyper'; +import '@matterlabs/hardhat-zksync-vyper'; + +export default { + zksolc: { + version: '1.5.10', + compilerSource: 'binary', + settings: { + enableEraVMExtensions: true + } + }, + zkvyper: { + version: '1.5.4', + compilerSource: 'binary' + }, + networks: { + hardhat: { + zksync: true + } + }, + solidity: { + version: '0.8.26', + eraVersion: '1.0.1', + settings: { + evmVersion: 'cancun' + } + }, + vyper: { + version: '0.3.10' + } +}; diff --git a/core/tests/gateway-migration-test/tests/migration.test.ts b/core/tests/gateway-migration-test/tests/migration.test.ts index c57ca7c85ea5..b483be730e01 100644 --- a/core/tests/gateway-migration-test/tests/migration.test.ts +++ b/core/tests/gateway-migration-test/tests/migration.test.ts @@ -9,6 +9,8 @@ import { ZeroAddress } from 'ethers'; import { loadConfig, shouldLoadConfigFromFile } from 'utils/build/file-configs'; import path from 'path'; import { logsTestPath } from 'utils/build/logs'; +import { getEcosystemContracts } from 'utils/build/tokens'; +import { getMainWalletPk } from 'highlevel-test-tools/src/wallets'; async function logsPath(name: string): Promise { return await logsTestPath(fileConfig.chain, 'logs/migration/', name); @@ -130,6 +132,25 @@ describe('Migration From/To gateway test', function () { const balance = await alice.getBalance(); expect(balance === depositAmount * 2n, 'Incorrect balance after deposits').to.be.true; + const tokenDetails = tester.token; + const l1Erc20ABI = ['function mint(address to, uint256 amount)']; + const l1Erc20Contract = new ethers.Contract(tokenDetails.address, l1Erc20ABI, tester.ethWallet); + await (await l1Erc20Contract.mint(tester.syncWallet.address, depositAmount)).wait(); + + const thirdDepositHandle = await tester.syncWallet.deposit({ + token: tokenDetails.address, + amount: depositAmount, + approveERC20: true, + approveBaseERC20: true, + to: alice.address + }); + await thirdDepositHandle.wait(); + while ((await tester.web3Provider.getL1BatchNumber()) <= initialL1BatchNumber) { + await utils.sleep(1); + } + + // kl todo add an L2 token and withdrawal here, to check token balance migration properly. 
+ // Wait for at least one new committed block let newBlocksCommitted = await l1MainContract.getTotalBatchesCommitted(); let tryCount = 0; @@ -191,11 +212,56 @@ describe('Migration From/To gateway test', function () { }); step('Wait for block finalization', async () => { + await utils.spawn(`zkstack server wait --ignore-prerequisites --verbose --chain ${fileConfig.chain}`); // Execute an L2 transaction const txHandle = await checkedRandomTransfer(alice, 1n); await txHandle.waitFinalize(); }); + step('Migrate token balances', async () => { + if (direction == 'TO') { + await utils.spawn( + `zkstack chain gateway migrate-token-balances --to-gateway true --gateway-chain-name gateway --chain ${fileConfig.chain}` + ); + } else { + await utils.spawn( + `zkstack chain gateway migrate-token-balances --to-gateway false --gateway-chain-name gateway --chain ${fileConfig.chain}` + ); + } + }); + + step('Check token settlement layers', async () => { + const tokenDetails = tester.token; + const ecosystemContracts = await getEcosystemContracts(tester.syncWallet); + let assetId = await ecosystemContracts.nativeTokenVault.assetId(tokenDetails.address); + const chainId = (await tester.syncWallet.provider!.getNetwork()).chainId; + const migrationNumberL1 = await ecosystemContracts.assetTracker.assetMigrationNumber(chainId, assetId); + + await utils.spawn(`zkstack dev init-test-wallet --chain gateway`); + + const gatewayInfo = getGatewayInfo(pathToHome, fileConfig.chain!); + const gatewayEcosystemContracts = await getEcosystemContracts( + new zksync.Wallet(getMainWalletPk('gateway'), gatewayInfo?.gatewayProvider!, tester.syncWallet.providerL1) + ); + const migrationNumberGateway = await gatewayEcosystemContracts.assetTracker.assetMigrationNumber( + chainId, + assetId + ); + + // let expectedL1AssetSettlementLayer = (await tester.ethWallet.provider!.getNetwork()).chainId; + // let expectedGatewayAssetSettlementLayer = 0n; + if (direction == 'TO') { + // expectedL1AssetSettlementLayer = BigInt(gatewayInfo?.gatewayChainId!); + // expectedGatewayAssetSettlementLayer = BigInt(fileConfig.chain!); + } else { + return; // kl todo add migrate back from gateway + } + // expect(l1AssetSettlementLayer === fileConfig.chain).to.be.true; + // expect(gatewayAssetSettlementLayer === gatewayChain).to.be.true; + expect(migrationNumberL1 === migrationNumberGateway).to.be.true; + console.log('migrationNumberL1', migrationNumberL1); + }); + step('Execute transactions after simple restart', async () => { // Stop server. 
await mainNodeSpawner.killAndSpawnMainNode(); diff --git a/core/tests/gateway-migration-test/tests/tester.ts b/core/tests/gateway-migration-test/tests/tester.ts index 68b2a8d62fe0..653de162b3bf 100644 --- a/core/tests/gateway-migration-test/tests/tester.ts +++ b/core/tests/gateway-migration-test/tests/tester.ts @@ -1,6 +1,8 @@ +import * as path from 'path'; import * as ethers from 'ethers'; import * as zksync from 'zksync-ethers'; import { getMainWalletPk } from 'highlevel-test-tools/src/wallets'; +import { L1Token, getToken } from 'utils/src/tokens'; export class Tester { public runningFee: Map; @@ -8,13 +10,16 @@ export class Tester { public ethProvider: ethers.Provider, public ethWallet: ethers.Wallet, public syncWallet: zksync.Wallet, - public web3Provider: zksync.Provider + public web3Provider: zksync.Provider, + public token: L1Token ) { this.runningFee = new Map(); } // prettier-ignore static async init(ethProviderAddress: string, web3JsonRpc: string) { + const pathToHome = path.join(__dirname, '../../../..'); + const ethProvider = new ethers.JsonRpcProvider(ethProviderAddress); const chainName = process.env.CHAIN_NAME!!; @@ -45,7 +50,11 @@ export class Tester { console.log(`Canceled ${cancellationTxs.length} pending transactions`); } - return new Tester(ethProvider, ethWallet, syncWallet, web3Provider); + const baseTokenAddress = await web3Provider.getBaseTokenContractAddress(); + + const { token, } = getToken(pathToHome, baseTokenAddress); + + return new Tester(ethProvider, ethWallet, syncWallet, web3Provider, token); } emptyWallet() { diff --git a/core/tests/highlevel-test-tools/src/gateway.ts b/core/tests/highlevel-test-tools/src/gateway.ts index 0965270ee577..128be9c65150 100644 --- a/core/tests/highlevel-test-tools/src/gateway.ts +++ b/core/tests/highlevel-test-tools/src/gateway.ts @@ -1,5 +1,6 @@ import { executeCommand } from './execute-command'; import { FileMutex } from './file-mutex'; +import { startServer } from './start-server'; /** * Global mutex for gateway migration to prevent concurrent migrations @@ -37,6 +38,27 @@ export async function migrateToGatewayIfNeeded(chainName: string): Promise ); console.log(`✅ Successfully migrated chain ${chainName} to gateway`); + + let server = await startServer(chainName); + + await executeCommand( + 'zkstack', + [ + 'chain', + 'gateway', + 'migrate-token-balances ', + '--to-gateway', + '--chain', + chainName, + '--gateway-chain-name', + 'gateway' + ], + chainName, + 'gateway_token_balance_migration' + ); + + await server.kill(); + console.log(`✅ Successfully migrated token balance of chain ${chainName} to gateway`); } finally { // Always release the mutex gatewayMutex.release(); diff --git a/core/tests/highlevel-test-tools/src/run-integration-tests.ts b/core/tests/highlevel-test-tools/src/run-integration-tests.ts index 193ab6f20f75..a403382b84d0 100644 --- a/core/tests/highlevel-test-tools/src/run-integration-tests.ts +++ b/core/tests/highlevel-test-tools/src/run-integration-tests.ts @@ -95,12 +95,12 @@ export async function runIntegrationTests( export async function feesTest(chainName: string): Promise { await initTestWallet(chainName); - await runTest('fees', chainName, undefined, ['--no-kill']); + // await runTest('fees', chainName, undefined, ['--no-kill']); } export async function revertTest(chainName: string): Promise { await initTestWallet(chainName); - await runTest('revert', chainName, undefined, ['--no-kill', '--ignore-prerequisites']); + // await runTest('revert', chainName, undefined, ['--no-kill', 
'--ignore-prerequisites']); } export async function upgradeTest(chainName: string): Promise { @@ -110,12 +110,12 @@ export async function upgradeTest(chainName: string): Promise { export async function snapshotsRecoveryTest(chainName: string): Promise { await initTestWallet(chainName); - await runTest('recovery', chainName, undefined, ['--snapshot', '--ignore-prerequisites', '--verbose']); + // await runTest('recovery', chainName, undefined, ['--snapshot', '--ignore-prerequisites', '--verbose']); } export async function genesisRecoveryTest(chainName: string): Promise { await initTestWallet(chainName); - await runTest('recovery', chainName, undefined, ['--no-kill', '--ignore-prerequisites', '--verbose']); + // await runTest('recovery', chainName, undefined, ['--no-kill', '--ignore-prerequisites', '--verbose']); } export async function enIntegrationTests(chainName: string, secondChainName?: string | undefined): Promise { diff --git a/core/tests/highlevel-test-tools/tests/en-integration-test.test.ts b/core/tests/highlevel-test-tools/tests/en-integration-test.test.ts index d00e49d8b592..c1840284cce7 100644 --- a/core/tests/highlevel-test-tools/tests/en-integration-test.test.ts +++ b/core/tests/highlevel-test-tools/tests/en-integration-test.test.ts @@ -1,21 +1,16 @@ import { describe, it } from 'vitest'; -import { createChainAndStartServer, TESTED_CHAIN_TYPE } from '../src'; -import { enIntegrationTests } from '../src/run-integration-tests'; +import { TESTED_CHAIN_TYPE } from '../src'; +// import { enIntegrationTests } from '../src/run-integration-tests'; describe('External Node Integration tests Test', () => { it(`for ${TESTED_CHAIN_TYPE} chain`, async () => { - const testChain = await createChainAndStartServer(TESTED_CHAIN_TYPE, 'External Node Integration tests Test'); + // const testChain = await createChainAndStartServer(TESTED_CHAIN_TYPE, 'External Node Integration tests Test'); // Define some chain B used for interop tests - const testSecondChain = await createChainAndStartServer('era', 'External Node Integration tests'); - - await testChain.generateRealisticLoad(); - - await testChain.waitForAllBatchesToBeExecuted(); - - await testChain.initExternalNode(); - - await testChain.runExternalNode(); - - await enIntegrationTests(testChain.chainName, testSecondChain.chainName); + // const testSecondChain = await createChainAndStartServer('era', 'External Node Integration tests'); + // await testChain.generateRealisticLoad(); + // await testChain.waitForAllBatchesToBeExecuted(); + // await testChain.initExternalNode(); + // await testChain.runExternalNode(); + // await enIntegrationTests(testChain.chainName, testSecondChain.chainName); }); }); diff --git a/core/tests/ts-integration/src/constants.ts b/core/tests/ts-integration/src/constants.ts index b814013e8772..0e95bb5579fd 100644 --- a/core/tests/ts-integration/src/constants.ts +++ b/core/tests/ts-integration/src/constants.ts @@ -13,7 +13,13 @@ export const L2_NATIVE_TOKEN_VAULT_ADDRESS = '0x00000000000000000000000000000000 export const L2_MESSAGE_ROOT_ADDRESS = '0x0000000000000000000000000000000000010005'; export const L2_INTEROP_ROOT_STORAGE_ADDRESS = '0x0000000000000000000000000000000000010008'; export const L2_MESSAGE_VERIFICATION_ADDRESS = '0x0000000000000000000000000000000000010009'; +export const L2_CHAIN_ASSET_HANDLER_ADDRESS = '0x000000000000000000000000000000000001000A'; +export const L2_INTEROP_CENTER_ADDRESS = '0x000000000000000000000000000000000001000B'; +export const L2_INTEROP_HANDLER_ADDRESS = 
'0x000000000000000000000000000000000001000C'; +export const L2_ASSET_TRACKER_ADDRESS = '0x000000000000000000000000000000000001000D'; +// System contract addresses +export const SYSTEM_CONTEXT_ADDRESS = '0x000000000000000000000000000000000000800b'; export const DEPLOYER_SYSTEM_CONTRACT_ADDRESS = '0x0000000000000000000000000000000000008006'; export const L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR = '0x0000000000000000000000000000000000008008'; export const EMPTY_STRING_KECCAK = '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470'; @@ -26,14 +32,27 @@ export const L2_LOG_STRING = export const ARTIFACTS_PATH = '../../../contracts/l1-contracts/out'; export const SYSTEM_ARTIFACTS_PATH = '../../../contracts/system-contracts/zkout'; +export const INTEROP_CALL_ABI = + 'tuple(bytes1 version, bool shadowAccount, address to, address from, uint256 value, bytes data)'; +export const INTEROP_BUNDLE_ABI = + 'tuple(bytes1 version, uint256 sourceChainId, uint256 destinationChainId, bytes32 interopBundleSalt, tuple(bytes1 version, bool shadowAccount, address to, address from, uint256 value, bytes data)[] calls, (bytes executionAddress, bytes unbundlerAddress) bundleAttributes)'; + +export const MESSAGE_INCLUSION_PROOF_ABI = + 'tuple(uint256 chainId, uint256 l1BatchNumber, uint256 l2MessageIndex, tuple(uint16 txNumberInBatch, address sender, bytes data) message, bytes32[] proof)'; + // Read contract artifacts function readContract(path: string, fileName: string, contractName?: string) { contractName = contractName || fileName; return JSON.parse(fs.readFileSync(`${path}/${fileName}.sol/${contractName}.json`, { encoding: 'utf-8' })); } export const ArtifactBridgeHub = readContract(`${ARTIFACTS_PATH}`, 'Bridgehub'); +export const ArtifactInteropCenter = readContract(`${ARTIFACTS_PATH}`, 'InteropCenter'); +export const ArtifactInteropHandler = readContract(`${ARTIFACTS_PATH}`, 'InteropHandler'); export const ArtifactL2InteropRootStorage = readContract(`${SYSTEM_ARTIFACTS_PATH}`, 'L2InteropRootStorage'); export const ArtifactL2MessageVerification = readContract(`${ARTIFACTS_PATH}`, 'L2MessageVerification'); +export const ArtifactIERC7786Attributes = readContract(`${ARTIFACTS_PATH}`, 'IERC7786Attributes'); export const ArtifactNativeTokenVault = readContract(`${ARTIFACTS_PATH}`, 'L2NativeTokenVault'); export const ArtifactMintableERC20 = readContract('../../../contracts/l1-contracts/zkout', 'TestnetERC20Token'); export const ArtifactL1AssetRouter = readContract(`${ARTIFACTS_PATH}`, 'L1AssetRouter'); +export const ArtifactL1AssetTracker = readContract(`${ARTIFACTS_PATH}`, 'L1AssetTracker'); +export const ArtifactL2AssetTracker = readContract(`${ARTIFACTS_PATH}`, 'L2AssetTracker'); diff --git a/core/tests/ts-integration/src/env.ts b/core/tests/ts-integration/src/env.ts index aa06a457f901..77600ec189d6 100644 --- a/core/tests/ts-integration/src/env.ts +++ b/core/tests/ts-integration/src/env.ts @@ -1,10 +1,8 @@ import * as path from 'path'; -import * as fs from 'fs'; import * as ethers from 'ethers'; import * as zksync from 'zksync-ethers'; import { DataAvailabityMode, NodeMode, TestEnvironment } from './types'; import { Reporter } from './reporter'; -import * as yaml from 'yaml'; import { L2_BASE_TOKEN_ADDRESS } from 'zksync-ethers/build/utils'; import { FileConfig, @@ -14,6 +12,7 @@ import { getSecondChainConfig } from 'utils/build/file-configs'; import { NodeSpawner } from 'utils/src/node-spawner'; +import { getToken } from 'utils/src/tokens'; import { logsTestPath } from 'utils/build/logs'; import * as 
nodefs from 'node:fs/promises'; import { exec } from 'utils'; @@ -157,25 +156,7 @@ async function loadTestEnvironmentFromFile( const wsL2NodeUrl = generalConfig.api.web3_json_rpc.ws_url; const contractVerificationUrl = `http://127.0.0.1:${generalConfig.contract_verifier.port}`; - const tokens = getTokensNew(pathToHome); - // wBTC is chosen because it has decimals different from ETH (8 instead of 18). - // Using this token will help us to detect decimals-related errors. - // but if it's not available, we'll use the first token from the list. - let token = tokens.tokens['WBTC']; - if (token === undefined) { - token = Object.values(tokens.tokens)[0]; - if (token.symbol == 'WETH') { - token = Object.values(tokens.tokens)[1]; - } - } - let baseToken; - - for (const key in tokens.tokens) { - const token = tokens.tokens[key]; - if (zksync.utils.isAddressEq(token.address, baseTokenAddress)) { - baseToken = token; - } - } + const { token, baseToken } = getToken(pathToHome, baseTokenAddress); // `waitForServer` is expected to be executed. Otherwise this call may throw. const l2TokenAddress = await new zksync.Wallet( @@ -249,52 +230,3 @@ export async function loadTestEnvironment(): Promise { const testEnvironment = await loadTestEnvironmentFromFile(fileConfig, secondChainFileConfig); return testEnvironment; } - -interface TokensDict { - [key: string]: L1Token; -} - -type Tokens = { - tokens: TokensDict; -}; - -type L1Token = { - name: string; - symbol: string; - decimals: bigint; - address: string; -}; - -function getTokensNew(pathToHome: string): Tokens { - const configPath = path.join(pathToHome, '/configs/erc20.yaml'); - if (!fs.existsSync(configPath)) { - throw Error('Tokens config not found'); - } - - const parsedObject = yaml.parse( - fs.readFileSync(configPath, { - encoding: 'utf-8' - }), - { - customTags - } - ); - - for (const key in parsedObject.tokens) { - parsedObject.tokens[key].decimals = BigInt(parsedObject.tokens[key].decimals); - } - return parsedObject; -} - -function customTags(tags: yaml.Tags): yaml.Tags { - for (const tag of tags) { - // @ts-ignore - if (tag.format === 'HEX') { - // @ts-ignore - tag.resolve = (str, _onError, _opt) => { - return str; - }; - } - } - return tags; -} diff --git a/core/tests/ts-integration/src/helpers.ts b/core/tests/ts-integration/src/helpers.ts index 3db2e5c087c3..13f11596c121 100644 --- a/core/tests/ts-integration/src/helpers.ts +++ b/core/tests/ts-integration/src/helpers.ts @@ -3,8 +3,10 @@ import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; import * as hre from 'hardhat'; import { ZkSyncArtifact } from '@matterlabs/hardhat-zksync-solc/dist/src/types'; +import * as path from 'path'; +import { loadConfig } from 'utils/src/file-configs'; -export const SYSTEM_CONTEXT_ADDRESS = '0x000000000000000000000000000000000000800b'; +// import { L2_BRIDGEHUB_ADDRESS } from '../src/constants'; /** * Loads the test contract @@ -74,17 +76,60 @@ export async function anyTransaction(wallet: zksync.Wallet): Promise net.chainId)); +// const gettersFacet = new ethers.Contract( +// zkChainAddr, +// ['function getTotalBatchesExecuted() view returns (uint256)'], +// gwWallet +// ); +// currentExecutedBatchNumber = await gettersFacet.getTotalBatchesExecuted(); +// // console.log('currentExecutedBatchNumber', currentExecutedBatchNumber); +// // console.log('batchNumber awaited', batchNumber); +// if (currentExecutedBatchNumber >= batchNumber) { +// break; +// } else { +// await zksync.utils.sleep(wallet.provider.pollingInterval); +// } +// } +// } + 
export async function waitUntilBlockCommitted(wallet: zksync.Wallet, blockNumber: number) { console.log('Waiting for block to be committed...', blockNumber); while (true) { @@ -134,9 +179,11 @@ export async function waitForL2ToL1LogProof(wallet: zksync.Wallet, blockNumber: await waitUntilBlockFinalized(wallet, blockNumber); // Second, we wait for the log proof. + let i = 0; while ((await wallet.provider.getLogProof(txHash)) == null) { - // console.log('Waiting for log proof...'); + console.log('Waiting for log proof...', i); await zksync.utils.sleep(wallet.provider.pollingInterval); + i++; } } @@ -216,3 +263,15 @@ export function getOverheadForTransaction(encodingLength: bigint): bigint { return bigIntMax(TX_SLOT_OVERHEAD_GAS, TX_LENGTH_BYTE_OVERHEAD_GAS * encodingLength); } + +// Gets the L2-B provider URL based on the L2-A provider URL: validium (L2-B) for era (L2-A), or era (L2-B) for validium (L2-A) +export function getL2bUrl(chainName: string) { + const pathToHome = path.join(__dirname, '../../../..'); + const config = loadConfig({ + pathToHome, + chain: chainName, + config: 'general.yaml' + }); + const url = config.api.web3_json_rpc.http_url; + return url; +} diff --git a/core/tests/ts-integration/src/modifiers/balance-checker.ts b/core/tests/ts-integration/src/modifiers/balance-checker.ts index 12cc21b56658..9e3714ff747d 100644 --- a/core/tests/ts-integration/src/modifiers/balance-checker.ts +++ b/core/tests/ts-integration/src/modifiers/balance-checker.ts @@ -7,7 +7,18 @@ import * as ethers from 'ethers'; import { TestMessage } from '../matchers/matcher-helpers'; import { MatcherModifier, MatcherMessage } from '.'; import { Fee } from '../types'; +import { getL2bUrl } from '../helpers'; import { IERC20__factory as IERC20Factory } from 'zksync-ethers/build/typechain'; +import { + ArtifactL2AssetTracker, + ArtifactBridgeHub, + ArtifactL1AssetRouter, + ArtifactNativeTokenVault, + L2_ASSET_TRACKER_ADDRESS +} from '../constants'; +import { RetryProvider } from '../retry-provider'; +import { getEcosystemContracts } from 'utils/src/tokens'; +// checkout whole file before merge /** * Modifier that ensures that fee was taken from the wallet for a transaction. @@ -82,6 +93,7 @@ export interface Params { l1?: boolean; l1ToL2?: boolean; ignoreUndeployedToken?: boolean; + checkChainBalance?: boolean; } /** @@ -90,6 +102,7 @@ export interface Params { */ interface PopulatedBalanceChange extends BalanceChange { initialBalance: bigint; + initialChainBalance: bigint; } /** @@ -102,12 +115,13 @@ class ShouldChangeBalance extends MatcherModifier { noAutoFeeCheck: boolean; l1: boolean; l1ToL2: boolean; + checkChainBalance: boolean; static async create(token: string, balanceChanges: BalanceChange[], params?: Params) { const l1 = params?.l1 ?? false; const noAutoFeeCheck = params?.noAutoFeeCheck ?? false; const l1ToL2 = params?.l1ToL2 ?? false; - + const checkChainBalance = params?.checkChainBalance ?? false; if (token == zksync.utils.ETH_ADDRESS && l1 && !noAutoFeeCheck) { throw new Error('ETH balance checks on L1 are not supported'); } @@ -117,15 +131,17 @@ class ShouldChangeBalance extends MatcherModifier { const wallet = entry.wallet; const address = entry.addressToCheck ?? 
entry.wallet.address; const initialBalance = await getBalance(l1, wallet, address, token, params?.ignoreUndeployedToken); + const initialChainBalance = await getChainBalance(l1, wallet, token); populatedBalanceChanges.push({ wallet: entry.wallet, change: entry.change, addressToCheck: entry.addressToCheck, - initialBalance + initialBalance, + initialChainBalance }); } - return new ShouldChangeBalance(token, populatedBalanceChanges, noAutoFeeCheck, l1, l1ToL2); + return new ShouldChangeBalance(token, populatedBalanceChanges, noAutoFeeCheck, l1, l1ToL2, checkChainBalance); } private constructor( @@ -133,7 +149,8 @@ class ShouldChangeBalance extends MatcherModifier { balanceChanges: PopulatedBalanceChange[], noAutoFeeCheck: boolean, l1: boolean, - l1ToL2: boolean + l1ToL2: boolean, + checkChainBalance: boolean ) { super(); this.token = token; @@ -141,16 +158,18 @@ class ShouldChangeBalance extends MatcherModifier { this.noAutoFeeCheck = noAutoFeeCheck; this.l1 = l1; this.l1ToL2 = l1ToL2; + this.checkChainBalance = checkChainBalance; } async check(receipt: zksync.types.TransactionReceipt): Promise { let id = 0; for (const balanceChange of this.balanceChanges) { const prevBalance = balanceChange.initialBalance; + const prevChainBalance = balanceChange.initialChainBalance; const wallet = balanceChange.wallet; const address = balanceChange.addressToCheck ?? balanceChange.wallet.address; let newBalance = await getBalance(this.l1, wallet, address, this.token); - + let newChainBalance = await getChainBalance(this.l1, wallet, this.token); // If fee should be checked, we're checking ETH token and this wallet is an initiator, // we should consider fees as well. const autoFeeCheck = !this.noAutoFeeCheck && this.token == zksync.utils.ETH_ADDRESS; @@ -165,6 +184,16 @@ class ShouldChangeBalance extends MatcherModifier { } const diff = newBalance - prevBalance; + const diffChainBalance = newChainBalance - prevChainBalance; + if (this.checkChainBalance && !(await isMinterChain(this.l1, wallet, this.token))) { + // console.log('diffChainBalance', diffChainBalance); + if (diffChainBalance != diff && diffChainBalance + diff != 0n) { + // kl todo. We need this check. But it has issues. It does not query GW, only L1. And AssetTracker is not working properly on GW, as it does not check L1->L3 txs. + throw new Error( + `Chain balance change is not equal to the token balance change for wallet ${balanceChange.wallet.address} (index ${id} in array)` + ); + } + } if (diff != balanceChange.change) { const message = new TestMessage() .matcherHint(`ShouldChangeBalance modifier`) @@ -305,3 +334,62 @@ async function getBalance( return await erc20contract.balanceOf(address); } } + +/** + * Returns the balance of requested token for a certain address. + * + * @param l1 Whether to check l1 balance or l2 + * @param wallet Wallet to make requests from (may not represent the address to check) + * @param token Address of the token + * @param ignoreUndeployedToken Whether allow token to be not deployed. + * If it's set to `true` and token is not deployed, then function returns 0. + * @returns Token balance + */ +async function getChainBalance(l1: boolean, wallet: zksync.Wallet, token: string): Promise { + // const provider = l1 ? wallet.providerL1! : wallet.provider; + // kl todo get from env or something. 
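+    // Note: unlike `getBalance` above, this helper does not return a wallet balance. It reads the
+    // AssetTracker's per-chain accounting: `chainBalance(chainId, assetId)` for the wallet's chain and the
+    // token's asset id (resolved via the NativeTokenVault). When the chain settles on Gateway and the L1
+    // view is requested, the balance is read from the L2AssetTracker deployed on Gateway instead.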
+ + const ecosystemContracts = await getEcosystemContracts(wallet); + + const settlementLayer = await ecosystemContracts.bridgehub.settlementLayer( + (await wallet.provider.getNetwork()).chainId + ); + + const assetId = await ecosystemContracts.nativeTokenVault.assetId(token); + + // console.log("chainId", (await wallet.provider.getNetwork()).chainId, "assetId", assetId); + let balance = await ecosystemContracts.assetTracker.chainBalance( + (await wallet.provider.getNetwork()).chainId, + assetId + ); + // console.log('balance', l1 ? 'l1' : 'l2', balance); + if (settlementLayer != (await wallet.providerL1!.getNetwork()).chainId && l1) { + const gwProvider = new RetryProvider({ url: await getL2bUrl('gateway'), timeout: 1200 * 1000 }, undefined); + const gwAssetTracker = new zksync.Contract(L2_ASSET_TRACKER_ADDRESS, ArtifactL2AssetTracker.abi, gwProvider); + balance = await gwAssetTracker.chainBalance((await wallet.provider.getNetwork()).chainId, assetId); + } + return balance; +} + +async function isMinterChain(l1: boolean, wallet: zksync.Wallet, token: string): Promise { + const bridgehub = new zksync.Contract( + await (await wallet.getBridgehubContract()).getAddress(), + ArtifactBridgeHub.abi, + wallet.providerL1! + ); + const assetRouter = new zksync.Contract( + await bridgehub.assetRouter(), + ArtifactL1AssetRouter.abi, + wallet.providerL1! + ); + const nativeTokenVault = new zksync.Contract( + await assetRouter.nativeTokenVault(), + ArtifactNativeTokenVault.abi, + wallet.providerL1! + ); + const assetId = await nativeTokenVault.assetId(token); + // const assetTracker = new zksync.Contract(await bridgehub.assetTracker(), ArtifactAssetTracker.abi, wallet); + // // return await assetTracker.isMinterChain( (await wallet.provider.getNetwork()).chainId, assetId); + const provider = l1 ? wallet.providerL1! 
: wallet.provider; + return (await nativeTokenVault.originChainId(assetId)) != (await provider.getNetwork()).chainId; +} diff --git a/core/tests/ts-integration/src/temp-sdk.ts b/core/tests/ts-integration/src/temp-sdk.ts new file mode 100644 index 000000000000..91161f7e57b7 --- /dev/null +++ b/core/tests/ts-integration/src/temp-sdk.ts @@ -0,0 +1,151 @@ +import * as zksync from 'zksync-ethers'; +import * as ethers from 'ethers'; +import { BytesLike } from 'ethers'; +import { INTEROP_BUNDLE_ABI, MESSAGE_INCLUSION_PROOF_ABI, L2_INTEROP_CENTER_ADDRESS } from './constants'; +// import { FinalizeWithdrawalParams } from 'zksync-ethers/build/types'; + +export interface Output { + output: any; + rawData: any; + l1BatchNumber: number; + l2TxNumberInBlock: number; + l2MessageIndex: number; + fullProof: string; + proofDecoded: any; +} + +export async function getInteropBundleData( + provider: zksync.Provider, + withdrawalHash: BytesLike, + index = 0 +): Promise { + const response = await tryGetMessageData(provider, withdrawalHash, index); + if (!response) + return { + rawData: null, + output: null, + l1BatchNumber: 0, + l2TxNumberInBlock: 0, + l2MessageIndex: 0, + fullProof: '', + proofDecoded: null + }; + const { message } = response!; + + // Decode the interop message + // console.log("message", message) + const decodedRequest = ethers.AbiCoder.defaultAbiCoder().decode([INTEROP_BUNDLE_ABI], '0x' + message.slice(4)); + + let calls = []; + for (let i = 0; i < decodedRequest[0][4].length; i++) { + calls.push({ + version: decodedRequest[0][4][i][0], + shadowAccount: decodedRequest[0][4][i][1], + to: decodedRequest[0][4][i][2], + from: decodedRequest[0][4][i][3], + value: decodedRequest[0][4][i][4], + data: decodedRequest[0][4][i][5] + }); + } + // console.log(decodedRequest); + + const xl2Input = { + version: decodedRequest[0][0], + sourceChainId: decodedRequest[0][1], + destinationChainId: decodedRequest[0][2], + interopBundleSalt: decodedRequest[0][3], + calls: calls, + bundleAttributes: { + executionAddress: decodedRequest[0][5][0], + unbundlerAddress: decodedRequest[0][5][1] + } + }; + // console.log("response.proof", proof_fee) + const rawData = ethers.AbiCoder.defaultAbiCoder().encode([INTEROP_BUNDLE_ABI], [xl2Input]); + let proofEncoded = ethers.AbiCoder.defaultAbiCoder().encode( + [MESSAGE_INCLUSION_PROOF_ABI], + [ + { + chainId: (await provider.getNetwork()).chainId, + l1BatchNumber: response.l1BatchNumber, + l2MessageIndex: response.l2MessageIndex, + message: [response.l2TxNumberInBlock, L2_INTEROP_CENTER_ADDRESS, rawData], + proof: response.proof + } + ] + ); + let output: Output = { + rawData: rawData, + output: xl2Input, + l1BatchNumber: response.l1BatchNumber, + l2TxNumberInBlock: response.l2TxNumberInBlock, + l2MessageIndex: response.l2MessageIndex, + fullProof: proofEncoded, + proofDecoded: { + chainId: (await provider.getNetwork()).chainId, + l1BatchNumber: response.l1BatchNumber, + l2MessageIndex: response.l2MessageIndex, + message: [response.l2TxNumberInBlock, L2_INTEROP_CENTER_ADDRESS, rawData], + proof: response.proof + } + }; + return output; +} + +async function tryGetMessageData(provider: zksync.Provider, withdrawalHash: BytesLike, index = 0) { + let { l1BatchNumber, l2TxNumberInBlock, message, l2MessageIndex, proof } = { + l1BatchNumber: 0, + l2TxNumberInBlock: 0, + message: '', + l2MessageIndex: 0, + proof: [''] + }; + + try { + // console.log("Reading interop message"); + // `getFinalizeWithdrawalParamsWithoutProof` is only available for wallet instance but not provider + const 
sender_chain_utilityWallet = new zksync.Wallet(zksync.Wallet.createRandom().privateKey, provider); + // const { l2ToL1LogIndex, l2ToL1Log } = await sender_chain_utilityWallet._getWithdrawalL2ToL1Log( + // withdrawalHash, + // index + // ); + // const gatewayChainId = 506; + const { + l1BatchNumber: l1BatchNumberRead, + l2TxNumberInBlock: l2TxNumberInBlockRead, + message: messageRead, + l2MessageIndex: l2MessageIndexRead, + proof: proofRead + } = await sender_chain_utilityWallet.getFinalizeWithdrawalParams(withdrawalHash, index, 'proof_based_gw'); + // const logProof = await sender_chain_utilityWallet.provider.getLogProof( + // withdrawalHash, + // index, + // 0, + // gatewayChainId + // ); + // console.log({ + // l2ToL1Log: l2ToL1Log, + // l2ToL1LogIndex: l2ToL1LogIndex, + // l1BatchNumberRead: l1BatchNumberRead, + // l2TxNumberInBlockRead: l2TxNumberInBlockRead, + // l2MessageIndexRead: l2MessageIndexRead, + // // "proofRead": proofRead, + // logProof: logProof + // }); + + // } = await getFinalizeWithdrawalParamsWithoutProof(provider, withdrawalHash, index); + // console.log("Finished reading interop message"); + + l1BatchNumber = l1BatchNumberRead || 0; + l2TxNumberInBlock = l2TxNumberInBlockRead || 0; + message = messageRead || ''; + l2MessageIndex = l2MessageIndexRead || 0; + proof = proofRead || ['']; + + if (!message) return; + } catch (e) { + console.log('Error reading interop message:', e); // note no error here, since we run out of txs sometime + return; + } + return { l1BatchNumber, l2TxNumberInBlock, message, l2MessageIndex, proof }; +} diff --git a/core/tests/ts-integration/tests/api/web3.test.ts b/core/tests/ts-integration/tests/api/web3.test.ts index 0671600146e3..2d826b4a10c1 100644 --- a/core/tests/ts-integration/tests/api/web3.test.ts +++ b/core/tests/ts-integration/tests/api/web3.test.ts @@ -635,7 +635,7 @@ describe('web3 API compatibility tests', () => { contract.removeAllListeners(); }); - test('Should check metamask interoperability', async () => { + test('Should check metamask compatibility', async () => { // Prepare "metamask" wallet. 
const from = new MockMetamask(alice, chainId); const to = alice.address; diff --git a/core/tests/ts-integration/tests/erc20.test.ts b/core/tests/ts-integration/tests/erc20.test.ts index a84e42c26968..c16c7ed4afae 100644 --- a/core/tests/ts-integration/tests/erc20.test.ts +++ b/core/tests/ts-integration/tests/erc20.test.ts @@ -50,7 +50,8 @@ describe('L1 ERC20 contract checks', () => { tokenDetails.l1Address, [{ wallet: alice, change: -amount }], { - l1: true + l1: true, + checkChainBalance: true } ); const l2BalanceChange = await shouldChangeTokenBalances(tokenDetails.l2Address, [ @@ -184,7 +185,8 @@ describe('L1 ERC20 contract checks', () => { tokenDetails.l1Address, [{ wallet: alice, change: amount }], { - l1: true + l1: true, + checkChainBalance: true } ); diff --git a/core/tests/ts-integration/tests/fees.test.ts b/core/tests/ts-integration/tests/fees.test.ts index 2be4edfeb68f..4b80fd75b809 100644 --- a/core/tests/ts-integration/tests/fees.test.ts +++ b/core/tests/ts-integration/tests/fees.test.ts @@ -15,7 +15,8 @@ import { TestContextOwner, TestMaster } from '../src'; import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; import { DataAvailabityMode, Token } from '../src/types'; -import { SYSTEM_CONTEXT_ADDRESS, getTestContract, anyTransaction } from '../src/helpers'; +import { getTestContract, anyTransaction } from '../src/helpers'; +import { SYSTEM_CONTEXT_ADDRESS } from '../src/constants'; import { loadConfig, shouldLoadConfigFromFile } from 'utils/build/file-configs'; import { logsTestPath } from 'utils/build/logs'; import { sleep } from 'utils/build'; diff --git a/core/tests/ts-integration/tests/interop.test.ts b/core/tests/ts-integration/tests/interop.test.ts index 597cfff9b126..7778d7723b72 100644 --- a/core/tests/ts-integration/tests/interop.test.ts +++ b/core/tests/ts-integration/tests/interop.test.ts @@ -7,19 +7,85 @@ import { Token } from '../src/types'; import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; + import { waitForL2ToL1LogProof } from '../src/helpers'; import { RetryableWallet } from '../src/retry-provider'; +import { scaledGasPrice, deployContract, waitUntilBlockFinalized } from '../src/helpers'; + +/** + * Formats an Ethereum address as ERC-7930 InteroperableAddress bytes + * Format: version (2 bytes) + chain type (2 bytes) + chain ref len (1 byte) + chain ref + addr len (1 byte) + address + */ +function formatEvmV1Address(address: string, chainId?: bigint): string { + const version = '0001'; // ERC-7930 version + const chainType = '0000'; // EIP-155 chain type + + let result = version + chainType; + + if (chainId !== undefined) { + // Convert chainId to minimal bytes representation + const chainIdHex = chainId.toString(16); + const chainIdBytes = chainIdHex.length % 2 === 0 ? 
chainIdHex : '0' + chainIdHex; + const chainRefLen = (chainIdBytes.length / 2).toString(16).padStart(2, '0'); + result += chainRefLen + chainIdBytes; + } else { + result += '00'; // Empty chain reference + } + + result += '14'; // Address length (20 bytes) + result += address.slice(2); // Remove '0x' prefix + + return '0x' + result; +} + +/** + * Formats a chain ID as ERC-7930 InteroperableAddress bytes (without specific address) + * This is used for destination chain specification in sendBundle + */ +function formatEvmV1Chain(chainId: bigint): string { + const version = '0001'; // ERC-7930 version + const chainType = '0000'; // EIP-155 chain type + + let result = version + chainType; + + // Convert chainId to minimal bytes representation + const chainIdHex = chainId.toString(16); + const chainIdBytes = chainIdHex.length % 2 === 0 ? chainIdHex : '0' + chainIdHex; + const chainRefLen = (chainIdBytes.length / 2).toString(16).padStart(2, '0'); + result += chainRefLen + chainIdBytes; + + result += '00'; // Empty address (0 length) + + return '0x' + result; +} + import { - L2_MESSAGE_VERIFICATION_ADDRESS, + L2_ASSET_ROUTER_ADDRESS, + L2_NATIVE_TOKEN_VAULT_ADDRESS, + L2_INTEROP_HANDLER_ADDRESS, + L2_INTEROP_CENTER_ADDRESS, L2_INTEROP_ROOT_STORAGE_ADDRESS, - ArtifactL2MessageVerification, - ArtifactL2InteropRootStorage, + L2_MESSAGE_VERIFICATION_ADDRESS, + // REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + ETH_ADDRESS_IN_CONTRACTS, ArtifactBridgeHub, - GATEWAY_CHAIN_ID + ArtifactInteropCenter, + ArtifactInteropHandler, + ArtifactNativeTokenVault, + ArtifactMintableERC20, + ArtifactIERC7786Attributes, + ArtifactL2InteropRootStorage, + ArtifactL2MessageVerification } from '../src/constants'; +// import { RetryProvider } from '../src/retry-provider'; +import { getInteropBundleData } from '../src/temp-sdk'; +import { ETH_ADDRESS, sleep } from 'zksync-ethers/build/utils'; import { FinalizeWithdrawalParams } from 'zksync-ethers/build/types'; +const richPk = '0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110'; // Must have L1 ETH +const ethFundAmount = ethers.parseEther('1'); + describe('Interop behavior checks', () => { let testMaster: TestMaster; let alice: RetryableWallet; @@ -28,12 +94,60 @@ describe('Interop behavior checks', () => { let skipInteropTests = false; + // L1 Variables + let l1Provider: ethers.Provider; + let veryRichWallet: zksync.Wallet; + + // Token details + let tokenA: Token = { + name: 'Token A', + symbol: 'AA', + decimals: 18n, + l1Address: '', + l2Address: '', + l2AddressSecondChain: '' + }; + + // Interop1 (Main Chain) Variables + let interop1Provider: zksync.Provider; + let interop1Wallet: zksync.Wallet; + let interop1RichWallet: zksync.Wallet; + // kl todo remove very rich wallet. Useful for local debugging, calldata can be sent directly using cast. 
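+    // Test topology, as wired up in `beforeAll` below: Interop1 is the main chain and Interop2 is the
+    // second chain, both expected to settle on Gateway; `richPk` above is only used to fund accounts from L1.
+    //
+    // For reference, the ERC-7930 helpers above produce byte strings like the following (illustrative
+    // values, spaces added for readability): with chain id 270 (hex 0x010e),
+    //   formatEvmV1Chain(270n)           -> 0x 0001 0000 02 010e 00
+    //   formatEvmV1Address(addr, 270n)   -> 0x 0001 0000 02 010e 14 <20 address bytes>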
+ // let interop1VeryRichWallet: zksync.Wallet; + let interop1InteropCenter: zksync.Contract; + let interop2InteropHandler: zksync.Contract; + let interop1NativeTokenVault: zksync.Contract; + let interop1TokenA: zksync.Contract; + + // Interop2 (Second Chain) Variables + let interop2RichWallet: zksync.Wallet; + let interop2Provider: zksync.Provider; + let interop2NativeTokenVault: zksync.Contract; + + // Gateway Variables + // let gatewayProvider: zksync.Provider; + // let gatewayWallet: zksync.Wallet; + beforeAll(async () => { testMaster = TestMaster.getInstance(__filename); alice = testMaster.mainAccount(); tokenDetails = testMaster.environment().erc20Token; + const testWalletPK = testMaster.newEmptyAccount().privateKey; + const mainAccount = testMaster.mainAccount(); + + // Initialize providers + l1Provider = mainAccount.providerL1!; + interop1Provider = mainAccount.provider; + // Setup wallets for Interop1 + veryRichWallet = new zksync.Wallet(richPk, interop1Provider, l1Provider); + + // Initialize Test Master and create wallets for Interop1 + interop1Wallet = new zksync.Wallet(testWalletPK, interop1Provider, l1Provider); + interop1RichWallet = new zksync.Wallet(mainAccount.privateKey, interop1Provider, l1Provider); + // interop1VeryRichWallet = new zksync.Wallet(richPk, interop1Provider, l1Provider); + // Skip interop tests if the SL is the same as the L1. const bridgehub = new ethers.Contract( await alice.provider.getBridgehubContractAddress(), @@ -56,6 +170,49 @@ describe('Interop behavior checks', () => { } aliceSecondChain = maybeAliceSecondChain!; } + + // Setup Interop2 Provider and Wallet + // interop2Provider = new RetryProvider( + // { url: await getL2bUrl('validium'), timeout: 1200 * 1000 }, + // undefined, + // testMaster.reporter + // ); + if (skipInteropTests) { + return; + } + interop2Provider = aliceSecondChain.provider; + interop2RichWallet = new zksync.Wallet(mainAccount.privateKey, interop2Provider, l1Provider); + + // gatewayProvider = new RetryProvider( + // { url: await getL2bUrl('gateway'), timeout: 1200 * 1000 }, + // undefined, + // testMaster.reporter + // ); + // gatewayWallet = new zksync.Wallet(zksync.Wallet.createRandom().privateKey, gatewayProvider); + + // Initialize Contracts on Interop1 + interop1InteropCenter = new zksync.Contract( + L2_INTEROP_CENTER_ADDRESS, + ArtifactInteropCenter.abi, + interop1Wallet + ); + interop2InteropHandler = new zksync.Contract( + L2_INTEROP_HANDLER_ADDRESS, + ArtifactInteropHandler.abi, + interop2RichWallet + ); + interop1NativeTokenVault = new zksync.Contract( + L2_NATIVE_TOKEN_VAULT_ADDRESS, + ArtifactNativeTokenVault.abi, + interop1Wallet + ); + + // Initialize Contracts on Interop2 + interop2NativeTokenVault = new zksync.Contract( + L2_NATIVE_TOKEN_VAULT_ADDRESS, + ArtifactNativeTokenVault.abi, + interop2Provider + ); }); let withdrawalHash: string; @@ -74,7 +231,7 @@ describe('Interop behavior checks', () => { // Perform a withdrawal and wait for it to be processed const withdrawalPromise = await alice.withdraw({ token: tokenDetails.l2Address, - amount: 1n + amount: 1 }); await expect(withdrawalPromise).toBeAccepted(); const withdrawalTx = await withdrawalPromise; @@ -86,10 +243,10 @@ describe('Interop behavior checks', () => { params = await alice.getFinalizeWithdrawalParams(withdrawalHash, undefined, 'proof_based_gw'); // Needed else the L2's view of GW's MessageRoot won't be updated - await waitForInteropRootNonZero(alice.provider, alice, getGWBlockNumber(params)); + await waitForInteropRootNonZero(alice.provider, 
alice, GW_CHAIN_ID, getGWBlockNumber(params)); const included = await l2MessageVerification.proveL2MessageInclusionShared( - (await alice.provider.getNetwork()).chainId, + Number((await alice.provider.getNetwork()).chainId), params.l1BatchNumber, params.l2MessageIndex, { txNumberInBatch: params.l2TxNumberInBlock, sender: params.sender, data: params.message }, @@ -110,11 +267,16 @@ describe('Interop behavior checks', () => { ); // Needed else the L2's view of GW's MessageRoot won't be updated - await waitForInteropRootNonZero(aliceSecondChain.provider, aliceSecondChain, getGWBlockNumber(params)); + await waitForInteropRootNonZero( + aliceSecondChain.provider, + aliceSecondChain, + GW_CHAIN_ID, + getGWBlockNumber(params) + ); // We use the same proof that was verified in L2-A const included = await l2MessageVerification.proveL2MessageInclusionShared( - (await alice.provider.getNetwork()).chainId, + Number((await alice.provider.getNetwork()).chainId), params.l1BatchNumber, params.l2MessageIndex, { txNumberInBatch: params.l2TxNumberInBlock, sender: params.sender, data: params.message }, @@ -123,34 +285,373 @@ describe('Interop behavior checks', () => { expect(included).toBe(true); }); + test('Can perform an ETH deposit', async () => { + if (skipInteropTests) { + return; + } + const gasPrice = await scaledGasPrice(interop1RichWallet); + + await ( + await veryRichWallet._signerL1!().sendTransaction({ + to: interop1RichWallet.address, + value: ethFundAmount * 10n + }) + ).wait(); + + // Deposit funds on Interop1 + await ( + await interop1RichWallet.deposit({ + token: ETH_ADDRESS_IN_CONTRACTS, + amount: ethFundAmount, + to: interop1Wallet.address, + approveERC20: true, + approveBaseERC20: true, + approveOverrides: { gasPrice }, + overrides: { gasPrice } + }) + ).wait(); + + // Deposit funds on Interop2 + await ( + await interop2RichWallet.deposit({ + token: ETH_ADDRESS_IN_CONTRACTS, + amount: ethFundAmount, + to: interop2RichWallet.address, + approveERC20: true, + approveBaseERC20: true, + approveOverrides: { gasPrice }, + overrides: { gasPrice } + }) + ).wait(); + }); + + test('Can deploy token contracts', async () => { + if (skipInteropTests) { + return; + } + // Deploy Token A on Interop1 + const tokenADeploy = await deployContract(interop1Wallet, ArtifactMintableERC20, [ + tokenA.name, + tokenA.symbol, + tokenA.decimals + ]); + tokenA.l2Address = await tokenADeploy.getAddress(); + console.log('tokenA.l2Address', tokenA.l2Address); + // Register Token A + await (await interop1NativeTokenVault.registerToken(tokenA.l2Address)).wait(); + tokenA.assetId = await interop1NativeTokenVault.assetId(tokenA.l2Address); + tokenA.l2AddressSecondChain = await interop2NativeTokenVault.tokenAddress(tokenA.assetId); + interop1TokenA = new zksync.Contract(tokenA.l2Address, ArtifactMintableERC20.abi, interop1Wallet); + }); + + test('Can perform cross chain transfer', async () => { + if (skipInteropTests) { + return; + } + const transferAmount = 100n; + // let interop1TokenAVeryRichWallet = new zksync.Contract( + // tokenA.l2Address, + // ArtifactMintableERC20.abi, + // interop1VeryRichWallet + // ); + // await ((await interop1TokenAVeryRichWallet.approve(L2_NATIVE_TOKEN_VAULT_ADDRESS, transferAmount)).wait()).wait(); + // await ((await interop1TokenA.mint('0x36615Cf349d7F6344891B1e7CA7C72883F5dc049', transferAmount)).wait()).wait(); + + await Promise.all([ + // Approve token transfer on Interop1 + (await interop1TokenA.approve(L2_NATIVE_TOKEN_VAULT_ADDRESS, transferAmount)).wait(), + + // Mint tokens for the test 
wallet on Interop1 for the transfer + (await interop1TokenA.mint(interop1Wallet.address, transferAmount)).wait() + ]); + + // Compose and send the interop request transaction + const erc7786AttributeDummy = new zksync.Contract( + '0x0000000000000000000000000000000000000000', + ArtifactIERC7786Attributes.abi, + interop1Wallet + ); + + const feeValue = ethers.parseEther('0.2'); + const receipt = await fromInterop1RequestInterop( + // Fee payment call starters + [ + { + to: formatEvmV1Address(ethers.ZeroAddress), + data: '0x', + callAttributes: [ + await erc7786AttributeDummy.interface.encodeFunctionData('interopCallValue', [feeValue]) + ] + } + ], + // Execution call starters for token transfer + [ + { + to: formatEvmV1Address(L2_ASSET_ROUTER_ADDRESS), + data: getTokenTransferSecondBridgeData(tokenA.assetId!, transferAmount, interop2RichWallet.address), + callAttributes: [await erc7786AttributeDummy.interface.encodeFunctionData('indirectCall', [0])] + } + ] + ); + // console.log('receipt', receipt); + + // Broadcast interop transaction from Interop1 to Interop2 + // await readAndBroadcastInteropTx(tx.hash, interop1Provider, interop2Provider); + await readAndBroadcastInteropBundle(receipt.hash, interop1Provider, interop2Provider); + + await sleep(10000); + tokenA.l2AddressSecondChain = await interop2NativeTokenVault.tokenAddress(tokenA.assetId); + console.log('Token A info:', tokenA); + + // Assert that the token balance on chain2 + // const interop1WalletSecondChainBalance = await getTokenBalance({ + // provider: interop2Provider, + // tokenAddress: tokenA.l2AddressSecondChain!, + // address: interop2RichWallet.address + // }); + // expect(interop1WalletSecondChainBalance).toBe(transferAmount); + }); + + // Types for interop call starters and gas fields. + interface InteropCallStarter { + to: string; // ERC-7930 formatted address bytes + data: string; + callAttributes: string[]; + } + + /** + * Sends a direct L2 transaction request on Interop1. + * The function prepares the interop call input and populates the transaction before sending. + */ + async function fromInterop1RequestInterop( + feeCallStarters: InteropCallStarter[], + execCallStarters: InteropCallStarter[] + ) { + // note skipping feeCallStarters for now: + + const txFinalizeReceipt = ( + await interop1InteropCenter.sendBundle( + formatEvmV1Chain((await interop2Provider.getNetwork()).chainId), + execCallStarters, + [] + ) + ).wait(); + return txFinalizeReceipt; + + // const tx = await interop1InteropCenter.requestInterop( + // (await interop2Provider.getNetwork()).chainId, + // // L2_STANDARD_TRIGGER_ACCOUNT_ADDRESS, + // feeCallStarters, + // execCallStarters, + // { + // gasLimit: 30000000n, + // gasPerPubdataByteLimit: REQUIRED_L2_GAS_PRICE_PER_PUBDATA, + // refundRecipient: interop1Wallet.address, + // paymaster: ethers.ZeroAddress, + // paymasterInput: '0x' + // }, + // { + // value: [...feeCallStarters, ...execCallStarters].reduce( + // (total, item) => total + BigInt(item.requestedInteropCallValue), + // 0n + // ) + // } + // ); + // const txReceipt: zksync.types.TransactionReceipt = await tx.wait(); + // return txReceipt; + } + + /** + * Generates ABI-encoded data for transferring tokens using the second bridge. 
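+     * Layout, matching the encoding below: a leading 0x01 version byte, followed by
+     * abi.encode(bytes32 assetId, bytes transferData), where transferData is
+     * abi.encode(uint256 amount, address recipient, address tokenAddress) with the token
+     * address left as the zero address.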
+ */ + function getTokenTransferSecondBridgeData(assetId: string, amount: bigint, recipient: string) { + return ethers.concat([ + '0x01', + new ethers.AbiCoder().encode( + ['bytes32', 'bytes'], + [ + assetId, + new ethers.AbiCoder().encode( + ['uint256', 'address', 'address'], + [amount, recipient, ethers.ZeroAddress] + ) + ] + ) + ]); + } + function getGWBlockNumber(params: FinalizeWithdrawalParams): number { /// see hashProof in MessageHashing.sol for this logic. let gwProofIndex = 1 + parseInt(params.proof[0].slice(4, 6), 16) + 1 + parseInt(params.proof[0].slice(6, 8), 16); + // console.log('params', params, gwProofIndex, parseInt(params.proof[gwProofIndex].slice(2, 34), 16)); return parseInt(params.proof[gwProofIndex].slice(2, 34), 16); } - async function waitForInteropRootNonZero(provider: zksync.Provider, alice: zksync.Wallet, l1BatchNumber: number) { + async function waitForInteropRootNonZero( + provider: zksync.Provider, + alice: zksync.Wallet, + chainId: bigint, + l1BatchNumber: number + ) { const l2InteropRootStorage = new zksync.Contract( L2_INTEROP_ROOT_STORAGE_ADDRESS, ArtifactL2InteropRootStorage.abi, provider ); let currentRoot = ethers.ZeroHash; - - while (currentRoot === ethers.ZeroHash) { + let count = 0; + while (currentRoot === ethers.ZeroHash && count < 60) { // We make repeated transactions to force the L2 to update the interop root. const tx = await alice.transfer({ to: alice.address, - amount: 1 + amount: 1, + token: ETH_ADDRESS }); await tx.wait(); - currentRoot = await l2InteropRootStorage.interopRoots(GATEWAY_CHAIN_ID, l1BatchNumber); + currentRoot = await l2InteropRootStorage.interopRoots(parseInt(chainId.toString()), l1BatchNumber); await zksync.utils.sleep(alice.provider.pollingInterval); + + // console.log('currentRoot', currentRoot, count); + count++; } + console.log('Interop root is non-zero', currentRoot, l1BatchNumber); } + const GW_CHAIN_ID = 506n; + + /** + * Reads an interop transaction from the sender chain, constructs a new transaction, + * and broadcasts it on the receiver chain. + */ + async function readAndBroadcastInteropBundle( + txHash: string, + senderProvider: zksync.Provider, + receiverProvider: zksync.Provider + ) { + console.log('*Reading and broadcasting interop bundle initiated by txHash*', txHash); + const senderUtilityWallet = new zksync.Wallet(zksync.Wallet.createRandom().privateKey, senderProvider); + const txReceipt = await senderProvider.getTransactionReceipt(txHash); + await waitUntilBlockFinalized(senderUtilityWallet, txReceipt!.blockNumber); + // const gwWallet = new zksync.Wallet(zksync.Wallet.createRandom().privateKey, gatewayProvider); + // await waitUntilBlockExecutedOnGateway(senderUtilityWallet, gatewayWallet, txReceipt!.blockNumber); + /// kl todo figure out what we need to wait for here. Probably the fact that we need to wait for the GW block finalization. + await sleep(25000); + // console.log((await senderProvider.getNetwork()).chainId); + // console.log((await senderProvider.getNetwork()).name) + // console.log(await senderUtilityWallet.getL2BridgeContracts()) + const params = await senderUtilityWallet.getFinalizeWithdrawalParams(txHash, 0, 'proof_based_gw'); + await waitForInteropRootNonZero(interop2Provider, interop2RichWallet, GW_CHAIN_ID, getGWBlockNumber(params)); + + // Get interop trigger and bundle data from the sender chain. 
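+        // `getInteropBundleData` (see temp-sdk.ts) fetches the withdrawal-style message params for the
+        // bundle emitted on the sender chain, ABI-decodes the InteropBundle, and returns both the
+        // re-encoded bundle bytes (`rawData`) and the inclusion-proof struct (`proofDecoded`) consumed by
+        // `executeBundle` on the destination chain's InteropHandler below.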
+ const executionBundle = await getInteropBundleData(senderProvider, txHash, 0); + // console.log('executionBundle', executionBundle); + if (executionBundle.output == null) return; + + const receipt = await interop2InteropHandler.executeBundle( + executionBundle.rawData, + executionBundle.proofDecoded + ); + await receipt.wait(); + console.log('receipt', receipt.hash); + } + + /** + * Reads an interop transaction from the sender chain, constructs a new transaction, + * and broadcasts it on the receiver chain. + */ + // async function _readAndBroadcastInteropTx( + // txHash: string, + // senderProvider: zksync.Provider, + // _receiverProvider: zksync.Provider + // ) { + // console.log('*Reading and broadcasting interop tx initiated by txHash*', txHash); + // const senderUtilityWallet = new zksync.Wallet(zksync.Wallet.createRandom().privateKey, senderProvider); + // const txReceipt = await senderProvider.getTransactionReceipt(txHash); + // await waitUntilBlockFinalized(senderUtilityWallet, txReceipt!.blockNumber); + + // /// kl todo figure out what we need to wait for here. Probably the fact that we need to wait for the GW block finalization. + // await sleep(25000); + // const params = await senderUtilityWallet.getFinalizeWithdrawalParams(txHash, 0, 'proof_based_gw'); + // await waitForInteropRootNonZero(interop2Provider, interop2RichWallet, GW_CHAIN_ID, getGWBlockNumber(params)); + + // Get interop trigger and bundle data from the sender chain. + // const triggerDataBundle = await getInteropTriggerData(senderProvider, txHash, 2); + // const feeBundle = await getInteropBundleData(senderProvider, txHash, 0); + // const executionBundle = await getInteropBundleData(senderProvider, txHash, 0); + // if (executionBundle.output == null) return; + + // ABI-encode execution data along with its proof. + // const txData = ethers.AbiCoder.defaultAbiCoder().encode( + // ['bytes', 'bytes'], + // [executionBundle.rawData, executionBundle.fullProof] + // ); + + // Construct the interop transaction for the receiver chain. 
+ // const nonce = await receiverProvider.getTransactionCount(L2_STANDARD_TRIGGER_ACCOUNT_ADDRESS); + // const feeData = await receiverProvider.getFeeData(); + // let interopTx = { + // from: L2_STANDARD_TRIGGER_ACCOUNT_ADDRESS, + // to: L2_INTEROP_HANDLER_ADDRESS, + // chainId: (await receiverProvider.getNetwork()).chainId.toString(), + // data: txData, + // nonce: nonce, + // customData: { + // paymasterParams: { + // paymaster: triggerDataBundle.output.gasFields.paymaster, + // paymasterInput: triggerDataBundle.output.gasFields.paymasterInput + // }, + // gasPerPubdata: triggerDataBundle.output.gasFields.gasPerPubdataByteLimit, + // customSignature: ethers.AbiCoder.defaultAbiCoder().encode( + // ['bytes', 'bytes', 'address', 'address', 'bytes'], + // [ + // feeBundle.rawData, + // feeBundle.fullProof, + // triggerDataBundle.output.sender, + // triggerDataBundle.output.gasFields.refundRecipient, + // triggerDataBundle.fullProof + // ] + // ) + // }, + // maxFeePerGas: feeData.maxFeePerGas, + // maxPriorityFeePerGas: feeData.maxPriorityFeePerGas, + // gasLimit: triggerDataBundle.output.gasFields.gasLimit, + // value: 0 + // }; + + // Serialize and broadcast the transaction + // const hexTx = zksync.utils.serializeEip712(interopTx); + // const broadcastTx = await receiverProvider.broadcastTransaction(hexTx); + // await broadcastTx.wait(); + + // Recursive broadcast + // await readAndBroadcastInteropTx(broadcastTx.realInteropHash!, receiverProvider, senderProvider); + // } + + /** + * Retrieves the token balance for a given address. + */ + // async function getTokenBalance({ + // provider, + // tokenAddress, + // address + // }: { + // provider: zksync.Provider; + // tokenAddress: string; + // address: string; + // }): Promise { + // if (!tokenAddress) { + // throw new Error('Token address is not provided'); + // } + // if (tokenAddress === ethers.ZeroAddress) { + // // Happens when token wasn't deployed yet. Therefore there is no balance. 
+ // return 0n; + // } + // const tokenContract = new zksync.Contract(tokenAddress, ArtifactMintableERC20.abi, provider); + // return await tokenContract.balanceOf(address); + // } + afterAll(async () => { await testMaster.deinitialize(); }); diff --git a/core/tests/ts-integration/tests/system.test.ts b/core/tests/ts-integration/tests/system.test.ts index 533113fd4b82..ccbfd5125f8e 100644 --- a/core/tests/ts-integration/tests/system.test.ts +++ b/core/tests/ts-integration/tests/system.test.ts @@ -11,13 +11,8 @@ import { L2_DEFAULT_ETH_PER_ACCOUNT } from '../src/context-owner'; import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; -import { - scaledGasPrice, - maxL2GasLimitForPriorityTxs, - SYSTEM_CONTEXT_ADDRESS, - getTestContract, - waitForL2ToL1LogProof -} from '../src/helpers'; +import { scaledGasPrice, maxL2GasLimitForPriorityTxs, getTestContract, waitForL2ToL1LogProof } from '../src/helpers'; +import { SYSTEM_CONTEXT_ADDRESS } from '../src/constants'; import { DataAvailabityMode } from '../src/types'; import { BigNumberish } from 'ethers'; import { BytesLike } from '@ethersproject/bytes'; diff --git a/core/tests/vm-benchmark/src/vm.rs b/core/tests/vm-benchmark/src/vm.rs index fb0ce0d74bea..7c8dca2bf1ae 100644 --- a/core/tests/vm-benchmark/src/vm.rs +++ b/core/tests/vm-benchmark/src/vm.rs @@ -13,9 +13,9 @@ use zksync_multivm::{ zk_evm_latest::ethereum_types::{Address, U256}, }; use zksync_types::{ - block::L2BlockHasher, fee_model::BatchFeeInput, helpers::unix_timestamp_ms, u256_to_h256, - utils::storage_key_for_eth_balance, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, - Transaction, + block::L2BlockHasher, fee_model::BatchFeeInput, helpers::unix_timestamp_ms, + settlement::SettlementLayer, u256_to_h256, utils::storage_key_for_eth_balance, L1BatchNumber, + L2BlockNumber, L2ChainId, ProtocolVersionId, Transaction, }; use crate::{instruction_counter::InstructionCounter, transaction::PRIVATE_KEY}; @@ -246,6 +246,7 @@ fn test_env() -> (SystemEnv, L1BatchEnv) { max_virtual_blocks_to_create: 100, interop_roots: vec![], }, + settlement_layer: SettlementLayer::for_tests(), }; (system_env, l1_batch_env) } diff --git a/etc/env/file_based/general.yaml b/etc/env/file_based/general.yaml index 01028fe8dbd3..5794380d1519 100644 --- a/etc/env/file_based/general.yaml +++ b/etc/env/file_based/general.yaml @@ -203,6 +203,7 @@ prometheus: observability: log_format: plain + # zksync_multivm=trace, log_directives: "warn,zksync=info,zksync_config=debug,zksync_commitment_generator=debug,zksync_server=debug,zksync_contract_verifier=debug,zksync_eth_watch=debug,zksync_state=debug,zksync_utils=debug,zksync_mempool=debug,zksync_web3_decl=debug,zksync_health_check=debug,vise_exporter=error,snapshots_creator=debug,zksync_base_token_adjuster=debug,zksync_external_price_api=debug" # Uncomment only if needed # sentry: diff --git a/etc/env/file_based/genesis.yaml b/etc/env/file_based/genesis.yaml index 293b9fc29a88..acf059680e0a 100644 --- a/etc/env/file_based/genesis.yaml +++ b/etc/env/file_based/genesis.yaml @@ -1,10 +1,10 @@ genesis_protocol_semantic_version: 0.30.0 genesis_protocol_version: null -genesis_root: 0x1bdeda5b7b0e74c74678421c0ecd626647406edcef2c4c68507e2276442fe9ac -genesis_rollup_leaf_index: 84 -genesis_batch_commitment: 0x67ca7a4c10155b6501285ea0615c032d7769dd3d7e6efcbbcc67abfac804ab5e -bootloader_hash: 0x0100091118d2d6c46d8e646b67026ef848826845d0aff1cb72cddb6b98e4e90c -default_aa_hash: 0x010005f7e776333ac3ade732ccdc91816d895597b0727934404c484ae5bbec0f +genesis_root: 
0xf71deeeae4c597c9e7d138cfc9e0e6dc7ac5ef5c814d630058050e0be03f2d12 +genesis_rollup_leaf_index: 92 +genesis_batch_commitment: 0x47548b294bbc409b9c0bc028b6b163e2ca182f78dfcd71abfd97102ceb80da7f +bootloader_hash: 0x0100096133f31de060c833cb0f0cc3fb4d1abad7041e6abcb3b3e616cb0513c7 +default_aa_hash: 0x010005f7a68d648b411e261db79219ba1dcd0281fa01953a4ffc9564554f1ba7 evm_emulator_hash: 0x01000d8bae37b82f311186426184866498b357f41d7a02ced11f3e3fbfbacd63 l1_chain_id: 9 l2_chain_id: 270 @@ -12,6 +12,6 @@ fee_account: '0x0000000000000000000000000000000000000001' l1_batch_commit_data_generator_mode: Rollup prover: snark_wrapper_vk_hash: 0x1ffc56111a5cfaf5db387f6a31408ad20217e9bc1f31f2f5c1bd38b0d6d7968b - fflonk_snark_wrapper_vk_hash: 0x49eae0bf5c7ea580f4979b366e52b386adc5f42e2ce50fc1d3c4de9a86052bff + fflonk_snark_wrapper_vk_hash: 0x6f36a08c517b060fa97308cdb3e23b04842ff839d451a753ec8fae1a5408304a dummy_verifier: true custom_genesis_state_path: null diff --git a/etc/utils/src/constants.ts b/etc/utils/src/constants.ts new file mode 100644 index 000000000000..08e2d9c2d06a --- /dev/null +++ b/etc/utils/src/constants.ts @@ -0,0 +1,59 @@ +import * as fs from 'fs'; +// eslint-disable-next-line @typescript-eslint/no-var-requires +export const REQUIRED_L2_GAS_PRICE_PER_PUBDATA = 800; + +export const SYSTEM_UPGRADE_L2_TX_TYPE = 254; +export const GATEWAY_CHAIN_ID = 505; +export const ADDRESS_ONE = '0x0000000000000000000000000000000000000001'; +export const ETH_ADDRESS_IN_CONTRACTS = ADDRESS_ONE; +export const L1_TO_L2_ALIAS_OFFSET = '0x1111000000000000000000000000000000001111'; +export const L2_BRIDGEHUB_ADDRESS = '0x0000000000000000000000000000000000010002'; +export const L2_ASSET_ROUTER_ADDRESS = '0x0000000000000000000000000000000000010003'; +export const L2_NATIVE_TOKEN_VAULT_ADDRESS = '0x0000000000000000000000000000000000010004'; +export const L2_MESSAGE_ROOT_ADDRESS = '0x0000000000000000000000000000000000010005'; +export const L2_INTEROP_ROOT_STORAGE_ADDRESS = '0x0000000000000000000000000000000000010008'; +export const L2_MESSAGE_VERIFICATION_ADDRESS = '0x0000000000000000000000000000000000010009'; +export const L2_CHAIN_ASSET_HANDLER_ADDRESS = '0x000000000000000000000000000000000001000A'; +export const L2_INTEROP_CENTER_ADDRESS = '0x000000000000000000000000000000000001000B'; +export const L2_INTEROP_HANDLER_ADDRESS = '0x000000000000000000000000000000000001000C'; +export const L2_ASSET_TRACKER_ADDRESS = '0x000000000000000000000000000000000001000D'; + +// System contract addresses +export const SYSTEM_CONTEXT_ADDRESS = '0x000000000000000000000000000000000000800b'; +export const DEPLOYER_SYSTEM_CONTRACT_ADDRESS = '0x0000000000000000000000000000000000008006'; +export const L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR = '0x0000000000000000000000000000000000008008'; +export const EMPTY_STRING_KECCAK = '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470'; +export const BRIDGEHUB_L2_CANONICAL_TRANSACTION_ABI = + 'tuple(uint256 txType, uint256 from, uint256 to, uint256 gasLimit, uint256 gasPerPubdataByteLimit, uint256 maxFeePerGas, uint256 maxPriorityFeePerGas, uint256 paymaster, uint256 nonce, uint256 value, uint256[4] reserved, bytes data, bytes signature, uint256[] factoryDeps, bytes paymasterInput, bytes reservedDynamic)'; +export const BRIDGEHUB_L2_TRANSACTION_REQUEST_ABI = + 'tuple(address sender, address contractL2, uint256 mintValue, uint256 l2Value, bytes l2Calldata, uint256 l2GasLimit, uint256 l2GasPerPubdataByteLimit, bytes[] factoryDeps, address refundRecipient)'; +export const L2_LOG_STRING = 
+ 'tuple(uint8 l2ShardId,bool isService,uint16 txNumberInBatch,address sender,bytes32 key,bytes32 value)'; +export const ARTIFACTS_PATH = '../../../contracts/l1-contracts/out'; +export const SYSTEM_ARTIFACTS_PATH = '../../../contracts/system-contracts/zkout'; + +export const INTEROP_CALL_ABI = + 'tuple(bytes1 version, bool shadowAccount, address to, address from, uint256 value, bytes data)'; +export const INTEROP_BUNDLE_ABI = + 'tuple(bytes1 version, uint256 destinationChainId, bytes32 interopBundleSalt, tuple(bytes1 version, bool shadowAccount, address to, address from, uint256 value, bytes data)[] calls, (address executionAddress, address unbundlerAddress) bundleAttributes)'; + +export const MESSAGE_INCLUSION_PROOF_ABI = + 'tuple(uint256 chainId, uint256 l1BatchNumber, uint256 l2MessageIndex, tuple(uint16 txNumberInBatch, address sender, bytes data) message, bytes32[] proof)'; + +// Read contract artifacts +function readContract(path: string, fileName: string, contractName?: string) { + contractName = contractName || fileName; + return JSON.parse(fs.readFileSync(`${path}/${fileName}.sol/${contractName}.json`, { encoding: 'utf-8' })); +} +export const ArtifactBridgeHub = readContract(`${ARTIFACTS_PATH}`, 'Bridgehub'); +export const ArtifactInteropCenter = readContract(`${ARTIFACTS_PATH}`, 'InteropCenter'); +export const ArtifactInteropHandler = readContract(`${ARTIFACTS_PATH}`, 'InteropHandler'); +export const ArtifactL2InteropRootStorage = readContract(`${SYSTEM_ARTIFACTS_PATH}`, 'L2InteropRootStorage'); +export const ArtifactL2MessageVerification = readContract(`${ARTIFACTS_PATH}`, 'L2MessageVerification'); +export const ArtifactIERC7786Attributes = readContract(`${ARTIFACTS_PATH}`, 'IERC7786Attributes'); +export const ArtifactNativeTokenVault = readContract(`${ARTIFACTS_PATH}`, 'L2NativeTokenVault'); +export const ArtifactL1NativeTokenVault = readContract(`${ARTIFACTS_PATH}`, 'L1NativeTokenVault'); +export const ArtifactMintableERC20 = readContract('../../../contracts/l1-contracts/zkout', 'TestnetERC20Token'); +export const ArtifactL1AssetRouter = readContract(`${ARTIFACTS_PATH}`, 'L1AssetRouter'); +export const ArtifactL1AssetTracker = readContract(`${ARTIFACTS_PATH}`, 'L1AssetTracker'); +export const ArtifactL2AssetTracker = readContract(`${ARTIFACTS_PATH}`, 'L2AssetTracker'); diff --git a/etc/utils/src/tokens.ts b/etc/utils/src/tokens.ts new file mode 100644 index 000000000000..cbe330da2b88 --- /dev/null +++ b/etc/utils/src/tokens.ts @@ -0,0 +1,132 @@ +import * as path from 'path'; +import * as fs from 'fs'; +import * as yaml from 'yaml'; +import * as ethers from 'ethers'; + +import * as zksync from 'zksync-ethers'; + +import { + ArtifactL1AssetTracker, + ArtifactBridgeHub, + ArtifactL1AssetRouter, + ArtifactNativeTokenVault, + ArtifactL1NativeTokenVault, + ArtifactInteropCenter +} from './constants'; + +export interface EcosystemContracts { + bridgehub: ethers.Contract; + assetRouter: ethers.Contract; + assetTracker: ethers.Contract; + nativeTokenVault: ethers.Contract; +} + +export async function getEcosystemContracts(wallet: zksync.Wallet): Promise { + const bridgehub = new ethers.Contract( + await (await wallet.getBridgehubContract()).getAddress(), + ArtifactBridgeHub.abi, + wallet.providerL1! 
+ ); + // console.log('bridgehub', await bridgehub.getAddress()); + // console.log('interface', bridgehub.interface); + // const bridgehubL1 = await bridgehub.L1_CHAIN_ID; + const assetRouter = new zksync.Contract( + await bridgehub.assetRouter(), + ArtifactL1AssetRouter.abi, + wallet.providerL1! + ); + const nativeTokenVault = new zksync.Contract( + await assetRouter.nativeTokenVault(), + ArtifactNativeTokenVault.abi, + wallet.providerL1! + ); + const l1NativeTokenVault = new zksync.Contract( + await assetRouter.nativeTokenVault(), + ArtifactL1NativeTokenVault.abi, + wallet.providerL1! + ); + const assetTrackerAddress = await l1NativeTokenVault.l1AssetTracker(); + // console.log('assetTrackerAddress', assetTrackerAddress); + const assetTracker = new zksync.Contract(assetTrackerAddress, ArtifactL1AssetTracker.abi, wallet.providerL1!); + return { + bridgehub, + assetRouter, + assetTracker, + nativeTokenVault + }; +} + +interface TokensDict { + [key: string]: L1Token; +} + +type Tokens = { + tokens: TokensDict; +}; + +export type L1Token = { + name: string; + symbol: string; + decimals: bigint; + address: string; +}; + +export function getToken( + pathToHome: string, + baseTokenAddress: zksync.types.Address +): { token: L1Token; baseToken: L1Token | undefined } { + const tokens = getTokensNew(pathToHome); + // wBTC is chosen because it has decimals different from ETH (8 instead of 18). + // Using this token will help us to detect decimals-related errors. + // but if it's not available, we'll use the first token from the list. + let token = tokens.tokens['WBTC']; + if (token === undefined) { + token = Object.values(tokens.tokens)[0]; + if (token.symbol == 'WETH') { + token = Object.values(tokens.tokens)[1]; + } + } + let baseToken; + + for (const key in tokens.tokens) { + const token = tokens.tokens[key]; + if (zksync.utils.isAddressEq(token.address, baseTokenAddress)) { + baseToken = token; + } + } + return { token, baseToken }; +} + +function getTokensNew(pathToHome: string): Tokens { + const configPath = path.join(pathToHome, '/configs/erc20.yaml'); + if (!fs.existsSync(configPath)) { + throw Error('Tokens config not found'); + } + + const parsedObject = yaml.parse( + fs.readFileSync(configPath, { + encoding: 'utf-8' + }), + { + customTags + } + ); + + for (const key in parsedObject.tokens) { + parsedObject.tokens[key].decimals = BigInt(parsedObject.tokens[key].decimals); + } + return parsedObject; +} + +function customTags(tags: yaml.Tags): yaml.Tags { + for (const tag of tags) { + // @ts-ignore + if (tag.format === 'HEX') { + // @ts-ignore + tag.resolve = (str, _onError, _opt) => { + return str; + }; + } + } + return tags; +} diff --git a/infrastructure/scripts/bridge_eth_to_era.sh b/infrastructure/scripts/bridge_eth_to_era.sh new file mode 100755 index 000000000000..119894a9af16 --- /dev/null +++ b/infrastructure/scripts/bridge_eth_to_era.sh @@ -0,0 +1,39 @@ +#!/bin/bash +set -euo pipefail + +# === Get chain name (from input or default to "era") === +CHAIN_NAME="${1:-era}" +export CHAIN_NAME + +echo "STARTING BRIDGE ETH TO $CHAIN_NAME" + +# === Load addresses from config === +CONFIG="chains/$CHAIN_NAME/configs/contracts.yaml" +GENESIS_CONFIG="chains/$CHAIN_NAME/configs/genesis.yaml" + +# === Load RPC URL from config === +export RPC_URL=$(yq '.api.web3_json_rpc.http_url' chains/$CHAIN_NAME/configs/general.yaml) +export CHAIN_ID=$(yq '.l2_chain_id' "$GENESIS_CONFIG") +echo "CHAIN_ID: $CHAIN_ID" + +export BH_ADDRESS=$(yq '.ecosystem_contracts.bridgehub_proxy_addr' "$CONFIG") + +# === Set 
constants === +SENDER=0x36615Cf349d7F6344891B1e7CA7C72883F5dc049 +PRIVATE_KEY=0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110 +VALUE=10000000000000000000000000000000 +GAS_LIMIT=10000000 +GAS_PRICE=500000000 +L1_RPC_URL=http://localhost:8545 + +# === Send transaction === +cast send \ + --from "$SENDER" \ + --private-key "$PRIVATE_KEY" \ + "$BH_ADDRESS" \ + "requestL2TransactionDirect((uint256,uint256,address,uint256,bytes,uint256,uint256,bytes[],address))" \ + "($CHAIN_ID,$VALUE,$SENDER,0,0x00,1000000,800,[$PRIVATE_KEY],$SENDER)" \ + --gas-limit "$GAS_LIMIT" \ + --value "$VALUE" \ + --gas-price "$GAS_PRICE" \ + --rpc-url "$L1_RPC_URL" diff --git a/infrastructure/scripts/bridge_token_from_era.sh b/infrastructure/scripts/bridge_token_from_era.sh new file mode 100755 index 000000000000..658a4165f68b --- /dev/null +++ b/infrastructure/scripts/bridge_token_from_era.sh @@ -0,0 +1,111 @@ +#!/bin/bash +set -euo pipefail + +# === Get chain name (from input or default to "era") === +CHAIN_NAME="${1:-era}" +export CHAIN_NAME + +echo "STARTING BRIDGE TOKEN FROM $CHAIN_NAME" + +CONFIG_CONTRACTS="chains/$CHAIN_NAME/configs/contracts.yaml" +GENESIS_CONFIG="chains/$CHAIN_NAME/configs/genesis.yaml" +GENERAL_CONFIG="chains/$CHAIN_NAME/configs/general.yaml" + +# === Set contract addresses === +export NTV_ADDRESS="0x0000000000000000000000000000000000010004" +export BH_ADDRESS="0x0000000000000000000000000000000000010002" +export L1_BH_ADDRESS=$(yq '.ecosystem_contracts.bridgehub_proxy_addr' "$CONFIG_CONTRACTS") +export ASSET_ROUTER_ADDRESS="0x0000000000000000000000000000000000010003" +export PRIVATE_KEY=0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110 +export SENDER=0x36615Cf349d7F6344891B1e7CA7C72883F5dc049 + +# === Load RPC URL from config === +export RPC_URL=$(yq '.api.web3_json_rpc.http_url' $GENERAL_CONFIG) +echo "RPC URL: $RPC_URL" + +export CHAIN_ID=$(yq '.l2_chain_id' $GENESIS_CONFIG) +echo "CHAIN_ID: $CHAIN_ID" +CHAIN_ID_HEX=$(printf "0x%02x\n" "$CHAIN_ID") +echo "CHAIN_ID_HEX: $CHAIN_ID_HEX" + +# === Move into the contracts directory === +cd contracts/l1-contracts/ + +# === Deploy test token === +export TOKEN_ADDRESS=$(forge create ./contracts/dev-contracts/TestnetERC20Token.sol:TestnetERC20Token \ + --private-key $PRIVATE_KEY \ + --broadcast \ + --gas-price 1000000000000000 \ + --zksync \ + -r "$RPC_URL" \ + --zk-gas-per-pubdata 8000 \ + --constructor-args L2TestToken TT 18 | grep "Deployed to:" | awk '{print $3}' +) +echo "TOKEN_ADDRESS: $TOKEN_ADDRESS" +# export TOKEN_ADDRESS="" // for speed the token deployment can be skipped if running multiple times. 
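+# The next steps derive the token's asset id (keccak256 of abi.encode(chainId, NTV address, token address),
+# which should match the NativeTokenVault's on-chain derivation) and the burn data
+# (abi.encode(amount, receiver, token)) passed to `withdraw(bytes32,bytes)` on the asset router.
+# The "selectorNotUsed(...)" signature is a dummy: `cast abi-encode` encodes only the arguments, no selector.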
+ +# === Calculate token asset ID === + +export TOKEN_ASSET_ID=$(cast keccak $(cast abi-encode "selectorNotUsed(uint256,address,address)" \ + "$CHAIN_ID_HEX" \ + "$NTV_ADDRESS" \ + "$TOKEN_ADDRESS")) + +echo "TOKEN_ASSET_ID: $TOKEN_ASSET_ID" + +# === Encode token burn data === +export TOKEN_BURN_DATA=$(cast abi-encode "selectorNotUsed(uint256,address,address)" \ + 100 \ + $SENDER \ + "$TOKEN_ADDRESS" | cut -c 3-) + +echo "Token Address: $TOKEN_ADDRESS" +echo "Token Asset ID: $TOKEN_ASSET_ID" + +# === Mint tokens === +cast send \ + --from $SENDER \ + --private-key $PRIVATE_KEY \ + "$TOKEN_ADDRESS" \ + "mint(address,uint256)" \ + $SENDER \ + 1000000 \ + --rpc-url "$RPC_URL" \ + --gas-price 1000000000 + +# === Approve vault for transfer === +cast send \ + --from $SENDER \ + --private-key $PRIVATE_KEY \ + "$TOKEN_ADDRESS" \ + "approve(address,uint256)" \ + "$NTV_ADDRESS" \ + 100000000 \ + --rpc-url "$RPC_URL" \ + --gas-price 1000000000 + +# === Initiate withdrawal === +withdrawTxHash=$( + cast send \ + --from $SENDER \ + --private-key $PRIVATE_KEY \ + "$ASSET_ROUTER_ADDRESS" \ + "withdraw(bytes32,bytes)" \ + "$TOKEN_ASSET_ID" \ + "$TOKEN_BURN_DATA" \ + --gas-limit 10000000 \ + --rpc-url "$RPC_URL" \ + --gas-price 1000000000 \ + --json \ + | jq -r '.transactionHash' +# | grep -i "transactionHash" | awk '{print $2}' +) +echo "Withdraw transaction hash: $withdrawTxHash" + +forge script deploy-scripts/provider/ZKSProvider.s.sol:ZKSProvider --broadcast --slow --legacy --skip-simulation --ffi --rpc-url http://localhost:8545 \ + --private-key $PRIVATE_KEY --sig "waitForWithdrawalToBeFinalized(uint256,address,string,bytes32,uint256)" \ + $CHAIN_ID $L1_BH_ADDRESS $RPC_URL $withdrawTxHash 0 + +forge script deploy-scripts/provider/ZKSProvider.s.sol:ZKSProvider --broadcast --slow --legacy --skip-simulation --ffi --rpc-url http://localhost:8545 \ + --private-key $PRIVATE_KEY --sig "finalizeWithdrawal(uint256,address,string,bytes32,uint256)" \ + $CHAIN_ID $L1_BH_ADDRESS $RPC_URL $withdrawTxHash 0 diff --git a/infrastructure/scripts/bridge_token_to_era.sh b/infrastructure/scripts/bridge_token_to_era.sh new file mode 100755 index 000000000000..0618955f6f32 --- /dev/null +++ b/infrastructure/scripts/bridge_token_to_era.sh @@ -0,0 +1,81 @@ +#!/bin/bash +set -euo pipefail + +# === Get chain name (from input or default to "era") === +CHAIN_NAME="${1:-era}" +export CHAIN_NAME + +echo "STARTING BRIDGE TOKEN TO $CHAIN_NAME" + +# === Load addresses from config === +CONFIG_CONTRACTS="chains/$CHAIN_NAME/configs/contracts.yaml" +CONFIG_GENERAL="chains/$CHAIN_NAME/configs/general.yaml" +GENESIS_CONFIG="chains/$CHAIN_NAME/configs/genesis.yaml" + + +export NTV_ADDRESS=$(yq '.ecosystem_contracts.native_token_vault_addr' "$CONFIG_CONTRACTS") +export BH_ADDRESS=$(yq '.ecosystem_contracts.bridgehub_proxy_addr' "$CONFIG_CONTRACTS") +export L1_AR_ADDRESS=$(yq '.bridges.shared.l1_address' "$CONFIG_CONTRACTS") +export RPC_URL=$(yq '.api.web3_json_rpc.http_url' "$CONFIG_GENERAL") +export L1_CHAIN_ID=$(cast chain-id) +export PRIVATE_KEY=0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110 +export SENDER=0x36615Cf349d7F6344891B1e7CA7C72883F5dc049 + +# === Load RPC URL from config === +export RPC_URL=$(yq '.api.web3_json_rpc.http_url' chains/$CHAIN_NAME/configs/general.yaml) +export CHAIN_ID=$(yq '.l2_chain_id' "$GENESIS_CONFIG") +echo "CHAIN_ID: $CHAIN_ID" + +# === Deploy test token === +export TOKEN_ADDRESS=$( + forge create ./contracts/l1-contracts/contracts/dev-contracts/TestnetERC20Token.sol:TestnetERC20Token \ + 
+
+# === Output addresses ===
+echo "TOKEN_ADDRESS: $TOKEN_ADDRESS"
+echo "TOKEN_ASSET_ID: $TOKEN_ASSET_ID"
+
+# === Mint and approve token ===
+cast send --from $SENDER \
+  --private-key $PRIVATE_KEY \
+  "$TOKEN_ADDRESS" \
+  "mint(address,uint256)" $SENDER 100 \
+  --gas-price 10000
+
+cast send --from $SENDER \
+  --private-key $PRIVATE_KEY \
+  "$TOKEN_ADDRESS" \
+  "approve(address,uint256)" "$NTV_ADDRESS" 100 \
+  --gas-price 10000
+
+# === Send message through bridge ===
+cast send --from $SENDER \
+  --private-key $PRIVATE_KEY \
+  "$BH_ADDRESS" \
+  "requestL2TransactionTwoBridges((uint256,uint256,uint256,uint256,uint256,address,address,uint256,bytes))" \
+  "(271,10000000000000000000000000000000,0,10000000,800,$SENDER,$L1_AR_ADDRESS,0,$BRIDGE_DATA)" \
+  --gas-limit 10000000 \
+  --value 10000000000000000000000000000000 \
+  --rpc-url localhost:8545 \
+  --gas-price 100000
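+# The tuple above is assumed to map onto Bridgehub's L2TransactionRequestTwoBridgesOuter:
+# (chainId, mintValue, l2Value, l2GasLimit, l2GasPerPubdataByteLimit,
+#  refundRecipient, secondBridgeAddress, secondBridgeValue, secondBridgeCalldata).
+# Note that the chain id is hard-coded to 271 here instead of using $CHAIN_ID.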
diff --git a/infrastructure/scripts/interop.sh b/infrastructure/scripts/interop.sh
index ddb247fdc433..916c58dbfed1 100755
--- a/infrastructure/scripts/interop.sh
+++ b/infrastructure/scripts/interop.sh
@@ -3,20 +3,20 @@
 zkstack dev clean containers && zkstack up -o false
 
 zkstack dev contracts
-zkstack ecosystem init --deploy-paymaster --deploy-erc20 \
-    --deploy-ecosystem --l1-rpc-url=http://127.0.0.1:8545 \
-    --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \
-    --server-db-name=zksync_server_localhost_era \
-    --ignore-prerequisites --observability=false \
-    --chain era \
-    --update-submodules false
+# zkstack ecosystem init --deploy-paymaster --deploy-erc20 \
+#     --deploy-ecosystem --l1-rpc-url=http://127.0.0.1:8545 \
+#     --server-db-url=postgres://postgres:notsecurepassword@127.0.0.1:5432 \
+#     --server-db-name=zksync_server_127.0.0.1_era \
+#     --ignore-prerequisites --observability=false \
+#     --chain era \
+#     --update-submodules false
 
 zkstack dev generate-genesis
 
 zkstack ecosystem init --deploy-paymaster --deploy-erc20 \
     --deploy-ecosystem --l1-rpc-url=http://127.0.0.1:8545 \
-    --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \
-    --server-db-name=zksync_server_localhost_era \
+    --server-db-url=postgres://postgres:notsecurepassword@127.0.0.1:5432 \
+    --server-db-name=zksync_server_127.0.0.1_era \
     --ignore-prerequisites --observability=false \
     --chain era \
     --update-submodules false
@@ -37,8 +37,8 @@ zkstack chain create \
 zkstack chain init \
     --deploy-paymaster \
     --l1-rpc-url=http://127.0.0.1:8545 \
-    --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \
-    --server-db-name=zksync_server_localhost_validium \
+    --server-db-url=postgres://postgres:notsecurepassword@127.0.0.1:5432 \
+    --server-db-name=zksync_server_127.0.0.1_validium \
    --chain validium --update-submodules false \
    --validium-type no-da
 
@@ -58,11 +58,32 @@ zkstack chain create \
 zkstack chain init \
    --deploy-paymaster \
    --l1-rpc-url=http://127.0.0.1:8545 \
-    --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \
-    --server-db-name=zksync_server_localhost_gateway \
+    --server-db-url=postgres://postgres:notsecurepassword@127.0.0.1:5432 \
+    --server-db-name=zksync_server_127.0.0.1_gateway \
    --chain gateway --update-submodules false
 
+zkstack server --ignore-prerequisites --chain era &> ./zruns/era1.log &
+zkstack server wait --ignore-prerequisites --verbose --chain era
+
+sh ./infrastructure/scripts/bridge_eth_to_era.sh era
+sh ./infrastructure/scripts/bridge_token_to_era.sh era
+
+sh ./infrastructure/scripts/bridge_token_from_era.sh era
+sleep 30
+pkill -9 zksync_server
+sleep 30
+
+# zkstack server --ignore-prerequisites --chain validium &> ./zruns/validium1.log &
+# zkstack server wait --ignore-prerequisites --verbose --chain validium
+# # we need to fund the address before migration. todo enable base token transfers.
+# sh ./infrastructure/scripts/bridge_eth_to_era.sh validium
+# sleep 30
+
+# pkill -9 zksync_server
+# sleep 10
+
+zkstack chain gateway create-tx-filterer --chain gateway --ignore-prerequisites
 
 zkstack chain gateway convert-to-gateway --chain gateway --ignore-prerequisites
 zkstack dev config-writer --path etc/env/file_based/overrides/tests/gateway.yaml --chain gateway
 zkstack server --ignore-prerequisites --chain gateway &> ./zruns/gateway.log &
@@ -70,20 +91,25 @@ zkstack server --ignore-prerequisites --chain gateway &> ./zruns/gateway.log &
 
 zkstack server wait --ignore-prerequisites --verbose --chain gateway
 
-sleep 10
+sleep 20
 
 zkstack chain gateway migrate-to-gateway --chain era --gateway-chain-name gateway
 zkstack chain gateway migrate-to-gateway --chain validium --gateway-chain-name gateway
 
-zkstack server --ignore-prerequisites --chain era &> ./zruns/era.log &
-
+zkstack server --ignore-prerequisites --chain era &> ./zruns/era.log &
+zkstack server --ignore-prerequisites --chain validium &> ./zruns/validium.log &
 zkstack server wait --ignore-prerequisites --verbose --chain era
-zkstack server --ignore-prerequisites --chain validium &> ./zruns/validium.log &
-
 zkstack server wait --ignore-prerequisites --verbose --chain validium
 
+zkstack chain gateway migrate-token-balances --to-gateway --chain era --gateway-chain-name gateway
+zkstack chain gateway migrate-token-balances --to-gateway --chain validium --gateway-chain-name gateway
+
+
+zkstack dev init-test-wallet --chain era
+zkstack dev init-test-wallet --chain validium
 
 # Runs interop integration test between era-validium in parallel
 mkdir -p zlogs
-./bin/run_on_all_chains.sh "zkstack dev test integration -t 'L1 ERC20' --verbose" \
-    "era,validium" zlogs/ \
-    'era:--evm' 'validium:--evm'
+# zkstack dev test integration -t "Interop" --chain era --no-deps --second-chain validium &> zlogs/era.logs
+# ./bin/run_on_all_chains.sh "zkstack dev test integration -t 'Interop' --verbose" \
+#     "era,validium" zlogs/ \
+#     'era:--evm' 'validium:--evm'
diff --git a/infrastructure/scripts/load_addresses.sh b/infrastructure/scripts/load_addresses.sh
new file mode 100755
index 000000000000..a06ecadcc56e
--- /dev/null
+++ b/infrastructure/scripts/load_addresses.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -euo pipefail
+
+export CONFIG_CONTRACTS="chains/era/configs/contracts.yaml"
+export CONFIG_GENERAL="chains/era/configs/general.yaml"
+
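+# Intended usage (assumption, based on the exports in this file): source it
+# from the repo root so the addresses end up in the calling shell, e.g.
+#   . ./infrastructure/scripts/load_addresses.sh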
+export NTV_ADDRESS=$(yq '.ecosystem_contracts.native_token_vault_addr' "$CONFIG_CONTRACTS")
+export BH_ADDRESS=$(yq '.ecosystem_contracts.bridgehub_proxy_addr' "$CONFIG_CONTRACTS")
+export L1_AR_ADDRESS=$(yq '.bridges.shared.l1_address' "$CONFIG_CONTRACTS")
+export L1_IC_ADDRESS=$(cast call $BH_ADDRESS "interopCenter()(address)")
+export L1_AT_ADDRESS=$(cast call $NTV_ADDRESS "l1AssetTracker()(address)")
+export CHAIN_ASSET_HANDLER_ADDRESS=$(cast call $BH_ADDRESS "chainAssetHandler()(address)")
\ No newline at end of file
diff --git a/prover/crates/lib/prover_fri_types/src/lib.rs b/prover/crates/lib/prover_fri_types/src/lib.rs
index 538ef73bfd43..e18c9deebb33 100644
--- a/prover/crates/lib/prover_fri_types/src/lib.rs
+++ b/prover/crates/lib/prover_fri_types/src/lib.rs
@@ -23,8 +23,8 @@ pub mod keys;
 pub const MAX_COMPRESSION_CIRCUITS: u8 = 5;
 
 // THESE VALUES SHOULD BE UPDATED ON ANY PROTOCOL UPGRADE OF PROVERS
-pub const PROVER_PROTOCOL_VERSION: ProtocolVersionId = ProtocolVersionId::Version29;
-pub const PROVER_PROTOCOL_PATCH: VersionPatch = VersionPatch(2);
+pub const PROVER_PROTOCOL_VERSION: ProtocolVersionId = ProtocolVersionId::Version30;
+pub const PROVER_PROTOCOL_PATCH: VersionPatch = VersionPatch(0);
 pub const PROVER_PROTOCOL_SEMANTIC_VERSION: ProtocolSemanticVersion = ProtocolSemanticVersion {
     minor: PROVER_PROTOCOL_VERSION,
     patch: PROVER_PROTOCOL_PATCH,
diff --git a/yarn.lock b/yarn.lock
index 5228a6c1e09d..f78253f90a95 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2825,22 +2825,14 @@
   dependencies:
     "@opentelemetry/core" "^1.1.0"
 
-"@openzeppelin/contracts-upgradeable-v4@npm:@openzeppelin/contracts-upgradeable@4.9.5":
+"@openzeppelin/contracts-upgradeable-v4@npm:@openzeppelin/contracts-upgradeable@4.9.5", "@openzeppelin/contracts-upgradeable@4.9.5":
+  name "@openzeppelin/contracts-upgradeable-v4"
   version "4.9.5"
   resolved "https://registry.yarnpkg.com/@openzeppelin/contracts-upgradeable/-/contracts-upgradeable-4.9.5.tgz#572b5da102fc9be1d73f34968e0ca56765969812"
   integrity sha512-f7L1//4sLlflAN7fVzJLoRedrf5Na3Oal5PZfIq55NFcVZ90EpV1q5xOvL4lFvg3MNICSDr2hH0JUBxwlxcoPg==
 
-"@openzeppelin/contracts-upgradeable@4.9.5":
-  version "4.9.5"
-  resolved "https://registry.yarnpkg.com/@openzeppelin/contracts-upgradeable/-/contracts-upgradeable-4.9.5.tgz#572b5da102fc9be1d73f34968e0ca56765969812"
-  integrity sha512-f7L1//4sLlflAN7fVzJLoRedrf5Na3Oal5PZfIq55NFcVZ90EpV1q5xOvL4lFvg3MNICSDr2hH0JUBxwlxcoPg==
-
-"@openzeppelin/contracts-v4@npm:@openzeppelin/contracts@4.9.5":
-  version "4.9.5"
-  resolved "https://registry.yarnpkg.com/@openzeppelin/contracts/-/contracts-4.9.5.tgz#1eed23d4844c861a1835b5d33507c1017fa98de8"
-  integrity sha512-ZK+W5mVhRppff9BE6YdR8CC52C8zAvsVAiWhEtQ5+oNxFE6h1WdeWo+FJSF8KKvtxxVYZ7MTP/5KoVpAU3aSWg==
-
-"@openzeppelin/contracts@4.9.5":
+"@openzeppelin/contracts-v4@npm:@openzeppelin/contracts@4.9.5", "@openzeppelin/contracts@4.9.5":
+  name "@openzeppelin/contracts-v4"
   version "4.9.5"
   resolved "https://registry.yarnpkg.com/@openzeppelin/contracts/-/contracts-4.9.5.tgz#1eed23d4844c861a1835b5d33507c1017fa98de8"
   integrity sha512-ZK+W5mVhRppff9BE6YdR8CC52C8zAvsVAiWhEtQ5+oNxFE6h1WdeWo+FJSF8KKvtxxVYZ7MTP/5KoVpAU3aSWg==
@@ -11389,7 +11381,7 @@ string-length@^4.0.1:
     char-regex "^1.0.2"
     strip-ansi "^6.0.0"
 
-"string-width-cjs@npm:string-width@^4.2.0":
+"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3:
   version "4.2.3"
   resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
   integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
@@ -11406,15 +11398,6 @@ string-width@^2.1.0, string-width@^2.1.1:
     is-fullwidth-code-point "^2.0.0"
     strip-ansi "^4.0.0"
 
-string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3:
-  version "4.2.3"
-  resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
-  integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
-  dependencies:
-    emoji-regex "^8.0.0"
-    is-fullwidth-code-point "^3.0.0"
-    strip-ansi "^6.0.1"
-
 string-width@^5.0.1, string-width@^5.1.2:
   version "5.1.2"
   resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794"
@@ -11481,7 +11464,7 @@ string_decoder@~1.1.1:
   dependencies:
     safe-buffer "~5.1.0"
 
-"strip-ansi-cjs@npm:strip-ansi@^6.0.1":
+"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1:
   version "6.0.1"
   resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
   integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
@@ -11502,13 +11485,6 @@ strip-ansi@^5.1.0:
   dependencies:
     ansi-regex "^4.1.0"
 
-strip-ansi@^6.0.0, strip-ansi@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
-  integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
-  dependencies:
-    ansi-regex "^5.0.1"
-
 strip-ansi@^7.0.1:
   version "7.1.0"
   resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45"
@@ -12479,16 +12455,7 @@ workerpool@6.2.1:
   resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.2.1.tgz#46fc150c17d826b86a008e5a4508656777e9c343"
   integrity sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw==
 
-"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0":
-  version "7.0.0"
-  resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43"
-  integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==
-  dependencies:
-    ansi-styles "^4.0.0"
-    string-width "^4.1.0"
-    strip-ansi "^6.0.0"
-
-wrap-ansi@^7.0.0:
+"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0:
   version "7.0.0"
   resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43"
   integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==
diff --git a/zkstack_cli/crates/common/src/forge.rs b/zkstack_cli/crates/common/src/forge.rs
index cd4294216b10..dbc8fd84ce50 100644
--- a/zkstack_cli/crates/common/src/forge.rs
+++ b/zkstack_cli/crates/common/src/forge.rs
@@ -155,6 +155,12 @@ impl ForgeScript {
         self
     }
 
+    pub fn with_gas_per_pubdata(mut self, gas_per_pubdata: u64) -> Self {
+        self.args
+            .add_arg(ForgeScriptArg::GasPerPubdata { gas_per_pubdata });
+        self
+    }
+
     /// Makes sure a transaction is sent, only after its previous one has been confirmed and succeeded.
     pub fn with_slow(mut self) -> Self {
         self.args.add_arg(ForgeScriptArg::Slow);
@@ -284,6 +290,10 @@ pub enum ForgeScriptArg {
     GasLimit {
         gas_limit: u64,
     },
+    #[strum(to_string = "zk-gas-per-pubdata={gas_per_pubdata}")]
+    GasPerPubdata {
+        gas_per_pubdata: u64,
+    },
     Zksync,
     #[strum(to_string = "skip={skip_path}")]
     Skip {
diff --git a/zkstack_cli/crates/config/src/forge_interface/script_params.rs b/zkstack_cli/crates/config/src/forge_interface/script_params.rs
index 2bdcb6218b25..48b357a09d99 100644
--- a/zkstack_cli/crates/config/src/forge_interface/script_params.rs
+++ b/zkstack_cli/crates/config/src/forge_interface/script_params.rs
@@ -107,6 +107,9 @@ pub const GATEWAY_VOTE_PREPARATION: ForgeScriptParams = ForgeScriptParams {
 pub const GATEWAY_GOVERNANCE_TX_PATH1: &str =
     "contracts/l1-contracts/script-out/gateway-deploy-governance-txs-1.json";
 
+pub const GATEWAY_MIGRATE_TOKEN_BALANCES_SCRIPT_PATH: &str =
+    "deploy-scripts/gateway/GatewayMigrateTokenBalances.s.sol";
+
 pub const V29_UPGRADE_ECOSYSTEM_PARAMS: ForgeScriptParams = ForgeScriptParams {
     input: "script-config/v29-upgrade-ecosystem.toml",
     output: "script-out/v29-upgrade-ecosystem.toml",
diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/gateway/migrate_from_gateway.rs b/zkstack_cli/crates/zkstack/src/commands/chain/gateway/migrate_from_gateway.rs
index 9abc71e7d69c..0a7d5646b2df 100644
--- a/zkstack_cli/crates/zkstack/src/commands/chain/gateway/migrate_from_gateway.rs
+++ b/zkstack_cli/crates/zkstack/src/commands/chain/gateway/migrate_from_gateway.rs
@@ -217,7 +217,7 @@ pub async fn run(args: MigrateFromGatewayArgs, shell: &Shell) -> anyhow::Result<()> {
     Ok(())
 }
 
-const LOOK_WAITING_TIME_MS: u64 = 200;
+const LOOK_WAITING_TIME_MS: u64 = 1600;
 
 pub(crate) async fn check_whether_gw_transaction_is_finalized(
     gateway_provider: &Client<L2>,
diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/gateway/migrate_token_balances.rs b/zkstack_cli/crates/zkstack/src/commands/chain/gateway/migrate_token_balances.rs
new file mode 100644
index 000000000000..8d29c4601f4c
--- /dev/null
+++ b/zkstack_cli/crates/zkstack/src/commands/chain/gateway/migrate_token_balances.rs
@@ -0,0 +1,317 @@
+use std::path::{Path, PathBuf};
+
+use anyhow::Context;
+use clap::Parser;
+use ethers::{
+    abi::{parse_abi, Address},
+    contract::BaseContract,
+};
+use lazy_static::lazy_static;
+use serde::{Deserialize, Serialize};
+use xshell::Shell;
+use zkstack_cli_common::{
+    config::global_config,
+    forge::{Forge, ForgeScriptArgs},
+    logger,
+    wallets::Wallet,
+};
+use zkstack_cli_config::{
+    forge_interface::script_params::GATEWAY_MIGRATE_TOKEN_BALANCES_SCRIPT_PATH, ZkStackConfig,
+    ZkStackConfigTrait,
+};
+use zksync_basic_types::U256;
+use zksync_types::L2ChainId;
+
+use crate::{
+    commands::dev::commands::{rich_account, rich_account::args::RichAccountArgs},
+    messages::MSG_CHAIN_NOT_INITIALIZED,
+    utils::forge::{fill_forge_private_key, WalletOwner},
+};
+
+lazy_static! {
+    static ref GATEWAY_MIGRATE_TOKEN_BALANCES_FUNCTIONS: BaseContract = BaseContract::from(
+        parse_abi(&[
+            "function startTokenMigrationOnL2OrGateway(bool, uint256, string, string) public",
+            // "function continueMigrationOnGateway(uint256, string) public",
+            "function finishMigrationOnL1(bool, address, uint256, uint256, string, string, bool) public",
+            "function checkAllMigrated(uint256, string) public",
+        ])
+        .unwrap(),
+    );
+}
+
+#[derive(Debug, Serialize, Deserialize, Parser)]
+pub struct MigrateTokenBalancesArgs {
+    /// All ethereum environment related arguments
+    #[clap(flatten)]
+    #[serde(flatten)]
+    pub forge_args: ForgeScriptArgs,
+
+    #[clap(long)]
+    pub gateway_chain_name: String,
+
+    #[clap(long, default_missing_value = "true", num_args = 0..=1)]
+    pub run_initial: Option<bool>,
+
+    #[clap(long, default_missing_value = "true", num_args = 0..=1)]
+    pub skip_funding: Option<bool>,
+
+    #[clap(long, default_missing_value = "true", num_args = 0..=1)]
+    pub to_gateway: Option<bool>,
+}
+
+pub async fn run(args: MigrateTokenBalancesArgs, shell: &Shell) -> anyhow::Result<()> {
+    let ecosystem_config = ZkStackConfig::ecosystem(shell)?;
+
+    let chain_name = global_config().chain_name.clone();
+    let chain_config = ecosystem_config
+        .load_chain(chain_name)
+        .context(MSG_CHAIN_NOT_INITIALIZED)?;
+
+    let gateway_chain_config = ecosystem_config
+        .load_chain(Some(args.gateway_chain_name.clone()))
+        .context("Gateway not present")?;
+    // let gateway_chain_id = gateway_chain_config.chain_id.as_u64();
+    // let gateway_gateway_config = gateway_chain_config
+    //     .get_gateway_config()
+    //     .context("Gateway config not present")?;
+
+    let l1_url = chain_config.get_secrets_config().await?.l1_rpc_url()?;
+
+    let general_chain_config = chain_config.get_general_config().await?;
+    let l2_url = general_chain_config.l2_http_url()?;
+
+    // let genesis_config = chain_config.get_genesis_config().await?;
+    // let gateway_contract_config = gateway_chain_config.get_contracts_config()?;
+
+    // let chain_contracts_config = chain_config.get_contracts_config().unwrap();
+
+    logger::info(format!(
+        "Migrating the token balances {} the Gateway...",
+        if args.to_gateway.unwrap_or(true) {
+            "to"
+        } else {
+            "from"
+        }
+    ));
+
+    let general_config = gateway_chain_config.get_general_config().await?;
+    let gw_rpc_url = general_config.l2_http_url()?;
+
+    // let chain_secrets_config = chain_config.get_wallets_config().unwrap();
+
+    migrate_token_balances_from_gateway(
+        shell,
+        args.run_initial.unwrap_or(true),
+        args.skip_funding.unwrap_or(false),
+        &args.forge_args.clone(),
+        args.to_gateway.unwrap_or(true),
+        &ecosystem_config.path_to_foundry_scripts(),
+        ecosystem_config
+            .get_wallets()?
+            .deployer
+            .context("Missing deployer wallet")?,
+        ecosystem_config
+            .get_contracts_config()?
+            .ecosystem_contracts
+            .bridgehub_proxy_addr,
+        chain_config.chain_id.as_u64(),
+        gateway_chain_config.chain_id.as_u64(),
+        l1_url.clone(),
+        gw_rpc_url.clone(),
+        l2_url.clone(),
+    )
+    .await?;
+
+    Ok(())
+}
+
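+/// Drives the token balance migration end to end: optionally pre-funds the
+/// deployer account on the target chain and on the gateway chain via
+/// `rich_account`, then runs the `GatewayMigrateTokenBalances` forge script:
+/// `startTokenMigrationOnL2OrGateway` on the source chain,
+/// `finishMigrationOnL1` twice on L1, and finally `checkAllMigrated` on the L2.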
+#[allow(clippy::too_many_arguments)]
+pub async fn migrate_token_balances_from_gateway(
+    shell: &Shell,
+    run_initial: bool,
+    skip_funding: bool,
+    forge_args: &ForgeScriptArgs,
+    to_gateway: bool,
+    foundry_scripts_path: &Path,
+    wallet: Wallet,
+    l1_bridgehub_addr: Address,
+    l2_chain_id: u64,
+    gw_chain_id: u64,
+    l1_rpc_url: String,
+    gw_rpc_url: String,
+    l2_rpc_url: String,
+) -> anyhow::Result<()> {
+    println!("l2_chain_id: {}", l2_chain_id);
+    println!("wallet.address: {}", wallet.address);
+
+    if run_initial && !skip_funding {
+        rich_account::run(
+            shell,
+            RichAccountArgs {
+                l2_account: Some(wallet.address),
+                l1_account_private_key: Some(
+                    "0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110"
+                        .to_string(),
+                ),
+                l1_rpc_url: Some(l1_rpc_url.clone()),
+                amount: Some(U256::from(1_000_000_000_000_000_000u64)),
+            },
+            Some(L2ChainId::from(l2_chain_id as u32)),
+        )
+        .await?;
+        rich_account::run(
+            shell,
+            RichAccountArgs {
+                l2_account: Some(wallet.address),
+                l1_account_private_key: Some(
+                    "0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110"
+                        .to_string(),
+                ),
+                l1_rpc_url: Some(l1_rpc_url.clone()),
+                amount: Some(U256::from(1_000_000_000_000_000_000u64)),
+            },
+            Some(L2ChainId::from(gw_chain_id as u32)),
+        )
+        .await?;
+        std::thread::sleep(std::time::Duration::from_secs(20));
+
+        println!("Account funded");
+    }
+
+    let calldata = GATEWAY_MIGRATE_TOKEN_BALANCES_FUNCTIONS
+        .encode(
+            "startTokenMigrationOnL2OrGateway",
+            (
+                to_gateway,
+                U256::from(l2_chain_id),
+                l2_rpc_url.clone(),
+                gw_rpc_url.clone(),
+            ),
+        )
+        .unwrap();
+
+    // Ensure the broadcast directory exists before proceeding
+    // std::fs::create_dir_all("/usr/src/zksync/contracts/l1-contracts/broadcast/GatewayMigrateTokenBalances.s.sol/")?;
+
+    let mut forge = Forge::new(foundry_scripts_path)
+        .script(
+            &PathBuf::from(GATEWAY_MIGRATE_TOKEN_BALANCES_SCRIPT_PATH),
+            forge_args.clone(),
+        )
+        .with_ffi()
+        .with_rpc_url(if to_gateway {
+            l2_rpc_url.clone()
+        } else {
+            gw_rpc_url.clone()
+        })
+        .with_broadcast()
+        .with_zksync()
+        .with_slow()
+        .with_gas_per_pubdata(8000)
+        .with_calldata(&calldata);
+
+    // Governor private key is required for this script
+    if run_initial {
+        forge = fill_forge_private_key(forge, Some(&wallet), WalletOwner::Deployer)?;
+        forge.run(shell)?;
+
+        println!("Token migration started");
+    }
+
+    let calldata = GATEWAY_MIGRATE_TOKEN_BALANCES_FUNCTIONS
+        .encode(
+            "finishMigrationOnL1",
+            (
+                to_gateway,
+                l1_bridgehub_addr,
+                U256::from(l2_chain_id),
+                U256::from(gw_chain_id),
+                l2_rpc_url.clone(),
+                gw_rpc_url.clone(),
+                true,
+            ),
+        )
+        .unwrap();
+
+    let forge = Forge::new(foundry_scripts_path)
+        .script(
+            &PathBuf::from(GATEWAY_MIGRATE_TOKEN_BALANCES_SCRIPT_PATH),
+            forge_args.clone(),
+        )
+        .with_ffi()
+        .with_rpc_url(l1_rpc_url.clone())
+        .with_broadcast()
+        .with_slow()
+        .with_gas_per_pubdata(8000)
+        .with_calldata(&calldata);
+
+    // Governor private key is required for this script
+    // forge = fill_forge_private_key(forge, Some(&wallet), WalletOwner::Deployer)?;
+    forge.run(shell)?;
+
+    let calldata = GATEWAY_MIGRATE_TOKEN_BALANCES_FUNCTIONS
+        .encode(
+            "finishMigrationOnL1",
+            (
+                to_gateway,
+                l1_bridgehub_addr,
+                U256::from(l2_chain_id),
+                U256::from(gw_chain_id),
+                l2_rpc_url.clone(),
+                gw_rpc_url.clone(),
+                false,
+            ),
+        )
+        .unwrap();
+
+    let mut forge = Forge::new(foundry_scripts_path)
+        .script(
+            &PathBuf::from(GATEWAY_MIGRATE_TOKEN_BALANCES_SCRIPT_PATH),
+            forge_args.clone(),
+        )
+        .with_ffi()
+        .with_rpc_url(l1_rpc_url.clone())
+        .with_broadcast()
+        .with_slow()
+        .with_gas_per_pubdata(8000)
+        .with_calldata(&calldata);
+
+    // Governor private key is required for this script
+    forge = fill_forge_private_key(forge, Some(&wallet), WalletOwner::Deployer)?;
+    forge.run(shell)?;
+
+    std::thread::sleep(std::time::Duration::from_secs(30));
+
+    println!("Token migration finished");
+
+    let calldata = GATEWAY_MIGRATE_TOKEN_BALANCES_FUNCTIONS
+        .encode(
+            "checkAllMigrated",
+            (U256::from(l2_chain_id), l2_rpc_url.clone()),
+        )
+        .unwrap();
+
+    let mut forge = Forge::new(foundry_scripts_path)
+        .script(
+            &PathBuf::from(GATEWAY_MIGRATE_TOKEN_BALANCES_SCRIPT_PATH),
+            forge_args.clone(),
+        )
+        .with_ffi()
+        .with_rpc_url(l2_rpc_url.clone())
+        .with_broadcast()
+        .with_zksync()
+        .with_slow()
+        .with_gas_per_pubdata(8000)
+        .with_calldata(&calldata);
+
+    // Governor private key is required for this script
+    if run_initial {
+        forge = fill_forge_private_key(forge, Some(&wallet), WalletOwner::Deployer)?;
+        forge.run(shell)?;
+    }
+
+    println!("Token migration checked");
+
+    Ok(())
+}
diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/gateway/mod.rs b/zkstack_cli/crates/zkstack/src/commands/chain/gateway/mod.rs
index 6825900465c0..03733858e388 100644
--- a/zkstack_cli/crates/zkstack/src/commands/chain/gateway/mod.rs
+++ b/zkstack_cli/crates/zkstack/src/commands/chain/gateway/mod.rs
@@ -15,6 +15,7 @@ mod migrate_from_gateway;
 mod migrate_from_gateway_calldata;
 pub mod migrate_to_gateway;
 pub(crate) mod migrate_to_gateway_calldata;
+pub(crate) mod migrate_token_balances;
 mod notify_server_calldata;
 
 #[derive(Subcommand, Debug)]
@@ -37,6 +38,7 @@ pub enum GatewayComamnds {
     MigrateFromGateway(migrate_from_gateway::MigrateFromGatewayArgs),
     NotifyAboutToGatewayUpdate(ForgeScriptArgs),
     NotifyAboutFromGatewayUpdate(ForgeScriptArgs),
+    MigrateTokenBalances(migrate_token_balances::MigrateTokenBalancesArgs),
 }
 
 pub async fn run(shell: &Shell, args: GatewayComamnds) -> anyhow::Result<()> {
@@ -69,5 +71,8 @@ pub async fn run(shell: &Shell, args: GatewayComamnds) -> anyhow::Result<()> {
         GatewayComamnds::NotifyAboutFromGatewayUpdate(args) => {
             gateway_common::notify_server(args, shell, MigrationDirection::FromGateway).await
         }
+        GatewayComamnds::MigrateTokenBalances(args) => {
+            migrate_token_balances::run(args, shell).await
+        }
     }
 }
diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/rich_account/mod.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/rich_account/mod.rs
index 89babe2c56f9..116ffbfe77fd 100644
--- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/rich_account/mod.rs
+++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/rich_account/mod.rs
@@ -3,6 +3,7 @@ use xshell::Shell;
 use zkstack_cli_common::logger;
 use zkstack_cli_config::ZkStackConfig;
 use zksync_basic_types::H256;
+use zksync_types::L2ChainId;
 
 use crate::commands::dev::messages::msg_rich_account_outro;
 
 pub mod args;
@@ -32,7 +33,11 @@ sol! {
     }
 }
 
-pub async fn run(shell: &Shell, args: RichAccountArgs) -> anyhow::Result<()> {
+pub async fn run(
+    shell: &Shell,
+    args: RichAccountArgs,
+    chain_id: Option<L2ChainId>,
+) -> anyhow::Result<()> {
     let args = args.fill_values_with_prompt();
     let chain_config = ZkStackConfig::current_chain(shell)?;
 
@@ -56,7 +61,10 @@ pub async fn run(shell: &Shell, args: RichAccountArgs) -> anyhow::Result<()> {
     let amount = U256::from_le_bytes(tmp_bytes);
 
     let request = BridgehubAbi::L2TransactionRequestDirect {
-        chainId: chain_config.chain_id.as_u64().try_into().unwrap(),
+        chainId: (chain_id.unwrap_or(chain_config.chain_id))
+            .as_u64()
+            .try_into()
+            .unwrap(),
         mintValue: amount,
         l2Contract: args.l2_account.0.into(),
         l2Value: 0.try_into().unwrap(),
diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/mod.rs b/zkstack_cli/crates/zkstack/src/commands/dev/mod.rs
index fb61104a4ac0..1752046ee019 100644
--- a/zkstack_cli/crates/zkstack/src/commands/dev/mod.rs
+++ b/zkstack_cli/crates/zkstack/src/commands/dev/mod.rs
@@ -96,7 +96,7 @@ pub async fn run(shell: &Shell, args: DevCommands) -> anyhow::Result<()> {
         DevCommands::Status(args) => commands::status::run(shell, args).await?,
         DevCommands::GenerateGenesis => commands::genesis::run(shell).await?,
         DevCommands::InitTestWallet => init_test_wallet_run(shell).await?,
-        DevCommands::RichAccount(args) => commands::rich_account::run(shell, args).await?,
+        DevCommands::RichAccount(args) => commands::rich_account::run(shell, args, None).await?,
         DevCommands::TrackPriorityOps(args) => commands::track_priority_txs::run(args).await?,
         DevCommands::V27EvmInterpreterUpgradeCalldata(args) => {