diff --git a/go.work.sum b/go.work.sum
index 1173bc3c6a..6e264b0df7 100644
--- a/go.work.sum
+++ b/go.work.sum
@@ -1360,8 +1360,7 @@ github.com/scroll-tech/da-codec v0.1.1-0.20241014152913-2703f226fb0b h1:5H6V6yba
 github.com/scroll-tech/da-codec v0.1.1-0.20241014152913-2703f226fb0b/go.mod h1:48uxaqVgpD8ulH8p+nrBtfeLHZ9tX82bVVdPNkW3rPE=
 github.com/scroll-tech/da-codec v0.1.3-0.20250227072756-a1482833595f h1:YYbhuUwjowqI4oyXtECRofck7Fyj18e1tcRjuQlZpJE=
 github.com/scroll-tech/da-codec v0.1.3-0.20250227072756-a1482833595f/go.mod h1:xECEHZLVzbdUn+tNbRJhRIjLGTOTmnFQuTgUTeVLX58=
-github.com/scroll-tech/da-codec v0.1.3-0.20250401062930-9f9f53898493 h1:Ioc01J0WEMxuwFvEPGJeBKXdf2KY4Yc3XbFky/IxLlI=
-github.com/scroll-tech/da-codec v0.1.3-0.20250401062930-9f9f53898493/go.mod h1:yhTS9OVC0xQGhg7DN5iV5KZJvnSIlFWAxDdp+6jxQtY=
+github.com/scroll-tech/da-codec v0.1.3-0.20250313120912-344f2d5e33e1/go.mod h1:yhTS9OVC0xQGhg7DN5iV5KZJvnSIlFWAxDdp+6jxQtY=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20240607130425-e2becce6a1a4/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20240821074444-b3fa00861e5e/go.mod h1:swB5NSp8pKNDuYsTxfR08bHS6L56i119PBx8fxvV8Cs=
 github.com/scroll-tech/go-ethereum v1.10.14-0.20241010064814-3d88e870ae22/go.mod h1:r9FwtxCtybMkTbWYCyBuevT9TW3zHmOTHqD082Uh+Oo=
diff --git a/permissionless-batches/.gitignore b/permissionless-batches/.gitignore
index ac3d45c877..e69de29bb2 100644
--- a/permissionless-batches/.gitignore
+++ b/permissionless-batches/.gitignore
@@ -1 +0,0 @@
-conf/
\ No newline at end of file
diff --git a/permissionless-batches/Makefile b/permissionless-batches/Makefile
new file mode 100644
index 0000000000..bb0e344903
--- /dev/null
+++ b/permissionless-batches/Makefile
@@ -0,0 +1,25 @@
+.PHONY: batch_production_submission local_prover cloud_prover psql check_proving_status
+
+export SCROLL_ZKVM_VERSION=0.3.0
+PG_URL=postgres://postgres@localhost:5432/scroll
+
+batch_production_submission:
+	docker compose --profile batch-production-submission up
+
+local_prover:
+	docker compose --profile local-prover up -d
+
+cloud_prover:
+	docker compose --profile cloud-prover up
+
+psql:
+	psql 'postgres://postgres@localhost:5432/scroll'
+
+check_proving_status:
+	@echo "Checking proving status..."
+	@result=$$(psql "${PG_URL}" -t -c "SELECT proving_status = 4 AS is_status_success FROM batch ORDER BY index LIMIT 1;" | tr -d '[:space:]'); \
+	if [ "$$result" = "t" ]; then \
+		echo "✅ Proving succeeded! You're ready to submit the permissionless batch and proof!"; \
+	else \
+		echo "Proof is not ready..."; \
+	fi
\ No newline at end of file
diff --git a/permissionless-batches/README.md b/permissionless-batches/README.md
index 42481fd031..a7ebf3f3cf 100644
--- a/permissionless-batches/README.md
+++ b/permissionless-batches/README.md
@@ -90,19 +90,33 @@ To produce a batch you need to run the `batch-production-submission` profile in
 
 3. Fill in required fields in `conf/relayer/config.json`
 
-Run with `docker compose --profile batch-production-submission up`.
+Run with `make batch_production_submission`.
 
 This will produce chunks, a batch and bundle which will be proven in the next step. `Success! You're ready to generate proofs!` indicates that everything is working correctly and the batch is ready to be proven.
 
 #### Proving a batch
-To prove the chunk, batch and bundle you just generated you need to run the `proving` profile in `docker-compose.yml`.
+To prove the chunk, batch and bundle you just generated you need to run the `local-prover` or `cloud-prover` profile in `docker-compose.yml`.
+
+Local Proving:
+
+1. Hardware spec for the local prover: CPU: 36+ cores, 128 GB memory; GPU: 24 GB memory (e.g. RTX 3090/3090 Ti/4090/A10/L4)
+2. Make sure `verifier` `low_version_circuit` and `high_version_circuit` in `conf/coordinator/config.json` are correct for the latest fork: [TODO link list with versions](#batch-production-toolkit)
+3. Set the `SCROLL_ZKVM_VERSION` environment variable in the `Makefile` to the correct version. [TODO link list with versions](#batch-production-toolkit)
+4. Fill in the required fields in `conf/proving-service/local-prover/config.json`
+
+Run with `make local_prover`.
+
+Cloud Proving (not supported yet):
 
 1. Make sure `verifier` `low_version_circuit` and `high_version_circuit` in `conf/coordinator/config.json` are correct for the latest fork: [TODO link list with versions](#batch-production-toolkit)
-2. Download the latest `assets` and `params` for the circuit from [TODO link list with versions](#batch-production-toolkit) into `conf/coordinator/assets` and `conf/coordinator/params` respectively.
-3. Fill in the required fields in `conf/proving-service/config.json`. It is recommended to use Sindri. You'll need to obtain credits and an API key from their [website](https://sindri.app/).
-4. Alternatively, you can run your own prover: https://github.com/scroll-tech/scroll-prover. However, this requires more configuration.
+2. Set the `SCROLL_ZKVM_VERSION` environment variable in the `Makefile` to the correct version. [TODO link list with versions](#batch-production-toolkit)
+3. Fill in the required fields in `conf/proving-service/cloud-prover/config.json`. It is recommended to use Sindri. You'll need to obtain credits and an API key from their [website](https://sindri.app/).
+
+Run with `make cloud_prover`.
 
-Run with `docker compose --profile proving up`.
+This will prove the chunks, batch and bundle.
+Run `make check_proving_status` to check whether the proof is ready.
+`Proving succeeded! You're ready to submit the permissionless batch and proof!` indicates that everything is working correctly and the batch is ready to be submitted.
 
 #### Batch submission
 
@@ -110,7 +124,7 @@ To submit the batch you need to run the `batch-production-submission` profile in
 
 1. Fill in required fields in `conf/relayer/config.json` for the sender config.
 
-Run with `docker compose --profile batch-production-submission up`.
+Run with `make batch_production_submission`.
 
 This will submit the batch to L1 and finalize it. The transaction will be retried in case of failure.
**Troubleshooting** diff --git a/permissionless-batches/conf/coordinator/config.json b/permissionless-batches/conf/coordinator/config.json index fd38cc25f4..93bd2631a8 100644 --- a/permissionless-batches/conf/coordinator/config.json +++ b/permissionless-batches/conf/coordinator/config.json @@ -1,23 +1,22 @@ { "prover_manager": { "provers_per_session": 1, - "session_attempts": 5, - "bundle_collection_time_sec": 3600, - "batch_collection_time_sec": 3600, - "chunk_collection_time_sec": 3600, + "session_attempts": 100, + "chunk_collection_time_sec": 36000, + "batch_collection_time_sec": 2700, + "bundle_collection_time_sec": 2700, "verifier": { "mock_mode": false, - "low_version_circuit": { - "params_path": "./conf/params", - "assets_path": "./conf/assets", - "fork_name": "darwinV2", - "min_prover_version": "v4.4.55" - }, - "high_version_circuit": { - "params_path": "./conf/params", - "assets_path": "./conf/assets", + "low_version_circuit" : { "fork_name": "darwinV2", + "params_path": "/verifier/params", + "assets_path": "/verifier/assets", "min_prover_version": "v4.4.56" + }, + "high_version_circuit" : { + "fork_name": "euclid", + "assets_path": "/verifier/openvm/verifier", + "min_prover_version": "v4.5.7" } } }, @@ -28,11 +27,11 @@ "maxIdleNum": 20 }, "l2": { - "chain_id": 111 + "chain_id": 333333 }, "auth": { - "secret": "prover secret key", - "challenge_expire_duration_sec": 3600, + "secret": "e788b62d39254928a821ac1c76b274a8c835aa1e20ecfb6f50eb10e87847de44", + "challenge_expire_duration_sec": 10, "login_expire_duration_sec": 3600 } } diff --git a/permissionless-batches/conf/coordinator/coordinator_run.sh b/permissionless-batches/conf/coordinator/coordinator_run.sh new file mode 100755 index 0000000000..dfdf941728 --- /dev/null +++ b/permissionless-batches/conf/coordinator/coordinator_run.sh @@ -0,0 +1,127 @@ +#!/usr/bin/bash + +apt update +apt install -y wget libdigest-sha-perl + +# release version +if [ -z "${SCROLL_ZKVM_VERSION}" ]; then + echo "SCROLL_ZKVM_VERSION not set" + exit 1 +fi + +if [ -z "${HTTP_PORT}" ]; then + echo "HTTP_PORT not set" + exit 1 +fi + +if [ -z "${METRICS_PORT}" ]; then + echo "METRICS_PORT not set" + exit 1 +fi + +case $CHAIN_ID in +"5343532222") # staging network + echo "staging network not supported" + exit 1 + ;; +"534353") # alpha network + echo "alpha network not supported" + exit 1 + ;; +esac + +BASE_DOWNLOAD_DIR="/verifier" +# Ensure the base directory exists +mkdir -p "$BASE_DOWNLOAD_DIR" + +# Set subdirectories +ASSETS_DIR="$BASE_DOWNLOAD_DIR/assets" +OPENVM_DIR="$BASE_DOWNLOAD_DIR/openvm" + +# Create necessary directories +mkdir -p "$ASSETS_DIR" +mkdir -p "$OPENVM_DIR/verifier" + +# Define URLs for asset files +ASSETS_CHECKSUM_URL="https://circuit-release.s3.us-west-2.amazonaws.com/release-v0.13.1/sha256sum" +ASSETS_CHECKSUM_FILE="$ASSETS_DIR/sha256sum" + +ASSETS_URLS=( + "https://circuit-release.s3.us-west-2.amazonaws.com/release-v0.13.1/vk_batch.vkey" + "https://circuit-release.s3.us-west-2.amazonaws.com/release-v0.13.1/vk_bundle.vkey" + "https://circuit-release.s3.us-west-2.amazonaws.com/release-v0.13.1/vk_chunk.vkey" +) + +# Define URLs for OpenVM files (No checksum verification) +OPENVM_URLS=( + "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/verifier/verifier.bin" + "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/verifier/root-verifier-vm-config" + 
"https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/verifier/root-verifier-committed-exe" +) + +# Function to download and verify files (skips existing valid files) +download_and_verify() { + local url="$1" + local dest_dir="$2" + local checksum_file="$3" + + local filename=$(basename "$url") + local filepath="$dest_dir/$filename" + + if [[ -f "$filepath" ]]; then + echo "Checking existing file: $filename..." + if grep "$filename" "$checksum_file" | sed "s|$filename|$filepath|" | sha256sum --check --status; then + echo "File is already present and valid ✅ - Skipping download." + return + else + echo "File exists but checksum mismatch ❌ - Re-downloading." + rm -f "$filepath" + fi + fi + + echo "Downloading $filename..." + curl -o "$filepath" -L "$url" + + if [[ ! -f "$filepath" ]]; then + echo "Download failed for $filename ❌" + exit 1 + fi + + echo "Verifying checksum for $filename..." + grep "$filename" "$checksum_file" | sed "s|$filename|$filepath|" | sha256sum --check --status || exit 1 + echo "Checksum verification passed for $filename ✅" +} + +# Download and verify asset files +curl -o "$ASSETS_CHECKSUM_FILE" -L "$ASSETS_CHECKSUM_URL" +for url in "${ASSETS_URLS[@]}"; do + download_and_verify "$url" "$ASSETS_DIR" "$ASSETS_CHECKSUM_FILE" +done + +# Download OpenVM files (No checksum verification, but skips if file exists) +for url in "${OPENVM_URLS[@]}"; do + dest_subdir="$OPENVM_DIR/$(basename $(dirname "$url"))" + mkdir -p "$dest_subdir" + + filepath="$dest_subdir/$(basename "$url")" + echo "Downloading $filepath..." + curl -o "$filepath" -L "$url" +done + +mkdir -p "$HOME/.openvm" +ln -s "$OPENVM_DIR/params" "$HOME/.openvm/params" + +echo "All files downloaded successfully! 🎉" + +mkdir -p /usr/local/bin +wget https://github.com/ethereum/solidity/releases/download/v0.8.19/solc-static-linux -O /usr/local/bin/solc +chmod +x /usr/local/bin/solc + +# Start coordinator +echo "Starting coordinator api" + +RUST_BACKTRACE=1 exec coordinator_api --config /coordinator/config.json \ + --genesis /coordinator/genesis.json \ + --http --http.addr "0.0.0.0" --http.port ${HTTP_PORT} \ + --metrics --metrics.addr "0.0.0.0" --metrics.port ${METRICS_PORT} \ + --log.debug diff --git a/permissionless-batches/conf/coordinator/params/.gitkeep b/permissionless-batches/conf/coordinator/params/.gitkeep deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/permissionless-batches/conf/proving-service/batch/.gitkeep b/permissionless-batches/conf/proving-service/batch/.gitkeep deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/permissionless-batches/conf/proving-service/bundle/.gitkeep b/permissionless-batches/conf/proving-service/bundle/.gitkeep deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/permissionless-batches/conf/proving-service/chunk/.gitkeep b/permissionless-batches/conf/proving-service/chunk/.gitkeep deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/permissionless-batches/conf/coordinator/assets/.gitkeep b/permissionless-batches/conf/proving-service/cloud-prover/.gitkeep similarity index 100% rename from permissionless-batches/conf/coordinator/assets/.gitkeep rename to permissionless-batches/conf/proving-service/cloud-prover/.gitkeep diff --git a/permissionless-batches/conf/proving-service/cloud-prover/config.json b/permissionless-batches/conf/proving-service/cloud-prover/config.json new file mode 100644 index 0000000000..a43a428913 --- /dev/null +++ 
b/permissionless-batches/conf/proving-service/cloud-prover/config.json @@ -0,0 +1,23 @@ +{ + "prover_name_prefix": "prover", + "keys_dir": "keys", + "coordinator": { + "base_url": "http://coordinator:8390", + "retry_count": 10, + "retry_wait_time_sec": 10, + "connection_timeout_sec": 30 + }, + "l2geth": { + "endpoint": "" + }, + "prover": { + "circuit_type": 2, + "supported_proof_types": [ + 1, + 2, + 3 + ], + "circuit_version": "v0.13.1" + }, + "db_path": "db" +} diff --git a/permissionless-batches/conf/proving-service/config.json b/permissionless-batches/conf/proving-service/config.json deleted file mode 100644 index de7fc9334c..0000000000 --- a/permissionless-batches/conf/proving-service/config.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "prover_name_prefix": "prover_", - "keys_dir": "/app/", - "db_path": "/app/", - "coordinator": { - "base_url": "http://coordinator:8390", - "retry_count": 3, - "retry_wait_time_sec": 5, - "connection_timeout_sec": 60 - }, - "l2geth": { - "endpoint": "" - }, - "prover": { - "circuit_type": 2, - "circuit_version": "v0.13.1", - "n_workers": 1, - "cloud": { - "base_url": "https://sindri.app/api/v1/", - "api_key": "", - "retry_count": 3, - "retry_wait_time_sec": 5, - "connection_timeout_sec": 60 - } - } -} \ No newline at end of file diff --git a/permissionless-batches/conf/proving-service/local-prover/config.json b/permissionless-batches/conf/proving-service/local-prover/config.json new file mode 100644 index 0000000000..719872cef9 --- /dev/null +++ b/permissionless-batches/conf/proving-service/local-prover/config.json @@ -0,0 +1,28 @@ +{ + "sdk_config": { + "prover_name_prefix": "local_prover", + "keys_dir": "/keys", + "db_path": "/db", + "coordinator": { + "base_url": "http://172.17.0.1:8556", + "retry_count": 10, + "retry_wait_time_sec": 10, + "connection_timeout_sec": 30 + }, + "l2geth": { + "endpoint": "" + }, + "prover": { + "circuit_type": 2, + "supported_proof_types": [1,2,3], + "circuit_version": "v0.13.1" + }, + "health_listener_addr": "0.0.0.0:89" + }, + "circuits": { + "euclidV2": { + "hard_fork_name": "euclidV2", + "workspace_path": "/openvm" + } + } +} \ No newline at end of file diff --git a/permissionless-batches/conf/proving-service/local-prover/prover_run.sh b/permissionless-batches/conf/proving-service/local-prover/prover_run.sh new file mode 100755 index 0000000000..0b40a785e6 --- /dev/null +++ b/permissionless-batches/conf/proving-service/local-prover/prover_run.sh @@ -0,0 +1,55 @@ +#!/usr/bin/bash + +apt update +apt install -y wget curl + +# release version +if [ -z "${SCROLL_ZKVM_VERSION}" ]; then + echo "SCROLL_ZKVM_VERSION not set" + exit 1 +fi + +BASE_DOWNLOAD_DIR="/openvm" +# Ensure the base directory exists +mkdir -p "$BASE_DOWNLOAD_DIR" + +# Define URLs for OpenVM files (No checksum verification) +OPENVM_URLS=( + "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/chunk/app.vmexe" + "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/chunk/openvm.toml" + "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/batch/app.vmexe" + "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/batch/openvm.toml" + "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/bundle/app.vmexe" + "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/bundle/app_euclidv1.vmexe" + 
"https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/bundle/openvm.toml" + "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/bundle/verifier.bin" + "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/bundle/verifier.sol" + "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/bundle/digest_1.hex" + "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/bundle/digest_2.hex" + "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/bundle/digest_1_euclidv1.hex" + "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/bundle/digest_2_euclidv1.hex" + "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/params/kzg_bn254_22.srs" + "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/params/kzg_bn254_24.srs" +) + +# Download OpenVM files (No checksum verification, but skips if file exists) +for url in "${OPENVM_URLS[@]}"; do + dest_subdir="$BASE_DOWNLOAD_DIR/$(basename $(dirname "$url"))" + mkdir -p "$dest_subdir" + + filepath="$dest_subdir/$(basename "$url")" + echo "Downloading $filepath..." + curl -o "$filepath" -L "$url" +done + +mkdir -p "$HOME/.openvm" +ln -s "/openvm/params" "$HOME/.openvm/params" + +mkdir -p /usr/local/bin +wget https://github.com/ethereum/solidity/releases/download/v0.8.19/solc-static-linux -O /usr/local/bin/solc +chmod +x /usr/local/bin/solc + +mkdir -p /openvm/cache + +RUST_MIN_STACK=16777216 RUST_BACKTRACE=1 exec /prover/prover --config /prover/conf/config.json + diff --git a/permissionless-batches/conf/relayer/config.json b/permissionless-batches/conf/relayer/config.json index 3b06b6dcc3..4ed6dc56ec 100644 --- a/permissionless-batches/conf/relayer/config.json +++ b/permissionless-batches/conf/relayer/config.json @@ -9,7 +9,7 @@ "commit_sender_signer_config": { "signer_type": "PrivateKey", "private_key_signer_config": { - "private_key": "1414141414141414141414141414141414141414141414141414141414141414" + "private_key": "" } }, "l1_commit_gas_limit_multiplier": 1.2 @@ -40,16 +40,17 @@ }, "db_config": { "driver_name": "postgres", - "dsn": "postgres://db/scroll?sslmode=disable&user=postgres", + "dsn": "postgres://172.17.0.1:5432/scroll?sslmode=disable&user=postgres", "maxOpenNum": 200, "maxIdleNum": 20 }, "recovery_config": { "enable": true, - "l1_block_height": , - "latest_finalized_batch": , - "l2_block_height_limit": , + "l1_block_height": "", + "latest_finalized_batch": "", + "l2_block_height_limit": "", "force_latest_finalized_batch": false, - "force_l1_message_count": 0 + "force_l1_message_count": 0, + "submit_without_proof": false } } diff --git a/permissionless-batches/docker-compose.yml b/permissionless-batches/docker-compose.yml index dca785c1fd..d982739ee0 100644 --- a/permissionless-batches/docker-compose.yml +++ b/permissionless-batches/docker-compose.yml @@ -5,11 +5,12 @@ services: build: context: ../ dockerfile: build/dockerfiles/recovery_permissionless_batches.Dockerfile + network_mode: host container_name: permissionless-batches-relayer volumes: - ./conf/relayer/config.json:/app/conf/config.json - ./conf/genesis.json:/app/conf/genesis.json - command: "--config /app/conf/config.json" + command: "--config /app/conf/config.json --min-codec-version 0" profiles: - batch-production-submission depends_on: @@ -32,21 +33,27 @@ services: ports: - "5432:5432" - 
coordinator: - build: - context: ../ - dockerfile: build/dockerfiles/coordinator-api.Dockerfile + coordinator-api: + image: scrolltech/coordinator-api:v4.5.7 volumes: - - ./conf/coordinator/config.json:/app/conf/config.json - - ./conf/genesis.json:/app/conf/genesis.json - command: "--config /app/conf/config.json --http.port 8390 --verbosity 5" + - ./conf/coordinator/config.json:/coordinator/config.json:ro + - ./conf/genesis.json:/coordinator/genesis.json:ro + - ./conf/coordinator/coordinator_run.sh:/bin/coordinator_run.sh + entrypoint: /bin/coordinator_run.sh profiles: - - proving + - local-prover + - cloud-prover + ports: [8556:8555] + environment: + - SCROLL_ZKVM_VERSION=${SCROLL_ZKVM_VERSION} + - SCROLL_PROVER_ASSETS_DIR=/verifier/assets/ + - HTTP_PORT=8555 + - METRICS_PORT=8390 depends_on: db: condition: service_healthy healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:8390/coordinator/v1/challenge"] + test: ["CMD", "curl", "-f", "http://localhost:8555/coordinator/v1/challenge"] interval: 1s timeout: 1s retries: 10 @@ -57,64 +64,51 @@ services: context: ../ dockerfile: build/dockerfiles/coordinator-cron.Dockerfile volumes: - - ./conf/coordinator/:/app/conf + - ./conf/coordinator/config.json:/app/conf/config.json command: "--config /app/conf/config.json --verbosity 3" profiles: - - proving + - local-prover + - cloud-prover depends_on: db: condition: service_healthy - - proving-service-chunk: - image: scrolltech/sdk-cloud-prover:sindri-v0.0.5 - platform: linux/amd64 - command: "--config /app/config.json" - profiles: - - proving - environment: - PROVER_NAME_PREFIX: "sindri_chunk" - CIRCUIT_TYPE: 1 # 1 for chunk proving - N_WORKERS: 1 - volumes: - - ./conf/proving-service/chunk/:/app/ - - ./conf/proving-service/config.json:/app/config.json - depends_on: - coordinator: - condition: service_healthy - - proving-service-batch: - image: scrolltech/sdk-cloud-prover:sindri-v0.0.5 + local-prover: + image: scrolltech/cuda-prover:v4.5.7-a18fe064-fcc09d1-9174aff + network_mode: host platform: linux/amd64 - command: "--config /app/config.json" + runtime: nvidia + entrypoint: /bin/prover_run.sh profiles: - - proving + - local-prover environment: - PROVER_NAME_PREFIX: "sindri_batch" - CIRCUIT_TYPE: 2 # 2 for batch proving - N_WORKERS: 1 + - SCROLL_ZKVM_VERSION=${SCROLL_ZKVM_VERSION} + - LD_LIBRARY_PATH=/prover:/usr/local/cuda/lib64 + - RUST_MIN_STACK=16777216 + - RUST_BACKTRACE=1 + - RUST_LOG=info volumes: - - ./conf/proving-service/batch/:/app - - ./conf/proving-service/config.json:/app/config.json + - ./conf/proving-service/local-prover/config.json:/prover/conf/config.json:ro + - ./conf/proving-service/local-prover/prover_run.sh:/bin/prover_run.sh + - ./conf/proving-service/local-prover/db:/db + - ./conf/proving-service/local-prover/keys:/keys depends_on: - coordinator: + coordinator-api: condition: service_healthy - proving-service-bundle: + cloud-prover: image: scrolltech/sdk-cloud-prover:sindri-v0.0.5 platform: linux/amd64 command: "--config /app/config.json" profiles: - - proving + - cloud-prover environment: - PROVER_NAME_PREFIX: "sindri_bundle" - CIRCUIT_TYPE: 3 # 3 for bundle proving - N_WORKERS: 1 + - N_WORKERS=1 volumes: - - ./conf/proving-service/bundle/:/app + - ./conf/proving-service/chunk/:/app/ - ./conf/proving-service/config.json:/app/config.json depends_on: - coordinator: + coordinator-api: condition: service_healthy volumes: diff --git a/rollup/cmd/permissionless_batches/app/app.go b/rollup/cmd/permissionless_batches/app/app.go index e0188303fa..7437bd57f0 100644 --- 
a/rollup/cmd/permissionless_batches/app/app.go +++ b/rollup/cmd/permissionless_batches/app/app.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "os" + "os/signal" "github.com/prometheus/client_golang/prometheus" "github.com/scroll-tech/da-codec/encoding" @@ -16,6 +17,7 @@ import ( "scroll-tech/common/observability" "scroll-tech/common/utils" "scroll-tech/common/version" + "scroll-tech/rollup/internal/config" "scroll-tech/rollup/internal/controller/permissionless_batches" "scroll-tech/rollup/internal/controller/watcher" @@ -109,7 +111,7 @@ func action(ctx *cli.Context) error { if err != nil { return fmt.Errorf("failed to create submitter: %w", err) } - if err = submitter.Submit(false); err != nil { + if err = submitter.Submit(!cfg.RecoveryConfig.SubmitWithoutProof); err != nil { return fmt.Errorf("failed to submit batch: %w", err) } diff --git a/rollup/cmd/rollup_relayer/app/app.go b/rollup/cmd/rollup_relayer/app/app.go index 9c23a6e69c..043f04769f 100644 --- a/rollup/cmd/rollup_relayer/app/app.go +++ b/rollup/cmd/rollup_relayer/app/app.go @@ -18,6 +18,7 @@ import ( "scroll-tech/common/observability" "scroll-tech/common/utils" "scroll-tech/common/version" + "scroll-tech/rollup/internal/config" "scroll-tech/rollup/internal/controller/relayer" "scroll-tech/rollup/internal/controller/watcher" @@ -108,7 +109,7 @@ func action(ctx *cli.Context) error { l2watcher := watcher.NewL2WatcherClient(subCtx, l2client, cfg.L2Config.Confirmations, cfg.L2Config.L2MessageQueueAddress, cfg.L2Config.WithdrawTrieRootSlot, genesis.Config, db, registry) - if cfg.RecoveryConfig.Enable { + if cfg.RecoveryConfig != nil && cfg.RecoveryConfig.Enable { log.Info("Starting rollup-relayer in recovery mode", "version", version.Version) l1Client, err := ethclient.Dial(cfg.L1Config.Endpoint) diff --git a/rollup/go.mod b/rollup/go.mod index d06be48597..e8b327cb39 100644 --- a/rollup/go.mod +++ b/rollup/go.mod @@ -12,7 +12,7 @@ require ( github.com/mitchellh/mapstructure v1.5.0 github.com/prometheus/client_golang v1.16.0 github.com/scroll-tech/da-codec v0.1.3-0.20250401062930-9f9f53898493 - github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601 + github.com/scroll-tech/go-ethereum v1.10.14-0.20250428071010-eaf06e30a037 github.com/smartystreets/goconvey v1.8.0 github.com/spf13/viper v1.19.0 github.com/stretchr/testify v1.10.0 diff --git a/rollup/go.sum b/rollup/go.sum index e87d0e6980..1e836cf7b9 100644 --- a/rollup/go.sum +++ b/rollup/go.sum @@ -251,8 +251,8 @@ github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6g github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= github.com/scroll-tech/da-codec v0.1.3-0.20250401062930-9f9f53898493 h1:Ioc01J0WEMxuwFvEPGJeBKXdf2KY4Yc3XbFky/IxLlI= github.com/scroll-tech/da-codec v0.1.3-0.20250401062930-9f9f53898493/go.mod h1:yhTS9OVC0xQGhg7DN5iV5KZJvnSIlFWAxDdp+6jxQtY= -github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601 h1:NEsjCG6uSvLRBlsP3+x6PL1kM+Ojs3g8UGotIPgJSz8= -github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601/go.mod h1:OblWe1+QrZwdpwO0j/LY3BSGuKT3YPUFBDQQgvvfStQ= +github.com/scroll-tech/go-ethereum v1.10.14-0.20250428071010-eaf06e30a037 h1:AAnzT1IvUe2QdPjqBMpSrl0frfqaR9kLw/rBqLn/Bj0= +github.com/scroll-tech/go-ethereum v1.10.14-0.20250428071010-eaf06e30a037/go.mod h1:756YMENiSfx/5pCwKq3+uSTWjXuHTbiCB+TirJjsQT8= github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE= github.com/scroll-tech/zktrie v0.8.4/go.mod 
h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk= github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI= diff --git a/rollup/internal/config/recovery.go b/rollup/internal/config/recovery.go index 34b1f82062..3cb5783148 100644 --- a/rollup/internal/config/recovery.go +++ b/rollup/internal/config/recovery.go @@ -10,4 +10,5 @@ type RecoveryConfig struct { L2BlockHeightLimit uint64 `json:"l2_block_height_limit"` ForceL1MessageCount uint64 `json:"force_l1_message_count"` + SubmitWithoutProof bool `json:"submit_without_proof"` } diff --git a/rollup/internal/controller/permissionless_batches/minimal_recovery.go b/rollup/internal/controller/permissionless_batches/minimal_recovery.go index 0e323ed238..150d145dff 100644 --- a/rollup/internal/controller/permissionless_batches/minimal_recovery.go +++ b/rollup/internal/controller/permissionless_batches/minimal_recovery.go @@ -17,6 +17,7 @@ import ( "scroll-tech/common/types" "scroll-tech/database/migrate" + "scroll-tech/rollup/internal/config" "scroll-tech/rollup/internal/controller/watcher" "scroll-tech/rollup/internal/orm" @@ -265,7 +266,7 @@ func (r *MinimalRecovery) restoreMinimalPreviousState() (*orm.Chunk, *orm.Batch, log.Info("Last L2 block in batch", "batch", batchCommitEvent.BatchIndex(), "L2 block", lastBlockInBatch, "PostL1MessageQueueHash", daBlobPayload.PostL1MessageQueueHash()) - // 4. Get the L1 messages count after the latest finalized batch. + // 4. Get the L1 messages count and state root after the latest finalized batch. var l1MessagesCount uint64 if r.cfg.RecoveryConfig.ForceL1MessageCount == 0 { l1MessagesCount, err = reader.NextUnfinalizedL1MessageQueueIndex(latestFinalizedL1Block) @@ -278,8 +279,15 @@ func (r *MinimalRecovery) restoreMinimalPreviousState() (*orm.Chunk, *orm.Batch, log.Info("L1 messages count after latest finalized batch", "batch", batchCommitEvent.BatchIndex(), "count", l1MessagesCount) + stateRoot, err := reader.GetFinalizedStateRootByBatchIndex(latestFinalizedL1Block, latestFinalizedBatchIndex) + if err != nil { + return nil, nil, nil, fmt.Errorf("failed to get state root: %w", err) + } + + log.Info("State root after latest finalized batch", "batch", batchCommitEvent.BatchIndex(), "stateRoot", stateRoot.Hex()) + // 5. Insert minimal state to DB. 
- chunk, err := r.chunkORM.InsertPermissionlessChunk(r.ctx, defaultFakeRestoredChunkIndex, daBatch.Version(), daBlobPayload, l1MessagesCount) + chunk, err := r.chunkORM.InsertPermissionlessChunk(r.ctx, defaultFakeRestoredChunkIndex, daBatch.Version(), daBlobPayload, l1MessagesCount, stateRoot) if err != nil { return nil, nil, nil, fmt.Errorf("failed to insert chunk raw: %w", err) } @@ -388,6 +396,7 @@ func (r *MinimalRecovery) decodeLatestFinalizedBatch(reader *l1.Reader, event *l blobClient.AddBlobClient(client) } + log.Info("Fetching blob by versioned hash and block time", "TargetBlobVersionedHash", targetBlobVersionedHash, "BlockTime", blockHeader.Time, "BlockNumber", blockHeader.Number) blob, err := blobClient.GetBlobByVersionedHashAndBlockTime(r.ctx, targetBlobVersionedHash, blockHeader.Time) if err != nil { return nil, nil, fmt.Errorf("failed to get blob by versioned hash and block time for batch %d: %w", event.BatchIndex(), err) diff --git a/rollup/internal/controller/permissionless_batches/submitter.go b/rollup/internal/controller/permissionless_batches/submitter.go index 9e4d201edb..12f6602f89 100644 --- a/rollup/internal/controller/permissionless_batches/submitter.go +++ b/rollup/internal/controller/permissionless_batches/submitter.go @@ -19,6 +19,7 @@ import ( "scroll-tech/common/types" "scroll-tech/common/types/message" + bridgeAbi "scroll-tech/rollup/abi" "scroll-tech/rollup/internal/config" "scroll-tech/rollup/internal/controller/sender" diff --git a/rollup/internal/controller/relayer/full_recovery.go b/rollup/internal/controller/relayer/full_recovery.go index acffc772b4..3444f951a4 100644 --- a/rollup/internal/controller/relayer/full_recovery.go +++ b/rollup/internal/controller/relayer/full_recovery.go @@ -13,6 +13,7 @@ import ( "gorm.io/gorm" "scroll-tech/common/types" + "scroll-tech/rollup/internal/config" "scroll-tech/rollup/internal/controller/watcher" "scroll-tech/rollup/internal/orm" @@ -118,7 +119,7 @@ func (f *FullRecovery) RestoreFullPreviousState() error { var bundle []*batchEvents - // with bundles all commited batches until this finalized batch are finalized in the same bundle + // with bundles all committed batches until this finalized batch are finalized in the same bundle for commitsHeapMap.Len() > 0 { commitEvent := commitsHeapMap.Peek() if commitEvent.BatchIndex().Uint64() > finalizeEvent.BatchIndex().Uint64() { @@ -138,7 +139,7 @@ func (f *FullRecovery) RestoreFullPreviousState() error { return false } - case l1.RevertEventType: + case l1.RevertEventV7Type: // We ignore reverted batches. commitsHeapMap.RemoveByKey(event.BatchIndex().Uint64()) } @@ -252,9 +253,7 @@ func (f *FullRecovery) processFinalizedBatch(nextBatch *batchEvents) error { log.Info("Reproducing chunk", "start block", start, "end block", end) var chunk encoding.Chunk - for _, block := range blocks { - chunk.Blocks = append(chunk.Blocks, block) - } + chunk.Blocks = append(chunk.Blocks, blocks...) metrics, err := butils.CalculateChunkMetrics(&chunk, codec.Version()) if err != nil { @@ -297,9 +296,7 @@ func (f *FullRecovery) processFinalizedBatch(nextBatch *batchEvents) error { batch.ParentBatchHash = common.HexToHash(dbParentBatch.Hash) batch.TotalL1MessagePoppedBefore = dbChunks[0].TotalL1MessagesPoppedBefore - for _, chunk := range daChunks { - batch.Chunks = append(batch.Chunks, chunk) - } + batch.Chunks = append(batch.Chunks, daChunks...) 
metrics, err := butils.CalculateBatchMetrics(&batch, codec.Version()) if err != nil { diff --git a/rollup/internal/orm/batch.go b/rollup/internal/orm/batch.go index a79429f0e4..080e31a15e 100644 --- a/rollup/internal/orm/batch.go +++ b/rollup/internal/orm/batch.go @@ -340,6 +340,7 @@ func (o *Batch) InsertPermissionlessBatch(ctx context.Context, batchIndex *big.I StartChunkHash: chunk.Hash, EndChunkIndex: chunk.Index, EndChunkHash: chunk.Hash, + StateRoot: chunk.StateRoot, PrevL1MessageQueueHash: chunk.PrevL1MessageQueueHash, PostL1MessageQueueHash: chunk.PostL1MessageQueueHash, BatchHeader: []byte{1, 2, 3}, diff --git a/rollup/internal/orm/chunk.go b/rollup/internal/orm/chunk.go index fda5269409..541cf3e9a9 100644 --- a/rollup/internal/orm/chunk.go +++ b/rollup/internal/orm/chunk.go @@ -281,7 +281,7 @@ func (o *Chunk) InsertChunk(ctx context.Context, chunk *encoding.Chunk, codecVer return &newChunk, nil } -func (o *Chunk) InsertPermissionlessChunk(ctx context.Context, index uint64, codecVersion encoding.CodecVersion, daBlobPayload encoding.DABlobPayload, totalL1MessagePoppedBefore uint64) (*Chunk, error) { +func (o *Chunk) InsertPermissionlessChunk(ctx context.Context, index uint64, codecVersion encoding.CodecVersion, daBlobPayload encoding.DABlobPayload, totalL1MessagePoppedBefore uint64, stateRoot common.Hash) (*Chunk, error) { // Create some unique identifier. It is not really used for anything except in DB. var chunkBytes []byte for _, block := range daBlobPayload.Blocks() { @@ -305,7 +305,7 @@ func (o *Chunk) InsertPermissionlessChunk(ctx context.Context, index uint64, cod PrevL1MessageQueueHash: daBlobPayload.PrevL1MessageQueueHash().Hex(), PostL1MessageQueueHash: daBlobPayload.PostL1MessageQueueHash().Hex(), ParentChunkHash: emptyHash, - StateRoot: emptyHash, + StateRoot: stateRoot.Hex(), ParentChunkStateRoot: emptyHash, WithdrawRoot: emptyHash, CodecVersion: int16(codecVersion),
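
For context, a minimal sketch of the operator workflow enabled by this change, assuming the conf/ files referenced in the README have been filled in. The targets and compose profiles come from the new permissionless-batches/Makefile and docker-compose.yml above; everything else is illustrative only.

# run from permissionless-batches/ with the conf/ files filled in
make batch_production_submission   # produce chunks, a batch and a bundle via the batch-production-submission profile
make local_prover                  # start coordinator-api, coordinator-cron and the local GPU prover (or: make cloud_prover)
make check_proving_status          # queries the batch table via psql and reports whether the proof is ready
make batch_production_submission   # once proving succeeded, run again to submit the batch and proof to L1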