diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000..190885b0c2 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,12 @@ +# Exclude all Rust build output directories. +# This prevents Docker from sending gigabytes of compiled artifacts +# as part of the build context when building Dockerfile.local. +**/target/ + +# Not needed to build sc-meta from source. +.github/ +.vscode/ +tools/ +contracts/ +template-test/ +install-debugger-test/ diff --git a/Cargo.lock b/Cargo.lock index 5a2a941703..fbc802975e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3269,6 +3269,7 @@ name = "multiversx-sc-meta" version = "0.65.1" dependencies = [ "anyhow", + "base64", "bip39", "clap", "colored 3.1.1", @@ -3276,6 +3277,7 @@ dependencies = [ "convert_case 0.11.0", "copy_dir", "hex", + "indexmap", "multiversx-sc", "multiversx-sc-meta-lib", "multiversx-sc-scenario", @@ -3325,7 +3327,6 @@ dependencies = [ name = "multiversx-sc-scenario" version = "0.65.1" dependencies = [ - "base64", "colored 3.1.1", "hex", "hex-literal", @@ -3352,7 +3353,6 @@ name = "multiversx-sc-snippets" version = "0.65.1" dependencies = [ "anyhow", - "base64", "colored 3.1.1", "env_logger", "futures", diff --git a/framework/meta-lib/src/cargo_toml/cargo_toml_contents.rs b/framework/meta-lib/src/cargo_toml/cargo_toml_contents.rs index 616f98dcbc..bfdd2dcd40 100644 --- a/framework/meta-lib/src/cargo_toml/cargo_toml_contents.rs +++ b/framework/meta-lib/src/cargo_toml/cargo_toml_contents.rs @@ -72,22 +72,78 @@ impl CargoTomlContents { pub fn package_name(&self) -> String { self.toml_value .get(PACKAGE) - .expect("missing package in Cargo.toml") + .unwrap_or_else(|| { + panic!( + "missing [package] section in Cargo.toml: {}", + self.path.display() + ) + }) .get("name") - .expect("missing package name in Cargo.toml") + .unwrap_or_else(|| { + panic!( + "missing package name in Cargo.toml: {}", + self.path.display() + ) + }) .as_str() - .expect("package name not a string value") + .unwrap_or_else(|| { + 
panic!( + "package name is not a string in Cargo.toml: {}", + self.path.display() + ) + }) + .to_string() + } + + pub fn package_version(&self) -> String { + self.toml_value + .get(PACKAGE) + .unwrap_or_else(|| { + panic!( + "missing [package] section in Cargo.toml: {}", + self.path.display() + ) + }) + .get("version") + .unwrap_or_else(|| { + panic!( + "missing package version in Cargo.toml: {}", + self.path.display() + ) + }) + .as_str() + .unwrap_or_else(|| { + panic!( + "package version is not a string in Cargo.toml: {}", + self.path.display() + ) + }) .to_string() } pub fn package_edition(&self) -> String { self.toml_value .get(PACKAGE) - .expect("missing package in Cargo.toml") + .unwrap_or_else(|| { + panic!( + "missing [package] section in Cargo.toml: {}", + self.path.display() + ) + }) .get("edition") - .expect("missing package name in Cargo.toml") + .unwrap_or_else(|| { + panic!( + "missing package edition in Cargo.toml: {}", + self.path.display() + ) + }) .as_str() - .expect("package name not a string value") + .unwrap_or_else(|| { + panic!( + "package edition is not a string in Cargo.toml: {}", + self.path.display() + ) + }) .to_string() } diff --git a/framework/meta-lib/src/contract/sc_config/wasm_build.rs b/framework/meta-lib/src/contract/sc_config/wasm_build.rs index 10c9afc902..a8a535327f 100644 --- a/framework/meta-lib/src/contract/sc_config/wasm_build.rs +++ b/framework/meta-lib/src/contract/sc_config/wasm_build.rs @@ -345,10 +345,7 @@ impl ContractVariant { print_generate_codehash(&output_codehash_path.to_string_lossy()); - let wasm_bytes = fs::read(&output_wasm_path).expect("failed to read compiled contract"); - let hash = multiversx_sc::chain_core::std::code_hash(&wasm_bytes); - let hex_hash = hex::encode(hash); - fs::write(&output_codehash_path, hex_hash).expect("failed to write codehash file"); + tools::generate_codehash(&output_wasm_path, &output_codehash_path); } } diff --git a/framework/meta-lib/src/tools.rs 
b/framework/meta-lib/src/tools.rs index de13131d71..ed356cbb78 100644 --- a/framework/meta-lib/src/tools.rs +++ b/framework/meta-lib/src/tools.rs @@ -5,6 +5,7 @@ pub(crate) mod panic_report; mod rustc_version; mod rustc_version_warning; pub mod twiggy; +mod wasm_codehash; mod wasm_extractor; pub mod wasm_opt; mod wasm_to_wat; @@ -13,6 +14,7 @@ pub use find_workspace::{find_current_workspace, find_workspace}; pub use git_describe::git_describe; pub use rustc_version::RustcVersion; pub use rustc_version_warning::rustc_version_warning; +pub use wasm_codehash::{CODEHASH_FILE_SUFFIX, generate_codehash, generate_codehashes_in_output}; pub use wasm_extractor::OpcodeVersion; pub use wasm_extractor::code_report::CodeReport; pub use wasm_extractor::extractor::WasmInfo; diff --git a/framework/meta-lib/src/tools/wasm_codehash.rs b/framework/meta-lib/src/tools/wasm_codehash.rs new file mode 100644 index 0000000000..b4ae1c3091 --- /dev/null +++ b/framework/meta-lib/src/tools/wasm_codehash.rs @@ -0,0 +1,39 @@ +use std::{fs, path::Path}; + +pub const CODEHASH_FILE_SUFFIX: &str = ".codehash.txt"; + +/// Computes the Blake2b-256 code hash of the `.wasm` file at `wasm_path`, +/// and writes the lowercase hex string to `codehash_path`. +pub fn generate_codehash(wasm_path: &Path, codehash_path: &Path) { + let wasm_bytes = fs::read(wasm_path) + .unwrap_or_else(|err| panic!("failed to read wasm file {}: {err}", wasm_path.display())); + let hash = multiversx_sc::chain_core::std::code_hash(&wasm_bytes); + let hex_hash = hex::encode(hash); + fs::write(codehash_path, hex_hash).unwrap_or_else(|err| { + panic!( + "failed to write codehash file {}: {err}", + codehash_path.display() + ) + }); +} + +/// Scans `output_dir` for `.wasm` files and generates a codehash file for each one. +/// +/// For every `.wasm` found, writes the Blake2b-256 hash (lowercase hex) to +/// `.codehash.txt` in the same directory. 
+pub fn generate_codehashes_in_output(output_dir: &Path) { + let Ok(read_dir) = fs::read_dir(output_dir) else { + return; + }; + for entry in read_dir.flatten() { + let path = entry.path(); + if path.extension().map(|e| e == "wasm").unwrap_or(false) { + let codehash_path = path.with_file_name(format!( + "{}{}", + path.file_stem().unwrap().to_string_lossy(), + CODEHASH_FILE_SUFFIX + )); + generate_codehash(&path, &codehash_path); + } + } +} diff --git a/framework/meta/Cargo.toml b/framework/meta/Cargo.toml index e6b6e114d1..3636127070 100644 --- a/framework/meta/Cargo.toml +++ b/framework/meta/Cargo.toml @@ -42,6 +42,8 @@ common-path = "1.0.0" bip39 = { version = "2.0", features = ["rand"] } anyhow = "1.0" hex = "0.4.3" +base64 = "0.22" +indexmap = { version = "2", features = ["serde"] } # warning: newer versions require Rust 1.88, hence we are not upgrading yet zip = { version = "7.2", features = ["deflate"], default-features = false } diff --git a/framework/meta/Dockerfile b/framework/meta/Dockerfile new file mode 100644 index 0000000000..ac9e785cbf --- /dev/null +++ b/framework/meta/Dockerfile @@ -0,0 +1,69 @@ +FROM ubuntu:22.04 + +# Toolchain versions — bump these together when upgrading. 
+ARG VERSION_RUST="1.93.0" +ARG VERSION_SC_META="0.65.1" +ARG VERSION_WASM_OPT="0.116.1" +ARG TARGETPLATFORM + +# Install system dependencies +RUN apt-get update --fix-missing && apt-get install -y \ + wget \ + build-essential \ + git \ + pkg-config \ + libssl-dev + +# Install Rust +RUN wget -O rustup.sh https://sh.rustup.rs && \ + chmod +x rustup.sh && \ + CARGO_HOME=/rust RUSTUP_HOME=/rust ./rustup.sh \ + --verbose \ + --default-toolchain ${VERSION_RUST} \ + --profile minimal \ + -y && \ + rm rustup.sh && \ + chmod -R 777 /rust && \ + rm -rf /rust/registry + +# Install sc-meta from crates.io +RUN PATH="/rust/bin:${PATH}" CARGO_HOME=/rust RUSTUP_HOME=/rust \ + cargo install multiversx-sc-meta --version ${VERSION_SC_META} --locked && \ + rm -rf /rust/registry + +# Install wasm32 target +RUN PATH="/rust/bin:${PATH}" CARGO_HOME=/rust RUSTUP_HOME=/rust \ + sc-meta install wasm32 + +# Install wasm-opt +RUN PATH="/rust/bin:${PATH}" CARGO_HOME=/rust RUSTUP_HOME=/rust \ + cargo install wasm-opt --version ${VERSION_WASM_OPT} --locked && \ + rm -rf /rust/registry + +ENV PATH="/rust/bin:${PATH}" +ENV CARGO_HOME="/rust" +ENV RUSTUP_HOME="/rust" + +# Exposed to sc-meta at runtime for embedding into .source.json build metadata. 
+ENV BUILD_METADATA_VERSION_RUST=${VERSION_RUST} +ENV BUILD_METADATA_VERSION_SC_META=${VERSION_SC_META} +ENV BUILD_METADATA_VERSION_WASM_OPT=${VERSION_WASM_OPT} +ENV BUILD_METADATA_TARGETPLATFORM=${TARGETPLATFORM} + +# /project — mounted read-only by the caller (the contract source tree) +# /output — mounted read-write by the caller (build artifacts land here) +# /rust/cargo-target-dir — optionally mounted for caching between runs + +# Additional arguments forwarded at "docker run" time: +# --project /project (required; set automatically by docker-build) +# --contract (optional) +# --no-wasm-opt (optional) +# --build-root (optional; defaults to /tmp/sc-build inside the container) +ENTRYPOINT ["sc-meta", "reproducible-build", "local-build", \ + "--output", "/output", \ + "--target-dir", "/rust/cargo-target-dir"] + +LABEL frozen="yes" +LABEL rust=${VERSION_RUST} +LABEL sc_meta=${VERSION_SC_META} +LABEL wasm_opt=${VERSION_WASM_OPT} diff --git a/framework/meta/Dockerfile.local b/framework/meta/Dockerfile.local new file mode 100644 index 0000000000..8a7116cb34 --- /dev/null +++ b/framework/meta/Dockerfile.local @@ -0,0 +1,73 @@ +FROM ubuntu:22.04 + +# Toolchain versions — bump these together when upgrading. +ARG VERSION_RUST="1.93.0" +ARG VERSION_SC_META="local" +ARG VERSION_WASM_OPT="0.116.1" +ARG TARGETPLATFORM + +# Install system dependencies +RUN apt-get update --fix-missing && apt-get install -y \ + wget \ + build-essential \ + git \ + pkg-config \ + libssl-dev + +# Install Rust +RUN wget -O rustup.sh https://sh.rustup.rs && \ + chmod +x rustup.sh && \ + CARGO_HOME=/rust RUSTUP_HOME=/rust ./rustup.sh \ + --verbose \ + --default-toolchain ${VERSION_RUST} \ + --profile minimal \ + -y && \ + rm rustup.sh && \ + chmod -R 777 /rust && \ + rm -rf /rust/registry + +# Install sc-meta from the local source tree. +# Build context must be the workspace root: +# docker build -f framework/meta/Dockerfile.local +COPY . 
/mx-sdk-rs +RUN PATH="/rust/bin:${PATH}" CARGO_HOME=/rust RUSTUP_HOME=/rust \ + cargo install --path /mx-sdk-rs/framework/meta --locked && \ + rm -rf /rust/registry && \ + rm -rf /mx-sdk-rs + +# Install wasm32 target +RUN PATH="/rust/bin:${PATH}" CARGO_HOME=/rust RUSTUP_HOME=/rust \ + sc-meta install wasm32 + +# Install wasm-opt +RUN PATH="/rust/bin:${PATH}" CARGO_HOME=/rust RUSTUP_HOME=/rust \ + cargo install wasm-opt --version ${VERSION_WASM_OPT} --locked && \ + rm -rf /rust/registry + +ENV PATH="/rust/bin:${PATH}" +ENV CARGO_HOME="/rust" +ENV RUSTUP_HOME="/rust" + +# Exposed to sc-meta at runtime for embedding into .source.json build metadata. +ENV BUILD_METADATA_VERSION_RUST=${VERSION_RUST} +ENV BUILD_METADATA_VERSION_SC_META=${VERSION_SC_META} +ENV BUILD_METADATA_VERSION_WASM_OPT=${VERSION_WASM_OPT} +ENV BUILD_METADATA_TARGETPLATFORM=${TARGETPLATFORM} + +# /project — mounted read-only by the caller (the contract source tree) +# /output — mounted read-write by the caller (build artifacts land here) +# /rust/cargo-target-dir — optionally mounted for caching between runs + +# Additional arguments forwarded at "docker run" time: +# --project /project (required; set automatically by docker-build) +# --contract (optional) +# --no-wasm-opt (optional) +# --build-root (optional; defaults to /tmp/sc-build inside the container) +ENTRYPOINT ["sc-meta", "reproducible-build", "local-build", \ + "--output", "/output", \ + "--target-dir", "/rust/cargo-target-dir"] + +LABEL frozen="no" +LABEL rust=${VERSION_RUST} +LABEL sc_meta=${VERSION_SC_META} +LABEL wasm_opt=${VERSION_WASM_OPT} diff --git a/framework/meta/docker-build-released.sh b/framework/meta/docker-build-released.sh new file mode 100755 index 0000000000..e450cb6ceb --- /dev/null +++ b/framework/meta/docker-build-released.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +# Builds the Docker image using the published sc-meta from crates.io. +# For a local source build, use docker-build.sh. 
+ +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +docker build \ + --platform linux/amd64 \ + -f "$SCRIPT_DIR/Dockerfile" \ + -t multiversx/sc-meta-reproducible-build:0.65.1 \ + "$SCRIPT_DIR" diff --git a/framework/meta/docker-build.sh b/framework/meta/docker-build.sh new file mode 100755 index 0000000000..7960b174a2 --- /dev/null +++ b/framework/meta/docker-build.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +# Builds the Docker image using the local workspace source tree. +# For the released (crates.io) image, use docker-build-released.sh. + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +echo "$SCRIPT_DIR" + +WORKSPACE_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)" + +docker build \ + --platform linux/amd64 \ + -f "$SCRIPT_DIR/Dockerfile.local" \ + -t multiversx/sc-meta-reproducible-build:local \ + "$WORKSPACE_ROOT" \ No newline at end of file diff --git a/framework/meta/src/cli/cli_args_standalone.rs b/framework/meta/src/cli/cli_args_standalone.rs index 4ec3cf75e7..80425a34e8 100644 --- a/framework/meta/src/cli/cli_args_standalone.rs +++ b/framework/meta/src/cli/cli_args_standalone.rs @@ -85,6 +85,13 @@ pub enum StandaloneCliAction { )] LocalDeps(LocalDepsArgs), + #[command( + name = "reproducible-build", + alias = "rb", + about = "Reproducible build operations." + )] + ReproducibleBuild(ReproducibleBuildArgs), + #[command( name = "wallet", about = "Generates a new wallet or performs actions on an existing wallet." @@ -250,21 +257,7 @@ pub struct ConvertArgs { } #[derive(Default, Clone, PartialEq, Eq, Debug, Args)] -pub struct AllArgs { - #[command(subcommand)] - pub command: ContractCliAction, - - /// Target directory where to call all contract meta crates. - /// Will be current directory if not specified. - #[arg(long, verbatim_doc_comment)] - #[clap(global = true)] - pub path: Option, - - /// Ignore all directories with these names. 
- #[arg(long, verbatim_doc_comment)] - #[clap(global = true, default_value = "target")] - pub ignore: Vec, - +pub struct MetaLibArgs { #[arg( long = "no-abi-git-version", help = "Skips loading the Git version into the ABI", @@ -285,11 +278,31 @@ pub struct AllArgs { pub target_dir_all: Option, } +#[derive(Default, Clone, PartialEq, Eq, Debug, Args)] +pub struct AllArgs { + #[command(subcommand)] + pub command: ContractCliAction, + + /// Target directory where to call all contract meta crates. + /// Will be current directory if not specified. + #[arg(long, verbatim_doc_comment)] + #[clap(global = true)] + pub path: Option, + + /// Ignore all directories with these names. + #[arg(long, verbatim_doc_comment)] + #[clap(global = true, default_value = "target")] + pub ignore: Vec, + + #[command(flatten)] + pub meta_lib_args: MetaLibArgs, +} + impl AllArgs { pub fn target_dir_all_override(&self) -> Self { let mut result = self.clone(); - if let Some(target_dir_all) = &self.target_dir_all { - result.target_dir_meta = Some(target_dir_all.clone()); + if let Some(target_dir_all) = &self.meta_lib_args.target_dir_all { + result.meta_lib_args.target_dir_meta = Some(target_dir_all.clone()); match &mut result.command { ContractCliAction::Build(build_args) => { build_args.target_dir_wasm = Some(target_dir_all.clone()); @@ -305,37 +318,6 @@ impl AllArgs { } result } - - pub fn to_cargo_run_args(&self) -> Vec { - let processed = self.target_dir_all_override(); - let mut raw = vec!["run".to_string()]; - if let Some(target_dir_meta) = &processed.target_dir_meta { - raw.push("--target-dir".to_string()); - raw.push(target_dir_meta.clone()); - } - raw.append(&mut processed.command.to_raw()); - if !processed.load_abi_git_version { - raw.push("--no-abi-git-version".to_string()); - } - raw - } - - /// Produces the arguments for an abi call corresponding to a build. - /// - /// Used to get the rustc and framework versions configured for a build. 
- pub fn to_cargo_abi_for_build(&self) -> Vec { - let processed = self.target_dir_all_override(); - let mut raw = vec!["run".to_string()]; - if let Some(target_dir_meta) = &processed.target_dir_meta { - raw.push("--target-dir".to_string()); - raw.push(target_dir_meta.clone()); - } - raw.push("abi".to_string()); - if !processed.load_abi_git_version { - raw.push("--no-abi-git-version".to_string()); - } - raw - } } #[derive(Default, Clone, PartialEq, Eq, Debug, Args)] @@ -360,6 +342,171 @@ pub struct UpgradeArgs { pub no_check: bool, } +#[derive(Default, Clone, PartialEq, Eq, Debug, Args)] +pub struct PackArgs { + /// Project folder (workspace root or single contract folder). + /// Will be current directory if not specified. + #[arg(long, verbatim_doc_comment)] + pub path: Option, + + /// Only pack the contract with this name (as found in Cargo.toml). + /// If not specified, all contracts under the project folder are packed. + #[arg(long, verbatim_doc_comment)] + pub contract: Option, +} + +#[derive(Clone, PartialEq, Eq, Debug, Args)] +pub struct ReproducibleBuildArgs { + #[command(subcommand)] + pub command: ReproducibleBuildCliAction, +} + +#[derive(Clone, PartialEq, Eq, Debug, Subcommand)] +pub enum ReproducibleBuildCliAction { + #[command( + name = "source-pack", + about = "Packages the contract source code into a self-contained JSON file, suitable for reproducible builds." + )] + SourcePack(PackArgs), + + #[command( + name = "local-build", + about = "Builds all contracts locally, mirroring the Docker reproducible build pipeline." + )] + LocalBuild(LocalBuildArgs), + + #[command( + name = "docker-build", + about = "Runs the reproducible build inside a pinned Docker container." + )] + DockerBuild(DockerBuildArgs), + + #[command( + name = "local-deps", + about = "Generates a report on the local dependencies of the contract." 
+ )] + LocalDeps(LocalDepsArgs), + + #[command( + name = "source-unpack", + about = "Unpacks a .source.json file produced by a previous build back to the filesystem." + )] + SourceUnpack(SourceUnpackArgs), +} + +#[derive(Clone, PartialEq, Eq, Debug, Args)] +pub struct LocalBuildArgs { + /// Project folder (workspace root or single contract folder). + /// Will be current directory if not specified. + #[arg(long, verbatim_doc_comment)] + pub path: Option, + + /// Output folder where build artifacts and source JSON files will be placed. + /// A subfolder per contract name will be created inside it. + #[arg(long, verbatim_doc_comment)] + pub output: String, + + /// Cargo target directory for compilation. + /// Defaults to /tmp/sc-target if not specified. + #[arg(long = "target-dir", verbatim_doc_comment)] + pub target_dir: Option, + + /// Folder where the project will be copied before building. + /// Defaults to /tmp/sc-build if not specified. + #[arg(long = "build-root", verbatim_doc_comment)] + pub build_root: Option, + + /// Only build the contract with this name (as found in Cargo.toml). + /// If not specified, all contracts under the project folder are built. + #[arg(long, verbatim_doc_comment)] + pub contract: Option, + + /// Do not optimize wasm files after the build. + #[arg(long = "no-wasm-opt", default_value = "false", verbatim_doc_comment)] + pub no_wasm_opt: bool, + + /// If the output folder is not empty, wipe it before building instead of aborting. + #[arg(long, default_value = "false", verbatim_doc_comment)] + pub force: bool, + + /// Path to a `.source.json` file produced by a previous build. + /// When set, the source is unpacked to /tmp/unwrapped/ and the build + /// proceeds from there, reproducing the original layout exactly. + /// Mutually exclusive with --path. 
+ #[arg(long = "packaged-src", verbatim_doc_comment)] + pub packaged_src: Option, +} + +#[derive(Clone, PartialEq, Eq, Debug, Args)] +pub struct SourceUnpackArgs { + /// Path to the `.source.json` file to unpack. + #[arg(long = "packaged-src", verbatim_doc_comment)] + pub packaged_src: String, + + /// Folder where the source files will be extracted. + /// Defaults to /tmp/unwrapped if not specified. + #[arg(long, verbatim_doc_comment)] + pub output: Option, +} + +#[derive(Clone, PartialEq, Eq, Debug, Args)] +pub struct DockerBuildArgs { + /// Pinned Docker image tag to run the build in. + /// e.g. `multiversx/sc-meta-reproducible-build:0.65.1` + #[arg(long = "docker-image", verbatim_doc_comment)] + pub docker_image: String, + + /// Project folder (workspace root or single contract folder). + /// Will be current directory if not specified. + #[arg(long, verbatim_doc_comment)] + pub project: Option, + + /// Output folder where build artifacts will be placed. + /// Defaults to `/output-docker/`. + #[arg(long, verbatim_doc_comment)] + pub output: Option, + + /// Only build the contract with this name (as found in Cargo.toml). + /// If not specified, all contracts under the project folder are built. + #[arg(long, verbatim_doc_comment)] + pub contract: Option, + + /// Do not optimize wasm files after the build. + #[arg(long = "no-wasm-opt", default_value = "false", verbatim_doc_comment)] + pub no_wasm_opt: bool, + + /// Override the build root path inside the container. + /// Defaults to the container's built-in default (/tmp/sc-build). + #[arg(long = "build-root", verbatim_doc_comment)] + pub build_root: Option, + + /// Do not pass `--interactive` to `docker run`. + /// Required in non-interactive environments such as CI. + #[arg( + long = "no-docker-interactive", + default_value = "false", + verbatim_doc_comment + )] + pub no_docker_interactive: bool, + + /// Do not pass `--tty` to `docker run`. + /// Required in non-interactive environments such as CI. 
+ #[arg(long = "no-docker-tty", default_value = "false", verbatim_doc_comment)] + pub no_docker_tty: bool, + + /// Skip forcing `--platform linux/amd64` on the Docker run. + #[arg( + long = "no-default-platform", + default_value = "false", + verbatim_doc_comment + )] + pub no_default_platform: bool, + + /// Set CARGO_TERM_VERBOSE=true inside the container. + #[arg(long = "cargo-verbose", default_value = "false", verbatim_doc_comment)] + pub cargo_verbose: bool, +} + #[derive(Default, Clone, PartialEq, Eq, Debug, Args)] pub struct LocalDepsArgs { /// Target directory where to generate local deps reports. diff --git a/framework/meta/src/cli/cli_standalone_main.rs b/framework/meta/src/cli/cli_standalone_main.rs index 6a1557deaf..2b75965778 100644 --- a/framework/meta/src/cli/cli_standalone_main.rs +++ b/framework/meta/src/cli/cli_standalone_main.rs @@ -1,4 +1,4 @@ -use crate::cli::{StandaloneCliAction, StandaloneCliArgs}; +use crate::cli::{ReproducibleBuildCliAction, StandaloneCliAction, StandaloneCliArgs}; use crate::cmd::chain_simulator::chain_simulator; use crate::cmd::retrieve_address::retrieve_address; use crate::cmd::scen_blackbox::scen_blackbox_tool; @@ -9,7 +9,9 @@ use crate::cmd::all::call_all_meta; use crate::cmd::code_report::report; use crate::cmd::info::call_info; use crate::cmd::install::install; -use crate::cmd::local_deps::local_deps; +use crate::cmd::reproducible_builds::{ + docker_build, local_build, local_deps, source_pack, source_unpack, +}; use crate::cmd::scen_test_gen::test_gen_tool; use crate::cmd::template::{create_contract, print_template_names}; use crate::cmd::test::test; @@ -60,6 +62,13 @@ pub async fn cli_main_standalone() { Some(StandaloneCliAction::LocalDeps(args)) => { local_deps(args); } + Some(StandaloneCliAction::ReproducibleBuild(rb_args)) => match &rb_args.command { + ReproducibleBuildCliAction::SourcePack(args) => source_pack(args), + ReproducibleBuildCliAction::LocalBuild(args) => local_build(args), + 
ReproducibleBuildCliAction::DockerBuild(args) => docker_build(args), + ReproducibleBuildCliAction::LocalDeps(args) => local_deps(args), + ReproducibleBuildCliAction::SourceUnpack(args) => source_unpack(args), + }, Some(StandaloneCliAction::Wallet(args)) => { wallet(args); } diff --git a/framework/meta/src/cmd.rs b/framework/meta/src/cmd.rs index d32a8d52be..7879a0bb71 100644 --- a/framework/meta/src/cmd.rs +++ b/framework/meta/src/cmd.rs @@ -4,8 +4,8 @@ mod check_wasmer_dependencies; pub mod code_report; pub mod info; pub mod install; -pub mod local_deps; pub mod print_util; +pub mod reproducible_builds; pub mod retrieve_address; pub mod scen_blackbox; pub mod scen_test_gen; diff --git a/framework/meta/src/cmd/all.rs b/framework/meta/src/cmd/all.rs index 20af36d7d3..a04f90b6cf 100644 --- a/framework/meta/src/cmd/all.rs +++ b/framework/meta/src/cmd/all.rs @@ -1,14 +1,17 @@ mod all_rustc_check; +mod contract_meta_call; + +pub use contract_meta_call::ContractMetaCall; use super::{ check_wasmer_dependencies::check_wasmer_dependencies, - print_util::{print_all_command, print_all_count, print_all_index}, + print_util::{print_all_count, print_all_index}, }; use crate::{ cli::AllArgs, - folder_structure::{RelevantDirectories, dir_pretty_print}, + folder_structure::{RelevantDirectories, RelevantDirectory, dir_pretty_print}, }; -use std::{path::Path, process::Command}; +use std::path::Path; pub fn call_all_meta(args: &AllArgs) { let path = if let Some(some_path) = &args.path { @@ -34,27 +37,17 @@ fn perform_call_all_meta(path: &Path, args: &AllArgs) { return; } + dirs.warn_duplicate_contract_names(); + for (i, contract_crate) in dirs.iter_contract_crates().enumerate() { print_all_index(i + 1, num_contract_crates); - - contract_crate.assert_meta_path_exists(); - let meta_path = contract_crate.meta_path(); - - all_rustc_check::verify_rustc_version(contract_crate, args); - call_contract_meta(&meta_path, &args.to_cargo_run_args()); + call_contract_meta(contract_crate, args); } } 
-pub fn call_contract_meta(meta_path: &Path, cargo_run_args: &[String]) { - print_all_command(meta_path, cargo_run_args); - - let exit_status = Command::new("cargo") - .current_dir(meta_path) - .args(cargo_run_args) - .spawn() - .expect("failed to spawn cargo run process in meta crate") - .wait() - .expect("cargo run process in meta crate was not running"); - - assert!(exit_status.success(), "contract meta process failed"); +pub fn call_contract_meta(contract_crate: &RelevantDirectory, args: &AllArgs) { + contract_crate.assert_meta_path_exists(); + all_rustc_check::verify_rustc_version(contract_crate, args); + ContractMetaCall::new(args.command.clone(), &args.meta_lib_args) + .call_for_contract(contract_crate); } diff --git a/framework/meta/src/cmd/all/all_rustc_check.rs b/framework/meta/src/cmd/all/all_rustc_check.rs index 4ec9cb0608..754e8759fd 100644 --- a/framework/meta/src/cmd/all/all_rustc_check.rs +++ b/framework/meta/src/cmd/all/all_rustc_check.rs @@ -7,8 +7,8 @@ use multiversx_sc_meta_lib::{ }; use semver::Version; -use crate::{cli::AllArgs, folder_structure::RelevantDirectory}; -use std::{path::Path, process::Command}; +use crate::{cli::AllArgs, cmd::all::ContractMetaCall, folder_structure::RelevantDirectory}; +use std::path::Path; fn should_perform_rustc_version_check(args: &AllArgs) -> bool { matches!(args.command, ContractCliAction::Build(_)) @@ -19,18 +19,8 @@ pub fn verify_rustc_version(contract_crate: &RelevantDirectory, args: &AllArgs) return; } - let abi_args = args.to_cargo_abi_for_build(); - - let meta_path = contract_crate.meta_path(); - let exit_status = Command::new("cargo") - .current_dir(&meta_path) - .args(abi_args) - .spawn() - .expect("failed to spawn cargo run process in meta crate") - .wait() - .expect("cargo run process in meta crate was not running"); - - assert!(exit_status.success(), "contract meta process failed"); + ContractMetaCall::new(ContractCliAction::Abi, &args.meta_lib_args) + .call_for_contract(contract_crate); let 
output_path = contract_crate.output_path(); diff --git a/framework/meta/src/cmd/all/contract_meta_call.rs b/framework/meta/src/cmd/all/contract_meta_call.rs new file mode 100644 index 0000000000..185b6446b3 --- /dev/null +++ b/framework/meta/src/cmd/all/contract_meta_call.rs @@ -0,0 +1,108 @@ +use std::{path::Path, process::Command}; + +use multiversx_sc_meta_lib::cli::{CliArgsToRaw, ContractCliAction}; + +use crate::{ + cli::MetaLibArgs, cmd::print_util::print_all_command, folder_structure::RelevantDirectory, +}; + +/// Holds the arguments for calling a contract's meta crate via `cargo run`. +/// +/// Separates cargo-level args (e.g. `--target-dir`, placed before `--`) +/// from binary-level args (the subcommand and its flags, placed after `--`). +pub struct ContractMetaCall { + /// Args passed to `cargo run` itself (e.g. `--target-dir `). + cargo_args: Vec, + /// Args passed to the compiled meta binary (e.g. `build --locked`). + binary_args: Vec, +} + +impl ContractMetaCall { + pub fn new(mut command: ContractCliAction, meta_lib_args: &MetaLibArgs) -> Self { + apply_target_dir_all_to_wasm(&mut command, meta_lib_args); + + let mut cargo_args = Vec::new(); + if let Some(target_dir_meta) = effective_target_dir_meta(meta_lib_args) { + cargo_args.push("--target-dir".to_string()); + cargo_args.push(target_dir_meta.clone()); + } + + let mut binary_args = command.to_raw(); + if !meta_lib_args.load_abi_git_version { + binary_args.push("--no-abi-git-version".to_string()); + } + + ContractMetaCall { + cargo_args, + binary_args, + } + } + + /// Creates a `ContractMetaCall` from a raw binary argument list, + /// with no cargo-level args. + pub fn from_raw(binary_args: Vec) -> Self { + ContractMetaCall { + cargo_args: Vec::new(), + binary_args, + } + } + + pub fn binary_args(&self) -> &[String] { + &self.binary_args + } + + /// Returns the full argument list for `cargo`, starting with `run`. 
+ fn all_cargo_args(&self) -> Vec { + let mut all = vec!["run".to_string()]; + all.extend_from_slice(&self.cargo_args); + all.push("--".to_string()); + all.extend_from_slice(&self.binary_args); + all + } + + pub fn call_in_dir(&self, meta_path: &Path) { + let all = self.all_cargo_args(); + print_all_command(meta_path, &all); + + let exit_status = Command::new("cargo") + .current_dir(meta_path) + .args(&all) + .spawn() + .expect("failed to spawn cargo run process in meta crate") + .wait() + .expect("cargo run process in meta crate was not running"); + + assert!(exit_status.success(), "contract meta process failed"); + } + + pub fn call_for_contract(&self, contract_crate: &RelevantDirectory) { + let meta_path = contract_crate.meta_path(); + self.call_in_dir(&meta_path); + } +} + +/// `--target-dir-all` overrides `--target-dir-meta`; returns whichever is set. +fn effective_target_dir_meta(meta_lib_args: &MetaLibArgs) -> Option<&String> { + meta_lib_args + .target_dir_all + .as_ref() + .or(meta_lib_args.target_dir_meta.as_ref()) +} + +/// Applies the `--target-dir-all` override to the wasm target dir inside the command. 
+fn apply_target_dir_all_to_wasm(command: &mut ContractCliAction, meta_lib_args: &MetaLibArgs) { + if let Some(target_dir_all) = &meta_lib_args.target_dir_all { + match command { + ContractCliAction::Build(build_args) => { + build_args.target_dir_wasm = Some(target_dir_all.clone()); + } + ContractCliAction::BuildDbg(build_args) => { + build_args.target_dir_wasm = Some(target_dir_all.clone()); + } + ContractCliAction::Twiggy(build_args) => { + build_args.target_dir_wasm = Some(target_dir_all.clone()); + } + _ => {} + } + } +} diff --git a/framework/meta/src/cmd/print_util.rs b/framework/meta/src/cmd/print_util.rs index 1505e4af67..d0895cdf87 100644 --- a/framework/meta/src/cmd/print_util.rs +++ b/framework/meta/src/cmd/print_util.rs @@ -18,13 +18,13 @@ pub fn print_all_index(contract_crates_index: usize, num_contract_crates: usize) ); } -pub fn print_all_command(meta_path: &Path, cargo_run_args: &[String]) { +pub fn print_all_command(meta_path: &Path, all_cargo_args: &[String]) { + let full = format!("cargo {}", all_cargo_args.join(" ")); println!( - "{} {}\n{} `cargo {}`", + "{} {}\n{} `{full}`", "In".green(), meta_path.display(), "Calling".green(), - cargo_run_args.join(" "), ); } diff --git a/framework/meta/src/cmd/reproducible_builds.rs b/framework/meta/src/cmd/reproducible_builds.rs new file mode 100644 index 0000000000..d6e8d01ce4 --- /dev/null +++ b/framework/meta/src/cmd/reproducible_builds.rs @@ -0,0 +1,13 @@ +mod build_outcome; +mod docker_build; +mod local_build; +pub mod local_deps; +mod source_json_model; +mod source_pack; +mod source_unpack; + +pub use docker_build::docker_build; +pub use local_build::local_build; +pub use local_deps::local_deps; +pub use source_pack::source_pack; +pub use source_unpack::source_unpack; diff --git a/framework/meta/src/cmd/reproducible_builds/build_outcome.rs b/framework/meta/src/cmd/reproducible_builds/build_outcome.rs new file mode 100644 index 0000000000..11d9db5a06 --- /dev/null +++ 
b/framework/meta/src/cmd/reproducible_builds/build_outcome.rs @@ -0,0 +1,228 @@ +use std::{ + env, fs, + path::{Path, PathBuf}, + process::Command, +}; + +use indexmap::IndexMap; +use serde::Serialize; + +/// Mirrors the Python `BuildMetadata`, `BuildOptions`, `BuildOutcome`, and +/// `BuildOutcomeEntry` classes from `mx-sdk-rust-contract-builder`. +/// +/// The resulting JSON is written to `/artifacts.json`. + +// ─── Metadata ──────────────────────────────────────────────────────────────── + +#[derive(Serialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct ArtifactsBuildMetadata { + pub version_rust: Option, + pub version_sc_tool: Option, + pub version_wasm_opt: Option, + pub target_platform: Option, +} + +impl ArtifactsBuildMetadata { + /// Reads from the `BUILD_METADATA_*` env vars set inside the Docker image. + /// Falls back to running the tools directly for local (non-Docker) builds. + pub fn detect() -> Self { + ArtifactsBuildMetadata { + version_rust: env::var("BUILD_METADATA_VERSION_RUST") + .ok() + .or_else(detect_rustc_version), + version_sc_tool: env::var("BUILD_METADATA_VERSION_SC_META") + .ok() + .or_else(|| Some(env!("CARGO_PKG_VERSION").to_string())), + version_wasm_opt: env::var("BUILD_METADATA_VERSION_WASM_OPT") + .ok() + .or_else(detect_wasm_opt_version), + target_platform: env::var("BUILD_METADATA_TARGETPLATFORM") + .ok() + .or_else(detect_target_platform), + } + } +} + +// ─── Options ───────────────────────────────────────────────────────────────── + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ArtifactsBuildOptions { + /// Kept for compatibility with the Python builder output. 
+    pub package_whole_project_src: bool,
+    pub specific_contract: Option,
+    pub no_wasm_opt: bool,
+    pub build_root_folder: String,
+}
+
+// ─── Per-contract artifacts ───────────────────────────────────────────────────
+
+#[derive(Serialize)]
+pub struct ContractArtifactFiles {
+    pub bytecode: String,
+    pub abi: String,
+    #[serde(rename = "srcPackage")]
+    pub src_package: String,
+}
+
+#[derive(Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ContractOutcomeEntry {
+    pub version: String,
+    pub codehash: String,
+    pub artifacts: ContractArtifactFiles,
+}
+
+// ─── Top-level outcome ────────────────────────────────────────────────────────
+
+#[derive(Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct BuildOutcome {
+    pub build_metadata: ArtifactsBuildMetadata,
+    pub build_options: ArtifactsBuildOptions,
+    /// Contracts in path-sorted insertion order, matching the Python builder output.
+    pub contracts: IndexMap,
+}
+
+impl BuildOutcome {
+    pub fn new(metadata: ArtifactsBuildMetadata, options: ArtifactsBuildOptions) -> Self {
+        BuildOutcome {
+            build_metadata: metadata,
+            build_options: options,
+            contracts: IndexMap::new(),
+        }
+    }
+
+    /// Scans `output_subfolder` (the per-contract output dir) for build
+    /// artifacts and adds an entry for each `.wasm` file found.
+    ///
+    /// The contract version is read from the first `.source.json` found in
+    /// `output_subfolder` (its metadata carries `contractVersion`); a missing
+    /// file yields an empty version string rather than an error.
+ pub fn gather(&mut self, contract_name: &str, output_subfolder: &Path) { + let wasm_files = glob_files(output_subfolder, ".wasm"); + for wasm in wasm_files { + let stem = wasm + .file_stem() + .unwrap_or_default() + .to_string_lossy() + .into_owned(); + + let codehash = read_codehash(output_subfolder, &stem); + let abi = find_file_by_suffix(output_subfolder, ".abi.json") + .unwrap_or_else(|| format!("{stem}.abi.json")); + let src_package = find_file_by_suffix(output_subfolder, ".source.json") + .unwrap_or_else(|| format!("{stem}.source.json")); + + self.contracts.insert( + stem, + ContractOutcomeEntry { + version: read_contract_version(output_subfolder, contract_name), + codehash, + artifacts: ContractArtifactFiles { + bytecode: wasm + .file_name() + .unwrap_or_default() + .to_string_lossy() + .into_owned(), + abi, + src_package, + }, + }, + ); + } + } + + pub fn save(&self, output_folder: &Path) { + let path = output_folder.join("artifacts.json"); + let mut buf = Vec::new(); + let formatter = serde_json::ser::PrettyFormatter::with_indent(b" "); + let mut ser = serde_json::Serializer::with_formatter(&mut buf, formatter); + self.serialize(&mut ser).unwrap(); + fs::write(&path, &buf).unwrap(); + println!("Artifacts summary: {}", path.display()); + } +} + +// ─── Helpers ───────────────────────────────────────────────────────────────── + +fn glob_files(dir: &Path, suffix: &str) -> Vec { + let Ok(rd) = fs::read_dir(dir) else { + return vec![]; + }; + let mut files: Vec = rd + .flatten() + .map(|e| e.path()) + .filter(|p| p.is_file() && p.to_string_lossy().ends_with(suffix)) + .collect(); + files.sort(); + files +} + +fn find_file_by_suffix(dir: &Path, suffix: &str) -> Option { + glob_files(dir, suffix) + .into_iter() + .next() + .and_then(|p| p.file_name().map(|n| n.to_string_lossy().into_owned())) +} + +fn read_codehash(output_subfolder: &Path, stem: &str) -> String { + let path = output_subfolder.join(format!("{stem}.codehash.txt")); + fs::read_to_string(&path) + 
.unwrap_or_default() + .trim() + .to_string() +} + +/// Tries to read the version from the `.source.json` (it has `contractVersion` +/// in its metadata). Falls back to an empty string so a missing file is not fatal. +fn read_contract_version(output_subfolder: &Path, _contract_name: &str) -> String { + let Some(src_json_path) = glob_files(output_subfolder, ".source.json") + .into_iter() + .next() + else { + return String::new(); + }; + let Ok(text) = fs::read_to_string(&src_json_path) else { + return String::new(); + }; + let Ok(v) = serde_json::from_str::(&text) else { + return String::new(); + }; + v["metadata"]["contractVersion"] + .as_str() + .unwrap_or("") + .to_string() +} + +fn run_tool_version(program: &str, args: &[&str]) -> Option { + let out = Command::new(program).args(args).output().ok()?; + if out.status.success() { + let raw = String::from_utf8_lossy(&out.stdout).trim().to_string(); + Some(raw) + } else { + None + } +} + +fn detect_rustc_version() -> Option { + // `rustc --version` → "rustc 1.93.0 (... ...)" + run_tool_version("rustc", &["--version"]) + .map(|s| s.split_whitespace().nth(1).unwrap_or(&s).to_string()) +} + +fn detect_wasm_opt_version() -> Option { + // `wasm-opt --version` → "wasm-opt version 116 (... 
...)" or similar + run_tool_version("wasm-opt", &["--version"]) +} + +fn detect_target_platform() -> Option { + // `rustc -vV` includes "host: x86_64-unknown-linux-gnu" + let out = run_tool_version("rustc", &["-vV"])?; + for line in out.lines() { + if let Some(host) = line.strip_prefix("host: ") { + return Some(host.trim().to_string()); + } + } + None +} diff --git a/framework/meta/src/cmd/reproducible_builds/docker_build.rs b/framework/meta/src/cmd/reproducible_builds/docker_build.rs new file mode 100644 index 0000000000..9b69581ca7 --- /dev/null +++ b/framework/meta/src/cmd/reproducible_builds/docker_build.rs @@ -0,0 +1,174 @@ +use std::{ + fs, + path::{Path, PathBuf}, + process::{Command, Stdio}, +}; + +use crate::cli::DockerBuildArgs; + +/// Host directory used for Cargo caches that are shared across Docker runs. +const CARGO_CACHE_BASE: &str = "/tmp/multiversx_sc_meta_builder"; + +/// Runs the reproducible build inside Docker. +/// +/// Mirrors `build_with_docker.py` from `mx-sdk-rust-contract-builder` and +/// `mxpy contract reproducible-build`, but uses the `sc-meta`-based Docker +/// image whose entrypoint is: +/// +/// sc-meta reproducible-build local-build --output /output --target-dir /rust/cargo-target-dir +/// +/// Volume layout (host → container): +/// → /project (source; read-only in practice) +/// → /output (artifacts written here) +/// cargo-target-dir → /rust/cargo-target-dir (optional cache) +/// cargo-registry → /rust/registry (optional cache) +/// cargo-git → /rust/git (optional cache) +pub fn docker_build(args: &DockerBuildArgs) { + check_docker_available(); + + let project = resolve_project(args.project.as_deref()); + let output = resolve_output(&project, args.output.as_deref()); + + fs::create_dir_all(&output).unwrap(); + let output = output.canonicalize().unwrap(); + + // Shared Cargo cache directories — created once, reused across runs. 
+ let cache_base = Path::new(CARGO_CACHE_BASE); + let cargo_target = cache_base.join("cargo-target-dir"); + let cargo_registry = cache_base.join("cargo-registry"); + let cargo_git = cache_base.join("cargo-git"); + fs::create_dir_all(&cargo_target).unwrap(); + fs::create_dir_all(&cargo_registry).unwrap(); + fs::create_dir_all(&cargo_git).unwrap(); + + let mut cmd = Command::new("docker"); + cmd.arg("run"); + + if !args.no_default_platform { + cmd.args(["--platform", "linux/amd64"]); + } + if !args.no_docker_interactive { + cmd.arg("--interactive"); + } + if !args.no_docker_tty { + cmd.arg("--tty"); + } + + #[cfg(unix)] + if let Some(user_arg) = get_unix_user() { + cmd.args(["--user", &user_arg]); + } + + cmd.arg("--rm"); + + // Volume mounts + cmd.args(["--volume", &format!("{}:/project", project.display())]); + cmd.args(["--volume", &format!("{}:/output", output.display())]); + cmd.args([ + "--volume", + &format!("{}:/rust/cargo-target-dir", cargo_target.display()), + ]); + cmd.args([ + "--volume", + &format!("{}:/rust/registry", cargo_registry.display()), + ]); + cmd.args(["--volume", &format!("{}:/rust/git", cargo_git.display())]); + + let verbose = if args.cargo_verbose { "true" } else { "false" }; + cmd.args(["--env", &format!("CARGO_TERM_VERBOSE={verbose}")]); + + // Image name + cmd.arg(&args.docker_image); + + // Entrypoint args — appended after the ENTRYPOINT baked into the image. 
+ // The image already provides: --output /output --target-dir /rust/cargo-target-dir + cmd.args(["--path", "/project"]); + if let Some(contract) = &args.contract { + cmd.args(["--contract", contract]); + } + if args.no_wasm_opt { + cmd.arg("--no-wasm-opt"); + } + if let Some(build_root) = &args.build_root { + cmd.args(["--build-root", build_root]); + } + + println!("Running: {}", format_command(&cmd)); + + let status = cmd + .stdin(Stdio::inherit()) + .stdout(Stdio::inherit()) + .stderr(Stdio::inherit()) + .status() + .unwrap_or_else(|e| panic!("Failed to launch docker: {e}")); + + if !status.success() { + panic!( + "Docker build failed with exit code: {}", + status.code().unwrap_or(-1) + ); + } + + println!("Output written to: {}", output.display()); +} + +fn check_docker_available() { + let ok = Command::new("docker") + .args(["info"]) + .stdout(Stdio::null()) + .stderr(Stdio::null()) + .status() + .map(|s| s.success()) + .unwrap_or(false); + + if !ok { + panic!( + "Docker is not available or the daemon is not running.\n\ + Install Docker and ensure `docker info` succeeds before retrying." + ); + } +} + +fn resolve_project(path: Option<&str>) -> PathBuf { + let p = path.unwrap_or("."); + Path::new(p) + .canonicalize() + .unwrap_or_else(|_| PathBuf::from(p)) +} + +fn resolve_output(project: &Path, output: Option<&str>) -> PathBuf { + match output { + Some(o) => PathBuf::from(o), + None => project.join("output-docker"), + } +} + +/// Formats a `Command` as a readable shell string for logging. 
+fn format_command(cmd: &Command) -> String { + let prog = cmd.get_program().to_string_lossy().into_owned(); + let args: Vec = cmd + .get_args() + .map(|a| { + let s = a.to_string_lossy(); + if s.contains(' ') { + format!("\"{s}\"") + } else { + s.into_owned() + } + }) + .collect(); + format!("{prog} {}", args.join(" ")) +} + +#[cfg(unix)] +fn get_unix_user() -> Option { + let uid = Command::new("id").arg("-u").output().ok()?; + let gid = Command::new("id").arg("-g").output().ok()?; + let uid = String::from_utf8(uid.stdout).ok()?.trim().to_string(); + let gid = String::from_utf8(gid.stdout).ok()?.trim().to_string(); + if uid.is_empty() || gid.is_empty() { + None + } else { + Some(format!("{uid}:{gid}")) + } +} diff --git a/framework/meta/src/cmd/reproducible_builds/local_build.rs b/framework/meta/src/cmd/reproducible_builds/local_build.rs new file mode 100644 index 0000000000..e7ff863222 --- /dev/null +++ b/framework/meta/src/cmd/reproducible_builds/local_build.rs @@ -0,0 +1,307 @@ +use std::{ + collections::HashMap, + fs, + path::{Path, PathBuf}, +}; + +use multiversx_sc_meta_lib::cargo_toml::CargoTomlContents; +use multiversx_sc_meta_lib::cli::{BuildArgs, ContractCliAction}; +use multiversx_sc_meta_lib::tools::generate_codehashes_in_output; + +use crate::cli::{AllArgs, LocalBuildArgs, MetaLibArgs}; +use crate::cmd::all::call_contract_meta; +use crate::folder_structure::{RelevantDirectories, RelevantDirectory}; + +use super::build_outcome::{ArtifactsBuildMetadata, ArtifactsBuildOptions, BuildOutcome}; +use super::source_pack::source_pack_contract; +use super::source_unpack::unpack_packaged_src; + +/// Mirrors the Python `build_project` pipeline, but runs locally instead of inside Docker. +/// +/// Steps: +/// 1. Discover all contracts (`multiversx.json` markers). +/// 2. Copy the project to `--build-root` (wipes it first; skips `target/` dirs). +/// 3. Snapshot `Cargo.lock` files. +/// 4. For each contract (filtered by `--contract` if set): +/// a. 
Clean build artifacts (`wasm/target/`, `meta/target/`, `output/`). +/// b. Run `sc-meta all build --target-dir --locked`. +/// c. Write the `.source.json` via `source_pack_contract`. +/// d. Clean again, keeping `output/`. +/// e. Copy `output/` to `--output//`. +/// 5. Verify no `Cargo.lock` file changed (enforces `--locked`). +pub fn local_build(args: &LocalBuildArgs) { + if args.path.is_some() && args.packaged_src.is_some() { + eprintln!("Error: --path and --packaged-src are mutually exclusive."); + std::process::exit(1); + } + + // If --packaged-src is set, unpack to /tmp/unwrapped/ and derive project/build-root from it. + let (project_folder, build_root) = if let Some(src) = args.packaged_src.as_deref() { + let unwrap_folder = PathBuf::from(super::source_unpack::HARDCODED_UNWRAP_FOLDER); + let (folder, build_root_from_json) = unpack_packaged_src(Path::new(src), &unwrap_folder); + (folder, PathBuf::from(build_root_from_json)) + } else { + let folder = resolve_path(args.path.as_deref()); + let root = PathBuf::from(args.build_root.as_deref().unwrap_or("/tmp/sc-build")); + (folder, root) + }; + + let output_folder = { + fs::create_dir_all(&args.output).unwrap(); + let p = Path::new(&args.output).canonicalize().unwrap(); + guard_output_folder(&p, args.force); + p + }; + + let cargo_target_dir = { + let p = args.target_dir.as_deref().unwrap_or("/tmp/sc-target"); + fs::create_dir_all(p).unwrap(); + Path::new(p).canonicalize().unwrap() + }; + + // 1. Discover contracts + let dirs = RelevantDirectories::find_all(&project_folder, &["target".to_string()]); + if dirs.iter_contract_crates().count() == 0 { + println!( + "No contracts found (no multiversx.json) under: {}", + project_folder.display() + ); + return; + } + + dirs.ensure_distinct_contract_names(); + + // 2. 
Copy project to build root (wipes first, skips target/) + println!("Copying project to build root: {}", build_root.display()); + copy_project_to_build_root(&project_folder, &build_root); + + // Canonicalize after the directory exists to resolve symlinks (e.g. /tmp → /private/tmp on macOS). + // This ensures all subsequent path operations use the same resolved prefix. + let build_root = build_root.canonicalize().unwrap(); + + // 3. Snapshot Cargo.lock files + let locks_before = snapshot_cargo_locks(&build_root); + + let target_dir_str = cargo_target_dir.to_string_lossy().into_owned(); + let all_args = AllArgs { + command: ContractCliAction::Build(BuildArgs { + locked: true, + wasm_opt: !args.no_wasm_opt, + target_dir_wasm: Some(target_dir_str.clone()), + ..Default::default() + }), + meta_lib_args: MetaLibArgs { + target_dir_meta: Some(target_dir_str), + ..Default::default() + }, + ..Default::default() + }; + + let mut outcome = BuildOutcome::new( + ArtifactsBuildMetadata::detect(), + ArtifactsBuildOptions { + package_whole_project_src: true, + specific_contract: args.contract.clone(), + no_wasm_opt: args.no_wasm_opt, + build_root_folder: build_root.to_string_lossy().into_owned(), + }, + ); + + // 4. Build each contract + for dir in dirs.iter_contract_crates() { + let cargo_toml = CargoTomlContents::load_from_file(dir.path.join("Cargo.toml")); + let contract_name = cargo_toml.package_name(); + + if let Some(filter) = args.contract.as_deref() { + if contract_name != filter { + println!("Skipping: {contract_name}"); + continue; + } + } + + let relative = dir.path.strip_prefix(&project_folder).unwrap(); + let build_contract_folder = build_root.join(relative); + let output_subfolder = output_folder.join(&contract_name); + fs::create_dir_all(&output_subfolder).unwrap(); + + println!("Building: {contract_name}"); + + // a. Clean (remove output/ too) + clean_contract(&build_contract_folder, true); + + // b. 
Build + let build_dir = RelevantDirectory { + path: build_contract_folder.clone(), + ..dir.clone() + }; + call_contract_meta(&build_dir, &all_args); + + // b2. Generate codehash for each .wasm in output/ + generate_codehashes_in_output(&build_contract_folder.join("output")); + + // c. Pack source into build_contract_folder/output/ + source_pack_contract( + &build_root, + &build_contract_folder, + args.contract.as_deref(), + ); + + // d. Clean, keep output/ + clean_contract(&build_contract_folder, false); + + // e. Copy output/ to parent output subfolder + copy_dir_contents(&build_contract_folder.join("output"), &output_subfolder); + + // f. Gather artifacts for artifacts.json + outcome.gather(&contract_name, &output_subfolder); + + println!("Output: {}", output_subfolder.display()); + } + + // 5. Verify Cargo.lock unchanged + let locks_after = snapshot_cargo_locks(&build_root); + check_cargo_locks_unchanged(&locks_before, &locks_after); + + // 6. Write artifacts.json to the output folder root + outcome.save(&output_folder); +} + +fn resolve_path(path: Option<&str>) -> PathBuf { + let p = path.unwrap_or("."); + Path::new(p) + .canonicalize() + .unwrap_or_else(|_| PathBuf::from(p)) +} + +/// Checks that `output_folder` is empty before starting a build. +/// If `force` is true, wipes the folder instead of aborting. +fn guard_output_folder(output_folder: &Path, force: bool) { + let is_non_empty = output_folder + .read_dir() + .map(|mut rd| rd.next().is_some()) + .unwrap_or(false); + if is_non_empty { + if force { + fs::remove_dir_all(output_folder).unwrap(); + fs::create_dir_all(output_folder).unwrap(); + } else { + eprintln!( + "Error: output folder is not empty: {}\nUse --force to wipe it before building.", + output_folder.display() + ); + std::process::exit(1); + } + } else { + println!("Output empty"); + } +} + +/// Wipes `build_root` and copies the entire `project_folder` into it. +/// Skips `target/` directories to avoid copying large build artifacts. 
+fn copy_project_to_build_root(project_folder: &Path, build_root: &Path) { + if build_root.exists() { + fs::remove_dir_all(build_root).unwrap(); + } + copy_dir_skip_target(project_folder, build_root); +} + +fn copy_dir_skip_target(src: &Path, dst: &Path) { + fs::create_dir_all(dst).unwrap(); + let Ok(read_dir) = fs::read_dir(src) else { + return; + }; + for entry in read_dir.flatten() { + let path = entry.path(); + let name = entry.file_name(); + if name == "target" { + continue; + } + let dest = dst.join(&name); + if path.is_symlink() { + let link_target = fs::read_link(&path).unwrap(); + #[cfg(unix)] + std::os::unix::fs::symlink(&link_target, &dest).unwrap_or_else(|_| { + eprintln!("Warning: could not create symlink: {}", dest.display()) + }); + } else if path.is_dir() { + copy_dir_skip_target(&path, &dest); + } else { + fs::copy(&path, &dest).unwrap(); + } + } +} + +fn copy_dir_contents(src: &Path, dst: &Path) { + if !src.is_dir() { + return; + } + fs::create_dir_all(dst).unwrap(); + let Ok(read_dir) = fs::read_dir(src) else { + return; + }; + for entry in read_dir.flatten() { + let path = entry.path(); + let dest = dst.join(entry.file_name()); + if path.is_dir() { + copy_dir_contents(&path, &dest); + } else { + fs::copy(&path, &dest).unwrap(); + } + } +} + +/// Removes `wasm/target/` and `meta/target/` inside `contract_folder`. +/// If `clean_output` is true, also removes `output/`. 
+fn clean_contract(contract_folder: &Path, clean_output: bool) { + let _ = fs::remove_dir_all(contract_folder.join("wasm").join("target")); + let _ = fs::remove_dir_all(contract_folder.join("meta").join("target")); + if clean_output { + let _ = fs::remove_dir_all(contract_folder.join("output")); + } +} + +fn snapshot_cargo_locks(root: &Path) -> HashMap> { + let mut map = HashMap::new(); + collect_cargo_locks(root, &mut map); + map +} + +fn collect_cargo_locks(dir: &Path, map: &mut HashMap>) { + let Ok(read_dir) = fs::read_dir(dir) else { + return; + }; + for entry in read_dir.flatten() { + let path = entry.path(); + if path.is_dir() { + if path.file_name().map(|n| n != "target").unwrap_or(true) { + collect_cargo_locks(&path, map); + } + } else if path.file_name().map(|n| n == "Cargo.lock").unwrap_or(false) { + if let Ok(contents) = fs::read(&path) { + map.insert(path, contents); + } + } + } +} + +fn check_cargo_locks_unchanged( + before: &HashMap>, + after: &HashMap>, +) { + let mut any_changed = false; + for (path, before_contents) in before { + match after.get(path) { + Some(after_contents) if before_contents != after_contents => { + eprintln!("Error: Cargo.lock changed during build: {}", path.display()); + any_changed = true; + } + None => { + eprintln!("Warning: Cargo.lock disappeared: {}", path.display()); + } + _ => {} + } + } + if any_changed { + panic!("One or more Cargo.lock files changed during build. 
Use --locked to prevent this."); + } +} diff --git a/framework/meta/src/cmd/local_deps.rs b/framework/meta/src/cmd/reproducible_builds/local_deps.rs similarity index 79% rename from framework/meta/src/cmd/local_deps.rs rename to framework/meta/src/cmd/reproducible_builds/local_deps.rs index bd06511fba..f4dc57aca7 100644 --- a/framework/meta/src/cmd/local_deps.rs +++ b/framework/meta/src/cmd/reproducible_builds/local_deps.rs @@ -14,6 +14,10 @@ use std::{ path::{Path, PathBuf}, }; +pub const LOCAL_DEPS_FILE_NAME: &str = "local_deps.txt"; + +pub type DependencyDepth = u64; + #[derive(Serialize, Deserialize, Default)] #[serde(rename_all = "camelCase")] pub struct LocalDeps { @@ -26,11 +30,11 @@ pub struct LocalDeps { #[derive(Serialize, Deserialize, Clone)] pub struct LocalDep { pub path: String, - pub depth: usize, + pub depth: DependencyDepth, } impl LocalDep { - fn new(path: &Path, depth: usize) -> Self { + fn new(path: &Path, depth: DependencyDepth) -> Self { LocalDep { path: path.to_string_lossy().to_string(), depth, @@ -49,6 +53,21 @@ pub fn local_deps(args: &LocalDepsArgs) { perform_local_deps(path, args.ignore.as_slice()); } +pub fn compute_local_deps(contract_dir: &Path) -> LocalDeps { + let contract_dir = contract_dir.canonicalize().unwrap(); + let mut dep_map: BTreeMap = BTreeMap::new(); + expand_deps(&contract_dir, contract_dir.clone(), &mut dep_map); + + let common_dependency_path = common_path_all(dep_map.keys().map(|p| p.as_path())); + + LocalDeps { + root: contract_dir.clone(), + contract_path: contract_dir, + common_dependency_path: common_dependency_path.map(|p| p.to_string_lossy().to_string()), + dependencies: dep_map.values().cloned().collect(), + } +} + fn perform_local_deps(root_path: &Path, ignore: &[String]) { let dirs = RelevantDirectories::find_all(root_path, ignore); dir_pretty_print(dirs.iter_contract_crates(), "", &|_| {}); @@ -71,12 +90,12 @@ fn perform_local_deps(root_path: &Path, ignore: &[String]) { dependencies: 
dep_map.values().cloned().collect(), }; - let mut deps_file = File::create(output_dir_path.join("local_deps.txt")).unwrap(); + let mut deps_file = File::create(output_dir_path.join(LOCAL_DEPS_FILE_NAME)).unwrap(); writeln!(deps_file, "{}", serialize_local_deps_json(&deps_contents)).unwrap(); } } -fn expand_deps( +pub fn expand_deps( root_path: &Path, starting_path: PathBuf, dep_map: &mut BTreeMap, @@ -93,7 +112,7 @@ fn expand_deps( let parent_depth = if let Some(parent_dep) = dep_map.get(&parent) { parent_dep.depth } else { - 0 + 0u64 }; let child_depth = parent_depth + 1; if let Some(local_dep) = dep_map.get_mut(&full_path) { @@ -111,7 +130,7 @@ fn expand_deps( } } -fn serialize_local_deps_json(deps_contents: &LocalDeps) -> String { +pub fn serialize_local_deps_json(deps_contents: &LocalDeps) -> String { let buf = Vec::new(); let formatter = serde_json::ser::PrettyFormatter::with_indent(b" "); let mut ser = serde_json::Serializer::with_formatter(buf, formatter); diff --git a/framework/meta/src/cmd/reproducible_builds/repro-todo.md b/framework/meta/src/cmd/reproducible_builds/repro-todo.md new file mode 100644 index 0000000000..f125374c5c --- /dev/null +++ b/framework/meta/src/cmd/reproducible_builds/repro-todo.md @@ -0,0 +1,159 @@ +# Reproducible Build — Missing Features in `sc-meta` + +Reference implementations: +- Python builder: `mx-sdk-rust-contract-builder/multiversx_sdk_rust_contract_builder/` +- Docker invocation: `mx-sdk-rust-contract-builder/build_with_docker.py` +- mxpy CLI docs: `mx-sdk-py-cli/docs/reproducible-build.md` + +--- + +## 1. `docker-build` subcommand *(most critical)* + +There is no Docker-based build command at all. `local-build` runs purely on the host. A new `docker-build` subcommand is needed in `ReproducibleBuildCliAction`, mirroring `mxpy contract reproducible-build` and `build_with_docker.py`. + +**Required `DockerBuildArgs` flags:** + +| Flag | Default | Notes | +|---|---|---| +| `--docker-image` | *required* | Pinned tag e.g. 
`multiversx/sdk-rust-contract-builder:v8.0.0` | +| `--project` | cwd | Host path, mounted as `/project` | +| `--output` | `/output-docker/` | Host path, mounted as `/output` | +| `--contract` | *(all)* | Filter by name from `Cargo.toml` | +| `--no-wasm-opt` | false | Forwarded to container entrypoint | +| `--build-root` | *(container default)* | Forwarded to container entrypoint | +| `--no-docker-interactive` | false | Omit `--interactive` (CI mode) | +| `--no-docker-tty` | false | Omit `--tty` (CI mode) | +| `--no-default-platform` | false | Skip `--platform linux/amd64` | +| `--cargo-verbose` | false | Sets `CARGO_TERM_VERBOSE=true` in container env | + +**Docker command construction** (`std::process::Command`): +``` +docker run + [--platform linux/amd64] # unless --no-default-platform + [--interactive] [--tty] # unless --no-docker-* + --user : # from libc::getuid/getgid on Unix; skip/warn on Windows + --rm + --volume :/project + --volume :/output + --volume :/rust/cargo-target-dir + --volume :/rust/registry + --volume :/rust/git + --env CARGO_TERM_VERBOSE=false + + --project project # container-side path + [--contract ] + [--no-wasm-opt] + [--build-root ] +``` + +The cargo cache volumes (`/tmp/multiversx_sdk_rust_contract_builder/{cargo-target-dir,cargo-registry,cargo-git}`) should be created on the host before running, mirroring `build_with_docker.py`. + +**Docker availability check:** Run `docker info` (or `docker --version`) before attempting; emit a clear error if absent. + +--- + +## 2. `artifacts.json` output from `local-build` + +The Python builder writes `/artifacts.json` containing build metadata, build options, and per-contract entries (version, codehash, wasm/abi/source paths). `local_build.rs` produces none of this. The file is consumed by downstream tooling and the GitHub Actions release workflow (for blake2b hash notes in release descriptions). 
+ +**Required structure** (mirrors `BuildOutcome.to_dict()`): +```json +{ + "buildMetadata": { + "versionRust": "…", + "versionScTool": "…", + "versionWasmOpt": "…", + "targetPlatform": "…" + }, + "buildOptions": { + "specificContract": null, + "noWasmOpt": false, + "buildRootFolder": "…" + }, + "contracts": { + "adder": { + "version": "0.0.0", + "codehash": "…", + "wasmPath": "…", + "abiPath": "…", + "srcPackagePath": "…" + } + } +} +``` + +--- + +## 3. `BuildMetadata` population in `.source.json` + +`source.rs` always emits `BuildMetadata::default()` (all fields `None`). Both `local-build` and `docker-build` must populate these fields: + +- **`versionRust`** — parse from `rustc --version` +- **`versionScTool`** — `sc-meta --version` or `env!("CARGO_PKG_VERSION")` +- **`versionWasmOpt`** — parse from `wasm-opt --version` (when wasm-opt is enabled) +- **`targetPlatform`** — `linux/amd64` when in Docker; on `local-build` use the host target triple (or `linux/amd64` if forced) + +In the Docker case these come from the Dockerfile's `ENV BUILD_METADATA_*` variables set at image build time. The `local-build` path must detect them at runtime by invoking the tools. + +--- + +## 4. `buildRootFolder` should reflect the actual build path + +In `source_pack_contract`, `buildRootFolder` is currently set to the host `project_folder` path. When building inside Docker (or `local-build` copying to `/tmp/sc-build`), this must be the **build-root path** — the path that was used when `sc-meta all build` ran — so that a downstream verifier can reproduce the exact layout. + +`source_pack_contract` should accept a `build_root: &Path` argument and write that into `BuildOptions::build_root_folder` instead of `project_folder`. + +--- + +## 5. Dockerfile with `sc-meta` as entrypoint + +The existing `Dockerfile` in `mx-sdk-rust-contract-builder` uses Python's `main.py` as its entrypoint. 
To make `sc-meta reproducible-build docker-build` self-contained, a Dockerfile whose entrypoint is `sc-meta` is needed: + +```dockerfile +ENTRYPOINT ["sc-meta", "reproducible-build", "local-build", \ + "--output", "/output", \ + "--target-dir", "/rust/cargo-target-dir"] +``` + +The Dockerfile must pin the same toolchain versions and set the same `BUILD_METADATA_*` env vars as the Python builder's Dockerfile. Decision needed: does this live in `mx-sdk-rs` (e.g. `framework/meta/Dockerfile`) or remain in `mx-sdk-rust-contract-builder`? + +--- + +## 6. `--packaged-src` mode in `local-build` + +Build from an existing `.source.json` by unpacking it to a temp folder, mirroring Python's `HARDCODED_UNWRAP_FOLDER = /tmp/unwrapped` path. Not present at all in the current Rust implementation. + +This mode is needed for the `mxpy contract verify` / contract verification service flow, which re-builds from the packaged source. + +Steps: +1. Parse the `.source.json` file. +2. Unpack all `entries` back to the filesystem under `/tmp/unwrapped/` (base64-decode each file content). +3. Read `metadata.buildOptions.buildRootFolder` to set the correct `--build-root`. +4. Proceed with the standard `local-build` pipeline from that folder. + +--- + +## 7. `CARGO_NET_GIT_FETCH_WITH_CLI=true` in build subprocess + +`local_build.rs` calls `call_contract_meta` but never sets this env var. The Python builder sets it explicitly to avoid high memory usage when Cargo fetches registry indexes and git dependencies (see https://github.com/rust-lang/cargo/issues/10583). Should be set as an env override on the build subprocess. + +--- + +## 8. Output folder non-empty guard + +`local_build.rs` silently overwrites previous outputs. The Python builder raises an error if the output folder is not empty, preventing accidental mixing of artifacts from different builds. Should add a guard (or a `--force` flag to skip it). 
+ +--- + +## Summary + +| # | Gap | Complexity | +|---|---|---| +| 1 | `docker-build` subcommand + Docker invocation | High | +| 2 | `artifacts.json` per-build summary | Medium | +| 3 | `BuildMetadata` detection + population in `.source.json` | Medium | +| 4 | `buildRootFolder` reflects actual build path in `.source.json` | Low | +| 5 | Dockerfile with `sc-meta` as entrypoint | Medium | +| 6 | `--packaged-src` build-from-JSON mode | Medium | +| 7 | `CARGO_NET_GIT_FETCH_WITH_CLI=true` in build subprocess | Low | +| 8 | Output-folder non-empty guard | Low | diff --git a/framework/meta/src/cmd/reproducible_builds/source_json_model.rs b/framework/meta/src/cmd/reproducible_builds/source_json_model.rs new file mode 100644 index 0000000000..9e6d1ee537 --- /dev/null +++ b/framework/meta/src/cmd/reproducible_builds/source_json_model.rs @@ -0,0 +1,58 @@ +use serde::{Deserialize, Serialize}; + +use super::local_deps::DependencyDepth; + +pub const SCHEMA_VERSION: &str = "2.0.0"; +pub const SOURCE_JSON_EXTENSION: &str = ".source.json"; + +/// Sentinel depth for project-level files (mirrors Python's `sys.maxsize`). 
+pub const SYS_MAXSIZE: DependencyDepth = i64::MAX as DependencyDepth; + +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SourceFileEntry { + pub path: String, + pub content: String, + pub module: String, + pub dependency_depth: DependencyDepth, + pub is_test_file: bool, +} + +#[derive(Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SourceBuildMetadata { + #[serde(skip_serializing_if = "Option::is_none")] + pub version_rust: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub version_sc_tool: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub version_wasm_opt: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub target_platform: Option, +} + +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SourceBuildOptions { + /// Kept for compatibility with the Python builder. + pub package_whole_project_src: bool, + pub specific_contract: Option, + pub build_root_folder: String, +} + +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SourceMetadata { + pub contract_name: String, + pub contract_version: String, + pub build_metadata: SourceBuildMetadata, + pub build_options: SourceBuildOptions, +} + +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PackedSource { + pub schema_version: String, + pub metadata: SourceMetadata, + pub entries: Vec, +} diff --git a/framework/meta/src/cmd/reproducible_builds/source_pack.rs b/framework/meta/src/cmd/reproducible_builds/source_pack.rs new file mode 100644 index 0000000000..1447fd96e0 --- /dev/null +++ b/framework/meta/src/cmd/reproducible_builds/source_pack.rs @@ -0,0 +1,220 @@ +use base64::{Engine, engine::general_purpose::STANDARD as BASE64}; +use serde::Serialize; +use std::{ + collections::HashSet, + fs, + path::{Path, PathBuf}, +}; + +use multiversx_sc_meta_lib::cargo_toml::CargoTomlContents; + +use crate::cli::PackArgs; +use 
crate::folder_structure::RelevantDirectories;

use super::local_deps::{DependencyDepth, compute_local_deps};
use super::source_json_model::{
    PackedSource, SCHEMA_VERSION, SOURCE_JSON_EXTENSION, SYS_MAXSIZE, SourceBuildMetadata,
    SourceBuildOptions, SourceFileEntry, SourceMetadata,
};

/// File names (regardless of extension) that are included as source files.
const NAMED_SOURCE_FILES: &[&str] = &[
    "Cargo.toml",
    "Cargo.lock",
    "multicontract.toml",
    "sc-config.toml",
    "multiversx.json",
];

/// Packages the source code for all contracts found under the project folder.
///
/// The folder can be a workspace root (containing multiple contracts) or a
/// single contract folder. Mirrors the behaviour of the Python builder's
/// `build_project` / `create_packaged_source_code` functions.
///
/// For each contract, writes:
/// `<contract-folder>/output/<name>-<version>.source.json`
///
/// # Panics
/// Panics when the given path cannot be canonicalized.
pub fn source_pack(args: &PackArgs) {
    // Default to the current directory when no path is given.
    let raw_path = args.path.as_deref().unwrap_or(".");
    let project_folder = Path::new(raw_path)
        .canonicalize()
        .unwrap_or_else(|err| panic!("Failed to resolve project path '{raw_path}': {err}"));

    // `target` is excluded: build output never belongs in packaged source.
    let dirs = RelevantDirectories::find_all(&project_folder, &["target".to_string()]);
    if dirs.iter_contract_crates().count() == 0 {
        println!(
            "No contracts found (no multiversx.json) under: {}",
            project_folder.display()
        );
        return;
    }

    for dir in dirs.iter_contract_crates() {
        source_pack_contract(&project_folder, &dir.path, args.contract.as_deref());
    }
}

/// Packs source for one contract, with all paths relative to `project_folder`.
/// If `specific_contract` is `Some`, skips contracts whose name doesn't match.
+pub(crate) fn source_pack_contract( + project_folder: &Path, + contract_folder: &Path, + specific_contract: Option<&str>, +) { + let cargo_toml = CargoTomlContents::load_from_file(contract_folder.join("Cargo.toml")); + let contract_name = cargo_toml.package_name(); + if let Some(filter) = specific_contract { + if contract_name != filter { + return; + } + } + let contract_version = cargo_toml.package_version(); + + let local_deps = compute_local_deps(contract_folder); + + let mut entries: Vec = Vec::new(); + let mut added: HashSet = HashSet::new(); + + // 1. Files from the contract folder itself (depth 0, module = contract relative to project) + let contract_module = module_path(project_folder, contract_folder); + for file in collect_source_files(contract_folder) { + entries.push(make_entry(&file, project_folder, &contract_module, 0u64)); + added.insert(file); + } + + // 2. Files from each local dependency folder + for dep in &local_deps.dependencies { + let dep_folder = contract_folder.join(&dep.path).canonicalize().unwrap(); + let dep_module = module_path(project_folder, &dep_folder); + for file in collect_source_files(&dep_folder) { + if added.contains(&file) { + continue; + } + entries.push(make_entry(&file, project_folder, &dep_module, dep.depth)); + added.insert(file); + } + } + + // 3. Remaining files from the project folder (catches workspace-level Cargo.lock, etc.) + // This is a no-op when project_folder == contract_folder. 
+ for file in collect_source_files(project_folder) { + if !added.contains(&file) { + entries.push(make_entry( + &file, + project_folder, + &contract_module, + SYS_MAXSIZE, + )); + added.insert(file); + } + } + + // Sort by (dependency_depth, path) to match the Python builder output + entries.sort_by(|a, b| { + a.dependency_depth + .cmp(&b.dependency_depth) + .then(a.path.cmp(&b.path)) + }); + + let packed = PackedSource { + schema_version: SCHEMA_VERSION.to_string(), + metadata: SourceMetadata { + contract_name: contract_name.clone(), + contract_version: contract_version.clone(), + build_metadata: SourceBuildMetadata::default(), + build_options: SourceBuildOptions { + package_whole_project_src: true, + specific_contract: specific_contract.map(|s| s.to_string()), + build_root_folder: project_folder.to_string_lossy().into_owned(), + }, + }, + entries, + }; + + let output_dir = contract_folder.join("output"); + fs::create_dir_all(&output_dir).unwrap(); + let output_path = output_dir.join(format!( + "{contract_name}-{contract_version}{SOURCE_JSON_EXTENSION}" + )); + + let formatter = serde_json::ser::PrettyFormatter::with_indent(b" "); + let mut buf = Vec::new(); + let mut ser = serde_json::Serializer::with_formatter(&mut buf, formatter); + packed.serialize(&mut ser).unwrap(); + buf.push(b'\n'); + fs::write(&output_path, &buf).unwrap(); + + println!("Source packed to: {}", output_path.display()); +} + +/// Returns the path of `folder` relative to `project_folder`, using forward slashes. 
+fn module_path(project_folder: &Path, folder: &Path) -> String { + pathdiff::diff_paths(folder, project_folder) + .unwrap_or_else(|| folder.to_path_buf()) + .to_string_lossy() + .replace('\\', "/") +} + +fn make_entry( + file: &Path, + project_folder: &Path, + module: &str, + depth: DependencyDepth, +) -> SourceFileEntry { + let rel = pathdiff::diff_paths(file, project_folder).unwrap(); + let path_str = rel.to_string_lossy().replace('\\', "/"); + let content = BASE64.encode(fs::read(file).unwrap()); + SourceFileEntry { + path: path_str.clone(), + content, + module: module.to_string(), + dependency_depth: depth, + is_test_file: is_test_file(&path_str), + } +} + +/// A file is a test file if it is a `.rs` file whose path contains a `test` or `tests` component. +fn is_test_file(path_str: &str) -> bool { + if !path_str.ends_with(".rs") { + return false; + } + path_str + .split('/') + .any(|component| component == "test" || component == "tests") +} + +fn collect_source_files(folder: &Path) -> Vec { + let mut result = Vec::new(); + collect_recursive(folder, &mut result); + result.sort(); + result +} + +fn collect_recursive(current: &Path, result: &mut Vec) { + let entries = match fs::read_dir(current) { + Ok(e) => e, + Err(_) => return, + }; + for entry in entries.flatten() { + let path = entry.path(); + if path.is_dir() { + if path.file_name().map(|n| n != "target").unwrap_or(true) { + collect_recursive(&path, result); + } + } else if is_source_file(&path) { + result.push(path); + } + } +} + +fn is_source_file(path: &Path) -> bool { + if path.extension().map(|e| e == "rs").unwrap_or(false) { + return true; + } + if let Some(name) = path.file_name().and_then(|n| n.to_str()) { + NAMED_SOURCE_FILES.contains(&name) + } else { + false + } +} diff --git a/framework/meta/src/cmd/reproducible_builds/source_unpack.rs b/framework/meta/src/cmd/reproducible_builds/source_unpack.rs new file mode 100644 index 0000000000..f15d548316 --- /dev/null +++ 
b/framework/meta/src/cmd/reproducible_builds/source_unpack.rs
@@ -0,0 +1,57 @@
use base64::{Engine, engine::general_purpose::STANDARD as BASE64};
use std::{
    fs,
    path::{Component, Path, PathBuf},
};

use crate::cli::SourceUnpackArgs;

use super::source_json_model::PackedSource;

/// Default unpack destination; mirrors the Python builder's `HARDCODED_UNWRAP_FOLDER`.
pub const HARDCODED_UNWRAP_FOLDER: &str = "/tmp/unwrapped";

/// CLI entry point for `sc-meta reproducible-build source-unpack`.
pub fn source_unpack(args: &SourceUnpackArgs) {
    let output_folder = args
        .output
        .as_deref()
        .map(PathBuf::from)
        .unwrap_or_else(|| PathBuf::from(HARDCODED_UNWRAP_FOLDER));
    let (folder, build_root) = unpack_packaged_src(Path::new(&args.packaged_src), &output_folder);
    println!("Unwrapped to: {}", folder.display());
    println!("Build root folder: {build_root}");
}

/// Unpacks a `.source.json` to `unwrap_folder` and returns:
/// - the canonicalized unwrap folder
/// - the `buildRootFolder` recorded in the JSON metadata
///
/// # Panics
/// Panics on read/parse/decode/write failures, and on any entry path that
/// would escape `unwrap_folder`.
pub fn unpack_packaged_src(src_path: &Path, unwrap_folder: &Path) -> (PathBuf, String) {
    let text = fs::read_to_string(src_path)
        .unwrap_or_else(|e| panic!("Failed to read {}: {e}", src_path.display()));
    let packed: PackedSource = serde_json::from_str(&text)
        .unwrap_or_else(|e| panic!("Failed to parse {}: {e}", src_path.display()));

    // Start from a clean slate so stale files from a previous unpack cannot
    // leak into the build.
    if unwrap_folder.exists() {
        fs::remove_dir_all(unwrap_folder)
            .unwrap_or_else(|e| panic!("Failed to clear {}: {e}", unwrap_folder.display()));
    }

    for entry in &packed.entries {
        // Security: the .source.json comes from an external party. Reject
        // absolute paths and `..` components so a malicious archive cannot
        // write outside `unwrap_folder` (zip-slip).
        let rel = Path::new(&entry.path);
        assert!(
            rel.components().all(|c| matches!(c, Component::Normal(_))),
            "Refusing to unpack suspicious entry path: {}",
            entry.path
        );
        let file_path = unwrap_folder.join(rel);
        if let Some(parent) = file_path.parent() {
            fs::create_dir_all(parent)
                .unwrap_or_else(|e| panic!("Failed to create {}: {e}", parent.display()));
        }
        let content = BASE64
            .decode(&entry.content)
            .unwrap_or_else(|e| panic!("Failed to decode entry '{}': {e}", entry.path));
        fs::write(&file_path, content)
            .unwrap_or_else(|e| panic!("Failed to write {}: {e}", file_path.display()));
    }

    println!(
        "Unpacked {} entries to: {}",
        packed.entries.len(),
        unwrap_folder.display()
    );

    let folder = unwrap_folder
        .canonicalize()
        .unwrap_or_else(|e| panic!("Failed to canonicalize {}: {e}", unwrap_folder.display()));
    (folder, packed.metadata.build_options.build_root_folder)
}
diff --git
a/framework/meta/src/cmd/upgrade/upgrade_common.rs b/framework/meta/src/cmd/upgrade/upgrade_common.rs index c3fd35f3a2..a1f63d195c 100644 --- a/framework/meta/src/cmd/upgrade/upgrade_common.rs +++ b/framework/meta/src/cmd/upgrade/upgrade_common.rs @@ -1,13 +1,17 @@ use super::{upgrade_print::*, upgrade_settings::UpgradeSettings}; use crate::{ - cmd::all::call_contract_meta, + cli::MetaLibArgs, + cmd::all::ContractMetaCall, folder_structure::{ CARGO_TOML_FILE_NAME, DirectoryType, FRAMEWORK_CRATE_NAMES, RelevantDirectory, }, version::FrameworkVersion, }; -use multiversx_sc_meta_lib::cargo_toml::{ - CARGO_TOML_DEPENDENCIES, CARGO_TOML_DEV_DEPENDENCIES, CargoTomlContents, VersionReq, +use multiversx_sc_meta_lib::{ + cargo_toml::{ + CARGO_TOML_DEPENDENCIES, CARGO_TOML_DEV_DEPENDENCIES, CargoTomlContents, VersionReq, + }, + cli::ContractCliAction, }; use ruplacer::{Console, DirectoryPatcher, Query, Settings}; use std::{ @@ -194,14 +198,13 @@ fn change_version_string( ); } -pub fn re_generate_wasm_crate(dir: &RelevantDirectory) { - if dir.dir_type != DirectoryType::Contract { +pub fn re_generate_wasm_crate(contract_crate: &RelevantDirectory) { + if contract_crate.dir_type != DirectoryType::Contract { return; } - call_contract_meta( - &dir.path, - &["abi".to_string(), "--no-abi-git-version".to_string()], - ); + + ContractMetaCall::new(ContractCliAction::Abi, &MetaLibArgs::default()) + .call_for_contract(contract_crate); } pub fn cargo_check(dir: &RelevantDirectory, settings: &UpgradeSettings) { diff --git a/framework/meta/src/folder_structure/relevant_directory.rs b/framework/meta/src/folder_structure/relevant_directory.rs index 4235cb1f13..7e9cbde9b7 100644 --- a/framework/meta/src/folder_structure/relevant_directory.rs +++ b/framework/meta/src/folder_structure/relevant_directory.rs @@ -1,6 +1,7 @@ use crate::version::FrameworkVersion; use multiversx_sc_meta_lib::cargo_toml::{CargoTomlContents, DependencyReference}; use std::{ + collections::HashMap, fs::{self, DirEntry}, 
path::{Path, PathBuf},
};
@@ -139,6 +140,52 @@ impl RelevantDirectories {
            }
        }
    }

    /// Returns a map of contract package name → list of paths for names that appear more than once.
    pub fn find_duplicate_contract_names(&self) -> HashMap<String, Vec<PathBuf>> {
        let mut seen: HashMap<String, Vec<PathBuf>> = HashMap::new();
        for dir in self.iter_contract_crates() {
            // Crates whose Cargo.toml cannot be loaded are skipped (best-effort).
            if let Some(cargo_toml) = load_cargo_toml_contents(&dir.path) {
                seen.entry(cargo_toml.package_name())
                    .or_default()
                    .push(dir.path.clone());
            }
        }
        // Keep only names that genuinely occur more than once.
        seen.retain(|_, paths| paths.len() > 1);
        seen
    }

    /// Prints a warning to stderr for each contract package name that appears in more than one crate.
    pub fn warn_duplicate_contract_names(&self) {
        let duplicates = self.find_duplicate_contract_names();
        if !duplicates.is_empty() {
            // Sort names so the warning order is deterministic.
            let mut names: Vec<_> = duplicates.keys().collect();
            names.sort();
            for name in &names {
                eprintln!("Warning: duplicate contract name '{name}' found in:");
                for path in &duplicates[*name] {
                    eprintln!("  - {}", path.display());
                }
            }
        }
    }

    /// Panics if any two contract crates share the same package name.
+ pub fn ensure_distinct_contract_names(&self) { + let duplicates = self.find_duplicate_contract_names(); + if !duplicates.is_empty() { + let mut msg = String::from("Duplicate contract names found:"); + let mut names: Vec<_> = duplicates.keys().collect(); + names.sort(); + for name in names { + msg.push_str(&format!("\n {name}:")); + for path in &duplicates[name] { + msg.push_str(&format!("\n - {}", path.display())); + } + } + panic!("{msg}"); + } + } } fn populate_directories(path: &Path, ignore: &[String], result: &mut Vec) { @@ -146,8 +193,9 @@ fn populate_directories(path: &Path, ignore: &[String], result: &mut Vec = read_dir.flatten().collect(); + children.sort_by_key(|e| e.file_name()); + for child in children { if can_continue_recursion(&child, ignore) { populate_directories(child.path().as_path(), ignore, result); } diff --git a/framework/scenario/Cargo.toml b/framework/scenario/Cargo.toml index be2ad79fe2..56119096da 100644 --- a/framework/scenario/Cargo.toml +++ b/framework/scenario/Cargo.toml @@ -27,7 +27,6 @@ contract-call-legacy = ["multiversx-sc/contract-call-legacy"] [dependencies] -base64 = "0.22" num-bigint = "0.4" num-traits = "0.2" hex = "0.4" diff --git a/framework/snippets/Cargo.toml b/framework/snippets/Cargo.toml index 86fe0a03d1..b74c6d623e 100644 --- a/framework/snippets/Cargo.toml +++ b/framework/snippets/Cargo.toml @@ -20,7 +20,6 @@ dapp = ["multiversx-sdk-dapp"] [dependencies] hex = "0.4" -base64 = "0.22" log = "0.4" env_logger = "0.11" futures = "0.3"