diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index fd78ec8..1c36215 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -8,18 +8,30 @@ jobs: docker: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - name: Set up QEMU + uses: docker/setup-qemu-action@v2 - - name: GitHub Login - uses: azure/docker-login@v1 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + id: buildx with: - login-server: docker.pkg.github.com + install: true + + - name: DockerHub Login + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }} + + - name: GitHub Container Registry Login + uses: docker/login-action@v2 + with: + registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Set Docker Package and Version - env: - ACTIONS_ALLOW_UNSECURE_COMMANDS: true + id: version run: | set +e _=$(echo "$GITHUB_REF" | grep "^refs/heads/") @@ -41,20 +53,18 @@ jobs: PKG=etheno fi set -e - echo "::set-env name=PKG::$PKG" - echo "::set-env name=VER::$VER" + echo "::set-output name=PKG::$PKG" + echo "::set-output name=VER::$VER" - - name: Docker Build - run: docker build -t $PKG:$VER . - - name: DockerHub Login - uses: azure/docker-login@v1 + - name: Docker build and push + uses: docker/build-push-action@v3 with: - login-server: registry.hub.docker.com - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }} - - - name: Docker Push - run: | - docker tag $PKG:$VER registry.hub.docker.com/trailofbits/$PKG:$VER - docker push registry.hub.docker.com/trailofbits/$PKG:$VER + push: true + target: final + platforms: | + linux/arm64/v8 + linux/amd64 + tags: | + trailofbits/${{ steps.version.outputs.PKG }}:${{ steps.version.outputs.VER }} + ghcr.io/${{ github.repository }}/${{ steps.version.outputs.PKG }}:${{ steps.version.outputs.VER }} diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index f88bdcd..683daba 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -5,7 +5,7 @@ name: Upload Python Package on: release: - types: [created] + types: [published] jobs: deploy: @@ -13,15 +13,16 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v1 + uses: actions/setup-python@v4 with: python-version: '3.x' - - name: Install dependencies + - name: Install and upgrade dependencies run: | python -m pip install --upgrade pip pip install setuptools wheel twine + pip install --upgrade setuptools wheel twine - name: Build and publish env: TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} diff --git a/.gitignore b/.gitignore index d7423af..be5fc5a 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,10 @@ *.pyc build dist +venv/ +*egg* +init.json +tests/drizzle/node_modules/ +tests/drizzle/artifacts/ +tests/drizzle/yarn.lock +tests/drizzle/cache/ \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index ed5c0ef..8367053 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,8 +2,24 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/). 
-## [Unreleased](https://github.com/trailofbits/etheno/compare/v0.2.3...HEAD) +## [Unreleased](https://github.com/trailofbits/etheno/compare/v0.3.0...HEAD) +## 0.3.0 - 2022-07-08 + +### Changed +- We are now using `ganache` instead of `ganache-cli` for running Ganache +- Using the latest Flask version +- Python 3.7.x is now the lowest allowed version + +### Fixed +- Fixed a bug that occurred when a `JSONRPCError` was being logged +- Fixed a bug that occurred when using non-hex strings during Ganache initialization +- Fixed a bug that prevented Ganache from being used from within a Docker container + +### Removed +- Removed Manticore integration and all associated command-line parameters +- Removed Echidna integration and all associated command-line parameters +- Removed `examples/` folder since it is outdated and uses deprecated features ## 0.2.3 — 2019-06-27 ### Added diff --git a/Dockerfile b/Dockerfile index 09123b8..40e0cbe 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,69 +1,85 @@ -FROM ubuntu:bionic -MAINTAINER Evan Sultanik - -RUN DEBIAN_FRONTEND=noninteractive \ - apt-get update && apt-get install -y --no-install-recommends \ +# syntax=docker/dockerfile:1.3 +FROM ubuntu:focal AS python-wheels +RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + build-essential \ + ca-certificates \ + cmake \ curl \ + python3-dev \ + python3-pip \ + python3-setuptools + +# Needed for rusty-rlp wheel +RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y +ENV PATH="/root/.cargo/bin:${PATH}" + +RUN --mount=type=bind,target=/etheno \ + cd /etheno && \ + pip3 install --no-cache-dir --upgrade pip setuptools && \ + pip3 wheel --no-cache-dir -w /wheels \ + https://github.com/cburgdorf/rusty-rlp/archive/refs/tags/0.1.15.tar.gz \ + . 
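The "non-hex strings during Ganache initialization" fix listed in the CHANGELOG above corresponds to the `--account` flag construction that appears later in this diff in `etheno/__main__.py`: Ganache v7+ expects the private key in each account string to be a `0x`-prefixed hex string. The following is only a minimal sketch of that formatting; the key and balance values are placeholders, not real accounts.

```python
# Illustration of the "--account=<private key>,<balance>" flags passed to Ganache.
# The key and balance below are placeholders; Etheno derives real ones from make_accounts().
private_key = 0x0123456789ABCDEF      # toy value, NOT a real key
balance_in_wei = 1000 * 10**18        # 1000 Ether expressed in wei

# Ganache v7+ wants both fields as 0x-prefixed hex strings, which is why
# __main__.py wraps the integer key in hex() before interpolating it.
account_flag = "--account=%s,0x%x" % (hex(private_key), balance_in_wei)
print(account_flag)  # --account=0x123456789abcdef,0x3635c9adc5dea00000
```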
+ + +FROM ubuntu:focal AS ganache +RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + build-essential \ ca-certificates \ + curl \ + gnupg \ + lsb-release +RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends nodejs +RUN npm install --omit=dev --location=global --prefix /opt/node ganache truffle + + +FROM ubuntu:focal AS final +LABEL org.opencontainers.image.authors="Evan Sultanik" + +RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ bash-completion \ - sudo \ + ca-certificates \ + curl \ + gpg-agent \ + libudev-dev \ + locales \ python3 \ - libpython3-dev \ python3-pip \ - python3-setuptools \ - git \ - build-essential \ software-properties-common \ - locales-all locales \ - libudev-dev \ - gpg-agent \ -&& apt-get clean \ + sudo \ && rm -rf /var/lib/apt/lists/* -RUN DEBIAN_FRONTEND=noninteractive add-apt-repository -y ppa:ethereum/ethereum && \ +# NOTE: solc was removed from the below command since the echidna integration is being removed +# If the solc option is added back, --platform linux-amd64 needs to be added to the `docker build` command for M1 machines +RUN add-apt-repository -y ppa:ethereum/ethereum && \ apt-get update && apt-get install -y --no-install-recommends \ - solc \ ethereum \ -&& apt-get clean \ && rm -rf /var/lib/apt/lists/* -RUN curl -sL https://deb.nodesource.com/setup_12.x | sudo -E bash - && sudo apt-get install -y --no-install-recommends nodejs && apt-get clean && rm -rf /var/lib/apt/lists/* - -RUN npm install --production -g ganache-cli truffle && npm --force cache clean - -# BEGIN Install Echidna - -COPY --from=trailofbits/echidna:latest /root/.local/bin/echidna-test /usr/local/bin/echidna-test - -RUN update-locale LANG=en_US.UTF-8 && locale-gen en_US.UTF-8 -ENV LANG=en_US.UTF-8 LANGUAGE=en_US:en LC_ALL=en_US.UTF-8 +RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash && \ + apt-get install -y --no-install-recommends nodejs \ +&& rm -rf /var/lib/apt/lists/* -# END Install Echidna +COPY --from=ganache /opt/node /usr/local/ -RUN useradd -m etheno -RUN usermod -aG sudo etheno -USER etheno -WORKDIR /home/etheno -USER root -WORKDIR /root +# BEGIN Install Etheno +RUN --mount=type=bind,target=/mnt/etheno \ + --mount=type=bind,target=/mnt/wheels,source=/wheels,from=python-wheels \ + cd /mnt/etheno && \ + pip3 install --no-cache-dir --upgrade pip setuptools && \ + pip3 install --no-cache-dir --no-index --find-links /mnt/wheels . -# Install Parity -RUN curl https://get.parity.io -L | bash +RUN useradd -m -G sudo etheno # Allow passwordless sudo for etheno RUN echo 'etheno ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers USER etheno -ENV HOME=/home/etheno PATH=$PATH:/home/etheno/.local/bin -WORKDIR /home/etheno +ENV HOME=/home/etheno -COPY --chown=etheno:etheno LICENSE setup.py etheno/ -COPY --chown=etheno:etheno etheno/*.py etheno/etheno/ -RUN cd etheno && \ - pip3 install --no-cache-dir --user '.[manticore]' && \ - cd .. && \ - rm -rf etheno - -COPY --chown=etheno:etheno examples examples/ +# Use the DOCKER env variable to set hostname accordingly +ENV DOCKER=1 +WORKDIR /home/etheno +# TODO: Need to copy tests and models CMD ["/bin/bash"] diff --git a/README.md b/README.md index 2f3a70b..46f4ea0 100644 --- a/README.md +++ b/README.md @@ -7,9 +7,9 @@
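The final image stage above sets `ENV DOCKER=1`, and the accompanying comment notes that Etheno uses this variable to set the hostname accordingly. The code that actually reads the variable is not part of this diff, so the snippet below is only a hypothetical sketch of how such a flag is commonly consumed, not Etheno's real implementation.

```python
import os

# Hypothetical: inside a container, a server usually has to bind to 0.0.0.0 so
# that published ports are reachable from the host; outside, localhost suffices.
IN_DOCKER = os.environ.get("DOCKER") == "1"
default_host = "0.0.0.0" if IN_DOCKER else "127.0.0.1"
print(f"JSON RPC server would bind to {default_host}")
```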
-Etheno is the Ethereum testing Swiss Army knife. It’s a JSON RPC multiplexer, analysis tool wrapper, and test integration tool. It eliminates the complexity of setting up analysis tools like [Manticore](https://github.com/trailofbits/manticore/) and [Echidna](https://github.com/trailofbits/echidna) on large, multi-contract projects. In particular, custom Manticore analysis scripts require less code, are simpler to write, and integrate with Truffle. +Etheno is the Ethereum testing Swiss Army knife. It’s a JSON RPC multiplexer, analysis tool wrapper, and test integration tool. It eliminates the complexity of setting up analysis tools like [Echidna](https://github.com/trailofbits/echidna) on large, multi-contract projects. -If you are a smart contract developer, you should use Etheno to test your contracts. If you are an Ethereum client developer, you should use Etheno to perform differential testing on your implementation. For example, Etheno is [capable of automatically reproducing](examples/ConstantinopleGasUsage) the Constantinople gas usage consensus bug that caused a fork on Ropsten. +If you are a smart contract developer, you should use Etheno to test your contracts. If you are an Ethereum client developer, you should use Etheno to perform differential testing on your implementation. Etheno is named after the Greek goddess [Stheno](https://en.wikipedia.org/wiki/Stheno), sister of Medusa, and mother of Echidna—which also happens to be the name of [our EVM property-based fuzz tester](https://github.com/trailofbits/echidna). @@ -19,14 +19,8 @@ Etheno is named after the Greek goddess [Stheno](https://en.wikipedia.org/wiki/S * API for filtering and modifying JSON RPC calls * Enables differential testing by sending JSON RPC sequences to multiple Ethereum clients * Deploy to and interact with multiple networks at the same time -* **Analysis Tool Wrapper**: Etheno provides a JSON RPC client for advanced analysis tools like [Manticore](https://github.com/trailofbits/manticore/) - * Lowers barrier to entry for using advanced analysis tools - * No need for custom scripts to set up account and contract state - * Analyze arbitrary transactions without Solidity source code * **Integration with Test Frameworks** like Ganache and Truffle * Run a local test network with a single command - * Use Truffle migrations to bootstrap Manticore analyses - * Symbolic semantic annotations within unit tests ## Quickstart @@ -35,12 +29,13 @@ Use our prebuilt Docker container to quickly install and try Etheno: ``` docker pull trailofbits/etheno docker run -it trailofbits/etheno - -# Run one of the examples -etheno@982abdc96791:~$ cd examples/BrokenMetaCoin/ -etheno@982abdc96791:~/examples/BrokenMetaCoin$ etheno --truffle --ganache --manticore --manticore-max-depth 2 --manticore-script ExploitMetaCoinManticoreScript.py ``` +**NOTE:** Many of Etheno's capabilities will require publishing one or more ports and persisting data using volumes as part of the `docker run` command. 
+- To learn about publishing ports, click [here](https://docs.docker.com/storage/volumes/) +- To learn more about persisting data using volumes, click [here](https://docs.docker.com/storage/volumes/) + + Alternatively, natively install Etheno in a few shell commands: ``` @@ -52,13 +47,35 @@ pip3 install --user etheno # Use the Etheno CLI cd /path/to/a/truffle/project -etheno --manticore --ganache --truffle +etheno --ganache --truffle ``` ## Usage Etheno can be used in many different ways and therefore has numerous command-line argument combinations. +### Ganache Integration + +A Ganache instance can automatically be run within Etheno: +``` +etheno --ganache +``` + +* `--ganache-port` will set the port on which Ganache is run; if omitted, Etheno will choose the lowest port higher than the port on which Etheno’s JSON RPC server is running +* `--ganache-args` lets you pass additional arguments to Ganache +* `--accounts` or `-a` sets the number of accounts to create in Ganache (default is 10) +* `--balance` or `-b` sets the default balance (in Ether) to seed to each Ganache account (default is 1000.0) +* `--gas-price` or `-c` sets the default gas price in wei for Ganache (default is 20_000_000_000) + +Running a Ganache instance via Etheno can be used to deploy large, multi-contract projects in tandem with Echidna. To learn more on how to use Echidna and Ganache together, click [here](https://github.com/crytic/building-secure-contracts/blob/master/program-analysis/echidna/end-to-end-testing.md). + + +**NOTE:** We recommend using the latest version of Ganache (v7.3.2) and Node 16.x. After the upstream bug (see below) is fixed, the Ganache package should be upgraded. + + +**NOTE:** Currently, there is an upstream bug in the latest version of Ganache (v7.3.2) that prevents the Etheno integration from working if the contract size that is being tested is very large (https://github.com/trufflesuite/ganache/issues/3332). + + ### JSON RPC Server and Multiplexing This command starts a JSON RPC server and forwards all messages to the given clients: @@ -70,8 +87,8 @@ etheno https://client1.url.com:1234/ https://client2.url.com:8545/ http://client * `--port` or `-p` allows you to specify a port on which to run Etheno’s JSON RPC server (default is 8545) * `--run-publicly` allows incoming JSON RPC connections from external computers on the network * `--debug` will run a web-based interactive debugger in the event that an internal Etheno client throws an exception while processing a JSON RPC call; this should _never_ be used in conjunction with `--run-publicly` -* `--master` or `-s` will set the “master” client, which will be used for synchronizing with Etheno clients like Manticore. If a master is not explicitly provided, it defaults to the first client listed. -* `--raw`, when prefixed before a client URL, will cause Etheno to auto-sign all transactions and submit then to the client as raw transactions +* `--master` or `-s` will set the “master” client, which will be used for synchronizing with Etheno clients. If a master is not explicitly provided, it defaults to the first client listed. 
+* `--raw`, when prefixed before a client URL, will cause Etheno to auto-sign all transactions and submit them to the client as raw transactions ### Geth and Parity Integration @@ -85,24 +102,6 @@ The network ID of each client will default to 0x657468656E6F (equal to the strin EIP and hard fork block numbers can be set within a custom genesis.json as usual, or they may be specified as command-line options such as `--constantinople`. -### Ganache Integration - -A Ganache instance can automatically be run within Etheno: -``` -etheno --ganache -``` - -* `--ganache-port` will set the port on which Ganache is run; if omitted, Etheno will choose the lowest port higher than the port on which Etheno’s JSON RPC server is running -* `--ganache-args` lets you pass additional arguments to Ganache -* `--accounts` or `-a` sets the number of accounts to create in Ganache (default is 10) -* `--balance` or `-b` sets the default balance (in Ether) to seed to each Ganache account (default is 100.0) -* `--gas-price` or `-c` sets the default gas price for Ganache (default is 20000000000) - -**NOTE**: As of September, 2020, there is -[an upstream bug in `ganache-cli` that prevents it from being run on Node version 14](https://github.com/trufflesuite/ganache-cli/issues/732). -If you intend to use Truffle and/or Ganache, we suggest using Node 12 (_e.g._, -with [`nvm`](https://github.com/nvm-sh/nvm)). - ### Differential Testing Whenever two or more clients are run within Etheno, the differential @@ -112,28 +111,6 @@ usage differences. A report is printed when Etheno exits. This plugin can be disabled with the `--no-differential-testing` option. -### Property-Based Fuzz Testing - -Echidna can be run to fuzz test the clients, which is useful for differential testing: -``` -etheno --echidna -``` -By default, Echidna deploys a generic fuzz testing contract to all clients, enumerates a minimal set of transactions that maximize the coverage of the contract, sends those transactions to the clients, and then exits. - -* `--fuzz-limit` limits the number of transactions that Echidna will emit -* `--fuzz-contract` lets the user specify a custom contract for Echidna to deploy and fuzz - -### Manticore Client - -Manticore—which, by itself, does not implement a JSON RPC interface—can be run as an Etheno client, synchronizing its accounts with Etheno’s master client and symbolically executing all transactions sent to Etheno. -``` -etheno --manticore -``` -This alone will not run any Manticore analyses; they must either be run manually, or automated through [the `--truffle` command](#truffle-integration); - -* `--manticore-verbosity` sets Manticore’s logging verbosity (default is 3) -* `--manticore-max-depth` sets the maximum state depth for Manticore to explore; if omitted, Manticore will have no depth limit - ### Truffle Integration Truffle migrations can automatically be run within a Truffle project: @@ -141,20 +118,6 @@ Truffle migrations can automatically be run within a Truffle project: etheno --truffle ``` -When combined with the `--manticore` option, this will automatically run Manticore’s default analyses on all contracts created once the Truffle migration completes: -``` -etheno --truffle --manticore -``` - -This requires a master JSON RPC client, so will most often be used in conjunction with Ganache. 
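As a concrete illustration of the differential testing described above: conceptually, Etheno forwards the same JSON RPC call to every connected client and compares the answers, and the `DifferentialTester` plugin later in this diff reports mismatches such as gas-usage differences when Etheno exits. The sketch below performs that comparison by hand for a single `eth_estimateGas` call; the two endpoint URLs and the transaction fields are placeholders, and this is not how the plugin is implemented internally.

```python
import json
from urllib.request import Request, urlopen

def estimate_gas(endpoint: str, tx: dict) -> int:
    # Same JSON RPC request shape as the estimate_gas() helper in etheno/client.py below.
    payload = {"id": 1, "jsonrpc": "2.0", "method": "eth_estimateGas", "params": [tx]}
    request = Request(
        endpoint,
        data=json.dumps(payload).encode("utf8"),
        headers={"Content-type": "application/json"},
    )
    return int(json.loads(urlopen(request).read())["result"], 16)

# Placeholder endpoints and transaction; Etheno normally multiplexes these itself.
tx = {
    "from": "0x0000000000000000000000000000000000000001",
    "to": "0x0000000000000000000000000000000000000002",
    "value": "0x0",
}
gas_a = estimate_gas("http://127.0.0.1:8545/", tx)
gas_b = estimate_gas("http://127.0.0.1:8546/", tx)
if gas_a != gas_b:
    print(f"gas usage differs between clients: {gas_a} vs {gas_b}")
```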
If a local Ganache server is not running, you can simply add that to the command: -``` -etheno --truffle --manticore --ganache -``` - -If you would like to run a custom Manticore script instead of the standard Manticore analysis and detectors, it can be specified using the `--manticore-script` or `-r` command. - -This script does not need to import Manticore or create a `ManticoreEVM` object; Etheno will run the script with a global variable called `manticore` that already contains all of the accounts and contracts automatically provisioned. See the [`BrokenMetaCoin` Manticore script](examples/BrokenMetaCoin/ExploitMetaCoinManticoreScript.py) for an example. - Additional arguments can be passed to Truffle using `--truffle-args`. ### Logging @@ -173,27 +136,18 @@ saved: ## Requirements -* Python 3.6 or newer -* [Manticore](https://github.com/trailofbits/manticore/) release 0.2.2 or newer -* [Flask](http://flask.pocoo.org/), which is used to run the JSON RPC server +* Python 3.7 or newer ### Optional Requirements -* [Truffle and Ganache](https://truffleframework.com/) for their associated integrations +* [Node](https://nodejs.org/en/) 16.x or newer to install various integrations +* [Ganache](https://www.npmjs.com/package/ganache) 7.3.2 or newer for its associated integrations +* [Truffle](https://www.npmjs.com/package/truffle) for its associated integrations * [Geth](https://github.com/ethereum/go-ethereum) and/or [Parity](https://github.com/paritytech/parity-ethereum), if you would like to have Etheno run them -* [Echidna](https://github.com/trailofbits/echidna), for smart contract fuzzing and differential testing - * Note that Etheno currently requires the features in the [`dev-no-hedgehog` branch](https://github.com/trailofbits/echidna/tree/dev-no-hedgehog); Etheno will prompt you to automatically install this when you try and run it the first time - * Running Echidna also requires the [`solc`](https://github.com/ethereum/solidity) compiler ## Getting Help Feel free to stop by our [Slack channel](https://empirehacking.slack.com/) for help on using or extending Etheno. -Documentation is available in several places: - - * The [wiki](https://github.com/trailofbits/etheno/wiki) contains some basic information about getting started with Etheno and contributing - - * The [examples](examples) directory has some very minimal examples that showcase API features - ## License Etheno is licensed and distributed under the [AGPLv3](LICENSE) license. [Contact us](mailto:opensource@trailofbits.com) if you’re looking for an exception to the terms. diff --git a/docker/install-libff.sh b/docker/install-libff.sh deleted file mode 100755 index da8225f..0000000 --- a/docker/install-libff.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh - -set -eux - -if ls /usr/local/lib | grep -q libff; then exit 0; fi - -git clone https://github.com/scipr-lab/libff --recursive -cd libff -git submodule init && git submodule update -ARGS="-DWITH_PROCPS=OFF" -CXXFLAGS="" -if [ "$(uname)" = "Darwin" ]; then - export LDFLAGS=-L/usr/local/opt/openssl/lib - export CPPFLAGS=-I/usr/local/opt/openssl/include - export CXXFLAGS=-I/usr/local/opt/openssl/include - ARGS="$ARGS -DOPENSSL_INCLUDE_DIR=/usr/local/opt/openssl/include/openssl -DCURVE=ALT_BN128" - sed -i '' 's/STATIC/SHARED/' libff/CMakeLists.txt # Fix GHC segfaults from hell (idk why) - sed -i '' 's/STATIC/SHARED/' depends/CMakeLists.txt -fi -mkdir build -cd build -CXXFLAGS="-fPIC $CXXFLAGS" cmake $ARGS .. -make && sudo make install -cd ../.. 
&& rm -rf libff diff --git a/etheno/__main__.py b/etheno/__main__.py index 06d2030..f459428 100644 --- a/etheno/__main__.py +++ b/etheno/__main__.py @@ -7,115 +7,266 @@ from .client import RpcProxyClient from .differentials import DifferentialTester -from .echidna import echidna_exists, EchidnaPlugin, install_echidna from .etheno import app, EthenoView, GETH_DEFAULT_RPC_PORT, ETHENO, VERSION_NAME from .genesis import Account, make_accounts, make_genesis from .jsonrpc import EventSummaryExportPlugin, JSONRPCExportPlugin from .synchronization import AddressSynchronizingClient, RawTransactionClient -from .utils import clear_directory, decode_value, find_open_port, format_hex_address, ynprompt +from .utils import ( + clear_directory, + decode_value, + find_open_port, + format_hex_address, + ynprompt, +) from . import ganache from . import geth from . import logger from . import parity from . import truffle +from .precompiler import Precompiler -try: - from .manticoreclient import ManticoreClient - from . import manticoreutils - MANTICORE_INSTALLED = True -except ModuleNotFoundError: - MANTICORE_INSTALLED = False +# Constant for converting whole units to wei +ETHER = 1e18 def main(argv=None): - parser = argparse.ArgumentParser(description='An Ethereum JSON RPC multiplexer and Manticore wrapper') - parser.add_argument('--debug', action='store_true', default=False, - help='Enable debugging from within the web server') - parser.add_argument('--run-publicly', action='store_true', default=False, - help='Allow the web server to accept external connections') - parser.add_argument('-p', '--port', type=int, default=GETH_DEFAULT_RPC_PORT, - help='Port on which to run the JSON RPC webserver (default=%d)' % GETH_DEFAULT_RPC_PORT) - parser.add_argument('-a', '--accounts', type=int, default=None, - help='Number of accounts to create in the client (default=10)') - parser.add_argument('-b', '--balance', type=float, default=100.0, - help='Default balance (in Ether) to seed to each account (default=100.0)') - parser.add_argument('-c', '--gas-price', type=int, default=None, - help='Default gas price (default=20000000000)') - parser.add_argument('-i', '--network-id', type=int, default=None, - help='Specify a network ID (default is the network ID of the master client)') - parser.add_argument('-m', '--manticore', action='store_true', default=False, - help='Run all transactions through manticore') - parser.add_argument('-r', '--manticore-script', type=argparse.FileType('rb'), default=None, - help='Instead of running automated detectors and analyses, run this Manticore script') - parser.add_argument('--manticore-max-depth', type=int, default=None, - help='Maximum state depth for Manticore to explore') - parser.add_argument('-e', '--echidna', action='store_true', default=False, - help='Fuzz the clients using transactions generated by Echidna') - parser.add_argument('--fuzz-limit', type=int, default=None, - help='The maximum number of transactions for Echidna to generate (default=unlimited)') - parser.add_argument('--fuzz-contract', type=str, default=None, - help='Path to a Solidity contract to have Echidna use for fuzzing (default is to use a builtin ' - 'generic Echidna fuzzing contract)') - parser.add_argument('-t', '--truffle', action='store_true', default=False, - help='Run the truffle migrations in the current directory and exit') - parser.add_argument('--truffle-cmd', type=str, default='truffle', help='Command to run truffle (default=truffle)') - parser.add_argument('--truffle-args', type=str, default='migrate', 
- help='Arguments to pass to truffle (default=migrate)') - parser.add_argument('-g', '--ganache', action='store_true', default=False, - help='Run Ganache as a master JSON RPC client (cannot be used in conjunction with --master)') + parser = argparse.ArgumentParser( + description="An Ethereum JSON RPC multiplexer, differential fuzzer, and test framework integration tool." + ) + parser.add_argument( + "--debug", + action="store_true", + default=False, + help="Enable debugging from within the web server", + ) + parser.add_argument( + "--run-publicly", + action="store_true", + default=False, + help="Allow the web server to accept external connections", + ) + parser.add_argument( + "-p", + "--port", + type=int, + default=GETH_DEFAULT_RPC_PORT, + help="Port on which to run the JSON RPC webserver (default=%d)" + % GETH_DEFAULT_RPC_PORT, + ) + parser.add_argument( + "-a", + "--accounts", + type=int, + default=10, + help="Number of accounts to create in the client (default=10)", + ) + parser.add_argument( + "-b", + "--balance", + type=float, + default=1000.0, + help="Default balance (in Ether) to seed to each account (default=100.0)", + ) + # TODO: do we really need a gas price specified for ganache? is there a use case here? + parser.add_argument( + "-c", + "--gas-price", + type=int, + default=20000000000, + help="Default gas price (default=20000000000)", + ) + # TODO: networkID can have a default value it seems like + parser.add_argument( + "-i", + "--network-id", + type=int, + default=None, + help="Specify a network ID (default is the network ID of the master client)", + ) + parser.add_argument( + "-t", + "--truffle", + action="store_true", + default=False, + help="Run the truffle migrations in the current directory and exit", + ) + parser.add_argument( + "--truffle-cmd", + type=str, + default="truffle", + help="Command to run truffle (default=truffle)", + ) + parser.add_argument( + "--truffle-args", + type=str, + default="migrate", + help="Arguments to pass to truffle (default=migrate)", + ) + parser.add_argument( + "-g", + "--ganache", + action="store_true", + default=False, + help="Run Ganache as a master JSON RPC client (cannot be used in conjunction with --master)", + ) + # TODO: This cmd-line argument is error-prone and should probably be removed. Commenting it out for now + """ parser.add_argument('--ganache-cmd', type=str, default=None, help='Specify a command that runs Ganache ' - '(default="/usr/bin/env ganache-cli")') - parser.add_argument('--ganache-args', type=str, default=None, - help='Additional arguments to pass to Ganache') - parser.add_argument('--ganache-port', type=int, default=None, - help='Port on which to run Ganache (defaults to the closest available port to the port ' - 'specified with --port plus one)') - parser.add_argument('-go', '--geth', action='store_true', default=False, help='Run Geth as a JSON RPC client') - parser.add_argument('--geth-port', type=int, default=None, - help='Port on which to run Geth (defaults to the closest available port to the port specified ' - 'with --port plus one)') - parser.add_argument('-pa', '--parity', action='store_true', default=False, help='Run Parity as a JSON RPC client') - parser.add_argument('--parity-port', type=int, default=None, - help='Port on which to run Parity (defaults to the closest available port to the port ' - 'specified with --port plus one)') - parser.add_argument('-j', '--genesis', type=str, default=None, - help='Path to a genesis.json file to use for initializing clients. 
Any genesis-related options ' - 'like --network-id will override the values in this file. If --accounts is greater than ' - 'zero, that many new accounts will be appended to the accounts in the genesis file.') - parser.add_argument('--save-genesis', type=str, default=None, - help="Save a genesis.json file to reproduce the state of this run. Note that this genesis file " - "will include all known private keys for the genesis accounts, so use this with caution.") - parser.add_argument('--constantinople-block', type=int, default=None, - help='The block in which to enable Constantinople EIPs (default=do not enable Constantinople)') - parser.add_argument('--constantinople', action='store_true', default=False, - help='Enables Constantinople EIPs; equivalent to `--constantinople-block 0`') - parser.add_argument('--no-differential-testing', action='store_false', dest='run_differential', default=True, - help='Do not run differential testing, which is run by default') - parser.add_argument('-l', '--log-level', type=str.upper, choices={'CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'}, - default='INFO', help='Set Etheno\'s log level (default=INFO)') - parser.add_argument('--log-file', type=str, default=None, - help='Path to save all log output to a single file') - parser.add_argument('--log-dir', type=str, default=None, - help='Path to a directory in which to save all log output, divided by logging source') - parser.add_argument('-d', '--dump-jsonrpc', type=str, default=None, - help='Path to a JSON file in which to dump all raw JSON RPC calls; if `--log-dir` is provided, ' - 'the raw JSON RPC calls will additionally be dumped to `rpc.json` in the log directory.') - parser.add_argument('-x', '--export-summary', type=str, default=None, - help='Path to a JSON file in which to export an event summary') - parser.add_argument('-v', '--version', action='store_true', default=False, help='Print version information and exit') - parser.add_argument('client', type=str, nargs='*', - help='JSON RPC client URLs to multiplex; if no client is specified for --master, the first ' - 'client in this list will default to the master (format="http://foo.com:8545/")') - parser.add_argument('-s', '--master', type=str, default=None, help='A JSON RPC client to use as the master ' - '(format="http://foo.com:8545/")') - parser.add_argument('--raw', type=str, nargs='*', action='append', - help='JSON RPC client URLs to multiplex that do not have any local accounts; Etheno will ' - 'automatically use auto-generated accounts with known private keys, pre-sign all ' - 'transactions, and only use eth_sendRawTransaction') + '(default="/usr/bin/env ganache")') + """ + parser.add_argument( + "--ganache-args", + type=str, + default=None, + help="Additional arguments to pass to Ganache", + ) + parser.add_argument( + "--ganache-port", + type=int, + default=None, + help="Port on which to run Ganache (defaults to the closest available port to the port " + "specified with --port plus one)", + ) + parser.add_argument( + "-arb", + "--deploy-arbitrum-contracts", + action="store_true", + default=False, + help="Deploy ArbSys and ArbRetryableTx", # TODO: Improve description down the line + ) + parser.add_argument( + "-go", + "--geth", + action="store_true", + default=False, + help="Run Geth as a JSON RPC client", + ) + parser.add_argument( + "--geth-port", + type=int, + default=None, + help="Port on which to run Geth (defaults to the closest available port to the port specified " + "with --port plus one)", + ) + parser.add_argument( + "-pa", + 
"--parity", + action="store_true", + default=False, + help="Run Parity as a JSON RPC client", + ) + parser.add_argument( + "--parity-port", + type=int, + default=None, + help="Port on which to run Parity (defaults to the closest available port to the port " + "specified with --port plus one)", + ) + parser.add_argument( + "-j", + "--genesis", + type=str, + default=None, + help="Path to a genesis.json file to use for initializing clients. Any genesis-related options " + "like --network-id will override the values in this file. If --accounts is greater than " + "zero, that many new accounts will be appended to the accounts in the genesis file.", + ) + parser.add_argument( + "--save-genesis", + type=str, + default=None, + help="Save a genesis.json file to reproduce the state of this run. Note that this genesis file " + "will include all known private keys for the genesis accounts, so use this with caution.", + ) + parser.add_argument( + "--constantinople-block", + type=int, + default=None, + help="The block in which to enable Constantinople EIPs (default=do not enable Constantinople)", + ) + parser.add_argument( + "--constantinople", + action="store_true", + default=False, + help="Enables Constantinople EIPs; equivalent to `--constantinople-block 0`", + ) + parser.add_argument( + "--no-differential-testing", + action="store_false", + dest="run_differential", + default=True, + help="Do not run differential testing, which is run by default", + ) + parser.add_argument( + "-l", + "--log-level", + type=str.upper, + choices={"CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"}, + default="INFO", + help="Set Etheno's log level (default=INFO)", + ) + parser.add_argument( + "--log-file", + type=str, + default=None, + help="Path to save all log output to a single file", + ) + parser.add_argument( + "--log-dir", + type=str, + default=None, + help="Path to a directory in which to save all log output, divided by logging source", + ) + parser.add_argument( + "-d", + "--dump-jsonrpc", + type=str, + default=None, + help="Path to a JSON file in which to dump all raw JSON RPC calls; if `--log-dir` is provided, " + "the raw JSON RPC calls will additionally be dumped to `rpc.json` in the log directory.", + ) + parser.add_argument( + "-x", + "--export-summary", + type=str, + default=None, + help="Path to a JSON file in which to export an event summary", + ) + parser.add_argument( + "-v", + "--version", + action="store_true", + default=False, + help="Print version information and exit", + ) + parser.add_argument( + "client", + type=str, + nargs="*", + help="JSON RPC client URLs to multiplex; if no client is specified for --master, the first " + 'client in this list will default to the master (format="http://foo.com:8545/")', + ) + parser.add_argument( + "-s", + "--master", + type=str, + default=None, + help="A JSON RPC client to use as the master " + '(format="http://foo.com:8545/")', + ) + parser.add_argument( + "--raw", + type=str, + nargs="*", + action="append", + help="JSON RPC client URLs to multiplex that do not have any local accounts; Etheno will " + "automatically use auto-generated accounts with known private keys, pre-sign all " + "transactions, and only use eth_sendRawTransaction", + ) if argv is None: argv = sys.argv - + args = parser.parse_args(argv[1:]) if args.version: @@ -132,7 +283,10 @@ def main(argv=None): if args.log_dir: if os.path.exists(args.log_dir): - if not ynprompt("Logging path `%s` already exists! Would you like to overwrite it? 
[yN] " % args.log_dir): + if not ynprompt( + "Logging path `%s` already exists! Would you like to overwrite it? [yN] " + % args.log_dir + ): sys.exit(1) elif os.path.isfile(args.log_dir): os.remove(args.log_dir) @@ -140,22 +294,32 @@ def main(argv=None): # don't delete the directory, just its contents # we can't use shutil.rmtree here, because that deletes the directory and also it doesn't work on # symlinks - if not ynprompt("We are about to delete the contents of `%s`. Are you sure? [yN] " % args.log_dir): + if not ynprompt( + "We are about to delete the contents of `%s`. Are you sure? [yN] " + % args.log_dir + ): sys.exit(1) abspath = os.path.abspath(args.log_dir) - if abspath == '' or abspath == '/' or abspath.endswith('://') or abspath.endswith(':\\\\'): - print("Wait a sec, you want me to delete `%s`?!\nThat looks too dangerous.\nIf I were to do that, " - "you'd file an angry GitHub issue complaining that I deleted your hard drive.\nYou're on " - "your own deleting this directory!" % abspath) + if ( + abspath == "" + or abspath == "/" + or abspath.endswith("://") + or abspath.endswith(":\\\\") + ): + print( + "Wait a sec, you want me to delete `%s`?!\nThat looks too dangerous.\nIf I were to do that, " + "you'd file an angry GitHub issue complaining that I deleted your hard drive.\nYou're on " + "your own deleting this directory!" % abspath + ) sys.exit(1) clear_directory(args.log_dir) - + ETHENO.logger.save_to_directory(args.log_dir) if not args.log_file: # Also create a unified log in the log dir: - ETHENO.logger.save_to_file(os.path.join(args.log_dir, 'Complete.log')) + ETHENO.logger.save_to_file(os.path.join(args.log_dir, "Complete.log")) - ETHENO.add_plugin(JSONRPCExportPlugin(os.path.join(args.log_dir, 'rpc.json'))) + ETHENO.add_plugin(JSONRPCExportPlugin(os.path.join(args.log_dir, "rpc.json"))) if args.dump_jsonrpc is not None: ETHENO.add_plugin(JSONRPCExportPlugin(args.dump_jsonrpc)) @@ -163,59 +327,55 @@ def main(argv=None): if args.export_summary is not None: ETHENO.add_plugin(EventSummaryExportPlugin(args.export_summary)) - # First, see if we need to install Echidna: - if args.echidna: - if not echidna_exists(): - if not ynprompt('Echidna does not appear to be installed.\nWould you like to have Etheno attempt to ' - 'install it now? [yN] '): - sys.exit(1) - install_echidna() - if not echidna_exists(): - ETHENO.logger.error('Etheno failed to install Echidna. 
Please install it manually ' - 'https://github.com/trailofbits/echidna') - sys.exit(1) - if args.genesis is None: # Set defaults since no genesis was supplied if args.accounts is None: args.accounts = 10 if args.gas_price is None: args.gas_price = 20000000000 - + accounts = [] + # TODO: args.gas_price is not set if a genesis file is provided if args.genesis: - with open(args.genesis, 'rb') as f: + with open(args.genesis, "rb") as f: genesis = json.load(f) - if 'config' not in genesis: - genesis['config'] = {} - if 'alloc' not in genesis: - genesis['alloc'] = {} + if "config" not in genesis: + genesis["config"] = {} + if "alloc" not in genesis: + genesis["alloc"] = {} if args.network_id is None: - args.network_id = genesis['config'].get('chainId', None) + args.network_id = genesis["config"].get("chainId", None) if args.constantinople_block is None: - args.constantinople_block = genesis['config'].get('constantinopleBlock', None) + args.constantinople_block = genesis["config"].get( + "constantinopleBlock", None + ) args.constantinople = args.constantinople_block is not None - for addr, bal in genesis['alloc'].items(): + for addr, bal in genesis["alloc"].items(): pkey = None - if 'privateKey' in bal: - pkey = bal['privateKey'] - accounts.append(Account(address=int(addr, 16), balance=decode_value(bal['balance']), - private_key=decode_value(pkey))) + if "privateKey" in bal: + pkey = bal["privateKey"] + accounts.append( + Account( + address=int(addr, 16), + balance=decode_value(bal["balance"]), + private_key=decode_value(pkey), + ) + ) else: # We will generate it further below once we've resolved all of the parameters genesis = None - accounts += make_accounts(args.accounts, default_balance=int(args.balance * 1000000000000000000)) + accounts += make_accounts(args.accounts, default_balance=int(args.balance * ETHER)) if genesis is not None: # add the new accounts to the genesis - for account in accounts[len(genesis['alloc']):]: - genesis['alloc'][format_hex_address(account.address)] = { - 'balance': "%d" % account.balance, - 'privateKey': format_hex_address(account.private_key), - 'comment': '`privateKey` and `comment` are ignored. In a real chain, the private key should _not_ be ' - 'stored!' + for account in accounts[len(genesis["alloc"]) :]: + genesis["alloc"][format_hex_address(account.address)] = { + "balance": "%d" % account.balance, + "privateKey": format_hex_address(account.private_key), + "comment": "`privateKey` and `comment` are ignored. 
In a real chain, the private key should _not_ be " + "stored!", } if args.raw is None: @@ -223,25 +383,38 @@ def main(argv=None): else: args.raw = [r[0] for r in args.raw] + # TODO: This if/elif/else logic is flawed - needs rework if args.ganache and args.master: parser.print_help() - sys.stderr.write('\nError: You cannot specify both --ganache and --master at the same time!\n') - sys.exit(1) + sys.stderr.write( + "\nError: You cannot specify both --ganache and --master at the same time!\n" + ) + sys.exit(1) elif args.ganache: if args.ganache_port is None: args.ganache_port = find_open_port(args.port + 1) if args.network_id is None: - args.network_id = 0x657468656E6F # 'etheno' in hex - - ganache_accounts = ["--account=%s,0x%x" % (acct.private_key, acct.balance) for acct in accounts] - - ganache_args = ganache_accounts + ['-g', str(args.gas_price), '-i', str(args.network_id)] + args.network_id = 0x657468656E6F # 'etheno' in hex + + # Have to use hex() so that string is hex-encoded (prefixed with 0x) that is necessary for Ganache v7.0+ + # https://github.com/trufflesuite/ganache/discussions/1075 + ganache_accounts = [ + "--account=%s,0x%x" % (hex(acct.private_key), acct.balance) + for acct in accounts + ] + + ganache_args = ganache_accounts + [ + "-g", + str(args.gas_price), + "-i", + str(args.network_id), + ] if args.ganache_args is not None: ganache_args += shlex.split(args.ganache_args) - - ganache_instance = ganache.Ganache(cmd=args.ganache_cmd, args=ganache_args, port=args.ganache_port) + # Removed cmd argument + ganache_instance = ganache.Ganache(args=ganache_args, port=args.ganache_port) ETHENO.master_client = ganache.GanacheClient(ganache_instance) @@ -249,32 +422,40 @@ def main(argv=None): elif args.master: ETHENO.master_client = AddressSynchronizingClient(RpcProxyClient(args.master)) elif args.client and not args.geth and not args.parity: - ETHENO.master_client = AddressSynchronizingClient(RpcProxyClient(args.client[0])) + ETHENO.master_client = AddressSynchronizingClient( + RpcProxyClient(args.client[0]) + ) args.client = args.client[1:] elif args.raw and not args.geth and not args.parity: - ETHENO.master_client = RawTransactionClient(RpcProxyClient(args.raw[0]), accounts) + ETHENO.master_client = RawTransactionClient( + RpcProxyClient(args.raw[0]), accounts + ) args.raw = args.raw[1:] - + if args.network_id is None: if ETHENO.master_client: - args.network_id = int(ETHENO.master_client.post({ - 'id': 1, - 'jsonrpc': '2.0', - 'method': 'net_version' - })['result'], 16) + args.network_id = int( + ETHENO.master_client.post( + {"id": 1, "jsonrpc": "2.0", "method": "net_version"} + )["result"], + 16, + ) else: - args.network_id = 0x657468656E6F # 'etheno' in hex + args.network_id = 0x657468656E6F # 'etheno' in hex if genesis is None: - genesis = make_genesis(network_id=args.network_id, accounts=accounts, - constantinople_block=args.constantinople_block) + genesis = make_genesis( + network_id=args.network_id, + accounts=accounts, + constantinople_block=args.constantinople_block, + ) else: # Update the genesis with any overridden values - genesis['config']['chainId'] = args.network_id + genesis["config"]["chainId"] = args.network_id if args.save_genesis: - with open(args.save_genesis, 'wb') as f: - f.write(json.dumps(genesis).encode('utf-8')) + with open(args.save_genesis, "wb") as f: + f.write(json.dumps(genesis).encode("utf-8")) ETHENO.logger.info("Saved genesis to %s" % args.save_genesis) if args.geth: @@ -285,7 +466,9 @@ def main(argv=None): geth_instance.etheno = ETHENO for account 
in accounts: # TODO: Make some sort of progress bar here - geth_instance.logger.info("Unlocking Geth account %s" % format_hex_address(account.address, True)) + geth_instance.logger.info( + "Unlocking Geth account %s" % format_hex_address(account.address, True) + ) geth_instance.import_account(account.private_key) geth_instance.start(unlock_accounts=True) if ETHENO.master_client is None: @@ -309,7 +492,7 @@ def main(argv=None): if ETHENO.master_client is None: ETHENO.master_client = parity_instance else: - ETHENO.add_client(AddressSynchronizingClient(parity_instance)) + ETHENO.add_client(AddressSynchronizingClient(parity_instance)) for client in args.client: ETHENO.add_client(AddressSynchronizingClient(RpcProxyClient(client))) @@ -317,35 +500,10 @@ def main(argv=None): for client in args.raw: ETHENO.add_client(RawTransactionClient(RpcProxyClient(client), accounts)) - manticore_client = None - if args.manticore: - if not MANTICORE_INSTALLED: - ETHENO.logger.error('Manticore is not installed! Running Etheno with Manticore requires Manticore version ' - '0.2.2 or newer. Reinstall Etheno with Manticore support by running ' - '`pip3 install --user \'etheno[manticore]\'`, or install Manticore separately with ' - '`pip3 install --user \'manticore\'`') - sys.exit(1) - new_enough = manticoreutils.manticore_is_new_enough() - if new_enough is None: - ETHENO.logger.warning(f"Unknown Manticore version {manticoreutils.manticore_version()}; it may not be new " - "enough to have Etheno support!") - elif not new_enough: - ETHENO.logger.error(f"The version of Manticore installed is {manticoreutils.manticore_version()}, but the " - f"minimum required version with Etheno support is 0.2.2. We will try to proceed, but " - f"things might not work correctly! Please upgrade Manticore.") - manticore_client = ManticoreClient() - ETHENO.add_client(manticore_client) - if args.manticore_max_depth is not None: - manticore_client.manticore.register_detector(manticoreutils.StopAtDepth(args.manticore_max_depth)) - if manticoreutils.manticore_is_new_enough(0, 2, 4): - # the verbosity static method was deprecated - from manticore.utils.log import set_verbosity - set_verbosity(getattr(logger, args.log_level)) - else: - manticore_client.manticore.verbosity(getattr(logger, args.log_level)) - if args.truffle: - truffle_controller = truffle.Truffle(truffle_cmd=args.truffle_cmd, parent_logger=ETHENO.logger) + truffle_controller = truffle.Truffle( + truffle_cmd=args.truffle_cmd, parent_logger=ETHENO.logger + ) def truffle_thread(): if ETHENO.master_client: @@ -360,43 +518,34 @@ def truffle_thread(): for plugin in ETHENO.plugins: plugin.finalize() - if manticore_client is not None: - if args.manticore_script is not None: - f = args.manticore_script - code = compile(f.read(), f.name, 'exec') - exec(code, { - 'manticore': manticore_client.manticore, - 'manticoreutils': manticoreutils, - 'logger': logger.EthenoLogger(os.path.basename(args.manticore_script.name), parent=manticore_client.logger) - }) - else: - manticoreutils.register_all_detectors(manticore_client.manticore) - manticore_client.multi_tx_analysis() - manticore_client.manticore.finalize() - manticore_client.logger.info("Results are in %s" % manticore_client.manticore.workspace) - ETHENO.shutdown() - elif not ETHENO.clients and not ETHENO.plugins: + if not ETHENO.clients and not ETHENO.plugins: ETHENO.logger.info("No clients or plugins running; exiting...") ETHENO.shutdown() thread = Thread(target=truffle_thread) thread.start() - if args.run_differential and 
(ETHENO.master_client is not None) and \ - next(filter(lambda c: not isinstance(c, ManticoreClient), ETHENO.clients), False): + # TODO: Will this only be allowed with Ganache? + if args.deploy_arbitrum_contracts and ETHENO.master_client is not None: + print("hello there!") + precompiler = Precompiler(deploy_arb=args.deploy_arbitrum_contracts) + ETHENO.add_plugin(precompiler) + + + # Without Manticore integration the only client types are geth, parity, and command-line raw/regular clients. + # So checking len() >= 1 should be sufficient. + if ( + args.run_differential + and (ETHENO.master_client is not None) + and len(ETHENO.clients) >= 1 + ): # There are at least two non-Manticore clients running - ETHENO.logger.info("Initializing differential tests to compare clients %s" % ', '.join( - map(str, [ETHENO.master_client] + ETHENO.clients) - )) + ETHENO.logger.info( + "Initializing differential tests to compare clients %s" + % ", ".join(map(str, [ETHENO.master_client] + ETHENO.clients)) + ) ETHENO.add_plugin(DifferentialTester()) - if args.echidna: - contract_source = None - if args.fuzz_contract is not None: - with open(args.fuzz_contract, 'rb') as c: - contract_source = c.read() - ETHENO.add_plugin(EchidnaPlugin(transaction_limit=args.fuzz_limit, contract_source=contract_source)) - had_plugins = len(ETHENO.plugins) > 0 if ETHENO.master_client is None and not ETHENO.clients and not ETHENO.plugins: @@ -406,7 +555,7 @@ def truffle_thread(): return etheno = EthenoView() - app.add_url_rule('/', view_func=etheno.as_view('etheno')) + app.add_url_rule("/", view_func=etheno.as_view("etheno")) ETHENO.run(debug=args.debug, run_publicly=args.run_publicly, port=args.port) if args.truffle: @@ -415,8 +564,11 @@ def truffle_thread(): if args.log_file is not None: print("Log file saved to: %s" % os.path.realpath(args.log_file)) if args.log_dir is not None: - print("Logs %ssaved to: %s" % (['','also '][args.log_file is not None], os.path.realpath(args.log_dir))) + print( + "Logs %ssaved to: %s" + % (["", "also "][args.log_file is not None], os.path.realpath(args.log_dir)) + ) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/etheno/ascii_escapes.py b/etheno/ascii_escapes.py index 687ac99..81bf0f2 100644 --- a/etheno/ascii_escapes.py +++ b/etheno/ascii_escapes.py @@ -1,67 +1,67 @@ from typing import Iterable, Union CONTROL_CODES = { - b'0' : b'\0', - b'a' : b'\x07', # alert - b'b' : b'\x08', # backspace - b'f' : b'\x0C', # form feed - b'n' : b'\x0A', # newline (line feed) - b'r' : b'\x0D', # carriage return - b't' : b'\x09', # horizontal tab - b'v' : b'\x0B', # vertical tab - b'"' : b'\x22', # double quote - b'&' : b'', # empty string - b'\'' : b'\x27', # single quote - b'\\' : b'\x5C', # backslash - b'NUL' : b'\0', # null character - b'SOH' : b'\x01', # start of heading - b'STX' : b'\x02', # start of text - b'ETX' : b'\x03', # end of text - b'EOT' : b'\x04', # end of transmission - b'ENQ' : b'\x05', # enquiry - b'ACK' : b'\x06', # acknowledge - b'BEL' : b'\x07', # bell - b'BS' : b'\x08', # backspace - b'HT' : b'\x09', # horizontal tab - b'LF' : b'\x0A', # line feed (newline) - b'VT' : b'\x0B', # vertical tab - b'FF' : b'\x0C', # form feed - b'CR' : b'\x0D', # carriage return - b'SO' : b'\x0E', # shift out - b'SI' : b'\x0F', # shift in - b'DLE' : b'\x10', # data link escape - b'DC1' : b'\x11', # device control 1 - b'DC2' : b'\x12', # device control 2 - b'DC3' : b'\x13', # device control 3 - b'DC4' : b'\x14', # device control 4 - b'NAK' : b'\x15', # negative acknowledge - b'SYN' : 
b'\x16', # synchronous idle - b'ETB' : b'\x17', # end of transmission block - b'CAN' : b'\x18', # cancel - b'EM' : b'\x19', # end of medium - b'SUB' : b'\x1A', # substitute - b'ESC' : b'\x1B', # escape - b'FS' : b'\x1C', # file separator - b'GS' : b'\x1D', # group separator - b'RS' : b'\x1E', # record separator - b'US' : b'\x1F', # unit separator - b'SP' : b'\x20', # space - b'DEL' : b'\x7F', # delete - b'^@' : b'\0', - b'^[' : b'\x1B', # escape - b'^\\' : b'\x1C', # file separator - b'^]' : b'\x1D', # group separator - b'^^' : b'\x1E', # record separator - b'^_' : b'\x1F', # unit separator + b"0": b"\0", + b"a": b"\x07", # alert + b"b": b"\x08", # backspace + b"f": b"\x0C", # form feed + b"n": b"\x0A", # newline (line feed) + b"r": b"\x0D", # carriage return + b"t": b"\x09", # horizontal tab + b"v": b"\x0B", # vertical tab + b'"': b"\x22", # double quote + b"&": b"", # empty string + b"'": b"\x27", # single quote + b"\\": b"\x5C", # backslash + b"NUL": b"\0", # null character + b"SOH": b"\x01", # start of heading + b"STX": b"\x02", # start of text + b"ETX": b"\x03", # end of text + b"EOT": b"\x04", # end of transmission + b"ENQ": b"\x05", # enquiry + b"ACK": b"\x06", # acknowledge + b"BEL": b"\x07", # bell + b"BS": b"\x08", # backspace + b"HT": b"\x09", # horizontal tab + b"LF": b"\x0A", # line feed (newline) + b"VT": b"\x0B", # vertical tab + b"FF": b"\x0C", # form feed + b"CR": b"\x0D", # carriage return + b"SO": b"\x0E", # shift out + b"SI": b"\x0F", # shift in + b"DLE": b"\x10", # data link escape + b"DC1": b"\x11", # device control 1 + b"DC2": b"\x12", # device control 2 + b"DC3": b"\x13", # device control 3 + b"DC4": b"\x14", # device control 4 + b"NAK": b"\x15", # negative acknowledge + b"SYN": b"\x16", # synchronous idle + b"ETB": b"\x17", # end of transmission block + b"CAN": b"\x18", # cancel + b"EM": b"\x19", # end of medium + b"SUB": b"\x1A", # substitute + b"ESC": b"\x1B", # escape + b"FS": b"\x1C", # file separator + b"GS": b"\x1D", # group separator + b"RS": b"\x1E", # record separator + b"US": b"\x1F", # unit separator + b"SP": b"\x20", # space + b"DEL": b"\x7F", # delete + b"^@": b"\0", + b"^[": b"\x1B", # escape + b"^\\": b"\x1C", # file separator + b"^]": b"\x1D", # group separator + b"^^": b"\x1E", # record separator + b"^_": b"\x1F", # unit separator } for i in range(26): - CONTROL_CODES[bytes([ord('^'), ord('A') + i])] = bytes([i + 1]) + CONTROL_CODES[bytes([ord("^"), ord("A") + i])] = bytes([i + 1]) def decode(text: Union[str, bytes, Iterable[int]]) -> bytes: escaped = None - ret = b'' + ret = b"" for c in text: if isinstance(c, str): c = ord(c) @@ -84,8 +84,8 @@ def decode(text: Union[str, bytes, Iterable[int]]) -> bytes: except ValueError: pass raise ValueError(f"Unknown escape sequence: {escaped!r}") - elif c == b'\\': - escaped = b'' + elif c == b"\\": + escaped = b"" else: ret += c return ret diff --git a/etheno/client.py b/etheno/client.py index a493c87..9f9fbed 100644 --- a/etheno/client.py +++ b/etheno/client.py @@ -26,12 +26,14 @@ def wrapper(self, *args, **kwargs): args = tuple(converted_args) kwargs = dict(kwargs) for arg_name, conversion in types.items(): - if arg_name == 'RETURN': + if arg_name == "RETURN": return_type = conversion elif arg_name in kwargs: - kwargs[arg_name] = conversion(kwargs[arg_name]) + kwargs[arg_name] = conversion(kwargs[arg_name]) return function(self, *args, **kwargs) + return wrapper + return decorator @@ -44,19 +46,19 @@ def post(self, data) -> Dict[str, Union[int, str, Dict[str, Any]]]: data = dict(data) self.rpc_id += 1 
return_id = None - if 'jsonrpc' not in data: - data['jsonrpc'] = '2.0' - if 'id' in data: - return_id = data['id'] - data['id'] = self.rpc_id + if "jsonrpc" not in data: + data["jsonrpc"] = "2.0" + if "id" in data: + return_id = data["id"] + data["id"] = self.rpc_id request = Request( self.urlstring, - data=bytearray(json.dumps(data), 'utf8'), - headers={'Content-type': 'application/json'} + data=bytearray(json.dumps(data), "utf8"), + headers={"Content-type": "application/json"}, ) ret = json.loads(urlopen(request).read()) - if return_id is not None and 'id' in ret: - ret['id'] = return_id + if return_id is not None and "id" in ret: + ret["id"] = return_id return ret def __str__(self): @@ -69,7 +71,8 @@ def __repr__(self): class JSONRPCError(RuntimeError): def __init__(self, client, data, result): super().__init__( - "JSON RPC Error in Client %s when processing transaction:\n%s\n%s" % (client, data, result['error']) + "JSON RPC Error in Client %s when processing transaction:\n%s\n%s" + % (client, data, result["error"]) ) self.client = client self.json = data @@ -77,15 +80,15 @@ def __init__(self, client, data, result): def transaction_receipt_succeeded(data): - if not (data and 'result' in data and data['result']): + if not (data and "result" in data and data["result"]): return None - elif 'contractAddress' in data['result'] and data['result']['contractAddress']: + elif "contractAddress" in data["result"] and data["result"]["contractAddress"]: return True - elif 'blockHash' in data['result'] and data['result']['blockHash']: + elif "blockHash" in data["result"] and data["result"]["blockHash"]: return True - elif 'status' not in data['result']: + elif "status" not in data["result"]: return None - status = data['result']['status'] + status = data["result"]["status"] if status is None: return None elif not isinstance(status, int): @@ -110,7 +113,9 @@ def etheno(self, instance): return elif instance == self._etheno: return - raise ValueError('An Etheno client can only ever be associated with a single Etheno instance') + raise ValueError( + "An Etheno client can only ever be associated with a single Etheno instance" + ) self._etheno = instance self.logger = logger.EthenoLogger(self.short_name, parent=self._etheno.logger) self.etheno_set() @@ -127,7 +132,7 @@ def etheno_set(self): """A callback for once the etheno instance and logger for this client is set""" pass - def create_account(self, balance = 0, address = None): + def create_account(self, balance=0, address=None): """A request for the client to create a new account. 
Subclasses implementing this function should raise a NotImplementedError if an address @@ -137,7 +142,7 @@ def create_account(self, balance = 0, address = None): :param address: The address for the account, or None if the address should be auto-generated :return: returns the address of the account created """ - raise NotImplementedError('Clients must extend this function') + raise NotImplementedError("Clients must extend this function") def shutdown(self): pass @@ -169,29 +174,39 @@ def create_account(self, balance: int = 0, address: Optional[int] = None): if address is not None: raise NotImplementedError() if self._accounts is None: - self._accounts = list(int(a, 16) for a in self.post({ - 'id': 1, - 'jsonrpc': '2.0', - 'method': 'eth_accounts' - })['result']) + self._accounts = list( + int(a, 16) + for a in self.post( + {"id": 1, "jsonrpc": "2.0", "method": "eth_accounts"} + )["result"] + ) self._created_account_index += 1 return self._accounts[self._created_account_index] def wait_until_running(self): pass + # TODO: need to ensure that JSON RPC calls match latest API spec def post(self, data: Dict[str, Any]) -> Optional[Dict[str, Any]]: ret = self.client.post(data) - if ret is not None and 'error' in ret: - if 'method' in data and ( - data['method'] == 'eth_sendTransaction' or data['method'] == 'eth_sendRawTransaction' + if ret is not None and "error" in ret: + if "method" in data and ( + data["method"] == "eth_sendTransaction" + or data["method"] == "eth_sendRawTransaction" ): - if self.etheno.master_client != self and self.etheno.rpc_client_result \ - and not isinstance(self.etheno.rpc_client_result, JSONRPCError) \ - and 'result' in self.etheno.rpc_client_result: - self.logger.error(f"{self!s}: Failed transaction associated with master client transaction " - f"{self.etheno.rpc_client_result['result']}") - self._failed_transactions.add(self.etheno.rpc_client_result['result'].lower()) + if ( + self.etheno.master_client != self + and self.etheno.rpc_client_result + and not isinstance(self.etheno.rpc_client_result, JSONRPCError) + and "result" in self.etheno.rpc_client_result + ): + self.logger.error( + f"{self!s}: Failed transaction associated with master client transaction " + f"{self.etheno.rpc_client_result['result']}" + ) + self._failed_transactions.add( + self.etheno.rpc_client_result["result"].lower() + ) # TODO: Figure out a better way to handle JSON RPC errors raise JSONRPCError(self, data, ret) return ret @@ -202,51 +217,55 @@ def estimate_gas(self, transaction: Dict[str, Any]) -> int: :param transaction: a dict containing the entire transaction as if it were to be sent to `post()` :return: the gas cost in wei as an int """ - return int(self.post({ - 'id': 1, - 'jsonrpc': '2.0', - 'method': 'eth_estimateGas', - 'params': transaction['params'] - })['result'], 16) + return int( + self.post( + { + "id": 1, + "jsonrpc": "2.0", + "method": "eth_estimateGas", + "params": transaction["params"], + } + )["result"], + 16, + ) def get_gas_price(self) -> int: - return int(self.post({ - 'id': 1, - 'jsonrpc': '2.0', - 'method': 'eth_gasPrice' - })['result'], 16) + return int( + self.post({"id": 1, "jsonrpc": "2.0", "method": "eth_gasPrice"})["result"], + 16, + ) def get_net_version(self) -> int: - return int(self.post({ - 'id': 1, - 'jsonrpc': '2.0', - 'method': 'net_version' - })['result'], 16) + return int( + self.post({"id": 1, "jsonrpc": "2.0", "method": "net_version"})["result"], + 16, + ) def get_transaction_count(self, from_address) -> int: - return int(self.post({ - 'id': 1, - 
'jsonrpc': '2.0', - 'method': 'eth_getTransactionCount', - 'params': [format_hex_address(from_address, True), 'latest'] - })['result'], 16) + return int( + self.post( + { + "id": 1, + "jsonrpc": "2.0", + "method": "eth_getTransactionCount", + "params": [format_hex_address(from_address, True), "latest"], + } + )["result"], + 16, + ) def wait_for_transaction(self, tx_hash): """Blocks until the given transaction has been mined :param tx_hash: the transaction hash for the transaction to monitor :return: The transaction receipt """ - if isinstance(tx_hash, int): - tx_hash = "0x%x" % tx_hash - tx_hash = tx_hash.lower() while True: - receipt = self.post({ - 'id': 1, - 'jsonrpc': '2.0', - 'method': 'eth_getTransactionReceipt', - 'params': [tx_hash] - }) - if tx_hash in self._failed_transactions or transaction_receipt_succeeded(receipt) is not None: + request_object = self.etheno.get_transaction_receipt_request(tx_hash) + receipt = self.post(request_object) + if ( + tx_hash in self._failed_transactions + or transaction_receipt_succeeded(receipt) is not None + ): return receipt self.logger.info("Waiting to mine transaction %s..." % tx_hash) time.sleep(5.0) @@ -286,7 +305,7 @@ def wait_until_running(self): def QUANTITY(to_convert: Optional[str]) -> Optional[int]: if to_convert is None: return None - elif to_convert[:2] == '0x': + elif to_convert[:2] == "0x": return int(to_convert[2:], 16) else: return int(to_convert) diff --git a/etheno/contracts.py b/etheno/contracts.py index 5c21e17..6a3eac2 100644 --- a/etheno/contracts.py +++ b/etheno/contracts.py @@ -2,13 +2,14 @@ from .etheno import EthenoPlugin from .utils import format_hex_address +# TODO: what is this file for? class ContractSynchronizer(EthenoPlugin): def __init__(self, source_client, contract_address): - if isintsance(source_client, str): + if isinstance(source_client, str): source_client = RpcProxyClient(source_client) self.source = source_client self.contract = format_hex_address(contract_address, True) - + def added(self): # get the contract: pass diff --git a/etheno/differentials.py b/etheno/differentials.py index 1c63d27..5203f73 100644 --- a/etheno/differentials.py +++ b/etheno/differentials.py @@ -4,21 +4,31 @@ from .client import JSONRPCError, SelfPostingClient from .etheno import EthenoPlugin + class DifferentialTest(object): - def __init__(self, tester, test_name, success, message = ''): + def __init__(self, tester, test_name, success, message=""): self.tester = tester self.test_name = test_name self.message = message self.success = success - self.tester.logger.make_constant_logged_file(self.message, prefix=['FAILED', 'PASSED'][self.success.value], suffix='.log', dir=os.path.join(self.tester.logger.directory, self.test_name)) + self.tester.logger.make_constant_logged_file( + self.message, + prefix=["FAILED", "PASSED"][self.success.value], + suffix=".log", + dir=os.path.join(self.tester.logger.directory, self.test_name), + ) + def __str__(self): return "[%s] %s\t%s" % (self.test_name, self.success, self.message) + __repr__ = __str__ + class TestResult(Enum): FAILED = 0 PASSED = 1 + class DifferentialTester(EthenoPlugin): def __init__(self): self._transactions_by_hash = {} @@ -34,99 +44,180 @@ def add_test_result(self, result): self.tests[result.test_name][result.success].append(result) def after_post(self, data, client_results): - method = data['method'] + method = data["method"] # First, see if any of the clients returned an error. If one did, they all should! 
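The branch that follows partitions the per-client results by whether they are JSONRPCError instances; a rough standalone sketch of that partition, using hypothetical results for a master client and two followers:

    from etheno.client import JSONRPCError

    client_results = [
        {"result": "0x1"},                                                 # master client succeeded
        JSONRPCError(None, {"method": "eth_call"},
                     {"error": {"code": -32000, "message": "reverted"}}),  # one follower errored
        {"result": "0x1"},                                                 # another follower succeeded
    ]
    with_errors = tuple(i for i, r in enumerate(client_results) if isinstance(r, JSONRPCError))
    without_errors = tuple(sorted(frozenset(range(len(client_results))) - frozenset(with_errors)))
    # The JSON_RPC_ERRORS test only fails when the clients disagree, i.e. when both tuples are non-empty:
    assert with_errors == (1,) and without_errors == (0, 2)
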
- clients_with_errors = tuple(i for i, result in enumerate(client_results) if isinstance(result, JSONRPCError)) - clients_without_errors = tuple(sorted(frozenset(range(len(client_results))) - frozenset(clients_with_errors))) + clients_with_errors = tuple( + i + for i, result in enumerate(client_results) + if isinstance(result, JSONRPCError) + ) + clients_without_errors = tuple( + sorted( + frozenset(range(len(client_results))) - frozenset(clients_with_errors) + ) + ) if clients_with_errors: clients = [self.etheno.master_client] + self.etheno.clients if clients_without_errors: - test = DifferentialTest(self, 'JSON_RPC_ERRORS', TestResult.FAILED, "%s executed JSON RPC call %s with no errors, but %s executed the same transaction with errors:\n%s" % ( - ', '.join(str(clients[client]) for client in clients_without_errors), - data, - ', '.join(str(clients[client]) for client in clients_with_errors), - '\n'.join(str(client_results[client]) for client in clients_with_errors) - )) + test = DifferentialTest( + self, + "JSON_RPC_ERRORS", + TestResult.FAILED, + "%s executed JSON RPC call %s with no errors, but %s executed the same transaction with errors:\n%s" + % ( + ", ".join( + str(clients[client]) for client in clients_without_errors + ), + data, + ", ".join( + str(clients[client]) for client in clients_with_errors + ), + "\n".join( + str(client_results[client]) + for client in clients_with_errors + ), + ), + ) else: - test = DifferentialTest(self, 'JSON_RPC_ERRORS', TestResult.PASSED, "All clients executed JSON RPC call %s with errors" % data) + test = DifferentialTest( + self, + "JSON_RPC_ERRORS", + TestResult.PASSED, + "All clients executed JSON RPC call %s with errors" % data, + ) self.add_test_result(test) self.logger.error(test.message) return else: - self.add_test_result(DifferentialTest(self, 'JSON_RPC_ERRORS', TestResult.PASSED, "All clients executed transaction %s without error" % data)) - + self.add_test_result( + DifferentialTest( + self, + "JSON_RPC_ERRORS", + TestResult.PASSED, + "All clients executed transaction %s without error" % data, + ) + ) + master_result = client_results[0] - if method == 'eth_sendTransaction' or method == 'eth_sendRawTransaction': - if not isinstance(master_result, JSONRPCError) and 'result' in master_result and master_result['result']: - self._unprocessed_transactions.add(master_result['result']) - self._transactions_by_hash[master_result['result']] = data - elif method == 'eth_getTransactionReceipt': - if master_result and 'result' in master_result and master_result['result']: + if method == "eth_sendTransaction" or method == "eth_sendRawTransaction": + if ( + not isinstance(master_result, JSONRPCError) + and "result" in master_result + and master_result["result"] + ): + self._unprocessed_transactions.add(master_result["result"]) + self._transactions_by_hash[master_result["result"]] = data + elif method == "eth_getTransactionReceipt": + if master_result and "result" in master_result and master_result["result"]: # mark that we have processed the receipt for this transaction: - if data['params'][0] in self._unprocessed_transactions: - self._unprocessed_transactions.remove(data['params'][0]) + if data["params"][0] in self._unprocessed_transactions: + self._unprocessed_transactions.remove(data["params"][0]) - if 'contractAddress' in master_result['result'] and master_result['result']['contractAddress']: + if ( + "contractAddress" in master_result["result"] + and master_result["result"]["contractAddress"] + ): # the master client created a new contract # so 
make sure that all of the other clients did, too - for client, client_data in zip(self.etheno.clients, client_results[1:]): + for client, client_data in zip( + self.etheno.clients, client_results[1:] + ): created = False try: - created = client_data['result']['contractAddress'] + created = client_data["result"]["contractAddress"] except Exception: pass if not created: - test = DifferentialTest(self, 'CONTRACT_CREATION', TestResult.FAILED, f"{self.etheno.master_client} created a contract for transaction {data['params'][0]}, but {client} did not") + test = DifferentialTest( + self, + "CONTRACT_CREATION", + TestResult.FAILED, + f"{self.etheno.master_client} created a contract for transaction {data['params'][0]}, but {client} did not", + ) self.add_test_result(test) self.logger.error(test.message) else: - self.add_test_result(DifferentialTest(self, 'CONTRACT_CREATION', TestResult.PASSED, "client %s transaction %s" % (client, data['params'][0]))) - if 'gasUsed' in master_result['result'] and master_result['result']['gasUsed']: + self.add_test_result( + DifferentialTest( + self, + "CONTRACT_CREATION", + TestResult.PASSED, + "client %s transaction %s" + % (client, data["params"][0]), + ) + ) + if ( + "gasUsed" in master_result["result"] + and master_result["result"]["gasUsed"] + ): # make sure each client used the same amount of gas - master_gas = int(master_result['result']['gasUsed'], 16) - for client, client_data in zip(self.etheno.clients, client_results[1:]): + master_gas = int(master_result["result"]["gasUsed"], 16) + for client, client_data in zip( + self.etheno.clients, client_results[1:] + ): gas_used = 0 try: - gas_used = int(client_data['result']['gasUsed'], 16) + gas_used = int(client_data["result"]["gasUsed"], 16) except Exception: pass if gas_used != master_gas: - test = DifferentialTest(self, 'GAS_USAGE', TestResult.FAILED, f"""Transaction {data['params'][0]} used: + test = DifferentialTest( + self, + "GAS_USAGE", + TestResult.FAILED, + f"""Transaction {data['params'][0]} used: {hex(master_gas)} gas in {self.etheno.master_client} but {hex(gas_used)} gas in {client} while mining this transaction: -{self._transactions_by_hash.get(data['params'][0], 'UNKNOWN TRANSACTION')}""") +{self._transactions_by_hash.get(data['params'][0], 'UNKNOWN TRANSACTION')}""", + ) self.add_test_result(test) self.logger.error(test.message) else: - self.add_test_result(DifferentialTest(self, 'GAS_USAGE', TestResult.PASSED, "client %s transaction %s used 0x%x gas" % (client, data['params'][0], gas_used))) + self.add_test_result( + DifferentialTest( + self, + "GAS_USAGE", + TestResult.PASSED, + "client %s transaction %s used 0x%x gas" + % (client, data["params"][0], gas_used), + ) + ) # we have processed this transaction, so no need to keep its original arguments around: - if data['params'][0] in self._transactions_by_hash: - del self._transactions_by_hash[data['params'][0]] + if data["params"][0] in self._transactions_by_hash: + del self._transactions_by_hash[data["params"][0]] def finalize(self): unprocessed = self._unprocessed_transactions self._unprocessed_transactions = set() for tx_hash in unprocessed: - self.logger.info("Requesting transaction receipt for %s to check differentials..." % tx_hash) + self.logger.info( + "Requesting transaction receipt for %s to check differentials..." 
+ % tx_hash + ) if not isinstance(self.etheno.master_client, SelfPostingClient): - self.logger.warn("The DifferentialTester currently only supports master clients that extend from SelfPostingClient, but %s does not; skipping checking transaction(s) %s" % (self.etheno.master_client, ', '.join(unprocessed))) + self.logger.warn( + "The DifferentialTester currently only supports master clients that extend from SelfPostingClient, but %s does not; skipping checking transaction(s) %s" + % (self.etheno.master_client, ", ".join(unprocessed)) + ) return while True: - receipt = self.etheno.post({ - 'jsonrpc': '2.0', - 'method': 'eth_getTransactionReceipt', - 'params': [tx_hash] - }) + receipt = self.etheno.post( + { + "jsonrpc": "2.0", + "method": "eth_getTransactionReceipt", + "params": [tx_hash], + } + ) # if this post is successful, it will trigger the `after_post` callback above # where were check for the differentials - if 'result' in receipt and receipt['result']: + if "result" in receipt and receipt["result"]: break # The transaction is still pending time.sleep(3.0) @@ -136,11 +227,15 @@ def shutdown(self): super().shutdown() if self.tests and not self._printed_summary: self._printed_summary = True - ret = '\nDifferential Test Summary:\n\n' + ret = "\nDifferential Test Summary:\n\n" for test in sorted(self.tests): ret += " %s\n" % test total = sum(map(len, self.tests[test].values())) for result in self.tests[test]: - ret += " %s\t%d / %d\n" % (result, len(self.tests[test][result]), total) - ret += '\n' + ret += " %s\t%d / %d\n" % ( + result, + len(self.tests[test][result]), + total, + ) + ret += "\n" self.logger.info(ret) diff --git a/etheno/echidna.py b/etheno/echidna.py deleted file mode 100644 index 679291b..0000000 --- a/etheno/echidna.py +++ /dev/null @@ -1,199 +0,0 @@ -import os -import subprocess -import tempfile -from typing import Optional, Union - -from .ascii_escapes import decode -from .etheno import EthenoPlugin -from .utils import ConstantTemporaryFile, format_hex_address - -ECHIDNA_CONTRACT = b'''pragma solidity ^0.4.24; -contract C { - mapping(int => int) public s; - int public stored = 1337; - function save(int key, int value) public { - s[key] = value; - } - function remove(int key) public { - delete s[key]; - } - function setStored(int value) public { - stored = value; - } - function f(uint, int, int[]) public { } - function g(bool, int, address[]) public { } - function echidna_() public returns (bool) { - return true; - } -} -''' - -ECHIDNA_CONFIG = b'''outputRawTxs: true\nquiet: true\ndashboard: false\ngasLimit: 0xfffff\n''' - - -def echidna_exists(): - return subprocess.call(['/usr/bin/env', 'echidna-test', '--help'], stdout=subprocess.DEVNULL) == 0 - - -def stack_exists(): - return subprocess.call(['/usr/bin/env', 'stack', '--help'], stdout=subprocess.DEVNULL) == 0 - - -def git_exists(): - return subprocess.call(['/usr/bin/env', 'git', '--version'], stdout=subprocess.DEVNULL) == 0 - - -def install_echidna(allow_reinstall: bool = False): - if not allow_reinstall and echidna_exists(): - return - elif not git_exists(): - raise Exception('Git must be installed in order to install Echidna') - elif not stack_exists(): - raise Exception('Haskell Stack must be installed in order to install Echidna. 
On macOS you can easily install ' - 'it using Homebrew: `brew install haskell-stack`') - - with tempfile.TemporaryDirectory() as path: - subprocess.check_call(['/usr/bin/env', 'git', 'clone', 'https://github.com/trailofbits/echidna.git', path]) - # TODO: Once the `dev-etheno` branch is merged into `master`, we can remove this: - subprocess.call(['/usr/bin/env', 'git', 'checkout', 'dev-etheno'], cwd=path) - subprocess.check_call(['/usr/bin/env', 'stack', 'install'], cwd=path) - - -def decode_binary_json(text: Union[str, bytes]) -> Optional[bytes]: - orig = text - text = decode(text).strip() - if not text.startswith(b'['): - return None - offset = len(orig) - len(text) - orig = text - text = text[1:].strip() - offset += len(orig) - len(text) - if text[:1] != b'"': - raise ValueError( - f"Malformed JSON list! Expected '\"' but instead got '{text[0:1].decode()}' at offset {offset}" - ) - text = text[1:] - offset += 1 - if text[-1:] != b']': - raise ValueError( - f"Malformed JSON list! Expected ']' but instead got '{chr(text[-1])}' at offset {offset + len(text) - 1}" - ) - text = text[:-1].strip() - if text[-1:] != b'"': - raise ValueError( - f"Malformed JSON list! Expected '\"' but instead got '{chr(text[-1])}' at offset {offset + len(text) - 1}" - ) - return text[:-1] - - -class EchidnaPlugin(EthenoPlugin): - def __init__(self, transaction_limit: Optional[int] = None, contract_source: Optional[bytes] = None): - self._transaction: int = 0 - self.limit: Optional[int] = transaction_limit - self.contract_address = None - if contract_source is None: - self.contract_source: bytes = ECHIDNA_CONTRACT - else: - self.contract_source = contract_source - self.contract_bytecode = None - - def added(self): - # Wait until the plugin was added to Etheno so its logger is initialized - self.contract_bytecode = self.compile(self.contract_source) - - def run(self): - if not self.etheno.accounts: - self.logger.info("Etheno does not know about any accounts, so Echidna has nothing to do!") - self._shutdown() - return - elif self.contract_source is None: - self.logger.error("Error compiling source contract") - self._shutdown() - # First, deploy the testing contract: - self.logger.info('Deploying Echidna test contract...') - self.contract_address = format_hex_address(self.etheno.deploy_contract(self.etheno.accounts[0], - self.contract_bytecode), True) - if self.contract_address is None: - self.logger.error('Unable to deploy Echidna test contract!') - self._shutdown() - return - self.logger.info("Deployed Echidna test contract to %s" % self.contract_address) - config = self.logger.make_constant_logged_file(ECHIDNA_CONFIG, prefix='echidna', suffix='.yaml') - sol = self.logger.make_constant_logged_file( - self.contract_source, prefix='echidna', suffix='.sol') # type: ignore - echidna_args = ['/usr/bin/env', 'echidna-test', self.logger.to_log_path(sol), '--config', - self.logger.to_log_path(config)] - run_script = self.logger.make_constant_logged_file(' '.join(echidna_args), prefix='run_echidna', suffix='.sh') - # make the script executable: - os.chmod(run_script, 0o755) - - echidna = subprocess.Popen(echidna_args, stderr=subprocess.DEVNULL, stdout=subprocess.PIPE, bufsize=1, - universal_newlines=True, cwd=self.log_directory) - while self.limit is None or self._transaction < self.limit: - line = echidna.stdout.readline() - if line != b'': - txn = decode_binary_json(line) - if txn is None: - continue - self.emit_transaction(txn) - else: - break - self._shutdown() - - def _shutdown(self): - etheno = self.etheno - 
self.etheno.remove_plugin(self) - etheno.shutdown() - - def compile(self, solidity): - with ConstantTemporaryFile(solidity, prefix='echidna', suffix='.sol') as contract: - solc = subprocess.Popen(['/usr/bin/env', 'solc', '--bin', contract], stderr=subprocess.PIPE, - stdout=subprocess.PIPE, bufsize=1, universal_newlines=True) - errors = solc.stderr.read().strip() - output = solc.stdout.read() - if solc.wait() != 0: - self.logger.error(f"{errors}\n{output}") - return None - elif errors: - if solidity == ECHIDNA_CONTRACT: - # no need to raise a warning with our own contract: - self.logger.debug(errors) - else: - self.logger.warning(errors) - binary_key = 'Binary:' - offset = output.find(binary_key) - if offset < 0: - self.logger.error(f"Could not parse `solc` output:\n{output}") - return None - code = hex(int(output[offset+len(binary_key):].strip(), 16)) - self.logger.debug(f"Compiled contract code: {code}") - return code - - def emit_transaction(self, txn): - self._transaction += 1 - transaction = { - 'id': 1, - 'jsonrpc': '2.0', - 'method': 'eth_sendTransaction', - 'params' : [{ - 'from': format_hex_address(self.etheno.accounts[0], True), - 'to': self.contract_address, - 'gasPrice': "0x%x" % self.etheno.master_client.get_gas_price(), - 'value': '0x0', - 'data': "0x%s" % txn.hex() - }] - } - gas = self.etheno.estimate_gas(transaction) - if gas is None: - self.logger.warning(f"All clients were unable to estimate the gas cost for transaction {self._transaction}." - f" This typically means that Echidna emitted a transaction that is too large.") - return - gas = "0x%x" % gas - self.logger.info(f"Estimated gas cost for Transaction {self._transaction}: {gas}") - transaction['params'][0]['gas'] = gas - self.logger.info("Emitting Transaction %d" % self._transaction) - self.etheno.post(transaction) - - -if __name__ == '__main__': - install_echidna(allow_reinstall=True) diff --git a/etheno/etheno.py b/etheno/etheno.py index 74bf2eb..e87dc2b 100644 --- a/etheno/etheno.py +++ b/etheno/etheno.py @@ -1,6 +1,8 @@ import pkg_resources +import os from threading import Thread from typing import Any, Dict, List, Optional +from werkzeug.serving import make_server from flask import Flask, jsonify, request, abort from flask.views import MethodView @@ -12,8 +14,8 @@ VERSION: str = pkg_resources.require("etheno")[0].version VERSION_NAME = f"ToB/v{VERSION}/source/Etheno" -JSONRPC_VERSION = '2.0' -VERSION_ID=67 +JSONRPC_VERSION = "2.0" +VERSION_ID = 67 app = Flask(__name__) @@ -25,23 +27,12 @@ def to_account_address(raw_address: int) -> str: addr = "%x" % raw_address - return "0x%s%s" % ('0'*(40 - len(addr)), addr) + return "0x%s%s" % ("0" * (40 - len(addr)), addr) _CONTROLLER = threadwrapper.MainThreadController() -@app.route('/shutdown') -def _etheno_shutdown(): - # shut down the Flask server - shutdown = request.environ.get('werkzeug.server.shutdown') - if shutdown is None: - raise RuntimeError('Not running with the Werkzeug Server') - _CONTROLLER.quit() - shutdown() - return '' - - class DropPost(RuntimeError): pass @@ -63,9 +54,13 @@ def etheno(self, instance: "Etheno"): if instance is None: self._etheno = None return - raise ValueError('An Etheno plugin can only ever be associated with a single Etheno instance') + raise ValueError( + "An Etheno plugin can only ever be associated with a single Etheno instance" + ) self._etheno = instance - self.logger = logger.EthenoLogger(self.__class__.__name__, parent=self._etheno.logger) + self.logger = logger.EthenoLogger( + self.__class__.__name__, 
parent=self._etheno.logger + ) @property def log_directory(self): @@ -103,7 +98,7 @@ def run(self): A callback when Etheno is running and all other clients and plugins are initialized """ pass - + def finalize(self): """ Called when an analysis pass should be finalized (e.g., after a Truffle migration completes). @@ -131,7 +126,7 @@ def __init__(self, master_client: Optional[SelfPostingClient] = None): self.rpc_client_result = None self.plugins: List[EthenoPlugin] = [] self._shutting_down: bool = False - self.logger: logger.EthenoLogger = logger.EthenoLogger('Etheno', logger.INFO) + self.logger: logger.EthenoLogger = logger.EthenoLogger("Etheno", logger.INFO) @property def log_level(self) -> int: @@ -153,14 +148,19 @@ def master_client(self, client: Optional[SelfPostingClient]): self._master_client = None return if not isinstance(client, SelfPostingClient): - raise Exception('The master client must be an instance of a SelfPostingClient') + raise Exception( + "The master client must be an instance of a SelfPostingClient" + ) client.etheno = self self._master_client = client - self.accounts: list[int] = list(map(lambda a: int(a, 16), client.post({ - 'id': 1, - 'jsonrpc': '2.0', - 'method': 'eth_accounts' - })['result'])) + self.accounts: list[int] = list( + map( + lambda a: int(a, 16), + client.post({"id": 1, "jsonrpc": "2.0", "method": "eth_accounts"})[ + "result" + ], + ) + ) for client in self.clients: self._create_accounts(client) @@ -170,15 +170,16 @@ def estimate_gas(self, transaction) -> Optional[int]: Iterates through all clients until it finds a client that is capable of estimating the gas cost without error. If all clients return an error, this function will return None. """ - clients = [self.master_client] + \ - [client for client in self.clients if hasattr(client, "estimate_gas")] # type: ignore + clients = [self.master_client] + [ + client for client in self.clients if hasattr(client, "estimate_gas") + ] # type: ignore for client in clients: try: return client.estimate_gas(transaction) except JSONRPCError: continue return None - + def post(self, data): self.logger.debug(f"Handling JSON RPC request {data}") @@ -186,60 +187,68 @@ def post(self, data): try: new_data = plugin.before_post(dict(data)) if new_data is not None and new_data != data: - self.logger.debug(f"Incoming JSON RPC request {data} changed by plugin {plugin!r} to {new_data}") + self.logger.debug( + f"Incoming JSON RPC request {data} changed by plugin {plugin!r} to {new_data}" + ) data = new_data except DropPost: - self.logger.info(f"Incoming JSON RPC request {data} dropped by plugin {plugin!r}") + self.logger.info( + f"Incoming JSON RPC request {data} dropped by plugin {plugin!r}" + ) - method = data['method'] + method = data["method"] args = () kwargs = {} - if 'params' in data: - params = data['params'] + if "params" in data: + params = data["params"] if len(params) == 1 and isinstance(params[0], dict): kwargs = dict(params[0]) # handle Python reserved words: - if 'from' in kwargs: - kwargs['from_addr'] = kwargs['from'] - del kwargs['from'] + if "from" in kwargs: + kwargs["from_addr"] = kwargs["from"] + del kwargs["from"] else: - args = data['params'] + args = data["params"] if self.master_client is None: ret = None else: - if method == 'eth_getTransactionReceipt': + if method == "eth_getTransactionReceipt": # for eth_getTransactionReceipt, make sure we block until all clients have mined the transaction - ret = self.master_client.wait_for_transaction(data['params'][0]) - if 'id' in data and 'id' in ret: - 
ret['id'] = data['id'] + ret = self.master_client.wait_for_transaction(data["params"][0]) + if "id" in data and "id" in ret: + ret["id"] = data["id"] else: try: ret = self.master_client.post(data) except JSONRPCError as e: self.logger.error(e) ret = e - + self.rpc_client_result = ret - self.logger.debug(f"Result from the master client ({self.master_client}): {ret}") + self.logger.debug( + f"Result from the master client ({self.master_client}): {ret}" + ) results = [] for client in self.clients: try: if hasattr(client, method): - self.logger.info("Enrobing JSON RPC call to %s.%s" % (client, method)) + self.logger.info( + "Enrobing JSON RPC call to %s.%s" % (client, method) + ) function = getattr(client, method) if function is not None: - kwargs['rpc_client_result'] = ret + kwargs["rpc_client_result"] = ret results.append(function(*args, **kwargs)) else: self.logger.warn(f"Function {method} of {client} is None!") results.append(None) elif isinstance(client, SelfPostingClient): - if method == 'eth_getTransactionReceipt': + if method == "eth_getTransactionReceipt": # for eth_getTransactionReceipt, make sure we block until all clients have mined the transaction - results.append(client.wait_for_transaction(data['params'][0])) + results.append(client.wait_for_transaction(data["params"][0])) else: results.append(client.post(data)) else: @@ -257,7 +266,7 @@ def post(self, data): plugin.after_post(data, results) return ret - + def add_plugin(self, plugin: EthenoPlugin): plugin.etheno = self self.plugins.append(plugin) @@ -282,32 +291,55 @@ def add_client(self, client: EthenoClient): self.clients.append(client) self._create_accounts(client) - def deploy_contract(self, from_address, bytecode, gas=0x99999, gas_price=None, value=0) -> Optional[int]: + def deploy_contract( + self, from_address, bytecode, gas=0x99999, gas_price=None, value=0 + ) -> Optional[int]: if gas_price is None: gas_price = self.master_client.get_gas_price() if isinstance(bytecode, bytes): bytecode = bytecode.decode() - if not bytecode.startswith('0x'): + if not bytecode.startswith("0x"): bytecode = "0x%s" % bytecode - tx_hash = self.post({ - 'id': 1, - 'jsonrpc': '2.0', - 'method': 'eth_sendTransaction', - 'params': [{ - "from": format_hex_address(from_address, True), - "gas": "0x%x" % gas, - "gasPrice": "0x%x" % gas_price, - "value": "0x0", - "data": bytecode - }] - })['result'] + tx_hash = self.post( + { + "id": 1, + "jsonrpc": "2.0", + "method": "eth_sendTransaction", + "params": [ + { + "from": format_hex_address(from_address, True), + "gas": "0x%x" % gas, + "gasPrice": "0x%x" % gas_price, + "value": "0x0", + "data": bytecode, + } + ], + } + )["result"] receipt = self.master_client.wait_for_transaction(tx_hash) - if 'result' in receipt and receipt['result'] and 'contractAddress' in receipt['result'] and \ - receipt['result']['contractAddress']: - return int(receipt['result']['contractAddress'], 16) + if ( + "result" in receipt + and receipt["result"] + and "contractAddress" in receipt["result"] + and receipt["result"]["contractAddress"] + ): + # No longer returning an integer because if the address starts with zeros, those will be truncated when converted to integer + # and then back to a hex string. 
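A short worked example of the truncation problem described in the comment above, using a hypothetical contract address:

    # Round-tripping a hex address through int drops leading zero bytes:
    address = "0x00ab4797c23f7cba9f37c9f7c89a9e1c343e1bd9"  # hypothetical 20-byte address
    as_int = int(address, 16)
    assert "0x%x" % as_int == "0xab4797c23f7cba9f37c9f7c89a9e1c343e1bd9"  # only 19 bytes survive
    # Returning the receipt's contractAddress string verbatim avoids this loss.
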
+ return receipt["result"]["contractAddress"] else: return None + def get_transaction_receipt_request(self, tx_hash: str) -> Dict: + """ + Takes in a transaction hash and returns the request object for an eth_getTransactionReceipt API call + """ + return { + "id": 1, + "jsonrpc": "2.0", + "method": "eth_getTransactionReceipt", + "params": [tx_hash], + } + def shutdown(self, port: int = GETH_DEFAULT_RPC_PORT): if self._shutting_down: return @@ -320,28 +352,24 @@ def shutdown(self, port: int = GETH_DEFAULT_RPC_PORT): for client in self.clients: client.shutdown() self.logger.close() - from urllib.request import urlopen - import socket - import urllib - try: - urlopen("http://127.0.0.1:%d/shutdown" % port, timeout = 2) - except socket.timeout: - pass - except urllib.error.URLError: - pass + _CONTROLLER.quit() def run(self, debug=True, run_publicly=False, port=GETH_DEFAULT_RPC_PORT): # Manticore only works in the main thread, so use a threadsafe wrapper: - def flask_thread(): - if run_publicly: - host='0.0.0.0' + def server_thread(): + IS_DOCKER = os.environ.get("DOCKER", 0) + if run_publicly or IS_DOCKER: + host = "0.0.0.0" else: - host = None + host = "127.0.0.1" # Do not use the reloader, because Flask needs to run in the main thread to use the reloader - app.run(debug=debug, host=host, port=port, use_reloader=False) - thread = Thread(target=flask_thread) - thread.start() + server = make_server(host=host, port=port, app=app, threaded=True) + return server + # app.run(debug=debug, host=host, port=port, use_reloader=False) + server = server_thread() + thread = Thread(target=server.serve_forever) + thread.start() self.logger.info("Etheno v%s" % VERSION) for plugin in self.plugins: @@ -349,6 +377,8 @@ def flask_thread(): _CONTROLLER.run() self.shutdown() + self.logger.info("Shutting Etheno down") + server.shutdown() thread.join() @@ -368,17 +398,18 @@ def post(self): ETHENO.logger.error("Unexpected POST data: %s" % data) abort(400) - if 'jsonrpc' not in data or 'method' not in data: + if "jsonrpc" not in data or "method" not in data: abort(400) try: - jsonrpc_version = float(data['jsonrpc']) + jsonrpc_version = float(data["jsonrpc"]) except ValueError: abort(400) if jsonrpc_version < 2.0: abort(426) elif jsonrpc_version > 2.0: ETHENO.logger.warn( - f"Client is using a newer version of the JSONRPC protocol! Expected 2.0, but got {jsonrpc_version}") + f"Client is using a newer version of the JSONRPC protocol! Expected 2.0, but got {jsonrpc_version}" + ) ret = ETHENO.post(data) @@ -393,5 +424,5 @@ def post(self): if was_list: ret = [ret] ret = jsonify(ret) - + return ret diff --git a/etheno/ganache.py b/etheno/ganache.py index 9446c36..8da2dbe 100644 --- a/etheno/ganache.py +++ b/etheno/ganache.py @@ -19,18 +19,22 @@ def __init__(self, cmd=None, args=None, port=8546): if cmd is not None: cmd = shlex.split(cmd) else: - cmd = ['/usr/bin/env', 'ganache-cli'] + cmd = ["/usr/bin/env", "ganache"] if args is None: args = [] - self.args = cmd + ['-d', '-p', str(port)] + args + self.args = ( + cmd + ["-d", "-p", str(port), "--chain.allowUnlimitedContractSize"] + args + ) self.ganache = None self._client = None def start(self): if self.ganache: return - if shutil.which("ganache-cli") is None: - raise ValueError("`ganache-cli` is not installed! Install it by running `npm -g i ganache-cli`") + if shutil.which("ganache") is None: + raise ValueError( + "`ganache` is not installed! 
Install it by running `npm -g i ganache`" + ) if self._client: self.ganache = PtyLogger(self._client.logger, self.args) self.ganache.start() @@ -39,9 +43,12 @@ def ganache_errored() -> int: if self.ganache.is_done(): return self.ganache.exitstatus return 0 + else: ETHENO.logger.debug(f"Running ganache: {self.args}") - self.ganache = subprocess.Popen(self.args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=1) + self.ganache = subprocess.Popen( + self.args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=1 + ) def ganache_errored(): try: @@ -55,7 +62,9 @@ def ganache_errored(): time.sleep(0.25) retcode = ganache_errored() if retcode != 0: - raise RuntimeError(f"{' '.join(self.args)} exited with non-zero status {retcode}") + raise RuntimeError( + f"{' '.join(self.args)} exited with non-zero status {retcode}" + ) def post(self, data): if self.ganache is None: diff --git a/etheno/genesis.py b/etheno/genesis.py index f5dacb7..bafbe1e 100644 --- a/etheno/genesis.py +++ b/etheno/genesis.py @@ -4,7 +4,7 @@ class Account(object): - def __init__(self, address, balance = None, private_key = None): + def __init__(self, address, balance=None, private_key=None): self._address = address self.balance = balance self._private_key = private_key @@ -18,29 +18,47 @@ def private_key(self): return self._private_key -def make_genesis(network_id=0x657468656E6F, difficulty=20, gas_limit=200000000000, accounts=None, byzantium_block=0, dao_fork_block=0, homestead_block=0, eip150_block=0, eip155_block=0, eip158_block=0, constantinople_block=None): +def make_genesis( + network_id=0x657468656E6F, + difficulty=20, + gas_limit=200000000000, + accounts=None, + byzantium_block=0, + dao_fork_block=0, + homestead_block=0, + eip150_block=0, + eip155_block=0, + eip158_block=0, + constantinople_block=None, +): if accounts: - alloc = {format_hex_address(acct.address): {'balance': "%d" % acct.balance, 'privateKey': format_hex_address(acct.private_key)} for acct in accounts} + alloc = { + format_hex_address(acct.address): { + "balance": "%d" % acct.balance, + "privateKey": format_hex_address(acct.private_key), + } + for acct in accounts + } else: alloc = {} ret = { - 'config' : { - 'chainId': network_id, - 'byzantiumBlock': byzantium_block, - 'daoForkBlock': dao_fork_block, - 'homesteadBlock': homestead_block, - 'eip150Block': eip150_block, - 'eip155Block': eip155_block, - 'eip158Block': eip158_block + "config": { + "chainId": network_id, + "byzantiumBlock": byzantium_block, + "daoForkBlock": dao_fork_block, + "homesteadBlock": homestead_block, + "eip150Block": eip150_block, + "eip155Block": eip155_block, + "eip158Block": eip158_block, }, - 'difficulty': "%d" % difficulty, - 'gasLimit': "%d" % gas_limit, - 'alloc': alloc + "difficulty": "%d" % difficulty, + "gasLimit": "%d" % gas_limit, + "alloc": alloc, } if constantinople_block is not None: - ret['config']['constantinopleBlock'] = constantinople_block + ret["config"]["constantinopleBlock"] = constantinople_block return ret @@ -48,9 +66,9 @@ def make_genesis(network_id=0x657468656E6F, difficulty=20, gas_limit=20000000000 def geth_to_parity(genesis): """Converts a Geth style genesis to Parity style""" ret = { - 'name': 'etheno', - 'engine': { - 'instantSeal': None, + "name": "etheno", + "engine": { + "instantSeal": None, # 'Ethash': { # 'params': { # 'minimumDifficulty': "0x%s" % genesis['difficulty'], @@ -63,7 +81,7 @@ def geth_to_parity(genesis): # } # } }, - 'genesis': { + "genesis": { "seal": { "generic": "0x0" # 'ethereum': { @@ -71,57 +89,67 @@ def 
geth_to_parity(genesis): # 'mixHash': '0x0000000000000000000000000000000000000000000000000000000000000000' # } }, - 'difficulty': "0x%s" % genesis['difficulty'], - 'gasLimit': "0x%s" % genesis['gasLimit'], - 'author': list(genesis['alloc'])[-1] + "difficulty": "0x%s" % genesis["difficulty"], + "gasLimit": "0x%s" % genesis["gasLimit"], + "author": list(genesis["alloc"])[-1], }, - 'params': { - 'networkID' : "0x%x" % genesis['config']['chainId'], - 'maximumExtraDataSize': '0x20', - 'minGasLimit': "0x%s" % genesis['gasLimit'], - 'gasLimitBoundDivisor': '1', - 'eip150Transition': "0x%x" % genesis['config']['eip150Block'], - 'eip160Transition': '0x0', - 'eip161abcTransition': '0x0', - 'eip161dTransition': '0x0', - 'eip155Transition': "0x%x" % genesis['config']['eip155Block'], - 'eip98Transition': '0x7fffffffffffff', + "params": { + "networkID": "0x%x" % genesis["config"]["chainId"], + "maximumExtraDataSize": "0x20", + "minGasLimit": "0x%s" % genesis["gasLimit"], + "gasLimitBoundDivisor": "1", + "eip150Transition": "0x%x" % genesis["config"]["eip150Block"], + "eip160Transition": "0x0", + "eip161abcTransition": "0x0", + "eip161dTransition": "0x0", + "eip155Transition": "0x%x" % genesis["config"]["eip155Block"], + "eip98Transition": "0x7fffffffffffff", # 'eip86Transition': '0x7fffffffffffff', - 'maxCodeSize': 24576, - 'maxCodeSizeTransition': '0x0', - 'eip140Transition': '0x0', - 'eip211Transition': '0x0', - 'eip214Transition': '0x0', - 'eip658Transition': '0x0', - 'wasmActivationTransition': '0x0' + "maxCodeSize": 24576, + "maxCodeSizeTransition": "0x0", + "eip140Transition": "0x0", + "eip211Transition": "0x0", + "eip214Transition": "0x0", + "eip658Transition": "0x0", + "wasmActivationTransition": "0x0", }, - 'accounts': dict(genesis['alloc']) + "accounts": dict(genesis["alloc"]), } - if 'constantinopleBlock' in genesis['config']: - block = "0x%x" % genesis['config']['constantinopleBlock'] - ret['params']['eip145Transition'] = block - ret['params']['eip1014Transition'] = block - ret['params']['eip1052Transition'] = block + if "constantinopleBlock" in genesis["config"]: + block = "0x%x" % genesis["config"]["constantinopleBlock"] + ret["params"]["eip145Transition"] = block + ret["params"]["eip1014Transition"] = block + ret["params"]["eip1052Transition"] = block return ret -DEFAULT_PRIVATE_KEYS = [0xf2f48ee19680706196e2e339e5da3491186e0c4c5030670656b0e0164837257d, - 0x5d862464fe9303452126c8bc94274b8c5f9874cbd219789b3eb2128075a76f72, - 0xdf02719c4df8b9b8ac7f551fcb5d9ef48fa27eef7a66453879f4d8fdc6e78fb1, - 0xff12e391b79415e941a94de3bf3a9aee577aed0731e297d5cfa0b8a1e02fa1d0, - 0x752dd9cf65e68cfaba7d60225cbdbc1f4729dd5e5507def72815ed0d8abc6249, - 0xefb595a0178eb79a8df953f87c5148402a224cdf725e88c0146727c6aceadccd, - 0x83c6d2cc5ddcf9711a6d59b417dc20eb48afd58d45290099e5987e3d768f328f, - 0xbb2d3f7c9583780a7d3904a2f55d792707c345f21de1bacb2d389934d82796b2, - 0xb2fd4d29c1390b71b8795ae81196bfd60293adf99f9d32a0aff06288fcdac55f, - 0x23cb7121166b9a2f93ae0b7c05bde02eae50d64449b2cbb42bc84e9d38d6cc89] - -def make_accounts(num_accounts, default_balance = None): + +DEFAULT_PRIVATE_KEYS = [ + 0xF2F48EE19680706196E2E339E5DA3491186E0C4C5030670656B0E0164837257D, + 0x5D862464FE9303452126C8BC94274B8C5F9874CBD219789B3EB2128075A76F72, + 0xDF02719C4DF8B9B8AC7F551FCB5D9EF48FA27EEF7A66453879F4D8FDC6E78FB1, + 0xFF12E391B79415E941A94DE3BF3A9AEE577AED0731E297D5CFA0B8A1E02FA1D0, + 0x752DD9CF65E68CFABA7D60225CBDBC1F4729DD5E5507DEF72815ED0D8ABC6249, + 0xEFB595A0178EB79A8DF953F87C5148402A224CDF725E88C0146727C6ACEADCCD, + 
0x83C6D2CC5DDCF9711A6D59B417DC20EB48AFD58D45290099E5987E3D768F328F, + 0xBB2D3F7C9583780A7D3904A2F55D792707C345F21DE1BACB2D389934D82796B2, + 0xB2FD4D29C1390B71B8795AE81196BFD60293ADF99F9D32A0AFF06288FCDAC55F, + 0x23CB7121166B9A2F93AE0B7C05BDE02EAE50D64449B2CBB42BC84E9D38D6CC89, +] + + +def make_accounts(num_accounts, default_balance=None): ret = [] if num_accounts > len(DEFAULT_PRIVATE_KEYS): - raise Exception('TODO: Too many accounts') + raise Exception("TODO: Too many accounts") for i in range(num_accounts): acct = w3.eth.account.from_key(DEFAULT_PRIVATE_KEYS[i]) - ret.append(Account(address=int(acct.address, 16), private_key=int(acct.privateKey.hex(), 16), balance=default_balance)) + ret.append( + Account( + address=int(acct.address, 16), + private_key=int(acct.privateKey.hex(), 16), + balance=default_balance, + ) + ) return ret diff --git a/etheno/geth.py b/etheno/geth.py index 1d699d6..6f6f7dd 100644 --- a/etheno/geth.py +++ b/etheno/geth.py @@ -8,60 +8,93 @@ from .jsonrpcclient import JSONRPCClient from .utils import format_hex_address + def ltrim_ansi(text): if text.startswith(logger.ANSI_RESET): - return ltrim_ansi(text[len(logger.ANSI_RESET):]) + return ltrim_ansi(text[len(logger.ANSI_RESET) :]) elif text.startswith(logger.ANSI_BOLD): - return ltrim_ansi(text[len(logger.ANSI_BOLD):]) + return ltrim_ansi(text[len(logger.ANSI_BOLD) :]) for color in logger.CGAColors: ansi = logger.ANSI_COLOR % (30 + color.value) if text.startswith(ansi): - return ltrim_ansi(text[len(ansi):]) + return ltrim_ansi(text[len(ansi) :]) for color in logger.CGAColors: ansi = f"\033[{30 + color.value}m" if text.startswith(ansi): - return ltrim_ansi(text[len(ansi):]) + return ltrim_ansi(text[len(ansi) :]) return text + class GethClient(JSONRPCClient): def __init__(self, genesis, port=8546): - super().__init__('Geth', genesis, port) + super().__init__("Geth", genesis, port) atexit.register(GethClient.shutdown.__get__(self, GethClient)) + def initialized(self): def log(logger, message): msg = ltrim_ansi(message) - if msg.startswith('ERROR'): + if msg.startswith("ERROR"): logger.error(msg[5:].lstrip()) - elif msg.startswith('WARNING'): + elif msg.startswith("WARNING"): logger.warning(msg[7:].lstrip()) - elif msg.startswith('WARN'): + elif msg.startswith("WARN"): logger.warning(msg[4:].lstrip()) - elif msg.startswith('DEBUG'): + elif msg.startswith("DEBUG"): logger.debug(msg[5:].lstrip()) - elif msg.startswith('INFO'): + elif msg.startswith("INFO"): logger.info(msg[4:].lstrip()) else: logger.info(message) + self.instance.log = log def etheno_set(self): super().etheno_set() try: - args = ['/usr/bin/env', 'geth', 'init', self.logger.to_log_path(self.genesis_file), '--datadir', self.logger.to_log_path(self.datadir)] + args = [ + "/usr/bin/env", + "geth", + "init", + self.logger.to_log_path(self.genesis_file), + "--datadir", + self.logger.to_log_path(self.datadir), + ] self.add_to_run_script(args) - subprocess.check_call(args, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, cwd=self.log_directory) + subprocess.check_call( + args, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + cwd=self.log_directory, + ) except Exception as e: self.cleanup() raise e def import_account(self, private_key): - content = format_hex_address(private_key).encode('utf-8') + bytes([ord('\n')]) - import_dir = os.path.join(self.log_directory, 'private_keys') - keyfile = self.logger.make_constant_logged_file(content, prefix='private', suffix='.key', dir=import_dir) + content = format_hex_address(private_key).encode("utf-8") + 
bytes([ord("\n")]) + import_dir = os.path.join(self.log_directory, "private_keys") + keyfile = self.logger.make_constant_logged_file( + content, prefix="private", suffix=".key", dir=import_dir + ) while True: - args = ['/usr/bin/env', 'geth', 'account', 'import', '--datadir', self.logger.to_log_path(self.datadir), '--password', self.logger.to_log_path(self.passwords), self.logger.to_log_path(keyfile)] + args = [ + "/usr/bin/env", + "geth", + "account", + "import", + "--datadir", + self.logger.to_log_path(self.datadir), + "--password", + self.logger.to_log_path(self.passwords), + self.logger.to_log_path(keyfile), + ] self.add_to_run_script(args) - geth = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.log_directory) + geth = subprocess.Popen( + args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=self.log_directory, + ) if geth.wait() == 0: return # This sometimes happens with geth, I have no idea why, so just try again @@ -72,8 +105,13 @@ def post(self, data): try: return super().post(data) except JSONRPCError as e: - if e.result['error']['code'] == -32000 and 'authentication needed' in e.result['error']['message']: - self.logger.info("Waiting for Geth to finish unlocking our accounts...") + if ( + e.result["error"]["code"] == -32000 + and "authentication needed" in e.result["error"]["message"] + ): + self.logger.info( + "Waiting for Geth to finish unlocking our accounts..." + ) time.sleep(3.0) else: raise e @@ -89,10 +127,34 @@ def get_start_command(self, unlock_accounts=True): verbosity = 3 else: verbosity = 4 - base_args = ['/usr/bin/env', 'geth', '--nodiscover', '--rpc', '--rpcport', "%d" % self.port, '--networkid', "%d" % self.genesis['config']['chainId'], '--datadir', self.logger.to_log_path(self.datadir), '--mine', '--etherbase', format_hex_address(self.miner_account.address), f"--verbosity={verbosity}", '--minerthreads=1'] + base_args = [ + "/usr/bin/env", + "geth", + "--nodiscover", + "--rpc", + "--rpcport", + "%d" % self.port, + "--networkid", + "%d" % self.genesis["config"]["chainId"], + "--datadir", + self.logger.to_log_path(self.datadir), + "--mine", + "--etherbase", + format_hex_address(self.miner_account.address), + f"--verbosity={verbosity}", + "--minerthreads=1", + ] if unlock_accounts: - addresses = filter(lambda a : a != format_hex_address(self.miner_account.address), map(format_hex_address, self.genesis['alloc'])) - unlock_args = ['--unlock', ','.join(addresses), '--password', self.passwords] + addresses = filter( + lambda a: a != format_hex_address(self.miner_account.address), + map(format_hex_address, self.genesis["alloc"]), + ) + unlock_args = [ + "--unlock", + ",".join(addresses), + "--password", + self.passwords, + ] else: unlock_args = [] return base_args + unlock_args diff --git a/etheno/jsonrpc.py b/etheno/jsonrpc.py index a7f25be..140db77 100644 --- a/etheno/jsonrpc.py +++ b/etheno/jsonrpc.py @@ -1,9 +1,11 @@ import json from typing import Dict, TextIO, Union +from pyrsistent import m + from .etheno import EthenoPlugin from .utils import format_hex_address - +from .client import JSONRPCError # source: https://ethereum.stackexchange.com/a/83855 import rlp @@ -43,18 +45,18 @@ def decode_raw_tx(raw_tx: str): s = hex(tx.s) chain_id = (tx.v - 35) // 2 if tx.v % 2 else (tx.v - 36) // 2 return { - 'txHash': hash_tx, - 'from': from_, - 'to': to, - 'nonce': tx.nonce, - 'gas': hex(tx.gas), - 'gasPrice': hex(tx.gas_price), - 'value': hex(tx.value), - 'data': data, - 'chainId': chain_id, - 'r': r, - 's': s, - 'v': tx.v + "txHash": 
hash_tx, + "from": from_, + "to": to, + "nonce": tx.nonce, + "gas": hex(tx.gas), + "gasPrice": hex(tx.gas_price), + "value": hex(tx.value), + "data": data, + "chainId": chain_id, + "r": r, + "s": s, + "v": tx.v, } @@ -62,10 +64,10 @@ class JSONExporter: def __init__(self, out_stream: Union[str, TextIO]): self._was_path = isinstance(out_stream, str) if self._was_path: - self.output = open(out_stream, 'w', encoding='utf8') + self.output = open(out_stream, "w", encoding="utf8") else: self.output = out_stream - self.output.write('[') + self.output.write("[") self._count = 0 self._finalized = False @@ -73,8 +75,8 @@ def finalize(self): if self._finalized: return if self._count: - self.output.write('\n') - self.output.write(']') + self.output.write("\n") + self.output.write("]") self.output.flush() if self._was_path: self.output.close() @@ -84,70 +86,150 @@ def write_entry(self, entry): if self._finalized: return if self._count > 0: - self.output.write(',') + self.output.write(",") self._count += 1 - self.output.write('\n') + self.output.write("\n") json.dump(entry, self.output) self.output.flush() - + class JSONRPCExportPlugin(EthenoPlugin): def __init__(self, out_stream: Union[str, TextIO]): self._exporter = JSONExporter(out_stream) - + def after_post(self, post_data, client_results): self._exporter.write_entry([post_data, client_results]) def finalize(self): self._exporter.finalize() - if hasattr(self._exporter.output, 'name'): - self.logger.info(f'Raw JSON RPC messages dumped to {self._exporter.output.name}') + if hasattr(self._exporter.output, "name"): + self.logger.info( + f"Raw JSON RPC messages dumped to {self._exporter.output.name}" + ) class EventSummaryPlugin(EthenoPlugin): def __init__(self): - self._transactions: Dict[int, Dict[str, object]] = {} # Maps transaction hashes to their eth_sendTransaction arguments + self._transactions: Dict[ + int, Dict[str, object] + ] = {} # Maps transaction hashes to their eth_sendTransaction arguments + + def handle_contract_created( + self, + creator_address: str, + contract_address: str, + gas_used: str, + gas_price: str, + data: str, + value: str, + ): + self.logger.info( + f"Contract created at {contract_address} with {(len(data)-2)//2} bytes of data by account {creator_address} for {gas_used} gas with a gas price of {gas_price}" + ) - def handle_contract_created(self, creator_address: str, contract_address: str, gas_used: str, gas_price: str, data: str, value: str): - self.logger.info(f'Contract created at {contract_address} with {(len(data)-2)//2} bytes of data by account {creator_address} for {gas_used} gas with a gas price of {gas_price}') + def handle_function_call( + self, + from_address: str, + to_address: str, + gas_used: str, + gas_price: str, + data: str, + value: str, + ): + self.logger.info( + f"Function call with {value} wei from {from_address} to {to_address} with {(len(data)-2)//2} bytes of data for {gas_used} gas with a gas price of {gas_price}" + ) - def handle_function_call(self, from_address: str, to_address: str, gas_used: str, gas_price: str, data: str, value: str): - self.logger.info(f'Function call with {value} wei from {from_address} to {to_address} with {(len(data)-2)//2} bytes of data for {gas_used} gas with a gas price of {gas_price}') + def handle_unlogged_transactions(self): + unlogged_transactions = dict( + filter(lambda txn: txn[1]["is_logged"] == False, self._transactions.items()) + ) + for (tx_hash, txn) in unlogged_transactions.items(): + post_data = self._etheno.get_transaction_receipt_request(tx_hash) + 
self._etheno.post(post_data) def after_post(self, post_data, result): if len(result): result = result[0] - if 'method' not in post_data: + if "method" not in post_data: return - elif (post_data['method'] == 'eth_sendTransaction' or post_data['method'] == 'eth_sendRawTransaction') and 'result' in result: - try: - transaction_hash = int(result['result'], 16) - except ValueError: - return - if post_data['method'] == 'eth_sendRawTransaction': - self._transactions[transaction_hash] = decode_raw_tx(post_data['params'][0]) + # Fixes bug that occurs when a JSONRPCError is attempted to be logged + if isinstance(result, JSONRPCError): + self.logger.info( + f"Received a JSON RPC Error when logging transaction...skipping event logging" + ) + return + + elif ( + post_data["method"] == "eth_sendTransaction" + or post_data["method"] == "eth_sendRawTransaction" + ) and "result" in result: + transaction_hash = result["result"] + # Add a boolean to check at shutdown whether everything has been logged. + if post_data["method"] == "eth_sendRawTransaction": + self._transactions[transaction_hash] = { + "transaction": decode_raw_tx(post_data["params"][0]), + "is_logged": False, + } else: - self._transactions[transaction_hash] = post_data['params'][0] - elif post_data['method'] == 'evm_mine': + self._transactions[transaction_hash] = { + "transaction": post_data["params"][0], + "is_logged": False, + } + elif post_data["method"] == "evm_mine": self.handle_increase_block_number() - elif post_data['method'] == 'evm_increaseTime': - self.handle_increase_block_timestamp(post_data['params'][0]) - elif post_data['method'] == 'eth_getTransactionReceipt': - transaction_hash = int(post_data['params'][0], 16) + elif post_data["method"] == "evm_increaseTime": + self.handle_increase_block_timestamp(post_data["params"][0]) + elif post_data["method"] == "eth_getTransactionReceipt": + transaction_hash = post_data["params"][0] if transaction_hash not in self._transactions: - self.logger.error(f'Received transaction receipt {result} for unknown transaction hash {post_data["params"][0]}') + self.logger.error( + f'Received transaction receipt {result} for unknown transaction hash {post_data["params"][0]}' + ) + return + (original_transaction, is_logged) = ( + self._transactions[transaction_hash]["transaction"], + self._transactions[transaction_hash]["is_logged"], + ) + # Check if it was logged already + if is_logged: + self.logger.debug( + f"Transaction hash {transaction_hash} has already been logged. This should not happen." 
+ ) return - original_transaction = self._transactions[transaction_hash] - if 'value' not in original_transaction or original_transaction['value'] is None: - value = '0x0' + if ( + "value" not in original_transaction + or original_transaction["value"] is None + ): + value = "0x0" else: - value = original_transaction['value'] - if 'to' not in result['result'] or result['result']['to'] is None: + value = original_transaction["value"] + if "to" not in result["result"] or result["result"]["to"] is None: # this transaction is creating a contract: - contract_address = result['result']['contractAddress'] - self.handle_contract_created(original_transaction['from'], contract_address, result['result']['gasUsed'], original_transaction['gasPrice'], original_transaction['data'], value) + # TODO: key errors are likely here...need to figure out a better way to do error handling + # TODO: log a warning about non-zero ether values + contract_address = result["result"]["contractAddress"] + self.handle_contract_created( + original_transaction["from"], + contract_address, + result["result"]["gasUsed"], + result["result"]["effectiveGasPrice"], + original_transaction["data"], + value, + ) else: - self.handle_function_call(original_transaction['from'], original_transaction['to'], result['result']['gasUsed'], original_transaction['gasPrice'], original_transaction['data'] if 'data' in original_transaction else '0x', value) + self.handle_function_call( + original_transaction["from"], + original_transaction["to"], + result["result"]["gasUsed"], + result["result"]["effectiveGasPrice"], + original_transaction["data"] + if "data" in original_transaction + else "0x", + value, + ) + # Transaction has been logged successfully + self._transactions[transaction_hash]["is_logged"] = True class EventSummaryExportPlugin(EventSummaryPlugin): @@ -157,51 +239,77 @@ def __init__(self, out_stream: Union[str, TextIO]): def run(self): for address in self.etheno.accounts: - self._exporter.write_entry({ - 'event' : 'AccountCreated', - 'address' : format_hex_address(address) - }) + self._exporter.write_entry( + {"event": "AccountCreated", "address": format_hex_address(address)} + ) super().run() def handle_increase_block_number(self): - self._exporter.write_entry({ - 'event' : 'BlockMined', - 'number_increment' : "1", - 'timestamp_increment' : "0" - }) - - def handle_increase_block_timestamp(self, number : str): - self._exporter.write_entry({ - 'event' : 'BlockMined', - 'number_increment' : "0", - 'timestamp_increment': str(number) - }) - - def handle_contract_created(self, creator_address: str, contract_address: str, gas_used: str, gas_price: str, data: str, value: str): - self._exporter.write_entry({ - 'event' : 'ContractCreated', - 'from' : creator_address, - 'contract_address' : contract_address, - 'gas_used' : gas_used, - 'gas_price' : gas_price, - 'data' : data, - 'value' : value - }) - super().handle_contract_created(creator_address, contract_address, gas_used, gas_price, data, value) - - def handle_function_call(self, from_address: str, to_address: str, gas_used: str, gas_price: str, data: str, value: str): - self._exporter.write_entry({ - 'event' : 'FunctionCall', - 'from' : from_address, - 'to' : to_address, - 'gas_used' : gas_used, - 'gas_price' : gas_price, - 'data' : data, - 'value' : value - }) - super().handle_function_call(from_address, to_address, gas_used, gas_price, data, value) + self._exporter.write_entry( + {"event": "BlockMined", "number_increment": "1", "timestamp_increment": "0"} + ) + + def 
handle_increase_block_timestamp(self, number: str): + self._exporter.write_entry( + { + "event": "BlockMined", + "number_increment": "0", + "timestamp_increment": str(number), + } + ) + + def handle_contract_created( + self, + creator_address: str, + contract_address: str, + gas_used: str, + gas_price: str, + data: str, + value: str, + ): + self._exporter.write_entry( + { + "event": "ContractCreated", + "from": creator_address, + "contract_address": contract_address, + "gas_used": gas_used, + "gas_price": gas_price, + "data": data, + "value": value, + } + ) + super().handle_contract_created( + creator_address, contract_address, gas_used, gas_price, data, value + ) + + def handle_function_call( + self, + from_address: str, + to_address: str, + gas_used: str, + gas_price: str, + data: str, + value: str, + ): + self._exporter.write_entry( + { + "event": "FunctionCall", + "from": from_address, + "to": to_address, + "gas_used": gas_used, + "gas_price": gas_price, + "data": data, + "value": value, + } + ) + super().handle_function_call( + from_address, to_address, gas_used, gas_price, data, value + ) def finalize(self): + super().handle_unlogged_transactions() self._exporter.finalize() - if hasattr(self._exporter.output, 'name'): - self.logger.info(f'Event summary JSON saved to {self._exporter.output.name}') + if hasattr(self._exporter.output, "name"): + self.logger.info( + f"Event summary JSON saved to {self._exporter.output.name}" + ) diff --git a/etheno/jsonrpcclient.py b/etheno/jsonrpcclient.py index a7b3b45..5b05b73 100644 --- a/etheno/jsonrpcclient.py +++ b/etheno/jsonrpcclient.py @@ -17,9 +17,9 @@ def __init__(self, name, genesis, port=8546): self.port = port self.genesis = copy.deepcopy(genesis) self.miner_account = make_accounts(1)[0] - self.genesis['alloc'][format_hex_address(self.miner_account.address)] = { - 'balance': '0', - 'privateKey': format_hex_address(self.miner_account.private_key) + self.genesis["alloc"][format_hex_address(self.miner_account.address)] = { + "balance": "0", + "privateKey": format_hex_address(self.miner_account.private_key), } self._accounts = [] self._created_address_index = -1 @@ -32,27 +32,31 @@ def __init__(self, name, genesis, port=8546): self.instance = None def write_genesis(self, outfile): - outfile.write(json.dumps(self.genesis).encode('utf-8')) + outfile.write(json.dumps(self.genesis).encode("utf-8")) def write_passwords(self, outfile): - for i in range(len(self.genesis['alloc'])): - outfile.write(b'etheno\n') - + for i in range(len(self.genesis["alloc"])): + outfile.write(b"etheno\n") + def etheno_set(self): super().etheno_set() - self.datadir = os.path.join(self.log_directory, 'chain_data') + self.datadir = os.path.join(self.log_directory, "chain_data") os.makedirs(self.datadir) - with self.logger.make_logged_file(prefix='genesis', suffix='.json') as genesis_output: + with self.logger.make_logged_file( + prefix="genesis", suffix=".json" + ) as genesis_output: self.genesis_file = genesis_output.name self.write_genesis(genesis_output) genesis_output.close() - with self.logger.make_logged_file(prefix=self._basename, suffix='.passwd') as password_output: + with self.logger.make_logged_file( + prefix=self._basename, suffix=".passwd" + ) as password_output: self.passwords = password_output.name self.write_passwords(password_output) def add_to_run_script(self, command): if isinstance(command, Sequence): - command = ' '.join(command) + command = " ".join(command) self._runscript.append(command) def import_account(self, private_key): @@ -60,7 +64,7 @@ def 
import_account(self, private_key): @property def accounts(self): - for addr, bal in self.genesis['alloc'].items(): + for addr, bal in self.genesis["alloc"].items(): yield int(addr, 16) def create_account(self, balance: int = 0, address=None): @@ -68,22 +72,29 @@ def create_account(self, balance: int = 0, address=None): if address is None: self._created_address_index += 1 if self._created_address_index >= len(accounts): - raise Exception("Ran out of %s genesis accounts and could not create a new one!" % self.short_name) + raise Exception( + "Ran out of %s genesis accounts and could not create a new one!" + % self.short_name + ) return accounts[self._created_address_index] elif address not in accounts: - valid_accounts = '\n'.join(map(hex, accounts)) - raise ValueError(f"Account {address!s} did not exist in the genesis for client {self.short_name}! " - f"Valid accounts:\n{valid_accounts}") + valid_accounts = "\n".join(map(hex, accounts)) + raise ValueError( + f"Account {address!s} did not exist in the genesis for client {self.short_name}! " + f"Valid accounts:\n{valid_accounts}" + ) else: return address def get_start_command(self, unlock_accounts=True): raise NotImplementedError() - + def save_run_script(self): - run_script = os.path.join(self.log_directory, "run_%s.sh" % self._basename.lower()) - with open(run_script, 'w') as f: - script = '\n'.join(self._runscript) + run_script = os.path.join( + self.log_directory, "run_%s.sh" % self._basename.lower() + ) + with open(run_script, "w") as f: + script = "\n".join(self._runscript) f.write(script) # make the script executable: os.chmod(run_script, 0o755) diff --git a/etheno/keyfile.py b/etheno/keyfile.py index 5600002..f55bab7 100644 --- a/etheno/keyfile.py +++ b/etheno/keyfile.py @@ -5,7 +5,7 @@ # This file is licensed under the MIT License (MIT) # # Copyright (c) 2017 Piper Merriam -# +# # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights @@ -61,7 +61,9 @@ def load_keyfile(path_or_file_obj): return json.load(path_or_file_obj) -def create_keyfile_json(private_key, password, version=3, kdf="pbkdf2", iterations=None): +def create_keyfile_json( + private_key, password, version=3, kdf="pbkdf2", iterations=None +): if version == 3: return _create_v3_keyfile_json(private_key, password, kdf, iterations) else: @@ -70,7 +72,7 @@ def create_keyfile_json(private_key, password, version=3, kdf="pbkdf2", iteratio def decode_keyfile_json(raw_keyfile_json, password): keyfile_json = normalize_keys(raw_keyfile_json) - version = keyfile_json['version'] + version = keyfile_json["version"] if version == 3: return _decode_keyfile_json_v3(keyfile_json, password) @@ -114,21 +116,21 @@ def _create_v3_keyfile_json(private_key, password, kdf, work_factor=None): if work_factor is None: work_factor = get_default_work_factor_for_kdf(kdf) - if kdf == 'pbkdf2': + if kdf == "pbkdf2": derived_key = _pbkdf2_hash( password, - hash_name='sha256', + hash_name="sha256", salt=salt, iterations=work_factor, dklen=DKLEN, ) kdfparams = { - 'c': work_factor, - 'dklen': DKLEN, - 'prf': 'hmac-sha256', - 'salt': encode_hex_no_prefix(salt), + "c": work_factor, + "dklen": DKLEN, + "prf": "hmac-sha256", + "salt": encode_hex_no_prefix(salt), } - elif kdf == 'scrypt': + elif kdf == "scrypt": derived_key = _scrypt_hash( password, salt=salt, @@ -138,11 +140,11 @@ def _create_v3_keyfile_json(private_key, password, 
kdf, work_factor=None): n=work_factor, ) kdfparams = { - 'dklen': DKLEN, - 'n': work_factor, - 'r': SCRYPT_R, - 'p': SCRYPT_P, - 'salt': encode_hex_no_prefix(salt), + "dklen": DKLEN, + "n": work_factor, + "r": SCRYPT_R, + "p": SCRYPT_P, + "salt": encode_hex_no_prefix(salt), } else: raise NotImplementedError("KDF not implemented: {0}".format(kdf)) @@ -155,19 +157,19 @@ def _create_v3_keyfile_json(private_key, password, kdf, work_factor=None): address = keys.PrivateKey(private_key).public_key.to_address() return { - 'address': remove_0x_prefix(address), - 'crypto': { - 'cipher': 'aes-128-ctr', - 'cipherparams': { - 'iv': encode_hex_no_prefix(int_to_big_endian(iv)), + "address": remove_0x_prefix(address), + "crypto": { + "cipher": "aes-128-ctr", + "cipherparams": { + "iv": encode_hex_no_prefix(int_to_big_endian(iv)), }, - 'ciphertext': encode_hex_no_prefix(ciphertext), - 'kdf': kdf, - 'kdfparams': kdfparams, - 'mac': encode_hex_no_prefix(mac), + "ciphertext": encode_hex_no_prefix(ciphertext), + "kdf": kdf, + "kdfparams": kdfparams, + "mac": encode_hex_no_prefix(mac), }, - 'id': str(uuid.uuid4()), - 'version': 3, + "id": str(uuid.uuid4()), + "version": 3, } @@ -175,23 +177,23 @@ def _create_v3_keyfile_json(private_key, password, kdf, work_factor=None): # Verson 3 decoder # def _decode_keyfile_json_v3(keyfile_json, password): - crypto = keyfile_json['crypto'] - kdf = crypto['kdf'] + crypto = keyfile_json["crypto"] + kdf = crypto["kdf"] # Derive the encryption key from the password using the key derivation # function. - if kdf == 'pbkdf2': + if kdf == "pbkdf2": derived_key = _derive_pbkdf_key(crypto, password) - elif kdf == 'scrypt': + elif kdf == "scrypt": derived_key = _derive_scrypt_key(crypto, password) else: raise TypeError("Unsupported key derivation function: {0}".format(kdf)) # Validate that the derived key matchs the provided MAC - ciphertext = decode_hex(crypto['ciphertext']) + ciphertext = decode_hex(crypto["ciphertext"]) mac = keccak(derived_key[16:32] + ciphertext) - expected_mac = decode_hex(crypto['mac']) + expected_mac = decode_hex(crypto["mac"]) if not hmac.compare_digest(mac, expected_mac): raise ValueError("MAC mismatch") @@ -199,8 +201,8 @@ def _decode_keyfile_json_v3(keyfile_json, password): # Decrypt the ciphertext using the derived encryption key to get the # private key. 
encrypt_key = derived_key[:16] - cipherparams = crypto['cipherparams'] - iv = big_endian_to_int(decode_hex(cipherparams['iv'])) + cipherparams = crypto["cipherparams"] + iv = big_endian_to_int(decode_hex(cipherparams["iv"])) private_key = decrypt_aes_ctr(ciphertext, encrypt_key, iv) @@ -211,12 +213,12 @@ def _decode_keyfile_json_v3(keyfile_json, password): # Key derivation # def _derive_pbkdf_key(crypto, password): - kdf_params = crypto['kdfparams'] - salt = decode_hex(kdf_params['salt']) - dklen = kdf_params['dklen'] - should_be_hmac, _, hash_name = kdf_params['prf'].partition('-') - assert should_be_hmac == 'hmac' - iterations = kdf_params['c'] + kdf_params = crypto["kdfparams"] + salt = decode_hex(kdf_params["salt"]) + dklen = kdf_params["dklen"] + should_be_hmac, _, hash_name = kdf_params["prf"].partition("-") + assert should_be_hmac == "hmac" + iterations = kdf_params["c"] derive_pbkdf_key = _pbkdf2_hash(password, hash_name, salt, iterations, dklen) @@ -224,12 +226,12 @@ def _derive_pbkdf_key(crypto, password): def _derive_scrypt_key(crypto, password): - kdf_params = crypto['kdfparams'] - salt = decode_hex(kdf_params['salt']) - p = kdf_params['p'] - r = kdf_params['r'] - n = kdf_params['n'] - buflen = kdf_params['dklen'] + kdf_params = crypto["kdfparams"] + salt = decode_hex(kdf_params["salt"]) + p = kdf_params["p"] + r = kdf_params["r"] + n = kdf_params["n"] + buflen = kdf_params["dklen"] derived_scrypt_key = _scrypt_hash( password, @@ -287,13 +289,22 @@ def encrypt_aes_ctr(value, key, iv): # Utility # def get_default_work_factor_for_kdf(kdf): - if kdf == 'pbkdf2': + if kdf == "pbkdf2": return 1000000 - elif kdf == 'scrypt': + elif kdf == "scrypt": return 262144 else: raise ValueError("Unsupported key derivation function: {0}".format(kdf)) -if __name__ == '__main__': + +if __name__ == "__main__": from utils import int_to_bytes - print(create_keyfile_json(int_to_bytes(0xda105ebbede69170f1d13c70b0b82715a7cb46740779457d7ee4fbfa54ba95c1), b'etheno')) + + print( + create_keyfile_json( + int_to_bytes( + 0xDA105EBBEDE69170F1D13C70B0B82715A7CB46740779457D7EE4FBFA54BA95C1 + ), + b"etheno", + ) + ) diff --git a/etheno/logger.py b/etheno/logger.py index 10298f0..7ff3944 100644 --- a/etheno/logger.py +++ b/etheno/logger.py @@ -4,7 +4,7 @@ import tempfile import threading import time -from typing import Callable, List, Optional, Union +from typing import Callable, List, Optional, Union, Any import ptyprocess @@ -30,10 +30,10 @@ class CGAColors(enum.Enum): WARNING: CGAColors.YELLOW, INFO: CGAColors.GREEN, DEBUG: CGAColors.CYAN, - NOTSET: CGAColors.BLUE + NOTSET: CGAColors.BLUE, } - +# TODO: seems like this function can be removed, no references? 
def formatter_message(message: str, use_color: bool = True) -> str: if use_color: message = message.replace("$RESET", RESET_SEQ).replace("$BOLD", BOLD_SEQ) @@ -60,30 +60,30 @@ class ColorFormatter(ComposableFormatter): def reformat(self, fmt: str) -> str: for color in CGAColors: fmt = fmt.replace("$%s" % color.name, ANSI_COLOR % (30 + color.value)) - fmt = fmt.replace('$RESET', ANSI_RESET) - fmt = fmt.replace('$BOLD', ANSI_BOLD) + fmt = fmt.replace("$RESET", ANSI_RESET) + fmt = fmt.replace("$BOLD", ANSI_BOLD) return fmt @staticmethod def remove_color(fmt: str) -> str: for color in CGAColors: - fmt = fmt.replace("$%s" % color.name, '') - fmt = fmt.replace('$RESET', '') - fmt = fmt.replace('$BOLD', '') - fmt = fmt.replace('$LEVELCOLOR', '') + fmt = fmt.replace("$%s" % color.name, "") + fmt = fmt.replace("$RESET", "") + fmt = fmt.replace("$BOLD", "") + fmt = fmt.replace("$LEVELCOLOR", "") return fmt def new_formatter(self, fmt: str, *args, **kwargs) -> logging.Formatter: - if 'datefmt' in kwargs: - kwargs['datefmt'] = self.reformat(kwargs['datefmt']) + if "datefmt" in kwargs: + kwargs["datefmt"] = self.reformat(kwargs["datefmt"]) return super().new_formatter(self.reformat(fmt), *args, **kwargs) def format(self, *args, **kwargs) -> str: levelcolor = LEVEL_COLORS.get(args[0].levelno, LEVEL_COLORS[NOTSET]) ret = self._parent_formatter.format(*args, **kwargs) - ret = ret.replace('$LEVELCOLOR', ANSI_COLOR % (30 + levelcolor.value)) - ret = ret.replace('\n', self.reformat('$RESET $BOLD$BLUE\\$RESET\n'), 1) - ret = ret.replace('\n', self.reformat('\n$RESET$BOLD$BLUE> $RESET')) + ret = ret.replace("$LEVELCOLOR", ANSI_COLOR % (30 + levelcolor.value)) + ret = ret.replace("\n", self.reformat("$RESET $BOLD$BLUE\\$RESET\n"), 1) + ret = ret.replace("\n", self.reformat("\n$RESET$BOLD$BLUE> $RESET")) return ret @@ -108,13 +108,6 @@ def getLogger(name: str): ret = ETHENO_LOGGERS[name] else: ret = _LOGGING_GETLOGGER(name) - # ####BEGIN#### - # Horrible hack to workaround Manticore's global logging system. - # This can be removed after https://github.com/trailofbits/manticore/issues/1369 - # is resolved. 
- if name.startswith('manticore'): - ret.propagate = False - # ####END#### return ret @@ -122,17 +115,23 @@ def getLogger(name: str): class EthenoLogger: - DEFAULT_FORMAT = '$RESET$LEVELCOLOR$BOLD%(levelname)-8s $BLUE[$RESET$WHITE%(asctime)14s$BLUE$BOLD]$NAME$RESET ' \ - '%(message)s' - - def __init__(self, - name: str, - log_level: Optional[int] = None, - parent: Optional["EthenoLogger"] = None, - cleanup_empty: bool = False, - displayname: Optional[str] = None): + DEFAULT_FORMAT = ( + "$RESET$LEVELCOLOR$BOLD%(levelname)-8s $BLUE[$RESET$WHITE%(asctime)14s$BLUE$BOLD]$NAME$RESET " + "%(message)s" + ) + + def __init__( + self, + name: str, + log_level: Optional[int] = None, + parent: Optional["EthenoLogger"] = None, + cleanup_empty: bool = False, + displayname: Optional[str] = None, + ): if name in ETHENO_LOGGERS: - raise Exception(f'An EthenoLogger instance for name {name} already exists: {ETHENO_LOGGERS[name]}') + raise Exception( + f"An EthenoLogger instance for name {name} already exists: {ETHENO_LOGGERS[name]}" + ) ETHENO_LOGGERS[name] = self self._directory: Optional[str] = None self.parent: Optional[EthenoLogger] = parent @@ -145,14 +144,19 @@ def __init__(self, self.displayname = displayname if log_level is None: if parent is None: - raise ValueError('A logger must be provided a parent if `log_level` is None') + raise ValueError( + "A logger must be provided a parent if `log_level` is None" + ) log_level = parent.log_level self._log_level: int = log_level self._logger: logging.Logger = _LOGGING_GETLOGGER(name) self._handlers: List[logging.Handler] = [logging.StreamHandler()] if log_level is not None: self.log_level = log_level - formatter = ColorFormatter(self.DEFAULT_FORMAT.replace('$NAME', self._name_format()), datefmt='%m$BLUE-$WHITE%d$BLUE|$WHITE%H$BLUE:$WHITE%M$BLUE:$WHITE%S') + formatter = ColorFormatter( + self.DEFAULT_FORMAT.replace("$NAME", self._name_format()), + datefmt="%m$BLUE-$WHITE%d$BLUE|$WHITE%H$BLUE:$WHITE%M$BLUE:$WHITE%S", + ) if self.parent is None: formatter = NonInfoFormatter(formatter) else: @@ -175,8 +179,14 @@ def close(self): os.remove(log_path) # next, check if the output directory can be cleaned up if self.directory: - for dirpath, dirnames, filenames in os.walk(self.directory, topdown=False): - if len(dirnames) == 0 and len(filenames) == 0 and dirpath != self.directory: + for dirpath, dirnames, filenames in os.walk( + self.directory, topdown=False + ): + if ( + len(dirnames) == 0 + and len(filenames) == 0 + and dirpath != self.directory + ): os.rmdir(dirpath) if self._tmpdir is not None: self._tmpdir.cleanup() @@ -187,11 +197,18 @@ def directory(self) -> Optional[str]: return self._directory def _add_child(self, child): - if child in self.children or any(c for c in self.children if c.name == child.name): - raise ValueError("Cannot double-add child logger %s to logger %s" % (child.name, self.name)) + if child in self.children or any( + c for c in self.children if c.name == child.name + ): + raise ValueError( + "Cannot double-add child logger %s to logger %s" + % (child.name, self.name) + ) self.children.append(child) if self.directory is not None: - child.save_to_directory(os.path.join(self.directory, child.name.replace(os.sep, '-'))) + child.save_to_directory( + os.path.join(self.directory, child.name.replace(os.sep, "-")) + ) else: child._tmpdir = tempfile.TemporaryDirectory() child.save_to_directory(child._tmpdir.name) @@ -205,10 +222,15 @@ def _name_format(self): if self.parent is not None and self.parent.parent is not None: ret = 
self.parent._name_format() else: - ret = '' + ret = "" return ret + "[$RESET$WHITE%s$BLUE$BOLD]" % self.displayname - def addHandler(self, handler: logging.Handler, include_descendants: bool = True, set_log_level: bool = True): + def addHandler( + self, + handler: logging.Handler, + include_descendants: bool = True, + set_log_level: bool = True, + ): if set_log_level: handler.setLevel(self.log_level) self._logger.addHandler(handler) @@ -217,16 +239,24 @@ def addHandler(self, handler: logging.Handler, include_descendants: bool = True, self._descendant_handlers.append(handler) for child in self.children: if isinstance(child, EthenoLogger): - child.addHandler(handler, include_descendants=include_descendants, set_log_level=set_log_level) + child.addHandler( + handler, + include_descendants=include_descendants, + set_log_level=set_log_level, + ) else: child.addHandler(handler) - def make_logged_file(self, prefix=None, suffix=None, mode='w+b', dir: Optional[str] = None): + def make_logged_file( + self, prefix=None, suffix=None, mode="w+b", dir: Optional[str] = None + ): """Returns an opened file stream to a unique file created according to the provided naming scheme""" if dir is None: - dir = '' + dir = "" else: - dir = os.path.relpath(os.path.realpath(dir), start=os.path.realpath(self.directory)) + dir = os.path.relpath( + os.path.realpath(dir), start=os.path.realpath(self.directory) + ) os.makedirs(os.path.join(self.directory, dir), exist_ok=True) i = 1 while True: @@ -242,7 +272,7 @@ def make_logged_file(self, prefix=None, suffix=None, mode='w+b', dir: Optional[s def make_constant_logged_file(self, contents: Union[str, bytes], *args, **kwargs): """Creates a logged file, populates it with the provided contents, and returns the absolute path to the file.""" if isinstance(contents, str): - contents = contents.encode('utf-8') + contents = contents.encode("utf-8") with self.make_logged_file(*args, **kwargs) as f: f.write(contents) # type: ignore return os.path.realpath(f.name) @@ -259,18 +289,33 @@ def save_to_file(self, path, include_descendants=True, log_level=None): log_level = self.log_level handler = logging.FileHandler(path) handler.setLevel(log_level) - handler.setFormatter(logging.Formatter(ColorFormatter.remove_color(self.DEFAULT_FORMAT.replace('$NAME', self._name_format())), datefmt='%m-%d|%H:%M:%S')) - self.addHandler(handler, include_descendants=include_descendants, set_log_level=False) + handler.setFormatter( + logging.Formatter( + ColorFormatter.remove_color( + self.DEFAULT_FORMAT.replace("$NAME", self._name_format()) + ), + datefmt="%m-%d|%H:%M:%S", + ) + ) + self.addHandler( + handler, include_descendants=include_descendants, set_log_level=False + ) def save_to_directory(self, path): if self.directory == path: # we are already set to save to this directory return elif self.directory is not None: - raise ValueError("Logger %s's save directory is already set to %s" % (self.name, path)) + raise ValueError( + "Logger %s's save directory is already set to %s" % (self.name, path) + ) self._directory = os.path.realpath(path) os.makedirs(path, exist_ok=True) - self.save_to_file(os.path.join(path, "%s.log" % self.name.replace(os.sep, '-')), include_descendants=False, log_level=DEBUG) + self.save_to_file( + os.path.join(path, "%s.log" % self.name.replace(os.sep, "-")), + include_descendants=False, + log_level=DEBUG, + ) for child in self.children: child.save_to_directory(os.path.join(path, child.name)) @@ -278,7 +323,9 @@ def save_to_directory(self, path): def log_level(self): if 
self._log_level is None: if self.parent is None: - raise ValueError('A logger must be provided a parent if `log_level` is None') + raise ValueError( + "A logger must be provided a parent if `log_level` is None" + ) return self.parent.log_level else: return self._log_level @@ -301,20 +348,23 @@ def __getattr__(self, name): return getattr(self._logger, name) def __repr__(self): - return f'{type(self).__name__}(name={self.name!r}, log_level={self.log_level!r}, parent={self.parent!r}, cleanup_empty={self.cleanup_empty!r}, displayname={self.displayname!r})' + return f"{type(self).__name__}(name={self.name!r}, log_level={self.log_level!r}, parent={self.parent!r}, cleanup_empty={self.cleanup_empty!r}, displayname={self.displayname!r})" + - class StreamLogger(threading.Thread): - def __init__(self, logger: logging.Logger, *streams, newline_char=b'\n'): + def __init__(self, logger: logging.Logger, *streams, newline_char=b"\n"): super().__init__(daemon=True) self.logger: logging.Logger = logger self.streams = streams if isinstance(newline_char, str): - newline_char = newline_char.encode('utf-8') + newline_char = newline_char.encode("utf-8") self._newline_char = newline_char - self._buffers = [b'' for i in range(len(streams))] + self._buffers = [b"" for i in range(len(streams))] self._done: bool = False - self.log: Callable[[logging.Logger, Union[str, bytes]], ...] = lambda lgr, message: lgr.info(message) + # TODO: Made a small change here due to the ellipses not being allowed, make sure it does not create any other issues + self.log: Callable[ + [logging.Logger, Union[str, bytes]], Any + ] = lambda lgr, message: lgr.info(message) def is_done(self) -> bool: return self._done @@ -328,10 +378,10 @@ def run(self): byte = stream.read(1) while byte is not None and len(byte): if isinstance(byte, str): - byte = byte.encode('utf-8') + byte = byte.encode("utf-8") if byte == self._newline_char: self.log(self.logger, self._buffers[i].decode()) - self._buffers[i] = b'' + self._buffers[i] = b"" else: self._buffers[i] += byte got_byte = True @@ -346,7 +396,11 @@ def run(self): class ProcessLogger(StreamLogger): def __init__(self, logger, process): self.process = process - super().__init__(logger, open(process.stdout.fileno(), buffering=1), open(process.stderr.fileno(), buffering=1)) + super().__init__( + logger, + open(process.stdout.fileno(), buffering=1), + open(process.stderr.fileno(), buffering=1), + ) def is_done(self): return self.process.poll() is not None @@ -364,7 +418,7 @@ def __getattr__(self, name): return getattr(self.process, name) -if __name__ == '__main__': - logger = EthenoLogger('Testing', DEBUG) - logger.info('Info') - logger.critical('Critical') +if __name__ == "__main__": + logger = EthenoLogger("Testing", DEBUG) + logger.info("Info") + logger.critical("Critical") diff --git a/etheno/manticoreclient.py b/etheno/manticoreclient.py deleted file mode 100644 index 0c3355d..0000000 --- a/etheno/manticoreclient.py +++ /dev/null @@ -1,193 +0,0 @@ -import logging -import time - -import builtins -import sys - -# ####BEGIN#### -# Horrible hack to workaround Manticore's global logging system. -# This can be removed after https://github.com/trailofbits/manticore/issues/1369 -# is resolved. -from . 
import manticorelogger - -oldimport = builtins.__import__ -def manticoreimport(name, *args, **kwargs): - if name == 'manticore.utils.log': - manticorelogger.__name__ = 'manticore.utils.log' - sys.modules[name] = manticorelogger - return manticorelogger - else: - return oldimport(name, *args, **kwargs) - -builtins.__import__ = manticoreimport -try: - import manticore.utils.log - import manticore.utils -finally: - builtins.__import__ = oldimport - -manticore.utils.log = manticorelogger -# ####END#### - -from manticore.ethereum import ManticoreEVM -from manticore.exceptions import NoAliveStates -import manticore - -from . import logger -from . import threadwrapper -from .client import EthenoClient, jsonrpc, DATA, QUANTITY -from .etheno import _CONTROLLER -from .manticoreutils import manticore_is_new_enough - -def encode_hex(data): - if data is None: - return None - elif isinstance(data, int) or isinstance(data, long): - encoded = hex(data) - if encoded[-1] == 'L': - encoded = encoded[:-1] - return encoded - else: - return "0x%s" % data.encode('hex') - -class ManticoreClient(EthenoClient): - def __init__(self, manticore=None): - self._assigned_manticore = manticore - self._manticore = None - self.contracts = [] - self.short_name = 'Manticore' - self._accounts_to_create = [] - - @property - def manticore(self): - if self._manticore is None: - if self._assigned_manticore is None: - # we do lazy evaluation of ManticoreClient.manticore so self.log_directory will be assigned already - if self.log_directory is None: - workspace = None - else: - workspace = self.log_directory - self._assigned_manticore = ManticoreEVM(workspace_url=workspace) - self._manticore = threadwrapper.MainThreadWrapper(self._assigned_manticore, _CONTROLLER) - self._finalize_manticore() - return self._manticore - - def _finalize_manticore(self): - if not self._manticore: - return - for balance, address in self._accounts_to_create: - self._manticore.create_account(balance=balance, address=address) - self._accounts_to_create = [] - self.reassign_manticore_loggers() - self.logger.cleanup_empty = True - - def create_account(self, balance, address): - self._accounts_to_create.append((balance, address)) - self._finalize_manticore() - - def reassign_manticore_loggers(self): - # Manticore uses a global to track its loggers: - manticore.utils.log.ETHENO_LOGGER = self.logger - manticore_loggers = (name for name in logging.root.manager.loggerDict if name.startswith('manticore')) - logger_parents = {} - for name in sorted(manticore_loggers): - sep = name.rfind('.') - if sep > 0: - path = name[:sep] - parent = logger_parents[path] - displayname = name[len(path)+1:] - else: - parent = self.logger - displayname = name - m_logger = logger.EthenoLogger(name, parent=parent, cleanup_empty=True, displayname=displayname) - m_logger.propagate = False - logger_parents[name] = m_logger - - @jsonrpc(from_addr = QUANTITY, to = QUANTITY, gas = QUANTITY, gasPrice = QUANTITY, value = QUANTITY, data = DATA, nonce = QUANTITY, RETURN = DATA) - def eth_sendTransaction(self, from_addr, to = None, gas = 90000, gasPrice = None, value = 0, data = None, nonce = None, rpc_client_result = None): - if to is None or to == 0: - # we are creating a new contract - if rpc_client_result is not None: - tx_hash = rpc_client_result['result'] - while True: - receipt = self.etheno.master_client.post({ - 'id' : "%s_receipt" % rpc_client_result['id'], - 'method' : 'eth_getTransactionReceipt', - 'params' : [tx_hash] - }) - if 'result' in receipt and receipt['result']: - address = 
int(receipt['result']['contractAddress'], 16) - break - # The transaction is still pending - time.sleep(1.0) - else: - address = None - contract_address = self.manticore.create_contract(owner = from_addr, balance = value, init=data) - self.contracts.append(contract_address) - self.logger.info(f"Manticore contract created: {encode_hex(contract_address.address)}") - #self.logger.info("Block number: %s" % self.manticore.world.block_number()) - else: - self.manticore.transaction(address = to, data = data, caller=from_addr, value = value) - # Just mimic the result from the master client - # We need to return something valid to appease the differential tester - return rpc_client_result - - @jsonrpc(TX_HASH = QUANTITY) - def eth_getTransactionReceipt(self, tx_hash, rpc_client_result = None): - # Mimic the result from the master client - # to appease the differential tester - return rpc_client_result - - def multi_tx_analysis(self, contract_address = None, tx_limit=None, tx_use_coverage=True, args=None): - if contract_address is None: - for contract_address in self.contracts: - self.multi_tx_analysis( - contract_address=contract_address, - tx_limit=tx_limit, - tx_use_coverage=tx_use_coverage, - args=args - ) - return - - tx_account = self.etheno.accounts - - current_coverage = 0 - tx_no = 0 - if manticore_is_new_enough(0, 3, 0): - shutdown_test = 'is_killed' - else: - shutdown_test = 'is_shutdown' - - while (current_coverage < 100 or not tx_use_coverage) and not getattr(self.manticore, shutdown_test)(): - try: - self.logger.info("Starting symbolic transaction: %d" % tx_no) - - # run_symbolic_tx - symbolic_data = self.manticore.make_symbolic_buffer(320) - symbolic_value = self.manticore.make_symbolic_value() - self.manticore.transaction(caller=tx_account[min(tx_no, len(tx_account) - 1)], - address=contract_address, - data=symbolic_data, - value=symbolic_value) - if manticore_is_new_enough(0, 3, 0): - # TODO: find the equivalent functions to get state counts in v0.3.0 - pass - else: - self.logger.info("%d alive states, %d terminated states" % (self.manticore.count_running_states(), self.manticore.count_terminated_states())) - except NoAliveStates: - break - - # Check if the maximun number of tx was reached - if tx_limit is not None and tx_no + 1 >= tx_limit: - break - - # Check if coverage has improved or not - if tx_use_coverage: - prev_coverage = current_coverage - current_coverage = self.manticore.global_coverage(contract_address) - found_new_coverage = prev_coverage < current_coverage - - if not found_new_coverage: - break - - tx_no += 1 diff --git a/etheno/manticorelogger.py b/etheno/manticorelogger.py deleted file mode 100644 index 2245223..0000000 --- a/etheno/manticorelogger.py +++ /dev/null @@ -1,26 +0,0 @@ -# This is a horrible hack that is used to replace manticore.utils.log -# Remove this once https://github.com/trailofbits/manticore/issues/1369 -# is resolved. - -ETHENO_LOGGER = None - -@property -def manticore_verbosity(): - return ETHENO_LOGGER.log_level - -@property -def DEFAULT_LOG_LEVEL(): - return ETHENO_LOGGER.log_level - -def set_verbosity(setting): - pass - #global manticore_verbosity - #manticore_verbosity = min(max(setting, 0), len(get_levels()) - 1) - #for logger_name in all_loggers: - # logger = logging.getLogger(logger_name) - # # min because more verbosity == lower numbers - # # This means if you explicitly call setLevel somewhere else in the source, and it's *more* - # # verbose, it'll stay that way even if manticore_verbosity is 0. 
- # logger.setLevel(min(get_verbosity(logger_name), logger.getEffectiveLevel())) - -all_loggers = set() diff --git a/etheno/manticoreutils.py b/etheno/manticoreutils.py deleted file mode 100644 index 22455fb..0000000 --- a/etheno/manticoreutils.py +++ /dev/null @@ -1,148 +0,0 @@ -import inspect -import itertools -import pkg_resources - -# Import manticoreclient before we load any actual Manticore classes. -# We don't need it here, but we do rely on it to hook in the Manticore loggers: -from . import manticoreclient -del manticoreclient - -from manticore.core.smtlib.operators import AND -from manticore.ethereum import ManticoreEVM, Detector -import manticore.ethereum.detectors - - -def manticore_version(): - return pkg_resources.get_distribution('manticore').version - - -def manticore_is_new_enough(*required_version): - """Checks if Manticore is newer than the given version. Returns True or False if known, or None if uncertain.""" - if required_version is None or len(required_version) == 0: - required_version = (0, 2, 2) - try: - version = manticore_version() - version = list(map(int, version.split('.'))) - for v, required in itertools.zip_longest(version, required_version, fillvalue=0): - if v < required: - return False - elif v > required: - return True - except Exception: - return None - return True - - -"""Detectors that should not be included in the results from `get_detectors()` (e.g., because they are buggy)""" -if manticore_is_new_enough(0, 2, 3): - # At some point after Manticore 0.2.2, these all stopped working: - DETECTOR_BLACKLIST = { - manticore.ethereum.detectors.DetectDelegatecall, - manticore.ethereum.detectors.DetectEnvInstruction, - manticore.ethereum.detectors.DetectExternalCallAndLeak, - manticore.ethereum.detectors.DetectIntegerOverflow, - manticore.ethereum.detectors.DetectInvalid, - manticore.ethereum.detectors.DetectRaceCondition, - manticore.ethereum.detectors.DetectReentrancyAdvanced, - manticore.ethereum.detectors.DetectReentrancySimple, - manticore.ethereum.detectors.DetectSuicidal, - manticore.ethereum.detectors.DetectUninitializedMemory, - manticore.ethereum.detectors.DetectUninitializedStorage, - manticore.ethereum.detectors.DetectUnusedRetVal - } -else: - DETECTOR_BLACKLIST = set() - - -def get_detectors(): - for name, obj in inspect.getmembers(manticore.ethereum.detectors): - if inspect.isclass(obj)\ - and issubclass(obj, manticore.ethereum.detectors.Detector)\ - and obj != manticore.ethereum.detectors.Detector\ - and obj not in DETECTOR_BLACKLIST: - yield obj - - -def register_all_detectors(manticore): - for detector in get_detectors(): - try: - manticore.register_detector(detector()) - except Exception as e: - manticore.logger.warning(f"Unable to register detector {detector!r}: {e!s}") - - -class StopAtDepth(Detector): - """This just aborts explorations that are too deep""" - - def __init__(self, max_depth): - self.max_depth = max_depth - - stop_at_death = self - - def will_start_run_callback(*args): - with stop_at_death.manticore.locked_context('seen_rep', dict) as reps: - reps.clear() - - # this callback got renamed to `will_run_callback` in Manticore 0.3.0 - if manticore_is_new_enough(0, 3, 0): - self.will_run_callback = will_start_run_callback - else: - self.will_start_run_callback = will_start_run_callback - - super().__init__() - - def will_decode_instruction_callback(self, state, pc): - world = state.platform - with self.manticore.locked_context('seen_rep', dict) as reps: - item = (world.current_transaction.sort == 'CREATE', 
world.current_transaction.address, pc) - if item not in reps: - reps[item] = 0 - reps[item] += 1 - if reps[item] > self.max_depth: - state.abandon() - - -class ManticoreTest: - def __init__(self, state, expression): - self.state = state - self.expression = expression - - def __bool__(self): - return self.can_be_true() - - def can_be_true(self): - return self.state.can_be_true(self.expression) - - def _solve_one(self, *variables, initial_state): - with initial_state as state: - state.constrain(self.expression) - for v in variables: - value = state.solve_one(v) - yield value - state.constrain(v == value) - - def solve_one(self, *variables): - """Finds a solution to the state and returns all of the variables in that solution""" - return self._solve_one(*variables, initial_state=self.state) - - def solve_all(self, *variables): - """Enumerates all solutions to the state for the given variables""" - with self.state as state: - while state.can_be_true(self.expression): - solution = tuple(self._solve_one(*variables, initial_state=state)) - if len(solution) < len(variables): - break - yield solution - state.constrain(AND(*(v != s for v, s in zip(variables, solution)))) - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - pass - - -if __name__ == '__main__': - print('Available Manticore Detectors:') - for detector in get_detectors(): - print(" %s" % detector) diff --git a/etheno/parity.py b/etheno/parity.py index 1163646..2564c38 100644 --- a/etheno/parity.py +++ b/etheno/parity.py @@ -9,6 +9,7 @@ from .keyfile import create_keyfile_json from .utils import find_open_port, format_hex_address, int_to_bytes + def make_config(genesis_path, base_path, port, accounts, password_file, **kwargs): return """[parity] public_node = false @@ -91,24 +92,29 @@ def make_config(genesis_path, base_path, port, accounts, password_file, **kwargs log_file = "{log_path}" color = true """.format( - genesis_path=genesis_path, - base_path=base_path, - port=find_open_port(30303), - rpc_port=port, - log_path=kwargs.get('log_path', "%s/parity.log" % base_path), - chainId=kwargs.get('chainId', 1), - miner=format_hex_address(accounts[-1], True), - account_addresses=', '.join(map(lambda s : "\"0x%s\"" % s, map(format_hex_address, accounts))), - password_file=password_file - ).encode('utf-8') + genesis_path=genesis_path, + base_path=base_path, + port=find_open_port(30303), + rpc_port=port, + log_path=kwargs.get("log_path", "%s/parity.log" % base_path), + chainId=kwargs.get("chainId", 1), + miner=format_hex_address(accounts[-1], True), + account_addresses=", ".join( + map(lambda s: '"0x%s"' % s, map(format_hex_address, accounts)) + ), + password_file=password_file, + ).encode( + "utf-8" + ) + class ParityClient(JSONRPCClient): def __init__(self, genesis, port=8546): - super().__init__('Parity', genesis, port) + super().__init__("Parity", genesis, port) self._unlock_accounts = True self.config = None - + atexit.register(ParityClient.shutdown.__get__(self, ParityClient)) def etheno_set(self): @@ -119,30 +125,34 @@ def etheno_set(self): genesis_path=self.logger.to_log_path(self.genesis_file), base_path=self.logger.to_log_path(self.datadir), port=self.port, - chainId=self.genesis['config']['chainId'], + chainId=self.genesis["config"]["chainId"], accounts=tuple(self.accounts), - password_file=self.logger.to_log_path(self.passwords) + password_file=self.logger.to_log_path(self.passwords), ), - prefix='config', - suffix='.toml' + prefix="config", + suffix=".toml", ) - + def write_passwords(self, 
outfile): - outfile.write(b'etheno') + outfile.write(b"etheno") def write_genesis(self, outfile): parity_genesis = geth_to_parity(self.genesis) - parity_genesis['genesis']['author'] = format_hex_address(self.miner_account.address, True) - outfile.write(json.dumps(parity_genesis).encode('utf-8')) + parity_genesis["genesis"]["author"] = format_hex_address( + self.miner_account.address, True + ) + outfile.write(json.dumps(parity_genesis).encode("utf-8")) def import_account(self, private_key): - keyfile = create_keyfile_json(int_to_bytes(private_key), b'etheno') + keyfile = create_keyfile_json(int_to_bytes(private_key), b"etheno") keyfile_json = json.dumps(keyfile) - keysdir = os.path.join(self.datadir, 'keys', 'etheno') + keysdir = os.path.join(self.datadir, "keys", "etheno") os.makedirs(keysdir, exist_ok=True) - output = tempfile.NamedTemporaryFile(prefix='account', suffix='.key', dir=keysdir, delete=False) + output = tempfile.NamedTemporaryFile( + prefix="account", suffix=".key", dir=keysdir, delete=False + ) try: - output.write(keyfile_json.encode('utf-8')) + output.write(keyfile_json.encode("utf-8")) finally: output.close() if self.log_directory is None: @@ -151,12 +161,14 @@ def import_account(self, private_key): def unlock_account(self, account): addr = format_hex_address(account, True) self.logger.info("Unlocking Parity account %s..." % addr) - return self.post({ - 'id': addr, - 'jsonrpc': '2.0', - 'method': 'personal_unlockAccount', - 'params': [addr, 'etheno', None] # Unlock the account for one day - }) + return self.post( + { + "id": addr, + "jsonrpc": "2.0", + "method": "personal_unlockAccount", + "params": [addr, "etheno", None], # Unlock the account for one day + } + ) def post(self, data, unlock_if_necessary=None): if unlock_if_necessary is None: @@ -164,15 +176,26 @@ def post(self, data, unlock_if_necessary=None): try: return super().post(data) except JSONRPCError as e: - if unlock_if_necessary and 'data' in e.result['error'] and e.result['error']['data'].lower() == 'notunlocked': - self.unlock_account(int(data['params'][0]['from'], 16)) + if ( + unlock_if_necessary + and "data" in e.result["error"] + and e.result["error"]["data"].lower() == "notunlocked" + ): + self.unlock_account(int(data["params"][0]["from"], 16)) return self.post(data, unlock_if_necessary=False) else: raise e def get_start_command(self, unlock_accounts=True): - return ['/usr/bin/env', 'parity', '--config', self.logger.to_log_path(self.config), '--fast-unlock', '--jsonrpc-apis=all'] - + return [ + "/usr/bin/env", + "parity", + "--config", + self.logger.to_log_path(self.config), + "--fast-unlock", + "--jsonrpc-apis=all", + ] + def start(self, unlock_accounts=True): self._unlock_accounts = unlock_accounts super().start(unlock_accounts=unlock_accounts) diff --git a/etheno/precompiler.py b/etheno/precompiler.py new file mode 100644 index 0000000..c442552 --- /dev/null +++ b/etheno/precompiler.py @@ -0,0 +1,72 @@ +from .etheno import EthenoPlugin +from .utils import ConstantTemporaryFile, format_hex_address +import subprocess +import os +class Precompiler(EthenoPlugin): + def __init__(self, deploy_arb=False, deploy_opt=False): + self._deploy_arb = deploy_arb + self._deploy_opt = deploy_opt + + self._arb_sys_file = os.path.join(os.path.dirname(__file__), '..', "models/l2/arbitrum/ArbSys.sol") + self._arb_retryable_tx_file = os.path.join(os.path.dirname(__file__), '..', "models/l2/arbitrum/ArbRetryableTx.sol") + + + def run(self): + from_address = self._etheno.accounts[0] + if self._deploy_arb: + # Deploy ArbSys + 
# TODO: could decrease cyclomatic complexity here a bit + if os.path.exists(self._arb_sys_file): + with open(self._arb_sys_file, 'rb') as arb_sys_file: + arb_sys_file_bytes = arb_sys_file.read() + arb_sys_bytecode = self.compile(arb_sys_file_bytes) + # If solc returns None, throw error and move on. + if arb_sys_bytecode: + arb_sys_contract_address = self._etheno.deploy_contract(from_address=from_address, bytecode=arb_sys_bytecode) + else: + self.logger.error(f"Could not deploy ArbSys due to compilation issues") + else: + self.logger.error(f"Could not find ArbSys.sol file at:\n{self._arb_sys_file}") + + # Deploy ArbRetryableTx + if os.path.exists(self._arb_retryable_tx_file): + with open(self._arb_retryable_tx_file, 'rb') as arb_retryable_tx_file: + arb_retryable_tx_file_bytes = arb_retryable_tx_file.read() + arb_retryable_tx_file_bytecode = self.compile(arb_retryable_tx_file_bytes) + # If solc returns None, throw error and move on. + if arb_retryable_tx_file_bytecode: + arb_retryable_tx_contract_address = self._etheno.deploy_contract(from_address=from_address, bytecode=arb_retryable_tx_file_bytecode) + print(arb_retryable_tx_contract_address) + else: + self.logger.error(f"Could not deploy ArbRetryableTx due to compilation issues") + return + + + def compile(self, solidity): + # TODO: Why was prefix and suffix given? + with ConstantTemporaryFile(solidity) as contract: + solc = subprocess.Popen(['/usr/bin/env', 'solc', '--bin', contract], stderr=subprocess.PIPE, + stdout=subprocess.PIPE, bufsize=1, universal_newlines=True) + errors = solc.stderr.read().strip() + output = solc.stdout.read() + if solc.wait() != 0: + self.logger.error(f"{errors}\n{output}") + return None + self.logger.warning(errors) + # Only the last contract in the compiled bytecode is deployed. + # TODO: do we need the interface? 
+ binary_key = 'Binary:' + binary_key_len = len(binary_key) + total_offset = 0 + while True: + offset = output[total_offset:].find(binary_key) + if offset < 0: + break + total_offset += (offset + binary_key_len) + try: + code = hex(int(output[total_offset:].strip(), 16)) + self.logger.debug(f"Compiled contract code: {code}") + return code + except Exception as e: + self.logger.error(f"Could not parse `solc` output:\n{output}\n with this error:\n{e}") + return None diff --git a/etheno/signals.py b/etheno/signals.py index 7e51ddb..477de97 100644 --- a/etheno/signals.py +++ b/etheno/signals.py @@ -10,6 +10,7 @@ def new_handler(sig_type, frame): if current_handler: current_handler(sig_type, frame) handler(sig_type, frame) + signal.signal(signal_type, new_handler) diff --git a/etheno/synchronization.py b/etheno/synchronization.py index 81eb9fe..f28603e 100644 --- a/etheno/synchronization.py +++ b/etheno/synchronization.py @@ -3,9 +3,18 @@ import eth_utils from web3.auto import w3 -from .client import EthenoClient, SelfPostingClient, jsonrpc, JSONRPCError, DATA, QUANTITY, transaction_receipt_succeeded +from .client import ( + EthenoClient, + SelfPostingClient, + jsonrpc, + JSONRPCError, + DATA, + QUANTITY, + transaction_receipt_succeeded, +) from .utils import decode_hex, format_hex_address, int_to_bytes + def _decode_value(value): if isinstance(value, int): return value @@ -14,58 +23,77 @@ def _decode_value(value): except Exception: return None -def _remap_params(client, params, mapping, method, remap_data = False): + +def _remap_params(client, params, mapping, method, remap_data=False): if isinstance(params, dict): for key, value in params.items(): decoded = _decode_value(value) if decoded is None: - params[key] = _remap_params(client, value, mapping, "%s['%s']" % (method, key)) + params[key] = _remap_params( + client, value, mapping, "%s['%s']" % (method, key) + ) elif decoded in mapping: - client.logger.debug("Converting %s parameter '%s' from %x to %x" % (method, key, decoded, mapping[decoded])) + client.logger.debug( + "Converting %s parameter '%s' from %x to %x" + % (method, key, decoded, mapping[decoded]) + ) params[key] = format_hex_address(mapping[decoded], True) - elif remap_data and key == 'data': - new_value = params['data'] + elif remap_data and key == "data": + new_value = params["data"] for old, new in mapping.items(): prev = new_value - new_value = new_value.replace(format_hex_address(old), format_hex_address(new)) + new_value = new_value.replace( + format_hex_address(old), format_hex_address(new) + ) if prev != new_value: - client.logger.debug("Converting %x in %s['data'] to %x" % (old, method, new)) - if new_value != params['data']: - params['data'] = new_value + client.logger.debug( + "Converting %x in %s['data'] to %x" % (old, method, new) + ) + if new_value != params["data"]: + params["data"] = new_value elif isinstance(params, list) or isinstance(params, tuple): for i, p in enumerate(params): decoded = _decode_value(p) if decoded is None: params[i] = _remap_params(client, p, mapping, "%s['%d']" % (method, i)) elif decoded in mapping: - client.logger.debug("Converting %s parameter %d from %x to %x" % (method, i, decoded, mapping[decoded])) + client.logger.debug( + "Converting %s parameter %d from %x to %x" + % (method, i, decoded, mapping[decoded]) + ) params[i] = format_hex_address(mapping[decoded], True) else: decoded = _decode_value(params) if decoded is not None and decoded in mapping: - client.logger.debug("Converting %s from %x to %x" % (method, decoded, 
mapping[decoded])) + client.logger.debug( + "Converting %s from %x to %x" % (method, decoded, mapping[decoded]) + ) return mapping[decoded] return params + class ChainSynchronizer(object): def __init__(self, client): if not isinstance(client, SelfPostingClient): - raise TypeError('TODO: Implement support for address synchronization on clients other than SelfPostingClients') + raise TypeError( + "TODO: Implement support for address synchronization on clients other than SelfPostingClients" + ) self.mapping = {} self.filter_mapping = {} - self._old_post = getattr(client, 'post') - self._old_create_account = getattr(client, 'create_account') + self._old_post = getattr(client, "post") + self._old_create_account = getattr(client, "create_account") self._client = client - def create_account(self, balance = 0, address = None): + def create_account(self, balance=0, address=None): + # TODO: not sure what the data field is supposed to do here if self._client == self._client.etheno.master_client: return self._old_create_account(data) try: # First, see if the client can handle creating this address: - return self._old_create_account(balance = balance, address = address) + return self._old_create_account(balance=balance, address=address) except NotImplementedError: pass - new_address = self._old_create_account(balance = balance, address = None) + new_address = self._old_create_account(balance=balance, address=None) if address is not None and address != new_address: self.mapping[address] = new_address return new_address @@ -73,77 +101,131 @@ def create_account(self, balance = 0, address = None): def post(self, data, *args, **kwargs): if self._client == self._client.etheno.master_client: return self._old_post(data, *args, **kwargs) - - method = data['method'] - if method == 'eth_getTransactionReceipt': + method = data["method"] + + if method == "eth_getTransactionReceipt": # first, make sure the master client's transaction succeeded; if not, we can just ignore this if not transaction_receipt_succeeded(self._client.etheno.rpc_client_result): # the master client's transaction receipt command failed, so we can skip calling this client's return self._client.etheno.rpc_client_result - elif _decode_value(data['params'][0]) not in self.mapping: + elif _decode_value(data["params"][0]) not in self.mapping: # we don't know about this transaction receipt, which probably means that the transaction failed # on this client. So return the receipt here, because below we will block on a result: return self._old_post(data, *args, **kwargs) - + uninstalling_filter = None - if 'params' in data: - data['params'] = _remap_params(self._client, data['params'], self.mapping, method, remap_data = True) - if ('filter' in method.lower() and 'get' in method.lower()) or method == 'eth_uninstallFilter': + if "params" in data: + data["params"] = _remap_params( + self._client, data["params"], self.mapping, method, remap_data=True + ) + if ( + "filter" in method.lower() and "get" in method.lower() + ) or method == "eth_uninstallFilter": # we are accessing a filter by its ID, so remap the ID - old_id = data['params'][0] + old_id = data["params"][0] if old_id not in self.filter_mapping: - self._client.logger.warn("%s called on unknown filter ID %s; ignoring..." % (method, old_id)) + self._client.logger.warn( + "%s called on unknown filter ID %s; ignoring..." 
+ % (method, old_id) + ) else: - self._client.logger.info("Mapping filter ID %s to %s for %s" % (old_id, self.filter_mapping[old_id], method)) - data['params'] = [self.filter_mapping[old_id]] - if method == 'eth_uninstallFilter': + self._client.logger.info( + "Mapping filter ID %s to %s for %s" + % (old_id, self.filter_mapping[old_id], method) + ) + data["params"] = [self.filter_mapping[old_id]] + if method == "eth_uninstallFilter": uninstalling_filter = old_id ret = self._old_post(data, *args, **kwargs) if uninstalling_filter is not None: - if ret['result']: + if ret["result"]: # the uninstall succeeded, so we no longer need to keep the mapping: del self.filter_mapping[uninstalling_filter] - elif 'filter' in method.lower() and 'new' in method.lower() and 'result' in ret: + elif "filter" in method.lower() and "new" in method.lower() and "result" in ret: # a new filter was just created, so record the mapping - self.filter_mapping[self._client.etheno.rpc_client_result['result']] = ret['result'] - elif method == 'eth_sendTransaction' or method == 'eth_sendRawTransaction': + self.filter_mapping[self._client.etheno.rpc_client_result["result"]] = ret[ + "result" + ] + elif method == "eth_sendTransaction" or method == "eth_sendRawTransaction": # record the transaction hash mapping - if ret and 'result' in ret and ret['result']: - if self._client.etheno.rpc_client_result and not isinstance(self._client.etheno.rpc_client_result, JSONRPCError) and 'result' in self._client.etheno.rpc_client_result and self._client.etheno.rpc_client_result['result']: - old_decoded = _decode_value(self._client.etheno.rpc_client_result['result']) - new_decoded = _decode_value(ret['result']) + if ret and "result" in ret and ret["result"]: + if ( + self._client.etheno.rpc_client_result + and not isinstance( + self._client.etheno.rpc_client_result, JSONRPCError + ) + and "result" in self._client.etheno.rpc_client_result + and self._client.etheno.rpc_client_result["result"] + ): + old_decoded = _decode_value( + self._client.etheno.rpc_client_result["result"] + ) + new_decoded = _decode_value(ret["result"]) if old_decoded is not None and new_decoded is not None: - self._client.logger.info("Mapping transaction hash %x to %x" % (old_decoded, new_decoded)) + self._client.logger.info( + "Mapping transaction hash %x to %x" + % (old_decoded, new_decoded) + ) self.mapping[old_decoded] = new_decoded elif not (old_decoded is None and new_decoded is None): - self._client.logger.warn("Call to %s returned %s from the master client but %s from this client; ignoring..." % (method, self._client.etheno.rpc_client_result['result'], ret['result'])) - elif method == 'eth_getTransactionReceipt': + self._client.logger.warn( + "Call to %s returned %s from the master client but %s from this client; ignoring..." + % ( + method, + self._client.etheno.rpc_client_result["result"], + ret["result"], + ) + ) + elif method == "eth_getTransactionReceipt": # by this point we know that the master client has already successfully mined the transaction and returned a receipt # so make sure that we block until this client has also mined the transaction and returned a receipt while transaction_receipt_succeeded(ret) is None: - self._client.logger.info("Waiting to mine transaction %s..." % data['params'][0]) + self._client.logger.info( + "Waiting to mine transaction %s..." 
% data["params"][0] + ) time.sleep(5.0) ret = self._old_post(data, *args, **kwargs) # update the mapping with the address if a new contract was created - if 'contractAddress' in ret['result'] and ret['result']['contractAddress']: - master_address = _decode_value(self._client.etheno.rpc_client_result['result']['contractAddress']) - our_address = _decode_value(ret['result']['contractAddress']) + if "contractAddress" in ret["result"] and ret["result"]["contractAddress"]: + master_address = _decode_value( + self._client.etheno.rpc_client_result["result"]["contractAddress"] + ) + our_address = _decode_value(ret["result"]["contractAddress"]) if master_address is not None and our_address is not None: self.mapping[master_address] = our_address elif not (master_address is None and our_address is None): - self._client.logger.warn("Call to %s returned %s from the master client but %s from this client; ignoring..." % (method, self._client.etheno.rpc_client_result['result']['contractAddress'], ret['result']['contractAddress'])) + self._client.logger.warn( + "Call to %s returned %s from the master client but %s from this client; ignoring..." + % ( + method, + self._client.etheno.rpc_client_result["result"][ + "contractAddress" + ], + ret["result"]["contractAddress"], + ) + ) return ret + def AddressSynchronizingClient(etheno_client): synchronizer = ChainSynchronizer(etheno_client) - setattr(etheno_client, 'create_account', ChainSynchronizer.create_account.__get__(synchronizer, ChainSynchronizer)) - setattr(etheno_client, 'post', ChainSynchronizer.post.__get__(synchronizer, ChainSynchronizer)) + setattr( + etheno_client, + "create_account", + ChainSynchronizer.create_account.__get__(synchronizer, ChainSynchronizer), + ) + setattr( + etheno_client, + "post", + ChainSynchronizer.post.__get__(synchronizer, ChainSynchronizer), + ) return etheno_client - + + class RawTransactionSynchronizer(ChainSynchronizer): def __init__(self, client, accounts): super().__init__(client) @@ -152,22 +234,30 @@ def __init__(self, client, accounts): self._account_index = -1 self._chain_id = client.get_net_version() - def create_account(self, balance = 0, address = None): + def create_account(self, balance=0, address=None): self._account_index += 1 new_address = self.accounts[self._account_index].address - self._private_keys[new_address] = int_to_bytes(self.accounts[self._account_index].private_key) + self._private_keys[new_address] = int_to_bytes( + self.accounts[self._account_index].private_key + ) if address is not None and address != new_address: self.mapping[address] = new_address return new_address def post(self, data, *args, **kwargs): - method = data['method'] + method = data["method"] - if method == 'eth_sendTransaction': + if method == "eth_sendTransaction": # This client does not support sendTransaction because it does not have any of the requisite accounts. # So let's manually sign the transaction and send it to the client using eth_sendRawTransaction, instead. 
- params = _remap_params(self._client, dict(data['params'][0]), self.mapping, method, remap_data = True) - from_str = params['from'] + params = _remap_params( + self._client, + dict(data["params"][0]), + self.mapping, + method, + remap_data=True, + ) + from_str = params["from"] from_address = int(from_str, 16) if from_address in self._private_keys: private_key = self._private_keys[from_address] @@ -178,33 +268,51 @@ def post(self, data, *args, **kwargs): private_key = account.private_key break else: - raise Exception("Error: eth_sendTransaction sent from unknown address %s:\n%s" % (from_str, data)) - params['chainId'] = self._client.get_net_version() + raise Exception( + "Error: eth_sendTransaction sent from unknown address %s:\n%s" + % (from_str, data) + ) + params["chainId"] = self._client.get_net_version() # Workaround for a bug in web3.eth.account: # the signTransaction function checks to see if the 'from' field is present, and if so it validates that it # corresponds to the address of the private key. However, web3.eth.account doesn't perform this check case # insensitively, so it can erroneously fail. Therefore, set the 'from' field using the same value that # this call validates against: - params['from'] = w3.eth.account.privateKeyToAccount(private_key).address + params["from"] = w3.eth.account.privateKeyToAccount(private_key).address # web3.eth.acount.signTransaction expects the `to` field to be a checksum address: - if 'to' in params: - params['to'] = eth_utils.address.to_checksum_address(params['to']) + if "to" in params: + params["to"] = eth_utils.address.to_checksum_address(params["to"]) transaction_count = self._client.get_transaction_count(from_address) - params['nonce'] = transaction_count + params["nonce"] = transaction_count signed_txn = w3.eth.account.signTransaction(params, private_key=private_key) - return super().post({ - 'id': 1, - 'jsonrpc': '2.0', - 'method': 'eth_sendRawTransaction', - 'params': [signed_txn.rawTransaction.hex()] - }) + return super().post( + { + "id": 1, + "jsonrpc": "2.0", + "method": "eth_sendRawTransaction", + "params": [signed_txn.rawTransaction.hex()], + } + ) else: return super().post(data, *args, **kwargs) + def RawTransactionClient(etheno_client, accounts): synchronizer = RawTransactionSynchronizer(etheno_client, accounts) - setattr(etheno_client, 'create_account', RawTransactionSynchronizer.create_account.__get__(synchronizer, RawTransactionSynchronizer)) - setattr(etheno_client, 'post', RawTransactionSynchronizer.post.__get__(synchronizer, RawTransactionSynchronizer)) - + setattr( + etheno_client, + "create_account", + RawTransactionSynchronizer.create_account.__get__( + synchronizer, RawTransactionSynchronizer + ), + ) + setattr( + etheno_client, + "post", + RawTransactionSynchronizer.post.__get__( + synchronizer, RawTransactionSynchronizer + ), + ) + return etheno_client diff --git a/etheno/threadwrapper.py b/etheno/threadwrapper.py index 177ce2c..440026e 100644 --- a/etheno/threadwrapper.py +++ b/etheno/threadwrapper.py @@ -3,9 +3,11 @@ import threading from threading import Condition + def is_main_thread(): return isinstance(threading.current_thread(), threading._MainThread) + class MainThreadController(object): def __init__(self): if not is_main_thread(): @@ -18,6 +20,7 @@ def __init__(self): self._kwargs = None self._return = None self._quit = False + def invoke(self, obj, *args, **kwargs): if is_main_thread(): return obj.__call__(*args, **kwargs) @@ -46,6 +49,7 @@ def invoke(self, obj, *args, **kwargs): self._return = None if not 
released: self._main_wake_up.release() + def quit(self): self._main_wake_up.acquire() try: @@ -53,12 +57,15 @@ def quit(self): self._main_wake_up.notify_all() finally: self._main_wake_up.release() + def run(self): if not is_main_thread(): raise Exception("run can only be called from the main thread!") from . import signals + def signal_handler(signal, frame): self._quit = True + signals.add_sigint_handler(signal_handler) while True: try: @@ -78,25 +85,30 @@ def signal_handler(signal, frame): if self._quit: return + class MainThreadWrapper(object): def __init__(self, mainobj, controller): self._main = mainobj self._controller = controller + def __call__(self, *args, **kwargs): ret = self._controller.invoke(self._main, *args, **kwargs) if id(self._main) == id(ret): return MainThreadWrapper(ret, self._controller) else: return ret + def __getattribute__(self, name): - if name == '_main' or name == '_controller': + if name == "_main" or name == "_controller": return object.__getattribute__(self, name) elif isinstance(getattr(type(self._main), name), property): return getattr(self._main, name) else: return MainThreadWrapper(getattr(self._main, name), self._controller) -if __name__ == '__main__': + +if __name__ == "__main__": + class MainThreadOnlyClass(object): def do_stuff(self): if not is_main_thread(): @@ -109,9 +121,12 @@ def do_stuff(self): def dostuff(mtoc): print(mtoc.do_stuff()) - + from threading import Thread - thread = Thread(target = dostuff, args = (MainThreadWrapper(main_thread_only, controller),)) + + thread = Thread( + target=dostuff, args=(MainThreadWrapper(main_thread_only, controller),) + ) thread.start() controller.run() thread.join() diff --git a/etheno/truffle.py b/etheno/truffle.py index 451cd04..d7424b3 100644 --- a/etheno/truffle.py +++ b/etheno/truffle.py @@ -20,35 +20,40 @@ def make_list(args: Iterable): class Truffle(object): - def __init__(self, truffle_cmd='truffle', parent_logger=None, log_level=None): + def __init__(self, truffle_cmd="truffle", parent_logger=None, log_level=None): self._running = False - self.logger = EthenoLogger('Truffle', log_level=log_level, parent=parent_logger) + self.logger = EthenoLogger("Truffle", log_level=log_level, parent=parent_logger) self.truffle_cmd = make_list(truffle_cmd) def terminate(self): self._running = False def run_tests(self): - return self.run('test') + return self.run("test") def run_migrate(self): - return self.run('migrate') + return self.run("migrate") def run(self, args): self._running = True args = make_list(args) - p = PtyLogger(self.logger, ['/usr/bin/env'] + self.truffle_cmd + args) + p = PtyLogger(self.logger, ["/usr/bin/env"] + self.truffle_cmd + args) p.start() try: while p.isalive(): if not self._running: - self.logger.info("Etheno received a shutdown signal; terminating truffle %s" % ' '.join(args)) + self.logger.info( + "Etheno received a shutdown signal; terminating truffle %s" + % " ".join(args) + ) break time.sleep(1.0) except KeyboardInterrupt as e: - self.logger.info("Caught keyboard interrupt; terminating truffle %s" % ' '.join(args)) + self.logger.info( + "Caught keyboard interrupt; terminating truffle %s" % " ".join(args) + ) raise e finally: p.close(force=True) diff --git a/etheno/utils.py b/etheno/utils.py index a8ecc41..202fd0b 100644 --- a/etheno/utils.py +++ b/etheno/utils.py @@ -12,8 +12,8 @@ def __init__(self, constant_content, **kwargs): self.constant_content = constant_content self._file = None self._kwargs = dict(kwargs) - self._kwargs['mode'] = 'w+b' - self._kwargs['delete'] = False + 
self._kwargs["mode"] = "w+b" + self._kwargs["delete"] = False def __enter__(self) -> str: self._file = tempfile.NamedTemporaryFile(**self._kwargs) @@ -29,13 +29,13 @@ def __exit__(self, type, value, traceback): def int_to_bytes(n: int) -> bytes: number_of_bytes = int(math.ceil(n.bit_length() / 8)) - return n.to_bytes(number_of_bytes, byteorder='big') + return n.to_bytes(number_of_bytes, byteorder="big") def decode_hex(data: Optional[str]) -> Optional[bytes]: if data is None: return None - if data.startswith('0x'): + if data.startswith("0x"): data = data[2:] return bytes.fromhex(data) @@ -43,7 +43,9 @@ def decode_hex(data: Optional[str]) -> Optional[bytes]: def decode_value(v: Union[str, int]) -> int: if isinstance(v, int): return v - elif v.startswith('0x') or (frozenset(['a', 'b', 'c', 'd', 'e', 'f']) & frozenset(v)): + elif v.startswith("0x") or ( + frozenset(["a", "b", "c", "d", "e", "f"]) & frozenset(v) + ): # this is a hex string return int(v, 16) else: @@ -51,18 +53,20 @@ def decode_value(v: Union[str, int]) -> int: return int(v) -def format_hex_address(addr: Optional[Union[int, str]], add_0x: bool = False) -> Optional[str]: +def format_hex_address( + addr: Optional[Union[int, str]], add_0x: bool = False +) -> Optional[str]: if addr is None: return None if isinstance(addr, int): addr = "%x" % addr - if addr.lower().startswith('0x'): + if addr.lower().startswith("0x"): addr = addr[2:] if len(addr) < 40: - addr = "%s%s" % ('0' * (40 - len(addr)), addr) + addr = "%s%s" % ("0" * (40 - len(addr)), addr) elif 40 < len(addr) < 64: # this is likely something like a transaction hash, so round up to 32 bytes: - addr = "%s%s" % ('0' * (64 - len(addr)), addr) + addr = "%s%s" % ("0" * (64 - len(addr)), addr) if add_0x: addr = "0x%s" % addr return addr @@ -80,7 +84,7 @@ def webserver_is_up(url: str) -> bool: def is_port_free(port: int) -> bool: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - return sock.connect_ex(('127.0.0.1', port)) != 0 + return sock.connect_ex(("127.0.0.1", port)) != 0 def find_open_port(starting_port: int = 1025) -> int: @@ -92,7 +96,7 @@ def find_open_port(starting_port: int = 1025) -> int: def clear_directory(path: str): """ - Deletes the contents of a directory, but not the directory itself. + Deletes the contents of a directory, but not the directory itself. This is safe to use on symlinked directories. Symlinks will be deleted, but the files and directories they point to will not be deleted. If `path` itself is a symlink, the symlink will be deleted. @@ -113,7 +117,7 @@ def ynprompt(prompt: str) -> bool: while True: yn = input(prompt) yn = yn[0:1].lower() - if yn == 'n' or yn == '': + if yn == "n" or yn == "": return False - elif yn == 'y': + elif yn == "y": return True diff --git a/examples/BrokenMetaCoin/ExploitMetaCoinManticoreScript.py b/examples/BrokenMetaCoin/ExploitMetaCoinManticoreScript.py deleted file mode 100644 index ad512a3..0000000 --- a/examples/BrokenMetaCoin/ExploitMetaCoinManticoreScript.py +++ /dev/null @@ -1,31 +0,0 @@ -# global variables `logger`, `manticore`, and `manticoreutils` are provided by Etheno - -# No need to set up accounts or contracts the way we usually do with Manticore alone! 
-# They are already pre-provisioned in the `manticore` object -# and we can simply access them from there: - -# The Truffle migrations deploy three contracts: [Migrations contract, ConvertLib, MetaCoin] -contract_account = list(manticore.contract_accounts.values())[2] - -# The contract was loaded from bytecode, so we need to manually set the ABI: -contract_account.add_function('setMetadata(uint256,uint256)') - -# Create symbolic variables for which Manticore will discover values: -key1 = manticore.make_symbolic_value(name='key1') -value1 = manticore.make_symbolic_value(name='val1') -key2 = manticore.make_symbolic_value(name='key2') - -# Make two calls to the `setMetadata` function of the `MetaCoin` contract -# using the symbolic variables: -contract_account.setMetadata(key1, value1) -contract_account.setMetadata(key2, 1) - -for st in manticore.all_states: - # The value we want to overwrite is the `balances` mapping in storage slot 0 - balances_value = st.platform.get_storage_data(contract_account.address, 0) - with manticoreutils.ManticoreTest(st, balances_value == 1) as test: - for k1, v1, k2 in test.solve_all(key1, value1, key2): - result = f"\nFound a way to overwrite balances! Check {manticore.workspace}\n" - result += f" setMetadata({hex(k1)}, {hex(v1)})\n" - result += f" setMetadata({hex(k2)}, 0x1)\n" - logger.info(result) diff --git a/examples/BrokenMetaCoin/LICENSE b/examples/BrokenMetaCoin/LICENSE deleted file mode 100644 index bb98c92..0000000 --- a/examples/BrokenMetaCoin/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2018 Truffle - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - diff --git a/examples/BrokenMetaCoin/README.md b/examples/BrokenMetaCoin/README.md deleted file mode 100644 index 02c0fc2..0000000 --- a/examples/BrokenMetaCoin/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# Broken MetaCoin Truffle and Manticore Example - -This example is the same `MetaCoin` project used in the Truffle -documentation and tutorials, however, we have added common Solidity -errors to the code, including some that are quite subtle. - -A [`run_etheno.sh`](run_etheno.sh) script is provided to give some -examples of how one might use Etheno and Manticore to automatically -discover the bugs in this project. 
diff --git a/examples/BrokenMetaCoin/contracts/ConvertLib.sol b/examples/BrokenMetaCoin/contracts/ConvertLib.sol deleted file mode 100644 index 5d83fa9..0000000 --- a/examples/BrokenMetaCoin/contracts/ConvertLib.sol +++ /dev/null @@ -1,8 +0,0 @@ -pragma solidity ^0.5.0; - -library ConvertLib{ - function convert(uint amount,uint conversionRate) public pure returns (uint convertedAmount) - { - return amount * conversionRate; - } -} diff --git a/examples/BrokenMetaCoin/contracts/MetaCoin.sol b/examples/BrokenMetaCoin/contracts/MetaCoin.sol deleted file mode 100644 index 25494f2..0000000 --- a/examples/BrokenMetaCoin/contracts/MetaCoin.sol +++ /dev/null @@ -1,48 +0,0 @@ -pragma solidity ^0.5.0; - -import "./ConvertLib.sol"; - -/** - * This is a simple example token with several vulnerabilities added. - */ -contract MetaCoin { - mapping (address => uint) balances; - uint256[] metadata; - - event Transfer(address indexed _from, address indexed _to, uint256 _value); - - constructor() public { - balances[tx.origin] = 10000; - } - - function setMetadata(uint256 key, uint256 value) public { - if (metadata.length <= key) { - metadata.length = key + 1; - } - metadata[key] = value; - } - - function getMetadata(uint256 key) public view returns (uint256) { - return metadata[key]; - } - - function backdoor() public { - selfdestruct(msg.sender); - } - - function sendCoin(address receiver, uint amount) public returns(bool sufficient) { - if (balances[msg.sender] < amount) return false; - balances[msg.sender] -= amount; - balances[receiver] += amount; - emit Transfer(msg.sender, receiver, amount); - return true; - } - - function getBalanceInEth(address addr) public view returns(uint){ - return ConvertLib.convert(getBalance(addr),2); - } - - function getBalance(address addr) public view returns(uint) { - return balances[addr]; - } -} diff --git a/examples/BrokenMetaCoin/contracts/Migrations.sol b/examples/BrokenMetaCoin/contracts/Migrations.sol deleted file mode 100644 index 89b4d5c..0000000 --- a/examples/BrokenMetaCoin/contracts/Migrations.sol +++ /dev/null @@ -1,23 +0,0 @@ -pragma solidity ^0.5.0; - -contract Migrations { - address public owner; - uint public last_completed_migration; - - modifier restricted() { - if (msg.sender == owner) _; - } - - constructor() public { - owner = msg.sender; - } - - function setCompleted(uint completed) public restricted { - last_completed_migration = completed; - } - - function upgrade(address new_address) public restricted { - Migrations upgraded = Migrations(new_address); - upgraded.setCompleted(last_completed_migration); - } -} diff --git a/examples/BrokenMetaCoin/migrations/1_initial_migration.js b/examples/BrokenMetaCoin/migrations/1_initial_migration.js deleted file mode 100644 index 4d5f3f9..0000000 --- a/examples/BrokenMetaCoin/migrations/1_initial_migration.js +++ /dev/null @@ -1,5 +0,0 @@ -var Migrations = artifacts.require("./Migrations.sol"); - -module.exports = function(deployer) { - deployer.deploy(Migrations); -}; diff --git a/examples/BrokenMetaCoin/migrations/2_deploy_contracts.js b/examples/BrokenMetaCoin/migrations/2_deploy_contracts.js deleted file mode 100644 index b3dc3e9..0000000 --- a/examples/BrokenMetaCoin/migrations/2_deploy_contracts.js +++ /dev/null @@ -1,8 +0,0 @@ -var ConvertLib = artifacts.require("./ConvertLib.sol"); -var MetaCoin = artifacts.require("./MetaCoin.sol"); - -module.exports = function(deployer) { - deployer.deploy(ConvertLib); - deployer.link(ConvertLib, MetaCoin); - deployer.deploy(MetaCoin); -}; diff --git 
a/examples/BrokenMetaCoin/run_etheno.sh b/examples/BrokenMetaCoin/run_etheno.sh deleted file mode 100755 index b77bcb4..0000000 --- a/examples/BrokenMetaCoin/run_etheno.sh +++ /dev/null @@ -1,16 +0,0 @@ -# First, remove the Truffle build directory. -# This shouldn't be necessary, but Truffle will often fail with -# confusing error messages if it is upgraded between builds. -# So, we just rebuild everything from scratch each time to ensure -# that it always works. -rm -rf build - -echo "Running the custom Manticore script ExploitMetaCoinManticoreScript.py" -# Set the max depth for Manticore to 2 because this script only needs to -# find a sequence of two transactions to exploit the bug -etheno --manticore --truffle --ganache --manticore-max-depth 2 -r ExploitMetaCoinManticoreScript.py - -echo "Running a full Manticore analysis with standard vulnerability detectors (this can take roughly 30 minutes)" -# Set the max depth for Manticore to 2 because we can get ~98% coverage -# with that setting, and it drastically reduces compute time -etheno -m -t -g --manticore-max-depth 2 diff --git a/examples/BrokenMetaCoin/test/TestMetacoin.sol b/examples/BrokenMetaCoin/test/TestMetacoin.sol deleted file mode 100644 index 7af110c..0000000 --- a/examples/BrokenMetaCoin/test/TestMetacoin.sol +++ /dev/null @@ -1,25 +0,0 @@ -pragma solidity ^0.4.2; - -import "truffle/Assert.sol"; -import "truffle/DeployedAddresses.sol"; -import "../contracts/MetaCoin.sol"; - -contract TestMetacoin { - - function testInitialBalanceUsingDeployedContract() public { - MetaCoin meta = MetaCoin(DeployedAddresses.MetaCoin()); - - uint expected = 10000; - - Assert.equal(meta.getBalance(tx.origin), expected, "Owner should have 10000 MetaCoin initially"); - } - - function testInitialBalanceWithNewMetaCoin() public { - MetaCoin meta = new MetaCoin(); - - uint expected = 10000; - - Assert.equal(meta.getBalance(tx.origin), expected, "Owner should have 10000 MetaCoin initially"); - } - -} diff --git a/examples/BrokenMetaCoin/test/metacoin.js b/examples/BrokenMetaCoin/test/metacoin.js deleted file mode 100644 index c61c093..0000000 --- a/examples/BrokenMetaCoin/test/metacoin.js +++ /dev/null @@ -1,63 +0,0 @@ -var MetaCoin = artifacts.require("./MetaCoin.sol"); - -contract('MetaCoin', function(accounts) { - it("should put 10000 MetaCoin in the first account", function() { - return MetaCoin.deployed().then(function(instance) { - return instance.getBalance.call(accounts[0]); - }).then(function(balance) { - assert.equal(balance.valueOf(), 10000, "10000 wasn't in the first account"); - }); - }); - it("should call a function that depends on a linked library", function() { - var meta; - var metaCoinBalance; - var metaCoinEthBalance; - - return MetaCoin.deployed().then(function(instance) { - meta = instance; - return meta.getBalance.call(accounts[0]); - }).then(function(outCoinBalance) { - metaCoinBalance = outCoinBalance.toNumber(); - return meta.getBalanceInEth.call(accounts[0]); - }).then(function(outCoinBalanceEth) { - metaCoinEthBalance = outCoinBalanceEth.toNumber(); - }).then(function() { - assert.equal(metaCoinEthBalance, 2 * metaCoinBalance, "Library function returned unexpected function, linkage may be broken"); - }); - }); - it("should send coin correctly", function() { - var meta; - - // Get initial balances of first and second account. 
- var account_one = accounts[0]; - var account_two = accounts[1]; - - var account_one_starting_balance; - var account_two_starting_balance; - var account_one_ending_balance; - var account_two_ending_balance; - - var amount = 10; - - return MetaCoin.deployed().then(function(instance) { - meta = instance; - return meta.getBalance.call(account_one); - }).then(function(balance) { - account_one_starting_balance = balance.toNumber(); - return meta.getBalance.call(account_two); - }).then(function(balance) { - account_two_starting_balance = balance.toNumber(); - return meta.sendCoin(account_two, amount, {from: account_one}); - }).then(function() { - return meta.getBalance.call(account_one); - }).then(function(balance) { - account_one_ending_balance = balance.toNumber(); - return meta.getBalance.call(account_two); - }).then(function(balance) { - account_two_ending_balance = balance.toNumber(); - - assert.equal(account_one_ending_balance, account_one_starting_balance - amount, "Amount wasn't correctly taken from the sender"); - assert.equal(account_two_ending_balance, account_two_starting_balance + amount, "Amount wasn't correctly sent to the receiver"); - }); - }); -}); diff --git a/examples/BrokenMetaCoin/truffle.js b/examples/BrokenMetaCoin/truffle.js deleted file mode 100644 index aed5f84..0000000 --- a/examples/BrokenMetaCoin/truffle.js +++ /dev/null @@ -1,9 +0,0 @@ -module.exports = { - networks: { - development: { - host: "127.0.0.1", - port: 8545, - network_id: "*" - } - } -}; diff --git a/examples/ConstantinopleGasUsage/README.md b/examples/ConstantinopleGasUsage/README.md deleted file mode 100644 index 460331f..0000000 --- a/examples/ConstantinopleGasUsage/README.md +++ /dev/null @@ -1,23 +0,0 @@ -# Constantinople Gas Usage Consensus Bug - -This example is able to automatically reproduce [the Constantinople -gas usage -discrepancy](https://github.com/paritytech/parity-ethereum/pull/9746) -that caused a hard-fork on Ropsten in October of 2018. This bug was -related to how clients interpreted [a new -EIP](https://eips.ethereum.org/EIPS/eip-1283) changing how gas refunds -are accounted across calls. - -Run this example by using the included -[`run_etheno.sh`](run_etheno.sh) script. - -This example uses [Echidna](https://github.com/trailofbits/echidna), a -property-based fuzzer, so results are nondeterminstic. But generally -running this example should result in at least one failed differential -test. You can get additional details of the transaction that triggered -the bug by examining `log/DifferentialTester/GAS_USAGE/FAILED.log`. - -Note that this example was tested with Geth 1.8.17-stable and Parity -v2.0.8-stable. Newer versions of these clients will likely have -patched the Constantinople consensus bug and Etheno's differential -tester will therefore pass all tests. 
\ No newline at end of file diff --git a/examples/ConstantinopleGasUsage/constantinople.sol b/examples/ConstantinopleGasUsage/constantinople.sol deleted file mode 100644 index 17a7e57..0000000 --- a/examples/ConstantinopleGasUsage/constantinople.sol +++ /dev/null @@ -1,15 +0,0 @@ -pragma solidity ^0.5.4; -contract C { - int public stored = 1337; - function setStored(int value) public { - stored = value; - } - function increment() public { - int newValue = stored + 1; - stored = 0; - address(this).call(abi.encodeWithSignature("setStored(int256)", newValue)); - } - function echidna_() public returns (bool) { - return true; - } -} diff --git a/examples/ConstantinopleGasUsage/run_etheno.sh b/examples/ConstantinopleGasUsage/run_etheno.sh deleted file mode 100755 index c3fa179..0000000 --- a/examples/ConstantinopleGasUsage/run_etheno.sh +++ /dev/null @@ -1 +0,0 @@ -etheno --echidna --fuzz-limit 20 --fuzz-contract constantinople.sol --accounts 2 --parity --geth --constantinople --log-dir log diff --git a/models/l2/arbitrum/ArbRetryableTx.sol b/models/l2/arbitrum/ArbRetryableTx.sol new file mode 100644 index 0000000..6a2d810 --- /dev/null +++ b/models/l2/arbitrum/ArbRetryableTx.sol @@ -0,0 +1,170 @@ +pragma solidity >=0.4.21 <0.9.0; + +/** +* @title precompiled contract in every Arbitrum chain for retryable transaction related data retrieval and interactions. Exists at 0x000000000000000000000000000000000000006E +*/ +interface ArbRetryableTx { + + /** + * @notice Redeem a redeemable tx. + * Revert if called by an L2 contract, or if userTxHash does not exist, or if userTxHash reverts. + * If this returns, userTxHash has been completed and is no longer available for redemption. + * If this reverts, userTxHash is still available for redemption (until it times out or is canceled). + * @param userTxHash unique identifier of retryable message: keccak256(keccak256(ArbchainId, inbox-sequence-number), uint(0) ) + */ + function redeem(bytes32 userTxHash) external; + + /** + * @notice Return the minimum lifetime of redeemable txn. + * @return lifetime in seconds + */ + function getLifetime() external view returns(uint); + + /** + * @notice Return the timestamp when userTxHash will age out, or zero if userTxHash does not exist. + * The timestamp could be in the past, because aged-out tickets might not be discarded immediately. + * @param userTxHash unique ticket identifier + * @return timestamp for ticket's deadline + */ + function getTimeout(bytes32 userTxHash) external view returns(uint); + + /** + * @notice Return the price, in wei, of submitting a new retryable tx with a given calldata size. + * @param calldataSize call data size to get price of (in wei) + * @return (price, nextUpdateTimestamp). Price is guaranteed not to change until nextUpdateTimestamp. + */ + function getSubmissionPrice(uint calldataSize) external view returns (uint, uint); + + /** + * @notice Return the price, in wei, of extending the lifetime of userTxHash by an additional lifetime period. Revert if userTxHash doesn't exist. + * @param userTxHash unique ticket identifier + * @return (price, nextUpdateTimestamp). Price is guaranteed not to change until nextUpdateTimestamp. + */ + function getKeepalivePrice(bytes32 userTxHash) external view returns(uint, uint); + + /** + @notice Deposits callvalue into the sender's L2 account, then adds one lifetime period to the life of userTxHash. + * If successful, emits LifetimeExtended event. 
+ * Revert if userTxHash does not exist, or if the timeout of userTxHash is already at least one lifetime period in the future, or if the sender has insufficient funds (after the deposit). + * @param userTxHash unique ticket identifier + * @return New timeout of userTxHash. + */ + function keepalive(bytes32 userTxHash) external payable returns(uint); + + /** + * @notice Return the beneficiary of userTxHash. + * Revert if userTxHash doesn't exist. + * @param userTxHash unique ticket identifier + * @return address of beneficiary for ticket + */ + function getBeneficiary(bytes32 userTxHash) external view returns (address); + + /** + * @notice Cancel userTxHash and refund its callvalue to its beneficiary. + * Revert if userTxHash doesn't exist, or if called by anyone other than userTxHash's beneficiary. + * @param userTxHash unique ticket identifier + */ + function cancel(bytes32 userTxHash) external; + + event TicketCreated(bytes32 indexed userTxHash); + event LifetimeExtended(bytes32 indexed userTxHash, uint newTimeout); + event Redeemed(bytes32 indexed userTxHash); + event Canceled(bytes32 indexed userTxHash); +} + +contract ArbRetryableTxEmulated is ArbRetryableTx { + mapping(bytes32 => uint256) private ticketTimeout; + mapping(bytes32 => address) private ticketBeneficiary; + + + function createTicket(bytes32 userTxHash, address beneficiary) public { + require(ticketTimeout[userTxHash] == 0); + ticketTimeout[userTxHash] = block.timestamp + 1 days; + ticketBeneficiary[userTxHash] = beneficiary; + } + + /** + * @notice Redeem a redeemable tx. + * Revert if called by an L2 contract, or if userTxHash does not exist, or if userTxHash reverts. + * If this returns, userTxHash has been completed and is no longer available for redemption. + * If this reverts, userTxHash is still available for redemption (until it times out or is canceled). + * @param userTxHash unique identifier of retryable message: keccak256(keccak256(ArbchainId, inbox-sequence-number), uint(0) ) + */ + function redeem(bytes32 userTxHash) external override { + require(ticketTimeout[userTxHash] > 0); + delete ticketTimeout[userTxHash]; + delete ticketBeneficiary[userTxHash]; + } + + /** + * @notice Return the minimum lifetime of redeemable txn. + * @return lifetime in seconds + */ + function getLifetime() external override view returns(uint) { + return 1 days; + } + + /** + * @notice Return the timestamp when userTxHash will age out, or zero if userTxHash does not exist. + * The timestamp could be in the past, because aged-out tickets might not be discarded immediately. + * @param userTxHash unique ticket identifier + * @return timestamp for ticket's deadline + */ + function getTimeout(bytes32 userTxHash) external override view returns(uint) { + return ticketTimeout[userTxHash]; + } + + /** + * @notice Return the price, in wei, of submitting a new retryable tx with a given calldata size. + * @param calldataSize call data size to get price of (in wei) + * @return (price, nextUpdateTimestamp). Price is guaranteed not to change until nextUpdateTimestamp. + */ + function getSubmissionPrice(uint calldataSize) external override view returns (uint, uint) { + return (1, 1); + } + + /** + * @notice Return the price, in wei, of extending the lifetime of userTxHash by an additional lifetime period. Revert if userTxHash doesn't exist. + * @param userTxHash unique ticket identifier + * @return (price, nextUpdateTimestamp). Price is guaranteed not to change until nextUpdateTimestamp. 
+ */ + function getKeepalivePrice(bytes32 userTxHash) external override view returns(uint, uint) { + return (1, 1); + } + + /** + @notice Deposits callvalue into the sender's L2 account, then adds one lifetime period to the life of userTxHash. + * If successful, emits LifetimeExtended event. + * Revert if userTxHash does not exist, or if the timeout of userTxHash is already at least one lifetime period in the future, or if the sender has insufficient funds (after the deposit). + * @param userTxHash unique ticket identifier + * @return New timeout of userTxHash. + */ + function keepalive(bytes32 userTxHash) external override payable returns(uint) { + require(msg.value > 0); + require(ticketTimeout[userTxHash] > 0); + ticketTimeout[userTxHash] += 1 days; + return ticketTimeout[userTxHash]; + } + + /** + * @notice Return the beneficiary of userTxHash. + * Revert if userTxHash doesn't exist. + * @param userTxHash unique ticket identifier + * @return address of beneficiary for ticket + */ + function getBeneficiary(bytes32 userTxHash) external override view returns (address) { + require(ticketTimeout[userTxHash] > 0); + return ticketBeneficiary[userTxHash]; + } + + /** + * @notice Cancel userTxHash and refund its callvalue to its beneficiary. + * Revert if userTxHash doesn't exist, or if called by anyone other than userTxHash's beneficiary. + * @param userTxHash unique ticket identifier + */ + function cancel(bytes32 userTxHash) external override { + require(ticketTimeout[userTxHash] > 0); + delete ticketTimeout[userTxHash]; + delete ticketBeneficiary[userTxHash]; + } +} diff --git a/models/l2/arbitrum/ArbSys.sol b/models/l2/arbitrum/ArbSys.sol new file mode 100644 index 0000000..e1750d8 --- /dev/null +++ b/models/l2/arbitrum/ArbSys.sol @@ -0,0 +1,194 @@ +pragma solidity >=0.4.21 <0.9.0; + +/** +* @title Precompiled contract that exists in every Arbitrum chain at address(100), 0x0000000000000000000000000000000000000064. Exposes a variety of system-level functionality. + */ +interface ArbSys { + /** + * @notice Get internal version number identifying an ArbOS build + * @return version number as int + */ + function arbOSVersion() external pure returns (uint); + + function arbChainID() external view returns(uint); + + /** + * @notice Get Arbitrum block number (distinct from L1 block number; Arbitrum genesis block has block number 0) + * @return block number as int + */ + function arbBlockNumber() external view returns (uint); + + /** + * @notice Send given amount of Eth to dest from sender. + * This is a convenience function, which is equivalent to calling sendTxToL1 with empty calldataForL1. + * @param destination recipient address on L1 + * @return unique identifier for this L2-to-L1 transaction. + */ + function withdrawEth(address destination) external payable returns(uint); + + /** + * @notice Send a transaction to L1 + * @param destination recipient address on L1 + * @param calldataForL1 (optional) calldata for L1 contract call + * @return a unique identifier for this L2-to-L1 transaction. 
+ */ + function sendTxToL1(address destination, bytes calldata calldataForL1) external payable returns(uint); + + /** + * @notice get the number of transactions issued by the given external account or the account sequence number of the given contract + * @param account target account + * @return the number of transactions issued by the given external account or the account sequence number of the given contract + */ + function getTransactionCount(address account) external view returns(uint256); + + /** + * @notice get the value of target L2 storage slot + * This function is only callable from address 0 to prevent contracts from being able to call it + * @param account target account + * @param index target index of storage slot + * @return stotage value for the given account at the given index + */ + function getStorageAt(address account, uint256 index) external view returns (uint256); + + /** + * @notice check if current call is coming from l1 + * @return true if the caller of this was called directly from L1 + */ + function isTopLevelCall() external view returns (bool); + + /** + * @notice check if the caller (of this caller of this) is an aliased L1 contract address + * @return true iff the caller's address is an alias for an L1 contract address + */ + function wasMyCallersAddressAliased() external view returns (bool); + + /** + * @notice return the address of the caller (of this caller of this), without applying L1 contract address aliasing + * @return address of the caller's caller, without applying L1 contract address aliasing + */ + function myCallersAddressWithoutAliasing() external view returns (address); + + /** + * @notice map L1 sender contract address to its L2 alias + * @param sender sender address + * @param dest destination address + * @return aliased sender address + */ + function mapL1SenderContractAddressToL2Alias(address sender, address dest) external pure returns(address); + + /** + * @notice get the caller's amount of available storage gas + * @return amount of storage gas available to the caller + */ + function getStorageGasAvailable() external view returns(uint); + + event L2ToL1Transaction(address caller, address indexed destination, uint indexed uniqueId, + uint indexed batchNumber, uint indexInBatch, + uint arbBlockNum, uint ethBlockNum, uint timestamp, + uint callvalue, bytes data); +} + +contract ArbSysEmulated is ArbSys { + // Simulates a unique identifier for L2-to-L1 transactions + uint256 private id = 1; + + function arbOSVersion() external override pure returns (uint) { + return 1; + } + + function arbChainID() external override view returns(uint) { + return 42161; + } + + /** + * @notice Get Arbitrum block number (distinct from L1 block number; Arbitrum genesis block has block number 0) + * @return block number as int + */ + function arbBlockNumber() external override view returns (uint) { + return 0; + } + + /** + * @notice Send given amount of Eth to dest from sender. + * This is a convenience function, which is equivalent to calling sendTxToL1 with empty calldataForL1. + * @param destination recipient address on L1 + * @return unique identifier for this L2-to-L1 transaction. + */ + function withdrawEth(address destination) external override payable returns(uint) { + return this.sendTxToL1(destination, ""); + } + + /** + * @notice Send a transaction to L1 + * @param destination recipient address on L1 + * @param calldataForL1 (optional) calldata for L1 contract call + * @return a unique identifier for this L2-to-L1 transaction. 
+ */ + function sendTxToL1(address destination, bytes calldata calldataForL1) public override payable returns(uint) { + return id++; + } + + /** + * @notice get the number of transactions issued by the given external account or the account sequence number of the given contract + * @param account target account + * @return the number of transactions issued by the given external account or the account sequence number of the given contract + */ + function getTransactionCount(address account) external override view returns(uint256) { + return 0; + } + + /** + * @notice get the value of target L2 storage slot + * This function is only callable from address 0 to prevent contracts from being able to call it + * @param account target account + * @param index target index of storage slot + * @return stotage value for the given account at the given index + */ + function getStorageAt(address account, uint256 index) external override view returns (uint256) { + revert(); // Not callable from Echidna + } + + /** + * @notice check if current call is coming from l1 + * @return true if the caller of this was called directly from L1 + */ + function isTopLevelCall() external override view returns (bool) { + return false; // Not sure if this is possible to emulate + } + + /** + * @notice check if the caller (of this caller of this) is an aliased L1 contract address + * @return true iff the caller's address is an alias for an L1 contract address + */ + function wasMyCallersAddressAliased() external override view returns (bool) { + return false; // Not sure if this is possible to emulate + } + + /** + * @notice return the address of the caller (of this caller of this), without applying L1 contract address aliasing + * @return address of the caller's caller, without applying L1 contract address aliasing + */ + function myCallersAddressWithoutAliasing() external override view returns (address) { + return address(0x0); // Not sure if this is possible to emulate + } + + /** + * @notice map L1 sender contract address to its L2 alias + * @param sender sender address + * @param dest destination address + * @return aliased sender address + */ + function mapL1SenderContractAddressToL2Alias(address sender, address dest) external override pure returns(address) { + return address(0x0); // Not sure if this is possible to emulate + } + + + /** + * @notice get the caller's amount of available storage gas + * @return amount of storage gas available to the caller + */ + function getStorageGasAvailable() external override view returns(uint) { + return 0; // Not sure if this is possible to emulate + } + +} \ No newline at end of file diff --git a/setup.py b/setup.py index bde9f4c..d5a670b 100644 --- a/setup.py +++ b/setup.py @@ -1,38 +1,40 @@ from setuptools import setup, find_packages setup( - name='etheno', - description='Etheno is a JSON RPC multiplexer, Manticore wrapper, differential fuzzer, and test framework integration tool.', - url='https://github.com/trailofbits/etheno', - author='Trail of Bits', - version='0.2.4', + name="etheno", + description="Etheno is a JSON RPC multiplexer, differential fuzzer, and test framework integration tool.", + url="https://github.com/trailofbits/etheno", + author="Trail of Bits", + version="0.3a1", packages=find_packages(), - python_requires='>=3.6', + python_requires=">=3.7", install_requires=[ - 'ptyprocess', - 'pysha3>=1.0.2', - 'flask>=1.0.2', - 'web3', - # The following two requirements are for our fork of `keyfile.py`, - # but they should already be satisfied by the `web3` 
requirement - 'cytoolz>=0.9.0,<1.0.0', - 'pycryptodome>=3.4.7,<4.0.0', - 'setuptools' + "ptyprocess", + "pysha3>=1.0.2", + # TODO: identify what is the oldest flask version that the new shutdown mechanism is compatible with + "flask", + # Pinning web3 to a low version to prevent conflicts with other packages + "web3>=3.16.4", + # Contextual version conflicts between eth-hash, eth-utils, eth-rlp, and rusty-rlp + # This works only if `--platform linux/amd64` is set since rusty-rlp==0.1.15 is not available for ARM architectures + # This is super hacky but it works for now + # This is likely going to cause conflicts with other packages :( + "eth-hash>=0.3.1,<0.4.0", + "eth-utils==1.10.0", + "eth-rlp<0.3.0", + "setuptools", ], - extras_require={ - 'manticore': ['manticore>=0.2.2'] - }, - entry_points={ - 'console_scripts': [ - 'etheno = etheno.__main__:main' - ] - }, + # rusty-rlp==0.1.15 has to be downloaded as a tarball + dependency_links=[ + "https://github.com/cburgdorf/rusty-rlp/archive/refs/tags/0.1.15.tar.gz" + ], + entry_points={"console_scripts": ["etheno = etheno.__main__:main"]}, classifiers=[ - 'Development Status :: 4 - Beta', - 'Environment :: Console', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: GNU Affero General Public License v3', - 'Topic :: Security', - 'Topic :: Software Development :: Testing' - ] + "Development Status :: 4 - Beta", + "Environment :: Console", + "Intended Audience :: Developers", + "License :: OSI Approved :: GNU Affero General Public License v3", + "Topic :: Security", + "Topic :: Software Development :: Testing", + ], ) diff --git a/tests/drizzle/contracts/ComplexStorage.sol b/tests/drizzle/contracts/ComplexStorage.sol new file mode 100644 index 0000000..4cd5cad --- /dev/null +++ b/tests/drizzle/contracts/ComplexStorage.sol @@ -0,0 +1,44 @@ +// SPDX-License-Identifier: MIT +pragma solidity >=0.4.21 <0.7.0; + +contract ComplexStorage { + uint public storeduint1 = 15; + uint public constant constuint = 16; + uint128 public investmentsLimit = 17055; + uint32 public investmentsDeadlineTimeStamp = uint32(now); + + bytes16 public string1 = "test1"; + bytes32 public string2 = "test1236"; + string public string3 = "lets string something"; + + mapping (address => uint) uints1; + mapping (address => DeviceData) structs1; + + uint[] public uintarray; + DeviceData[] public deviceDataArray; + DeviceData public singleDD; + + struct DeviceData { + string deviceBrand; + string deviceYear; + string batteryWearLevel; + } + + constructor() public { + address address1 = 0xbCcc714d56bc0da0fd33d96d2a87b680dD6D0DF6; + address address2 = 0xaee905FdD3ED851e48d22059575b9F4245A82B04; + + uints1[address1] = 88; + uints1[address2] = 99; + + structs1[address1] = DeviceData("deviceBrand", "deviceYear", "wearLevel"); + structs1[address2] = DeviceData("deviceBrand2", "deviceYear2", "wearLevel2"); + singleDD = DeviceData("deviceBrand3", "deviceYear3", "wearLevel3"); + + uintarray.push(8000); + uintarray.push(9000); + + deviceDataArray.push(structs1[address1]); + deviceDataArray.push(structs1[address2]); + } +} diff --git a/tests/drizzle/contracts/Migrations.sol b/tests/drizzle/contracts/Migrations.sol new file mode 100644 index 0000000..d5bbeca --- /dev/null +++ b/tests/drizzle/contracts/Migrations.sol @@ -0,0 +1,19 @@ +// SPDX-License-Identifier: MIT +pragma solidity >=0.4.21 <0.7.0; + +contract Migrations { + address public owner; + uint public last_completed_migration; + + constructor() public { + owner = msg.sender; + } + + modifier restricted() { + if 
(msg.sender == owner) _; + } + + function setCompleted(uint completed) public restricted { + last_completed_migration = completed; + } +} diff --git a/tests/drizzle/contracts/SimpleStorage.sol b/tests/drizzle/contracts/SimpleStorage.sol new file mode 100644 index 0000000..2cb243e --- /dev/null +++ b/tests/drizzle/contracts/SimpleStorage.sol @@ -0,0 +1,14 @@ +// SPDX-License-Identifier: MIT +pragma solidity >=0.4.21 <0.7.0; + +contract SimpleStorage { + event StorageSet(string _message); + + uint256 public storedData; + + function set(uint256 x) public { + storedData = x; + + emit StorageSet("Data stored successfully!"); + } +} diff --git a/tests/drizzle/contracts/TutorialToken.sol b/tests/drizzle/contracts/TutorialToken.sol new file mode 100644 index 0000000..2e16e1c --- /dev/null +++ b/tests/drizzle/contracts/TutorialToken.sol @@ -0,0 +1,15 @@ +// SPDX-License-Identifier: MIT +pragma solidity >=0.4.21 <0.7.0; + +import "@openzeppelin/contracts/token/ERC20/ERC20.sol"; + +contract TutorialToken is ERC20 { + string public name = "TutorialToken"; + string public symbol = "TT"; + uint256 public decimals = 2; + uint256 public INITIAL_SUPPLY = 12000; + + constructor() public { + _mint(msg.sender, INITIAL_SUPPLY); + } +} diff --git a/tests/drizzle/hardhat.config.js b/tests/drizzle/hardhat.config.js new file mode 100644 index 0000000..20d3ed8 --- /dev/null +++ b/tests/drizzle/hardhat.config.js @@ -0,0 +1,20 @@ +require("@nomiclabs/hardhat-waffle"); + +module.exports = { + networks: { + localhost: { + host: "127.0.0.1", + port: 8545, + } + }, + solidity: { + compilers: [ + { + version: "0.6.12", + }, + { + version: "0.5.0", + } + ] + } +}; diff --git a/tests/drizzle/package.json b/tests/drizzle/package.json new file mode 100644 index 0000000..3167eb3 --- /dev/null +++ b/tests/drizzle/package.json @@ -0,0 +1,15 @@ +{ + "name": "drizzle-box", + "dependencies": { + "@openzeppelin/contracts": "^2.4.0" + }, + "devDependencies": { + "@nomiclabs/hardhat-waffle": "^2.0.3", + "chai": "^4.3.6", + "ethers": "^5.6.2", + "ganache": "^7.3.2", + "hardhat": "^2.9.2", + "@nomiclabs/hardhat-ethers": "^2.0.5", + "ethereum-waffle": "^3.4.4" + } +} \ No newline at end of file diff --git a/tests/drizzle/test/TestSimpleStorage.sol b/tests/drizzle/test/TestSimpleStorage.sol new file mode 100644 index 0000000..3b75b86 --- /dev/null +++ b/tests/drizzle/test/TestSimpleStorage.sol @@ -0,0 +1,17 @@ +pragma solidity >=0.4.21 <0.7.0; + +import "truffle/Assert.sol"; +import "truffle/DeployedAddresses.sol"; +import "../contracts/SimpleStorage.sol"; + +contract TestSimpleStorage { + function testItStoresAValue() public { + SimpleStorage simpleStorage = SimpleStorage(DeployedAddresses.SimpleStorage()); + + simpleStorage.set(89); + + uint expected = 89; + + Assert.equal(simpleStorage.storedData(), expected, "It should store the value 89."); + } +} \ No newline at end of file diff --git a/tests/drizzle/test/simplestorage.js b/tests/drizzle/test/simplestorage.js new file mode 100644 index 0000000..04d1e9a --- /dev/null +++ b/tests/drizzle/test/simplestorage.js @@ -0,0 +1,18 @@ +const {expect} = require("chai"); +const {ethers} = require("hardhat") + +describe("Deploy SimpleStorage", function() { + it("Deploy", async function() { + // Get Factory + const SimpleStorageFactory = await ethers.getContractFactory("SimpleStorage"); + // Deploy + const SimpleStorage = await SimpleStorageFactory.deploy(); + + const [account] = await ethers.getSigners() + await SimpleStorage.set(89, { from: account.address }); + // Check stored value + 
expect(await SimpleStorage.storedData()).to.equal(89); + // Deploy another + const SimpleStorageTwo = await SimpleStorageFactory.deploy(); + }); +});
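Illustrative usage (not part of the patch above): a minimal sketch of how the new tests/drizzle suite might be driven against Etheno, assuming Etheno's default JSON-RPC port of 8545 (the port the new hardhat.config.js targets) and reusing the --ganache and --log-dir flags shown in the removed run_etheno.sh; the exact invocation is an assumption, not taken from this diff.

    pip3 install -e .                                           # install etheno from this checkout
    etheno --ganache --log-dir log &                            # Etheno fronting a Ganache instance on 127.0.0.1:8545 (assumed default port)
    (cd tests/drizzle && yarn install)                          # pulls hardhat, ethers, ganache, chai per package.json
    (cd tests/drizzle && npx hardhat test --network localhost)  # runs test/simplestorage.js through Etheno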