diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fb5c66ec9c0..2127de8f935 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,23 +1,26 @@ -name: Run CI with Earthly +name: Earthly CI on: push: - branches: - - master + branches: [master] pull_request: {} workflow_dispatch: {} jobs: - ci: + e2e: runs-on: ubuntu-latest - # run ci for both x86_64 and arm64 - # strategy: {matrix: {environment: [x86, arm]}} - # TODO figure out why arm64 doesn't exit properly - strategy: {matrix: {environment: [x86]}} + env: + EARTHLY_TOKEN: ${{ secrets.EARTHLY_TOKEN }} + # TODO currently names are coupled to platform + strategy: { matrix: { environment: [x86, arm], test: [e2e-escrow-contract, e2e-account-contracts] } } # cancel if reran on same PR if exists, otherwise if on same commit concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-${{ matrix.environment }} + group: ${{ matrix.test }}-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-${{ matrix.environment }} cancel-in-progress: true steps: + - uses: earthly/actions-setup@v1 + with: + version: v0.8.5 + - name: Checkout uses: actions/checkout@v4 with: @@ -25,24 +28,61 @@ jobs: submodules: recursive - name: Setup + working-directory: ./scripts + run: ./setup_env.sh ${{ matrix.environment }} ${{ secrets.DOCKERHUB_PASSWORD }} ${{ secrets.BUILD_INSTANCE_SSH_KEY }} + + - name: Test + working-directory: ./yarn-project/end-to-end run: | - mkdir -p ~/.ssh - echo DOCKER_HOST=ssh://build-instance-${{ matrix.environment }}.aztecprotocol.com >> $GITHUB_ENV - echo ${{ secrets.DOCKERHUB_PASSWORD}} | docker login -u aztecprotocolci --password-stdin - echo ${{ secrets.BUILD_INSTANCE_SSH_KEY }} | base64 -d > ~/.ssh/build_instance_key - chmod 600 ~/.ssh/build_instance_key - cat > ~/.ssh/config < >(awk -W interactive -v g="$g" -v r="$r" '$0=g"native: "r $0')) & -(build_wasm > >(awk -W interactive -v b="$b" -v r="$r" '$0=b"wasm: "r $0')) & -(build_wasm_threads > >(awk -W interactive -v p="$p" -v r="$r" '$0=p"wasm_threads: "r $0')) & +(build_native > >(awk -v g="$g" -v r="$r" '{print g "native: " r $0}')) & +(build_wasm > >(awk -v b="$b" -v r="$r" '{print b "wasm: " r $0}')) & +(build_wasm_threads > >(awk -v p="$p" -v r="$r" '{print p "wasm_threads: "r $0}')) & for job in $(jobs -p); do wait $job || exit 1 diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp index 1a6827f9989..17720a6dd5d 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp @@ -94,6 +94,7 @@ size_t generate_ecdsa_constraint(EcdsaSecp256r1Constraint& ecdsa_r1_constraint, TEST(ECDSASecp256r1, test_hardcoded) { + bb::srs::init_crs_factory("../srs_db/ignition"); EcdsaSecp256r1Constraint ecdsa_r1_constraint; WitnessVector witness_values; @@ -168,6 +169,7 @@ TEST(ECDSASecp256r1, test_hardcoded) TEST(ECDSASecp256r1, TestECDSAConstraintSucceed) { + bb::srs::init_crs_factory("../srs_db/ignition"); EcdsaSecp256r1Constraint ecdsa_r1_constraint; WitnessVector witness_values; size_t num_variables = generate_ecdsa_constraint(ecdsa_r1_constraint, witness_values); @@ -216,6 +218,7 @@ TEST(ECDSASecp256r1, TestECDSAConstraintSucceed) // even though we are just building the circuit. TEST(ECDSASecp256r1, TestECDSACompilesForVerifier) { + bb::srs::init_crs_factory("../srs_db/ignition"); EcdsaSecp256r1Constraint ecdsa_r1_constraint; WitnessVector witness_values; size_t num_variables = generate_ecdsa_constraint(ecdsa_r1_constraint, witness_values); @@ -252,6 +255,7 @@ TEST(ECDSASecp256r1, TestECDSACompilesForVerifier) TEST(ECDSASecp256r1, TestECDSAConstraintFail) { + bb::srs::init_crs_factory("../srs_db/ignition"); EcdsaSecp256r1Constraint ecdsa_r1_constraint; WitnessVector witness_values; size_t num_variables = generate_ecdsa_constraint(ecdsa_r1_constraint, witness_values); diff --git a/barretenberg/cpp/src/barretenberg/plonk/proof_system/proving_key/proving_key.test.cpp b/barretenberg/cpp/src/barretenberg/plonk/proof_system/proving_key/proving_key.test.cpp index 846d04951f1..ec6f62141d8 100644 --- a/barretenberg/cpp/src/barretenberg/plonk/proof_system/proving_key/proving_key.test.cpp +++ b/barretenberg/cpp/src/barretenberg/plonk/proof_system/proving_key/proving_key.test.cpp @@ -17,6 +17,7 @@ using namespace bb::plonk; // Test proving key serialization/deserialization to/from buffer TEST(proving_key, proving_key_from_serialized_key) { + bb::srs::init_crs_factory("../srs_db/ignition"); auto builder = StandardCircuitBuilder(); auto composer = StandardComposer(); fr a = fr::one(); @@ -25,7 +26,7 @@ TEST(proving_key, proving_key_from_serialized_key) plonk::proving_key& p_key = *composer.compute_proving_key(builder); auto pk_buf = to_buffer(p_key); auto pk_data = from_buffer(pk_buf); - auto crs = std::make_unique>("../srs_db/ignition"); + auto crs = bb::srs::get_bn254_crs_factory(); auto proving_key = std::make_shared(std::move(pk_data), crs->get_prover_crs(pk_data.circuit_size + 1)); @@ -54,6 +55,7 @@ TEST(proving_key, proving_key_from_serialized_key) // Test proving key serialization/deserialization to/from buffer using UltraPlonkComposer TEST(proving_key, proving_key_from_serialized_key_ultra) { + bb::srs::init_crs_factory("../srs_db/ignition"); auto builder = UltraCircuitBuilder(); auto composer = UltraComposer(); fr a = fr::one(); @@ -62,7 +64,7 @@ TEST(proving_key, proving_key_from_serialized_key_ultra) plonk::proving_key& p_key = *composer.compute_proving_key(builder); auto pk_buf = to_buffer(p_key); auto pk_data = from_buffer(pk_buf); - auto crs = std::make_unique>("../srs_db/ignition"); + auto crs = bb::srs::get_bn254_crs_factory(); auto proving_key = std::make_shared(std::move(pk_data), crs->get_prover_crs(pk_data.circuit_size + 1)); diff --git a/barretenberg/cpp/src/barretenberg/srs/factories/mem_crs_factory.test.cpp b/barretenberg/cpp/src/barretenberg/srs/factories/mem_crs_factory.test.cpp index bfdab78e996..190fa75cef1 100644 --- a/barretenberg/cpp/src/barretenberg/srs/factories/mem_crs_factory.test.cpp +++ b/barretenberg/cpp/src/barretenberg/srs/factories/mem_crs_factory.test.cpp @@ -45,14 +45,14 @@ TEST(reference_string, mem_bn254_file_consistency) 0); } -TEST(reference_string, mem_grumpkin_file_consistency) +TEST(reference_string, DISABLED_mem_grumpkin_file_consistency) { // Load 1024 from file. - auto file_crs = FileCrsFactory("../srs_db/ignition", 1024); + auto file_crs = FileCrsFactory("../srs_db/grumpkin", 1024); // Use low level io lib to read 1024 from file. std::vector points(1024); - ::srs::IO::read_transcript_g1(points.data(), 1024, "../srs_db/ignition"); + ::srs::IO::read_transcript_g1(points.data(), 1024, "../srs_db/grumpkin"); MemGrumpkinCrsFactory mem_crs(points); auto file_prover_crs = file_crs.get_prover_crs(1024); diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/goblin_verifier.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/goblin_verifier.test.cpp index 00c70136ded..e49c72b0c3a 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/goblin_verifier.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/goblin_verifier.test.cpp @@ -266,12 +266,12 @@ HEAVY_TYPED_TEST(GoblinRecursiveVerifierTest, RecursiveVerificationKey) TestFixture::test_recursive_verification_key_creation(); } -HEAVY_TYPED_TEST(GoblinRecursiveVerifierTest, SingleRecursiveVerification) +HEAVY_TYPED_TEST(GoblinRecursiveVerifierTest, DISABLED_SingleRecursiveVerification) { TestFixture::test_recursive_verification(); }; -HEAVY_TYPED_TEST(GoblinRecursiveVerifierTest, SingleRecursiveVerificationFailure) +HEAVY_TYPED_TEST(GoblinRecursiveVerifierTest, DISABLED_SingleRecursiveVerificationFailure) { TestFixture::test_recursive_verification_fails(); }; diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/verifier.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/verifier.test.cpp index be51b184ee6..e1158eba311 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/verifier.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/verifier.test.cpp @@ -248,12 +248,12 @@ HEAVY_TYPED_TEST(HonkRecursiveVerifierTest, RecursiveVerificationKey) TestFixture::test_recursive_verification_key_creation(); } -HEAVY_TYPED_TEST(HonkRecursiveVerifierTest, SingleRecursiveVerification) +HEAVY_TYPED_TEST(HonkRecursiveVerifierTest, DISABLED_SingleRecursiveVerification) { TestFixture::test_recursive_verification(); }; -HEAVY_TYPED_TEST(HonkRecursiveVerifierTest, SingleRecursiveVerificationFailure) +HEAVY_TYPED_TEST(HonkRecursiveVerifierTest, DISABLED_SingleRecursiveVerificationFailure) { TestFixture::test_recursive_verification_fails(); }; diff --git a/barretenberg/cpp/srs_db/Earthfile b/barretenberg/cpp/srs_db/Earthfile index 36688dac614..4b4a503b6b7 100644 --- a/barretenberg/cpp/srs_db/Earthfile +++ b/barretenberg/cpp/srs_db/Earthfile @@ -10,7 +10,7 @@ build: RUN ./download_grumpkin.sh # export srs-db for runners SAVE ARTIFACT ignition ignition - SAVE ARTIFACT ignition grumpkin + SAVE ARTIFACT grumpkin grumpkin build-local: # copy files locally diff --git a/barretenberg/cpp/srs_db/download_srs.sh b/barretenberg/cpp/srs_db/download_srs.sh index df7ffd03041..5bf8adc6e04 100755 --- a/barretenberg/cpp/srs_db/download_srs.sh +++ b/barretenberg/cpp/srs_db/download_srs.sh @@ -25,19 +25,19 @@ checksum() { download() { # Initialize an empty variable for the Range header RANGE_HEADER="" - + # If both RANGE_START and RANGE_END are set, add them to the Range header if [ -n "$RANGE_START" ] && [ -n "$RANGE_END" ]; then RANGE_HEADER="-H Range:bytes=$RANGE_START-$RANGE_END" fi - + # Download the file if [ "$APPEND" = "true" ]; then - curl $RANGE_HEADER https://aztec-ignition.s3-eu-west-2.amazonaws.com/$AWS_BUCKET/monomial/transcript${1}.dat >> transcript${1}.dat + curl -s $RANGE_HEADER https://aztec-ignition.s3-eu-west-2.amazonaws.com/$AWS_BUCKET/monomial/transcript${1}.dat >> transcript${1}.dat else - curl $RANGE_HEADER https://aztec-ignition.s3-eu-west-2.amazonaws.com/$AWS_BUCKET/monomial/transcript${1}.dat > transcript${1}.dat + curl -s $RANGE_HEADER https://aztec-ignition.s3-eu-west-2.amazonaws.com/$AWS_BUCKET/monomial/transcript${1}.dat > transcript${1}.dat fi - + } for TRANSCRIPT in $(seq 0 $NUM); do diff --git a/barretenberg/ts/Earthfile b/barretenberg/ts/Earthfile index 744a7c7b750..9d4ef8fd373 100644 --- a/barretenberg/ts/Earthfile +++ b/barretenberg/ts/Earthfile @@ -5,11 +5,11 @@ WORKDIR /build # minimum files to download yarn packages # keep timestamps for incremental builds -COPY --keep-ts --dir .yarn package.json yarn.lock .yarnrc.yml . +COPY --dir .yarn package.json yarn.lock .yarnrc.yml . RUN yarn --immutable # other source files -COPY --keep-ts --dir src *.json *.js *.cjs . +COPY --dir src *.json *.js *.cjs . # copy over wasm build from cpp folder COPY ../cpp/+preset-wasm/bin/barretenberg.wasm src/barretenberg_wasm/barretenberg-threads.wasm @@ -23,7 +23,7 @@ esm: SAVE ARTIFACT /build cjs: - COPY --keep-ts scripts/cjs_postprocess.sh scripts/ + COPY scripts/cjs_postprocess.sh scripts/ RUN yarn build:cjs SAVE ARTIFACT /build diff --git a/l1-contracts/Earthfile b/l1-contracts/Earthfile index ee016750a79..6f6f42a85d2 100644 --- a/l1-contracts/Earthfile +++ b/l1-contracts/Earthfile @@ -15,7 +15,7 @@ RUN foundryup RUN npm install --global yarn solhint WORKDIR /build -COPY --keep-ts --dir lib scripts src terraform test *.json *.toml *.sh . +COPY --dir lib scripts src terraform test *.json *.toml *.sh . build: RUN git init && git add . && yarn lint && yarn slither && yarn slither-has-diff diff --git a/noir-projects/bootstrap.sh b/noir-projects/bootstrap.sh index 00e72ce1944..fc265ec76cc 100755 --- a/noir-projects/bootstrap.sh +++ b/noir-projects/bootstrap.sh @@ -22,9 +22,9 @@ g="\033[32m" # Green b="\033[34m" # Blue r="\033[0m" # Reset -((cd "./noir-contracts" && ./bootstrap.sh) > >(awk -v g="$g" -v r="$r" '$0=g"contracts: "r $0')) & -((cd "./noir-protocol-circuits" && ./bootstrap.sh) > >(awk -v b="$b" -v r="$r" '$0=b"protocol-circuits: "r $0')) & +((cd "./noir-contracts" && ./bootstrap.sh) > >(awk -v g="$g" -v r="$r" '{print g "contracts: " r $0}')) & +((cd "./noir-protocol-circuits" && ./bootstrap.sh) > >(awk -v b="$b" -v r="$r" '{print b "protocol-circuits: " r $0}')) & for job in $(jobs -p); do wait $job || exit 1 -done \ No newline at end of file +done diff --git a/noir/Earthfile b/noir/Earthfile index d3f86ad5fa1..54fbd8bde3b 100644 --- a/noir/Earthfile +++ b/noir/Earthfile @@ -5,7 +5,7 @@ nargo: RUN apt update && apt install -y libc++1 WORKDIR /build # Relevant source (TODO finer-grained 'tooling') - COPY --keep-ts --dir \ + COPY --dir \ noir-repo/acvm-repo \ noir-repo/aztec_macros \ noir-repo/compiler \ @@ -18,10 +18,11 @@ nargo: # TODO(AD) is this OK as a content hash? ENV COMMIT_HASH=$(find . -type f -exec sha256sum {} ';' | sort | sha256sum | awk '{print $1}') - COPY --keep-ts ./scripts/bootstrap_native.sh ./scripts/bootstrap_native.sh + COPY ./scripts/bootstrap_native.sh ./scripts/bootstrap_native.sh RUN ./scripts/bootstrap_native.sh SAVE ARTIFACT /build/noir-repo/target/release/nargo nargo SAVE ARTIFACT /build/noir-repo/target/release/acvm acvm + SAVE IMAGE aztecprotocol/nargo packages: FROM node:20 @@ -36,7 +37,7 @@ packages: WORKDIR /build # Relevant source (TODO finer-grained) - COPY --keep-ts --dir \ + COPY --dir \ noir-repo/acvm-repo \ noir-repo/aztec_macros \ noir-repo/compiler \ @@ -58,13 +59,14 @@ packages: noir-repo/.envrc \ noir-repo - COPY --keep-ts noir-repo/.github/scripts noir-repo/.github/scripts - COPY --keep-ts ./scripts/bootstrap_packages.sh ./scripts/bootstrap_packages.sh + COPY noir-repo/.github/scripts noir-repo/.github/scripts + COPY ./scripts/bootstrap_packages.sh ./scripts/bootstrap_packages.sh # TODO(AD) is this OK as a content hash? ENV COMMIT_HASH=$(find . -type f -exec sha256sum {} ';' | sort | sha256sum | awk '{print $1}') RUN PATH="/root/.cargo/bin:$PATH" ./scripts/bootstrap_packages.sh SAVE ARTIFACT packages + SAVE IMAGE --cache-hint run: # When running the container, mount the users home directory to same location. diff --git a/scripts/earthly b/scripts/earthly index ff0137b8415..4f62eb66cd9 100755 --- a/scripts/earthly +++ b/scripts/earthly @@ -3,7 +3,7 @@ [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace # Aztec build/test/bench tool -# Thin wrapper for earthly that helps with building targets. +# Thin wrapper for earthly that helps with building targets. # Adds autodownloading earthly and timing code. # Usage: # Go to folder, e.g. docs, use az to build @@ -18,7 +18,7 @@ EARTHLY=$(dirname $0)/.earthly/earthly EARTHLY_CONFIG=$(dirname $0)/earthly-config.yml if ! [ -f "$EARTHLY" ] ; then - wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-amd64 -O "$EARTHLY"; + wget -q https://github.com/earthly/earthly/releases/latest/download/earthly-linux-amd64 -O "$EARTHLY"; chmod +x "$EARTHLY" fi diff --git a/scripts/setup_env.sh b/scripts/setup_env.sh new file mode 100755 index 00000000000..a1d6a25ddcb --- /dev/null +++ b/scripts/setup_env.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +# Setup environment variables +echo "Setting up environment variables..." +echo FORCE_COLOR=1 >> $GITHUB_ENV +echo DOCKER_HOST=ssh://build-instance-$1.aztecprotocol.com >> $GITHUB_ENV + +# Docker login +echo "Logging in to Docker..." +echo $2 | docker login -u aztecprotocolci --password-stdin + +# Configure SSH +echo "Configuring SSH..." +mkdir -p ~/.ssh +echo $3 | base64 -d > ~/.ssh/build_instance_key +chmod 600 ~/.ssh/build_instance_key +cat > ~/.ssh/config <> $GITHUB_ENV \ No newline at end of file diff --git a/yarn-project/Earthfile b/yarn-project/Earthfile index d2dba999259..68a98dbe823 100644 --- a/yarn-project/Earthfile +++ b/yarn-project/Earthfile @@ -12,7 +12,7 @@ COPY ../l1-contracts/+build/out /build/l1-contracts/out WORKDIR /build/yarn-project # copy source -COPY --keep-ts --dir * *.json .yarn .yarnrc.yml . +COPY --dir * *.json .yarn .yarnrc.yml . # We install a symlink to yarn-project's node_modules at a location that all portalled packages can find as they # walk up the tree as part of module resolution. The supposedly idiomatic way of supporting module resolution diff --git a/yarn-project/end-to-end/Earthfile b/yarn-project/end-to-end/Earthfile index aa4758b52ae..c85026fc926 100644 --- a/yarn-project/end-to-end/Earthfile +++ b/yarn-project/end-to-end/Earthfile @@ -25,8 +25,9 @@ E2E_TEST: FUNCTION ARG test ARG compose_file=./scripts/docker-compose.yml - LOCALLY + FROM earthly/dind:alpine-3.19-docker-25.0.2-r0 ENV TEST=$test + COPY $compose_file $compose_file WITH DOCKER \ --load aztecprotocol/aztec:latest=+get-aztec \ --load aztecprotocol/end-to-end:latest=+get-end-to-end \ @@ -34,7 +35,6 @@ E2E_TEST: # Run our docker compose, ending whenever sandbox ends, filtering out noisy eth_getLogs RUN docker compose -f $compose_file up --exit-code-from=sandbox --force-recreate END - # we could use a parameterized target, but these just print cleaner in earthly log e2e-block-building: diff --git a/yarn-project/end-to-end/scripts/docker-compose-no-sandbox.yml b/yarn-project/end-to-end/scripts/docker-compose-no-sandbox.yml index db646499b49..322ac1674a2 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-no-sandbox.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-no-sandbox.yml @@ -5,9 +5,9 @@ services: entrypoint: > sh -c ' if [ -n "$FORK_BLOCK_NUMBER" ] && [ -n "$FORK_URL" ]; then - exec anvil -p 8545 --host 0.0.0.0 --chain-id 31337 --fork-url "$FORK_URL" --fork-block-number "$FORK_BLOCK_NUMBER" + exec anvil --silent -p 8545 --host 0.0.0.0 --chain-id 31337 --fork-url "$FORK_URL" --fork-block-number "$FORK_BLOCK_NUMBER" else - exec anvil -p 8545 --host 0.0.0.0 --chain-id 31337 + exec anvil --silent -p 8545 --host 0.0.0.0 --chain-id 31337 fi' ports: - '8545:8545' diff --git a/yarn-project/end-to-end/scripts/docker-compose-p2p.yml b/yarn-project/end-to-end/scripts/docker-compose-p2p.yml index f61b7bd3d8f..3b5a5a31939 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-p2p.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-p2p.yml @@ -2,7 +2,7 @@ version: '3' services: fork: image: ghcr.io/foundry-rs/foundry:nightly-de33b6af53005037b463318d2628b5cfcaf39916 - entrypoint: 'anvil -p 8545 --host 0.0.0.0 --chain-id 31337' + entrypoint: 'anvil --silent -p 8545 --host 0.0.0.0 --chain-id 31337' ports: - '8545:8545' diff --git a/yarn-project/end-to-end/scripts/docker-compose.yml b/yarn-project/end-to-end/scripts/docker-compose.yml index 86bc514ac81..ce7fd63d57f 100644 --- a/yarn-project/end-to-end/scripts/docker-compose.yml +++ b/yarn-project/end-to-end/scripts/docker-compose.yml @@ -5,12 +5,10 @@ services: entrypoint: > sh -c ' if [ -n "$FORK_BLOCK_NUMBER" ] && [ -n "$FORK_URL" ]; then - { anvil -p 8545 --host 0.0.0.0 --chain-id 31337 --fork-url "$FORK_URL" --fork-block-number "$FORK_BLOCK_NUMBER" ; echo $$? > .status ; } | grep -v eth_getLogs + anvil --silent -p 8545 --host 0.0.0.0 --chain-id 31337 --fork-url "$FORK_URL" --fork-block-number "$FORK_BLOCK_NUMBER" else - { anvil -p 8545 --host 0.0.0.0 --chain-id 31337 ; echo $$? > .status ; } | grep -v eth_getLogs - fi - # final status - grep -q '^0$$' .status' + anvil --silent -p 8545 --host 0.0.0.0 --chain-id 31337 + fi' ports: - '8545:8545'