From e7d71bf38866fcc790897880ed324e737dadaa0a Mon Sep 17 00:00:00 2001 From: bap2pecs Date: Wed, 31 Jul 2024 17:42:20 -0400 Subject: [PATCH] disable CI --- .circleci/config.yml | 1815 +++++++++++++++++++++--------------------- 1 file changed, 908 insertions(+), 907 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 0c5241396988..70525ed04613 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -36,89 +36,89 @@ parameters: type: boolean default: false -# orbs: -# go: circleci/go@1.8.0 -# gcp-cli: circleci/gcp-cli@3.0.1 -# slack: circleci/slack@4.10.1 -# shellcheck: circleci/shellcheck@3.2.0 -# commands: -# gcp-oidc-authenticate: -# description: "Authenticate with GCP using a CircleCI OIDC token." -# parameters: -# project_id: -# type: env_var_name -# default: GCP_PROJECT_ID -# workload_identity_pool_id: -# type: env_var_name -# default: GCP_WIP_ID -# workload_identity_pool_provider_id: -# type: env_var_name -# default: GCP_WIP_PROVIDER_ID -# service_account_email: -# type: env_var_name -# default: GCP_SERVICE_ACCOUNT_EMAIL -# gcp_cred_config_file_path: -# type: string -# default: /home/circleci/gcp_cred_config.json -# oidc_token_file_path: -# type: string -# default: /home/circleci/oidc_token.json -# steps: -# - run: -# name: "Create OIDC credential configuration" -# command: | -# # Store OIDC token in temp file -# echo $CIRCLE_OIDC_TOKEN > << parameters.oidc_token_file_path >> -# # Create a credential configuration for the generated OIDC ID Token -# gcloud iam workload-identity-pools create-cred-config \ -# "projects/${<< parameters.project_id >>}/locations/global/workloadIdentityPools/${<< parameters.workload_identity_pool_id >>}/providers/${<< parameters.workload_identity_pool_provider_id >>}"\ -# --output-file="<< parameters.gcp_cred_config_file_path >>" \ -# --service-account="${<< parameters.service_account_email >>}" \ -# --credential-source-file=<< parameters.oidc_token_file_path >> -# - run: -# name: "Authenticate with GCP using OIDC" -# command: | -# # Configure gcloud to leverage the generated credential configuration -# gcloud auth login --brief --cred-file "<< parameters.gcp_cred_config_file_path >>" -# # Configure ADC -# echo "export GOOGLE_APPLICATION_CREDENTIALS='<< parameters.gcp_cred_config_file_path >>'" | tee -a "$BASH_ENV" +orbs: + go: circleci/go@1.8.0 + gcp-cli: circleci/gcp-cli@3.0.1 + slack: circleci/slack@4.10.1 + shellcheck: circleci/shellcheck@3.2.0 +commands: + gcp-oidc-authenticate: + description: "Authenticate with GCP using a CircleCI OIDC token." 
+ parameters: + project_id: + type: env_var_name + default: GCP_PROJECT_ID + workload_identity_pool_id: + type: env_var_name + default: GCP_WIP_ID + workload_identity_pool_provider_id: + type: env_var_name + default: GCP_WIP_PROVIDER_ID + service_account_email: + type: env_var_name + default: GCP_SERVICE_ACCOUNT_EMAIL + gcp_cred_config_file_path: + type: string + default: /home/circleci/gcp_cred_config.json + oidc_token_file_path: + type: string + default: /home/circleci/oidc_token.json + steps: + - run: + name: "Create OIDC credential configuration" + command: | + # Store OIDC token in temp file + echo $CIRCLE_OIDC_TOKEN > << parameters.oidc_token_file_path >> + # Create a credential configuration for the generated OIDC ID Token + gcloud iam workload-identity-pools create-cred-config \ + "projects/${<< parameters.project_id >>}/locations/global/workloadIdentityPools/${<< parameters.workload_identity_pool_id >>}/providers/${<< parameters.workload_identity_pool_provider_id >>}"\ + --output-file="<< parameters.gcp_cred_config_file_path >>" \ + --service-account="${<< parameters.service_account_email >>}" \ + --credential-source-file=<< parameters.oidc_token_file_path >> + - run: + name: "Authenticate with GCP using OIDC" + command: | + # Configure gcloud to leverage the generated credential configuration + gcloud auth login --brief --cred-file "<< parameters.gcp_cred_config_file_path >>" + # Configure ADC + echo "export GOOGLE_APPLICATION_CREDENTIALS='<< parameters.gcp_cred_config_file_path >>'" | tee -a "$BASH_ENV" -# check-changed: -# description: "Conditionally halts a step if certain modules change" -# parameters: -# patterns: -# type: string -# description: "Comma-separated list of dependencies" -# no_go_deps: -# type: string -# default: "" -# description: "If set, does not trigger on `go.mod` / `go.sum` changes." -# steps: -# - run: -# name: "Check for changes" -# environment: -# CHECK_CHANGED_NO_GO_DEPS: "<>" -# command: | -# cd ops/check-changed -# pip3 install -r requirements.txt -# python3 main.py "<>" + check-changed: + description: "Conditionally halts a step if certain modules change" + parameters: + patterns: + type: string + description: "Comma-separated list of dependencies" + no_go_deps: + type: string + default: "" + description: "If set, does not trigger on `go.mod` / `go.sum` changes." 
+ steps: + - run: + name: "Check for changes" + environment: + CHECK_CHANGED_NO_GO_DEPS: "<>" + command: | + cd ops/check-changed + pip3 install -r requirements.txt + python3 main.py "<>" -# notify-failures-on-develop: -# description: "Notify Slack" -# parameters: -# channel: -# type: string -# default: C03N11M0BBN -# mentions: -# type: string -# default: "" -# steps: -# - slack/notify: -# channel: << parameters.channel >> -# event: fail -# template: basic_fail_1 -# branch_pattern: develop -# mentions: "<< parameters.mentions >>" + notify-failures-on-develop: + description: "Notify Slack" + parameters: + channel: + type: string + default: C03N11M0BBN + mentions: + type: string + default: "" + steps: + - slack/notify: + channel: << parameters.channel >> + event: fail + template: basic_fail_1 + branch_pattern: develop + mentions: "<< parameters.mentions >>" jobs: cannon-go-lint-and-test: @@ -341,199 +341,199 @@ jobs: echo "$DOCKER_HUB_READ_ONLY_TOKEN" | docker login -u "$DOCKER_HUB_READ_ONLY_USER" --password-stdin fi -# export REGISTRY="<>" -# export REPOSITORY="<>" -# export IMAGE_TAGS="$(echo -ne "<>" | sed "s/[^a-zA-Z0-9\n,]/-/g")" -# export GIT_COMMIT="$(git rev-parse HEAD)" -# export GIT_DATE="$(git show -s --format='%ct')" -# export PLATFORMS="<>" + export REGISTRY="<>" + export REPOSITORY="<>" + export IMAGE_TAGS="$(echo -ne "<>" | sed "s/[^a-zA-Z0-9\n,]/-/g")" + export GIT_COMMIT="$(git rev-parse HEAD)" + export GIT_DATE="$(git show -s --format='%ct')" + export PLATFORMS="<>" -# echo "Checking git tags pointing at $GIT_COMMIT:" -# tags_at_commit=$(git tag --points-at $GIT_COMMIT) -# echo "Tags at commit:\n$tags_at_commit" + echo "Checking git tags pointing at $GIT_COMMIT:" + tags_at_commit=$(git tag --points-at $GIT_COMMIT) + echo "Tags at commit:\n$tags_at_commit" -# filtered_tags=$(echo "$tags_at_commit" | grep "^<>/" || true) -# echo "Filtered tags: $filtered_tags" + filtered_tags=$(echo "$tags_at_commit" | grep "^<>/" || true) + echo "Filtered tags: $filtered_tags" -# if [ -z "$filtered_tags" ]; then -# export GIT_VERSION="untagged" -# else -# sorted_tags=$(echo "$filtered_tags" | sed "s/<>\///" | sort -V) -# echo "Sorted tags: $sorted_tags" + if [ -z "$filtered_tags" ]; then + export GIT_VERSION="untagged" + else + sorted_tags=$(echo "$filtered_tags" | sed "s/<>\///" | sort -V) + echo "Sorted tags: $sorted_tags" -# # prefer full release tag over "-rc" release candidate tag if both exist -# full_release_tag=$(echo "$sorted_tags" | grep -v -- "-rc" || true) -# if [ -z "$full_release_tag" ]; then -# export GIT_VERSION=$(echo "$sorted_tags" | tail -n 1) -# else -# export GIT_VERSION=$(echo "$full_release_tag" | tail -n 1) -# fi -# fi + # prefer full release tag over "-rc" release candidate tag if both exist + full_release_tag=$(echo "$sorted_tags" | grep -v -- "-rc" || true) + if [ -z "$full_release_tag" ]; then + export GIT_VERSION=$(echo "$sorted_tags" | tail -n 1) + else + export GIT_VERSION=$(echo "$full_release_tag" | tail -n 1) + fi + fi -# echo "Setting GIT_VERSION=$GIT_VERSION" + echo "Setting GIT_VERSION=$GIT_VERSION" -# # Create, start (bootstrap) and use a *named* docker builder -# # This allows us to cross-build multi-platform, -# # and naming allows us to use the DLC (docker-layer-cache) -# docker buildx create --driver=docker-container --name=buildx-build --bootstrap --use + # Create, start (bootstrap) and use a *named* docker builder + # This allows us to cross-build multi-platform, + # and naming allows us to use the DLC (docker-layer-cache) + docker buildx create 
--driver=docker-container --name=buildx-build --bootstrap --use -# DOCKER_OUTPUT_DESTINATION="" -# if [ "<>" == "true" ]; then -# gcloud auth configure-docker <> -# echo "Building for platforms $PLATFORMS and then publishing to registry" -# DOCKER_OUTPUT_DESTINATION="--push" -# if [ "<>" != "" ]; then -# echo "ERROR: cannot save image to docker when publishing to registry" -# exit 1 -# fi -# else -# if [ "<>" == "" ]; then -# echo "Running $PLATFORMS build without destination (cache warm-up)" -# DOCKER_OUTPUT_DESTINATION="" -# elif [[ $PLATFORMS == *,* ]]; then -# echo "ERROR: cannot perform multi-arch (platforms: $PLATFORMS) build while also loading the result into regular docker" -# exit 1 -# else -# echo "Running single-platform $PLATFORMS build and loading into docker" -# DOCKER_OUTPUT_DESTINATION="--load" -# fi -# fi + DOCKER_OUTPUT_DESTINATION="" + if [ "<>" == "true" ]; then + gcloud auth configure-docker <> + echo "Building for platforms $PLATFORMS and then publishing to registry" + DOCKER_OUTPUT_DESTINATION="--push" + if [ "<>" != "" ]; then + echo "ERROR: cannot save image to docker when publishing to registry" + exit 1 + fi + else + if [ "<>" == "" ]; then + echo "Running $PLATFORMS build without destination (cache warm-up)" + DOCKER_OUTPUT_DESTINATION="" + elif [[ $PLATFORMS == *,* ]]; then + echo "ERROR: cannot perform multi-arch (platforms: $PLATFORMS) build while also loading the result into regular docker" + exit 1 + else + echo "Running single-platform $PLATFORMS build and loading into docker" + DOCKER_OUTPUT_DESTINATION="--load" + fi + fi -# # Let them cook! -# docker buildx bake \ -# --progress plain \ -# --builder=buildx-build \ -# -f docker-bake.hcl \ -# $DOCKER_OUTPUT_DESTINATION \ -# <> + # Let them cook! + docker buildx bake \ + --progress plain \ + --builder=buildx-build \ + -f docker-bake.hcl \ + $DOCKER_OUTPUT_DESTINATION \ + <> -# no_output_timeout: 45m -# - when: -# condition: "<>" -# steps: -# - notify-failures-on-develop -# - when: -# condition: "<>" -# steps: -# - run: -# name: Save -# command: | -# IMAGE_NAME="<>/<>/<>:<>" -# docker save -o /tmp/docker_images/<>.tar $IMAGE_NAME -# - persist_to_workspace: -# root: /tmp/docker_images -# paths: # only write the one file, to avoid concurrent workspace-file additions -# - "<>.tar" -# - when: -# condition: "<>" -# steps: -# - run: -# name: Tag -# command: | -# ./ops/scripts/ci-docker-tag-op-stack-release.sh <>/<> $CIRCLE_TAG $CIRCLE_SHA1 -# - when: -# condition: -# or: -# - and: -# - "<>" -# - "<>" -# - and: -# - "<>" -# - equal: [develop, << pipeline.git.branch >>] -# steps: -# - gcp-oidc-authenticate: -# service_account_email: GCP_SERVICE_ATTESTOR_ACCOUNT_EMAIL -# - run: -# name: Sign -# command: | -# git clone https://github.com/ethereum-optimism/binary_signer -# cd binary_signer/signer -# git checkout tags/v1.0.3 + no_output_timeout: 45m + - when: + condition: "<>" + steps: + - notify-failures-on-develop + - when: + condition: "<>" + steps: + - run: + name: Save + command: | + IMAGE_NAME="<>/<>/<>:<>" + docker save -o /tmp/docker_images/<>.tar $IMAGE_NAME + - persist_to_workspace: + root: /tmp/docker_images + paths: # only write the one file, to avoid concurrent workspace-file additions + - "<>.tar" + - when: + condition: "<>" + steps: + - run: + name: Tag + command: | + ./ops/scripts/ci-docker-tag-op-stack-release.sh <>/<> $CIRCLE_TAG $CIRCLE_SHA1 + - when: + condition: + or: + - and: + - "<>" + - "<>" + - and: + - "<>" + - equal: [develop, << pipeline.git.branch >>] + steps: + - gcp-oidc-authenticate: + 
service_account_email: GCP_SERVICE_ATTESTOR_ACCOUNT_EMAIL + - run: + name: Sign + command: | + git clone https://github.com/ethereum-optimism/binary_signer + cd binary_signer/signer + git checkout tags/v1.0.3 -# IMAGE_PATH="<>/<>/<>:<>" -# echo $IMAGE_PATH -# pip3 install -r requirements.txt + IMAGE_PATH="<>/<>/<>:<>" + echo $IMAGE_PATH + pip3 install -r requirements.txt -# python3 ./sign_image.py --command="sign"\ -# --attestor-project-name="$ATTESTOR_PROJECT_NAME"\ -# --attestor-name="$ATTESTOR_NAME"\ -# --image-path="$IMAGE_PATH"\ -# --signer-logging-level="INFO"\ -# --attestor-key-id="//cloudkms.googleapis.com/v1/projects/$ATTESTOR_PROJECT_NAME/locations/global/keyRings/$ATTESTOR_NAME-key-ring/cryptoKeys/$ATTESTOR_NAME-key/cryptoKeyVersions/1" + python3 ./sign_image.py --command="sign"\ + --attestor-project-name="$ATTESTOR_PROJECT_NAME"\ + --attestor-name="$ATTESTOR_NAME"\ + --image-path="$IMAGE_PATH"\ + --signer-logging-level="INFO"\ + --attestor-key-id="//cloudkms.googleapis.com/v1/projects/$ATTESTOR_PROJECT_NAME/locations/global/keyRings/$ATTESTOR_NAME-key-ring/cryptoKeys/$ATTESTOR_NAME-key/cryptoKeyVersions/1" -# # Verify newly published images (built on AMD machine) will run on ARM -# check-cross-platform: -# docker: -# - image: cimg/base:current -# resource_class: arm.medium -# parameters: -# registry: -# description: Docker registry -# type: string -# default: "us-docker.pkg.dev" -# repo: -# description: Docker repo -# type: string -# default: "oplabs-tools-artifacts/images" -# op_component: -# description: "Name of op-stack component (e.g. op-node)" -# type: string -# default: "" -# docker_tag: -# description: "Tag of docker image" -# type: string -# default: "<>" -# steps: -# - setup_remote_docker -# - run: -# name: "Verify Image Platform" -# command: | -# image_name="<>/<>/<>:<>" -# echo "Retrieving Docker image manifest: $image_name" -# MANIFEST=$(docker manifest inspect $image_name) + # Verify newly published images (built on AMD machine) will run on ARM + check-cross-platform: + docker: + - image: cimg/base:current + resource_class: arm.medium + parameters: + registry: + description: Docker registry + type: string + default: "us-docker.pkg.dev" + repo: + description: Docker repo + type: string + default: "oplabs-tools-artifacts/images" + op_component: + description: "Name of op-stack component (e.g. op-node)" + type: string + default: "" + docker_tag: + description: "Tag of docker image" + type: string + default: "<>" + steps: + - setup_remote_docker + - run: + name: "Verify Image Platform" + command: | + image_name="<>/<>/<>:<>" + echo "Retrieving Docker image manifest: $image_name" + MANIFEST=$(docker manifest inspect $image_name) -# echo "Verifying 'linux/arm64' is supported..." -# SUPPORTED_PLATFORM=$(echo "$MANIFEST" | jq -r '.manifests[] | select(.platform.architecture == "arm64" and .platform.os == "linux")') -# echo $SUPPORT_PLATFORM -# if [ -z "$SUPPORTED_PLATFORM" ]; then -# echo "Platform 'linux/arm64' not supported by this image" -# exit 1 -# fi -# - run: -# name: "Pull and run docker image" -# command: | -# image_name="<>/<>/<>:<>" -# docker pull $image_name || exit 1 -# docker run $image_name <> --version || exit 1 + echo "Verifying 'linux/arm64' is supported..." 
+ SUPPORTED_PLATFORM=$(echo "$MANIFEST" | jq -r '.manifests[] | select(.platform.architecture == "arm64" and .platform.os == "linux")') + echo $SUPPORT_PLATFORM + if [ -z "$SUPPORTED_PLATFORM" ]; then + echo "Platform 'linux/arm64' not supported by this image" + exit 1 + fi + - run: + name: "Pull and run docker image" + command: | + image_name="<>/<>/<>:<>" + docker pull $image_name || exit 1 + docker run $image_name <> --version || exit 1 -# contracts-bedrock-coverage: -# docker: -# - image: <> -# resource_class: xlarge -# steps: -# - checkout -# - run: -# name: git submodules -# command: make submodules -# - check-changed: -# patterns: contracts-bedrock,op-node -# - run: -# name: print forge version -# command: forge --version -# working_directory: packages/contracts-bedrock -# # We do not use the pre-built contracts becuase forge coverage uses different optimizer settings -# - run: -# name: test and generate coverage -# command: pnpm coverage:lcov -# no_output_timeout: 18m -# environment: -# FOUNDRY_PROFILE: ci -# working_directory: packages/contracts-bedrock -# - run: -# name: upload coverage -# command: codecov --verbose --clean --flags contracts-bedrock-tests -# environment: -# FOUNDRY_PROFILE: ci + contracts-bedrock-coverage: + docker: + - image: <> + resource_class: xlarge + steps: + - checkout + - run: + name: git submodules + command: make submodules + - check-changed: + patterns: contracts-bedrock,op-node + - run: + name: print forge version + command: forge --version + working_directory: packages/contracts-bedrock + # We do not use the pre-built contracts becuase forge coverage uses different optimizer settings + - run: + name: test and generate coverage + command: pnpm coverage:lcov + no_output_timeout: 18m + environment: + FOUNDRY_PROFILE: ci + working_directory: packages/contracts-bedrock + - run: + name: upload coverage + command: codecov --verbose --clean --flags contracts-bedrock-tests + environment: + FOUNDRY_PROFILE: ci contracts-bedrock-tests: docker: @@ -709,379 +709,379 @@ jobs: command: ./ops/scripts/todo-checker.sh --verbose - notify-failures-on-develop -# fuzz-golang: -# parameters: -# package_name: -# description: Go package name -# type: string -# on_changes: -# description: changed pattern to fire fuzzer on -# type: string -# uses_artifacts: -# description: should load in foundry artifacts -# type: boolean -# default: false -# docker: -# - image: <> -# steps: -# - checkout -# - check-changed: -# patterns: "<>" -# - attach_workspace: -# at: "." -# if: ${{ uses_artifacts }} -# - restore_cache: -# name: Restore Go modules cache -# key: gomod-{{ checksum "go.sum" }} -# - restore_cache: -# name: Restore Go build cache -# key: golang-build-cache -# - run: -# name: Fuzz -# command: make fuzz -# working_directory: "<>" -# - save_cache: -# key: golang-build-cache -# paths: -# - "/root/.cache/go-build" + fuzz-golang: + parameters: + package_name: + description: Go package name + type: string + on_changes: + description: changed pattern to fire fuzzer on + type: string + uses_artifacts: + description: should load in foundry artifacts + type: boolean + default: false + docker: + - image: <> + steps: + - checkout + - check-changed: + patterns: "<>" + - attach_workspace: + at: "." 
+ if: ${{ uses_artifacts }} + - restore_cache: + name: Restore Go modules cache + key: gomod-{{ checksum "go.sum" }} + - restore_cache: + name: Restore Go build cache + key: golang-build-cache + - run: + name: Fuzz + command: make fuzz + working_directory: "<>" + - save_cache: + key: golang-build-cache + paths: + - "/root/.cache/go-build" -# l1-geth-version-check: -# docker: -# - image: <> -# steps: -# - checkout -# - run: -# name: "Check L1 geth version" -# command: ./ops/scripts/geth-version-checker.sh || (echo "geth version is wrong, update ci-builder"; false) + l1-geth-version-check: + docker: + - image: <> + steps: + - checkout + - run: + name: "Check L1 geth version" + command: ./ops/scripts/geth-version-checker.sh || (echo "geth version is wrong, update ci-builder"; false) -# go-lint: -# docker: -# - image: <> -# steps: -# - checkout -# - restore_cache: -# name: Restore Go modules cache -# key: gomod-{{ checksum "go.sum" }} -# - restore_cache: -# key: golang-build-cache -# - restore_cache: -# key: golang-lint-cache -# - run: -# name: run Go linter -# command: | -# # Identify how many cores it defaults to -# golangci-lint --help | grep concurrency -# make lint-go -# working_directory: . -# - save_cache: -# key: golang-build-cache -# paths: -# - "/root/.cache/go-build" -# - save_cache: -# key: golang-lint-cache -# paths: -# - "/root/.cache/golangci-lint" + go-lint: + docker: + - image: <> + steps: + - checkout + - restore_cache: + name: Restore Go modules cache + key: gomod-{{ checksum "go.sum" }} + - restore_cache: + key: golang-build-cache + - restore_cache: + key: golang-lint-cache + - run: + name: run Go linter + command: | + # Identify how many cores it defaults to + golangci-lint --help | grep concurrency + make lint-go + working_directory: . + - save_cache: + key: golang-build-cache + paths: + - "/root/.cache/go-build" + - save_cache: + key: golang-lint-cache + paths: + - "/root/.cache/golangci-lint" -# go-test: -# parameters: -# module: -# description: Go Module Name -# type: string -# docker: -# - image: <> # only used to enable codecov. -# resource_class: xlarge -# steps: -# - checkout -# - restore_cache: -# name: Restore Go modules cache -# key: gomod-{{ checksum "go.sum" }} -# - restore_cache: -# keys: -# - golang-build-cache-<> -# - golang-build-cache- -# - run: -# name: prep results dir -# command: mkdir -p /tmp/test-results && mkdir -p /testlogs -# - run: -# name: run tests -# command: | -# gotestsum --format=testname --junitfile=/tmp/test-results/<>.xml --jsonfile=/testlogs/log.json \ -# -- -parallel=8 -coverpkg=github.com/ethereum-optimism/optimism/... -coverprofile=coverage.out ./... -# working_directory: <> -# - save_cache: -# key: golang-build-cache-<> -# paths: -# - "/root/.cache/go-build" -# # TODO(CLI-148): Fix codecov -# #- run: -# #name: upload coverage -# #command: codecov --verbose --clean --flags bedrock-go-tests -# - store_test_results: -# path: /tmp/test-results -# - store_artifacts: -# path: /testlogs -# when: always + go-test: + parameters: + module: + description: Go Module Name + type: string + docker: + - image: <> # only used to enable codecov. 
+ resource_class: xlarge + steps: + - checkout + - restore_cache: + name: Restore Go modules cache + key: gomod-{{ checksum "go.sum" }} + - restore_cache: + keys: + - golang-build-cache-<> + - golang-build-cache- + - run: + name: prep results dir + command: mkdir -p /tmp/test-results && mkdir -p /testlogs + - run: + name: run tests + command: | + gotestsum --format=testname --junitfile=/tmp/test-results/<>.xml --jsonfile=/testlogs/log.json \ + -- -parallel=8 -coverpkg=github.com/ethereum-optimism/optimism/... -coverprofile=coverage.out ./... + working_directory: <> + - save_cache: + key: golang-build-cache-<> + paths: + - "/root/.cache/go-build" + # TODO(CLI-148): Fix codecov + #- run: + #name: upload coverage + #command: codecov --verbose --clean --flags bedrock-go-tests + - store_test_results: + path: /tmp/test-results + - store_artifacts: + path: /testlogs + when: always -# go-e2e-test: -# parameters: -# variant: -# type: string -# default: '' -# module: -# description: Go Module Name -# type: string -# target: -# description: The make target to execute -# type: string -# parallelism: -# description: Number of parallel test runs -# type: integer -# default: 6 -# notify: -# description: Whether to notify on failure -# type: boolean -# default: false -# mentions: -# description: Slack user or group to mention when notifying of failures -# type: string -# default: "" -# environment: -# DEVNET_L2OO: 'false' -# OP_E2E_USE_L2OO: 'false' -# docker: -# - image: <> -# resource_class: xlarge -# parallelism: <> -# steps: -# - checkout -# - when: -# condition: -# equal: ['-l2oo', <>] -# steps: -# - run: -# name: Set DEVNET_L2OO = true -# command: echo 'export DEVNET_L2OO=true' >> $BASH_ENV -# - run: -# name: Set OP_E2E_USE_L2OO = true -# command: echo 'export OP_E2E_USE_L2OO=true' >> $BASH_ENV -# - when: -# condition: -# equal: ['-plasma', <>] -# steps: -# - run: -# name: Set OP_E2E_USE_PLASMA = true -# command: echo 'export OP_E2E_USE_PLASMA=true' >> $BASH_ENV -# - check-changed: -# patterns: op-(.+),cannon,contracts-bedrock -# - run: -# name: prep results dir -# command: mkdir -p /tmp/test-results -# - restore_cache: -# name: Restore Go modules cache -# key: gomod-{{ checksum "go.sum" }} -# - restore_cache: -# name: Restore Go build cache -# key: golang-build-cache -# - attach_workspace: -# at: /tmp/workspace -# - run: -# name: Load devnet-allocs -# command: | -# mkdir -p .devnet -# cp /tmp/workspace/.devnet<>/allocs-l2-delta.json .devnet/allocs-l2-delta.json -# cp /tmp/workspace/.devnet<>/allocs-l2-ecotone.json .devnet/allocs-l2-ecotone.json -# cp /tmp/workspace/.devnet<>/allocs-l2-fjord.json .devnet/allocs-l2-fjord.json -# cp /tmp/workspace/.devnet<>/allocs-l1.json .devnet/allocs-l1.json -# cp /tmp/workspace/.devnet<>/addresses.json .devnet/addresses.json -# cp /tmp/workspace/packages/contracts-bedrock/deploy-config/devnetL1.json packages/contracts-bedrock/deploy-config/devnetL1.json -# cp -r /tmp/workspace/packages/contracts-bedrock/deployments/devnetL1 packages/contracts-bedrock/deployments/devnetL1 -# - run: -# name: print go's available MIPS targets -# command: go tool dist list | grep mips -# - run: -# name: run tests -# no_output_timeout: 20m -# command: | -# mkdir -p /testlogs + go-e2e-test: + parameters: + variant: + type: string + default: '' + module: + description: Go Module Name + type: string + target: + description: The make target to execute + type: string + parallelism: + description: Number of parallel test runs + type: integer + default: 6 + notify: + description: Whether to 
notify on failure + type: boolean + default: false + mentions: + description: Slack user or group to mention when notifying of failures + type: string + default: "" + environment: + DEVNET_L2OO: 'false' + OP_E2E_USE_L2OO: 'false' + docker: + - image: <> + resource_class: xlarge + parallelism: <> + steps: + - checkout + - when: + condition: + equal: ['-l2oo', <>] + steps: + - run: + name: Set DEVNET_L2OO = true + command: echo 'export DEVNET_L2OO=true' >> $BASH_ENV + - run: + name: Set OP_E2E_USE_L2OO = true + command: echo 'export OP_E2E_USE_L2OO=true' >> $BASH_ENV + - when: + condition: + equal: ['-plasma', <>] + steps: + - run: + name: Set OP_E2E_USE_PLASMA = true + command: echo 'export OP_E2E_USE_PLASMA=true' >> $BASH_ENV + - check-changed: + patterns: op-(.+),cannon,contracts-bedrock + - run: + name: prep results dir + command: mkdir -p /tmp/test-results + - restore_cache: + name: Restore Go modules cache + key: gomod-{{ checksum "go.sum" }} + - restore_cache: + name: Restore Go build cache + key: golang-build-cache + - attach_workspace: + at: /tmp/workspace + - run: + name: Load devnet-allocs + command: | + mkdir -p .devnet + cp /tmp/workspace/.devnet<>/allocs-l2-delta.json .devnet/allocs-l2-delta.json + cp /tmp/workspace/.devnet<>/allocs-l2-ecotone.json .devnet/allocs-l2-ecotone.json + cp /tmp/workspace/.devnet<>/allocs-l2-fjord.json .devnet/allocs-l2-fjord.json + cp /tmp/workspace/.devnet<>/allocs-l1.json .devnet/allocs-l1.json + cp /tmp/workspace/.devnet<>/addresses.json .devnet/addresses.json + cp /tmp/workspace/packages/contracts-bedrock/deploy-config/devnetL1.json packages/contracts-bedrock/deploy-config/devnetL1.json + cp -r /tmp/workspace/packages/contracts-bedrock/deployments/devnetL1 packages/contracts-bedrock/deployments/devnetL1 + - run: + name: print go's available MIPS targets + command: go tool dist list | grep mips + - run: + name: run tests + no_output_timeout: 20m + command: | + mkdir -p /testlogs -# # The below env var gets overridden when running make test-cannon, but we -# # need to explicitly set it here to prevent Cannon from running when we don't -# # want it to. -# export OP_E2E_CANNON_ENABLED="false" -# # Note: We don't use circle CI test splits because we need to split by test name, not by package. There is an additional -# # constraint that gotestsum does not currently (nor likely will) accept files from different pacakges when building. -# JUNIT_FILE=/tmp/test-results/<>_<>.xml JSON_LOG_FILE=/testlogs/test.log make <> -# working_directory: <> -# - store_artifacts: -# path: /testlogs -# when: always -# - store_artifacts: -# path: /tmp/test-results -# when: always -# - store_test_results: -# path: /tmp/test-results -# - when: -# condition: "<>" -# steps: -# - notify-failures-on-develop: -# mentions: "<>" + # The below env var gets overridden when running make test-cannon, but we + # need to explicitly set it here to prevent Cannon from running when we don't + # want it to. + export OP_E2E_CANNON_ENABLED="false" + # Note: We don't use circle CI test splits because we need to split by test name, not by package. There is an additional + # constraint that gotestsum does not currently (nor likely will) accept files from different pacakges when building. 
+ JUNIT_FILE=/tmp/test-results/<>_<>.xml JSON_LOG_FILE=/testlogs/test.log make <> + working_directory: <> + - store_artifacts: + path: /testlogs + when: always + - store_artifacts: + path: /tmp/test-results + when: always + - store_test_results: + path: /tmp/test-results + - when: + condition: "<>" + steps: + - notify-failures-on-develop: + mentions: "<>" -# go-lint-test-build: -# parameters: -# binary_name: -# description: Binary name to build -# type: string -# working_directory: -# description: Working directory -# type: string -# build: -# description: Whether or not to build the binary -# type: boolean -# default: true -# dependencies: -# description: Regex matching dependent packages -# type: string -# default: this-package-does-not-exist -# docker: -# - image: <> -# resource_class: medium -# steps: -# - checkout -# - check-changed: -# patterns: <>,<> -# - run: -# name: Lint -# command: make lint -# working_directory: <> -# - run: -# name: Test -# command: | -# mkdir -p /test-results -# gotestsum --format=testname --junitfile /test-results/tests.xml --jsonfile /test-results/log.json -- -parallel=2 -# working_directory: <> -# - store_test_results: -# path: /test-results -# - store_artifacts: -# path: /testlogs -# when: always -# - when: -# condition: -# equal: [ true, <> ] -# steps: -# - run: -# name: Build -# command: make <> -# working_directory: <> + go-lint-test-build: + parameters: + binary_name: + description: Binary name to build + type: string + working_directory: + description: Working directory + type: string + build: + description: Whether or not to build the binary + type: boolean + default: true + dependencies: + description: Regex matching dependent packages + type: string + default: this-package-does-not-exist + docker: + - image: <> + resource_class: medium + steps: + - checkout + - check-changed: + patterns: <>,<> + - run: + name: Lint + command: make lint + working_directory: <> + - run: + name: Test + command: | + mkdir -p /test-results + gotestsum --format=testname --junitfile /test-results/tests.xml --jsonfile /test-results/log.json -- -parallel=2 + working_directory: <> + - store_test_results: + path: /test-results + - store_artifacts: + path: /testlogs + when: always + - when: + condition: + equal: [ true, <> ] + steps: + - run: + name: Build + command: make <> + working_directory: <> -# cannon-prestate: -# docker: -# - image: <> -# steps: -# - checkout -# - restore_cache: -# name: Restore Go modules cache -# key: gomod-{{ checksum "go.sum" }} -# - restore_cache: -# key: golang-build-cache -# - restore_cache: -# key: cannon-prestate-{{ checksum "./cannon/bin/cannon" }}-{{ checksum "op-program/bin/op-program-client.elf" }} -# name: Load cannon prestate cache -# - run: -# name: generate cannon prestate -# command: make cannon-prestate -# - save_cache: -# key: cannon-prestate-{{ checksum "./cannon/bin/cannon" }}-{{ checksum "op-program/bin/op-program-client.elf" }} -# name: Save Cannon prestate to cache -# paths: -# - "op-program/bin/prestate.json" -# - "op-program/bin/meta.json" -# - "op-program/bin/prestate-proof.json" -# - persist_to_workspace: -# root: . 
-# paths: -# - "op-program/bin/prestate.json" -# - "op-program/bin/meta.json" -# - "op-program/bin/prestate-proof.json" + cannon-prestate: + docker: + - image: <> + steps: + - checkout + - restore_cache: + name: Restore Go modules cache + key: gomod-{{ checksum "go.sum" }} + - restore_cache: + key: golang-build-cache + - restore_cache: + key: cannon-prestate-{{ checksum "./cannon/bin/cannon" }}-{{ checksum "op-program/bin/op-program-client.elf" }} + name: Load cannon prestate cache + - run: + name: generate cannon prestate + command: make cannon-prestate + - save_cache: + key: cannon-prestate-{{ checksum "./cannon/bin/cannon" }}-{{ checksum "op-program/bin/op-program-client.elf" }} + name: Save Cannon prestate to cache + paths: + - "op-program/bin/prestate.json" + - "op-program/bin/meta.json" + - "op-program/bin/prestate-proof.json" + - persist_to_workspace: + root: . + paths: + - "op-program/bin/prestate.json" + - "op-program/bin/meta.json" + - "op-program/bin/prestate-proof.json" -# preimage-reproducibility: -# docker: -# - image: <> -# parameters: -# version: -# type: string -# steps: -# - checkout -# - setup_remote_docker -# - run: -# name: Switch to tag -# command: | -# git fetch -# git checkout "op-program/v<>" -# git submodule update --init --recursive -# - run: -# name: Set expected prestate hashes -# command: | -# if [[ "<>" == "0.1.0" ]]; then -# echo 'export EXPECTED_PRESTATE_HASH="0x038942ec840131a63c49fa514a3f0577ae401fd5584d56ad50cdf5a8b41d4538"' >> $BASH_ENV -# elif [[ "<>" == "0.2.0" ]]; then -# echo 'export EXPECTED_PRESTATE_HASH="0x031e3b504740d0b1264e8cf72b6dde0d497184cfb3f98e451c6be8b33bd3f808"' >> $BASH_ENV -# elif [[ "<>" == "0.3.0" ]]; then -# echo 'export EXPECTED_PRESTATE_HASH="0x034c8cc69f22c35ae386a97136715dd48aaf97fd190942a111bfa680c2f2f421"' >> $BASH_ENV -# elif [[ "<>" == "1.0.0" ]]; then -# echo 'export EXPECTED_PRESTATE_HASH="0x037ef3c1a487960b0e633d3e513df020c43432769f41a634d18a9595cbf53c55"' >> $BASH_ENV -# elif [[ "<>" == "1.1.0" ]]; then -# echo 'export EXPECTED_PRESTATE_HASH="0x03e69d3de5155f4a80da99dd534561cbddd4f9dd56c9ecc704d6886625711d2b"' >> $BASH_ENV -# elif [[ "<>" == "1.2.0" ]]; then -# echo 'export EXPECTED_PRESTATE_HASH="0x03617abec0b255dc7fc7a0513a2c2220140a1dcd7a1c8eca567659bd67e05cea"' >> $BASH_ENV -# else -# echo "Unknown prestate version <>" -# exit 1 -# fi -# - run: -# name: Build prestate -# command: make reproducible-prestate -# - run: -# name: Verify prestate -# command: | -# ACTUAL=$(jq -r .pre ./op-program/bin/prestate-proof.json) -# echo "Expected: ${EXPECTED_PRESTATE_HASH}" -# echo "Actual: ${ACTUAL}" -# if [[ "${EXPECTED_PRESTATE_HASH}" != "${ACTUAL}" ]] -# then -# echo "Prestate did not match expected" -# exit 1 -# fi -# - notify-failures-on-develop: -# mentions: "@proofs-squad" + preimage-reproducibility: + docker: + - image: <> + parameters: + version: + type: string + steps: + - checkout + - setup_remote_docker + - run: + name: Switch to tag + command: | + git fetch + git checkout "op-program/v<>" + git submodule update --init --recursive + - run: + name: Set expected prestate hashes + command: | + if [[ "<>" == "0.1.0" ]]; then + echo 'export EXPECTED_PRESTATE_HASH="0x038942ec840131a63c49fa514a3f0577ae401fd5584d56ad50cdf5a8b41d4538"' >> $BASH_ENV + elif [[ "<>" == "0.2.0" ]]; then + echo 'export EXPECTED_PRESTATE_HASH="0x031e3b504740d0b1264e8cf72b6dde0d497184cfb3f98e451c6be8b33bd3f808"' >> $BASH_ENV + elif [[ "<>" == "0.3.0" ]]; then + echo 'export 
EXPECTED_PRESTATE_HASH="0x034c8cc69f22c35ae386a97136715dd48aaf97fd190942a111bfa680c2f2f421"' >> $BASH_ENV + elif [[ "<>" == "1.0.0" ]]; then + echo 'export EXPECTED_PRESTATE_HASH="0x037ef3c1a487960b0e633d3e513df020c43432769f41a634d18a9595cbf53c55"' >> $BASH_ENV + elif [[ "<>" == "1.1.0" ]]; then + echo 'export EXPECTED_PRESTATE_HASH="0x03e69d3de5155f4a80da99dd534561cbddd4f9dd56c9ecc704d6886625711d2b"' >> $BASH_ENV + elif [[ "<>" == "1.2.0" ]]; then + echo 'export EXPECTED_PRESTATE_HASH="0x03617abec0b255dc7fc7a0513a2c2220140a1dcd7a1c8eca567659bd67e05cea"' >> $BASH_ENV + else + echo "Unknown prestate version <>" + exit 1 + fi + - run: + name: Build prestate + command: make reproducible-prestate + - run: + name: Verify prestate + command: | + ACTUAL=$(jq -r .pre ./op-program/bin/prestate-proof.json) + echo "Expected: ${EXPECTED_PRESTATE_HASH}" + echo "Actual: ${ACTUAL}" + if [[ "${EXPECTED_PRESTATE_HASH}" != "${ACTUAL}" ]] + then + echo "Prestate did not match expected" + exit 1 + fi + - notify-failures-on-develop: + mentions: "@proofs-squad" -# devnet-allocs: -# docker: -# - image: <> -# resource_class: xlarge -# steps: -# - checkout -# - restore_cache: -# name: Restore Go modules cache -# key: gomod-{{ checksum "go.sum" }} -# - restore_cache: -# key: golang-build-cache -# - run: -# name: git submodules -# command: make submodules -# - run: -# name: generate devnet allocs -# command: make devnet-allocs -# - persist_to_workspace: -# root: . -# paths: -# - ".devnet/allocs-l2-delta.json" -# - ".devnet/allocs-l2-ecotone.json" -# - ".devnet/allocs-l2-fjord.json" -# - ".devnet/allocs-l1.json" -# - ".devnet/addresses.json" -# - "packages/contracts-bedrock/deploy-config/devnetL1.json" -# - "packages/contracts-bedrock/deployments/devnetL1" + devnet-allocs: + docker: + - image: <> + resource_class: xlarge + steps: + - checkout + - restore_cache: + name: Restore Go modules cache + key: gomod-{{ checksum "go.sum" }} + - restore_cache: + key: golang-build-cache + - run: + name: git submodules + command: make submodules + - run: + name: generate devnet allocs + command: make devnet-allocs + - persist_to_workspace: + root: . 
+ paths: + - ".devnet/allocs-l2-delta.json" + - ".devnet/allocs-l2-ecotone.json" + - ".devnet/allocs-l2-fjord.json" + - ".devnet/allocs-l1.json" + - ".devnet/addresses.json" + - "packages/contracts-bedrock/deploy-config/devnetL1.json" + - "packages/contracts-bedrock/deployments/devnetL1" devnet: machine: @@ -1267,233 +1267,234 @@ jobs: when: on_fail working_directory: packages/contracts-bedrock -# semgrep-scan: -# parameters: -# diff_branch: -# type: string -# default: develop -# environment: -# TEMPORARY_BASELINE_REF: << parameters.diff_branch >> -# SEMGREP_REPO_URL: << pipeline.project.git_url >> -# SEMGREP_BRANCH: << pipeline.git.branch >> -# SEMGREP_COMMIT: << pipeline.git.revision >> + semgrep-scan: + parameters: + diff_branch: + type: string + default: develop + environment: + TEMPORARY_BASELINE_REF: << parameters.diff_branch >> + SEMGREP_REPO_URL: << pipeline.project.git_url >> + SEMGREP_BRANCH: << pipeline.git.branch >> + SEMGREP_COMMIT: << pipeline.git.revision >> -# # Change job timeout (default is 1800 seconds; set to 0 to disable) -# SEMGREP_TIMEOUT: 3000 + # Change job timeout (default is 1800 seconds; set to 0 to disable) + SEMGREP_TIMEOUT: 3000 -# docker: -# - image: returntocorp/semgrep -# resource_class: medium -# steps: -# - checkout -# - unless: -# condition: -# equal: [ "develop", << pipeline.git.branch >> ] -# steps: -# - run: -# # Scan changed files in PRs, block on new issues only (existing issues ignored) -# # Do a full scan when scanning develop, otherwise do an incremental scan. -# name: "Conditionally set BASELINE env var" -# command: | -# echo 'export SEMGREP_BASELINE_REF=${TEMPORARY_BASELINE_REF}' >> $BASH_ENV -# - run: -# name: "Set environment variables" # for PR comments and in-app hyperlinks to findings -# command: | -# echo 'export SEMGREP_PR_ID=${CIRCLE_PULL_REQUEST##*/}' >> $BASH_ENV -# echo 'export SEMGREP_JOB_URL=$CIRCLE_BUILD_URL' >> $BASH_ENV -# echo 'export SEMGREP_REPO_NAME=$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME' >> $BASH_ENV -# - run: -# name: "Semgrep scan" -# command: semgrep ci -# - notify-failures-on-develop + docker: + - image: returntocorp/semgrep + resource_class: medium + steps: + - checkout + - unless: + condition: + equal: [ "develop", << pipeline.git.branch >> ] + steps: + - run: + # Scan changed files in PRs, block on new issues only (existing issues ignored) + # Do a full scan when scanning develop, otherwise do an incremental scan. + name: "Conditionally set BASELINE env var" + command: | + echo 'export SEMGREP_BASELINE_REF=${TEMPORARY_BASELINE_REF}' >> $BASH_ENV + - run: + name: "Set environment variables" # for PR comments and in-app hyperlinks to findings + command: | + echo 'export SEMGREP_PR_ID=${CIRCLE_PULL_REQUEST##*/}' >> $BASH_ENV + echo 'export SEMGREP_JOB_URL=$CIRCLE_BUILD_URL' >> $BASH_ENV + echo 'export SEMGREP_REPO_NAME=$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME' >> $BASH_ENV + - run: + name: "Semgrep scan" + command: semgrep ci + - notify-failures-on-develop -# go-mod-download: -# docker: -# - image: <> -# parameters: -# file: -# default: go.sum -# description: The file name of checksum for restore_cache and save_cache. -# type: string -# key: -# default: gomod -# description: The key of restore_cache and save_cache. 
-# type: string -# steps: -# - checkout -# - restore_cache: -# key: << parameters.key >>-{{ checksum "<< parameters.file >>" }} -# name: Restore Go modules cache -# - run: -# name: Sanity check go mod cache path -# command: test "$(go env GOMODCACHE)" == "/go/pkg/mod" # yes, it's an odd path -# - run: -# command: go mod download -# name: Download Go module dependencies -# - run: -# name: "Go mod tidy" -# command: make mod-tidy && git diff --exit-code -# - run: -# name: run Go linter -# command: | -# # Identify how many cores it defaults to -# golangci-lint --help | grep concurrency -# make lint-go -# working_directory: . -# - save_cache: -# key: << parameters.key >>-{{ checksum "<< parameters.file >>" }} -# name: Save Go modules cache -# paths: -# - "/go/pkg/mod" + go-mod-download: + docker: + - image: <> + parameters: + file: + default: go.sum + description: The file name of checksum for restore_cache and save_cache. + type: string + key: + default: gomod + description: The key of restore_cache and save_cache. + type: string + steps: + - checkout + - restore_cache: + key: << parameters.key >>-{{ checksum "<< parameters.file >>" }} + name: Restore Go modules cache + - run: + name: Sanity check go mod cache path + command: test "$(go env GOMODCACHE)" == "/go/pkg/mod" # yes, it's an odd path + - run: + command: go mod download + name: Download Go module dependencies + - run: + name: "Go mod tidy" + command: make mod-tidy && git diff --exit-code + - run: + name: run Go linter + command: | + # Identify how many cores it defaults to + golangci-lint --help | grep concurrency + make lint-go + working_directory: . + - save_cache: + key: << parameters.key >>-{{ checksum "<< parameters.file >>" }} + name: Save Go modules cache + paths: + - "/go/pkg/mod" -# op-service-rethdb-tests: -# docker: -# - image: <> -# steps: -# - checkout -# - check-changed: -# patterns: op-service,op-node -# - restore_cache: -# name: Restore Go modules cache -# key: gomod-{{ checksum "go.sum" }} -# - run: -# name: Cargo fmt + clippy -# command: | -# cargo +nightly fmt -- --check -# cargo +nightly clippy --all --all-features -- -D warnings -# working_directory: op-service/rethdb-reader -# - run: -# name: Generate testdata db -# command: cargo test -# working_directory: op-service/rethdb-reader -# - run: -# name: Build dylib -# command: cargo build --release -# working_directory: op-service/rethdb-reader -# - run: -# name: Update LD_LIBRARY_PATH -# command: echo 'export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/root/project/op-service/rethdb-reader/target/release"' >> $BASH_ENV -# - run: -# name: Run op-service RethDB tests -# command: | -# gotestsum --format=standard-verbose --junitfile=/tmp/test-results/op-service.xml \ -# -- -parallel=8 -coverpkg=github.com/ethereum-optimism/optimism/... 
-coverprofile=coverage.out \ -# -run TestRethDB -tags rethdb -v -# working_directory: op-service/sources -# # TODO(CLI-148): Fix codecov -# #- run: -# #name: upload coverage -# #command: codecov --verbose --clean --flags bedrock-rethdb-go-tests + op-service-rethdb-tests: + docker: + - image: <> + steps: + - checkout + - check-changed: + patterns: op-service,op-node + - restore_cache: + name: Restore Go modules cache + key: gomod-{{ checksum "go.sum" }} + - run: + name: Cargo fmt + clippy + command: | + cargo +nightly fmt -- --check + cargo +nightly clippy --all --all-features -- -D warnings + working_directory: op-service/rethdb-reader + - run: + name: Generate testdata db + command: cargo test + working_directory: op-service/rethdb-reader + - run: + name: Build dylib + command: cargo build --release + working_directory: op-service/rethdb-reader + - run: + name: Update LD_LIBRARY_PATH + command: echo 'export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/root/project/op-service/rethdb-reader/target/release"' >> $BASH_ENV + - run: + name: Run op-service RethDB tests + command: | + gotestsum --format=standard-verbose --junitfile=/tmp/test-results/op-service.xml \ + -- -parallel=8 -coverpkg=github.com/ethereum-optimism/optimism/... -coverprofile=coverage.out \ + -run TestRethDB -tags rethdb -v + working_directory: op-service/sources + # TODO(CLI-148): Fix codecov + #- run: + #name: upload coverage + #command: codecov --verbose --clean --flags bedrock-rethdb-go-tests -# bedrock-go-tests: # just a helper, that depends on all the actual test jobs -# docker: -# # Use a smaller base image to avoid pulling the huge ci-builder -# # image which is not needed for this job and sometimes misses -# # the cache. -# - image: cimg/base:2024.01 -# resource_class: medium -# steps: -# - run: echo Done + bedrock-go-tests: # just a helper, that depends on all the actual test jobs + docker: + # Use a smaller base image to avoid pulling the huge ci-builder + # image which is not needed for this job and sometimes misses + # the cache. 
+ - image: cimg/base:2024.01 + resource_class: medium + steps: + - run: echo Done -# fpp-verify: -# docker: -# - image: cimg/go:1.21 -# steps: -# - checkout -# - run: -# name: verify-sepolia -# command: | -# make verify-sepolia -# working_directory: op-program -# - notify-failures-on-develop: -# mentions: "@proofs-squad" + fpp-verify: + docker: + - image: cimg/go:1.21 + steps: + - checkout + - run: + name: verify-sepolia + command: | + make verify-sepolia + working_directory: op-program + - notify-failures-on-develop: + mentions: "@proofs-squad" -# op-program-compat: -# docker: -# - image: <> -# steps: -# - checkout -# - restore_cache: -# name: Restore Go modules cache -# key: gomod-{{ checksum "go.sum" }} -# - restore_cache: -# key: golang-build-cache -# - run: -# name: compat-sepolia -# command: | -# make verify-compat -# working_directory: op-program + op-program-compat: + docker: + - image: <> + steps: + - checkout + - restore_cache: + name: Restore Go modules cache + key: gomod-{{ checksum "go.sum" }} + - restore_cache: + key: golang-build-cache + - run: + name: compat-sepolia + command: | + make verify-compat + working_directory: op-program -# check-generated-mocks-op-node: -# docker: -# - image: <> -# steps: -# - checkout -# - check-changed: -# patterns: op-node -# - run: -# name: check-generated-mocks -# command: make generate-mocks-op-node && git diff --exit-code + check-generated-mocks-op-node: + docker: + - image: <> + steps: + - checkout + - check-changed: + patterns: op-node + - run: + name: check-generated-mocks + command: make generate-mocks-op-node && git diff --exit-code -# check-generated-mocks-op-service: -# docker: -# - image: <> -# steps: -# - checkout -# - check-changed: -# patterns: op-service -# - run: -# name: check-generated-mocks -# command: make generate-mocks-op-service && git diff --exit-code + check-generated-mocks-op-service: + docker: + - image: <> + steps: + - checkout + - check-changed: + patterns: op-service + - run: + name: check-generated-mocks + command: make generate-mocks-op-service && git diff --exit-code -# check-values-match: -# parameters: -# pattern_file1: -# type: string -# default: "" -# pattern_file2: -# type: string -# default: "" -# file1_path: -# type: string -# default: "" -# file2_path: -# type: string -# default: "" -# docker: -# - image: <> -# steps: -# - checkout -# - run: -# name: Verify Values Match -# command: | -# ./ops/scripts/ci-match-values-between-files.sh "<< parameters.file1_path >>" "<< parameters.pattern_file1 >>" "<< parameters.file2_path >>" "<< parameters.pattern_file2 >>" + check-values-match: + parameters: + pattern_file1: + type: string + default: "" + pattern_file2: + type: string + default: "" + file1_path: + type: string + default: "" + file2_path: + type: string + default: "" + docker: + - image: <> + steps: + - checkout + - run: + name: Verify Values Match + command: | + ./ops/scripts/ci-match-values-between-files.sh "<< parameters.file1_path >>" "<< parameters.pattern_file1 >>" "<< parameters.file2_path >>" "<< parameters.pattern_file2 >>" -# kontrol-tests: -# docker: -# - image: << pipeline.parameters.ci_builder_image >> -# resource_class: xlarge -# steps: -# - checkout -# - run: -# name: Checkout Submodule -# command: make submodules -# - check-changed: -# no_go_deps: "true" -# patterns: 
contracts-bedrock/test/kontrol,contracts-bedrock/src/L1/OptimismPortal\.sol,contracts-bedrock/src/L1/OptimismPortal2\.sol,contracts-bedrock/src/L1/L1CrossDomainMessenger\.sol,contracts-bedrock/src/L1/L1ERC721Bridge\.sol,contracts-bedrock/src/L1/L1StandardBridge\.sol,contracts-bedrock/src/L1/ResourceMetering\.sol,contracts-bedrock/src/universal/StandardBridge\.sol,contracts-bedrock/src/universal/ERC721Bridge\.sol,contracts-bedrock/src/universal/CrossDomainMessenger\.sol -# - setup_remote_docker: -# docker_layer_caching: true -# - run: -# name: Run Kontrol Tests -# command: pnpm test:kontrol -# working_directory: ./packages/contracts-bedrock -# - store_artifacts: -# path: ./packages/contracts-bedrock/test/kontrol/logs/kontrol-results_latest.tar.gz -# - store_test_results: -# path: ./packages/contracts-bedrock -# - notify-failures-on-develop + kontrol-tests: + docker: + - image: << pipeline.parameters.ci_builder_image >> + resource_class: xlarge + steps: + - checkout + - run: + name: Checkout Submodule + command: make submodules + - check-changed: + no_go_deps: "true" + patterns: contracts-bedrock/test/kontrol,contracts-bedrock/src/L1/OptimismPortal\.sol,contracts-bedrock/src/L1/OptimismPortal2\.sol,contracts-bedrock/src/L1/L1CrossDomainMessenger\.sol,contracts-bedrock/src/L1/L1ERC721Bridge\.sol,contracts-bedrock/src/L1/L1StandardBridge\.sol,contracts-bedrock/src/L1/ResourceMetering\.sol,contracts-bedrock/src/universal/StandardBridge\.sol,contracts-bedrock/src/universal/ERC721Bridge\.sol,contracts-bedrock/src/universal/CrossDomainMessenger\.sol + - setup_remote_docker: + docker_layer_caching: true + - run: + name: Run Kontrol Tests + command: pnpm test:kontrol + working_directory: ./packages/contracts-bedrock + - store_artifacts: + path: ./packages/contracts-bedrock/test/kontrol/logs/kontrol-results_latest.tar.gz + - store_test_results: + path: ./packages/contracts-bedrock + - notify-failures-on-develop workflows: + when: false # This will disable all workflows main: when: and: @@ -1972,66 +1973,66 @@ workflows: requires: - hold -# scheduled-todo-issues: -# when: -# equal: [ build_four_hours, <> ] -# jobs: -# - todo-issues: -# name: todo-issue-checks -# context: -# - slack + scheduled-todo-issues: + when: + equal: [ build_four_hours, <> ] + jobs: + - todo-issues: + name: todo-issue-checks + context: + - slack -# scheduled-fpp: -# when: -# equal: [ build_four_hours, <> ] -# jobs: -# - fpp-verify: -# context: -# - slack -# - oplabs-fpp-nodes + scheduled-fpp: + when: + equal: [ build_four_hours, <> ] + jobs: + - fpp-verify: + context: + - slack + - oplabs-fpp-nodes -# develop-fault-proofs: -# when: -# and: -# - or: -# - equal: [ "develop", <> ] -# - equal: [ true, <> ] -# - not: -# equal: [ scheduled_pipeline, << pipeline.trigger_source >> ] -# jobs: -# - go-mod-download -# - cannon-prestate: -# requires: -# - go-mod-download -# - pnpm-monorepo: -# name: pnpm-monorepo -# requires: -# - go-mod-download -# - go-e2e-test: -# name: op-e2e-cannon-tests -# module: op-e2e -# target: test-cannon -# parallelism: 4 -# notify: true -# mentions: "@proofs-squad" -# requires: -# - pnpm-monorepo -# - cannon-prestate -# context: -# - slack + develop-fault-proofs: + when: + and: + - or: + - equal: [ "develop", <> ] + - equal: [ true, <> ] + - not: + equal: [ scheduled_pipeline, << pipeline.trigger_source >> ] + jobs: + - go-mod-download + - cannon-prestate: + requires: + - go-mod-download + - pnpm-monorepo: + name: pnpm-monorepo + requires: + - go-mod-download + - go-e2e-test: + name: op-e2e-cannon-tests + 
module: op-e2e + target: test-cannon + parallelism: 4 + notify: true + mentions: "@proofs-squad" + requires: + - pnpm-monorepo + - cannon-prestate + context: + - slack -# develop-kontrol-tests: -# when: -# and: -# - or: -# - equal: [ "develop", <> ] -# - equal: [ true, <> ] -# - not: -# equal: [ scheduled_pipeline, << pipeline.trigger_source >> ] -# jobs: -# - kontrol-tests: -# context: -# - slack + develop-kontrol-tests: + when: + and: + - or: + - equal: [ "develop", <> ] + - equal: [ true, <> ] + - not: + equal: [ scheduled_pipeline, << pipeline.trigger_source >> ] + jobs: + - kontrol-tests: + context: + - slack scheduled-docker-publish: when: @@ -2176,16 +2177,16 @@ workflows: - oplabs-gcr - slack -# scheduled-preimage-reproducibility: -# when: -# or: -# - equal: [build_daily, <> ] -# # Trigger on manual triggers if explicitly requested -# - equal: [ true, << pipeline.parameters.reproducibility_dispatch >> ] -# jobs: -# - preimage-reproducibility: -# matrix: -# parameters: -# version: ["0.1.0", "0.2.0", "0.3.0", "1.0.0", "1.1.0", "1.2.0"] -# context: -# slack + scheduled-preimage-reproducibility: + when: + or: + - equal: [build_daily, <> ] + # Trigger on manual triggers if explicitly requested + - equal: [ true, << pipeline.parameters.reproducibility_dispatch >> ] + jobs: + - preimage-reproducibility: + matrix: + parameters: + version: ["0.1.0", "0.2.0", "0.3.0", "1.0.0", "1.1.0", "1.2.0"] + context: + slack \ No newline at end of file
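
For reference, the workflow gate this patch relies on can also be expressed per workflow, which is the form this config already uses for its other `when:` conditions (e.g. the `main` and `develop-fault-proofs` workflows). A minimal sketch, assuming a hypothetical boolean pipeline parameter named `run_ci` and a placeholder `noop` job, neither of which exists in the real config:

    version: 2.1

    parameters:
      run_ci:
        type: boolean
        default: false   # leave false to keep CI disabled; set true on an API-triggered pipeline to re-enable

    jobs:
      noop:
        docker:
          - image: cimg/base:2024.01
        steps:
          - run: echo "CI enabled"

    workflows:
      main:
        # While the parameter is false the whole workflow is skipped,
        # so job definitions stay in place but nothing runs.
        when: << pipeline.parameters.run_ci >>
        jobs:
          - noop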
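
A config change of this size is easy to sanity-check locally before pushing; a sketch assuming the CircleCI CLI is installed (with a token configured so orbs can be resolved). The output path below is arbitrary:

    # Validate the YAML against CircleCI's config schema
    circleci config validate .circleci/config.yml

    # Expand orbs and parameters into the final config, to confirm the
    # workflow-level gate leaves nothing scheduled to run
    circleci config process .circleci/config.yml > /tmp/processed-config.yml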