Skip to content
Merged
Show file tree
Hide file tree
Changes from 16 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
17 changes: 17 additions & 0 deletions .editorconfig
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# EditorConfig is awesome: https://EditorConfig.org

# top-most EditorConfig file
root = true

# For every file:
# - unix-style newlines and a newline ending every file
# - spaces as indent
# - no trailing whitespace to reduce diff-noise
[*]
end_of_line = lf
insert_final_newline = true
indent_style = space
trim_trailing_whitespace = true

# Shell scripts: 2-space indentation
[*.sh]
indent_size = 2
99 changes: 90 additions & 9 deletions .envrc.vars
Original file line number Diff line number Diff line change
@@ -1,11 +1,18 @@
# -*- shell-script -*-

### Separate (out of direnv) the env vars that are sourced both locally *and* in CI

### Set UTC timezone, as CometBft always logs in UTC while java tools log using the local timezone
export TZ=UTC

### Separate (out of direnv) the env vars that are sourced both locally *and* in CircleCI
export SPLICE_ROOT="${PWD}"
export TOOLS_LIB=$SPLICE_ROOT/build-tools/lib
export LOGS_PATH=$SPLICE_ROOT/log
export DEPLOYMENT_DIR="${SPLICE_ROOT}/cluster/deployment"
export PULUMI_TEST_DIR=${SPLICE_ROOT}/cluster/pulumi
export EXPECTED_FILES_DIR="${SPLICE_ROOT}/cluster/expected"
export PRIVATE_CONFIGS_PATH=${SPLICE_ROOT}/cluster/configs/configs-private
export PUBLIC_CONFIGS_PATH=${SPLICE_ROOT}/cluster/configs/configs

# Increase code heap sizes to avoid issues
# Defaults NonNMethodCodeHeapSize=7M,NonProfiledCodeHeapSize=122M,ProfiledCodeHeapSize=122M
Expand All @@ -14,7 +21,6 @@ export SBT_OPTS="-Xmx6G -Xms2G -Xss2M -XX:+UseG1GC -XX:NonNMethodCodeHeapSize=32
# Provide a simple way to get the path to `sbt-launch.jar` for IntelliJ setup
export SBT_LAUNCH_PATH="$(dirname "$(dirname "$(which sbt)")")/share/sbt/bin/sbt-launch.jar"


# Shortcut var to refer to release bundle contents
export BUNDLE=${SPLICE_ROOT}/apps/app/target/release/splice-node

Expand All @@ -25,7 +31,8 @@ export POSTGRES_HOST="localhost"
export POSTGRES_USER=postgres
export POSTGRES_PASSWORD=postgres

export DEPLOYMENT_DIR="${SPLICE_ROOT}/cluster/deployment"

# ** Docker&Helm registry configs

export GHCR=ghcr.io
export DEV_REGISTRY=$GHCR/digital-asset/decentralized-canton-sync-dev
Expand All @@ -40,9 +47,83 @@ export RELEASE_HELM_REGISTRY=$RELEASE_REGISTRY/helm
export OCI_RELEASE_HELM_REGISTRY=oci://$RELEASE_HELM_REGISTRY
export RELEASE_DOCKER_REGISTRY=$RELEASE_REGISTRY/docker

# Include all organization specific `.envrc.vars.*` files.
# `stat --printf=''` (empty format, stderr discarded) is used purely as an
# "any matching file exists" test without printing anything.
if stat --printf='' .envrc.vars.* 2>/dev/null; then
  for file in .envrc.vars.*; do
    # Quote the filename to survive spaces/globs. Prefer direnv's source_env
    # (tracks the file for automatic reloads); fall back to plain sourcing
    # when this file is consumed outside direnv (e.g. in CI).
    source_env "$file" || . "$file"
  done
fi

# ** Cluster deployment configs

export CLOUDSDK_COMPUTE_REGION="us-central1"
# We set this everywhere, DBs should always be in a fixed zone.
# However, our clusters are not always in a fixed zone so we don't always set
# CLOUDSDK_COMPUTE_ZONE.
export DB_CLOUDSDK_COMPUTE_ZONE="${CLOUDSDK_COMPUTE_REGION}-a"
# Default to the scratch environment
export CLOUDSDK_CORE_PROJECT="da-cn-scratchnet"
# Default cluster sizing
export GCP_CLUSTER_NODE_TYPE=e2-standard-16
export GCP_CLUSTER_MIN_NODES=0
# A high max-nodes by default to support large deployments and hard migrations
# Should be set to a lower number (currently 8) on CI clusters that do neither of those.
export GCP_CLUSTER_MAX_NODES=20
# The logging variant supports default, that google recommends for up to 100kb/s logs (https://cloud.google.com/kubernetes-engine/docs/how-to/adjust-log-throughput)
# The max throughput variant supports multiple tens of MB/s of logs, but also the agents require 2CPUs and therefore we lose 2 CPUs per node
export GCP_CLUSTER_LOGGING_VARIANT="DEFAULT"
export GCP_DNS_PROJECT="da-gcp-canton-domain"
export GCP_DNS_SA_SECRET="clouddns-dns01-solver-svc-acct"
# DNS Service Account information
export DNS01_SA_KEY_SECRET=dns01-sa-key-secret
export DNS01_SA_IAM_ACCOUNT="dns01-solver@${GCP_DNS_PROJECT}.iam.gserviceaccount.com"
export IS_DEVNET=1
# Specify the default SV count. If IS_DEVNET is set to false, then this is ignored and forcibly overridden to 1 (sv1 only).
export DSO_SIZE=4
export MULTIVALIDATOR_SIZE=0
# We rarely take down some of the stacks in our clusters eg. the ones in the observability and cluster-ingress namespaces.
# This can over time lead to a very large number of secrets (which is how helm stores the release history) resulting in
# unnecessary slowness when running helm commands.
export HELM_MAX_HISTORY_SIZE=10
export OIDC_AUTHORITY_LEDGER_API_AUDIENCE=https://canton.network.global
export OIDC_AUTHORITY_VALIDATOR_AUDIENCE=https://canton.network.global
export OIDC_AUTHORITY_SV_AUDIENCE=https://canton.network.global
export DATA_EXPORT_BUCKET_SA_KEY_SECRET=gcp-bucket-sa-key-secret
export DATA_EXPORT_BUCKET_SA_IAM_ACCOUNT="da-cn-data-exports@da-cn-devnet.iam.gserviceaccount.com"
export SPLICE_OAUTH_TEST_AUTHORITY=canton-network-test.us.auth0.com
export SPLICE_OAUTH_TEST_FRONTEND_CLIENT_ID=Ob8YZSBvbZR3vsM2vGKllg3KRlRgLQSw

export SPLICE_OAUTH_TEST_VALIDATOR_WALLET_USER="auth0|6526fab5214c99a9a8e1e3cc"
export SPLICE_OAUTH_TEST_CLIENT_ID_VALIDATOR=cznBUeB70fnpfjaq9TzblwiwjkVyvh5z

export SPLICE_OAUTH_VALIDATOR_TEST_AUTHORITY=canton-network-validator-test.us.auth0.com

export SPLICE_OAUTH_DEV_AUTHORITY=canton-network-dev.us.auth0.com
export SPLICE_OAUTH_DEV_VALIDATOR_WALLET_USER="auth0|63e3d75ff4114d87a2c1e4f5"

export SPLICE_OAUTH_DEV_CLIENT_ID_SV1=OBpJ9oTyOLuAKF0H2hhzdSFUICt0diIn
export SPLICE_OAUTH_DEV_CLIENT_ID_SV2=rv4bllgKWAiW9tBtdvURMdHW42MAXghz
export SPLICE_OAUTH_DEV_CLIENT_ID_SV3=SeG68w0ubtLQ1dEMDOs4YKPRTyMMdDLk
export SPLICE_OAUTH_DEV_CLIENT_ID_SV4=CqKgSbH54dqBT7V1JbnCxb6TfMN8I1cN
export SPLICE_OAUTH_DEV_CLIENT_ID_SV1_VALIDATOR=7YEiu1ty0N6uWAjL8tCAWTNi7phr7tov
export SPLICE_OAUTH_DEV_CLIENT_ID_SV2_VALIDATOR=5N2kwYLOqrHtnnikBqw8A7foa01kui7h
export SPLICE_OAUTH_DEV_CLIENT_ID_SV3_VALIDATOR=V0RjcwPCsIXqYTslkF5mjcJn70AiD0dh
export SPLICE_OAUTH_DEV_CLIENT_ID_SV4_VALIDATOR=FqRozyrmu2d6dFQYC4J9uK8Y6SXCVrhL
export SPLICE_OAUTH_DEV_CLIENT_ID_VALIDATOR1=cf0cZaTagQUN59C1HBL2udiIBdFh2CWq
export SPLICE_OAUTH_DEV_CLIENT_ID_SPLITWELL_VALIDATOR=hqpZ6TP0wGyG2yYwhH6NLpuo0MpJMQZW

export SPLICE_OAUTH_SV_TEST_AUTHORITY=canton-network-sv-test.us.auth0.com
export SPLICE_OAUTH_SV_TEST_CLIENT_ID_VALIDATOR=bUfFRpl2tEfZBB7wzIo9iRNGTj8wMeIn


# Force auth through gke-gcloud-auth-plugin
# See https://cloud.google.com/blog/products/containers-kubernetes/kubectl-auth-changes-in-gke
export USE_GKE_GCLOUD_AUTH_PLUGIN=true

# CometBFT settings
export COMETBFT_DOCKER_IMAGE="digitalasset-canton-enterprise-docker.jfrog.io/cometbft-canton-network:${COMETBFT_RELEASE_VERSION}"

# Test containers config
## Speed up runs
export TESTCONTAINERS_CHECKS_DISABLE=true
## Use just normal cleanup of containers
export TESTCONTAINERS_RYUK_DISABLED=true
## sshd image required for exposing ports
## we explicitly specify it so we can pre-download it when running in CI
export TESTCONTAINERS_SSHD_CONTAINER_IMAGE="testcontainers/sshd:1.1.0"

export OTEL_COLLECTOR_IMAGE="otel/opentelemetry-collector-contrib:0.81.0"
10 changes: 10 additions & 0 deletions .github/actionlint.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# actionlint configuration: declare the custom self-hosted runner labels used
# in workflow `runs-on:` fields so actionlint does not flag them as unknown.
self-hosted-runner:
  labels:
    - self-hosted-docker-tiny
    - self-hosted-docker-medium
    - self-hosted-docker-large
    - self-hosted-k8s-x-small
    - self-hosted-k8s-small
    - self-hosted-k8s-medium
    - self-hosted-k8s-large
    - self-hosted-k8s-x-large
34 changes: 34 additions & 0 deletions .github/actions/cache/daml_artifacts/restore/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
# Composite action: restores the Daml artifacts tarball (/tmp/daml) and the
# generated apps/common/frontend/daml.js from the GitHub Actions cache, then
# unpacks the tarball into the workspace when one was found.
name: "Restore Daml artifacts"
description: "Restore the Daml artifacts cache"
inputs:
  cache_version:
    description: "Version of the cache"
    required: true
outputs:
  cache_hit:
    description: "Cache hit"
    value: ${{ steps.restore.outputs.cache-hit }}

runs:
  using: "composite"
  steps:
    # Primary key pins branch, dependency hash and exact commit; restore-keys
    # fall back to the same branch at any commit, then to main.
    - name: Restore Daml artifacts cache
      id: restore
      uses: actions/cache/restore@v4
      with:
        path: |
          /tmp/daml
          apps/common/frontend/daml.js
        key: daml-artifacts-${{ inputs.cache_version }} branch:${{ github.ref_name }} dependencies:${{ hashFiles('project/build.properties', 'project/BuildCommon.scala', 'project/DamlPlugin.scala', 'build.sbt', 'daml/dars.lock', 'nix/canton-sources.json') }} rev:${{ github.sha }}
        restore-keys: |
          daml-artifacts-${{ inputs.cache_version }} branch:${{ github.ref_name }} dependencies:${{ hashFiles('project/build.properties', 'project/BuildCommon.scala', 'project/DamlPlugin.scala', 'build.sbt', 'daml/dars.lock', 'nix/canton-sources.json') }}
          daml-artifacts-${{ inputs.cache_version }} branch:main dependencies:${{ hashFiles('project/build.properties', 'project/BuildCommon.scala', 'project/DamlPlugin.scala', 'build.sbt', 'daml/dars.lock', 'nix/canton-sources.json') }}
    # pigz = parallel gzip. A cache miss leaves no tarball, which is fine —
    # the build then produces the artifacts from scratch.
    - name: Extract Daml artifacts
      shell: bash
      run: |
        if [[ -e /tmp/daml/daml.tar.gz ]]; then
          tar --use-compress-program=pigz -xf /tmp/daml/daml.tar.gz
        else
          echo "No cached daml artifacts files found. Skipping..."
        fi

32 changes: 32 additions & 0 deletions .github/actions/cache/daml_artifacts/save/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# Composite action: archives every .daml build directory into
# /tmp/daml/daml.tar.gz and saves it (plus apps/common/frontend/daml.js) to
# the GitHub Actions cache — skipped when the paired restore action already
# reported an exact cache hit.
name: "Save Daml artifacts"
description: "Saves the Daml artifacts to the cache"
inputs:
  cache_version:
    description: "Version of the cache"
    required: true
  load_cache_hit:
    description: "Cache hit from the restore Daml artifacts job (should be the cache_hit output from the restore Daml artifacts job)"
    required: true

runs:
  using: "composite"
  steps:
    # Pack all .daml directories with pigz (parallel gzip) for speed.
    - name: Archive Daml artifacts
      if: ${{ ! fromJson(inputs.load_cache_hit) }}
      shell: bash
      run: |
        mkdir -p /tmp/daml
        find . -type d -name ".daml" | tar --use-compress-program=pigz -cf /tmp/daml/daml.tar.gz -T -
    - name: Not archiving Daml artifacts
      if: ${{ fromJson(inputs.load_cache_hit) }}
      shell: bash
      run: |
        echo "Skipping Daml artifacts cache, as there was a cache hit"
    # Step renamed: this action caches Daml artifacts, not precompiled classes
    # (the old name was copy-pasted from the precompiled_classes action).
    - name: Cache Daml artifacts
      if: ${{ ! fromJson(inputs.load_cache_hit) }}
      uses: actions/cache/save@v4
      with:
        path: |
          /tmp/daml
          apps/common/frontend/daml.js
        key: daml-artifacts-${{ inputs.cache_version }} branch:${{ github.ref_name }} dependencies:${{ hashFiles('project/build.properties', 'project/BuildCommon.scala', 'project/DamlPlugin.scala', 'build.sbt', 'daml/dars.lock', 'nix/canton-sources.json') }} rev:${{ github.sha }}
32 changes: 32 additions & 0 deletions .github/actions/cache/frontend_node_modules/restore/action.yml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

31 changes: 31 additions & 0 deletions .github/actions/cache/frontend_node_modules/save/action.yml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

26 changes: 26 additions & 0 deletions .github/actions/cache/precompiled_classes/restore/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# Composite action: restores the precompiled-classes archive (/tmp/classes)
# from the GitHub Actions cache and unpacks it via a helper script.
name: "Restore Precompiled Classes"
description: "Restore the precompiled classes from the cache"
inputs:
  cache_version:
    description: "Version of the cache"
    required: true
outputs:
  cache_hit:
    description: "Cache hit"
    value: ${{ steps.restore.outputs.cache-hit }}

runs:
  using: "composite"
  steps:
    # Primary key pins branch, build-dependency hash and exact commit;
    # restore-keys fall back to the same branch at any commit, then to main.
    - name: Restore precompiled classes
      id: restore
      uses: actions/cache/restore@v4
      with:
        path: /tmp/classes
        key: classes-${{ inputs.cache_version }} branch:${{ github.ref_name }} dependencies:${{ hashFiles('project/build.properties', 'project/BuildCommon.scala', 'project/DamlPlugin.scala', 'project/Dependencies.scala', 'project/CantonDependencies.scala', 'project/Houserules.scala', 'project/plugins.sbt', 'build.sbt', 'daml/dars.lock', 'openapi-cache-key.txt') }} rev:${{ github.sha }}
        restore-keys: |
          classes-${{ inputs.cache_version }} branch:${{ github.ref_name }} dependencies:${{ hashFiles('project/build.properties', 'project/BuildCommon.scala', 'project/DamlPlugin.scala', 'project/Dependencies.scala', 'project/CantonDependencies.scala', 'project/Houserules.scala', 'project/plugins.sbt', 'build.sbt', 'daml/dars.lock', 'openapi-cache-key.txt') }}
          classes-${{ inputs.cache_version }} branch:main dependencies:${{ hashFiles('project/build.properties', 'project/BuildCommon.scala', 'project/DamlPlugin.scala', 'project/Dependencies.scala', 'project/CantonDependencies.scala', 'project/Houserules.scala', 'project/plugins.sbt', 'build.sbt', 'daml/dars.lock', 'openapi-cache-key.txt') }}
    # Extraction logic lives in a shared script so save/restore stay in sync.
    - name: Extract precompiled classes
      shell: bash
      run: ./.github/actions/scripts/extract_precompiled_classes.sh
28 changes: 28 additions & 0 deletions .github/actions/cache/precompiled_classes/save/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# Composite action: archives the precompiled classes into /tmp/classes and
# saves them to the GitHub Actions cache — skipped when the paired restore
# action already reported an exact cache hit.
name: "Save Precompiled Classes"
description: "Saves the precompiled classes to the cache"
inputs:
  cache_version:
    description: "Version of the cache"
    required: true
  load_cache_hit:
    description: "Cache hit from the load precompiled classes job (should be the cache_hit output from the load precompiled classes job)"
    required: true

runs:
  using: "composite"
  steps:
    # Archiving logic lives in a shared script so save/restore stay in sync.
    - name: Archive precompiled classes
      if: ${{ ! fromJson(inputs.load_cache_hit) }}
      shell: bash
      run: ./.github/actions/scripts/archive_precompiled_classes.sh
    # Typo fixed in step name: "preceompiled" -> "precompiled".
    - name: Not archiving precompiled classes
      if: ${{ fromJson(inputs.load_cache_hit) }}
      shell: bash
      run: |
        echo "Skipping precompiled classes cache, as there was a cache hit"
    - name: Cache precompiled classes
      if: ${{ ! fromJson(inputs.load_cache_hit) }}
      uses: actions/cache/save@v4
      with:
        path: /tmp/classes
        key: classes-${{ inputs.cache_version }} branch:${{ github.ref_name }} dependencies:${{ hashFiles('project/build.properties', 'project/BuildCommon.scala', 'project/DamlPlugin.scala', 'project/Dependencies.scala', 'project/CantonDependencies.scala', 'project/Houserules.scala', 'project/plugins.sbt', 'build.sbt', 'daml/dars.lock', 'openapi-cache-key.txt') }} rev:${{ github.sha }}
24 changes: 24 additions & 0 deletions .github/actions/cache/sbt/restore/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Composite action: restores the Ivy and sbt caches from the GitHub Actions
# cache; exposes whether the lookup was an exact hit.
# Typo fixed in action name: "Cacht" -> "Cache".
name: "Restore SBT Cache"
description: "Restore the SBT cache"
inputs:
  cache_version:
    description: "Version of the cache"
    required: true
outputs:
  cache_hit:
    description: "Cache hit"
    value: ${{ steps.restore.outputs.cache-hit }}

runs:
  using: "composite"
  steps:
    # Key is per-job and per-dependency hash; the restore-key falls back to
    # the closest cache for the same job and dependency set.
    - name: Restore SBT cache
      id: restore
      uses: actions/cache/restore@v4
      with:
        path: |
          /github/home/.ivy2/cache
          /github/home/.sbt
        key: sbt-${{ inputs.cache_version }} job=${{ github.job }} dependencies=${{ hashFiles('project/Dependencies.scala', 'project/CantonDependencies.scala') }}-${{ hashFiles('project/BuildCommon.scala', 'project/build.properties', 'project/Houserules.scala', 'project/plugins.sbt', 'build.sbt') }}
        restore-keys: |
          sbt-${{ inputs.cache_version }} job=${{ github.job }} dependencies=${{ hashFiles('project/Dependencies.scala', 'project/CantonDependencies.scala') }}
26 changes: 26 additions & 0 deletions .github/actions/cache/sbt/save/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# Composite action: saves the Ivy and sbt caches to the GitHub Actions cache —
# skipped when the paired restore action already reported an exact cache hit.
name: "Save SBT Cache"
description: "Saves the SBT cache"
inputs:
  cache_version:
    description: "Version of the cache"
    required: true
  load_cache_hit:
    description: "Cache hit from the restore SBT job (should be the cache_hit output from the restore SBT job)"
    required: true

runs:
  using: "composite"
  steps:
    # Key must match the one used by the paired restore action exactly.
    - name: Store SBT cache
      if: ${{ ! fromJson(inputs.load_cache_hit) }}
      uses: actions/cache/save@v4
      with:
        path: |
          /github/home/.ivy2/cache
          /github/home/.sbt
        key: sbt-${{ inputs.cache_version }} job=${{ github.job }} dependencies=${{ hashFiles('project/Dependencies.scala', 'project/CantonDependencies.scala') }}-${{ hashFiles('project/BuildCommon.scala', 'project/build.properties', 'project/Houserules.scala', 'project/plugins.sbt', 'build.sbt') }}
    - name: Not storing SBT cache
      if: ${{ fromJson(inputs.load_cache_hit) }}
      shell: bash
      run: |
        echo "Skipping SBT cache, as there was a cache hit"
Loading
Loading