16 changes: 16 additions & 0 deletions .github/actions/tests/pre_scala_test/action.yml
@@ -11,6 +11,10 @@ inputs:
description: "Skip the test if the commit messages on the branch match this regex"
required: false
default: ""
run_if_commit_tag:
description: "Run the job only if the commit message contains this tag"
required: false
default: ""
skip_if_files_changed:
description: "Skip the test if any of the given files was changed since the last release"
required: false
@@ -52,6 +56,18 @@ runs:
echo "Commit message ($msg) does not match regex (${{ inputs.skip_if_regex }}), not skipping"
fi

- name: Check if include commit message tag matches
if: ${{ inputs.run_if_commit_tag != '' }}
shell: bash
run: |
last_commit_msg=$(git log -1 --pretty=%B)
if [[ $last_commit_msg =~ "[${{ inputs.run_if_commit_tag }}]" ]]; then
echo "Commit message ($last_commit_msg) includes tag (${{ inputs.run_if_commit_tag }}), running"
else
echo "Commit message ($last_commit_msg) does not include tag (${{ inputs.run_if_commit_tag }}), skipping"
echo "skip=true" >> "$GITHUB_ENV"
fi

- name: Check if skip files changed
if: ${{ inputs.skip_if_files_changed != '' }}
shell: bash
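Note that because the right-hand side of the `=~` comparison in the new step is quoted, bash matches it as a literal string, so the step looks for the exact substring `[bft]` rather than interpreting the brackets as a character class. A minimal standalone sketch of the same check (the commit message and tag below are made-up values for illustration):

```bash
#!/usr/bin/env bash
# Sketch of the commit-tag check from the action above.
# Quoting the =~ pattern forces a literal substring match, so only "[bft]" matches.
tag="bft"
last_commit_msg="Tune sequencer onboarding retries [bft]"
if [[ $last_commit_msg =~ "[${tag}]" ]]; then
  echo "Commit message ($last_commit_msg) includes tag ($tag), running"
else
  echo "Commit message ($last_commit_msg) does not include tag ($tag), skipping"
fi
```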
5 changes: 5 additions & 0 deletions .github/workflows/build.scala_test.yml
@@ -30,6 +30,10 @@ on:
type: string
required: false
default: ""
run_if_commit_tag:
type: string
required: false
default: ""
skip_if_files_changed:
type: string
required: false
@@ -72,6 +76,7 @@ jobs:
test_names_file: ${{ inputs.test_names_file }}
skip_if_regex: ${{ inputs.skip_if_regex }}
skip_if_files_changed: ${{ inputs.skip_if_files_changed }}
run_if_commit_tag: ${{ inputs.run_if_commit_tag }}

scala_test:
runs-on: ${{ inputs.runs_on }}
13 changes: 13 additions & 0 deletions .github/workflows/build.yml
@@ -136,6 +136,19 @@ jobs:
commit_sha: ${{ inputs.commit_sha }}
secrets: inherit

scala_test_wall_clock_time_bft:
uses: ./.github/workflows/build.scala_test.yml
with:
runs_on: self-hosted-k8s-large
test_names_file: 'test-full-class-names.log'
start_canton_options: -we
parallelism: 10
test_name: wall-clock-time-bft
with_gcp_creds: true
pre_sbt_cmd: "export SPLICE_USE_BFT_SEQUENCER=1"
run_if_commit_tag: 'bft'
secrets: inherit

scala_test_frontend_wall_clock_time:
uses: ./.github/workflows/build.scala_test.yml
with:
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -44,7 +44,7 @@ repos:
# ignore external sources
args: [-e, SC1091]
types: [shell]
exclude: '^canton/'
exclude: '(^canton/|\.envrc)'
- id: typescriptfmt
name: typescriptfmt
language: system
16 changes: 16 additions & 0 deletions TESTING.md
@@ -72,6 +72,10 @@ To request a cluster test to be run on your PR, comment on your pr `/cluster_tes
for a basic test or a hard-migration test respectively. After commenting, reach out to the
[Splice Contributors](CONTRIBUTORS.md) to approve and trigger the actual test on your behalf.

### Enabling the new Canton BFT ordering layer

To run the integration tests with the new Canton BFT ordering layer in CI, include the tag `[bft]` in your commit message.
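For example, the following (made-up) commit message would trigger the BFT variant of the CI test job:

```bash
# The `[bft]` tag anywhere in the latest commit message triggers the BFT CI job
git commit -m "Tune sequencer onboarding retries [bft]"
```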

## Running Tests Locally

### Managing Canton for Tests
@@ -208,6 +212,17 @@ No installation of `lnav` is required, as it is provided by default by our `dire
Documentation about common pitfalls when writing new integration tests and debugging existing ones can be found [here](/apps/app/src/test/scala/org/lfdecentralizedtrust/splice/integration/tests/README.md).
If you wish to extend our testing topology please also consult [this README](/apps/app/src/test/resources/README.md) about name and port allocation.

### Enabling the new Canton BFT ordering layer

To run the integration tests locally with the new Canton BFT ordering layer, Canton must be started with the `-e` flag.
This can be done by running `./start-canton.sh -we`.
Furthermore, the integration tests must be run with the `SPLICE_USE_BFT_SEQUENCER` environment variable set, e.g. to `1`.
Example test run:

```bash
SPLICE_USE_BFT_SEQUENCER=1 sbt 'apps-app/ testOnly org.lfdecentralizedtrust.splice.integration.tests.SvDevNetReonboardingIntegrationTest'
```

### Testing App Behaviour Outside of Tests Without Running Bundle

Sometimes, you may need to debug startup behaviour of the Splice apps that is causing issues for the
@@ -380,6 +395,7 @@ They are meant to provide a quick feedback loop and to offer additional protecti
We use [helm-unittest](https://github.com/helm-unittest/helm-unittest/) for some of our Helm charts.
To run all Helm chart tests locally run `make cluster/helm/test`.
To run only the tests for a specific chart `CHART`, run `helm unittest cluster/helm/CHART`.
If this fails with the error `### Error: Chart.yaml file is missing`, run `make cluster/helm/build` first.

Refer to the documentation of `helm-unittest` for more information on how to extend our Helm tests.
When writing or debugging Helm tests, it is often useful to run `helm unittest` with the `-d` flag.
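Putting the commands above together, a typical local run for a single chart might look like this (`CHART` stands for the chart directory name):

```bash
# Rebuild the charts first if helm-unittest reports a missing Chart.yaml
make cluster/helm/build
# Run the unit tests for one chart, with the -d flag mentioned above for debugging
helm unittest -d cluster/helm/CHART
```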
@@ -4,7 +4,7 @@
package org.lfdecentralizedtrust.splice.config

import org.lfdecentralizedtrust.splice.auth.AuthUtil
import org.lfdecentralizedtrust.splice.scan.config.ScanAppBackendConfig
import org.lfdecentralizedtrust.splice.scan.config.{BftSequencerConfig, ScanAppBackendConfig}
import org.lfdecentralizedtrust.splice.splitwell.config.{
SplitwellAppBackendConfig,
SplitwellAppClientConfig,
@@ -27,6 +27,7 @@ import com.digitalasset.canton.config.*
import com.digitalasset.canton.config.RequireTypes.Port
import monocle.macros.syntax.lens.*
import org.apache.pekko.http.scaladsl.model.Uri
import org.lfdecentralizedtrust.splice.sv.automation.singlesv.SvBftSequencerPeerOffboardingTrigger

import scala.collection.mutable
import scala.collection.parallel.CollectionConverters.ImmutableMapIsParallelizable
@@ -35,6 +36,8 @@ import scala.io.Source

object ConfigTransforms {

val IsTheCantonSequencerBFTEnabled: Boolean = sys.env.contains("SPLICE_USE_BFT_SEQUENCER")

sealed abstract class ConfigurableApp extends Product with Serializable

object ConfigurableApp {
@@ -207,6 +210,7 @@ object ConfigTransforms {
updateAutomationConfig(ConfigurableApp.Sv)(
_.withPausedTrigger[SvOffboardingMediatorTrigger]
.withPausedTrigger[SvOffboardingSequencerTrigger]
.withPausedTrigger[SvBftSequencerPeerOffboardingTrigger]
)

def withPausedSvOffboardingMediatorAndPartyToParticipantTriggers(): ConfigTransform =
@@ -215,6 +219,14 @@
.withPausedTrigger[SvOffboardingPartyToParticipantProposalTrigger]
)

def withResumedOffboardingTriggers(): ConfigTransform = {
updateAutomationConfig(ConfigurableApp.Sv)(
_.withResumedTrigger[SvOffboardingMediatorTrigger]
.withResumedTrigger[SvOffboardingSequencerTrigger]
.withResumedTrigger[SvBftSequencerPeerOffboardingTrigger]
)
}

def setAmuletPrice(price: BigDecimal): ConfigTransform =
config =>
Seq(
@@ -357,7 +369,9 @@
})

def bumpCantonDomainPortsBy(bump: Int): ConfigTransform =
bumpSvAppCantonDomainPortsBy(bump) compose bumpValidatorAppCantonDomainPortsBy(bump)
bumpSvAppCantonDomainPortsBy(bump) compose bumpValidatorAppCantonDomainPortsBy(
bump
) compose bumpScanCantonDomainPortsBy(bump)

def bumpSvAppCantonDomainPortsBy(bump: Int): ConfigTransform = {
updateAllSvAppConfigs_(
@@ -368,13 +382,27 @@
_.map(d =>
d.copy(
sequencer = d.sequencer
.copy(externalPublicApiUrl = bumpUrl(bump, d.sequencer.externalPublicApiUrl))
.copy(
externalPublicApiUrl = bumpUrl(bump, d.sequencer.externalPublicApiUrl)
)
)
)
)
)
}

def bumpScanCantonDomainPortsBy(bump: Int) = {
updateAllScanAppConfigs_(
_.focus(_.bftSequencers).modify(
_.map(
_.focus(_.p2pUrl).modify(
bumpUrl(bump, _)
)
)
)
)
}

def bumpValidatorAppCantonDomainPortsBy(bump: Int): ConfigTransform = {
def bumpUrl(s: String): String = {
val uri = Uri(s)
@@ -400,7 +428,14 @@
.focus(_.localSynchronizerNode)
.modify(_.map(portTransform(bump, _)))
),
updateAllScanAppConfigs_(_.focus(_.participantClient).modify(portTransform(bump, _))),
updateAllScanAppConfigs_(
_.focus(_.participantClient)
.modify(portTransform(bump, _))
.focus(_.sequencerAdminClient)
.modify(portTransform(bump, _))
.focus(_.bftSequencers)
.modify(_.map(_.focus(_.sequencerAdminClient).modify(portTransform(bump, _))))
),
updateAllValidatorConfigs_(
_.focus(_.participantClient)
.modify(portTransform(bump, _))
@@ -528,6 +563,8 @@ object ConfigTransforms {
.modify(_.map(setPortPrefix(range)))
.focus(_.sequencerAdminClient.port)
.modify(setPortPrefix(range))
.focus(_.bftSequencers)
.modify(_.map(_.focus(_.sequencerAdminClient.port).modify(setPortPrefix(range))))
} else {
config
}
@@ -608,6 +645,34 @@ object ConfigTransforms {
transforms.foldLeft((c: SpliceConfig) => c)((f, tf) => f compose tf)
}

def withBftSequencers(): ConfigTransform = {
updateAllSvAppConfigs_(appConfig =>
appConfig
.focus(_.localSynchronizerNode)
.modify(
_.map(
_.focus(_.sequencer).modify(
_.copy(
isBftSequencer = true
)
)
)
)
) compose {
updateAllScanAppConfigs((scan, config) =>
config.copy(
bftSequencers = Seq(
BftSequencerConfig(
0,
config.sequencerAdminClient,
s"http://localhost:${5010 + Integer.parseInt(scan.stripPrefix("sv").take(1)) * 100}",
)
)
)
)
}
}

private def portTransform(bump: Int, c: AdminServerConfig): AdminServerConfig =
c.copy(internalPort = c.internalPort.map(_ + bump))

@@ -1,17 +1,3 @@
_sequencer_bft_template {
type = "BFT"
config = {
storage=${_shared.storage}
initial-network {
server-endpoint {
address="0.0.0.0"
external-address = "localhost"
external-tls-config.enabled=false
}
peer-endpoints = []
}
}
}
canton {
sequencers {
globalSequencerSv1.sequencer = ${_sequencer_bft_template}
4 changes: 4 additions & 0 deletions apps/app/src/test/resources/include/nodes/sv4-node.conf
@@ -11,6 +11,10 @@ canton {
}
}

scan-apps {
sv4Scan { include required("../scans/sv4-scan") }
}

validator-apps {
sv4Validator { include required("../validators/sv4-validator") }
}
15 changes: 15 additions & 0 deletions apps/app/src/test/resources/include/sequencers.conf
@@ -1,5 +1,20 @@
include required("storage-postgres.conf")

_sequencer_bft_template {
type = "BFT"
config = {
storage=${_shared.storage}
initial-network {
server-endpoint {
address="0.0.0.0"
external-address = "localhost"
external-tls-config.enabled=false
}
peer-endpoints = []
}
}
}

_sequencer_reference_template {
init {
# In Splice, the node identifier is always set by the application controlling the canton node.
6 changes: 6 additions & 0 deletions apps/app/src/test/resources/include/svs/sv4.conf
@@ -30,6 +30,12 @@
private-key = "MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgxED/gH8AeSwNujZAVLhBRSN55Hx0ntC6FKKhgn+7h92hRANCAARkw2wMmvW5PAxMgiXNRmlR7FMupUYywPtxHhjyyphgViGV1Ux4cbnNK5t/6n5ZlssTIxQJPmcEIIGHSiJRj1ys"
}

scan {
public-url = "http://localhost:5312"
internal-url = "http://localhost:5312"
}


approved-sv-identities = [
{
include required("_sv2-id")
@@ -11,6 +11,10 @@
sv-validator = true
sv-user = "sv3"

// Override to use own scan
scan-client.url="http://127.0.0.1:"${canton.scan-apps.sv3Scan.admin-api.port}
scan-client.url=${?SV3_SCAN_URL}

contact-point = "sv3@example.com"
canton-identifier-config = {
participant = "sv3"
@@ -12,7 +12,7 @@
sv-user = "sv4"

// We only have two scans, so we point to SV-2's
scan-client.url="http://127.0.0.1:"${?canton.scan-apps.sv2Scan.admin-api.port}
scan-client.url="http://127.0.0.1:"${?canton.scan-apps.sv4Scan.admin-api.port}
scan-client.url=${?SV2_SCAN_URL}

contact-point = "sv4@example.com"
@@ -0,0 +1,14 @@
include required("include/canton-basic.conf")
include required("include/sequencers.conf")

canton {
sequencers {

sv4StandaloneSequencer.sequencer = ${_sequencer_bft_template}
sv4StandaloneSequencer.sequencer.config.initial-network.server-endpoint.port = 27410
sv4StandaloneSequencer.sequencer.config.initial-network.server-endpoint.external-port = 27410
sv4StandaloneSequencer.sequencer.config.storage.config.properties.databaseName = "sequencer_sv4_standalone_bft"
sv4StandaloneSequencer.sequencer.config.storage.config.properties.databaseName = ${?SV4_SEQUENCER_DB_BFT}
}

}
@@ -0,0 +1,27 @@
include required("include/canton-basic.conf")
include required("include/sequencers.conf")

canton {
sequencers {

sv1StandaloneSequencer.sequencer = ${_sequencer_bft_template}
sv1StandaloneSequencer.sequencer.config.initial-network.server-endpoint.port = 27110
sv1StandaloneSequencer.sequencer.config.initial-network.server-endpoint.external-port = 27110
sv1StandaloneSequencer.sequencer.config.storage.config.properties.databaseName = "sequencer_sv1_standalone_bft"
sv1StandaloneSequencer.sequencer.config.storage.config.properties.databaseName = ${?SV1_SEQUENCER_DB_BFT}

sv2StandaloneSequencer.sequencer = ${_sequencer_bft_template}
sv2StandaloneSequencer.sequencer.config.initial-network.server-endpoint.port = 27210
sv2StandaloneSequencer.sequencer.config.initial-network.server-endpoint.external-port = 27210
sv2StandaloneSequencer.sequencer.config.storage.config.properties.databaseName = "sequencer_sv2_standalone_bft"
sv2StandaloneSequencer.sequencer.config.storage.config.properties.databaseName = ${?SV2_SEQUENCER_DB_BFT}

sv3StandaloneSequencer.sequencer = ${_sequencer_bft_template}
sv3StandaloneSequencer.sequencer.config.initial-network.server-endpoint.port = 27310
sv3StandaloneSequencer.sequencer.config.initial-network.server-endpoint.external-port = 27310
sv3StandaloneSequencer.sequencer.config.storage.config.properties.databaseName = "sequencer_sv3_standalone_bft"
sv3StandaloneSequencer.sequencer.config.storage.config.properties.databaseName = ${?SV3_SEQUENCER_DB_BFT}

}

}