# Solo Test Sandbox workflow — "Update solo-test.yml" (#7).
# NOTE(review): the GitHub web-UI chrome from the original copy-paste
# ("Skip to content", PR title, "Workflow file for this run") was converted
# into this comment header so the file parses as YAML.
name: Solo Test Sandbox
on:
push:
branches:
- 4900-solo-reduce-json-rpc-relay-memory-footprint
permissions:
contents: read
jobs:
solo-test:
runs-on: hiero-smart-contracts-linux-large
timeout-minutes: 50
strategy:
fail-fast: false
matrix:
memory_limit: [1000Mi, 512Mi, 256Mi]
permissions:
contents: read
checks: write
env:
SOLO_CLUSTER_NAME: solo
SOLO_NAMESPACE: solo
SOLO_CLUSTER_SETUP_NAMESPACE: solo-cluster
SOLO_DEPLOYMENT: solo-deployment
name: Solo Test (${{ matrix.memory_limit }})
steps:
- name: Set env variables
run: |
if [ -n "${{ inputs.operator_id }}" ]; then
echo "OPERATOR_ID_MAIN=${{ inputs.operator_id }}" >> $GITHUB_ENV
fi
- name: Harden Runner
uses: step-security/harden-runner@5ef0c079ce82195b2a36a210272d6b661572d83e # v2.14.2
with:
egress-policy: audit
- name: Checkout Code
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Setup node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version: 22
# Set up kind; needed for configuring the solo environment
- name: Setup Kind
uses: helm/kind-action@a1b0e391336a6ee6713a0583f8c6240d70863de3 # v1.12.0
with:
install_only: true
node_image: kindest/node:v1.31.4@sha256:2cb39f7295fe7eafee0842b1052a599a4fb0f8bcf3f83d96c7f4864c357c6c30
version: v0.26.0
kubectl_version: v1.31.4
verbosity: 3
wait: 120s
- name: Install Solo
run: npm install -g @hashgraph/solo
- name: Configure and run solo
run: |
kind create cluster -n "${SOLO_CLUSTER_NAME}"
# initialize solo
solo init
solo cluster-ref config connect --cluster-ref kind-${SOLO_CLUSTER_NAME} --context kind-${SOLO_CLUSTER_NAME}
solo deployment config create -n "${SOLO_NAMESPACE}" --deployment "${SOLO_DEPLOYMENT}"
solo deployment cluster attach --deployment "${SOLO_DEPLOYMENT}" --cluster-ref kind-${SOLO_CLUSTER_NAME} --num-consensus-nodes 1
solo keys consensus generate --gossip-keys --tls-keys --deployment "${SOLO_DEPLOYMENT}"
solo cluster-ref config setup -s "${SOLO_CLUSTER_SETUP_NAMESPACE}"
solo consensus network deploy --deployment "${SOLO_DEPLOYMENT}"
solo consensus node setup --deployment "${SOLO_DEPLOYMENT}"
solo consensus node start --deployment "${SOLO_DEPLOYMENT}"
solo mirror node add --deployment "${SOLO_DEPLOYMENT}" --cluster-ref kind-${SOLO_CLUSTER_NAME} --enable-ingress --pinger
- name: Run Solo Relay
run: |
cat <<EOF > relay-resources.yaml
rpcRelay:
resources:
requests:
cpu: 0
memory: 0
limits:
cpu: 1100m
memory: ${{ matrix.memory_limit }}
EOF
cat relay-resources.yaml
solo relay node add -i node1 --deployment "${SOLO_DEPLOYMENT}" -f relay-resources.yaml
- name: check Relay Resources
run: |
echo "Describing Relay pod resources:"
kubectl -n "${SOLO_NAMESPACE}" describe pod relay-1
- name: Port-forward Consensus Node
run: |
sudo kill -9 $(sudo lsof -ti :50211) || true
kubectl port-forward -n "${SOLO_NAMESPACE}" network-node1-0 50211:50211 &
- name: Checkout Relay repo
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: 4900-solo-reduce-json-rpc-relay-memory-footprint
- name: Install packages
run: npm ci
- name: Start json-rpc-relay-test-client
env:
CHAIN_ID: "0x12a"
MIRROR_NODE_URL: "http://127.0.0.1:8081"
HEDERA_NETWORK: '{"127.0.0.1:50211":"0.0.3"}'
OPERATOR_ID_MAIN: 0.0.2
OPERATOR_KEY_MAIN: 302e020100300506032b65700422042091132178e72057a1d7528025956fe39b0b847f200ab59b2fdd367017f3087137
REDIS_ENABLED: false
USE_ASYNC_TX_PROCESSING: false
E2E_RELAY_HOST: http://localhost:7546
SDK_LOG_LEVEL: trace
USE_INTERNAL_RELAY: false
run: npm run build && npm run acceptancetest:xts