# From PR #19: "Add mock LA server for local Azure Monitor Exporter testing"
# This action runs the pipeline perf continuous benchmarking suite on every PR.
# - With 'pipelineperf' label: runs on dedicated Oracle bare-metal hardware for accurate benchmarks
# - Without label: runs on ubuntu-latest for basic validation
# In either case, the results do not update the charts.
name: Pipeline Perf Pre-Merge

on:
  pull_request:
    branches:
      - main

# Minimal token scope: the workflow only checks out code and runs tests.
permissions:
  contents: read

# Cancel in-progress runs for the same PR if a new commit is pushed
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  # Check for the pipelineperf label to determine which runner to use
  label-check:
    name: Check for pipelineperf label
    runs-on: ubuntu-latest
    outputs:
      has_label: ${{ steps.check_label.outputs.has_label }}
    steps:
      - name: Check if PR has 'pipelineperf' label
        id: check_label
        # Pass the label JSON through an env var instead of interpolating
        # ${{ }} directly into the script body: a label name containing a
        # quote would otherwise break (or inject into) the shell command.
        env:
          PR_LABELS: ${{ toJson(github.event.pull_request.labels) }}
        run: |
          labels=$(echo "$PR_LABELS" | jq -r '.[].name')
          # -F: fixed string, -x: whole-line match — a label that merely
          # contains "pipelineperf" as a substring must not trigger the
          # dedicated-hardware path.
          if echo "$labels" | grep -Fxq "pipelineperf"; then
            echo "Label 'pipelineperf' found - will use dedicated hardware"
            echo "has_label=true" >> "$GITHUB_OUTPUT"
          else
            echo "Label 'pipelineperf' not found - will use ubuntu-latest"
            echo "has_label=false" >> "$GITHUB_OUTPUT"
          fi
# Run on dedicated Oracle hardware when 'pipelineperf' label is present
pipeline-perf-test-dedicated:
needs: label-check
if: needs.label-check.outputs.has_label == 'true'
runs-on: oracle-bare-metal-64cpu-1024gb-x86-64-ubuntu-24
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@a90bcbc6539c36a85cdfeb73f7e2f433735f215b # v2.15.0
with:
egress-policy: audit
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: "3.14"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Set up Python (non-OL8)
if: ${{ steps.detect_os.outputs.id != 'ol' }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: "3.14"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Build dataflow_engine
run: |
git submodule init
git submodule update
cd rust/otap-dataflow
docker buildx build --load --build-context otel-arrow=../../ -f Dockerfile -t df_engine .
cd ../..
- name: Install dependencies
run: |
python -m pip install --user --require-hashes -r tools/pipeline_perf_test/orchestrator/requirements.lock.txt
python -m pip install --user --require-hashes -r tools/pipeline_perf_test/load_generator/requirements.lock.txt
- name: Run pipeline performance test suite
run: |
cd tools/pipeline_perf_test
python orchestrator/run_orchestrator.py --config test_suites/integration/continuous/100klrps-docker.yaml
# Run on ubuntu-latest for basic validation when no label is present
pipeline-perf-test-basic:
needs: label-check
if: needs.label-check.outputs.has_label == 'false'
runs-on: ubuntu-latest
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@a90bcbc6539c36a85cdfeb73f7e2f433735f215b # v2.15.0
with:
egress-policy: audit
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: "3.14"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Build dataflow_engine
run: |
git submodule init
git submodule update
cd rust/otap-dataflow
docker buildx build --load --build-context otel-arrow=../../ -f Dockerfile -t df_engine .
cd ../..
- name: Install dependencies
run: |
python -m pip install --user --require-hashes -r tools/pipeline_perf_test/orchestrator/requirements.lock.txt
python -m pip install --user --require-hashes -r tools/pipeline_perf_test/load_generator/requirements.lock.txt
- name: Run pipeline performance test suite
run: |
cd tools/pipeline_perf_test
python orchestrator/run_orchestrator.py --config test_suites/integration/continuous/100klrps-docker.yaml