# Workflow file for Load Test run #27

name: Load Test

on:
  # Manual trigger
  workflow_dispatch:
    inputs:
      test_case:
        description: 'Test case to run (leave empty to run all tests)'
        required: false
        default: ''
        type: choice
        options:
          - ''
          - test-completion-standard
          - test-completion-conversation
          - test-responses
      base_url:
        description: 'Base URL for testing'
        required: true
        default: 'https://api-stag.jan.ai'
        type: string
      model:
        description: 'Model to test'
        required: true
        default: 'jan-v1-4b'
        type: string
      duration_minutes:
        description: 'Test duration in minutes'
        required: true
        default: '5'
        type: string
      nonstream_rps:
        description: 'Non-streaming requests per second'
        required: true
        default: '2'
        type: string
      stream_rps:
        description: 'Streaming requests per second'
        required: true
        default: '1'
        type: string
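
# The same inputs can be supplied from the command line. A minimal sketch
# using the GitHub CLI (assumes `gh` is authenticated against this
# repository; omit -f flags to fall back to the defaults above):
#
#   gh workflow run "Load Test" \
#     -f test_case=test-responses \
#     -f base_url=https://api-stag.jan.ai \
#     -f duration_minutes=5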
env:
  # Test configuration - use workflow_dispatch inputs when present,
  # otherwise fall back to the defaults below
  BASE: ${{ github.event.inputs.base_url || 'https://api-stag.jan.ai' }}
  MODEL: ${{ github.event.inputs.model || 'jan-v1-4b' }}
  DURATION_MIN: ${{ github.event.inputs.duration_minutes || '2' }}
  NONSTREAM_RPS: ${{ github.event.inputs.nonstream_rps || '2' }}
  STREAM_RPS: ${{ github.event.inputs.stream_rps || '1' }}
  # Cloudflare load test token (required for API access)
  LOADTEST_TOKEN: ${{ secrets.LOADTEST_TOKEN }}
  # Guest authentication - no API keys needed; the tests log in as a guest
  # automatically
  # Prometheus remote write configuration (standard k6 env vars)
  K6_PROMETHEUS_RW_SERVER_URL: ${{ secrets.K6_PROMETHEUS_RW_SERVER_URL }}
  K6_PROMETHEUS_RW_USERNAME: ${{ secrets.K6_PROMETHEUS_RW_USERNAME }}
  K6_PROMETHEUS_RW_PASSWORD: ${{ secrets.K6_PROMETHEUS_RW_PASSWORD }}
  K6_PROMETHEUS_RW_TREND_STATS: ${{ vars.K6_PROMETHEUS_RW_TREND_STATS || 'p(95),p(99),min,max' }}
  K6_PROMETHEUS_RW_PUSH_INTERVAL: ${{ vars.K6_PROMETHEUS_RW_PUSH_INTERVAL || '5s' }}
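
# The K6_PROMETHEUS_RW_* variables above are read by k6's experimental
# Prometheus remote-write output. A minimal sketch of the equivalent direct
# invocation (script.js is a placeholder; the real scripts are invoked by
# run-loadtest.sh under tests/):
#
#   K6_PROMETHEUS_RW_SERVER_URL=https://prometheus.example/api/v1/write \
#   K6_PROMETHEUS_RW_TREND_STATS='p(95),p(99),min,max' \
#     k6 run -o experimental-prometheus-rw script.js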
jobs:
  load-test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup k6
        uses: grafana/setup-k6-action@v1
      - name: Clear k6 defaults
        run: |
          # Remove any k6 config files that might carry localhost defaults
          rm -f ~/.k6rc .k6rc
          # Unset k6 env vars that might interfere. Note that this only
          # affects this step's shell; each later step starts with a fresh
          # environment anyway.
          unset K6_OUT K6_PROMETHEUS_URL || true
      - name: Install jq for metrics parsing
        run: sudo apt-get update && sudo apt-get install -y jq
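      # Note: jq ships preinstalled on GitHub's ubuntu-latest images, so the
      # step above is a safeguard against image changes rather than a hard
      # requirement.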
      - name: Validate inputs
        run: |
          echo "πŸš€ Load Test Execution"
          echo "Trigger: ${{ github.event_name }}"
          echo ""
          echo "Test Configuration:"
          if [[ -n "${{ github.event.inputs.test_case }}" ]]; then
            echo "  Test Case: ${{ github.event.inputs.test_case }} (specific test)"
          else
            echo "  Test Case: ALL TESTS (no specific test selected)"
          fi
          # Echo the resolved env values so the log reflects defaults as
          # well as explicit inputs
          echo "  Base URL: $BASE"
          echo "  Model: $MODEL"
          echo "  Duration: $DURATION_MIN minutes"
          echo "  Non-stream RPS: $NONSTREAM_RPS"
          echo "  Stream RPS: $STREAM_RPS"
          # Cloudflare load test token validation
          if [[ -n "$LOADTEST_TOKEN" ]]; then
            echo "βœ… Cloudflare load test token: [CONFIGURED]"
          else
            echo "❌ ERROR: LOADTEST_TOKEN is required for Cloudflare API access"
            echo "Please configure the LOADTEST_TOKEN secret in the GitHub repository settings"
            exit 1
          fi
          # Guest authentication - no secrets needed
          echo "βœ… Using guest authentication (no API keys required)"
          echo ""
          echo "Prometheus Configuration:"
          if [[ -n "$K6_PROMETHEUS_RW_SERVER_URL" ]]; then
            echo "βœ… k6 Prometheus remote write endpoint: [CONFIGURED]"
            if [[ -n "$K6_PROMETHEUS_RW_USERNAME" ]]; then
              echo "βœ… k6 Prometheus username: [CONFIGURED]"
            else
              echo "⚠️ k6 Prometheus username not configured"
            fi
            if [[ -n "$K6_PROMETHEUS_RW_PASSWORD" ]]; then
              echo "βœ… k6 Prometheus password: [HIDDEN]"
            else
              echo "⚠️ k6 Prometheus password not configured"
            fi
            echo "πŸ“Š Trend stats: $K6_PROMETHEUS_RW_TREND_STATS"
            echo "⏱️ Push interval: $K6_PROMETHEUS_RW_PUSH_INTERVAL"
          else
            echo "⚠️ Warning: K6_PROMETHEUS_RW_SERVER_URL is not configured"
          fi
      - name: Run load test
        id: loadtest
        run: |
          cd tests
          if [[ "${{ github.event_name }}" == "workflow_dispatch" && -n "${{ github.event.inputs.test_case }}" ]]; then
            echo "Running specific test case: ${{ github.event.inputs.test_case }}"
            ./run-loadtest.sh "${{ github.event.inputs.test_case }}"
          else
            echo "Running all test cases"
            ./run-loadtest.sh
          fi
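      # For local debugging, roughly the same run can be reproduced outside
      # CI (a sketch; it assumes run-loadtest.sh reads the same env vars this
      # workflow exports, and <token> is a valid Cloudflare load test token):
      #
      #   cd tests
      #   BASE=https://api-stag.jan.ai MODEL=jan-v1-4b DURATION_MIN=2 \
      #   NONSTREAM_RPS=2 STREAM_RPS=1 LOADTEST_TOKEN=<token> \
      #     ./run-loadtest.sh test-completion-standard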
      - name: Parse test results
        id: parse_results
        if: always()
        run: |
          cd tests/results
          # Find the latest results file
          LATEST_FILE=$(ls -t *_*.json 2>/dev/null | head -1 || echo "")
          if [[ -n "$LATEST_FILE" && -f "$LATEST_FILE" ]]; then
            echo "results_file=$LATEST_FILE" >> "$GITHUB_OUTPUT"
            # Extract key metrics using jq
            if command -v jq &> /dev/null; then
              echo "=== Load Test Results ===" >> results_summary.txt
              if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
                echo "Test Case: ${{ github.event.inputs.test_case || 'All Tests' }}" >> results_summary.txt
                echo "Duration: ${{ github.event.inputs.duration_minutes || '2' }} minutes" >> results_summary.txt
              else
                echo "Test Case: All Tests (auto-triggered)" >> results_summary.txt
                echo "Duration: $DURATION_MIN minutes" >> results_summary.txt
              fi
              echo "Trigger: ${{ github.event_name }}" >> results_summary.txt
              echo "Date: $(date)" >> results_summary.txt
              echo "" >> results_summary.txt
              # Parse trend metrics; k6's summary export names the 95th
              # percentile key "p(95)", not "p95"
              jq -r '.metrics | to_entries[] | select(.key | contains("completion_") or contains("conversation_") or contains("response_") or contains("guest_") or contains("refresh_")) | "\(.key): avg=\(.value.avg // "N/A"), min=\(.value.min // "N/A"), max=\(.value.max // "N/A"), p95=\(.value["p(95)"] // "N/A")"' "$LATEST_FILE" >> results_summary.txt 2>/dev/null || echo "Failed to parse detailed metrics" >> results_summary.txt
              # Sum the error counters across all test types, since a single
              # run may exercise more than one
              ERROR_COUNT=$(jq -r '(.metrics.completion_errors.count // 0) + (.metrics.conversation_errors.count // 0) + (.metrics.response_errors.count // 0)' "$LATEST_FILE" 2>/dev/null || echo "0")
              echo "" >> results_summary.txt
              echo "Error Count: $ERROR_COUNT" >> results_summary.txt
              # Set output for the failure gate below
              echo "error_count=$ERROR_COUNT" >> "$GITHUB_OUTPUT"
              # Display summary
              echo "=== Test Results Summary ==="
              cat results_summary.txt
            else
              echo "jq not available, skipping detailed metrics parsing"
            fi
          else
            echo "No results file found"
            # Sentinel value so the failure gate below trips
            echo "error_count=999" >> "$GITHUB_OUTPUT"
          fi
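      # The parsing above assumes the results JSON follows k6's
      # --summary-export shape, roughly like the sketch below. The metric
      # name and numbers are made up for illustration; the jq filter matches
      # any key containing completion_/conversation_/response_/guest_/refresh_.
      #
      #   {
      #     "metrics": {
      #       "completion_duration": { "avg": 812.4, "min": 120.0,
      #                                "max": 4031.7, "p(95)": 2210.5 },
      #       "completion_errors": { "count": 0 }
      #     }
      #   }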
      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: loadtest-results-${{ github.event_name == 'workflow_dispatch' && github.event.inputs.test_case || 'all-tests' }}-${{ github.run_number }}
          path: tests/results/
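      # Artifacts can be pulled down afterwards with the GitHub CLI (a
      # sketch; <run-id> identifies the run to inspect, and the artifact
      # name follows the pattern configured above):
      #
      #   gh run download <run-id> -n loadtest-results-all-tests-27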
      - name: Fail job if errors detected
        if: steps.parse_results.outputs.error_count != '0'
        run: |
          echo "❌ Load test detected ${{ steps.parse_results.outputs.error_count }} errors"
          exit 1
      - name: Success notification
        if: success()
        run: |
          echo "βœ… Load test completed successfully!"
          echo "Trigger: ${{ github.event_name }}"
          if [[ "${{ github.event_name }}" == "workflow_dispatch" && -n "${{ github.event.inputs.test_case }}" ]]; then
            echo "Test case: ${{ github.event.inputs.test_case }}"
          else
            echo "Test case: All tests"
          fi
          echo "Error count: ${{ steps.parse_results.outputs.error_count }}"