# Workflow file for run "Migrate Brute Force Test to CVSS 4.0" (#76)

---
# Security test suite for OpenMRS: spins up an instance via Docker Compose,
# runs pytest-based security tests, extracts CVSS scores, publishes a
# dashboard to GitHub Pages, and comments results on pull requests.
name: Security Tests

on:
  push:
    branches: [ main, develop ]
  pull_request:
    branches: [ main, develop ]
  schedule:
    - cron: "0 0 * * *"  # nightly run at midnight UTC
  workflow_dispatch:

jobs:
  security-tests:
    runs-on: ubuntu-latest
    timeout-minutes: 15
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      # Starting early since OpenMRS takes time to spin up; this allows it to
      # initialize in the background while dependencies are being installed.
      - name: Start an OpenMRS instance
        run: docker compose -f docker-compose.yml up -d

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          # Quoted so YAML does not read it as the float 3.11
          python-version: "3.11"

      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip
          echo "Searching for requirements.txt files..."
          find . -name "requirements.txt" -type f | while read req; do
            echo "Installing dependencies from: $req"
            pip install -r "$req"
          done

      - name: Install Playwright browsers
        run: |
          python -m playwright install chromium

      # Poll the health endpoint until the instance reports started (HTTP 200).
      # The job-level timeout-minutes bounds this loop if the instance never
      # comes up.
      - name: Wait for the OpenMRS instance to start
        id: wait-for-omrs-instance
        run: |
          while [[ "$(curl -s -o /dev/null -w '%{http_code}' http://localhost/openmrs/health/started)" != "200" ]]; do
            curl -i http://localhost/openmrs/health/started
            sleep 10
          done

      - name: Download previous test results DB
        uses: dawidd6/action-download-artifact@v3
        # continue-on-error is a step-level key (not a `with:` input):
        # the first run won't have a DB artifact yet.
        continue-on-error: true
        with:
          name: test-results-db
          path: .

      - name: Run Security Tests
        if: always() && (steps.wait-for-omrs-instance.outcome == 'success')
        continue-on-error: true  # reporting steps below still run on failures
        run: |
          # Run pytest and capture ALL output (including CVSS scores)
          pytest tests/ \
            --html=report.html \
            --self-contained-html \
            --json-report \
            --json-report-file=report.json \
            -v -s 2>&1 | tee test_output.log

      - name: Extract CVSS Scores and Generate Dashboard
        if: always()
        run: python scripts/generate_security_dashboard.py

      # Persist the rolling results DB so the next run can trend against it.
      - name: Upload test results DB
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: test-results-db
          path: test_results.db
          retention-days: 90
          overwrite: true

      - name: Upload Security Dashboard
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: security-dashboard
          path: security_dashboard.html
          retention-days: 30

      - name: Upload HTML Report
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: security-test-report-html
          path: report.html
          retention-days: 30

      - name: Upload JSON Results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: security-test-report-json
          path: |
            report.json
            security_results.json
          retention-days: 30

      - name: Prepare GitHub Pages Deployment
        if: github.ref == 'refs/heads/main'
        run: |
          mkdir -p gh-pages
          cp security_dashboard.html gh-pages/index.html
          cp report.html gh-pages/detailed-report.html
          cp security_results.json gh-pages/results.json

      - name: Deploy to GitHub Pages
        if: github.ref == 'refs/heads/main'
        uses: peaceiris/actions-gh-pages@v3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./gh-pages
          cname: cvss-report.openmrs.org
          keep_files: false

      # Post a summary table (with CVSS scores) on the pull request.
      - name: Comment on PR
        if: ${{ github.event_name == 'pull_request' && github.repository_owner == 'openmrs' }}
        continue-on-error: true
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            let summary = '## πŸ”’ Security Test Results\n\n';
            if (fs.existsSync('security_results.json')) {
              const data = JSON.parse(fs.readFileSync('security_results.json', 'utf8'));
              summary += `- **Total Tests:** ${data.total_tests}\n`;
              summary += `- **Completed:** βœ… ${data.passed}\n`;
              summary += `- **Errors:** ❌ ${data.failed}\n\n`;
              summary += '### Test Details with CVSS Scores\n\n';
              summary += '| Test | Execution | CVSS Score | Severity | Duration |\n';
              summary += '|------|-----------|------------|----------|----------|\n';
              data.tests.forEach(test => {
                const icon = test.outcome === 'passed' ? 'βœ…' : '❌';
                const status = test.outcome === 'passed' ? 'Completed' : 'Error';
                const name = test.name.replace(/_/g, ' ');
                const cvss = test.cvss_score ? test.cvss_score.toFixed(1) : 'N/A';
                let severity = 'N/A';
                if (test.cvss_score) {
                  if (test.cvss_score >= 9.0) severity = 'πŸ”΄ CRITICAL';
                  else if (test.cvss_score >= 7.0) severity = '🟠 HIGH';
                  else if (test.cvss_score >= 4.0) severity = '🟑 MEDIUM';
                  else severity = '🟒 LOW';
                }
                summary += `| ${name} | ${icon} ${status} | ${cvss} | ${severity} | ${test.duration.toFixed(2)}s |\n`;
              });
              summary += '\nπŸ“Š [Download Security Dashboard](../actions/runs/${{ github.run_id }})\n';
            } else {
              summary += '❌ No test results available\n';
            }
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: summary
            });

      - name: Capture Server Logs
        if: always()
        uses: jwalton/gh-docker-logs@v2
        with:
          dest: "./logs"

      - name: Upload Logs as Artifact
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: server-logs
          path: "./logs"
          retention-days: 2
          overwrite: true