diff --git a/.github/workflows/README.md b/.github/workflows/README.md new file mode 100644 index 0000000000..aa597d2730 --- /dev/null +++ b/.github/workflows/README.md @@ -0,0 +1,803 @@ +# GitHub Actions Workflows Documentation + +This document provides an overview of all GitHub Actions workflows in the OpenMQTTGateway project. + + +## Architecture Overview + +The workflow system is organized in two layers: + +### **Main Workflows** (User-facing triggers) +Entry points triggered by user actions, schedules, or events: +- `build.yml` - CI validation on push/PR +- `build_and_docs_to_dev.yml` - Daily development builds +- `release.yml` - Production releases +- `manual_docs.yml` - Documentation deployment +- `security-scan.yml` - Security vulnerability scanning +- `stale.yml` - Issue management + +### **Task Workflows** (Reusable components) +Parameterized building blocks called by main workflows: +- `task-build.yml` - Configurable firmware build +- `task-docs.yml` - Configurable documentation build +- `task-lint.yml` - Configurable code formatting check +- `task-security-scan.yml` - Configurable security scanning + + + +## Workflow Overview Table + +| Workflow | Trigger | Purpose | Artifacts | +|----------|---------|---------|-----------| +| `build.yml` | Push, Pull Request | CI Build Validation | Firmware binaries (7 days) | +| `build_and_docs_to_dev.yml` | Daily Cron, Manual | Development Builds + Docs | Firmware + Docs deployment | +| `release.yml` | Release Published | Production Release | Release assets + Docs | +| `manual_docs.yml` | Manual, Workflow Call | Documentation Only | GitHub Pages docs | +| `security-scan.yml` | Weekly Cron, Manual | Security Vulnerability Scanning | SARIF, SBOM reports | +| `stale.yml` | Daily Cron | Issue Management | None | +| **`task-build.yml`** | **Workflow Call** | **Reusable Build Logic** | **Configurable** | +| **`task-docs.yml`** | **Workflow Call** | **Reusable Docs Logic** | **GitHub Pages** | +| **`task-lint.yml`** | **Workflow Call** | **Reusable Lint Logic** | **None** | +| **`task-security-scan.yml`** | **Workflow Call** | **Reusable Security Scan** | **SARIF, SBOM, Reports** | + + + + +## Workflow Dependencies and Call Chain + +```mermaid + +flowchart TD + %% Triggers + subgraph triggers ["🎯 Triggers"] + push["Push"] + pr["Pull Request"] + release["Release Published"] + cron1["Cron: Daily 00:00 UTC"] + manual["Manual Trigger"] + cron3["Cron: Weekly Monday 02:00 UTC"] + cron2["Cron: Daily 00:30 UTC"] + + end + +subgraph github_workflows ["📋 GitHub Workflows"] + %% Main Workflows + subgraph main ["📋 Main Workflows"] + build["build.yml
CI Build"] + release_wf["release.yml
Production Release"] + build_dev["build_and_docs_to_dev.yml
Dev Builds"] + security_scan["security-scan.yml
Security Scan"] + stale["stale.yml
Issue Management"] + manual_docs["manual_docs.yml
Docs Only"] + end + + %% Task Workflows + subgraph tasks ["⚙️ Task Workflows"] + task_build["task-build.yml
Build Firmware"] + task_lint["task-lint.yml
Code Format"] + task_security["task-security-scan.yml
Security Scan"] + task_docs["task-docs.yml
Build & Deploy Docs"] + + end +end + +subgraph ci_scripts ["🔧 CI Scripts"] + %% CI Scripts Layer + subgraph bash ["🔧 Orchestrator"] + ci_main["ci.sh
(main dispatcher)"] + ci_build_script["ci_build.sh
(build orchestrator)"] + ci_site_script["ci_site.sh
(docs orchestrator)"] + ci_qa_script["ci_qa.sh
(lint orchestrator)"] + ci_security_script["ci_security.sh
(security orchestrator)"] + end + + %% Sub-Scripts Layer + subgraph sub_scripts ["⚙️ Workers"] + ci_build_fw["ci_build_firmware.sh
(PlatformIO build)"] + ci_prep_art["ci_prepare_artifacts.sh
(artifact packaging)"] + gen_board["generate_board_docs
(npm package)"] + gen_wu["gen_wu
(npm package)"] + clang_fmt["clang-format
(code formatter)"] + trivy["Trivy
(vulnerability scanner)"] + end +end + + %% Trigger connections + push -->|"all or subset
(depends on branch)"| build + pr -->|"all
(always full)"| build + + release --> release_wf + manual --> manual_docs + cron1 --> build_dev + cron2 --> stale + cron3 --> security_scan + + %% Main workflow to task workflow connections + build --> task_build + build_dev --> task_build + build_dev --> task_docs + release_wf --> task_build + release_wf --> task_docs + manual_docs --> task_docs + security_scan --> task_security + + %% Task workflow internal dependencies + task_build --> task_lint + task_build --> task_security + + %% Task workflows to CI scripts + task_build -->|"ci.sh build ..."| ci_main + task_docs -->|"ci.sh site ..."| ci_main + task_lint -->|"ci.sh qa ..."| ci_main + task_security -->|"ci.sh security ..."| ci_main + + %% CI main dispatcher to orchestrators + ci_main -->|"route: build"| ci_build_script + ci_main -->|"route: site"| ci_site_script + ci_main -->|"route: qa"| ci_qa_script + ci_main -->|"route: security"| ci_security_script + + %% Orchestrators to workers + ci_build_script --> ci_build_fw + ci_build_script --> ci_prep_art + + ci_site_script --> gen_board + ci_site_script --> gen_wu + + ci_qa_script --> clang_fmt + + ci_security_script --> trivy + + %% Styling + classDef triggerStyle fill:#e1f5ff,stroke:#0066cc,stroke-width:2px + classDef mainStyle fill:#fff4e6,stroke:#ff9900,stroke-width:2px + classDef taskStyle fill:#e6f7e6,stroke:#00aa00,stroke-width:2px + classDef ciStyle fill:#ffe6f0,stroke:#cc0066,stroke-width:2px + classDef subStyle fill:#f0e6ff,stroke:#9933ff,stroke-width:2px + + class push,pr,release,manual,cron1,cron2,cron3 triggerStyle + class build,release_wf,manual_docs,build_dev,stale,security_scan mainStyle + class task_build,task_docs,task_lint,task_security taskStyle + class ci_main,ci_build_script,ci_site_script,ci_qa_script,ci_security_script ciStyle + class ci_build_fw,ci_prep_art,gen_board,gen_wu,clang_fmt,trivy subStyle + + + style github_workflows stroke:#6A7BD8,stroke-dasharray:6 4,stroke-width:1.8px,fill:#fbfbfc + style main stroke:#6A7BD8,stroke-dasharray:6 4,stroke-width:0.6px,fill:#fcfdff + style tasks stroke:#6A7BD8,stroke-dasharray:6 4,stroke-width:0.6px,fill:#fcfdff + + style ci_scripts stroke:#FF9A3C,stroke-dasharray:6 4,stroke-width:1.8px,fill:#fffaf5 + style bash stroke:#FF9A3C,stroke-dasharray:6 4,stroke-width:0.6px,fill:#fffaf5 + style sub_scripts stroke:#FF9A3C,stroke-dasharray:6 4,stroke-width:0.6px,fill:#fffaf5 + + style triggers fill:none,stroke:none + + linkStyle 0,1,2,3,4,5,6 stroke:#0066cc,stroke-width:3px; + linkStyle 7,8,9,10,11,12,13,14,15 stroke:orange, stroke-width:2px; + linkStyle 16,17,18,19,20,21,22,23,24,25,26,27,28,29 stroke:red, stroke-width:2px; + +``` + +### Workflow Relationships + +**Main → Task Mapping**: +- `build.yml` → calls `task-build.yml` (also contains inline documentation job) +- `build_and_docs_to_dev.yml` → calls `task-build.yml` + `task-docs.yml` +- `release.yml` → calls `task-build.yml` + `task-docs.yml` +- `manual_docs.yml` → calls `task-docs.yml` +- `security-scan.yml` → calls `task-security-scan.yml` +- `stale.yml` → standalone (no dependencies) + +**Task → CI Script Mapping**: +- `task-build.yml` → `ci.sh build --version --mode --deploy-ready` + - Routes to: `ci_build.sh` → `ci_build_firmware.sh`, `ci_prepare_artifacts.sh` + - Output: `generated/artifacts/firmware_build/` +- `task-docs.yml` → `ci.sh site --mode --version --url-prefix` + - Routes to: `ci_site.sh` → `generate_board_docs` (npm), `gen_wu` (npm), VuePress + - Output: `generated/site/` +- `task-lint.yml` → `ci.sh qa --check --source --extensions --clang-format-version` + - 
Routes to: `ci_qa.sh` → `clang-format` +- `task-security-scan.yml` → `ci.sh security --scan-type --severity --generate-sbom` + - Routes to: `ci_security.sh` → Trivy (vulnerability scanner) + - Output: `generated/reports/` (SARIF, JSON, SBOM) + +**Job Dependencies**: +- `build_and_docs_to_dev.yml`: prepare → build (task) → deploy & documentation (task) +- `release.yml`: prepare → build (task) → deploy → documentation (task) + +**Script Execution Flow**: +``` +GitHub Action (task-*.yml) + ↓ +./scripts/ci.sh [OPTIONS] ← Main dispatcher + ↓ +./scripts/ci_.sh ← Command orchestrator + ↓ +./scripts/ci_*.sh / *.py ← Worker scripts +``` + +--- + +## Detailed Workflow Documentation + +### 1. `build.yml` - Continuous Integration Build + +**Purpose**: Validates that code changes compile successfully with intelligent environment selection based on branch importance. + +**Triggers**: +- **Push**: Every commit pushed to any branch +- **Pull Request**: Every PR creation or update + +**What it does**: +1. **Determine build scope**: Selects environment list based on branch name + - **Full build** (`all` environment): All PRs and Push on important branches (development, master, edge, stable, release/*, hotfix/*) + - **Quick build** (`ci` subset environment): All PRs and Push on non-critical branches +2. **Build job**: Calls `task-build.yml` with appropriate environment set + - Builds firmware in parallel +3. **Documentation job**: Inline job that validates docs build (doesn't deploy) + - Downloads common config from theengs.io + - Runs `npm install` and `npm run docs:build` + - Uses Node.js 14.x + +**Technical Details**: +- **Calls**: `task-build.yml` only (documentation is inline) +- Python version: 3.13 (for build job) +- Build strategy: Parallel matrix via task workflow +- Artifact retention: 7 days +- Development OTA: Enabled (`enable-dev-ota: true`) +- Environment selection logic: + - **Full build** (`all`): All Pull Requests + branches: development, master, edge, stable, release/*, hotfix/* + - **Quick build** (`ci` subset): All other feature branches + +**Outputs**: +- Firmware binaries for selected environments +- No documentation deployment (validation only) + +**Use Case**: Ensures no breaking changes before merge. Fast feedback for feature branches (~10 min), comprehensive validation for PRs and critical branches (~40 min). + +**Execution Context**: Runs for ALL contributors on ALL branches with smart scaling based on branch importance. + +--- + +### 2. `build_and_docs_to_dev.yml` - Development Deployment Pipeline + +**Purpose**: Creates nightly development builds and deploys documentation to the `/dev` subdirectory for testing. + +**Triggers**: +- **Schedule**: Daily at midnight UTC (`0 0 * * *`) +- **Manual**: Via workflow_dispatch button + +**What it does**: +1. **Prepare job**: Generates 6-character short SHA +2. **Handle-firmwares job**: Calls `task-build.yml` with development parameters + - Builds firmware for **all environments** in parallel + - Enables development OTA updates with SHA commit version + - Artifact retention: 1 day +3. 
**Handle-documentation job**: Calls `task-docs.yml` with development parameters + - Deploys to `/dev` subdirectory on GitHub Pages + - Uses short SHA as version identifier + - Runs PageSpeed Insights on dev site + +**Technical Details**: +- **Calls**: `task-build.yml` + `task-docs.yml` +- Repository restriction: Hardcoded to `1technophile` owner only +- Version: 6-character short SHA (e.g., `abc123`) +- Documentation URL prefix: `/dev/` +- GitHub Pages destination: `dev` subdirectory +- PageSpeed URL: `https://docs.openmqttgateway.com/dev/` + +**Workflow Parameters**: +- Build: `enable-dev-ota: true`, `version-tag: `, `artifact-retention-days: 1` +- Docs: `mode: "dev"`, `version: `, `url-prefix: "/dev/"`, `destination-dir: "dev"`, `run-pagespeed: true` + +**Outputs**: +- Firmware binaries with `-firmware.bin` suffix (1 day retention) +- Bootloader and partition binaries +- Documentation deployed to `docs.openmqttgateway.com/dev/` + +**Version Labeling**: +- Git SHA (6 chars) injected into firmware via `version-tag` +- Docs display short SHA as version + +**Use Case**: Daily bleeding-edge builds for early adopters and testing. Preview documentation changes before production release. + +**Execution Context**: Only runs on `1technophile` repository owner. Forks will skip this workflow automatically. + +--- + +### 3. `release.yml` - Production Release Pipeline + +**Purpose**: Creates official release builds when a new version is published. + +**Triggers**: +- **Release**: When a GitHub release is published (tagged) + +**What it does**: +1. **Prepare job**: Extracts version tag and release info from GitHub event +2. **Build job**: Calls `task-build.yml` with production parameters + - Builds firmware for **all environments** in parallel + - Injects release tag version into firmware + - Artifact retention: 90 days +3. **Deploy job**: Downloads and uploads release assets + - Downloads all firmware artifacts from build job + - Uploads binaries to GitHub Release +4. **Documentation job**: Calls `task-docs.yml` for production docs + - Deploys to root (`/`) of GitHub Pages + - Uses release tag as version + +**Technical Details**: +- **Calls**: `task-build.yml` + `task-docs.yml` +- Build flag: Standard (no DEVELOPMENTOTA) +- Artifact retention: 90 days +- Deploy uses `bgpat/release-asset-action` to attach assets to GitHub Release + +**Workflow Parameters**: +- Build: `enable-dev-ota: false`, `version-tag: `, `artifact-retention-days: 90` +- Docs: `mode: "prod"`, `version: `, `url-prefix: "/"`, `destination-dir: "."` + +**Outputs**: +- Production firmware binaries attached to GitHub Release (all prepared by `task-build.yml` with `--deploy-ready`) +- Production documentation at `docs.openmqttgateway.com/` + +**Version Labeling**: +- Git tag (e.g., `v1.2.3`) injected into firmware + +**Workflow Chain**: +``` +prepare → build (task-build.yml) → deploy → documentation (task-docs.yml) +``` + +**Use Case**: Official releases for end users. Stable, versioned firmware. + +**Execution Context**: Triggered by repository maintainers creating releases. + +--- + +### 4. `manual_docs.yml` - Documentation Deployment + +**Purpose**: Entry point for standalone documentation deployment to GitHub Pages. + +**Triggers**: +- **Manual**: Via workflow_dispatch button +- **Workflow Call**: Can be called by other workflows (legacy compatibility) + +**What it does**: +1. Calls `task-docs.yml` with production parameters +2. 
Deploys to root directory (`/`) of GitHub Pages + +**Technical Details**: +- **Calls**: `task-docs.yml` +- Mode: `prod` +- Version: Uses latest release tag by default (or provided input) +- URL prefix: `/` +- Destination: Root of GitHub Pages +- PageSpeed: Optional (disabled by default) + +**Outputs**: +- Production documentation at `docs.openmqttgateway.com/` +- Custom domain: `docs.openmqttgateway.com` (via CNAME) + +**Use Case**: Standalone documentation updates without full release process. + +**Execution Context**: Manual trigger or legacy workflow calls. + +--- + +### 5. `security-scan.yml` - Security Vulnerability Scanning + +**Purpose**: Scans the project for security vulnerabilities and generates Software Bill of Materials (SBOM) for supply chain security. + +**Triggers**: +- **Schedule**: Weekly on Monday at 02:00 UTC (`0 2 * * 1`) +- **Manual**: Via workflow_dispatch button with input parameters + +**Manual Trigger Inputs**: +- `severity`: Severity levels to scan (choices: UNKNOWN, LOW, MEDIUM, HIGH, CRITICAL; default: HIGH,CRITICAL) +- Allows filtering results to specific severity levels + +**What it does**: +1. **Prepare job**: Sets up environment for scanning +2. **Security scan job**: Calls `task-security-scan.yml` with parameters + - Runs Trivy vulnerability scanner on filesystem + - Filters results by severity level + - Generates SARIF format for GitHub Security tab integration + - Creates SBOM in CycloneDX and SPDX formats + - Uploads findings to GitHub Security tab (code scanning dashboard) +3. **Artifact upload job**: Stores generated reports and SBOM + - SARIF results for GitHub integration + - SBOM files for supply chain tracking + - Retention: 90 days + +**Technical Details**: +- **Calls**: `task-security-scan.yml` +- Vulnerability scanner: Trivy (vulnerability database updated automatically) +- Report formats: SARIF (GitHub), JSON (detailed), Markdown (summary) +- SBOM formats: CycloneDX and SPDX (standard formats) +- Failure behavior: Does NOT fail the workflow on vulnerabilities (exit-code: 0) +- GitHub Security tab: Auto-uploads SARIF for code scanning dashboard visibility + +**Workflow Parameters**: +- `scan-type: "fs"` (filesystem scan) +- `severity: "HIGH,CRITICAL"` (default, or manual input) +- `generate-sbom: true` (always enabled) +- `upload-to-security-tab: true` (GitHub Security integration) + +**Outputs**: +- SARIF report: `generated/reports/trivy-results.sarif` (GitHub Security tab) +- JSON report: `generated/reports/trivy-results.json` (detailed findings) +- Markdown summary: `generated/reports/security-summary.md` +- SBOM: `generated/reports/sbom/sbom.cyclonedx.json` + `sbom.spdx.json` +- Artifacts retained for 90 days + +**Use Case**: Regular security audits, compliance tracking, vulnerability management, supply chain security. + +**Execution Context**: Weekly automated scans + manual on-demand scanning for developers. + +--- + +### 6. `stale.yml` - Issue and PR Management + +**Purpose**: Automatically closes inactive issues and pull requests to reduce maintenance burden. + +**Triggers**: +- **Schedule**: Daily at 00:30 UTC (`30 0 * * *`) + +**What it does**: +1. Marks issues/PRs as stale after 90 days of inactivity +2. Closes stale issues/PRs after 14 additional days +3. Exempts issues labeled "enhancement" + +**Configuration**: +- Stale after: 90 days +- Close after: 14 days (104 days total) +- Stale label: `stale` +- Exempt labels: `enhancement` + +**Messages**: +- Stale: "This issue is stale because it has been open for 90 days with no activity." 
+- Close: "This issue was closed because it has been inactive for 14 days since being marked as stale." + +**Use Case**: Housekeeping. Reduces backlog of abandoned issues. + +**Execution Context**: Automated maintenance by GitHub bot. + +--- + +## Task Workflows (Reusable Components) + +### 7. `task-build.yml` - Reusable Build Workflow + +**Purpose**: Parameterized firmware build logic used by multiple workflows. + +**Trigger**: `workflow_call` only (called by other workflows) + +**Parameters**: +- `python-version`: Python version to use (default: '3.13') +- `pio-version`: PlatformIO version to use (default: 'v6.1.18') +- `environment-set`: Which set of environments to build: 'all' or 'ci' (default: 'all') +- `enable-dev-ota`: Enable development OTA builds (default: false) +- `version-tag`: Optional version tag to pass to ci.sh build - omitted if empty (default: '') +- `artifact-retention-days`: Number of days to retain build artifacts (default: 7) + +**What it does**: +1. **Lint code**: Runs `task-lint.yml` to check code formatting (main directory, .h and .ino files) +2. **Load environments**: Reads environment list from `environments.json` based on `environment-set` input (`all` or `ci`) +3. **Install PlatformIO**: Uses `uv` to install the `pio-version` input (custom `pioarduino/platformio-core` fork) +4. **Matrix build**: Builds selected environments in parallel, blocking on lint job completion +5. **Build execution**: Calls unified `ci.sh build [OPTIONS]`: + - ``: Target hardware (e.g., `esp32dev-ble`) + - `--version `: Version to inject (SHA for dev, tag for prod) + - `--mode `: Build mode (enables/disables OTA) + - `--deploy-ready`: Prepare artifacts for deployment + - `--output `: Output directory for artifacts (default: `generated/artifacts/`) + +**Command Flow**: +```bash +./scripts/ci.sh build esp32dev-ble --version v1.8.0 --mode prod --deploy-ready + ↓ + ├─→ ci_build.sh (orchestrator) + │ ├─→ ci_build_firmware.sh esp32dev-ble [--dev-ota] + │ └─→ ci_prepare_artifacts.sh esp32dev-ble [--deploy] → outputs to generated/artifacts/ +``` + +**Technical Details**: +- Runs on: Ubuntu latest +- PlatformIO version: Configurable via `pio-version` input (default: v6.1.18, custom fork: `pioarduino/platformio-core`) +- Python package manager: `uv` (astral-sh/setup-uv@v6) +- Environment sets: `all` (complete set) or `ci` (subset for quick validation) +- Strategy: Matrix with fail-fast: false (builds complete even if one environment fails) +- Dependencies: Lint job must pass before build matrix starts +- Main orchestrator: `ci.sh` → `ci_build.sh` → sub-scripts + +**Callers**: +- `build.yml` (CI validation) +- `build_and_docs_to_dev.yml` (development builds) +- `release.yml` (production releases) + +--- + +### 8. `task-docs.yml` - Reusable Documentation Workflow + +**Purpose**: Parameterized documentation build and deployment logic. + +**Trigger**: `workflow_call` only (called by other workflows) + +**Parameters**: +- `mode`: Documentation mode (`prod` or `dev`, default: `prod`) +- `version`: Version string for docs (default: `auto` → tag or short SHA depending on caller) +- `url-prefix`: Base URL path (default: `/` for prod, `/dev/` for dev callers) +- `destination-dir`: Deploy directory on GitHub Pages (default: `.`) +- `run-pagespeed`: Run PageSpeed Insights after deploy (default: false) +- `pagespeed-url`: URL to test with PageSpeed (default: `https://docs.openmqttgateway.com/`) + +**What it does**: +1. **Build documentation**: Calls unified `ci.sh site --mode --version --url-prefix ` +2. 
**Deploy**: Publishes to GitHub Pages using `peaceiris/actions-gh-pages@v3` +3. **PageSpeed test**: Optionally runs performance audit on the provided URL + +**Command Flow**: +```bash +./scripts/ci.sh site --mode prod --version v1.2.3 --url-prefix / + ↓ + └─→ ci_site.sh (orchestrator) + └─→ npm run docs:build (VuePress compilation) +``` + +**Callers**: +- `build_and_docs_to_dev.yml` (dev docs to `/dev`) +- `release.yml` (production docs to `/`) +- `manual_docs.yml` (manual production docs) + +--- + +### 9. `task-lint.yml` - Reusable Lint Workflow + +**Purpose**: Parameterized code formatting validation for consistent code style. + +**Trigger**: `workflow_call` only (called by other workflows) + +**Parameters**: +- `source`: Source directory to lint (default: 'main') +- `extensions`: File extensions to check, comma-separated (default: 'h,ino') +- `clang-format-version`: clang-format version to use (default: '9') + +**What it does**: +1. Checks out code +2. Installs clang-format (specified version) +3. Runs unified `ci.sh qa [OPTIONS]`: + - `--check`: Validation mode (exit on violations) + - `--fix`: Auto-fix formatting issues + - `--source `: Directory to lint + - `--extensions `: File extensions (comma-separated) + - `--clang-format-version `: Formatter version +4. Fails if formatting violations found + +**Command Flow**: +```bash +./scripts/ci.sh qa --check --source main --extensions h,ino --clang-format-version 9 + ↓ + └─→ ci_qa.sh (formatter) + └─→ clang-format (checks/fixes code style) +``` + +**Technical Details**: +- Runs on: Ubuntu latest +- Script: `ci_qa.sh` (custom formatting check script) +- Formatter: `clang-format-$version` installed via apt-get +- Default scope: `main` directory only (not lib) +- Default file types: `.h` and `.ino` (not `.cpp`) +- Strategy: Single sequential job (not parallelized) + +**Callers**: +- `build.yml` (inline lint check before build) +- Can be called by other workflows as needed + +**Default Behavior**: If called without parameters, lints `main` directory for `.h` and `.ino` files only. + + + +### 10. `task-security-scan.yml` - Reusable Security Scan Workflow + +**Purpose**: Parameterized security vulnerability scanning and SBOM generation logic. + +**Trigger**: `workflow_call` only (called by other workflows) + +**Parameters**: +- `scan-type`: Type of scan: 'fs' (filesystem), 'config' (configuration), or 'image' (container) (default: 'fs') +- `severity`: Severity levels to report (comma-separated: UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL) (default: 'HIGH,CRITICAL') +- `scan-path`: Path to scan (default: '.') +- `exit-code`: Exit code when vulnerabilities found (0=continue, 1=fail) (default: '0') +- `upload-to-security-tab`: Upload SARIF to GitHub Security tab (default: true) +- `generate-sbom`: Generate SBOM artifacts (default: true) + +**What it does**: +1. **Install Trivy**: Retrieves and installs Trivy vulnerability scanner +2. **Run security scan**: Calls unified `ci.sh security` with parameters: + - `--scan-type `: Target type + - `--severity `: Filter by severity + - `--scan-path `: Directory to scan + - `--generate-sbom`: Generate SBOM in CycloneDX and SPDX formats + - `--exit-code <0|1>`: Fail behavior on critical vulnerabilities + - `--upload-to-security-tab`: Upload SARIF to GitHub +3. **Upload artifacts**: Stores reports and SBOM for later download +4. 
**GitHub Security integration**: SARIF automatically appears in Security tab + +**Command Flow**: +```bash +./scripts/ci.sh security --scan-type fs --severity HIGH,CRITICAL --generate-sbom --upload-to-security-tab + ↓ + └─→ ci_security.sh (security orchestrator) + └─→ Trivy (vulnerability scanner) + ├─→ Generate SARIF, JSON, summary + ├─→ Generate SBOM (CycloneDX, SPDX) + └─→ Upload to GitHub Security tab +``` + +**Technical Details**: +- Runs on: Ubuntu latest +- Scanner: Trivy (latest version auto-installed) +- Report formats: SARIF (GitHub integration), JSON (detailed), Markdown (summary) +- SBOM formats: CycloneDX and SPDX (industry standards) +- GitHub Security tab: Auto-uploads SARIF for code scanning dashboard +- Strategy: Single sequential job (not parallelized) +- Artifact retention: As configured by caller + +**Output Files**: +- `generated/reports/trivy-results.sarif` - SARIF format (GitHub Security tab upload) +- `generated/reports/trivy-results.json` - JSON format (detailed results) +- `generated/reports/security-summary.md` - Human-readable summary +- `generated/reports/sbom/sbom.cyclonedx.json` - CycloneDX SBOM +- `generated/reports/sbom/sbom.spdx.json` - SPDX SBOM + +**Callers**: +- `security-scan.yml` (weekly + manual scanning) +- Can be called by other workflows for custom security workflows + +--- + +## Environment Configuration + +### Centralized Environment Management + +All build environments are defined in `.github/workflows/environments.json`: + +```json +{ + "environments": { + "all": [ ...all environments ], + "ci": [...a subset of environments] + } + } +} +``` + +**Environment Sets**: +- `all`: Complete production set for releases and comprehensive validation +- `ci`: Representative subset for fast CI feedback + + +### Environment Categories + +**ESP32 Family** (~50 environments): +- Standard: `esp32dev-*` variants +- ESP32-S3: `esp32s3-*` variants +- ESP32-C3: `esp32c3-*` variants +- Specialized boards: M5Stack, Heltec, LilyGO, Theengs + +**ESP8266 Family** (~20 environments): +- NodeMCU: `nodemcuv2-*` variants +- Sonoff: `sonoff-*` variants +- Generic: `esp8266-*` variants + +**Specialized Boards** (~13 environments): +- Theengs Plug +- Theengs Bridge +- RF Bridge variants +- Custom board configurations + +--- + +## Configuration Variables + +### Repository Restrictions + +**Development Builds** (`build_and_docs_to_dev.yml`): +- Hardcoded restriction: `github.repository_owner == '1technophile'` +- Only runs for the main repository owner +- Prevents accidental deployments from forks +- No configuration variable needed + +**Release Builds** (`release.yml`): +- No repository restrictions +- Runs on any fork when a release is published +- Deploy step requires proper GitHub token permissions + +**Documentation** (`manual_docs.yml`): +- No repository restrictions +- Can be triggered manually from any fork +- Requires GitHub Pages to be configured + +--- + +## Glossary + +- **Environment**: A specific hardware board + gateway combination (e.g., `esp32dev-ble`) +- **Matrix Build**: Parallel execution of builds across multiple environments +- **Artifact**: Build output stored temporarily for download (firmware binaries) +- **workflow_call**: GitHub Actions feature for calling one workflow from another +- **workflow_dispatch**: Manual trigger button for workflows +- **Task Workflow**: Reusable workflow component with parameterized inputs +- **Main Workflow**: Entry point workflow triggered by events or schedules +- **CNAME**: Custom domain configuration for GitHub Pages 
+- **OTA**: Over-The-Air firmware update capability +- **SHA**: Git commit hash used for version identification in dev builds + +--- + +## Maintenance Notes + +### CI/CD Script Architecture + +**Main Entry Point**: `ci.sh` (unified interface) +- Commands: `build`, `site`, `qa`, `all` +- Routes to specialized orchestrators +- Provides consistent CLI across all operations + +**Build System** (`ci.sh build`): +- PlatformIO 6.1.18 from custom fork: `pioarduino/platformio-core` +- Python package manager: `uv` for fast dependency installation +- Orchestrator: `ci_build.sh` + - Worker: `ci_build_firmware.sh` (PlatformIO compilation) + - Worker: `ci_prepare_artifacts.sh` (artifact packaging) + +**Documentation System** (`ci.sh site`): +- Documentation framework: VuePress +- Orchestrator: `ci_site.sh` + - Worker: `generate_board_docs` (npm package - auto-generate board pages) + - Worker: `gen_wu` (npm package - WebUpdater manifest) + - External: Common config from theengs.io + +**Code Quality** (`ci.sh qa`): +- Orchestrator: `ci_qa.sh` + - Worker: `clang-format` version 9 + - Worker: `shellcheck` for shell scripts + - Default scope: `main` directory, `.h` and `.ino` files + +**Security Scanning System** (`ci.sh security`): +- Vulnerability scanner: Trivy (container vulnerability database) +- Orchestrator: `ci_security.sh` + - Worker: Trivy (filesystem, configuration, and container image scanning) + - Report formats: SARIF (GitHub Security tab), JSON (detailed), Markdown (summary) + - SBOM generation: CycloneDX and SPDX formats + - GitHub Security integration: Auto-uploads SARIF for code scanning dashboard + - Output: `generated/reports/` (security findings and artifacts) + +**Configuration**: +- Environment list: `.github/workflows/environments.json` +- Task workflows: `task-*.yml` (reusable GitHub Actions components) +- Repository owner restriction: Hardcoded to `1technophile` for dev deployments +- All scripts located in: `./scripts/` + +**Local Development**: +```bash +# Build firmware locally +./scripts/ci.sh build esp32dev-ble --mode dev --version test + +# Build documentation locally +./scripts/ci.sh site --mode dev --version test + +# Check code format +./scripts/ci.sh qa --check --source main --extensions h,ino --clang-format-version 9 + +# Security scanning +./scripts/ci.sh security --scan-type fs --severity HIGH,CRITICAL --generate-sbom + +# Run complete pipeline +./scripts/ci.sh all --mode dev +``` + +--- + +**Document Version**: 2.3 +**Last Updated**: 01/14/2026 +**Maintainer**: OpenMQTTGateway Development Team diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 7ad46d4e31..09f44f984f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,138 +1,47 @@ name: Build +# See details in .github/workflows/README.md (workflow docs) +run-name: "CI build for ${{ github.ref_name }}" on: [push, pull_request] jobs: - build: - strategy: - fail-fast: false - matrix: - environments: - - "rfbridge" - - "rfbridge-direct" - - "esp32dev-all-test" - - "esp32dev-rf" - - "esp32dev-pilight-cc1101" - - "esp32dev-somfy-cc1101" - - "esp32dev-pilight-somfy-cc1101" - - "esp32dev-weatherstation" - - "esp32dev-gf-sun-inverter" - - "esp32dev-ir" - - "esp32dev-ble" - - "esp32dev-ble-mqtt-undecoded" - - "esp32dev-ble-aws" - - "esp32feather-ble" - - "esp32-lolin32lite-ble" - - "esp32-olimex-gtw-ble-eth" - - "esp32-olimex-gtw-ble-poe" - - "esp32-olimex-gtw-ble-poe-iso" - - "esp32-wt32-eth01-ble-eth" - - "esp32-olimex-gtw-ble-wifi" - - "esp32-m5stick-ble" - - 
"esp32-m5stack-ble" - - "esp32-m5tough-ble" - - "esp32-m5stick-c-ble" - - "esp32-m5stick-cp-ble" - - "esp32s3-atomS3U" - - "esp32-m5atom-matrix" - - "esp32-m5atom-lite" - - "esp32dev-rtl_433" - - "esp32dev-rtl_433-fsk" - - "esp32doitv1-aithinker-r01-sx1278" - - "heltec-rtl_433" - - "heltec-rtl_433-fsk" - - "heltec-ble" - - "lilygo-rtl_433" - - "lilygo-rtl_433-fsk" - - "lilygo-ble" - - "esp32dev-multi_receiver" - - "esp32dev-multi_receiver-pilight" - - "tinypico-ble" - - "ttgo-lora32-v1" - - "ttgo-lora32-v21" - - "ttgo-t-beam" - - "heltec-wifi-lora-32" - - "shelly-plus1" - - "nodemcuv2-all-test" - - "nodemcuv2-fastled-test" - - "nodemcuv2-2g" - - "nodemcuv2-ir" - - "nodemcuv2-serial" - - "avatto-bakeey-ir" - - "nodemcuv2-rf" - - "nodemcuv2-rf-cc1101" - - "nodemcuv2-somfy-cc1101" - - "manual-wifi-test" - - "rf-wifi-gateway" - - "nodemcuv2-rf2" - - "nodemcuv2-rf2-cc1101" - - "nodemcuv2-pilight" - - "nodemcuv2-weatherstation" - - "sonoff-basic" - - "sonoff-basic-rfr3" - - "esp32dev-ble-datatest" - - "esp32s3-dev-c1-ble" - - "esp32c3-dev-m1-ble" - - "airm2m_core_esp32c3" - - "esp32c3_lolin_mini" - - "esp32c3-m5stamp" - - "thingpulse-espgateway" - - "theengs-bridge" - - "esp32dev-ble-idf" - - "theengs-bridge-v11" - - "theengs-plug" - - "esp32dev-ble-broker" - - "esp32s3-m5stack-stamps3" - - "esp32c3u-m5stamp" - - "lilygo-t3-s3-rtl_433" - - "lilygo-t3-s3-rtl_433-fsk" + determine-build-scope: runs-on: ubuntu-latest - name: Build with PlatformIO + outputs: + environment-set: ${{ steps.decide.outputs.environment-set }} steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.13' - - name: Install uv - uses: astral-sh/setup-uv@v6 - with: - version: "latest" - enable-cache: false - - name: Install dependencies - run: | - uv pip install --system -U https://github.com/pioarduino/platformio-core/archive/refs/tags/v6.1.18.zip - - name: Extract ESP32 platform version from platformio.ini + - name: Decide environment set + id: decide run: | - ESP32_VERSION=$(grep 'esp32_platform\s*=' platformio.ini | cut -d'@' -f2 | tr -d '[:space:]') - echo "ESP32_PLATFORM_VERSION=${ESP32_VERSION}" >> $GITHUB_ENV - - name: Run PlatformIO - env: - PYTHONIOENCODING: utf-8 - PYTHONUTF8: '1' - run: platformio run -e ${{ matrix.environments }} - - name: Upload Assets - uses: actions/upload-artifact@v4 - with: - name: ${{ matrix.environments }} - path: | - .pio/build/*/firmware.bin - .pio/build/*/partitions.bin - retention-days: 7 + # Full build for: development, master, edge, stable, release/*, hotfix/*, and all PRs + if [[ "${{ github.event_name }}" == "pull_request" ]] || \ + [[ "${{ github.ref }}" == "refs/heads/development" ]] || \ + [[ "${{ github.ref }}" == "refs/heads/master" ]] || \ + [[ "${{ github.ref }}" == "refs/heads/edge" ]] || \ + [[ "${{ github.ref }}" == "refs/heads/stable" ]] || \ + [[ "${{ github.ref }}" == refs/heads/release/* ]] || \ + [[ "${{ github.ref }}" == refs/heads/hotfix/* ]]; then + echo "environment-set=all" >> $GITHUB_OUTPUT + else + echo "environment-set=ci" >> $GITHUB_OUTPUT + fi + + + build: + needs: determine-build-scope + name: Build firmware + uses: ./.github/workflows/task-build.yml + with: + enable-dev-ota: true + artifact-retention-days: 7 + environment-set: ${{ needs.determine-build-scope.outputs.environment-set }} documentation: - runs-on: ubuntu-latest - name: Create the documentation - steps: - - uses: actions/checkout@v4 - - name: Set up Node.js - uses: actions/setup-node@v4 - with: - node-version: "14.x" - - name: 
Download Common Config - run: | - curl -o docs/.vuepress/public/commonConfig.js https://www.theengs.io/commonConfig.js - - name: Install build dependencies - run: npm install - - name: Build documentation - run: npm run docs:build + needs: build + name: Build documentation + uses: ./.github/workflows/task-docs.yml + with: + mode: "dev" + run-pagespeed: false + just-check: true + diff --git a/.github/workflows/build_and_docs_to_dev.yml b/.github/workflows/build_and_docs_to_dev.yml index 3c2ea37c4e..02cc07ce44 100644 --- a/.github/workflows/build_and_docs_to_dev.yml +++ b/.github/workflows/build_and_docs_to_dev.yml @@ -1,221 +1,43 @@ name: Build binaries, docs and publish to dev folder + on: workflow_dispatch: schedule: - - cron: '0 0 * * *' + - cron: "0 0 * * *" + jobs: - build: - strategy: - fail-fast: false - matrix: - environments: - - "rfbridge" - - "rfbridge-direct" - - "theengs-bridge" - - "theengs-bridge-v11" - - "theengs-plug" - - "esp32dev-all-test" - - "esp32dev-rf" - - "esp32dev-pilight" - - "esp32dev-pilight-cc1101" - - "esp32dev-somfy-cc1101" - - "esp32dev-pilight-somfy-cc1101" - - "esp32dev-weatherstation" - - "esp32dev-gf-sun-inverter" - - "esp32dev-ir" - - "esp32dev-ble" - - "esp32dev-ble-broker" - - "esp32dev-ble-mqtt-undecoded" - - "esp32dev-ble-aws" - - "esp32feather-ble" - - "esp32-lolin32lite-ble" - - "esp32-olimex-gtw-ble-eth" - - "esp32-olimex-gtw-ble-poe" - - "esp32-olimex-gtw-ble-poe-iso" - - "esp32-wt32-eth01-ble-eth" - - "esp32-olimex-gtw-ble-wifi" - - "esp32-m5stick-ble" - - "esp32-m5stack-ble" - - "esp32-m5tough-ble" - - "esp32-m5stick-c-ble" - - "esp32-m5stick-cp-ble" - - "esp32-m5atom-matrix" - - "esp32-m5atom-lite" - - "esp32doitv1-aithinker-r01-sx1278" - - "esp32dev-rtl_433" - - "esp32dev-rtl_433-fsk" - - "heltec-rtl_433" - - "heltec-rtl_433-fsk" - - "heltec-ble" - - "lilygo-rtl_433" - - "lilygo-rtl_433-fsk" - - "lilygo-t3-s3-rtl_433" - - "lilygo-t3-s3-rtl_433-fsk" - - "lilygo-ble" - - "esp32dev-multi_receiver" - - "esp32dev-multi_receiver-pilight" - - "tinypico-ble" - - "ttgo-lora32-v1" - - "ttgo-lora32-v21" - - "ttgo-t-beam" - - "heltec-wifi-lora-32" - - "shelly-plus1" - - "nodemcuv2-all-test" - - "nodemcuv2-fastled-test" - - "nodemcuv2-2g" - - "nodemcuv2-ir" - - "nodemcuv2-serial" - - "avatto-bakeey-ir" - - "nodemcuv2-rf" - - "nodemcuv2-rf-cc1101" - - "nodemcuv2-somfy-cc1101" - - "manual-wifi-test" - - "rf-wifi-gateway" - - "nodemcuv2-rf2" - - "nodemcuv2-rf2-cc1101" - - "nodemcuv2-pilight" - - "nodemcuv2-weatherstation" - - "sonoff-basic" - - "sonoff-basic-rfr3" - - "esp32dev-ble-datatest" - - "esp32s3-dev-c1-ble" - - "esp32s3-m5stack-stamps3" - - "esp32s3-atomS3U" - - "esp32c3-dev-m1-ble" - - "airm2m_core_esp32c3" - - "esp32c3-dev-c2-ble" - - "esp32c3-dev-c2-ble-no-serial" - - "esp32c3_lolin_mini" - - "esp32c3_lolin_mini_with_serial" - - "esp32c3-m5stamp" - - "esp32c3u-m5stamp" - - "thingpulse-espgateway" - - "esp32dev-ble-idf" + prepare: runs-on: ubuntu-latest if: github.repository_owner == '1technophile' - name: Build ${{ matrix.environments }} + outputs: + short-sha: ${{ steps.short-sha.outputs.sha }} steps: - uses: actions/checkout@v4 - uses: benjlevesque/short-sha@v2.1 id: short-sha with: length: 6 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: "3.13" - - name: Install uv - uses: astral-sh/setup-uv@v6 - with: - version: "latest" - enable-cache: false - - name: Install dependencies - run: | - uv pip install --system -U https://github.com/pioarduino/platformio-core/archive/refs/tags/v6.1.18.zip - - name: Set sha tag 
- run: | - sed -i "s/version_tag/${{ steps.short-sha.outputs.sha }}/g" main/User_config.h scripts/latest_version_dev.json - - name: Run PlatformIO - env: - PYTHONIOENCODING: utf-8 - PYTHONUTF8: '1' - run: | - export PLATFORMIO_BUILD_FLAGS="'-DDEVELOPMENTOTA=true'" - platformio run -e ${{ matrix.environments }} - - name: Prepare firmware artifacts - run: | - mkdir -p firmware - cp .pio/build/${{ matrix.environments }}/firmware.bin firmware/${{ matrix.environments }}-firmware.bin - if [ -f .pio/build/${{ matrix.environments }}/partitions.bin ]; then - cp .pio/build/${{ matrix.environments }}/partitions.bin firmware/${{ matrix.environments }}-partitions.bin - fi - if [ -f .pio/build/${{ matrix.environments }}/bootloader.bin ]; then - cp .pio/build/${{ matrix.environments }}/bootloader.bin firmware/${{ matrix.environments }}-bootloader.bin - fi - - name: Upload firmware - uses: actions/upload-artifact@v4 - with: - name: firmware-${{ matrix.environments }} - path: firmware/ - retention-days: 1 - deploy: - needs: build - runs-on: ubuntu-latest - if: github.repository_owner == '1technophile' - name: Deploy binaries and docs - steps: - - uses: actions/checkout@v4 - - uses: benjlevesque/short-sha@v2.1 - id: short-sha - with: - length: 6 - - name: Download all firmware artifacts - uses: actions/download-artifact@v4 - with: - pattern: firmware-* - path: toDeploy - merge-multiple: true - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: "3.13" - - name: Install uv - uses: astral-sh/setup-uv@v6 - with: - version: "latest" - enable-cache: false - - name: Install dependencies - run: | - uv pip install --system -U https://github.com/pioarduino/platformio-core/archive/refs/tags/v6.1.18.zip - - name: Create library zips - run: | - # Install libraries for a representative environment to get libdeps - platformio pkg install -e esp32dev-ble --no-save - cd .pio/libdeps - # Replace spaces with underscores in folder names - find . -type d -name "* *" | while read FNAME; do mv "$FNAME" "${FNAME// /_}"; done - # Zip libraries per board - for i in */; do - zip -r "${i%/}-libraries.zip" "$i" - done - mv *.zip ../../toDeploy/ - - name: Prepare additional assets - run: | - cd toDeploy - # Remove binaries for *-all*, *-test* env - rm -f *-all*.bin *-test*.bin *-test*.zip || true - cd .. 
- # Zip source code - zip -r toDeploy/OpenMQTTGateway_sources.zip main LICENSE.txt - ls -lA toDeploy/ - - name: Set sha tag for docs - run: | - sed -i "s/version_tag/DEVELOPMENT SHA:${{ steps.short-sha.outputs.sha }} TEST ONLY/g" docs/.vuepress/config.js - sed -i "s|base: '/'|base: '/dev/'|g" docs/.vuepress/config.js - - name: Set up Node.js - uses: actions/setup-node@v4 - with: - node-version: "16.x" - - name: Download Common Config - run: | - curl -o docs/.vuepress/public/commonConfig.js https://www.theengs.io/commonConfig.js - - name: Build documentation - run: | - python ./scripts/gen_wu.py --dev - npm install - npm run docs:build - - name: Deploy to GitHub Pages - uses: peaceiris/actions-gh-pages@v3 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./docs/.vuepress/dist - destination_dir: dev - cname: docs.openmqttgateway.com - - name: Running Page Speed Insights - uses: jakepartusch/psi-action@v1.3 - id: psi - with: - url: "https://docs.openmqttgateway.com/dev/" - threshold: 60 - key: ${{ secrets.APIKEY }} + handle-firmwares: + needs: prepare + name: Build and deploy development firmwares artefacts + uses: ./.github/workflows/task-build.yml + with: + enable-dev-ota: true + version-tag: ${{ needs.prepare.outputs.short-sha }} + artifact-retention-days: 1 + + handle-documentation: + needs: [prepare, handle-firmwares] + name: Build and deploy development documentation + uses: ./.github/workflows/task-docs.yml + with: + mode: "dev" + version: ${{ needs.prepare.outputs.short-sha }} + url-prefix: "/dev/" + destination-dir: "dev" + run-pagespeed: true + pagespeed-url: 'https://docs.openmqttgateway.com/dev/' + secrets: + github-token: ${{ secrets.GITHUB_TOKEN }} + pagespeed-apikey: ${{ secrets.APIKEY }} \ No newline at end of file diff --git a/.github/workflows/environments.json b/.github/workflows/environments.json new file mode 100644 index 0000000000..fa2a2eff56 --- /dev/null +++ b/.github/workflows/environments.json @@ -0,0 +1,109 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "description": "Centralized list of all PlatformIO build environments for OpenMQTTGateway workflows", + "version": "1.0.0", + "lastUpdated": "2025-12-24", + "environments": { + "all": [ + "rfbridge", + "rfbridge-direct", + "theengs-bridge", + "theengs-bridge-v11", + "theengs-plug", + "esp32dev-all-test", + "esp32dev-rf", + "esp32dev-pilight", + "esp32dev-pilight-cc1101", + "esp32dev-somfy-cc1101", + "esp32dev-pilight-somfy-cc1101", + "esp32dev-weatherstation", + "esp32dev-gf-sun-inverter", + "esp32dev-ir", + "esp32dev-ble", + "esp32dev-ble-broker", + "esp32dev-ble-mqtt-undecoded", + "esp32dev-ble-aws", + "esp32dev-ble-datatest", + "esp32dev-ble-idf", + "esp32dev-rtl_433", + "esp32dev-rtl_433-fsk", + "esp32dev-multi_receiver", + "esp32dev-multi_receiver-pilight", + "esp32feather-ble", + "esp32-lolin32lite-ble", + "esp32-olimex-gtw-ble-eth", + "esp32-olimex-gtw-ble-poe", + "esp32-olimex-gtw-ble-poe-iso", + "esp32-wt32-eth01-ble-eth", + "esp32-olimex-gtw-ble-wifi", + "esp32-m5stick-ble", + "esp32-m5stack-ble", + "esp32-m5tough-ble", + "esp32-m5stick-c-ble", + "esp32-m5stick-cp-ble", + "esp32-m5atom-matrix", + "esp32-m5atom-lite", + "esp32doitv1-aithinker-r01-sx1278", + "esp32s3-dev-c1-ble", + "esp32s3-m5stack-stamps3", + "esp32s3-atomS3U", + "esp32c3-dev-m1-ble", + "esp32c3-dev-c2-ble", + "esp32c3-dev-c2-ble-no-serial", + "esp32c3_lolin_mini", + "esp32c3_lolin_mini_with_serial", + "esp32c3-m5stamp", + "esp32c3u-m5stamp", + "airm2m_core_esp32c3", + "heltec-rtl_433", + 
"heltec-rtl_433-fsk", + "heltec-ble", + "heltec-wifi-lora-32", + "lilygo-rtl_433", + "lilygo-rtl_433-fsk", + "lilygo-t3-s3-rtl_433", + "lilygo-t3-s3-rtl_433-fsk", + "lilygo-ble", + "tinypico-ble", + "ttgo-lora32-v1", + "ttgo-lora32-v21", + "ttgo-t-beam", + "thingpulse-espgateway", + "shelly-plus1", + "nodemcuv2-all-test", + "nodemcuv2-fastled-test", + "nodemcuv2-2g", + "nodemcuv2-ir", + "nodemcuv2-serial", + "nodemcuv2-rf", + "nodemcuv2-rf-cc1101", + "nodemcuv2-somfy-cc1101", + "nodemcuv2-rf2", + "nodemcuv2-rf2-cc1101", + "nodemcuv2-pilight", + "nodemcuv2-weatherstation", + "avatto-bakeey-ir", + "manual-wifi-test", + "rf-wifi-gateway", + "sonoff-basic", + "sonoff-basic-rfr3" + ], + "ci": [ + "esp32dev-all-test", + "esp32dev-ble", + "esp32dev-ir", + "esp32dev-rf", + "esp32dev-rtl_433", + "esp32s3-dev-c1-ble", + "esp32c3-dev-m1-ble", + "heltec-ble", + "lilygo-ble", + "nodemcuv2-all-test", + "nodemcuv2-ir", + "nodemcuv2-rf", + "theengs-bridge", + "theengs-plug", + "rfbridge" + ] + } +} \ No newline at end of file diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml deleted file mode 100644 index 08a0eeca26..0000000000 --- a/.github/workflows/lint.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: Check Code Format - -on: [push, pull_request] - -jobs: - lint: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - - name: Check main format - uses: DoozyX/clang-format-lint-action@v0.6 - with: - source: "./main" - extensions: "h,ino" - clangFormatVersion: 9 diff --git a/.github/workflows/manual_docs.yml b/.github/workflows/manual_docs.yml index 343a1d5b5f..b79afd59e9 100644 --- a/.github/workflows/manual_docs.yml +++ b/.github/workflows/manual_docs.yml @@ -1,45 +1,17 @@ name: Create and publish documentation + on: workflow_dispatch: workflow_call: + jobs: documentation: - runs-on: ubuntu-latest - name: Create the documentation and deploy it to GitHub Pages - steps: - - uses: actions/checkout@v4 - - name: Set up Node.js - uses: actions/setup-node@v4 - with: - node-version: "14.x" - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: "3.11" - - name: Install build dependencies - run: | - python -m pip install --upgrade pip - pip install requests pandas markdown pytablereader tabulate - npm install - - name: Download Common Config - run: | - curl -o docs/.vuepress/public/commonConfig.js https://www.theengs.io/commonConfig.js - - name: get lastest release tag - id: last_release - uses: InsonusK/get-latest-release@v1.0.1 - with: - myToken: ${{ github.token }} - view_top: 1 - - name: Set version tag from git - run: sed -i "s/version_tag/${{steps.last_release.outputs.tag_name}}/g" docs/.vuepress/config.js scripts/latest_version.json - - name: Build documentation - run: | - python ./scripts/generate_board_docs.py - python ./scripts/gen_wu.py ${GITHUB_REPOSITORY} - npm run docs:build - - name: Deploy to GitHub Pages - uses: peaceiris/actions-gh-pages@v3 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./docs/.vuepress/dist - cname: docs.openmqttgateway.com \ No newline at end of file + name: Build and deploy production documentation + uses: ./.github/workflows/task-docs.yml + with: + mode: "prod" + version: "auto" # Will detect latest git tag + url-prefix: "/" + destination-dir: "." 
+ secrets: + github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 31b2d31a6a..a2809fdf03 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -5,47 +5,66 @@ on: types: [published] jobs: - build-upload: + prepare: runs-on: ubuntu-latest - name: Build and upload Assets to Release + outputs: + version-tag: ${{ steps.extract-tag.outputs.version }} + release-id: ${{ steps.extract-release.outputs.id }} + upload-url: ${{ steps.extract-release.outputs.upload_url }} steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: "3.13" - - name: Install uv - uses: astral-sh/setup-uv@v6 - with: - version: "latest" - enable-cache: false - - name: Install platformio - run: | - uv pip install --system -U https://github.com/pioarduino/platformio-core/archive/refs/tags/v6.1.18.zip - - name: Set version tag from git - run: sed -i "s/version_tag/${GITHUB_REF#refs/tags/}/g" main/User_config.h scripts/latest_version.json - - name: Extract ESP32 platform version from platformio.ini + - name: Extract version tag + id: extract-tag run: | - ESP32_VERSION=$(grep 'esp32_platform\s*=' platformio.ini | cut -d'@' -f2 | tr -d '[:space:]') - echo "ESP32_PLATFORM_VERSION=${ESP32_VERSION}" >> $GITHUB_ENV - - name: Run PlatformIO - run: platformio run - - name: Prepare Release Assets - run: | - sudo apt install rename - ./scripts/prepare_deploy.sh - - name: Get upload url - id: release-id + VERSION_TAG=${GITHUB_REF#refs/tags/} + echo "version=${VERSION_TAG}" >> $GITHUB_OUTPUT + echo "Extracted version: ${VERSION_TAG}" + + - name: Extract release info + id: extract-release run: | RELEASE_ID=$(jq --raw-output '.release.id' $GITHUB_EVENT_PATH) - echo "::set-output name=upload_url::https://uploads.github.com/repos/${GITHUB_REPOSITORY}/releases/${RELEASE_ID}/assets{?name,label}" + UPLOAD_URL="https://uploads.github.com/repos/${GITHUB_REPOSITORY}/releases/${RELEASE_ID}/assets{?name,label}" + echo "id=${RELEASE_ID}" >> $GITHUB_OUTPUT + echo "upload_url=${UPLOAD_URL}" >> $GITHUB_OUTPUT + echo "Release ID: ${RELEASE_ID}" + + build: + needs: prepare + name: Build release firmware + uses: ./.github/workflows/task-build.yml + with: + enable-dev-ota: false + version-tag: ${{ needs.prepare.outputs.version-tag }} + artifact-retention-days: 90 + + deploy: + needs: [prepare, build] + runs-on: ubuntu-latest + name: Deploy release assets + steps: + - name: Download all firmware artifacts + uses: actions/download-artifact@v4 + with: + pattern: "*" + path: generated/artifacts + merge-multiple: true + - name: Upload Release Assets uses: bgpat/release-asset-action@03b0c30db1c4031ce3474740b0e4275cd7e126a3 with: - pattern: "toDeploy/*" + pattern: "generated/artifacts/*" github-token: ${{ secrets.GITHUB_TOKEN }} - release-url: ${{ steps.release-id.outputs.upload_url }} + release-url: ${{ needs.prepare.outputs.upload-url }} allow-overwrite: true - call-workflow-passing-data: - needs: build-upload - uses: ./.github/workflows/manual_docs.yml + + documentation: + needs: [prepare, deploy] + name: Build and deploy release documentation + uses: ./.github/workflows/task-docs.yml + with: + mode: "prod" + version: ${{ needs.prepare.outputs.version-tag }} + url-prefix: "/" + destination-dir: "." 
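+      # "." targets the root of the published site, so the tagged docs replace the live production documentation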
+ secrets: + github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/security-scan.yml b/.github/workflows/security-scan.yml new file mode 100644 index 0000000000..f27993c9bb --- /dev/null +++ b/.github/workflows/security-scan.yml @@ -0,0 +1,44 @@ +name: Security Scan + +on: + schedule: + - cron: '0 2 * * 1' # Every Monday at 2 AM + workflow_dispatch: + inputs: + severity: + description: 'Severity levels to scan' + required: false + type: choice + default: 'HIGH,CRITICAL' + options: + - 'CRITICAL' + - 'HIGH,CRITICAL' + - 'MEDIUM,HIGH,CRITICAL' + - 'LOW,MEDIUM,HIGH,CRITICAL' + scan-path: + description: 'Path to scan (default: entire repo)' + required: false + type: string + default: '.' + exit-on-error: + description: 'Fail if vulnerabilities found' + required: false + type: boolean + default: false + pull_request: + paths: + - 'main/**' + - 'lib/**' + - 'platformio.ini' + - '.github/workflows/**' + +jobs: + security-scan: + name: Scan for vulnerabilities + uses: ./.github/workflows/task-security-scan.yml + with: + scan-type: 'fs' + severity: ${{ inputs.severity || 'HIGH,CRITICAL' }} + exit-code: ${{ inputs.exit-on-error && '1' || '0' }} + upload-to-security-tab: true + scan-path: ${{ inputs.scan-path || '.' }} diff --git a/.github/workflows/task-build.yml b/.github/workflows/task-build.yml new file mode 100644 index 0000000000..838f24bbbd --- /dev/null +++ b/.github/workflows/task-build.yml @@ -0,0 +1,121 @@ +name: Reusable Build Workflow + +on: + workflow_call: + inputs: + python-version: + description: "Python version to use" + required: false + type: string + default: "3.13" + pio-version: + description: "PlatformIO version to use" + required: false + type: string + default: "v6.1.18" + environment-set: + description: 'Which set of environments to build (all, ci)' + required: false + type: string + default: 'all' + enable-dev-ota: + description: "Enable development OTA builds" + required: false + type: boolean + default: false + version-tag: + description: "Optional version tag to pass to ci.sh build (omitted if empty)" + required: false + type: string + default: "" + artifact-retention-days: + description: "Number of days to retain build artifacts" + required: false + type: number + default: 7 + +jobs: + lint: + name: Lint code format + uses: ./.github/workflows/task-lint.yml + with: + source: "main" + extensions: "h,ino,cpp" + clang-format-version: "9" + + security-scan: + name: Security vulnerability scan + uses: ./.github/workflows/task-security-scan.yml + with: + severity: "UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL" + exit-code: "1" + upload-to-security-tab: true + + load-environments: + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.set-matrix.outputs.matrix }} + steps: + - uses: actions/checkout@v4 + - id: set-matrix + run: | + ENV_SET="${{ inputs.environment-set }}" + if [ "$ENV_SET" = "ci" ]; then + ENVIRONMENTS=$(jq -c '.environments.ci' .github/workflows/environments.json) + else + ENVIRONMENTS=$(jq -c '.environments.all' .github/workflows/environments.json) + fi + echo "matrix=${ENVIRONMENTS}" >> $GITHUB_OUTPUT + + + build: + needs: [lint, security-scan, load-environments] + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + environments: ${{ fromJson(needs.load-environments.outputs.matrix) }} + name: Build ${{ matrix.environments }} + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ inputs.python-version }} + + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + 
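+          # "latest" tracks the newest uv release; a specific version could be pinned here if fully reproducible tooling is required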
version: "latest" + enable-cache: false + + - name: Install PlatformIO dependencies + run: | + uv pip install --system -U https://github.com/pioarduino/platformio-core/archive/refs/tags/${{ inputs.pio-version }}.zip + + - name: Build firmware using ci.sh + run: | + BUILD_ARGS="${{ matrix.environments }}" + BUILD_ARGS="$BUILD_ARGS --deploy-ready" + + # Optional version tag + if [ -n "${{ inputs.version-tag }}" ]; then + BUILD_ARGS="$BUILD_ARGS --version ${{ inputs.version-tag }}" + fi + + # Mode + if [ "${{ inputs.enable-dev-ota }}" = "true" ]; then + BUILD_ARGS="$BUILD_ARGS --mode dev" + else + BUILD_ARGS="$BUILD_ARGS --mode prod" + fi + + ./scripts/ci.sh build $BUILD_ARGS + + - name: Upload firmware artifacts + uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.environments }} + path: generated/artifacts/ + retention-days: ${{ inputs.artifact-retention-days }} diff --git a/.github/workflows/task-docs.yml b/.github/workflows/task-docs.yml new file mode 100644 index 0000000000..beb68644b4 --- /dev/null +++ b/.github/workflows/task-docs.yml @@ -0,0 +1,146 @@ +name: Reusable Documentation Workflow + +on: + workflow_call: + inputs: + mode: + description: 'Build mode: "prod" or "dev"' + required: false + type: string + default: "prod" + version: + description: 'Version string for documentation (use "auto" for automatic detection)' + required: false + type: string + default: "auto" + just-check: + description: "Just create a quick build to check for errors" + required: false + type: boolean + default: false + url-prefix: + description: 'URL prefix for docs (e.g., "/" for prod, "/dev/" for dev)' + required: false + type: string + default: "/" + run-pagespeed: + description: "Run PageSpeed Insights after deploy" + required: false + type: boolean + default: false + pagespeed-url: + description: "URL to test with PageSpeed" + required: false + type: string + default: "https://docs.openmqttgateway.com/" + destination-dir: + description: "GitHub Pages destination directory" + required: false + type: string + default: "." 
+ python-version: + description: "Python version to use" + required: false + type: string + default: "3.13" + pio-version: + description: "PlatformIO version to use" + required: false + type: string + default: "v6.1.18" + secrets: + github-token: + description: "GitHub token for deploying to GitHub Pages" + required: false + pagespeed-apikey: + description: "API key for PageSpeed Insights" + required: false + + +jobs: + documentation: + runs-on: ubuntu-latest + name: Create and deploy documentation + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 # Fetch all history for git tags + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ inputs.python-version }} + + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + version: "latest" + enable-cache: false + + - name: Install PlatformIO dependencies + run: | + uv pip install --system -U https://github.com/pioarduino/platformio-core/archive/refs/tags/${{ inputs.pio-version }}.zip + + - name: Determine version + id: version + run: | + VERSION="${{ inputs.version }}" + MODE="${{ inputs.mode }}" + + # If version is 'auto', determine it based on mode + if [ "$VERSION" = "auto" ]; then + if [ "$MODE" = "dev" ]; then + # Dev mode: always use short SHA + VERSION="$(git rev-parse --short HEAD)" + echo "Dev mode - using SHA: $VERSION" + else + # Prod mode: try to get git tag + if GIT_TAG=$(git describe --tags --exact-match 2>/dev/null); then + VERSION="$GIT_TAG" + echo "Using exact git tag: $VERSION" + elif LATEST_TAG=$(git describe --tags --abbrev=0 2>/dev/null); then + VERSION="$LATEST_TAG" + echo "Using latest tag: $VERSION" + else + VERSION="v0.0.0-unknown" + echo "Warning: No tag found, using: $VERSION" + fi + fi + fi + + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "Final version: $VERSION" + + - name: Download all firmware artifacts + if: ${{ inputs.mode == 'dev' }} + uses: actions/download-artifact@v4 + with: + pattern: "*" + path: generated/artifacts + merge-multiple: true + + - name: Build documentation site + run: | + ./scripts/ci.sh site \ + --clean \ + --mode ${{ inputs.mode }} \ + --version ${{ steps.version.outputs.version }} \ + --url-prefix ${{ inputs.url-prefix }} + + - name: Deploy to GitHub Pages + uses: peaceiris/actions-gh-pages@v3 + if: ${{ inputs.just-check == false }} + with: + github_token: ${{ secrets.github-token }} + publish_dir: ./generated/site + destination_dir: ${{ inputs.destination-dir }} + cname: docs.openmqttgateway.com + + - name: Run PageSpeed Insights + if: ${{ inputs.just-check == false && inputs.run-pagespeed == true }} + uses: jakepartusch/psi-action@v1.3 + id: psi + with: + url: ${{ inputs.pagespeed-url }} + threshold: 60 + key: ${{ secrets.pagespeed-apikey }} diff --git a/.github/workflows/task-lint.yml b/.github/workflows/task-lint.yml new file mode 100644 index 0000000000..6c94e60c9a --- /dev/null +++ b/.github/workflows/task-lint.yml @@ -0,0 +1,41 @@ +name: Reusable Lint Workflow + +on: + workflow_call: + inputs: + source: + description: "Source directory to lint" + required: false + type: string + default: "main" + extensions: + description: "File extensions to check (comma-separated)" + required: false + type: string + default: "h,ino,cpp" + clang-format-version: + description: "clang-format version to use" + required: false + type: string + default: "9" + +jobs: + lint: + runs-on: ubuntu-latest + name: Check code format + steps: + - uses: actions/checkout@v4 + + - name: Install clang-format + run: | + sudo apt-get update + sudo apt-get install -y 
clang-format shellcheck + + - name: Check code format with ci.sh qa + run: | + ./scripts/ci.sh qa \ + --check \ + --source "${{ inputs.source }}" \ + --extensions "${{ inputs.extensions }}" \ + --clang-format-version "${{ inputs.clang-format-version }}" + diff --git a/.github/workflows/task-security-scan.yml b/.github/workflows/task-security-scan.yml new file mode 100644 index 0000000000..00cf5e5352 --- /dev/null +++ b/.github/workflows/task-security-scan.yml @@ -0,0 +1,121 @@ +name: Reusable Security Scan Workflow + +on: + workflow_call: + inputs: + scan-type: + description: 'Type of scan: fs (filesystem), config, or image' + required: false + type: string + default: 'fs' + severity: + description: 'Severity levels to report (comma-separated: UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL)' + required: false + type: string + default: 'HIGH,CRITICAL' + exit-code: + description: 'Exit code when vulnerabilities found (0=continue, 1=fail)' + required: false + type: string + default: '0' + upload-to-security-tab: + description: 'Upload SARIF to GitHub Security tab (GitHub-specific feature)' + required: false + type: boolean + default: true + scan-path: + description: 'Path to scan (default: entire repository)' + required: false + type: string + default: '.' + generate-sbom: + description: 'Generate Software Bill of Materials (SBOM) in CycloneDX and SPDX formats' + required: false + type: boolean + default: true + +jobs: + security-scan: + runs-on: ubuntu-latest + name: Security vulnerability scan + permissions: + contents: read + security-events: write # Required only for GitHub Security tab upload + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install Trivy + run: | + wget -qO - https://aquasecurity.github.io/trivy-repo/deb/public.key | sudo apt-key add - + echo "deb https://aquasecurity.github.io/trivy-repo/deb $(lsb_release -sc) main" | sudo tee -a /etc/apt/sources.list.d/trivy.list + sudo apt-get update && sudo apt-get install -y trivy + + - name: Run Security Scan using ci_security.sh + run: | + # Build command with provided inputs + CMD="./scripts/ci_security.sh" + CMD="$CMD --scan-type ${{ inputs.scan-type }}" + CMD="$CMD --scan-path ${{ inputs.scan-path }}" + CMD="$CMD --severity ${{ inputs.severity }}" + CMD="$CMD --exit-code ${{ inputs.exit-code }}" + + # Add SBOM generation if requested + if [[ "${{ inputs.generate-sbom }}" == "true" ]]; then + CMD="$CMD --generate-sbom" + fi + + # Execute the security scan + $CMD + + # GitHub-specific: Upload to Security tab + - name: Upload SARIF to GitHub Security tab + if: inputs.upload-to-security-tab == true && always() + uses: github/codeql-action/upload-sarif@v4 + with: + sarif_file: './generated/reports/trivy-results.sarif' + category: 'trivy-security-scan' + + # Agnostic: Upload all reports as artifacts (works on any CI) + - name: Upload security reports as artifacts + uses: actions/upload-artifact@v4 + if: always() + with: + name: security-scan-reports + path: | + ./generated/reports/trivy-results.sarif + ./generated/reports/trivy-report.json + ./generated/reports/trivy-report.txt + ./generated/reports/security-summary.md + ./generated/reports/trivy-scan.log + retention-days: 90 + + # Display summary in workflow output + - name: Display Security Summary + if: always() + run: | + cat ./generated/reports/security-summary.md >> $GITHUB_STEP_SUMMARY 2>/dev/null || echo "Summary not available" + + # + - name: Upload SBOM reports + uses: actions/upload-artifact@v4 + if: always() + with: + name: sbom-reports + path: | + 
./generated/reports/sbom/ + retention-days: 90 + + # Optional: Fail the build if critical vulnerabilities found + - name: Check for critical vulnerabilities + if: inputs.exit-code == '1' + run: | + if [ -f ./generated/reports/trivy-results.sarif ]; then + CRITICAL=$(jq '[.runs[].results[] | select(.level == "error")] | length' ./generated/reports/trivy-results.sarif 2>/dev/null || echo "0") + if [ "$CRITICAL" -gt 0 ]; then + echo "❌ Found ${CRITICAL} critical vulnerabilities!" + echo "Review the security reports in artifacts." + exit 1 + fi + fi diff --git a/.gitignore b/.gitignore index 84ef10d0e9..276ab0743a 100644 --- a/.gitignore +++ b/.gitignore @@ -21,4 +21,28 @@ managed_components .github/chatmodes .github/prompts -.github/*instructions.md \ No newline at end of file +.github/*instructions.md +.github/workflows/_docs +# CI/CD Generated outputs +docs/.vuepress/dist/ +docs/.vuepress/public/boards-info.json +docs/.vuepress/public/commonConfig.js +docs/.vuepress/public/firmware_build + + +# Generated site/firmware/reports files +generated/ + +# Generated ssl for testing +.ssl + +# Generated JSON files from templates +scripts/latest_version.json +scripts/latest_version_dev.json + +# Generated configuration files +docs/.vuepress/meta.json + +# Generated full documentation pages +docs/prerequisites/board-full.md +docs/upload/web-install-full.md diff --git a/.shellcheckrc b/.shellcheckrc new file mode 100644 index 0000000000..db9bb4268c --- /dev/null +++ b/.shellcheckrc @@ -0,0 +1 @@ +disable=SC1091 \ No newline at end of file diff --git a/docs/.vuepress/components/BoardEnvironmentTable.vue b/docs/.vuepress/components/BoardEnvironmentTable.vue new file mode 100644 index 0000000000..576adfaf38 --- /dev/null +++ b/docs/.vuepress/components/BoardEnvironmentTable.vue @@ -0,0 +1,440 @@ + + + + + diff --git a/docs/.vuepress/components/FlashEnvironmentSelector.vue b/docs/.vuepress/components/FlashEnvironmentSelector.vue new file mode 100644 index 0000000000..94c3b37cd3 --- /dev/null +++ b/docs/.vuepress/components/FlashEnvironmentSelector.vue @@ -0,0 +1,686 @@ + + + + + diff --git a/docs/.vuepress/config.js b/docs/.vuepress/config.js index 4a4eea071e..0662e7488d 100644 --- a/docs/.vuepress/config.js +++ b/docs/.vuepress/config.js @@ -1,20 +1,35 @@ +let meta = require('./defaults.json'); +try { + meta_overload = require('./meta.json'); + meta = { ...meta, ...meta_overload }; +} catch (e) { + console.warn('meta.json not found or not valid. 
Using default configuration.'); +} + + + +const fs = require('fs'); +const path = require('path'); +const commonConfigPath = path.resolve(__dirname, 'public/commonConfig.js'); +if (!fs.existsSync(commonConfigPath)) { + throw new Error(`commonConfig.js not found in ${commonConfigPath}.\nPlease download from https://www.theengs.io/commonConfig.js or create this file before you build the documentation.`); +} const commonConfig = require('./public/commonConfig'); module.exports = { ...commonConfig, - title: 'Theengs OpenMQTTGateway version_tag', - base: '/', + title: `${meta.title} - ${meta.version}`, + base: meta.url_prefix, + dest: meta.dest, // default is generated/site description: 'One gateway, many technologies: MQTT gateway for ESP8266 or ESP32 with bidirectional 433mhz/315mhz/868mhz, Infrared communications, BLE, LoRa, beacons detection, mi flora / mi jia / LYWSD02/ Mi Scale compatibility, SMS & LORA.', - head: [ - ...commonConfig.head, - ['script', {type: 'module', src: 'https://unpkg.com/esp-web-tools@9.4.3/dist/web/install-button.js?module'}] - ], + head: [...commonConfig.head], themeConfig: { - repo: '1technophile/OpenMQTTGateway', - docsDir: 'docs', ...commonConfig.themeConfig, + repo: meta.theme_config_repo, + docsDir: 'docs', + mode: meta.mode, sidebar: [ - ['/','0 - What is it for 🏠'], + ['/', '0 - What is it for 🏠'], { title: '1 - Prerequisites🧭', // required //collapsable: true, // optional, defaults to true @@ -42,9 +57,10 @@ module.exports = { }, { title: '3 - Upload ➡️', // required + path: '/upload/', sidebarDepth: 1, // optional, defaults to 1 children: [ - 'upload/web-install', + ['upload/web-install', "(Option 1) Upload from the web"], 'upload/binaries', 'upload/builds', 'upload/gitpod', @@ -86,23 +102,25 @@ module.exports = { }, { title: '6 - Participate 💻', // required + path: '/participate/', sidebarDepth: 1, // optional, defaults to 1 children: [ + 'participate/quick_start', 'participate/support', 'participate/development', 'participate/adding-protocols', 'participate/community', - ['https://github.com/1technophile/OpenMQTTGateway/blob/development/LICENSE.txt','License'] + [meta.url_license_file, 'License'] ] } - ] + ] }, plugins: { 'sitemap': { - hostname: 'https://docs.openmqttgateway.com', + hostname: meta.hostname, urls: [ 'https://decoder.theengs.io/devices/devices.html', - 'https://community.openmqttgateway.com/', + meta.url_community_forum, 'https://shop.theengs.io/', 'https://shop.theengs.io/products/theengs-plug-smart-plug-ble-gateway-and-energy-consumption', 'https://shop.theengs.io/products/theengs-bridge-esp32-ble-mqtt-gateway-with-ethernet-and-external-antenna', diff --git a/docs/.vuepress/defaults.json b/docs/.vuepress/defaults.json new file mode 100644 index 0000000000..2c8ddcaed7 --- /dev/null +++ b/docs/.vuepress/defaults.json @@ -0,0 +1,12 @@ +{ + "title": "Theengs OpenMQTTGateway", + "version": "edge", + "url_prefix": "/", + "dest": "generated/site", + "mode": "prod", + "url_license_file": "https://github.com/1technophile/OpenMQTTGateway/blob/development/LICENSE.txt", + "url_community_forum": "https://community.openmqttgateway.com", + "hostname": "https://docs.openmqttgateway.com", + "repo": "1technophile/OpenMQTTGateway", + "theme_config_repo": "1technophile/OpenMQTTGateway" +} \ No newline at end of file diff --git a/docs/.vuepress/public/img/microcontroller.gif b/docs/.vuepress/public/img/microcontroller.gif new file mode 100644 index 0000000000..40a9bc2f77 Binary files /dev/null and b/docs/.vuepress/public/img/microcontroller.gif differ diff 
--git a/docs/README.md b/docs/README.md index 7b8fcb9f88..01443ad030 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,3 +1,13 @@ + + + +::: warning Development Version +This is the edge version of the documentation. It is under active development and may contain bugs, incomplete features, or breaking changes. Use it at your own risk. +::: + + + + OpenMQTTGateway aims to unify various technologies and protocols into a single firmware. This reduces the need for multiple physical bridges and streamlines diverse technologies under the widely-used [MQTT](http://mqtt.org/) protocol.
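To make that concrete: every gateway module ends up publishing to (and listening on) MQTT topics that any MQTT client can use. Below is a minimal sketch with the standard `mosquitto_sub` client; the broker address is a placeholder, and `home/OpenMQTTGateway` is the default base topic referenced later in this documentation:

```bash
# Watch everything the gateway publishes; -v prints the topic along with the payload
mosquitto_sub -h 192.168.1.17 -t "home/OpenMQTTGateway/#" -v
```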
diff --git a/docs/img/upload.png b/docs/img/upload.png new file mode 100644 index 0000000000..9ea66169b7 Binary files /dev/null and b/docs/img/upload.png differ diff --git a/docs/participate/README.md b/docs/participate/README.md new file mode 100644 index 0000000000..34618ee100 --- /dev/null +++ b/docs/participate/README.md @@ -0,0 +1,46 @@ +# Participate in OpenMQTTGateway + +OpenMQTTGateway is community-driven: every feature, bug fix, and doc improvement comes from contributors like you. Whether you want to add support for a new sensor, fix a bug, improve documentation, or just help others—there's a place for you here. + +## Choose your path + +### 🚀 New to the project? Start here +[**Quick Start Guide**](./quick_start.md) walks you through setting up your development environment, building firmware, and previewing the docs. It covers PlatformIO, VS Code, terminal workflows, and troubleshooting. Perfect if you're new to ESP32/ESP8266 development or just want to get your hands dirty quickly. + +**What you'll learn:** Installing tools · Building firmware · Flashing boards · Running tests · Working with the docs + +--- + +### 💻 Ready to contribute code? +[**Development Contributions**](./development.md) covers the contribution workflow: naming conventions, code quality checks, testing on multiple boards, and opening pull requests. This is your checklist before submitting code. + +**What you'll find:** PR workflow · Naming rules (ZgatewayXXX, ZsensorYYY) · QA automation · DCO · CI/CD integration + +--- + +### 🔌 Adding a new protocol or device? +[**Adding Protocols**](./adding-protocols.md) explains how to integrate RF, IR, or BLE devices. Most protocol work happens upstream (RCSwitch, Pilight, IRRemoteESP8266, Theengs Decoder), and this guide shows you how to wire it into OMG. + +**What you'll learn:** Upstream contribution · Protocol integration · Testing decoders · Payload validation + +--- + +### 💬 Join the community +[**Community Participation**](./community.md) is where you ask questions, share your projects, and help others. The forum is the first stop for feature requests, debugging help, and general IoT/home automation discussion. + +**What you can do:** Ask questions · Share builds · Propose features · Help newcomers + +--- + +### ❤️ Support the project +[**Supporting the Project**](./support.md) lists all the ways you can help beyond code: answering forum questions, improving docs, purchasing Theengs products, sponsoring developers, or creating content about your OMG setup. + +**How you can help:** Forum support · Documentation · Sponsorship · Content creation + +--- + +::: tip Not sure where to start? +- **First time here?** → [Quick Start](./quick_start.md) +- **Have a fix or feature ready?** → [Development contributions](./development.md) +- **Need help or want to discuss?** → [Community](./community.md) +::: diff --git a/docs/participate/adding-protocols.md b/docs/participate/adding-protocols.md index 88b174cb09..5a19e993c1 100644 --- a/docs/participate/adding-protocols.md +++ b/docs/participate/adding-protocols.md @@ -1,12 +1,22 @@ # Adding protocols -Adding your device protocol to OpenMQTTGateway enables it to increase interoperability and to create new use cases with your device. Below you will find some guidance to do that. +Adding your device protocol to OpenMQTTGateway increases interoperability and creates new use cases. Use the steps below to pick the right path and upstream your changes. 
+ +::: tip Before you open a PR +Read the [Development contributions guide](./development.md) for naming rules, QA, and PR steps. It keeps your protocol addition reviewable. +::: ## RF or IR -For adding RF and IR protocols to OpenMQTTGateway the best way is to do a pull request to [RCSwitch](https://github.com/1technophile/rc-switch), [Pilight](https://github.com/pilight/pilight) for RF, and [IRRemoteESP8266](https://github.com/crankyoldgit/IRremoteESP8266) for IR. +1. Implement or extend the protocol in the upstream libs first: [RCSwitch](https://github.com/1technophile/rc-switch) or [Pilight](https://github.com/pilight/pilight) for RF, and [IRRemoteESP8266](https://github.com/crankyoldgit/IRremoteESP8266) for IR. +2. Once merged (or while your PR is pending), wire it in OMG as a new decoder/mapping. +3. Add a short test sniff/build locally with the relevant environment (for example `esp32dev-all-test`). ## BLE -For BLE message decoding OpenMQTTGateway uses the [Theengs Decoder](https://decoder.theengs.io/) library. New device decoder pull requests can be submitted directly to the [GitHub repository](https://github.com/theengs/decoder). +1. For BLE message decoding, OpenMQTTGateway uses [Theengs Decoder](https://decoder.theengs.io/). Submit new device decoders directly to the [GitHub repository](https://github.com/theengs/decoder). +2. After the decoder exists, ensure the device is reported correctly through OMG (advertising devices are supported without connection). +3. Build locally for a BLE-enabled environment and validate the payload. -Currently we support the reading of advertizing BLE devices, advertizing means that the BLE device broadcasts regularly its sensor data without the need of a BLE connection. +Notes: +- We support reading **advertising** BLE devices (they broadcast regularly without a connection). +- Keep payloads compact and follow existing JSON fields for consistency. diff --git a/docs/participate/community.md b/docs/participate/community.md index d8494b6b81..ded39a8523 100644 --- a/docs/participate/community.md +++ b/docs/participate/community.md @@ -1,4 +1,9 @@ # Community participation -The first entry step for participating to OMG is sharing and discussing with the [community](https://community.openmqttgateway.com), by sharing your experiences and answering to other questions you are giving back what the others gave to you. +The first step in participating in OMG is sharing and discussing with the [community](https://community.openmqttgateway.com). By sharing your experiences and answering questions, you give back what others gave to you. -You can use the forum to ask questions, post answers, suggest features and discuss about home automation or Internet of things in a more general way. \ No newline at end of file +Use the forum to: +- Ask questions about hardware, builds, MQTT, BLE/RF/IR, or docs. +- Share projects, logs, and payload samples that help others. +- Suggest features or confirm if something already exists. + +When you have a change to contribute, follow the [Development contributions guide](./development.md) for the PR flow. \ No newline at end of file diff --git a/docs/participate/development.md b/docs/participate/development.md index f2488c9640..f67a235928 100644 --- a/docs/participate/development.md +++ b/docs/participate/development.md @@ -1,30 +1,37 @@ # Development contributions -We like pull requests from everyone. 
By participating in this project, you -agree to follow the code of conduct below +We like pull requests from everyone and ask you to follow the code of conduct below. -[code of conduct](https://github.com/1technophile/OpenMQTTGateway/blob/master/CODE_OF_CONDUCT.md) +* [Code of conduct](https://github.com/1technophile/OpenMQTTGateway/blob/master/CODE_OF_CONDUCT.md) -[code style guide](https://google.github.io/styleguide/cppguide.html#Formatting) +* [Code style guide](https://google.github.io/styleguide/cppguide.html#Formatting) -So as to format automatically your document you have to add the "clang-Format" extension to VSCode, once done, you can format the code by doing a right click into the code file window and clicking "Format document". +If you need a step-by-step install and build guide, read the [Quick Start to Develop OpenMQTTGateway](./quick_start.md). It explains the tools, PlatformIO, and docs workflow. Use this page as a fast checklist once you know the flow. -Fork the [development branch](https://github.com/1technophile/OpenMQTTGateway/tree/development), then clone the repo +**Quick checklist** +1. Fork the [development branch](https://github.com/1technophile/OpenMQTTGateway/tree/development) and clone the repo. +2. Make your changes and follow the naming rules: + * New gateway: `ZgatewayXXX` where `XXX` is the protocol name. + * New sensor: `ZsensorYYY` where `YYY` is the sensor type. + * New actuator: `ZactuatorZZZ` where `ZZZ` is the actuator type. +3. Review your code and compile for ESP32 and ESP8266. +4. Test on your hardware. +5. Open a pull request, verify the GitHub Actions CI build, and request a review. -Make your modification, -* If you want to add a new gateway, name it `ZgatewayXXX`, `XXX` replaced by your gateway communication type, can be more than three letters -* If you want to add a new sensor, name it `ZsensorYYY`, `YYY` replaced by your sensor type, can be more than three letters -* If you want to add a new actuator, name it `ZactuatorZZZ`, `ZZZ` replaced by your actuator type, can be more than three letters +## Automated Testing and CI -Review your code, compile it for ESP32 and ESP8266 +Your pull request will be automatically tested by GitHub Actions. If you want to run the same checks locally before pushing, you can use the `ci.sh` script in the scripts folder. -Test it locally on your hardware config +**Need help with the ci.sh commands?** See [section 5 in Quick Start](./quick_start.md#5-firmware-development-from-the-terminal-with-cish) for detailed examples of running QA checks and builds from the terminal. -Emit a pull request +For more details on how CI works and available scripts, see the [CI documentation](https://github.com/1technophile/OpenMQTTGateway/blob/development/scripts) in the scripts folder. -Verify the GitHub Actions CI compilation results +For a comprehensive overview of all GitHub Actions workflows used in this project, check the [Workflows README](https://github.com/1technophile/OpenMQTTGateway/tree/development/.github/workflows). + +## Code Quality + +To format your code automatically, add the "clang-Format" extension to VSCode, then right click in the file and choose "Format document". -Request for review We may suggest some changes, improvements or alternatives. 
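As a quick reference, the QA check and a development build that CI performs can also be run locally before you push; these are the same commands documented in the Quick Start guide (the environment name below is only an example):

```bash
# Run the clang-format check that CI applies to the sources under main/
./scripts/ci.sh qa --check

# Build one firmware environment in development mode with verbose output
./scripts/ci.sh build esp32dev-all-test --mode dev --verbose
```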
diff --git a/docs/participate/quick_start.md b/docs/participate/quick_start.md new file mode 100644 index 0000000000..23367f06f7 --- /dev/null +++ b/docs/participate/quick_start.md @@ -0,0 +1,840 @@ +--- +title: Quick Start to Develop OpenMQTTGateway +permalink: /participate/quick_start.html +sidebarDepth: 2 +lang: en-US +--- + +# Quick Start to Develop OpenMQTTGateway + +This document helps a new contributor. Maybe this is your first time with: + +- Git +- PlatformIO +- ESP32 or ESP8266 + +It is normal to feel a bit lost at the beginning. This guide walks with you, step by step. + +::: tip Already experienced? +If you only need the contribution rules and PR flow, jump to [Development contributions](./development.md). For community help or non-code support, see [Community participation](./community.md) and [Supporting the project](./support.md). +::: + + + +In this document you will learn how to: + +- Prepare the development environment on Windows or Linux. +- Build and flash the firmware (ESP32 / ESP8266 and other boards). +- Build and preview the documentation website in the docs folder. + +At the end of each step you see a **Check**. Use it to confirm that the previous step is correct before you go to the next one. If a check fails, do not worry: read the step again, fix it, and try one more time. + + + +## 1. Understand this project + +OpenMQTTGateway is a firmware project. It runs on ESP32, ESP8266 and other boards. It connects many protocols (Bluetooth, RF, IR, LoRa, sensors) to MQTT. + +The repository also contains the documentation website. The docs use VuePress and live in the docs folder. + +Very simple view of the repository: + +- main: C++ source code for the firmware. +- lib: reusable libraries. +- scripts: helper scripts for build, test, CI. +- docs: documentation website. + +You will mainly work on: + +- Firmware code: files under main and sometimes lib. +- Docs site: files under docs and docs/.vuepress. + +**Check:** open the repository in a file explorer and confirm you see at least main, lib, scripts and docs folders. If not, read section 3 again. + + + +## 2. Prepare your computer + +You can use Windows or Linux. The steps are very similar. The examples use a terminal. On Windows you can use **Git Bash** or **WSL**; on Linux you can use your normal shell. + +Not everybody needs to install the same tools. It depends on what you want to do: + +- **Only firmware (with VS Code or terminal):** you mainly need Git, PlatformIO, VS Code (if you like GUI) and a bash shell. +- **Only documentation website:** you mainly need Git, Node.js and npm. +- **Both firmware and docs:** you need everything. + +The next subsections explain this. + +### 2.1 Common tools (for everyone) + +Install these tools first. They are useful for **all** types of work: + +- Git +- Visual Studio Code (recommended, but you can also use another editor) +- A bash shell + - On Windows: Git Bash or WSL (Ubuntu is a good choice). + +On Windows you can download: + +- Git from the official site: +- Visual Studio Code from +- Git Bash comes with "Git for Windows". + +On Linux you can install these with your package manager (for example apt on Ubuntu). For more help you can read the Git book: . + +**Check (common tools):** open a terminal (Git Bash, WSL, or Linux shell) and run: + +```bash +git --version +``` + +If this command fails, install Git again and then close and reopen the terminal. + +### 2.2 Tools for firmware development + +For firmware builds you need **PlatformIO**. 
PlatformIO itself will install the right compiler and frameworks for your boards. + +You have two main options: + +1. Use **PlatformIO inside VS Code** (recommended for most users). +2. Use **PlatformIO CLI in the terminal** (good for advanced or CI usage). + +#### 2.2.1 PlatformIO inside VS Code (recommended) + +If you only plan to build firmware from VS Code, you usually do **not** need to install Python or PlatformIO CLI manually. The PlatformIO extension will download what it needs. + +- Just install VS Code and the PlatformIO extension (see section 2.3). +- When you first open the project, PlatformIO will set up its own tools. + +**Check:** after installing the extension and opening the project, the VS Code status bar shows something like "PlatformIO: Ready". + +#### 2.2.2 PlatformIO command line (CLI) with Python (for terminal builds) + +If you want to use the bash scripts (ci.sh, ci_build_firmware.sh, etc.) or build from a plain terminal, you need Python and PlatformIO CLI. + +Install these extra tools: + +- Python 3.10 or newer +- PlatformIO CLI (pip package) + +PlatformIO is the main build system for this project. We will use it from VS Code and from the terminal. If you want to read more, the official docs are here: . + +On Windows you can download Python from: + +On Linux you can install Python with your package manager. + +Then install PlatformIO CLI globally with Python: + +```bash +python3 -m pip install -U platformio +``` + +**Check (firmware tools):** + +```bash +python3 --version +platformio --version +``` + +If you see version numbers, the install is good. If the platformio command is not found, check that Python added the scripts folder (for example ~/.local/bin on Linux) to your PATH, then open a new terminal and try again. + +### 2.3 Tools for documentation website (Node.js and npm) +::: warning Note +This section is **only needed** if you want to **build or preview the site on your own computer**. +::: + +If you want to build or edit the **documentation website**, you also need Node.js and npm. You do **not** need these tools just to build firmware. + +Install: + +- Node.js 18 or newer +- npm (usually comes with Node.js) + +On Windows you can get Node.js from: + +On Linux you can install Node.js with your package manager or by following the docs: . + +**Check (docs tools):** + +```bash +node --version +npm --version +``` + +If one of these commands fails, reinstall Node.js, close the terminal, and try again. + +### 2.4 PlatformIO extension in Visual Studio Code +Now add PlatformIO inside VS Code. You can also see pictures of these steps in many ESP32 + PlatformIO tutorials, for example on the PlatformIO site: . + +Steps (based on common PlatformIO + VS Code ESP32 guides, adapted for this project): + +1. Open Visual Studio Code. +2. Click the Extensions icon on the left. +3. In the search box, type "PlatformIO IDE". +4. Click **Install** on the "PlatformIO IDE" extension. + +When the install finishes, a new icon appears on the left sidebar. It looks like an ant or an alien head. This opens the PlatformIO view. + +**Check:** + +- In VS Code, you see the PlatformIO icon on the left. +- If you click it, PlatformIO opens without errors. + +If you do not see the icon, restart VS Code and wait a few seconds. + +**Friendly tip:** the first install can take several minutes because PlatformIO downloads tools for many boards. It happens only once. You can get a coffee while it works. + + + +## 3. Get the source code + +Now download this repository to your computer from GitHub. 
+ +1. Open a terminal (Git Bash, WSL or Linux shell). +2. Choose a parent folder where you want to keep your projects. +3. Run: + +```bash +git clone https://github.com/1technophile/OpenMQTTGateway.git +cd OpenMQTTGateway +``` + +**Check:** + +- Run: + + ```bash + git status + ``` + +- The output should say "On branch" and "nothing to commit". +- In your file explorer or with: + + ```bash + ls + ``` + + you should see main, lib, scripts, docs and other files. + +If the clone fails, check your internet connection or a possible proxy. Then try again. If you are new to Git, the GitHub "Hello World" guide can also help: . + + + +## 4. Firmware development with Visual Studio Code and PlatformIO + +This section explains how to build and flash the firmware using Visual Studio Code with the PlatformIO extension. + +If you know the classic Arduino IDE, you can think of PlatformIO as a more powerful alternative. It manages many boards, libraries, and environments for you. The official introduction is here: . + +You do **not** create a new PlatformIO project. This repository is already a PlatformIO project. You just open it. + +### 4.1 Open the project in VS Code + +1. Start Visual Studio Code. +2. Click **File → Open Folder…**. +3. Select the OpenMQTTGateway folder you cloned in section 3. +4. Click **Open**. + +VS Code will load the folder. PlatformIO will detect the platformio.ini file and start to prepare its toolchain (compiler, libraries, board support). You do not need to install these by hand. + +**Check:** + +- In the bottom status bar you see messages like "PlatformIO: Installing" and then "PlatformIO: Ready". +- You can also open a terminal inside VS Code (View → Terminal) and run: + + ```bash + pio --version + ``` + + You should see the PlatformIO version. + +If PlatformIO stays stuck on "Installing" for a long time, close VS Code, reopen it and wait again. Check that your internet connection is working. + +### 4.2 Look at the PlatformIO project structure + +PlatformIO projects usually have folders like (you will see similar names in many tutorials): + +- src: main source code. +- lib: libraries. +- include: headers. +- platformio.ini: configuration for boards and environments. + +In OpenMQTTGateway the structure is a bit different, because it is a large project: + +- main: main C++ files for the firmware. +- lib: libraries used by the firmware. +- platformio.ini and environments.ini: list many PlatformIO environments. +- test: tests for PlatformIO unit test framework. + +**Check:** open platformio.ini in VS Code and scroll. You should see several [env:...] sections. This means PlatformIO understands the project. + +If you want to know more about this file, see the PlatformIO docs about configuration: . + +### 4.3 Choose a PlatformIO environment + +An "environment" in PlatformIO is a build target. It defines the board, framework and options. You can think of it as a named profile for a specific device. + +To select the environment for your board: + +1. Press Ctrl+Shift+P (on macOS use Cmd+Shift+P). +2. Type "PlatformIO: Switch Project Environment" and press Enter. +3. A list of environments appears (for example esp32dev-all-test, theengs-bridge and many others). +4. Choose an environment that matches your board. For a generic ESP32 DevKit you can start with **esp32dev-all-test**. + +**Check:** + +- Look at the bottom blue bar. You should see the selected environment name. + +If you chose the wrong environment, repeat the steps and pick another one. 
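If you want to double-check environment names from a terminal before picking one in VS Code, the helper script described later in section 5.2 can list them:

```bash
# From the project root: list all PlatformIO environments defined for this project
./scripts/ci.sh list-env
```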
+ +### 4.4 Build the firmware in VS Code + +Now compile the firmware. + +1. In VS Code, open the PlatformIO view (left sidebar icon). +2. In the PlatformIO toolbar (bottom of the window), click the **check** (✓) icon. This runs the "Build" task. + +PlatformIO now compiles the code for the selected environment. This can take several minutes the first time. + +**Check:** + +- In the Terminal panel, the build ends with the word "SUCCESS". +- On disk, a file like: + + ``` + .pio/build//firmware.bin + ``` + + exists. + +If the build fails, read the error at the end of the log. Take your time; errors are part of the normal developer life. + +Common problems: + +- Missing tools: check that platformio works from the terminal (section 2.2). +- Wrong environment: try with esp32dev-all-test first. + +Fix the problem, then run the build again. + +### 4.5 Upload the firmware to the board + +After a successful build you can flash the firmware (upload the program to the board). + +1. Connect your ESP board to the computer with a good USB cable. +2. In the PlatformIO toolbar click the **right arrow** (→) icon. This runs the "Upload" task. + +PlatformIO selects a serial port automatically in many cases. + +**Check:** + +- The upload log ends with success, and no "Failed to connect" error. +- The board reboots after upload. + +If the upload fails: + +- Check the USB cable (some cables are power-only and do not carry data). +- On some boards you must press and hold the BOOT button during upload. +- Make sure no other program is using the same serial port (for example another serial monitor). +- You can set the serial port manually in platformio.ini with the upload_port option. + +### 4.6 Open the Serial Monitor + +The Serial Monitor lets you see log messages from the firmware. + +1. In the PlatformIO toolbar click the **plug** icon. This opens the Serial Monitor. +2. Set the baud rate to 115200 if it is not already set. + +**Check:** + +- After reset, you see text from the board, for example boot messages and MQTT logs. + +If the text is unreadable, check that the baud rate matches the value in platformio.ini (monitor_speed). In this project the default is usually 115200. + +If you want to learn more about Serial Monitor problems, the PlatformIO docs have a short page: . + +### 4.7 Run PlatformIO tests (optional) + +You can run unit tests for the project. + +1. Open a terminal inside VS Code (View → Terminal). +2. In the project root, run: + +```bash +pio test -e test +``` + +**Check:** + +- The output ends with "SUCCESS". If tests fail, read which test failed and open the file under test to understand why. + + +## 5. Firmware development from the terminal with ci.sh + +You can also work only from the terminal. This is useful if you prefer command line, use remote development, or want to reproduce the same steps as the CI (Continuous Integration, the automatic build that runs on GitHub). + +All helper scripts live in the scripts folder. The main entry point is: + +- scripts/ci.sh + +This script calls other scripts: + +- scripts/ci_build.sh +- scripts/ci_build_firmware.sh +- scripts/ci_prepare_artifacts.sh +- scripts/ci_site.sh +- scripts/ci_qa.sh +- scripts/ci_list-env.sh + +The scripts are written for bash. On Windows always use **Git Bash** or **WSL** for them, not plain PowerShell or cmd. + +If you are new to bash, a gentle introduction is here: . + +### 5.1 Check that you are in the right place + +1. Open a bash terminal. +2. 
Go to your clone: + +```bash +cd path/to/OpenMQTTGateway +``` + +**Check:** run: + +```bash +ls scripts main docs +``` + +You should see these folders listed. If not, you are in the wrong directory. + +### 5.2 List available firmware environments + +You can ask the helper scripts which PlatformIO environments exist. + +```bash +./scripts/ci.sh list-env +``` + +**Check:** + +- The command prints many names in columns (for example esp32dev-all-test, theengs-bridge, rfbridge-direct, and so on). + +If the command fails with "command not found" or similar, confirm that bash is used and that the file has execute permission (on Linux you may need chmod +x scripts/ci.sh). + +### 5.3 Quick build of one firmware (simple mode) + +Use ci.sh with the build command to build one firmware from the terminal. + +Example for a development build on esp32dev-all-test: + +```bash +./scripts/ci.sh build esp32dev-all-test --mode dev --verbose +``` + +What this does: + +- Checks that Python, PlatformIO and git are installed. +- Calls ci_build_firmware.sh to run platformio run -e esp32dev-all-test. +- Optionally prepares artifacts if you add deployment options. + +Useful flags: + +- --mode dev or --mode prod: choose development or production mode. For local work, **dev** is usually enough. +- --clean: remove old build files before building. +- --verbose: show more logs from PlatformIO. + +**Check:** + +- The command ends with a "Build Summary" block and "Status: SUCCESS". +- The folder .pio/build/esp32dev-all-test contains firmware.bin. + +If the build fails, read the error line near the bottom. Fix missing tools (see section 2) or wrong environment name, then run the command again. + +### 5.4 Use ci_build_firmware.sh directly (advanced control) + +Sometimes you want to work closer to PlatformIO. + +You can call scripts/ci_build.sh directly when you want more control or when you debug a CI issue: + +```bash +./scripts/ci_build.sh esp32dev-all-test --dev-ota --clean --verbose +``` + +This script: + +- Validates the environment name. +- Sets environment variables for the build (for example OMG_VERSION, OTA flags). +- Runs platformio run for the chosen environment. +- Verifies that firmware.bin and other files exist. + +**Check:** + +- At the end you see a "Build Summary" for that environment. +- The directory .pio/build/esp32dev-all-test contains .bin and .elf files. + +If there are no artifacts, check that PlatformIO is installed and that the environment name matches one from ci_list-env.sh. + +### 5.5 Prepare artifacts for sharing or flashing tools + +After a successful build, you can create a clean set of files ready for release or for use with external flashing tools (for example esptool.py or web uploaders). + +Use scripts/ci_prepare_artifacts.sh through ci_build.sh, or call it directly. + +Example (after a build) with ci_build.sh wrapper: + +```bash +./scripts/ci_build.sh esp32dev-all-test --mode prod --deploy-ready +``` + +Example direct call to ci_prepare_artifacts.sh: + +```bash +./scripts/ci_prepare_artifacts.sh esp32dev-all-test --clean +``` + +This script: + +- Copies firmware.bin, partitions.bin, bootloader.bin and other files from .pio/build/ into the output folder. +- Renames them with the environment name, for example esp32dev-all-test-firmware.bin. +- Creates archives for the libraries used by that environment. + +**Check:** + +- The folder generated/toDeploy (or your custom output) exists. +- It contains files like -firmware.bin and one or more *-libraries.tgz archives. 
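As an illustration of how such prepared files can be used, here is a hedged esptool.py sketch: the chip, serial port, flash offset, and file name are assumptions for a typical ESP32 layout, and this is not the project's official flashing procedure (see the upload documentation for that):

```bash
# Sketch only: write a prepared application image at the usual ESP32 app offset.
# Assumes the bootloader and partition table are already on the device;
# adjust --chip, --port, the offset, and the file name for your board and environment.
esptool.py --chip esp32 --port /dev/ttyUSB0 --baud 460800 write_flash 0x10000 generated/toDeploy/esp32dev-all-test-firmware.bin
```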
+ +If the script says the build directory is missing, run a firmware build first (section 5.3 or 5.4). + +### 5.6 Run QA checks from the terminal + +Before you send a pull request, it is good to check the code style. + +Run: + +```bash +./scripts/ci.sh qa --check +``` + +This uses scripts/ci_qa.sh to run clang-format checks on files under main by default. + +**Check:** + +- The summary says "Status: SUCCESS". + +If there are formatting problems, you can auto-fix them: + +```bash +./scripts/ci.sh qa --fix +``` + +After this, review the changes with git diff, then commit them. + +### 5.7 Run the full pipeline (QA + firmware build + docs) + +For a full local run, similar to CI, use: + +```bash +./scripts/ci.sh all esp32dev-all-test --mode prod +``` + +This will: + +- Run QA checks. +- Build the firmware. +- Build the documentation site in production mode (unless you add --no-site). + +**Check:** + +- At the end you see a "Complete Pipeline Summary" with "Status: SUCCESS". + +If QA fails, fix style issues first. If the build fails, fix code or environment problems, then run again. + +::: tip Next Steps +Once your code builds successfully, check the [Development contributions guide](./development.md) for: +- Code naming rules (ZgatewayXXX, ZsensorYYY, etc.) +- Code style and quality requirements +- How to open a pull request +- Contributing workflow +::: + + + +## 6. Work on the documentation website (from the terminal) + +The documentation website uses VuePress and Node.js. The site source is in the docs folder. + +::: warning Note +This section is **only needed** if you want to **build or preview the site on your own computer**. +If you make a **very small change** in a markdown (.md) file (for example fix a typo or add one sentence), you can: + +- Edit the file in VS Code. +- Push your change and open a pull request. + +In that case it is **not mandatory** to install Node.js, npm, or to run `./scripts/ci.sh site`. The CI on GitHub will build the site for you and show problems if there are any. + +If you want to see your doc changes locally before you push (recommended for bigger edits), then follow the steps below. +::: + +### 6.1 Install Node dependencies + +From the project root: + +```bash +cd OpenMQTTGateway # only if you are not already inside +npm install +``` + +This command reads package.json and installs all Node modules needed for the docs. + +**Check:** + +- The command finishes without error. +- A node_modules folder exists in the project root. + +If install is slow, your internet may be the reason. If it fails, try again later or clear the npm cache with: + +```bash +npm cache clean --force +``` + +### 6.2 Build the docs site with ci.sh + +If you want to check that everything still builds well after bigger doc changes, you can build the site locally. + +The recommended way is: + +```bash +./scripts/ci.sh site --mode prod --url-prefix / --version 1.8.0 +``` + +Important options: + +- `--mode dev` or `--mode prod`: development or production build (default: dev). +- `--url-prefix PATH`: base URL path for links, e.g. `/` for root or `/dev/` for dev (default: /dev/). +- `--version TAG`: version string written into docs/.vuepress/meta.json (default: edge). +- `--preview`: if added, starts a local HTTPS preview server after build. +- `--clean`: remove generated/site folder before build. +- `--insecure-curl`: allow curl to skip TLS verification if needed. + +The script (scripts/ci_site.sh) will: + +- Check that node, npm and openssl are available. 
+- Download a shared configuration file for the site. +- Create docs/.vuepress/meta.json with site info. +- Run npm run docs:build to build the site. + +**Check:** + +- The summary at the end says "Site Build Summary" and "Status: SUCCESS". +- The folder docs/.vuepress/dist exists and contains HTML files. + +If openssl or node is missing, go back to section 2 and install them. + +### 6.3 Preview the docs site locally + +To preview the site in your browser: + +```bash +./scripts/ci.sh site --mode dev --url-prefix /dev/ --version edge --preview +``` + +The script will start a local HTTPS server. + +**Check:** + +- The log prints a line similar to: "Preview server running at https://localhost:8443/dev/". +- Open that URL in your browser. You should see the OpenMQTTGateway documentation. + +To stop the preview, go back to the terminal and press Ctrl+C. + +### 6.4 Work on docs with plain npm commands + +If you prefer not to use the ci.sh wrapper, you can work directly with npm and VuePress. + +From the project root: + +```bash +npm run docs:dev +``` + +This runs VuePress in development mode with hot reload. When you change a .md file under docs, the browser reloads. + +For a production build: + +```bash +npm run docs:build +``` + +**Check:** + +- For docs:dev, the terminal prints a local URL like http://localhost:8080. Open it in the browser and see the docs. +- For docs:build, the folder docs/.vuepress/dist is created. + +If docs:build fails with an OpenSSL error on new Node versions, set: + +```bash +export NODE_OPTIONS="--openssl-legacy-provider" +``` + +then run the command again. The ci_site.sh script already does this for you. + +### 6.5 Where to edit docs + +- All documentation pages are markdown (.md) files under docs. +- This file itself is in docs/participate/quick_start.md. + +To add or edit docs: + +1. Open the docs folder in VS Code. +2. Change or create markdown files. +3. Run npm run docs:dev to check how the page looks. + +**Check:** after a change and a page refresh, your new text appears on the site. + + + +## 7. Typical workflows for contributors + +This section gives some example "day to day" flows. + +### 7.1 Quick firmware change with VS Code + +1. Open the project folder in VS Code. +2. Choose the right environment (section 4.3). +3. Edit code in main or lib. +4. Build (section 4.4). +5. Upload (section 4.5). +6. Watch logs in Serial Monitor (section 4.6). + +**Check:** your change has the expected effect on the real device. + +### 7.2 Firmware change using terminal only + +1. Open a bash terminal. +2. Go to the project root. +3. Build firmware with: + + ```bash + ./scripts/ci.sh build --mode dev --verbose + ``` + +4. Flash firmware using PlatformIO CLI or another flash tool with the generated firmware file. + +**Check:** the device boots with your new firmware and behaves as expected. + +### 7.3 Change documentation and preview + +1. Edit markdown files under docs. +2. In a terminal, run npm run docs:dev or ./scripts/ci.sh site --mode dev --preview. +3. Open the local URL in your browser. + +**Check:** your text appears on the page and looks correct. + + + +## 8. Troubleshooting + +This list gives quick help for common problems. + +- **Problem:** platformio command not found. + - **Fix:** install it with python3 -m pip install -U platformio and open a new terminal. + +- **Problem:** Build fails for all environments. + - **Fix:** run ./scripts/ci.sh list-env to confirm you use a valid env. Check that you did not change platformio.ini or environments.ini in a wrong way. 
+ +- **Problem:** Build fails with missing libraries. + - **Fix:** run a clean build (add --clean). If you work only with VS Code, press the trash icon or use the clean target from PlatformIO. + +- **Problem:** Upload fails ("Failed to connect to ESP32"). + - **Fix:** choose the right serial port, check the cable, try holding BOOT during upload. + +- **Problem:** Serial output is garbled. + - **Fix:** set Serial Monitor baud to 115200 and confirm monitor_speed in platformio.ini is the same. + +- **Problem:** npm install is very slow or fails. + - **Fix:** check your internet connection. If needed run npm cache clean --force and try again. + +- **Problem:** Site build fails with an OpenSSL error. + - **Fix:** export NODE_OPTIONS="--openssl-legacy-provider" before running npm run docs:build, or use ./scripts/ci.sh site which already sets this. + +If you still have problems after these steps, you can open an issue on the project GitHub page. Include: + +- Your operating system (Windows or Linux, version). +- What you tried to do. +- The exact command you ran. +- The error message from the end of the log. + +This information helps maintainers reproduce and fix the problem. + + + +## 9. Glossary (simple words) + +This glossary explains some words used in this guide. + +- **Firmware**: the program that runs inside your ESP32 or ESP8266 board. +- **Repository (repo)**: the project folder stored on GitHub and on your computer. +- **Commit**: a saved set of changes in Git, with a message. +- **Branch**: a line of development in Git, like a separate copy where you work. +- **Pull Request (PR)**: a request to merge your branch into the main project on GitHub. +- **Environment (env)**: a PlatformIO configuration for a specific board and options. +- **CI (Continuous Integration)**: automatic scripts that build and test the project on every change. +- **Serial Monitor**: a window that shows text messages sent by your board over USB. +- **MQTT**: a lightweight network protocol used to send messages between devices. +- **VuePress**: a static site generator used to build this documentation. + +If any word in this guide is not clear, you can search it on the web or ask in the project community. Many people had the same question before you. + + + +## 10. External links and further reading + +Here is a list of useful links related to tools used in this project: + +- **OpenMQTTGateway project**: +- **PlatformIO main site**: +- **PlatformIO installation for VS Code**: +- **PlatformIO documentation**: +- **Visual Studio Code documentation**: +- **Git official site**: +- **Pro Git book (free)**: +- **GitHub getting started**: +- **Node.js documentation**: +- **npm documentation**: +- **VuePress documentation** (v1, similar to what this project uses): +- **MQTT introduction (HiveMQ)**: + +You do **not** need to read all of them now. Keep this list as a bookmark and come back when you are curious or stuck. + + +## 11. Final words and friendly advice + +Contributing to an open source project is a journey. The first steps (install tools, learn Git, learn PlatformIO) can feel slow. This is normal. + +Some final tips: + +- Change one thing at a time and test often. +- Keep your changes small; they are easier to review. +- Write clear commit messages. +- When something breaks, read the last 10–20 lines of the log first. +- Do not be afraid to ask questions; everyone started as a beginner. + +You are now ready to work on both firmware and documentation for OpenMQTTGateway. 
Take your time, follow the checks after each step, and you will build confidence with the toolchain and the project. Step by step, you will become faster and more comfortable. + + +If you are new and want to "play" a bit before you change real logic, here are some safe exercises: + +1. **Practice the build chain only.** + - Clone the repo, open it in VS Code, select `esp32dev-all-test`, build, and upload to your board. + - Check that you can see logs in the Serial Monitor. +2. **Make a tiny log change.** + - Find a `LOG` or `Serial` message in a gateway or sensor file under `main`. + - Change the text slightly (for example add a word), rebuild, upload, and verify you see the new message. +3. **Edit a small doc page.** + - Fix a typo or add one sentence of clarification in a markdown file under `docs`. + - Run `./scripts/ci.sh site --mode prod --preview` and confirm your change appears in the browser. +4. **Run QA locally.** + - Run `./scripts/ci.sh qa --check`. If there are issues, run `./scripts/ci.sh qa --fix` and see how files are changed. +5. **Open a small PR.** + - For example, only the doc change or a very small code improvement. This lets you learn the review process without much stress. + +These small steps build confidence. After that you can move to bigger things: adding new sensors, improving MQTT payloads, or extending the web documentation. diff --git a/docs/participate/support.md b/docs/participate/support.md index 16c718b8f9..0e808a4152 100644 --- a/docs/participate/support.md +++ b/docs/participate/support.md @@ -1,15 +1,22 @@ # Supporting the project -If you like the project and/or used it please consider supporting it! It can be done in different ways: -* Purchase the [Theengs mobile application](https://app.theengs.io) -* Purchase the [Theengs plug](https://shop.theengs.io) -* Helping other users in the [community](https://community.openmqttgateway.com) -* [Contribute](development) to the [code](https://github.com/1technophile/OpenMQTTGateway) or the [documentation](https://docs.openmqttgateway.com) -* Buy devices, boards or parts from the [compatible web site](https://compatible.openmqttgateway.com), the devices and parts linked use affiliated links. -* Donate or sponsor the project [developers](https://github.com/1technophile/OpenMQTTGateway/graphs/contributors) -* Make a video or a blog article about what you have done with [OpenMQTTGateway](https://docs.openmqttgateway.com) and share it. +If you like the project, you can help in different ways. Choose what fits you best. -Support open-source development through sponsorship and gain exclusive access to our private forum. Your questions, issues, and feature requests will receive priority attention, plus you'll gain insider access to our roadmap. +## Give time and knowledge +- Help other users in the [community](https://community.openmqttgateway.com). +- Share logs, payload samples, or how-to posts that unblock others. + +## Contribute code or docs +- Follow the [Development contributions guide](./development.md) to open a PR. +- Improve docs or examples if you spot unclear steps. + +## Financial support +- Purchase the [Theengs mobile application](https://app.theengs.io) or the [Theengs plug](https://shop.theengs.io). +- Buy devices or parts via the [compatible web site](https://compatible.openmqttgateway.com) (affiliate links help the project). +- Sponsor the project [developers](https://github.com/1technophile/OpenMQTTGateway/graphs/contributors). 
+- Make a video or blog article about what you built with [OpenMQTTGateway](https://docs.openmqttgateway.com) and share it. + +Support open-source development through sponsorship and gain exclusive access to our private forum. Your questions, issues, and feature requests receive priority attention, plus you'll see the roadmap early.
diff --git a/docs/prerequisites/board.md b/docs/prerequisites/board.md index 3ece66cbae..71cec487ab 100644 --- a/docs/prerequisites/board.md +++ b/docs/prerequisites/board.md @@ -1,3 +1,7 @@ +--- +pageClass: table-generated-page +--- + # Boards OpenMQTTGateway is not closed to one board or type of board, by using the power of the Arduino framework and libraries that are cross compatibles it let you many choice of hardware, from an ESP8266 to an ESP32. @@ -43,4 +47,10 @@ The plug is available in North America only, other regions are planned. Choosing your board depends heavily on the technologies you want to use with it. To have a good overview of the compatibilities per board you can refer to the compatible modules attributes of each [board](https://compatible.openmqttgateway.com/index.php/boards/). -The choice between these boards will depend on your knowledge and your requirements in terms of reliability, situation, modules wanted and devices you already have. The table below present those (auto-generated) +The choice between these boards will depend on your knowledge and your requirements in terms of reliability, situation, modules wanted and devices you already have. Use the table below to explore the latest environments. + + + diff --git a/docs/prerequisites/boards/.gitignore b/docs/prerequisites/boards/.gitignore deleted file mode 100644 index 86d0cb2726..0000000000 --- a/docs/prerequisites/boards/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -# Ignore everything in this directory -* -# Except this file -!.gitignore \ No newline at end of file diff --git a/docs/setitup/rf.md b/docs/setitup/rf.md index e6f3c7bb53..0c44fdb594 100644 --- a/docs/setitup/rf.md +++ b/docs/setitup/rf.md @@ -51,8 +51,22 @@ With SRX882S receiver connect the CS pin to 3.3V |ESP8266|D2/**D3**/D1/D8|**RX**/D2|D5|**3V3**|D7|D6|D8|GND |ESP32|**D27**|D12|D18|**3V3**|D23|D19|D5|GND -To use the CC1101 module, `ZradioCC1101` must be uncomment in the `User_config.h` or added to the `build_flags`. -More information about the [CC1101 wiring](https://github.com/LSatan/SmartRC-CC1101-Driver-Lib#wiring). ( Please note that with OMG we are recommending CC1101 GDO2 to be connected to ESP32 D27 and GDO0 to be connected to D12, this is different than the LSatan diagram. This is due to the ESP32 using D2 as part of the boot process. ) +To use the CC1101 module, `ZradioCC1101` must be uncommented in `User_config.h` or added to the `build_flags`. + +More information about the [CC1101 wiring](https://github.com/LSatan/SmartRC-CC1101-Driver-Lib#wiring). + + :::tip Please note that with OMG we recommend connecting CC1101 GDO2 to ESP32 D27 and GDO0 to D12; this is different from the LSatan diagram because the ESP32 uses D2 as part of the boot process. + +If you want to use custom SPI pins for the CC1101 module, you can define the following variables in your `User_config.h` or as `build_flags` in `platformio.ini`: + - `RF_CC1101_SCK`: SPI clock pin (SCK) + - `RF_CC1101_MISO`: SPI MISO pin (Master In Slave Out) + - `RF_CC1101_MOSI`: SPI MOSI pin (Master Out Slave In) + - `RF_CC1101_CS`: SPI chip select pin (CSN) + +When **all** these variables are defined, OpenMQTTGateway will use your custom pinout for the CC1101 connection. This is useful if your board does not use the default pins or if you want to avoid conflicts with other devices. 
+::: ## ESP32 Hardware setup ![Addon_RF](../img/OpenMQTTgateway_ESP32_Addon_RF.png) @@ -73,3 +87,5 @@ The RF processing can be achieved after the modification by either RF, RF2 or Pi ## WIFI RF GATEWAY Hardware setup This board doesn't require any hardware modifications. + + diff --git a/docs/up.html b/docs/up.html new file mode 100644 index 0000000000..b88b0c73e6 --- /dev/null +++ b/docs/up.html @@ -0,0 +1,135 @@
+[HTML page; the markup was not preserved in this extract. Visible text: "Squeezelite-ESP32 installer"; "This is a prototype and not up-to-date. For latest installations, see the Squeezelite Web Installer."; "Select your product", followed by a product selection list.]
+ + + + + + \ No newline at end of file diff --git a/docs/upload/ReadMe.md b/docs/upload/ReadMe.md new file mode 100644 index 0000000000..7ec95e217b --- /dev/null +++ b/docs/upload/ReadMe.md @@ -0,0 +1,79 @@ +# Upload + +OpenMQTTGateway provides several installation approaches suited for different use cases. The quickest path uses pre-built binaries that work with standard configurations. For custom parameters or specific gateway module combinations, you can build firmware using your development environment. Alternatively, you can flash devices directly from your web browser without installing any software. + +

[Image: firmware upload process]

+ +## Choosing Your Upload Method + +### Web-Based Installation + +The [web installation](web-install.md) method represents the easiest way to get started with OpenMQTTGateway. You can flash your device directly from your browser without downloading files or installing development tools. This method works on Windows, macOS, and Linux with Chrome, Edge, or Opera browsers. + +The web installer displays available environments for different boards and gateway configurations. Select your board type, connect via USB, and click the connect button. The installer handles everything automatically, including erasing old firmware and writing the new one. The web installer uses [ESP Web Tools](https://esphome.github.io/esp-web-tools/) technology to communicate directly with your ESP device. + +### Ready-to-Go Binary Installation + +[Downloading and installing pre-built binaries](binaries.md) offers control over the flashing process using desktop tools. Download the binary files for your board from the [GitHub releases page](https://github.com/1technophile/OpenMQTTGateway/releases). + +For ESP32 devices, you need the firmware binary, bootloader, and boot application partition files written to specific memory addresses. Windows users can use the ESP32 Flash Download Tool from Espressif. On Linux and macOS, the esptool.py command-line utility provides a straightforward upload method. + +This method works well for standard configurations without modifying source code. After flashing, you can still configure WiFi, MQTT broker settings, and basic parameters through the configuration portal. + +### Custom Build and Upload + +[Building from source](builds.md) becomes necessary when you need specific pin assignments, custom MQTT topics, or particular gateway module combinations not available in pre-built binaries. + +[PlatformIO](https://platformio.org/) provides the recommended build environment. After downloading the [source code from GitHub](https://github.com/1technophile/OpenMQTTGateway), you will find a `platformio.ini` file defining build environments for various hardware combinations. + +The configuration system uses a layering approach where default values from [`User_config.h`](https://github.com/1technophile/OpenMQTTGateway/blob/development/main/User_config.h) and [`config_XX.h`](https://github.com/1technophile/OpenMQTTGateway/tree/development/main) files can be overridden by build flags in `platformio.ini` or `environments.ini`. You can embed WiFi credentials and MQTT settings at build time for automatic connection on first boot. + +### Browser-Based Building with Gitpod + +For those who want to build custom firmware without setting up a local development environment, [Gitpod](gitpod.md) offers a cloud-based solution. By clicking on the [Gitpod link](https://gitpod.io#https://github.com/1technophile/OpenMQTTGateway/tree/development), you get a complete development environment running in your browser with PlatformIO already installed and configured. + +After the automatic initial build completes, modify the environment configuration by editing `environments.ini` and run build commands in the browser terminal. Download the generated firmware files and flash them using the binary installation method. + +## Configuring Network and MQTT Settings + +After flashing firmware, configure network connectivity and MQTT broker settings using either runtime or build-time approaches. 
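To make the build-time path concrete (see the Build-Time Configuration section below), here is a minimal sketch of credentials injected as build flags; the values are placeholders and mirror the commented-out examples in `environments.ini`:

```ini
; Sketch only: placeholder WiFi/MQTT settings embedded at build time.
; Add these lines to the build_flags of the environment you build.
build_flags =
  '-Dwifi_ssid="MY_SSID"'
  '-Dwifi_password="MY_PASSWORD"'
  '-DMQTT_SERVER="192.168.1.10"'
```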
+ +### Runtime Configuration Portal + +When you power on a freshly flashed device, it creates a WiFi access point named OpenMQTTGateway or starting with OMG_. You can find detailed information about the [configuration portal here](portal.md). + +Connecting to this access point opens a portal where you configure your WiFi network, MQTT broker details, and optional security settings including TLS encryption and certificates. For devices with Ethernet, access the portal through the LAN IP address and configure WiFi as fallback connectivity. + +The portal accepts broker IP addresses or hostnames with mDNS support like homeassistant.local. Set a gateway password to protect future configuration changes, OTA updates, and web interface access. + +### Build-Time Configuration + +Alternatively, embed network and MQTT settings directly in firmware during the build process. Set parameters in `User_config.h` or add them as build flags in your PlatformIO environment definition. Store sensitive information in a separate `_env.ini` file excluded from version control. + +## Advanced Configuration Options + +Beyond basic connectivity, OpenMQTTGateway supports several [advanced features](advanced-configuration.md) that enhance security and integration capabilities. + +### Secure MQTT Connections + +For deployments over the internet or public networks, enable TLS encryption to secure communication between the gateway and MQTT broker. Configure your broker with a valid certificate and obtain the Certificate Authority certificate. The gateway can verify server identity against this certificate or connect with encryption without validation. + +Provide the CA certificate at build time in `default_server_cert.h` or paste it into the configuration portal. The gateway supports both self-signed certificates and those from public certificate authorities. + +### Home Assistant Auto-Discovery + +When you use [Home Assistant](https://www.home-assistant.io/) as your home automation platform, OpenMQTTGateway automatically creates device entries and sensors through Home Assistant's MQTT discovery protocol, enabled by default in all standard builds. + +Enable discovery in your Home Assistant MQTT integration settings and create a dedicated MQTT user. The gateway registers itself as a device and creates sensor entities automatically, appearing in Configuration → Devices section. + +### Topic Customization + +The gateway publishes messages to MQTT topics following the format `home/OpenMQTTGateway/GATEWAYtoMQTT`. Enable the `valueAsATopic` feature to append received values to the topic path, making topic-based filtering easier and avoiding warnings in certain controllers. + +## Next Steps + +After successfully uploading firmware and configuring your gateway, you can proceed to configure specific gateway modules for [RF](../setitup/rf.md), [IR](../setitup/ir.md), [Bluetooth](../setitup/ble.md), [LoRa](../setitup/lora.md), or other protocols you want to use. Each module has configuration options adjustable through MQTT commands or the web interface without rebuilding firmware. + +The [troubleshooting section](troubleshoot.md) covers common issues, but if you encounter problems not addressed here, the [OpenMQTTGateway community forum](https://community.openmqttgateway.com) provides an active place to ask questions and share solutions with other users. 
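Expanding on the separate `_env.ini` file mentioned under Build-Time Configuration above: one approach is to keep the sensitive flags in their own ini file, excluded from version control, and pull it in from `platformio.ini` via PlatformIO's `extra_configs`. The file name and environment below are hypothetical sketches, not the project's prescribed layout:

```ini
; my_secrets_env.ini (hypothetical, git-ignored): credentials kept out of environments.ini.
; The extended environment name is an assumption; reuse whichever environment you build.
[env:my-gateway]
extends = env:esp32dev-ble
build_flags =
  ${env:esp32dev-ble.build_flags}
  '-Dwifi_ssid="MY_SSID"'
  '-Dwifi_password="MY_PASSWORD"'
```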
diff --git a/docs/upload/board-selector.md b/docs/upload/board-selector.md new file mode 100644 index 0000000000..3c498cbaa8 --- /dev/null +++ b/docs/upload/board-selector.md @@ -0,0 +1,10 @@ +--- +pageClass: table-generated-page +--- + + + + + \ No newline at end of file diff --git a/docs/upload/web-install.md b/docs/upload/web-install.md index aeb494f6c4..3547da575e 100644 --- a/docs/upload/web-install.md +++ b/docs/upload/web-install.md @@ -2,7 +2,14 @@ pageClass: table-generated-page --- -# (Option 1) Upload from the web +## Select your firmware +To upload firmware to your ESP device directly from this page, first connect your ESP device to a USB port on your computer. Then, choose the appropriate firmware from the available options. Next, click the **Connect** button and select the USB port where your ESP is plugged in. Wait for the upload process to finish. After the upload completes, you can set up your [WiFi and MQTT credentials](portal.md). + + + ::: tip Running on a tablet or phone If you want to use the BLE decoding capabilities of OpenMQTTGateway with a tablet or smartphone you can use [Theengs App](https://app.theengs.io/). @@ -15,16 +22,8 @@ The correct driver to then select in the popup of this web install is `/dev/cu.wchusbserialXXXXXXXXXXX` ::: -You can upload the firmware to your ESP device directly from here. -1. Plug in your ESP to a USB port. -2. Select the firmware in the box below. -3. Click the install button and choose the port that the ESP is connected to. -4. Wait until the process is complete. -5. Once completed you can configure your [WiFi and MQTT credentials](portal.md) - -Upload powered by [ESP Web Tools](https://esphome.github.io/esp-web-tools/) ## Using OpenMQTTGateway ? Support open-source development through sponsorship and gain exclusive access to our private forum. Your questions, issues, and feature requests will receive priority attention, plus you'll gain insider access to our roadmap. @@ -33,5 +32,4 @@ Support open-source development through sponsorship and gain exclusive access to
-## Environments characteristics -The auto-generated table below describes the libraries and the modules of each board configuration. + diff --git a/docsgen/boards-info.js b/docsgen/boards-info.js new file mode 100644 index 0000000000..14343d970b --- /dev/null +++ b/docsgen/boards-info.js @@ -0,0 +1,232 @@ +'use strict'; +/** + * Universal parser for PlatformIO dependencies. + * Formats URLs and registry strings into a consistent "Registry Style": + * Name @ Version (provider:user) + */ +function smartFormat(dep) { + if (!dep) return ""; + if (typeof dep !== 'string') return dep; + + const cleanDep = dep.trim(); + + // Configuration for Git providers with specific regex for archives, releases, and git repos + const providers = [ + { + id: 'gh', + name: 'github', + // Captures: 1. Author, 2. Repo, 3. Version from path (releases), 4. Version from filename/branch + regex: /github\.com\/([^/]+)\/([^/.]+)(?:\/(?:archive|releases\/download\/([^/]+)|tree)\/)?([^/]+)?(?:\.zip|\.git)?$/i + }, + { + id: 'gl', + name: 'gitlab', + // Captures: 1. Author, 2. Repo, 3. Version from path, 4. Version from filename + regex: /gitlab\.com\/([^/]+)\/([^/.]+)(?:\/(?:-\/)?(?:archive|releases)\/([^/]+))?\/([^/]+)?(?:\.zip|\.git)?$/i + }, + { + id: 'bb', + name: 'bitbucket', + // Captures: 1. Author, 2. Repo, 3. Version from path, 4. Version from filename + regex: /bitbucket\.org\/([^/]+)\/([^/.]+)(?:\/(?:get|downloads)\/([^/]+))?\/([^/]+)?(?:\.zip|\.git)?$/i + } + ]; + + // 1. Try to match against Git providers (GitHub, GitLab, Bitbucket) + for (const p of providers) { + const match = cleanDep.match(p.regex); + if (match) { + let [_, author, repo, pathVer, fileVer] = match; + + // Prioritize version from path (typical in releases) over filename + let version = pathVer || fileVer || "latest"; + + // Clean up version string: remove extensions and 'v' prefix + version = version + .replace(/\.(zip|git|tar\.gz)$/i, '') + .replace(/^v(\d)/i, '$1'); // Removes 'v' only if followed by a number + + // Avoid redundancy if the version string is identical to the repo name + if (version.toLowerCase() === repo.toLowerCase()) { + version = "latest"; + } + + return `${repo} @ ${version} (${p.id}:${author})`; + } + } + + // 2. Fallback for Standard PlatformIO Registry format (e.g., owner/lib @ ^1.0.0) + if (cleanDep.includes('/') || cleanDep.includes('@')) { + const parts = cleanDep.split('@'); + const fullName = parts[0].trim(); // Includes owner/name + + // Clean up version if present + let version = "latest"; + if (parts[1]) { + version = parts[1].trim().replace(/^[\^~=]/, ''); + } + + // Separate owner and library name for consistent formatting + if (fullName.includes('/')) { + const [owner, libName] = fullName.split('/'); + return `${libName.trim()} @ ${version} (pio:${owner.trim()})`; + } + + return `${fullName} @ ${version}`; + } + + // 3. 
Return original string if no patterns match + return cleanDep; +} + +function rowConfigFromPlatformIO() { + const { execSync } = require('child_process'); + + try { + const jsonConfig = execSync('pio project config --json-output').toString(); + const config = JSON.parse(jsonConfig); + return config; + } catch (error) { + console.error("Make sure PlatformIO Core is installed and available in the PATH"); + throw error; + } +} + +function cleanValue(v) { + if (typeof v !== 'string') return v; + return v + .replace(/{/g, '') + .replace(/}/g, '') + .replace(/\$/g, '') + .replace(/env:/g, '') + .replace(/'/g, '') + .replace(/-D/g, ''); +} + +function convertJsonToSections(jsonConfig) { + const sections = {}; + jsonConfig.forEach(([sectionName, configArray]) => { + sections[sectionName] = {}; + configArray.forEach(([key, value]) => { + sections[sectionName][key] = value; + }); + }); + return sections; +} + +function cleanLibraries(raw) { + if (!raw) return []; + if (typeof raw === 'string') { + raw = raw.split(','); + } + return raw.map((dep) => smartFormat(dep)); +} + +function extractModulesFromFlags(flags) { + if (!flags) return []; + let flagArray = []; + if (Array.isArray(flags)) { + flagArray = flags; + } else if (typeof flags === 'string') { + flagArray = flags.split(',').map(s => s.trim()).filter(s => s.length > 0); + } else { + return []; + } + const modules = []; + flagArray.forEach((flag) => { + // Match -DZmoduleName, allowing surrounding quotes + const match = flag.match(/^['" ]*-DZ([^=]+)/); + if (match) { + const moduleName = match[1]; + // Additional constraint: must contain 'gateway', 'sensor', or 'actuator' + if (moduleName.includes('gateway') || moduleName.includes('sensor') || moduleName.includes('actuator')) { + modules.push(moduleName); + } + } + // If MQTT_BROKER_MODE is defined and not explicitly set to false, report it as a module + const brokerMatch = flag.match(/^['" ]*-DMQTT_BROKER_MODE(?:=([^'"\s]+))?/); + if (brokerMatch) { + const value = brokerMatch[1]; + // Add only if not explicitly set to false (case insensitive) + if (!value || value.toLowerCase() !== 'false') { + modules.push("MQTT Broker Mode"); + } + } + + }); + return modules; +} + +function collectBoardsInformations(sections, { includeTests = false } = {}) { + const rows = []; + + Object.entries(sections).forEach(([section, items]) => { + if (!section.includes('env:')) return; + if (!includeTests && section.includes('-test')) return; + + const env = section.replace('env:', ''); + let uc = ''; + let hardware = ''; + let description = ''; + let modules = []; + let platform = ''; + let partitions = ''; + let libraries = []; + let options = []; + let customImg = ''; + + Object.entries(items).forEach(([k, raw]) => { + const v = cleanValue(raw); + + + if (k === 'board') uc = v; + if (k === 'platform') platform = smartFormat(v); + if (k === 'board_build.partitions') partitions = v; + if (k === 'custom_description') description = v; + if (k === 'custom_hardware') hardware = v; + if (k === 'custom_img') customImg = v; + + if (k === 'lib_deps') { + libraries = cleanLibraries(raw); + } + + if (k === 'build_flags') { + options = v; + modules = extractModulesFromFlags(v); + } + }); + + rows.push({ + Environment: env, + uC: uc, + Hardware: hardware, + Description: description, + Modules: modules, + Platform: platform, + Partitions: partitions, + Libraries: libraries, + Options: options, + CustomImg: customImg + }); + }); + + rows.sort((a, b) => a.Environment.localeCompare(b.Environment, 'en', { sensitivity: 'base' })); + return rows; +} + +function 
loadBoardsInfo(options = {}) { + const { includeTests = false } = options; + const config = rowConfigFromPlatformIO(); + const sections = convertJsonToSections(config); + return collectBoardsInformations(sections, { includeTests }); +} + +function ensureDir(dir) { + const fs = require('fs'); + if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true }); +} + +module.exports = { + loadBoardsInfo, + ensureDir +}; diff --git a/docsgen/common_wu.js b/docsgen/common_wu.js new file mode 100644 index 0000000000..bccc3078d5 --- /dev/null +++ b/docsgen/common_wu.js @@ -0,0 +1,77 @@ +// Common templates and constants for web installer manifest generation +// Used by: scripts/gen_wu.js + +const mf_temp32 = (vars) => `{ + "name": "OpenMQTTGateway", + "new_install_prompt_erase": true, + "builds": [ + { + "chipFamily": "ESP32", + "improv": false, + "parts": [ + { "path": "${vars.cp}${vars.bl}", "offset": 4096 }, + { "path": "${vars.cp}${vars.part}", "offset": 32768 }, + { "path": "${vars.cp}${vars.boot}", "offset": 57344 }, + { "path": "${vars.cp}${vars.bin}", "offset": 65536 } + ] + } + ] +}`; + +const mf_temp32c3 = (vars) => `{ + "name": "OpenMQTTGateway", + "new_install_prompt_erase": true, + "builds": [ + { + "chipFamily": "ESP32-C3", + "improv": false, + "parts": [ + { "path": "${vars.cp}${vars.bl}", "offset": 0 }, + { "path": "${vars.cp}${vars.part}", "offset": 32768 }, + { "path": "${vars.cp}${vars.boot}", "offset": 57344 }, + { "path": "${vars.cp}${vars.bin}", "offset": 65536 } + ] + } + ] +}`; + +const mf_temp32s3 = (vars) => `{ + "name": "OpenMQTTGateway", + "new_install_prompt_erase": true, + "builds": [ + { + "chipFamily": "ESP32-S3", + "improv": false, + "parts": [ + { "path": "${vars.cp}${vars.bl}", "offset": 0 }, + { "path": "${vars.cp}${vars.part}", "offset": 32768 }, + { "path": "${vars.cp}${vars.boot}", "offset": 57344 }, + { "path": "${vars.cp}${vars.bin}", "offset": 65536 } + ] + } + ] +}`; + +const mf_temp8266 = (vars) => `{ + "name": "OpenMQTTGateway", + "new_install_prompt_erase": true, + "builds": [ + { + "chipFamily": "ESP8266", + "parts": [{ "path": "${vars.cp}${vars.bin}", "offset": 0 }] + } + ] +}`; + + +const cors_proxy = ''; // 'https://cors.bridged.cc/' +const esp32_boot = 'https://github.com/espressif/arduino-esp32/raw/2.0.7/tools/partitions/boot_app0.bin'; + +module.exports = { + mf_temp32, + mf_temp32c3, + mf_temp32s3, + mf_temp8266, + cors_proxy, + esp32_boot +}; diff --git a/docsgen/gen_wu.js b/docsgen/gen_wu.js new file mode 100644 index 0000000000..5c5ed63071 --- /dev/null +++ b/docsgen/gen_wu.js @@ -0,0 +1,349 @@ +#!/usr/bin/env node + +// Creates web installer manifests for ESP Web Tools firmware installation + +const fs = require('fs'); +const path = require('path'); +const https = require('https'); +const { loadBoardsInfo, ensureDir } = require('./boards-info'); + +const { + mf_temp32, + mf_temp32c3, + mf_temp32s3, + mf_temp8266, + cors_proxy, + esp32_boot +} = require('./common_wu.js'); + +// ============================================================================ +// Directory Configuration +// ============================================================================ + +// Base directories +const ROOT_DIR = path.join(__dirname, '..'); +const DOCS_DIR = path.join(ROOT_DIR, 'docs'); +const VUEPRESS_DIR = path.join(DOCS_DIR, '.vuepress'); +const PUBLIC_DIR = path.join(VUEPRESS_DIR, 'public'); +const COMPONENTS_DIR = path.join(VUEPRESS_DIR, 'components'); +const ARTIFACTS_DIR = path.join(ROOT_DIR, 'generated', 'artifacts'); + +// Feature-specific 
directories +const FIRMWARE_SRC_DIR = path.join(ARTIFACTS_DIR, 'firmware_build'); +const FIRMWARE_BUILD_DIR = path.join(PUBLIC_DIR, 'firmware_build'); +const BOARDS_INFO_FILE = path.join(PUBLIC_DIR, 'boards-info.json'); + +// Configuration files +const DEFAULTS_CONFIG_PATH = path.join(VUEPRESS_DIR, 'defaults.json'); +const META_CONFIG_PATH = path.join(VUEPRESS_DIR, 'meta.json'); + + + +let meta = require(DEFAULTS_CONFIG_PATH); +try { + const meta_overload = require(META_CONFIG_PATH); + meta = { ...meta, ...meta_overload }; +} catch (e) { + console.warn('meta.json not found or not valid. Using default configuration.'); +} + +// Parse command line arguments +const args = process.argv.slice(2); +const dev = args.includes('--dev') || meta.mode === 'dev'; +const repo = meta.repo || '1technophile/OpenMQTTGateway'; +const firmwareManifestFolder = dev ? `/dev/firmware_build/` : `/firmware_build/`; + +// ============================================================================ +// Utility Functions +// ============================================================================ + +function ensureFirmwareArtifacts() { + if (!fs.existsSync(FIRMWARE_SRC_DIR)) { + throw new Error(`Missing firmware artifacts in ${FIRMWARE_SRC_DIR}. Run "ci.sh build ..." first to populate this folder.`); + } + console.log(`Found firmware artifacts in: ${FIRMWARE_SRC_DIR}`); +} +// Replace version_tag in template and write to destination +function renderVersionTemplate(templatePath, outputPath, version) { + if (!fs.existsSync(templatePath)) { + throw new Error(`Template not found: ${templatePath}`); + } + const content = fs.readFileSync(templatePath, 'utf8').replace(/version_tag/g, version); + fs.writeFileSync(outputPath, content); + console.log(`Generated version file from template: ${outputPath}`); +} + +/** + * Download file from URL + */ +function downloadFile(url) { + return new Promise((resolve, reject) => { + https.get(url, (response) => { + // Handle redirects + if (response.statusCode === 302 || response.statusCode === 301) { + return downloadFile(response.headers.location).then(resolve).catch(reject); + } + if (response.statusCode !== 200) { + return reject(new Error(`Failed to download: ${response.statusCode}`)); + } + const chunks = []; + response.on('data', (chunk) => chunks.push(chunk)); + response.on('end', () => resolve(Buffer.concat(chunks))); + response.on('error', reject); + }).on('error', reject); + }); +} + +/** + * Fetch JSON from URL + */ +function fetchJson(url) { + return new Promise((resolve, reject) => { + https.get(url, { headers: { 'User-Agent': 'OpenMQTTGateway-Script' } }, (response) => { + if (response.statusCode !== 200) { + return reject(new Error(`Failed to fetch: ${response.statusCode}`)); + } + let data = ''; + response.on('data', (chunk) => data += chunk); + response.on('end', () => { + try { + resolve(JSON.parse(data)); + } catch (e) { + reject(e); + } + }); + response.on('error', reject); + }).on('error', reject); + }); +} + +/** + * Download and save asset + */ +async function downloadAsset(asset, destPath) { + const buffer = await downloadFile(asset.browser_download_url); + const filename = asset.browser_download_url.split('/').pop(); + fs.writeFileSync(path.join(destPath, filename), buffer); + console.log(`Downloaded asset: ${filename} to ${destPath}`); +} + +/** + * Create manifest and Vue option for a firmware + * Partition path uses filename only (matches Python; split is redundant but harmless) + */ +function createManifest(name, templateFn) { + const fw = 
name.split('-firmware')[0]; + const man_file = fw + '.manifest.json'; + const fwp_name = fw + '-partitions.bin'; + const fwb_name = fw + '-bootloader.bin'; + + // Use filename to mirror Python behavior (no directories present today) + const partPath = fwp_name.split('/').pop(); + + const mani_str = templateFn({ + cp: cors_proxy, + part: firmwareManifestFolder + partPath, + bin: firmwareManifestFolder + name, + bl: firmwareManifestFolder + fwb_name, + boot: firmwareManifestFolder + esp32_boot.split('/').pop() + }); + + const outPath = path.join(FIRMWARE_BUILD_DIR, man_file); + fs.writeFileSync(outPath, mani_str); + console.log(`Created manifest for ${fw}: ${outPath}`); +} + +/** + * Create manifest for ESP8266 + * Python adds manif_folder when writing to file, not in return + */ +function createManifest8266(name) { + const fw = name.split('-firmware')[0]; + const man_file = fw + '.manifest.json'; + + const mani_str = mf_temp8266({ + cp: cors_proxy, + bin: firmwareManifestFolder + name + }); + + const outPath = path.join(FIRMWARE_BUILD_DIR, man_file); + fs.writeFileSync(outPath, mani_str); + console.log(`Created manifest for ${fw} (ESP8266): ${outPath}`); + +} + +/** + * Device type matchers + */ +const ESP32_NAMES = ['esp32', 'ttgo', 'heltec', 'thingpulse', 'theengs', 'lilygo', 'shelly', 'tinypico']; +const ESP8266_NAMES = ['nodemcu', 'sonoff', 'rf-wifi-gateway', 'manual-wifi-test', 'rfbridge']; + +const deviceMatchers = { + esp32: (name) => name.includes('firmware.bin') && + !name.includes('esp32c3') && !name.includes('esp32s3') && + ESP32_NAMES.some(key => name.includes(key)), + + esp32c3: (name) => name.includes('firmware.bin') && name.includes('esp32c3'), + + esp32s3: (name) => name.includes('firmware.bin') && name.includes('esp32s3'), + + esp8266: (name) => name.includes('firmware.bin') && + ESP8266_NAMES.some(key => name.includes(key)) +}; + +/** + * Setup dev environment + */ +async function setupDevEnvironment() { + console.log('DEV mode: preparing web upload files...'); + ensureFirmwareArtifacts(); + // Generate OTA latest version definition from template + const tpl = path.join(__dirname, 'latest_version_dev.json.tpl'); + renderVersionTemplate(tpl, path.join(FIRMWARE_BUILD_DIR, 'latest_version_dev.json'), meta.version); + + // Copy the binaries from FIRMWARE_SRC_DIR to FIRMWARE_BUILD_DIR + const files = fs.readdirSync(FIRMWARE_SRC_DIR); + let copied = 0; + for (const name of files) { + if (name.includes('.bin')) { + fs.copyFileSync( + path.join(FIRMWARE_SRC_DIR, name), + path.join(FIRMWARE_BUILD_DIR, name) + ); + copied++; + console.log(`Copied binary: ${name}`); + } + } + console.log(`Copied ${copied} firmware binaries to ${FIRMWARE_BUILD_DIR}`); +} + +/** + * Setup release environment + */ +async function setupReleaseEnvironment() { + console.log('RELEASE mode: downloading and preparing web upload files...'); + + // Generate OTA latest version definition from template + const tpl = path.join(__dirname, 'latest_version.json.tpl'); + renderVersionTemplate(tpl, path.join(FIRMWARE_BUILD_DIR, 'latest_version.json'), meta.version); + + const releaseUrl = `https://api.github.com/repos/${repo}/releases/latest`; + console.log(`Fetching latest release info from: ${releaseUrl}`); + const rel_data = await fetchJson(releaseUrl); + + if (!rel_data.assets) { + console.error('No assets found in the latest release!'); + process.exit(1); + } + + // Download all assets + let downloaded = 0; + for (const asset of rel_data.assets) { + const name = asset.name; + if (name.includes('firmware.bin') || + 
name.includes('partitions.bin') || + name.includes('bootloader.bin')) { + await downloadAsset(asset, FIRMWARE_BUILD_DIR); + downloaded++; + } + } + console.log(`Downloaded ${downloaded} firmware assets to ${FIRMWARE_BUILD_DIR}`); +} + +/** + * Process firmware files and generate manifests + */ +function processFirmwareFiles(files) { + let manifestCount = 0; + for (const name of files) { + if (deviceMatchers.esp32(name)) { + createManifest(name, mf_temp32); + manifestCount++; + } + if (deviceMatchers.esp32c3(name)) { + createManifest(name, mf_temp32c3); + manifestCount++; + } + if (deviceMatchers.esp32s3(name)) { + createManifest(name, mf_temp32s3); + manifestCount++; + } + if (deviceMatchers.esp8266(name)) { + createManifest8266(name); + manifestCount++; + } + } + console.log(`Generated ${manifestCount} manifest files in ${FIRMWARE_BUILD_DIR}`); + +} + +/** + * Main execution function + */ +// ===================== OpenMQTTGateway Web Uploader Manifest Generator ===================== +// ===================== MAIN SCRIPT STARTS HERE ===================== +async function main() { + console.log('================================================================================'); + console.log(' OpenMQTTGateway Web Uploader Manifest Generator - START'); + console.log('================================================================================'); + + // === [1] Load and generate boards info === + console.log('\n[1/4] Generating boards-info.json ...'); + const boardsInfo = loadBoardsInfo({ verbose: 0 }); + const boardsJson = boardsInfo.map((row) => ({ + environment: row.Environment, + hardware: row.Hardware, + description: row.Description, + microcontroller: row.uC, + modules: row.Modules.filter(Boolean), + platform: row.Platform, + partitions: row.Partitions, + libraries: row.Libraries.filter(Boolean), + options: row.Options, + customImg: row.CustomImg + })); + ensureDir(path.dirname(BOARDS_INFO_FILE)); + fs.writeFileSync(BOARDS_INFO_FILE, JSON.stringify(boardsJson, null, 2), 'utf8'); + console.log(`Generated boards-info.json with ${boardsJson.length} boards: ${BOARDS_INFO_FILE}`); + + // === [2] Ensure output directory === + console.log('\n[2/4] Ensuring output directory ...'); + ensureDir(FIRMWARE_BUILD_DIR); + console.log(`Ensured output directory exists: ${FIRMWARE_BUILD_DIR}`); + + // === [3] Setup environment (dev or release) === + console.log('\n[3/4] Preparing firmware files ...'); + try { + if (dev) { + await setupDevEnvironment(); + } else { + await setupReleaseEnvironment(); + } + } catch (error) { + console.error(`Error setting up environment: ${error.message}`); + process.exit(1); + } + + // === [4] Download boot binary and generate manifests === + console.log('\n[4/4] Downloading boot binary and generating manifests ...'); + console.log(`Downloading boot binary: ${esp32_boot}`); + const boot_bin = await downloadFile(esp32_boot); + const boot_filename = esp32_boot.split('/').pop(); + fs.writeFileSync(path.join(FIRMWARE_BUILD_DIR, boot_filename), boot_bin); + console.log(`Saved boot binary as: ${boot_filename}`); + + const files = fs.readdirSync(FIRMWARE_BUILD_DIR).sort(); + console.log(`Processing firmware files in ${FIRMWARE_BUILD_DIR}...`); + processFirmwareFiles(files); + + console.log('\n================================================================================'); + console.log(' OpenMQTTGateway Web Uploader Manifest Generator - END'); + console.log('================================================================================'); +} +// ===================== MAIN 
SCRIPT ENDS HERE ===================== + +// Run main function +main().catch(error => { + console.error('Fatal error:', error); + process.exit(1); +}); diff --git a/scripts/latest_version.json b/docsgen/latest_version.json.tpl similarity index 100% rename from scripts/latest_version.json rename to docsgen/latest_version.json.tpl diff --git a/scripts/latest_version_dev.json b/docsgen/latest_version_dev.json.tpl similarity index 100% rename from scripts/latest_version_dev.json rename to docsgen/latest_version_dev.json.tpl diff --git a/docsgen/up.html b/docsgen/up.html new file mode 100644 index 0000000000..b88b0c73e6 --- /dev/null +++ b/docsgen/up.html @@ -0,0 +1,135 @@ + + + + + + Squeezelite-ESP32 installer + + + + + + + + +
[docsgen/up.html: identical new file to docs/up.html above (the same "Squeezelite-ESP32 installer" prototype page with its "Select your product" selector); the HTML markup is not reproduced here.]
+ + + + + + \ No newline at end of file diff --git a/environments.ini b/environments.ini index a849a34397..d464ff6cb2 100644 --- a/environments.ini +++ b/environments.ini @@ -46,7 +46,7 @@ build_flags = board_build.flash_mode = dout board_build.ldscript = eagle.flash.1m64.ld ;this frees more space for firmware uplad via OTA. ;extra_scripts = scripts/compressFirmware.py ;uncomment this to compress the firmware. This helps updating e.g. Sonoff RF Bridge via OTA flash by saving space for the uploaded firmware. -custom_description = RF gateway for the Sonoff RF Bridge requiring direct hack, relying on ESPilight library, [tutorial](https://1technophile.blogspot.com/2019/04/sonoff-rf-bridge-pilight-or-how-to.html). +custom_description = 'RF gateway for the Sonoff RF Bridge requiring direct hack, relying on ESPilight library, tutorial.' custom_hardware = RFBridge v1 [env:theengs-bridge] @@ -84,7 +84,7 @@ build_flags = '-DGATEWAY_MODEL="TBRIDGE01"' ;'-DBOARD_HAS_PSRAM' ;'-mfix-esp32-psram-cache-issue' -custom_description = BLE gateway with external antenna and Ethernet/WiFi connectivity, [user guide](https://tbridge01.theengs.io/) +custom_description = 'BLE gateway with external antenna and Ethernet/WiFi connectivity, user guide' custom_hardware = Theengs Bridge gateway ethernet [env:theengs-bridge-v11] @@ -123,8 +123,9 @@ build_flags = ;'-DBOARD_HAS_PSRAM' ;'-DSELF_TEST=true' ;'-mfix-esp32-psram-cache-issue' -custom_description = BLE gateway with external antenna and Ethernet/WiFi connectivity, [user guide](https://tbridge02.theengs.io/) +custom_description = 'BLE gateway with external antenna and Ethernet/WiFi connectivity, user guide' custom_hardware = Theengs Bridge gateway ethernet +custom_img = img/Theengs-Bridge-ble-gateway.png ; DISCLAIMER: This is the default environment for Theengs Plug. ; Any modifications to this configuration are not covered by warranty. 
@@ -181,8 +182,9 @@ build_flags = '-DGATEWAY_MANUFACTURER="Theengs"' '-DGATEWAY_MODEL="TPLUG01"' '-UZwebUI="WebUI"' ; Disable WebUI -custom_description = Smart Plug, BLE Gateway and energy monitoring, [user guide](https://tplug01.theengs.io/) +custom_description = 'Smart Plug, BLE Gateway and energy monitoring, user guide' custom_hardware = Theengs Plug +custom_img = img/Theengs-Plug-OpenMQTTGateway.png [env:esp32dev-all-test] platform = ${com.esp32_platform} @@ -517,7 +519,7 @@ build_flags = ; '-DMQTT_SERVER="192.168.1.17"' ; '-Dwifi_ssid="WIFI_SSID"' ; '-Dwifi_password="WIFI_PASSWORD"' -custom_description = Suitable for low power with BLE gateway, [tutorial](https://1technophile.blogspot.com/2021/04/low-power-esp32-ble-gateway.html) +custom_description = 'Suitable for low power with BLE gateway, tutorial' custom_hardware = LOLIN 32 Lite [env:esp32-olimex-gtw-ble-eth] @@ -1581,7 +1583,8 @@ build_flags = '-DGateway_Name="OMG_AVATTO_IR"' board_build.flash_mode = dout custom_description = IR gateway bi directional -custom_hardware = Avatto Bakey IR first version, [tutorial](https://1technophile.blogspot.com/2020/07/avatto-s06-ir-gateway-compatible-with.html) +custom_hardware = 'Avatto Bakey IR first version,tutorial' +custom_img = https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEiWdLlg_I1Fxg8PKa0LNCwj3fDtSCVn50Zqima9QZvJfPpIyr3Rh7cvg_WPzRkZpzP4_Tu9inXfo3e6CbLLLpZzo5yGOUX_zFT0CnopCtyuEgJyHHJLP8ctfm1UyHP3KZJGzRbZul4F9JBX/ [env:nodemcuv2-rf] platform = ${com.esp8266_platform} @@ -1664,7 +1667,7 @@ build_flags = '-DRF_RECEIVER_GPIO=5' board_build.flash_mode = dout custom_description = RF gateway for USB stick using RCSwitch -custom_hardware = RF Wifi USB stick, [tutorial](https://1technophile.blogspot.com/2019/09/hack-of-rf-wifi-gateway-usb-stick.html) +custom_hardware = 'RF Wifi USB stick, tutorial' [env:nodemcuv2-rf2] platform = ${com.esp8266_platform} @@ -1773,8 +1776,8 @@ build_flags = '-DZgatewayRF="RF"' '-DGateway_Name="OMG_SONOFF_BASIC_RFR3"' board_build.flash_mode = dout -custom_description = Wifi relay and RF receiver using RCSwitch library, [tutorial](https://1technophile.blogspot.com/2019/08/new-sonoff-rfr3-as-433tomqtt-gateway.html) -custom_hardware = Sonoff Basic RFR3, [tutorial](https://1technophile.blogspot.com/2019/08/new-sonoff-rfr3-as-433tomqtt-gateway.html) +custom_description = 'Wifi relay and RF receiver using RCSwitch library, tutorial' +custom_hardware = 'Sonoff Basic RFR3, tutorial' [env:esp32dev-ble-datatest] platform = ${com.esp32_platform} diff --git a/main/TheengsCommon.h b/main/TheengsCommon.h index 5b53addeee..44b9276bf4 100644 --- a/main/TheengsCommon.h +++ b/main/TheengsCommon.h @@ -92,6 +92,19 @@ extern bool cmpToMainTopic(const char*, const char*); extern bool pub(const char*, const char*, bool); extern bool pub(const char*, const char*); +// Float-specific overload to log with the correct format specifier. 
+inline void Config_update(JsonObject& data, const char* key, float& var) { + if (data.containsKey(key)) { + float newVal = data[key].as<float>(); + if (var != newVal) { + var = newVal; + THEENGS_LOG_NOTICE(F("Config %s changed to: %F" CR), key, newVal); + } else { + THEENGS_LOG_NOTICE(F("Config %s unchanged, currently: %F" CR), key, newVal); + } + } +} + template <typename T> void Config_update(JsonObject& data, const char* key, T& var) { if (data.containsKey(key)) { diff --git a/main/User_config.h b/main/User_config.h index 31a8351004..df73eb8ef6 100644 --- a/main/User_config.h +++ b/main/User_config.h @@ -29,7 +29,7 @@ #define user_config_h /*-------------------VERSION----------------------*/ #ifndef OMG_VERSION -# define OMG_VERSION "version_tag" +# define OMG_VERSION "edge" #endif /*-------------CONFIGURE WIFIMANAGER-------------(only ESP8266 & SONOFF RFBridge)*/ diff --git a/main/blufi.cpp b/main/blufi.cpp index 669129c7f1..5449fb9bfd 100644 --- a/main/blufi.cpp +++ b/main/blufi.cpp @@ -49,9 +49,9 @@ static NimBLEOta* pNimBLEOta; static NimBLECharacteristic* pCommandCharacteristic; static NimBLECharacteristic* pRecvFwCharacteristic; -#ifndef BLUFI_MFG_ID -# define BLUFI_MFG_ID 0xFFFF // Default Manufacturer ID if not defined -#endif +# ifndef BLUFI_MFG_ID +# define BLUFI_MFG_ID 0xFFFF // Default Manufacturer ID if not defined +# endif struct pkt_info { uint8_t* pkt; @@ -201,7 +201,7 @@ void restart_connection_timer() {} void stop_connection_timer() {} # endif -void set_blufi_mfg_data () { +void set_blufi_mfg_data() { if (!NimBLEDevice::isInitialized() || !NimBLEDevice::getAdvertising()->isAdvertising()) { THEENGS_LOG_NOTICE(F("Unable to set advertising data" CR)); return; diff --git a/main/commonRF.cpp b/main/commonRF.cpp index 4eea813d00..85d150cd1f 100644 --- a/main/commonRF.cpp +++ b/main/commonRF.cpp @@ -40,6 +40,7 @@ extern rtl_433_ESP rtl_433; # endif int currentReceiver = ACTIVE_NONE; +boolean isDriverEnabled = false; extern void enableActiveReceiver(); extern void disableCurrentReceiver(); @@ -53,43 +54,63 @@ class ZCommonRFWrapper : public RFReceiver { ZCommonRFWrapper() : RFReceiver() {} void enable() override { enableActiveReceiver(); } void disable() override { disableCurrentReceiver(); } - int getReceiverID() const override { return currentReceiver; } }; ZCommonRFWrapper iRFReceiver; RFConfiguration iRFConfig(iRFReceiver); -//TODO review +// Initialize the CC1101 and tune the RX frequency. 
+// - Uses truncated exponential backoff to avoid tight retry loops +// - Validates the configured frequency before touching the radio +// - Emits explicit logs for success/failure so watchdog resets are traceable void initCC1101() { -# ifdef ZradioCC1101 //receiving with CC1101 - // Loop on getCC1101() until it returns true and break after 10 attempts +# ifdef ZradioCC1101 // receiving with CC1101 + const float freqMhz = iRFConfig.getFrequency(); + if (!iRFConfig.validFrequency(freqMhz)) { + THEENGS_LOG_ERROR(F("C1101 invalid frequency: %F MHz" CR), freqMhz); + return; + } + int delayMS = 16; int delayMaxMS = 500; - for (int i = 0; i < 10; i++) { -# if defined(RF_MODULE_SCK) && defined(RF_MODULE_MISO) && \ - defined(RF_MODULE_MOSI) && defined(RF_MODULE_CS) - ELECHOUSE_cc1101.setSpiPin(RF_MODULE_SCK, RF_MODULE_MISO, RF_MODULE_MOSI, RF_MODULE_CS); + bool connected = false; + + for (int attempt = 1; attempt <= 10; attempt++) { +# if defined(RF_CC1101_SCK) && defined(RF_CC1101_MISO) && \ + defined(RF_CC1101_MOSI) && defined(RF_CC1101_CS) + THEENGS_LOG_TRACE(F("initCC1101 with custom SPI pins, SCK=%d, MISO=%d, MOSI=%d, CS=%d" CR), RF_CC1101_SCK, RF_CC1101_MISO, RF_CC1101_MOSI, RF_CC1101_CS); + ELECHOUSE_cc1101.setSpiPin(RF_CC1101_SCK, RF_CC1101_MISO, RF_CC1101_MOSI, RF_CC1101_CS); # endif + if (ELECHOUSE_cc1101.getCC1101()) { - THEENGS_LOG_NOTICE(F("C1101 spi Connection OK" CR)); + connected = true; + THEENGS_LOG_NOTICE(F("C1101 SPI connection OK on attempt %d" CR), attempt); ELECHOUSE_cc1101.Init(); - ELECHOUSE_cc1101.SetRx(iRFConfig.getFrequency()); + ELECHOUSE_cc1101.SetRx(freqMhz); + THEENGS_LOG_NOTICE(F("C1101 tuned RX to %F MHz" CR), freqMhz); break; - } else { - THEENGS_LOG_ERROR(F("C1101 spi Connection Error" CR)); - delay(delayMS); } + + THEENGS_LOG_ERROR(F("C1101 SPI connection error (attempt %d), retrying" CR), attempt); + delay(delayMS); + // truncated exponential backoff delayMS = delayMS * 2; if (delayMS > delayMaxMS) delayMS = delayMaxMS; } + + if (!connected) { + THEENGS_LOG_ERROR(F("C1101 init failed after retries, radio left disabled" CR)); + } +# else + THEENGS_LOG_TRACE(F("initCC1101 skipped: ZradioCC1101 not enabled" CR)); # endif } void setupCommonRF() { iRFConfig.reInit(); - iRFConfig.loadFromStorage(); + iRFConfig.loadFromStorage(true); } # if !defined(ZgatewayRFM69) && !defined(ZactuatorSomfy) @@ -120,28 +141,37 @@ bool validReceiver(int receiver) { # endif void disableCurrentReceiver() { + if (!isDriverEnabled) { + THEENGS_LOG_TRACE(F("Receiver %d is already disabled" CR), currentReceiver); + return; + } THEENGS_LOG_TRACE(F("disableCurrentReceiver: %d" CR), currentReceiver); switch (currentReceiver) { case ACTIVE_NONE: + isDriverEnabled = false; break; # ifdef ZgatewayPilight case ACTIVE_PILIGHT: disablePilightReceive(); + isDriverEnabled = false; break; # endif # ifdef ZgatewayRF case ACTIVE_RF: disableRFReceive(); + isDriverEnabled = false; break; # endif # ifdef ZgatewayRTL_433 case ACTIVE_RTL: disableRTLreceive(); + isDriverEnabled = false; break; # endif # ifdef ZgatewayRF2 case ACTIVE_RF2: disableRF2Receive(); + isDriverEnabled = false; break; # endif default: @@ -150,6 +180,10 @@ void disableCurrentReceiver() { } void enableActiveReceiver() { + if (isDriverEnabled) { + THEENGS_LOG_TRACE(F("Receiver %d is already enabled" CR), currentReceiver); + return; + } THEENGS_LOG_TRACE(F("enableActiveReceiver: %d" CR), iRFConfig.getActiveReceiver()); switch (iRFConfig.getActiveReceiver()) { # ifdef ZgatewayPilight @@ -157,6 +191,7 @@ void enableActiveReceiver() { 
initCC1101(); enablePilightReceive(); currentReceiver = ACTIVE_PILIGHT; + isDriverEnabled = true; break; # endif # ifdef ZgatewayRF @@ -164,6 +199,7 @@ void enableActiveReceiver() { initCC1101(); enableRFReceive(iRFConfig.getFrequency(), RF_RECEIVER_GPIO, RF_EMITTER_GPIO); currentReceiver = ACTIVE_RF; + isDriverEnabled = true; break; # endif # ifdef ZgatewayRTL_433 @@ -171,6 +207,7 @@ void enableActiveReceiver() { initCC1101(); enableRTLreceive(); currentReceiver = ACTIVE_RTL; + isDriverEnabled = true; break; # endif # ifdef ZgatewayRF2 @@ -178,6 +215,7 @@ void enableActiveReceiver() { initCC1101(); enableRF2Receive(); currentReceiver = ACTIVE_RF2; + isDriverEnabled = true; break; # endif case ACTIVE_RECERROR: diff --git a/main/config_RF.h b/main/config_RF.h index c166a4b005..71b25ff80b 100644 --- a/main/config_RF.h +++ b/main/config_RF.h @@ -190,7 +190,7 @@ extern RFConfiguration iRFConfig; /*-------------------RF frequency----------------------*/ //Match frequency to the hardware version of the radio used. #ifndef RF_FREQUENCY -# define RF_FREQUENCY 433.92 +# define RF_FREQUENCY 433.92f #endif /** diff --git a/main/gatewayBLEConnect.cpp b/main/gatewayBLEConnect.cpp index 206fdd0ce0..0e43d7a0b8 100644 --- a/main/gatewayBLEConnect.cpp +++ b/main/gatewayBLEConnect.cpp @@ -337,20 +337,20 @@ void BM2_connect::publishData() { static const unsigned char BM6_AES_KEY[16] = { 108, // l 101, // e - 97, // a + 97, // a 103, // g 101, // e 110, // n 100, // d 255, // 0xff 254, // 0xfe - 48, // 0 - 49, // 1 - 48, // 0 - 48, // 0 - 48, // 0 - 48, // 0 - 57, // 9 + 48, // 0 + 49, // 1 + 48, // 0 + 48, // 0 + 48, // 0 + 48, // 0 + 57, // 9 }; void BM6_connect::notifyCB(NimBLERemoteCharacteristic* pChar, uint8_t* pData, size_t length, bool isNotify) { diff --git a/main/gatewayBT.cpp b/main/gatewayBT.cpp index 260315f743..b9c059d6c5 100644 --- a/main/gatewayBT.cpp +++ b/main/gatewayBT.cpp @@ -992,7 +992,7 @@ void launchBTDiscovery(bool overrideDiscovery) { if (!BTConfig.extDecoderEnable && // Do not decode if an external decoder is configured p->sensorModel_id > UNKWNON_MODEL && p->sensorModel_id < TheengsDecoder::BLE_ID_NUM::BLE_ID_MAX && - p->sensorModel_id != TheengsDecoder::BLE_ID_NUM::HHCCJCY01HHCC && + p->sensorModel_id != TheengsDecoder::BLE_ID_NUM::HHCCJCY01HHCC && p->sensorModel_id != TheengsDecoder::BLE_ID_NUM::BM2 && p->sensorModel_id != TheengsDecoder::BLE_ID_NUM::BM6) { // Exception on HHCCJCY01HHCC and BM2/BM6 as these ones are discoverable and connectable if (isTracker) { @@ -1025,10 +1025,10 @@ void launchBTDiscovery(bool overrideDiscovery) { // This should not happen if JSON_MSG_BUFFER is large enough for // the Theengs json properties THEENGS_LOG_ERROR(F("JSON deserialization of Theengs properties overflowed (error %s), buffer capacity: %u. Program might crash. 
Properties json: %s" CR), - error.c_str(), jsonBuffer.capacity(), properties.c_str()); + error.c_str(), jsonBuffer.capacity(), properties.c_str()); } else { THEENGS_LOG_ERROR(F("JSON deserialization of Theengs properties errored: %" CR), - error.c_str()); + error.c_str()); } } for (JsonPair prop : jsonBuffer["properties"].as()) { diff --git a/main/gatewayRF.cpp b/main/gatewayRF.cpp index f9e884f971..87b5a8ecce 100644 --- a/main/gatewayRF.cpp +++ b/main/gatewayRF.cpp @@ -402,7 +402,7 @@ void enableRFReceive( float rfFrequency = iRFConfig.getFrequency(), int rfReceiverGPIO = RF_RECEIVER_GPIO, int rfEmitterGPIO = RF_EMITTER_GPIO) { - THEENGS_LOG_NOTICE(F("[RF] Enable RF Receiver: %fMhz, RF_EMITTER_GPIO: %d, RF_RECEIVER_GPIO: %d" CR), rfFrequency, rfEmitterGPIO, rfReceiverGPIO); + THEENGS_LOG_NOTICE(F("[RF] Enable RF Receiver: %F Mhz, RF_EMITTER_GPIO: %d, RF_RECEIVER_GPIO: %d" CR), rfFrequency, rfEmitterGPIO, rfReceiverGPIO); # ifdef RF_DISABLE_TRANSMIT mySwitch.disableTransmit(); @@ -410,7 +410,7 @@ void enableRFReceive( mySwitch.enableTransmit(rfEmitterGPIO); # endif - mySwitch.setRepeatTransmit(rfEmitterGPIO); + mySwitch.setRepeatTransmit(RF_EMITTER_REPEAT); mySwitch.enableReceive(rfReceiverGPIO); THEENGS_LOG_TRACE(F("[RF] Setup command topic: %s%s%s\n Setup done" CR), (const char*)mqtt_topic, (const char*)gateway_name, (const char*)subjectMQTTtoRF); diff --git a/main/gatewaySERIAL.cpp b/main/gatewaySERIAL.cpp index bcf304d20a..5f43111cce 100644 --- a/main/gatewaySERIAL.cpp +++ b/main/gatewaySERIAL.cpp @@ -316,7 +316,7 @@ void sendMQTTfromNestedJson(JsonVariant obj, char* topic, int level, int maxLeve topic[topicLength] = '\0'; } else { THEENGS_LOG_ERROR(F("Nested key '%s' at level %d does not fit within max topic length of %d, skipping"), - key, level, mqtt_topic_max_size); + key, level, mqtt_topic_max_size); } } diff --git a/main/main.cpp b/main/main.cpp index 7452920d80..54f9de4bde 100644 --- a/main/main.cpp +++ b/main/main.cpp @@ -96,8 +96,8 @@ char gateway_name[parameters_size + 1] = Gateway_Name; unsigned long lastDiscovery = 0; #if BLEDecryptor - char ble_aes[parameters_size] = BLE_AES; - StaticJsonDocument ble_aes_keys; +char ble_aes[parameters_size] = BLE_AES; +StaticJsonDocument ble_aes_keys; #endif #if !MQTT_BROKER_MODE @@ -2185,11 +2185,11 @@ bool loadConfigFromFlash() { if (json.containsKey("ble_aes")) { strcpy(ble_aes, json["ble_aes"]); THEENGS_LOG_TRACE(F("loaded default BLE AES key %s" CR), ble_aes); - } + } if (json.containsKey("ble_aes_keys")) { ble_aes_keys = json["ble_aes_keys"]; THEENGS_LOG_TRACE(F("loaded %d custom BLE AES keys" CR), ble_aes_keys.size()); - } + } # endif result = true; } else { diff --git a/main/mqttDiscovery.cpp b/main/mqttDiscovery.cpp index 6a249e9db7..c3bb9fca10 100644 --- a/main/mqttDiscovery.cpp +++ b/main/mqttDiscovery.cpp @@ -599,7 +599,7 @@ void createDiscovery(const char* sensor_type, } } - if (diagnostic_entity) { // entity_category + if (diagnostic_entity) { // entity_category sensor["ent_cat"] = "diagnostic"; } diff --git a/main/rf/RFConfiguration.cpp b/main/rf/RFConfiguration.cpp index d8a45f3d70..20366cc105 100644 --- a/main/rf/RFConfiguration.cpp +++ b/main/rf/RFConfiguration.cpp @@ -65,6 +65,7 @@ void RFConfiguration::reInit() { activeReceiver = ACTIVE_RECEIVER; rssiThreshold = 0; newOokThreshold = 0; + THEENGS_LOG_TRACE(F("RFConfiguration reInit: frequency=%F, activeReceiver=%d" CR), frequency, activeReceiver); } /** @@ -82,13 +83,13 @@ void RFConfiguration::eraseStorage() { preferences.begin(Gateway_Short_Name, false); if 
(preferences.isKey("RFConfig")) { int result = preferences.remove("RFConfig"); - Log.notice(F("RF config erase result: %d" CR), result); + THEENGS_LOG_NOTICE(F("RF config erase result: %d" CR), result); } else { - Log.notice(F("RF config not found" CR)); + THEENGS_LOG_NOTICE(F("RF config not found" CR)); } preferences.end(); #else - Log.warning(F("RF Config Erase not support with this board" CR)); + THEENGS_LOG_WARNING(F("RF Config Erase not support with this board" CR)); #endif } @@ -120,9 +121,9 @@ void RFConfiguration::saveOnStorage() { preferences.begin(Gateway_Short_Name, false); int result = preferences.putString("RFConfig", conf); preferences.end(); - Log.notice(F("RF Config_save: %s, result: %d" CR), conf.c_str(), result); + THEENGS_LOG_NOTICE(F("RF Config_save: %s, result: %d" CR), conf.c_str(), result); #else - Log.warning(F("RF Config_save not support with this board" CR)); + THEENGS_LOG_WARNING(F("RF Config_save not support with this board" CR)); #endif } @@ -149,24 +150,25 @@ void RFConfiguration::loadFromStorage(bool reinitReceiver) { auto error = deserializeJson(jsonBuffer, preferences.getString("RFConfig", "{}")); preferences.end(); if (error) { - Log.error(F("RF Config deserialization failed: %s, buffer capacity: %u" CR), error.c_str(), jsonBuffer.capacity()); + THEENGS_LOG_ERROR(F("RF Config deserialization failed: %s, buffer capacity: %u" CR), error.c_str(), jsonBuffer.capacity()); return; } if (jsonBuffer.isNull()) { - Log.warning(F("RF Config is null" CR)); + THEENGS_LOG_WARNING(F("RF Config is null" CR)); return; } JsonObject jo = jsonBuffer.as(); fromJson(jo); - Log.notice(F("RF Config loaded" CR)); + THEENGS_LOG_NOTICE(F("RF Config loaded" CR)); } else { preferences.end(); - Log.notice(F("RF Config not found using default" CR)); + THEENGS_LOG_NOTICE(F("RF Config not found using default" CR)); } #endif // Disable and re-enable the receiver to ensure proper initialization if (reinitReceiver) { iRFReceiver.disable(); + delay(100); iRFReceiver.enable(); } } @@ -209,10 +211,10 @@ void RFConfiguration::loadFromMessage(JsonObject& RFdata) { if (RFdata.containsKey("erase") && RFdata["erase"].as()) { eraseStorage(); - Log.notice(F("RF Config erased" CR)); + THEENGS_LOG_NOTICE(F("RF Config erased" CR)); } else if (RFdata.containsKey("save") && RFdata["save"].as()) { saveOnStorage(); - Log.notice(F("RF Config saved" CR)); + THEENGS_LOG_NOTICE(F("RF Config saved" CR)); } } @@ -245,17 +247,17 @@ void RFConfiguration::fromJson(JsonObject& RFdata) { if (RFdata.containsKey("frequency") && validFrequency(RFdata["frequency"])) { Config_update(RFdata, "frequency", frequency); - Log.notice(F("RF Receive mhz: %F" CR), frequency); + THEENGS_LOG_NOTICE(F("RF Receive mhz: %F" CR), frequency); success = true; } if (RFdata.containsKey("active")) { Config_update(RFdata, "active", activeReceiver); - Log.notice(F("RF receiver active: %d" CR), activeReceiver); + THEENGS_LOG_NOTICE(F("RF receiver active: %d" CR), activeReceiver); success = true; } #ifdef ZgatewayRTL_433 if (RFdata.containsKey("rssithreshold")) { - Log.notice(F("RTL_433 RSSI Threshold : %d " CR), rssiThreshold); + THEENGS_LOG_NOTICE(F("RTL_433 RSSI Threshold : %d " CR), rssiThreshold); Config_update(RFdata, "rssithreshold", rssiThreshold); rtl_433.setRSSIThreshold(rssiThreshold); success = true; @@ -263,18 +265,18 @@ void RFConfiguration::fromJson(JsonObject& RFdata) { # if defined(RF_SX1276) || defined(RF_SX1278) if (RFdata.containsKey("ookthreshold")) { Config_update(RFdata, "ookthreshold", newOokThreshold); - 
Log.notice(F("RTL_433 ookThreshold %d" CR), newOokThreshold); + THEENGS_LOG_NOTICE(F("RTL_433 ookThreshold %d" CR), newOokThreshold); rtl_433.setOOKThreshold(newOokThreshold); success = true; } # endif if (RFdata.containsKey("status")) { - Log.notice(F("RF get status:" CR)); + THEENGS_LOG_NOTICE(F("RF get status:" CR)); rtl_433.getStatus(); success = true; } if (!success) { - Log.error(F("MQTTtoRF Fail json" CR)); + THEENGS_LOG_ERROR(F("MQTTtoRF Fail json" CR)); } #endif } @@ -307,11 +309,6 @@ void RFConfiguration::toJson(JsonObject& RFdata) { RFdata["rssithreshold"] = rssiThreshold; RFdata["ookthreshold"] = newOokThreshold; RFdata["active"] = activeReceiver; - - // Add white-list vector to the JSON object - JsonArray whiteListArray = RFdata.createNestedArray("white-list"); - // Add black-list vector to the JSON object - JsonArray blackListArray = RFdata.createNestedArray("black-list"); } /** diff --git a/main/sensorDS1820.cpp b/main/sensorDS1820.cpp index a1bd79a381..3cfd05c4a5 100644 --- a/main/sensorDS1820.cpp +++ b/main/sensorDS1820.cpp @@ -92,10 +92,10 @@ void setupZsensorDS1820() { } ds1820_resolution[ds1820_count] = ds1820.getResolution(ds1820_address); THEENGS_LOG_TRACE(F("DS1820: Device %d, Type: %s, Address: %s, Resolution: %d" CR), - ds1820_count, - (char*)ds1820_type[ds1820_count].c_str(), - (char*)ds1820_addr[ds1820_count].c_str(), - ds1820_resolution[ds1820_count]); + ds1820_count, + (char*)ds1820_type[ds1820_count].c_str(), + (char*)ds1820_addr[ds1820_count].c_str(), + ds1820_resolution[ds1820_count]); ds1820_count++; } } diff --git a/package-lock.json b/package-lock.json index 322fdb3737..9f6584fe72 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5,6 +5,9 @@ "packages": { "": { "dependencies": { + "ini": "^4.1.1", + "markdown-table": "^3.0.3", + "mime-types": "^2.1.35", "vuepress-plugin-sitemap": "^2.3.1" }, "devDependencies": { @@ -25,13 +28,14 @@ } }, "node_modules/@babel/code-frame": { - "version": "7.23.5", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.23.5.tgz", - "integrity": "sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", "dev": true, "dependencies": { - "@babel/highlight": "^7.23.4", - "chalk": "^2.4.2" + "@babel/helper-validator-identifier": "^7.27.1", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" }, "engines": { "node": ">=6.9.0" @@ -51,6 +55,7 @@ "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.23.7.tgz", "integrity": "sha512-+UpDgowcmqe36d4NwqvKsyPMlOLNGMsfMmQ5WGCu+siCe3t3dfe9njrzGfdN4qq+bcNUt0+Vw6haRxBOycs4dw==", "dev": true, + "peer": true, "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.23.5", @@ -356,18 +361,18 @@ } }, "node_modules/@babel/helper-string-parser": { - "version": "7.23.4", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz", - "integrity": "sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", "dev": true, "engines": { "node": ">=6.9.0" } }, 
"node_modules/@babel/helper-validator-identifier": { - "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz", - "integrity": "sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", "dev": true, "engines": { "node": ">=6.9.0" @@ -397,38 +402,26 @@ } }, "node_modules/@babel/helpers": { - "version": "7.23.7", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.23.7.tgz", - "integrity": "sha512-6AMnjCoC8wjqBzDHkuqpa7jAKwvMo4dC+lr/TFBz+ucfulO1XMpDnwWPGBNwClOKZ8h6xn5N81W/R5OrcKtCbQ==", + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", + "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", "dev": true, "dependencies": { - "@babel/template": "^7.22.15", - "@babel/traverse": "^7.23.7", - "@babel/types": "^7.23.6" + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/highlight": { - "version": "7.23.4", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.23.4.tgz", - "integrity": "sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A==", + "node_modules/@babel/parser": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", + "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", "dev": true, "dependencies": { - "@babel/helper-validator-identifier": "^7.22.20", - "chalk": "^2.4.2", - "js-tokens": "^4.0.0" + "@babel/types": "^7.28.5" }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/parser": { - "version": "7.23.6", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.6.tgz", - "integrity": "sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ==", - "dev": true, "bin": { "parser": "bin/babel-parser.js" }, @@ -1703,26 +1696,23 @@ "dev": true }, "node_modules/@babel/runtime": { - "version": "7.23.7", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.7.tgz", - "integrity": "sha512-w06OXVOFso7LcbzMiDGt+3X7Rh7Ho8MmgPoWU3rarH+8upf+wSU/grlGbWzQyr3DkdN6ZeuMFjpdwW0Q+HxobA==", + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz", + "integrity": "sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==", "dev": true, - "dependencies": { - "regenerator-runtime": "^0.14.0" - }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/template": { - "version": "7.22.15", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.15.tgz", - "integrity": "sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w==", + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", + "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", "dev": true, "dependencies": { - "@babel/code-frame": "^7.22.13", - "@babel/parser": "^7.22.15", - "@babel/types": 
"^7.22.15" + "@babel/code-frame": "^7.27.1", + "@babel/parser": "^7.27.2", + "@babel/types": "^7.27.1" }, "engines": { "node": ">=6.9.0" @@ -1750,14 +1740,13 @@ } }, "node_modules/@babel/types": { - "version": "7.23.6", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.6.tgz", - "integrity": "sha512-+uarb83brBzPKN38NX1MkB6vb6+mwvR6amUulqAE7ccQw1pEl+bCia9TbdG1lsnFP7lZySvUn37CHyXQdfTwzg==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", + "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", "dev": true, "dependencies": { - "@babel/helper-string-parser": "^7.23.4", - "@babel/helper-validator-identifier": "^7.22.20", - "to-fast-properties": "^2.0.0" + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" }, "engines": { "node": ">=6.9.0" @@ -2840,6 +2829,7 @@ "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dev": true, + "peer": true, "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -3588,23 +3578,23 @@ "dev": true }, "node_modules/body-parser": { - "version": "1.20.3", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", - "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "version": "1.20.4", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.4.tgz", + "integrity": "sha512-ZTgYYLMOXY9qKU/57FAo8F+HA2dGX7bqGc71txDRC1rS4frdFI5R7NhluHxH6M0YItAP0sHB4uqAOcYKxO6uGA==", "dev": true, "dependencies": { - "bytes": "3.1.2", + "bytes": "~3.1.2", "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", - "destroy": "1.2.0", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "on-finished": "2.4.1", - "qs": "6.13.0", - "raw-body": "2.5.2", + "destroy": "~1.2.0", + "http-errors": "~2.0.1", + "iconv-lite": "~0.4.24", + "on-finished": "~2.4.1", + "qs": "~6.14.0", + "raw-body": "~2.5.3", "type-is": "~1.6.18", - "unpipe": "1.0.0" + "unpipe": "~1.0.0" }, "engines": { "node": ">= 0.8", @@ -3620,12 +3610,41 @@ "ms": "2.0.0" } }, + "node_modules/body-parser/node_modules/http-errors": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz", + "integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==", + "dev": true, + "dependencies": { + "depd": "~2.0.0", + "inherits": "~2.0.4", + "setprototypeof": "~1.2.0", + "statuses": "~2.0.2", + "toidentifier": "~1.0.1" + }, + "engines": { + "node": ">= 0.8" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, "node_modules/body-parser/node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", "dev": true }, + "node_modules/body-parser/node_modules/statuses": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/bonjour": { "version": "3.5.0", "resolved": 
"https://registry.npmjs.org/bonjour/-/bonjour-3.5.0.tgz", @@ -3804,9 +3823,9 @@ } }, "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, "dependencies": { "balanced-match": "^1.0.0", @@ -3981,6 +4000,7 @@ "url": "https://github.com/sponsors/ai" } ], + "peer": true, "dependencies": { "caniuse-lite": "^1.0.30001565", "electron-to-chromium": "^1.4.601", @@ -4437,6 +4457,7 @@ "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "peer": true, "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", @@ -4733,7 +4754,8 @@ "node_modules/commander": { "version": "2.17.1", "resolved": "https://registry.npmjs.org/commander/-/commander-2.17.1.tgz", - "integrity": "sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg==" + "integrity": "sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg==", + "peer": true }, "node_modules/commondir": { "version": "1.0.1", @@ -5255,9 +5277,9 @@ } }, "node_modules/cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.6.tgz", + "integrity": "sha512-VqCUuhcd1iB+dsv8gxPttb5iZh/D0iubSP21g36KXdEuf6I5JiioesUVjpCdHV9MZRUfVFlvwtIUyPfxo5trtw==", "dev": true, "dependencies": { "nice-try": "^1.0.4", @@ -6400,6 +6422,7 @@ "integrity": "sha512-+u/msd6iu+HvfysUPkZ9VHm83LImmSNnecYPfFI01pQ7TTcsFR+V0BkybZX7mPtIaI7LCrse6YRj+v3eraJSgw==", "dev": true, "hasInstallScript": true, + "peer": true, "bin": { "esbuild": "bin/esbuild" }, @@ -7643,6 +7666,13 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/global-dirs/node_modules/ini": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.7.tgz", + "integrity": "sha512-iKpRpXP+CrP2jyrxvg1kMUpXDyRUFDWurxbnVT1vQPx+Wz9uCYsMIqYuSBLV+PAaZG/d7kRLKRFc9oDMsH+mFQ==", + "dev": true, + "license": "ISC" + }, "node_modules/globals": { "version": "11.12.0", "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", @@ -8231,21 +8261,21 @@ } }, "node_modules/http-proxy-middleware/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dev": true, "dependencies": { - "fill-range": "^7.0.1" + "fill-range": "^7.1.1" }, "engines": { "node": ">=8" } }, "node_modules/http-proxy-middleware/node_modules/fill-range": { - "version": "7.0.1", - "resolved": 
"https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dev": true, "dependencies": { "to-regex-range": "^5.0.1" @@ -8264,12 +8294,12 @@ } }, "node_modules/http-proxy-middleware/node_modules/micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dev": true, "dependencies": { - "braces": "^3.0.2", + "braces": "^3.0.3", "picomatch": "^2.3.1" }, "engines": { @@ -8538,10 +8568,13 @@ "dev": true }, "node_modules/ini": { - "version": "1.3.7", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.7.tgz", - "integrity": "sha512-iKpRpXP+CrP2jyrxvg1kMUpXDyRUFDWurxbnVT1vQPx+Wz9uCYsMIqYuSBLV+PAaZG/d7kRLKRFc9oDMsH+mFQ==", - "dev": true + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz", + "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==", + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } }, "node_modules/internal-ip": { "version": "4.3.0", @@ -9521,6 +9554,7 @@ "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-8.4.2.tgz", "integrity": "sha512-GcRz3AWTqSUphY3vsUqQSFMbgR38a4Lh3GWlHRh/7MRwz8mcu9n2IO7HOh+bXHrR9kOPDl5RNCaEsrneb+xhHQ==", "dev": true, + "peer": true, "dependencies": { "argparse": "^1.0.7", "entities": "~1.1.1", @@ -9588,6 +9622,16 @@ "node": ">6.4.0" } }, + "node_modules/markdown-table": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz", + "integrity": "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -9744,7 +9788,6 @@ "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "dev": true, "engines": { "node": ">= 0.6" } @@ -9753,7 +9796,6 @@ "version": "2.1.35", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "dev": true, "dependencies": { "mime-db": "1.52.0" }, @@ -9949,9 +9991,9 @@ "optional": true }, "node_modules/nanoid": { - "version": "3.3.7", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", - "integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==", + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": 
"sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", "dev": true, "funding": [ { @@ -10715,9 +10757,9 @@ "dev": true }, "node_modules/picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", "dev": true }, "node_modules/picomatch": { @@ -11685,12 +11727,12 @@ } }, "node_modules/qs": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", - "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "version": "6.14.1", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.1.tgz", + "integrity": "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==", "dev": true, "dependencies": { - "side-channel": "^1.0.6" + "side-channel": "^1.1.0" }, "engines": { "node": ">=0.6" @@ -11757,20 +11799,49 @@ } }, "node_modules/raw-body": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", - "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "version": "2.5.3", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.3.tgz", + "integrity": "sha512-s4VSOf6yN0rvbRZGxs8Om5CWj6seneMwK3oDb4lWDH0UPhWcxwOWw5+qk24bxq87szX1ydrwylIOp2uG1ojUpA==", "dev": true, "dependencies": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" + "bytes": "~3.1.2", + "http-errors": "~2.0.1", + "iconv-lite": "~0.4.24", + "unpipe": "~1.0.0" }, "engines": { "node": ">= 0.8" } }, + "node_modules/raw-body/node_modules/http-errors": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz", + "integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==", + "dev": true, + "dependencies": { + "depd": "~2.0.0", + "inherits": "~2.0.4", + "setprototypeof": "~1.2.0", + "statuses": "~2.0.2", + "toidentifier": "~1.0.1" + }, + "engines": { + "node": ">= 0.8" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/raw-body/node_modules/statuses": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/rc": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", @@ -11786,6 +11857,13 @@ "rc": "cli.js" } }, + "node_modules/rc/node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "dev": true, + "license": "ISC" + }, "node_modules/readable-stream": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", @@ -11842,12 +11920,6 @@ "node": ">=4" } }, - "node_modules/regenerator-runtime": { - "version": "0.14.1", - "resolved": 
"https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", - "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", - "dev": true - }, "node_modules/regenerator-transform": { "version": "0.15.2", "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.2.tgz", @@ -13626,9 +13698,9 @@ } }, "node_modules/terser-webpack-plugin": { - "version": "1.4.5", - "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-1.4.5.tgz", - "integrity": "sha512-04Rfe496lN8EYruwi6oPQkG0vo8C+HT49X687FZnpPF0qMAIHONI6HEXYPKDOE8e5HjXTyKfqRd/agHtH0kOtw==", + "version": "1.4.6", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-1.4.6.tgz", + "integrity": "sha512-2lBVf/VMVIddjSn3GqbT90GvIJ/eYXJkt8cTzU7NbjKqK8fwv18Ftr4PlbF46b/e88743iZFL5Dtr/rC4hjIeA==", "dev": true, "dependencies": { "cacache": "^12.0.2", @@ -13881,15 +13953,6 @@ "integrity": "sha512-JVYrY42wMG7ddf+wBUQR/uHGbjUHZbLisJ8N62AMm0iTZ0p8YTcZLzdtomU0+H+wa99VbkyvQGB3zxB7NDzgIQ==", "dev": true }, - "node_modules/to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/to-object-path": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", @@ -14696,6 +14759,7 @@ "integrity": "sha512-4gCtFXaAA3zYZdTp5s4Hl2sozuySsgz4jy1EnpBHNfpMa9dK1ZCG7viqBPCwXtmgc8nHqUsAu3G4gtmXkkY3Sw==", "deprecated": "Vue 2 has reached EOL and is no longer actively maintained. 
See https://v2.vuejs.org/eol/ for more details.", "dev": true, + "peer": true, "dependencies": { "@vue/compiler-sfc": "2.7.16", "csstype": "^3.1.0" @@ -14822,9 +14886,9 @@ "dev": true }, "node_modules/vue-server-renderer/node_modules/serialize-javascript": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.1.tgz", - "integrity": "sha512-owoXEFjWRllis8/M1Q+Cw5k8ZH40e3zhp/ovX+Xr/vi1qj6QesbyXXViFbpNvWvPNAD62SutwEXavefrLJWj7w==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", + "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", "dev": true, "dependencies": { "randombytes": "^2.1.0" @@ -15039,13 +15103,13 @@ } }, "node_modules/watchpack/node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dev": true, "optional": true, "dependencies": { - "fill-range": "^7.0.1" + "fill-range": "^7.1.1" }, "engines": { "node": ">=8" @@ -15080,9 +15144,9 @@ } }, "node_modules/watchpack/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dev": true, "optional": true, "dependencies": { @@ -15187,6 +15251,7 @@ "resolved": "https://registry.npmjs.org/webpack/-/webpack-4.47.0.tgz", "integrity": "sha512-td7fYwgLSrky3fI1EuU5cneU4+pbH6GgOfuKNS1tNPcfdGinGELAqsb/BP4nnvZyKSG2i/xFGU7+n2PvZA8HJQ==", "dev": true, + "peer": true, "dependencies": { "@webassemblyjs/ast": "1.9.0", "@webassemblyjs/helper-module-context": "1.9.0", @@ -15820,13 +15885,14 @@ } }, "@babel/code-frame": { - "version": "7.23.5", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.23.5.tgz", - "integrity": "sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", "dev": true, "requires": { - "@babel/highlight": "^7.23.4", - "chalk": "^2.4.2" + "@babel/helper-validator-identifier": "^7.27.1", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" } }, "@babel/compat-data": { @@ -15840,6 +15906,7 @@ "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.23.7.tgz", "integrity": "sha512-+UpDgowcmqe36d4NwqvKsyPMlOLNGMsfMmQ5WGCu+siCe3t3dfe9njrzGfdN4qq+bcNUt0+Vw6haRxBOycs4dw==", "dev": true, + "peer": true, "requires": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.23.5", @@ -16063,15 +16130,15 @@ } }, "@babel/helper-string-parser": { - "version": "7.23.4", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz", - "integrity": 
"sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==", + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", "dev": true }, "@babel/helper-validator-identifier": { - "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz", - "integrity": "sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", "dev": true }, "@babel/helper-validator-option": { @@ -16092,33 +16159,24 @@ } }, "@babel/helpers": { - "version": "7.23.7", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.23.7.tgz", - "integrity": "sha512-6AMnjCoC8wjqBzDHkuqpa7jAKwvMo4dC+lr/TFBz+ucfulO1XMpDnwWPGBNwClOKZ8h6xn5N81W/R5OrcKtCbQ==", + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", + "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", "dev": true, "requires": { - "@babel/template": "^7.22.15", - "@babel/traverse": "^7.23.7", - "@babel/types": "^7.23.6" + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4" } }, - "@babel/highlight": { - "version": "7.23.4", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.23.4.tgz", - "integrity": "sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A==", + "@babel/parser": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", + "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", "dev": true, "requires": { - "@babel/helper-validator-identifier": "^7.22.20", - "chalk": "^2.4.2", - "js-tokens": "^4.0.0" + "@babel/types": "^7.28.5" } }, - "@babel/parser": { - "version": "7.23.6", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.6.tgz", - "integrity": "sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ==", - "dev": true - }, "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { "version": "7.23.3", "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.23.3.tgz", @@ -16963,23 +17021,20 @@ "dev": true }, "@babel/runtime": { - "version": "7.23.7", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.7.tgz", - "integrity": "sha512-w06OXVOFso7LcbzMiDGt+3X7Rh7Ho8MmgPoWU3rarH+8upf+wSU/grlGbWzQyr3DkdN6ZeuMFjpdwW0Q+HxobA==", - "dev": true, - "requires": { - "regenerator-runtime": "^0.14.0" - } + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz", + "integrity": "sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==", + "dev": true }, "@babel/template": { - "version": "7.22.15", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.15.tgz", - 
"integrity": "sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w==", + "version": "7.27.2", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", + "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", "dev": true, "requires": { - "@babel/code-frame": "^7.22.13", - "@babel/parser": "^7.22.15", - "@babel/types": "^7.22.15" + "@babel/code-frame": "^7.27.1", + "@babel/parser": "^7.27.2", + "@babel/types": "^7.27.1" } }, "@babel/traverse": { @@ -17001,14 +17056,13 @@ } }, "@babel/types": { - "version": "7.23.6", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.6.tgz", - "integrity": "sha512-+uarb83brBzPKN38NX1MkB6vb6+mwvR6amUulqAE7ccQw1pEl+bCia9TbdG1lsnFP7lZySvUn37CHyXQdfTwzg==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", + "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", "dev": true, "requires": { - "@babel/helper-string-parser": "^7.23.4", - "@babel/helper-validator-identifier": "^7.22.20", - "to-fast-properties": "^2.0.0" + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" } }, "@jridgewell/gen-mapping": { @@ -17974,6 +18028,7 @@ "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dev": true, + "peer": true, "requires": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -18567,23 +18622,23 @@ "dev": true }, "body-parser": { - "version": "1.20.3", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", - "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "version": "1.20.4", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.4.tgz", + "integrity": "sha512-ZTgYYLMOXY9qKU/57FAo8F+HA2dGX7bqGc71txDRC1rS4frdFI5R7NhluHxH6M0YItAP0sHB4uqAOcYKxO6uGA==", "dev": true, "requires": { - "bytes": "3.1.2", + "bytes": "~3.1.2", "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", - "destroy": "1.2.0", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "on-finished": "2.4.1", - "qs": "6.13.0", - "raw-body": "2.5.2", + "destroy": "~1.2.0", + "http-errors": "~2.0.1", + "iconv-lite": "~0.4.24", + "on-finished": "~2.4.1", + "qs": "~6.14.0", + "raw-body": "~2.5.3", "type-is": "~1.6.18", - "unpipe": "1.0.0" + "unpipe": "~1.0.0" }, "dependencies": { "debug": { @@ -18595,11 +18650,30 @@ "ms": "2.0.0" } }, + "http-errors": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz", + "integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==", + "dev": true, + "requires": { + "depd": "~2.0.0", + "inherits": "~2.0.4", + "setprototypeof": "~1.2.0", + "statuses": "~2.0.2", + "toidentifier": "~1.0.1" + } + }, "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", "dev": true + }, + "statuses": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", + "dev": true } } 
}, @@ -18741,9 +18815,9 @@ } }, "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, "requires": { "balanced-match": "^1.0.0", @@ -18882,6 +18956,7 @@ "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.22.2.tgz", "integrity": "sha512-0UgcrvQmBDvZHFGdYUehrCNIazki7/lUP3kkoi/r3YB2amZbFM9J43ZRkJTXBUZK4gmx56+Sqk9+Vs9mwZx9+A==", "dev": true, + "peer": true, "requires": { "caniuse-lite": "^1.0.30001565", "electron-to-chromium": "^1.4.601", @@ -19230,6 +19305,7 @@ "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "peer": true, "requires": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", @@ -19464,7 +19540,8 @@ "commander": { "version": "2.17.1", "resolved": "https://registry.npmjs.org/commander/-/commander-2.17.1.tgz", - "integrity": "sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg==" + "integrity": "sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg==", + "peer": true }, "commondir": { "version": "1.0.1", @@ -19873,9 +19950,9 @@ } }, "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.6.tgz", + "integrity": "sha512-VqCUuhcd1iB+dsv8gxPttb5iZh/D0iubSP21g36KXdEuf6I5JiioesUVjpCdHV9MZRUfVFlvwtIUyPfxo5trtw==", "dev": true, "requires": { "nice-try": "^1.0.4", @@ -20806,6 +20883,7 @@ "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.14.7.tgz", "integrity": "sha512-+u/msd6iu+HvfysUPkZ9VHm83LImmSNnecYPfFI01pQ7TTcsFR+V0BkybZX7mPtIaI7LCrse6YRj+v3eraJSgw==", "dev": true, + "peer": true, "requires": { "esbuild-android-arm64": "0.14.7", "esbuild-darwin-64": "0.14.7", @@ -21711,6 +21789,14 @@ "dev": true, "requires": { "ini": "1.3.7" + }, + "dependencies": { + "ini": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.7.tgz", + "integrity": "sha512-iKpRpXP+CrP2jyrxvg1kMUpXDyRUFDWurxbnVT1vQPx+Wz9uCYsMIqYuSBLV+PAaZG/d7kRLKRFc9oDMsH+mFQ==", + "dev": true + } } }, "globals": { @@ -22151,18 +22237,18 @@ }, "dependencies": { "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dev": true, "requires": { - "fill-range": "^7.0.1" + "fill-range": "^7.1.1" } }, "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": 
"sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dev": true, "requires": { "to-regex-range": "^5.0.1" @@ -22175,12 +22261,12 @@ "dev": true }, "micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dev": true, "requires": { - "braces": "^3.0.2", + "braces": "^3.0.3", "picomatch": "^2.3.1" } }, @@ -22384,10 +22470,9 @@ "dev": true }, "ini": { - "version": "1.3.7", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.7.tgz", - "integrity": "sha512-iKpRpXP+CrP2jyrxvg1kMUpXDyRUFDWurxbnVT1vQPx+Wz9uCYsMIqYuSBLV+PAaZG/d7kRLKRFc9oDMsH+mFQ==", - "dev": true + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz", + "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==" }, "internal-ip": { "version": "4.3.0", @@ -23145,6 +23230,7 @@ "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-8.4.2.tgz", "integrity": "sha512-GcRz3AWTqSUphY3vsUqQSFMbgR38a4Lh3GWlHRh/7MRwz8mcu9n2IO7HOh+bXHrR9kOPDl5RNCaEsrneb+xhHQ==", "dev": true, + "peer": true, "requires": { "argparse": "^1.0.7", "entities": "~1.1.1", @@ -23199,6 +23285,11 @@ "integrity": "sha512-TAIHTHPwa9+ltKvKPWulm/beozQU41Ab+FIefRaQV1NRnpzwcV9QOe6wXQS5WLivm5Q/nlo0rl6laGkMDZE7Gw==", "dev": true }, + "markdown-table": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz", + "integrity": "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==" + }, "math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -23327,14 +23418,12 @@ "mime-db": { "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "dev": true + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" }, "mime-types": { "version": "2.1.35", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "dev": true, "requires": { "mime-db": "1.52.0" } @@ -23498,9 +23587,9 @@ "optional": true }, "nanoid": { - "version": "3.3.7", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", - "integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==", + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", "dev": true }, "nanomatch": { @@ -24091,9 +24180,9 @@ "dev": true }, "picocolors": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", "dev": true }, "picomatch": { @@ -24923,12 +25012,12 @@ "dev": true }, "qs": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", - "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "version": "6.14.1", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.1.tgz", + "integrity": "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==", "dev": true, "requires": { - "side-channel": "^1.0.6" + "side-channel": "^1.1.0" } }, "query-string": { @@ -24980,15 +25069,36 @@ "dev": true }, "raw-body": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", - "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "version": "2.5.3", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.3.tgz", + "integrity": "sha512-s4VSOf6yN0rvbRZGxs8Om5CWj6seneMwK3oDb4lWDH0UPhWcxwOWw5+qk24bxq87szX1ydrwylIOp2uG1ojUpA==", "dev": true, "requires": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" + "bytes": "~3.1.2", + "http-errors": "~2.0.1", + "iconv-lite": "~0.4.24", + "unpipe": "~1.0.0" + }, + "dependencies": { + "http-errors": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz", + "integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==", + "dev": true, + "requires": { + "depd": "~2.0.0", + "inherits": "~2.0.4", + "setprototypeof": "~1.2.0", + "statuses": "~2.0.2", + "toidentifier": "~1.0.1" + } + }, + "statuses": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", + "dev": true + } } }, "rc": { @@ -25001,6 +25111,14 @@ "ini": "~1.3.0", "minimist": "^1.2.0", "strip-json-comments": "~2.0.1" + }, + "dependencies": { + "ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "dev": true + } } }, "readable-stream": { @@ -25053,12 +25171,6 @@ "regenerate": "^1.4.2" } }, - "regenerator-runtime": { - "version": "0.14.1", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", - "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", - "dev": true - }, "regenerator-transform": { "version": "0.15.2", "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.2.tgz", @@ -26506,9 +26618,9 @@ } }, "terser-webpack-plugin": { - "version": "1.4.5", - "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-1.4.5.tgz", - "integrity": "sha512-04Rfe496lN8EYruwi6oPQkG0vo8C+HT49X687FZnpPF0qMAIHONI6HEXYPKDOE8e5HjXTyKfqRd/agHtH0kOtw==", + "version": "1.4.6", + "resolved": 
"https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-1.4.6.tgz", + "integrity": "sha512-2lBVf/VMVIddjSn3GqbT90GvIJ/eYXJkt8cTzU7NbjKqK8fwv18Ftr4PlbF46b/e88743iZFL5Dtr/rC4hjIeA==", "dev": true, "requires": { "cacache": "^12.0.2", @@ -26691,12 +26803,6 @@ "integrity": "sha512-JVYrY42wMG7ddf+wBUQR/uHGbjUHZbLisJ8N62AMm0iTZ0p8YTcZLzdtomU0+H+wa99VbkyvQGB3zxB7NDzgIQ==", "dev": true }, - "to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", - "dev": true - }, "to-object-path": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", @@ -27336,6 +27442,7 @@ "resolved": "https://registry.npmjs.org/vue/-/vue-2.7.16.tgz", "integrity": "sha512-4gCtFXaAA3zYZdTp5s4Hl2sozuySsgz4jy1EnpBHNfpMa9dK1ZCG7viqBPCwXtmgc8nHqUsAu3G4gtmXkkY3Sw==", "dev": true, + "peer": true, "requires": { "@vue/compiler-sfc": "2.7.16", "csstype": "^3.1.0" @@ -27429,9 +27536,9 @@ "dev": true }, "serialize-javascript": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.1.tgz", - "integrity": "sha512-owoXEFjWRllis8/M1Q+Cw5k8ZH40e3zhp/ovX+Xr/vi1qj6QesbyXXViFbpNvWvPNAD62SutwEXavefrLJWj7w==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", + "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", "dev": true, "requires": { "randombytes": "^2.1.0" @@ -27599,13 +27706,13 @@ "optional": true }, "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dev": true, "optional": true, "requires": { - "fill-range": "^7.0.1" + "fill-range": "^7.1.1" } }, "chokidar": { @@ -27626,9 +27733,9 @@ } }, "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dev": true, "optional": true, "requires": { @@ -27720,6 +27827,7 @@ "resolved": "https://registry.npmjs.org/webpack/-/webpack-4.47.0.tgz", "integrity": "sha512-td7fYwgLSrky3fI1EuU5cneU4+pbH6GgOfuKNS1tNPcfdGinGELAqsb/BP4nnvZyKSG2i/xFGU7+n2PvZA8HJQ==", "dev": true, + "peer": true, "requires": { "@webassemblyjs/ast": "1.9.0", "@webassemblyjs/helper-module-context": "1.9.0", diff --git a/package.json b/package.json index 729829034a..e30c63a72d 100644 --- a/package.json +++ b/package.json @@ -1,12 +1,20 @@ { "scripts": { - "docs:dev": "vuepress dev docs", - "docs:build": "vuepress build docs" + "gen:ssl": "node scripts/ensure_ssl_certs.js", + "gen:metadata": "node docsgen/gen_wu.js", + "gen:site:only": "vuepress build docs", + "docs:dev": "npm run gen:metadata && vuepress dev docs", + "docs:build": "npm run 
gen:metadata && npm run gen:site:only", + "site:preview": " npm run gen:ssl && node scripts/preview_site.js", + "site:preview:full": "npm run docs:build && npm run gen:ssl && node scripts/preview_site.js" }, "devDependencies": { "vuepress": "^1.9.10" }, "dependencies": { + "ini": "^4.1.1", + "markdown-table": "^3.0.3", + "mime-types": "^2.1.35", "vuepress-plugin-sitemap": "^2.3.1" } -} +} \ No newline at end of file diff --git a/scripts/CI_SCRIPTS.md b/scripts/CI_SCRIPTS.md new file mode 100644 index 0000000000..a63f2b7c5d --- /dev/null +++ b/scripts/CI_SCRIPTS.md @@ -0,0 +1,790 @@ +# CI/CD Scripts Documentation + +This documentation describes the CI/CD scripts used to build OpenMQTTGateway firmware and documentation. These scripts work in GitHub Actions, locally, and in any CI/CD environment. + +- [Overview](#cicd-scripts-documentation) +- [Quick Reference](#quick-reference) + - [Script Hierarchy](#script-hierarchy) + - [Script Description](#script-description) + - [Output Structure](#output-structure) +- [Commands](#commands) + - [ci.sh - Main Entry Point](#commands) + - [ci.sh build - Build Firmware](#cish-build---build-firmware) + - [ci.sh site - Build Documentation](#cish-site---build-documentation) + - [ci.sh qa - Code Quality Checks](#cish-qa---code-quality-checks) + - [ci.sh security - Security Vulnerability Scan](#cish-security---security-vulnerability-scan) +- [Internal Scripts](#internal-scripts) + - [ci_list-env.sh](#ci_list-envsh) + - [ci_build_firmware.sh](#ci_build_firmwaresh) + - [ci_prepare_artifacts.sh](#ci_prepare_artifactssh) + - [ci_security.sh](#ci_securitysh) + - [ci_00_config.sh](#ci_00_configsh) +- [Python Helper Scripts](#python-helper-scripts) + - [generate_board_docs.py](#generate_board_docspy) + - [gen_wu.py](#gen_wupy) +- [Environment Variables](#environment-variables) +- [Exit Codes](#exit-codes) +- [Environment Detection](#environment-detection) +- [GitHub Actions Workflows Integration](#github-actions-workflows-integration) + + + +## Quick Reference + +### Script Hierarchy + +``` +ci.sh (dispatcher) +├── build → ci_build.sh → ci_build_firmware.sh +│ → ci_prepare_artifacts.sh (when --deploy-ready) +├── site → ci_site.sh +├── qa → ci_qa.sh +├── security → ci_security.sh (vulnerability scanning with Trivy) +├── list-env → ci_list-env.sh +└── all → qa + build (all envs with --mode) + site sequential +``` + +### Script Description + +| Script | Purpose | Called By | +|--------|---------|----------| +| `ci.sh` | Main command dispatcher | User/GitHub Actions | +| `ci_build.sh` | Build firmware orchestrator | ci.sh build | +| `ci_site.sh` | Documentation build orchestrator | ci.sh site | +| `ci_00_config.sh` | Shared configuration loader | ci.sh | +| `ci_qa.sh` | Quality assurance and shellcheck | ci.sh qa | +| `ci_security.sh` | Security vulnerability scanning with Trivy | ci.sh security | +| `ci_list-env.sh` | List PlatformIO environments (fast JSON or full scan) | ci.sh list-env / direct | +| `ci_build_firmware.sh` | PlatformIO build execution | ci_build.sh | +| `ci_prepare_artifacts.sh` | Artifact packaging (when requested) | ci_build.sh | + + +### Output Structure + +Build outputs are organized in the project root: + +``` +.pio/build// # PlatformIO build outputs +├── firmware.bin # Main firmware binary +├── bootloader.bin # ESP32 bootloader +└── partitions.bin # ESP32 partition table + +generated/ +├── artifacts/ # Packaged firmware artifacts +├── site/ # Built documentation (VuePress output) +└── reports/ # Security scan and SBOM reports + └── sbom/ # SBOM in 
CycloneDX and SPDX formats
+```
+
+## Commands
+
+`ci.sh` is the main entry point. It dispatches to the specialized scripts.
+
+**Usage:**
+```bash
+./scripts/ci.sh <command> [OPTIONS]
+```
+
+**Commands:**
+- `build` - Build firmware for a PlatformIO environment (optionally prepare artifacts)
+- `site` or `docs` - Build documentation site
+- `qa` or `lint` - Run formatting and shellcheck checks
+- `security` - Scan for security vulnerabilities using Trivy (filesystem, container images)
+- `all` or `pipeline` - Run qa → build (all environments) → site with injected mode
+- `list-env` - List available PlatformIO environments (JSON fast list or full ini scan)
+
+**Examples:**
+```bash
+# Help per command
+./scripts/ci.sh build --help
+./scripts/ci.sh site --help
+./scripts/ci.sh qa --help
+./scripts/ci.sh security --help
+./scripts/ci.sh list-env --help
+
+# Build firmware
+./scripts/ci.sh build esp32dev-all-test --mode dev
+./scripts/ci.sh build esp32dev-bt --version v1.8.0 --deploy-ready --output generated/artifacts
+
+# Build docs
+./scripts/ci.sh site --mode prod --url-prefix /
+./scripts/ci.sh site --mode dev --preview
+
+# QA (formatting + shellcheck)
+./scripts/ci.sh qa --check
+./scripts/ci.sh qa --fix --verbose
+
+# Security scanning
+./scripts/ci.sh security --scan-type fs --severity HIGH,CRITICAL
+./scripts/ci.sh security --scan-type fs --generate-sbom
+
+# Full pipeline (qa + build all envs + site)
+./scripts/ci.sh all --mode dev
+./scripts/ci.sh all --mode prod --preview
+```
+
+---
+
+### ci.sh build - Build Firmware
+
+Runs the build pipeline (tool checks → PlatformIO build → optional artifact packaging).
+
+**Usage:**
+```bash
+./scripts/ci.sh build <environment> [OPTIONS]
+```
+
+**Required Argument:**
+- `<environment>` PlatformIO environment name (e.g., esp32dev-ble)
+
+**Options:**
+- `--mode <dev|prod>` Build mode (default: prod). `dev` enables OTA flags in the PlatformIO build.
+- `--deploy-ready` Copy/rename build outputs and libs via ci_prepare_artifacts.sh.
+- `--version [TAG]` Set version used for `OMG_VERSION` and artifact folder naming. If TAG is omitted, the script auto-generates one (CI: BUILD_NUMBER/GIT_COMMIT, local: timestamp).
+- `--output <dir>` Output directory for packaged artifacts (only used when `--deploy-ready` is set, default `generated/artifacts`).
+- `--skip-verification` Skip the tool availability checks.
+- `--clean` Clean the PlatformIO environment before building.
+- `--verbose` Verbose PlatformIO output.
+- `--help` Show help.
+
+**Behavior:**
+- The tool check verifies python3, platformio, and git (can be skipped with `--skip-verification`).
+- Builds via ci_build_firmware.sh (adds `--dev-ota` when `--mode dev`).
+- Packaging runs only when `--deploy-ready` is provided; artifacts land under `generated/artifacts/firmware_build/` with env-prefixed filenames plus zipped libraries.
+
+**Examples:**
+```bash
+# Dev build with OTA flags
+./scripts/ci.sh build esp32dev-ble --mode dev
+
+# Prod build with auto-version and deployable artifacts
+./scripts/ci.sh build esp32dev-bt --version --deploy-ready --output generated/artifacts
+
+# Clean + verbose build
+./scripts/ci.sh build nodemcuv2-rf --clean --verbose
+```
+
+---
+
+### ci.sh site - Build Documentation
+
+Builds the VuePress documentation site.
+
+**Usage:**
+```bash
+./scripts/ci.sh site [OPTIONS]
+```
+
+**Options:**
+- `--mode <dev|prod>` Build mode (default: dev).
+- `--url-prefix <path>` Base URL path for links (e.g., '/' for root, '/dev/' for dev) (default: /dev/).
+- `--version <version>` Version string written to meta.json (default: edge).
+- `--preview` Start the local HTTPS preview server after building (https://localhost:8443). +- `--clean` Remove generated/site folder before building. +- `--insecure-curl` Allow curl to skip TLS verification when downloading common config. +- `--help` Show help. + +**Behavior:** +- Checks for node, npm, openssl; installs npm deps; downloads commonConfig.js. +- Writes docs/.vuepress/meta.json with mode/url_prefix/version; builds via `npm run docs:build`. +- Preview mode runs `npm run site:preview`. + +**Examples:** +```bash +# Production build +./scripts/ci.sh site --mode prod --url-prefix / + +# Development build with preview +./scripts/ci.sh site --mode dev --url-prefix /dev/ --version edge --preview + +# Clean then build with custom version +./scripts/ci.sh site --clean --version 1.8.0 +``` + +--- + +### ci.sh qa - Code Quality Checks + +Checks and fixes code formatting using clang-format and runs shellcheck on shell scripts. + +**Usage:** +```bash +./scripts/ci.sh qa [OPTIONS] +``` + +**Options:** +- `--check` Check formatting only (default) +- `--fix` Apply formatting in place +- `--format` Run only format checks +- `--shellcheck` Run shellcheck on shell scripts in scripts/ directory +- `--all` Future hook to run all QA checks (current implementation runs formatting + shellcheck) +- `--source ` Source directory for formatting checks (default: main) +- `--extensions ` File extensions for formatting (comma-separated, default: h,ino,cpp) +- `--clang-format-version ` clang-format version to use (default: 9) +- `--verbose` Verbose output +- `--help` Show help + +**Execution Flow:** +``` +ci.sh qa --check --source main --extensions h,ino + │ + ├─> ci_qa.sh (orchestrator) + │ ├─> check_clang_format() - Find clang-format-9 or clang-format + │ │ ├─> find_files() - Locate files matching extensions in source dir + │ │ └─> check_formatting() - Run clang-format --dry-run --Werror + │ │ └─> Report files with formatting issues + │ │ + │ └─> shellcheck_check() - Find and scan shell scripts + │ ├─> find_shell_scripts() - Locate *.sh files in scripts/ directory + │ └─> run_shellcheck() - Run shellcheck on found scripts + │ └─> Report shell script issues + │ + └─> Exit code: 0 (pass) or 1 (issues found) +``` + +**Examples:** +```bash +# Check both formatting and shellcheck (default) +./scripts/ci.sh qa --check + +# Fix formatting automatically +./scripts/ci.sh qa --fix + +# Check only formatting for specific directory +./scripts/ci.sh qa --check --format --source lib + +# Check only .h and .ino files +./scripts/ci.sh qa --check --extensions h,ino + +# Check only shellcheck for shell scripts +./scripts/ci.sh qa --check --shellcheck + +# Check with verbose output +./scripts/ci.sh qa --check --verbose + +# Use different clang-format version +./scripts/ci.sh qa --check --clang-format-version 11 +``` + +**Required Tools:** +- clang-format (version specified, default: 9) + - Install: `sudo apt-get install clang-format-9` +- shellcheck (for shell script linting) + - Install: `sudo apt-get install shellcheck` + +**Output:** +- Check mode: Lists files with formatting issues and shell script errors, shows diffs +- Fix mode: Modifies formatting in-place and reports changes +- Exit code 0: All checks passed (proper formatting and no shellcheck errors) +- Exit code 1: Issues found (formatting or shellcheck violations) + +--- + +### ci.sh security - Security Vulnerability Scan + +Scans the project for security vulnerabilities using Trivy and generates Software Bill of Materials (SBOM). 
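+
+For orientation, the wrapped scanner calls are roughly equivalent to the following Trivy invocations (an illustrative sketch; the exact flags are assembled inside `ci_security.sh` and may differ):
+
+```bash
+# Approximate equivalents of a filesystem scan with SARIF output plus SBOM generation
+trivy fs --severity HIGH,CRITICAL --format sarif --output generated/reports/trivy-results.sarif .
+trivy fs --format cyclonedx --output generated/reports/sbom/sbom.cyclonedx.json .
+```
+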
+ +**Usage:** +```bash +./scripts/ci.sh security [OPTIONS] +``` + +**Options:** +- `--scan-type ` Type of scan (default: fs) + - `fs` - Filesystem scan (default, scans for vulnerabilities and misconfigurations) + - `config` - Configuration scan only + - `image` - Container image scan +- `--severity ` Severity levels to report (comma-separated: UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL) (default: HIGH,CRITICAL) +- `--scan-path ` Path to scan (default: current directory `.`) +- `--generate-sbom` Generate Software Bill of Materials in CycloneDX and SPDX formats (default: true) +- `--exit-code <0|1>` Exit code when vulnerabilities found (0=continue, 1=fail) (default: 0) +- `--upload-to-security-tab` Upload SARIF report to GitHub Security tab (GitHub Actions only, default: true) +- `--verbose` Verbose output +- `--help` Show help + +**Behavior:** +- Installs Trivy if not present +- Scans filesystem or configuration for known vulnerabilities +- Generates multiple report formats: SARIF, JSON, table summary +- Creates SBOM in CycloneDX and SPDX formats when `--generate-sbom` is enabled +- Reports are saved to `generated/reports/` directory +- Summary is appended to GitHub job summary when running in GitHub Actions +- Uploads SARIF to GitHub Security tab for dashboard visibility + +**Output Structure:** +``` +generated/reports/ +├── trivy-results.sarif # SARIF format (for GitHub Security tab) +├── trivy-results.json # JSON format (detailed results) +├── security-summary.md # Markdown summary +└── sbom/ + ├── sbom.cyclonedx.json # CycloneDX format + └── sbom.spdx.json # SPDX format +``` + +**Examples:** +```bash +# Scan filesystem for HIGH and CRITICAL vulnerabilities +./scripts/ci.sh security --scan-type fs --severity HIGH,CRITICAL + +# Full scan with all severity levels and SBOM generation +./scripts/ci.sh security --scan-type fs --severity UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL --generate-sbom + +# Scan specific path with verbose output +./scripts/ci.sh security --scan-path ./lib --verbose + +# Configuration scan only +./scripts/ci.sh security --scan-type config + +# Scan and fail on vulnerabilities +./scripts/ci.sh security --exit-code 1 +``` + +**Required Tools:** +- Trivy (vulnerability scanner) + - Install: `wget -qO - https://aquasecurity.github.io/trivy-repo/deb/public.key | sudo apt-key add -` && `echo "deb https://aquasecurity.github.io/trivy-repo/deb $(lsb_release -sc) main" | sudo tee -a /etc/apt/sources.list.d/trivy.list` && `sudo apt-get update && sudo apt-get install -y trivy` + +**Output:** +- Detailed SARIF and JSON reports for integration with tools +- Human-readable Markdown summary with findings count +- SBOM artifacts for supply chain tracking +- GitHub Security tab integration when in GitHub Actions +- Exit code 0: Scan completed (vulnerabilities may have been found) +- Exit code 1: Scan failed or critical vulnerabilities found (only if `--exit-code 1`) + +--- + +### ci.sh all - Complete Pipeline + +Runs the complete CI/CD pipeline (qa → build all environments → site) with mode injected to all steps. + +**Usage:** +```bash +./scripts/ci.sh all --mode [--preview] +``` + +**Required Options:** +- `--mode ` Build mode (required). 
Injected to all pipeline steps: + - `qa`: Always runs `--check` + - `build`: Passes mode to all PlatformIO environments (dev enables OTA flags) + - `site`: Passes mode for documentation generation + +**Optional Options:** +- `--preview` Start local HTTPS preview server at https://localhost:8443 after building the site +- `--help` Show help + +**Behavior:** +- No environment argument needed; builds **all** available environments +- All three steps (qa, build, site) receive the same `--mode` value +- If any step fails, the pipeline aborts +- Site build warnings do not abort the pipeline (continues with success status) + +**Execution Flow:** +``` +ci.sh all --mode dev --preview + │ + ├─> Step 1: ci_qa.sh --check + │ └─> Exit on failure + │ + ├─> Step 2: ci_build.sh --mode dev (builds all environments) + │ └─> Exit on failure + │ + └─> Step 3: ci_site.sh --mode dev --preview + ├─> Build documentation + └─> Start preview server at https://localhost:8443 +``` + +**Examples:** +```bash +# Complete pipeline in dev mode +./scripts/ci.sh all --mode dev + +# Complete pipeline in prod mode with site preview +./scripts/ci.sh all --mode prod --preview + +# Help for this command +./scripts/ci.sh all --help +``` + +**Output:** +- Step-by-step progress messages for each pipeline phase +- Final summary showing mode, preview status, duration, and overall status +- Exit code 0: All steps successful +- Exit code 1: Any step failed + +--- + +### ci.sh list-env - List Environments + +Lists PlatformIO environments available to build. + +**Usage:** +```bash +./scripts/ci.sh list-env [--full] +``` + +**Options:** +- Default: read .github/workflows/environments.json (requires jq) and show curated list. +- `--full` Parse platformio.ini and environments.ini for an exhaustive list (skips *-test and *-all-). +- `--help` Show help. + +**Examples:** +```bash +./scripts/ci.sh list-env +./scripts/ci.sh list-env --full +``` + +--- + +## Internal Scripts + +These scripts are called by the main orchestrators. They can be run directly for troubleshooting, but the preferred entrypoints are the ci.sh commands. + +### ci_list-env.sh + +Lists PlatformIO environments for OpenMQTTGateway. + +**Called By:** `ci.sh list-env` or direct call + +**Usage:** +```bash +./scripts/ci_list-env.sh [--full] +``` + +**Options:** +- Default: read .github/workflows/environments.json for a curated list (needs jq) +- `--full` - Parse platformio.ini and environments.ini for all envs except *-test and *-all- +- `-h|--help` - Show help + +**Output:** +- Shows sorted environments in columns and prints the total count + +**Examples:** +```bash +./scripts/ci_list-env.sh +./scripts/ci_list-env.sh --full +``` + +### ci_build_firmware.sh + +Executes PlatformIO build for specified environment. 
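+
+Conceptually the script wraps a single PlatformIO invocation; a minimal sketch of a dev-OTA build, assuming the environment variables documented below, is:
+
+```bash
+# Minimal sketch - the real script adds tool checks, logging, and artifact verification
+export PYTHONIOENCODING=utf-8 PYTHONUTF8=1
+export PLATFORMIO_BUILD_FLAGS="-DDEVELOPMENTOTA=true"  # only when --dev-ota is passed
+platformio run -e esp32dev-ble
+```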
+ +**Called By:** `ci_build.sh` + +**Usage:** +```bash +./scripts/ci_build_firmware.sh [OPTIONS] +``` + +**Arguments:** +- `` - PlatformIO environment name + +**Options:** +- `--version ` - Set OMG_VERSION for the build (passed through from ci_build.sh) +- `--dev-ota` - Enable development OTA (sets PLATFORMIO_BUILD_FLAGS) +- `--clean` - Clean before build +- `--verbose` - Verbose PlatformIO output +- `--no-verify` - Skip artifact verification after build + +**Environment Variables Set:** +- `PYTHONIOENCODING=utf-8` +- `PYTHONUTF8=1` +- `PLATFORMIO_BUILD_FLAGS="-DDEVELOPMENTOTA=true"` (when --dev-ota) +- `OMG_VERSION` (when --version is provided) + +**PlatformIO Command:** +```bash +platformio run -e [--verbose] +``` + +**Output Location:** +- `.pio/build//firmware.bin` +- `.pio/build//bootloader.bin` (ESP32 only) +- `.pio/build//partitions.bin` (ESP32 only) + +--- + +### ci_prepare_artifacts.sh + +Packages firmware binaries and libraries from a PlatformIO build directory; can also only create source archive when no environment is provided. + +**Called By:** `ci_build.sh` + +**Usage:** +```bash +./scripts/ci_prepare_artifacts.sh [OPTIONS] +``` + +**Arguments:** +- `` - PlatformIO environment name (optional; if omitted only source archive is created) + +**Options:** +- `--output ` Output directory (default: generated/artifacts) +- `--version ` Append a version subfolder inside the output directory +- `--clean` Clean output directory before writing +- `--help` Show help + +**Behavior:** +- If `version` is provided, outputs go to `//firmware_build/`; otherwise to `/firmware_build/`. +- With an environment: copies and renames firmware.bin/partitions.bin/bootloader.bin/boot_app0.bin to `-*.bin`, then zips libraries for that env into `*-libraries.tgz`. +- Without an environment: only creates `OpenMQTTGateway_sources.tgz` from `main` and `LICENSE.txt`. +- Lists the prepared artifacts and their sizes at the end. + +--- + +### ci_00_config.sh + +Shared configuration and helper functions for all CI scripts. + +**Sourced By:** All ci_*.sh scripts + +**Provides:** +- Color codes for terminal output (BLUE, GREEN, RED, YELLOW, NC) +- Logging functions: `log_info()`, `log_warn()`, `log_error()`, `log_success()` +- Path constants: `BUILD_DIR`, `ARTIFACTS_DIR`, `SITE_DIR` +- Common utility functions + +**Constants Defined:** +- `BUILD_DIR=".pio/build"` - PlatformIO build directory +- `ARTIFACTS_DIR="generated/artifacts"` - Artifact output directory +- `SITE_DIR="generated/site"` - Documentation output directory +- `REPORTS_DIR="generated/reports"` - Security scan and quality reports directory + +**Logging Functions:** +```bash +log_info "message" # Blue [INFO] prefix +log_warn "message" # Yellow [WARN] prefix +log_error "message" # Red [ERROR] prefix +log_success "message" # Green [SUCCESS] prefix +``` + +--- + +### ci_security.sh + +Performs security vulnerability scanning and Software Bill of Materials (SBOM) generation using Trivy. + +**Called By:** `ci.sh security` + +**Usage:** +```bash +./scripts/ci_security.sh [OPTIONS] +``` + +**Options:** +- `--scan-type ` Type of scan (default: fs) + - `fs` - Filesystem scan (default) + - `config` - Configuration scan + - `image` - Container image scan +- `--severity ` Severity levels (comma-separated: UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL) (default: HIGH,CRITICAL) +- `--scan-path ` Path to scan (default: .) 
+- `--generate-sbom` Generate SBOM (default: true) +- `--exit-code <0|1>` Exit code behavior (0=continue, 1=fail) (default: 0) +- `--upload-to-security-tab` Upload SARIF to GitHub (default: true) +- `--verbose` Verbose output +- `--help` Show help + +**Behavior:** +- Ensures Trivy is installed via package manager +- Creates `generated/reports/` directory structure +- Runs Trivy with specified parameters +- Generates SARIF, JSON, and table formats +- Creates SBOM in CycloneDX and SPDX formats (when enabled) +- Uploads SARIF to GitHub Security tab when `GITHUB_TOKEN` and `--upload-to-security-tab` are set +- Appends summary to GitHub job summary if in GitHub Actions +- Validates critical vulnerabilities and exits with code 1 if found and `--exit-code 1` is set + +**Output Files:** +- `generated/reports/trivy-results.sarif` - SARIF format for GitHub integration +- `generated/reports/trivy-results.json` - Full JSON results +- `generated/reports/security-summary.md` - Human-readable summary +- `generated/reports/sbom/sbom.cyclonedx.json` - CycloneDX SBOM +- `generated/reports/sbom/sbom.spdx.json` - SPDX SBOM + +**Exit Codes:** +- `0` - Success (vulnerabilities may have been found) +- `1` - Scan failed, critical vulnerabilities found (only if `--exit-code 1`), or missing dependencies + +**Trivy Integration:** +- Scans for known vulnerabilities in dependencies +- Detects misconfigurations and insecure practices +- Generates compliant SBOM artifacts +- Provides detailed reporting in multiple formats + +**Example:** +```bash +# Scan filesystem with severity filter +./scripts/ci_security.sh --scan-type fs --severity HIGH,CRITICAL + +# Local scan with SBOM and JSON output +./scripts/ci_security.sh --scan-type fs --generate-sbom --verbose + +# In GitHub Actions with security tab upload +./scripts/ci_security.sh --scan-type fs --severity HIGH,CRITICAL --upload-to-security-tab +``` + +--- + +## Python Helper Scripts + +Legacy helper scripts are kept for compatibility; they are not called by the current ci_site.sh flow. Below the list: + - `generate_board_docs.py` + - `gen_wu.py` + +### generate_board_docs.py + +Auto-generates board-specific documentation pages from platformio.ini. + +**Called By:** Not invoked by current ci_site.sh (legacy helper) + +**Usage:** +```bash +python3 ./scripts/generate_board_docs.py +``` + +**Input:** +- `platformio.ini` - Board configurations +- `environments.ini` - Additional environments + +**Output:** +- Markdown files in `docs/` directory for each board configuration + +**Purpose:** +- Creates documentation pages for each hardware board +- Extracts configuration details from PlatformIO environment definitions +- Formats technical specifications and pin mappings + +--- + +### gen_wu.py + +Generates WebUploader manifest for OTA firmware updates. 
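As an illustration, a development manifest for the upstream repository could be generated locally with:

```bash
python3 ./scripts/gen_wu.py --dev 1technophile/OpenMQTTGateway
```

The arguments are described below.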
+ +**Called By:** Not invoked by current ci_site.sh (legacy helper) + +**Usage:** +```bash +python3 ./scripts/gen_wu.py [--dev] [repository] +``` + +**Arguments:** +- `--dev` - Generate development manifest +- `repository` - GitHub repository name (e.g., 1technophile/OpenMQTTGateway) + +**Input:** +- `.pio/build//firmware.bin` - Compiled firmware files +- `scripts/latest_version.json` or `scripts/latest_version_dev.json` + +**Output:** +- WebUploader manifest JSON file in `docs/.vuepress/public/` + +**Purpose:** +- Creates manifest for web-based firmware updater +- Lists available firmware files with metadata +- Used by documentation site for OTA updates + +--- + +## Environment Variables + +Scripts respect these environment variables: + +- `CI`/`BUILD_NUMBER`/`GIT_COMMIT`: Used by ci_build.sh to auto-generate version when `--version` flag has no tag +- `PYTHONIOENCODING=utf-8`, `PYTHONUTF8=1`: Python encoding settings set by ci_build_firmware.sh +- `PLATFORMIO_BUILD_FLAGS`: Set to include development OTA flag when `--dev-ota` is used +- `OMG_VERSION`: Set when `--version` is passed to ci_build.sh/ci_build_firmware.sh + +--- + +## Exit Codes + +All scripts use standard exit codes: + +- `0` - Success +- `1` - General error or failure +- `2` - Missing required tools or dependencies + +Scripts use `set -euo pipefail` for strict error handling: +- `-e` - Exit on error +- `-u` - Exit on undefined variable +- `-o pipefail` - Exit on pipe failure + +--- + +## Environment Detection + +Scripts automatically detect if running in CI/CD: + +```bash +if [[ "${CI:-false}" == "true" ]]; then + # Running in CI/CD + # Disable interactive prompts + # Use different output formatting +fi +``` + +CI/CD environments typically set: +- `CI=true` +- `GITHUB_ACTIONS=true` (GitHub Actions) +- `BUILD_NUMBER` (build number) +- `GIT_COMMIT` (commit hash) + +--- + +## GitHub Actions Workflows Integration + +The CI scripts integrate with GitHub Actions workflows in `.github/workflows/`: + +### task-lint.yml +- Reusable workflow that runs `ci.sh qa --check` +- Installs clang-format and shellcheck +- Validates code formatting and shell script quality +- Can be called with custom source directory and file extensions + +### task-build.yml +- Main build workflow orchestrator +- Calls task-lint.yml for code quality checks +- Calls task-security-scan.yml for vulnerability scanning +- Builds firmware for all or specified environments +- Prepares and uploads build artifacts +- Supports matrix builds for multiple environments +- Manages build artifact retention + +### task-security-scan.yml +- Reusable security scanning workflow +- Installs Trivy vulnerability scanner +- Calls `ci_security.sh` with configurable parameters +- Generates SARIF, JSON, and SBOM reports +- Uploads SARIF to GitHub Security tab for code scanning dashboard +- Fails build on critical vulnerabilities when configured +- Uploads SBOM artifacts for supply chain tracking + +### security-scan.yml +- Scheduled security scanning (runs weekly by default) +- Triggered manually with input parameters +- Allows filtering by severity level +- Configurable exit behavior (fail or continue) +- Optional SBOM generation and upload + +**Workflow Dependencies:** +``` +task-build.yml +├─> task-lint.yml (linting) +├─> task-security-scan.yml (security scanning) +└─> Build environment matrix (firmware compilation) +``` + +**Key Features:** +- Parallel linting and security scans +- Artifact retention policies +- GitHub Security tab integration +- Detailed build reports +- SBOM 
generation for compliance +- Support for custom build parameters + +--- + +This documentation reflects the current implementation of CI/CD scripts. All scripts are located in `./scripts/` directory. + +For GitHub Actions workflow documentation, see `.github/workflows/README.md`. diff --git a/scripts/add_c_flags.py b/scripts/add_c_flags.py index 902a95d472..f9a7f216ea 100644 --- a/scripts/add_c_flags.py +++ b/scripts/add_c_flags.py @@ -1,3 +1,5 @@ +# Adds compiler flags to suppress warnings during PlatformIO build +# Used by: PlatformIO environments (esp32dev-pilight*, esp32-m5stick-c*) Import("env") diff --git a/scripts/ci.sh b/scripts/ci.sh new file mode 100755 index 0000000000..698484c8b4 --- /dev/null +++ b/scripts/ci.sh @@ -0,0 +1,328 @@ +#!/bin/bash +# CI/CD Main Entry Point - Command Dispatcher +# Routes commands to specialized scripts for build, site, qa, and all +# Usage: ./scripts/ci.sh [OPTIONS] + +set -euo pipefail + +# Constants +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +readonly SCRIPT_DIR + + +# Load shared configuration (colors, logging functions, paths) +if [[ -f "${SCRIPT_DIR}/ci_00_config.sh" ]]; then + source "${SCRIPT_DIR}/ci_00_config.sh" +else + echo "ERROR: ci_00_config.sh not found" >&2 + exit 1 +fi + +# Function to print banner +print_banner() { + echo "╔════════════════════════════════════════╗" + echo "║ OpenMQTTGateway CI/CD Pipeline ║" + echo "╚════════════════════════════════════════╝" + echo "" +} + +# Show usage +usage() { + cat << EOF +Usage: $0 [OPTIONS] + +OpenMQTTGateway CI/CD Pipeline - Main Entry Point + +Commands: + build Build firmware for specified environment + site Build and deploy documentation/website + qa Run quality assurance checks (linting, formatting) + security Run security vulnerability scan using Trivy + all Run complete pipeline (qa + build + site) + list-env List available environments for building firmware + +Examples: + # Build firmware + $0 build esp32dev-all-test --mode dev + $0 build esp32dev-bt --version v1.8.0 --deploy-ready + + # Build and deploy documentation + $0 site --mode prod --deploy + $0 site --mode dev --preview + + # Run quality checks + $0 qa --check + $0 qa --fix + + # Run security scan + $0 security + $0 security --scan-type config --exit-code 1 + $0 security --severity UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL + + # Run complete pipeline (all envs, mode required) + $0 all --mode dev + $0 all --mode prod --preview + $0 all --mode dev -e esp32dev-bt + + # List available environments + $0 list-env + +Get help for specific commands: + $0 build --help + $0 site --help + $0 qa --help + $0 security --help + $0 list-env --help + +EOF + exit 0 +} + + + +# Function to get list of environments to build +# Uses ci_list-env.sh (no parameters) and returns one env per line +get_environments() { + # Run the curated list script and normalize output + "${SCRIPT_DIR}/ci_list-env.sh" \ + | sed -r 's/\x1B\[[0-9;]*[mK]//g' \ + | tr '\t ' '\n' \ + | sed '/^\s*$/d' \ + | grep -E '^[A-Za-z0-9._-]+$' \ + | sort -u +} + +# Function to run complete pipeline using the underlying scripts +# Usage: run_all_pipeline --mode [--preview] +run_all_pipeline() { + local start_time + start_time=$(date +%s) + + local mode="" + local preview=false + local env_override="" + local version="" + local do_clean=false + + # Parse arguments + while [[ $# -gt 0 ]]; do + case "$1" in + --mode) + if [[ -z "${2:-}" ]]; then + log_error "--mode requires an argument (dev or prod)" + return 1 + fi + mode="$2" + if [[ "$mode" != "dev" && "$mode" != "prod" ]]; 
then + log_error "Invalid mode: $mode. Must be 'dev' or 'prod'" + return 1 + fi + shift 2 + ;; + --clean) + do_clean=true + shift + ;; + --preview) + preview=true + shift + ;; + -e|--env) + if [[ -z "${2:-}" ]]; then + log_error "-e|--env requires an environment name" + return 1 + fi + env_override="$2" + shift 2 + ;; + -v|--version) + if [[ -z "${2:-}" ]]; then + log_error "-v|--version requires a version string" + return 1 + fi + version="$2" + shift 2 + ;; + --help|-h) + echo "Usage: $0 all --mode [--preview]" + echo "" + echo "Options:" + echo " --mode Build mode (required)" + echo " --preview Optional Show site in local at http://localhost:8443" + echo " -e, --env Optional Build only the specified environment" + return 0 + ;; + *) + log_error "Unknown option: $1" + return 1 + ;; + esac + done + + # Validate mode is provided + if [[ -z "$mode" ]]; then + log_error "--mode is required. Usage: $0 all --mode [--preview]" + return 1 + fi + + log_info "Starting complete CI/CD pipeline (mode: $mode, preview: $preview)..." + echo "" + + # Step 1: Quality Assurance + log_info "═══ Step 1/3: Quality Assurance ═══" + log_info "RUN: ---> ${SCRIPT_DIR}/ci_qa.sh --check" + if ! "${SCRIPT_DIR}/ci_qa.sh" --check; then + log_error "QA checks failed. Pipeline aborted." + return 1 + fi + echo "" + + # Step 2: Build Firmware + log_info "═══ Step 2/3: Build Firmware ═══" + + # Get list of environments + local -a environments + if [[ -n "$env_override" ]]; then + log_info "Using single environment override: $env_override" + environments=("$env_override") + else + mapfile -t environments < <(get_environments) + fi + + if [[ ${#environments[@]} -eq 0 ]]; then + log_error "No environments found to build" + return 1 + fi + + log_info "Found ${#environments[@]} environments to build" + echo "" + + local build_count=0 + local failed_builds=() + local build_args=() + + if [[ -n "$version" ]]; then + build_args+=("--version" "$version") + log_info "Using version override: $version" + fi + build_args+=("--mode" "$mode") + build_args+=("--deploy-ready") + if [[ "$do_clean" == true ]]; then + build_args+=("--clean") + fi + + for env in "${environments[@]}"; do + ((++build_count)) + log_info "[$build_count/${#environments[@]}] Building: $env" + + set +e + log_info "RUN: ---> ${SCRIPT_DIR}/ci_build.sh" "$env" "${build_args[@]}" + "${SCRIPT_DIR}/ci_build.sh" "$env" "${build_args[@]}" + local rc=$? + set -e + if [[ $rc -ne 0 ]]; then + log_error "Build failed for environment: $env" + failed_builds+=("$env") + fi + done + + echo "" + if [[ ${#failed_builds[@]} -gt 0 ]]; then + log_error "Build failed for ${#failed_builds[@]} environment(s):" + printf ' - %s\n' "${failed_builds[@]}" + return 1 + fi + + log_success "All environments built successfully (${#environments[@]} total)" + echo "" + + # Step 3: Build Site + log_info "═══ Step 3/3: Build Documentation ═══" + local site_args=("--mode" "$mode") + if [[ "$preview" == true ]]; then + site_args+=("--preview") + fi + if [[ -n "$version" ]]; then + site_args+=("--version" "$version") + fi + if [[ "$do_clean" == true ]]; then + site_args+=("--clean") + fi + + log_info "RUN: --->${SCRIPT_DIR}/ci_site.sh" "${site_args[@]}" + if ! "${SCRIPT_DIR}/ci_site.sh" "${site_args[@]}"; then + log_warn "Site build failed, but continuing..." 
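        # A failed site build is deliberately non-fatal here: the pipeline summary below still reports SUCCESS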
+ fi + echo "" + + local end_time + end_time=$(date +%s) + local duration=$((end_time - start_time)) + + echo "" + echo "╔════════════════════════════════════════╗" + echo "║ Complete Pipeline Summary ║" + echo "╚════════════════════════════════════════╝" + echo " Mode: $mode" + echo " Preview: $preview" + echo " Total Duration: ${duration}s" + echo " Status: SUCCESS ✓" + echo "╚════════════════════════════════════════╝" +} + +# Main execution +main() { + # Check if no arguments provided + if [[ $# -eq 0 ]]; then + print_banner + usage + fi + + # Get command + local command="$1" + shift || true + + # Handle help flags + if [[ "$command" == "--help" || "$command" == "-h" ]]; then + print_banner + usage + fi + + print_banner + + # Route to appropriate pipeline + case "$command" in + build) + log_info "Executing build pipeline..." + "${SCRIPT_DIR}/ci_build.sh" "$@" + ;; + site|docs) + log_info "Executing site pipeline..." + "${SCRIPT_DIR}/ci_site.sh" "$@" + ;; + qa|lint) + log_info "Executing QA pipeline..." + "${SCRIPT_DIR}/ci_qa.sh" "$@" + ;; + security) + log_info "Executing security scan..." + "${SCRIPT_DIR}/ci_security.sh" "$@" + ;; + list-env) + log_info "Executing list-env pipeline..." + "${SCRIPT_DIR}/ci_list-env.sh" "$@" + ;; + + all|pipeline) + run_all_pipeline "$@" + ;; + *) + log_error "Unknown command: $command" + echo "" + usage + ;; + esac +} + +# Execute main function +main "$@" diff --git a/scripts/ci_00_config.sh b/scripts/ci_00_config.sh new file mode 100755 index 0000000000..a29e1cc0af --- /dev/null +++ b/scripts/ci_00_config.sh @@ -0,0 +1,31 @@ +#!/bin/bash +# shellcheck disable=SC2034 +# Build Scripts Configuration +# Used by: All build scripts for centralized configuration + +# Centralized Output Directory Structure +# All CI/CD generated files go under generated/ +ARTIFACTS_DIR="generated/artifacts" +SITE_DIR="generated/site" +REPORTS_DIR="generated/reports" + +# PlatformIO Directory Configuration +BUILD_DIR=".pio/build" + +# ============================================================================ +# Colors - ANSI color codes for terminal output +# ============================================================================ +readonly RED='\033[0;31m' +readonly GREEN='\033[0;32m' +readonly YELLOW='\033[1;33m' +readonly BLUE='\033[0;34m' +readonly NC='\033[0m' # No Color + +# ============================================================================ +# Logging Functions - Standardized logging across all build scripts +# ============================================================================ +log_info() { echo -e "${GREEN}[INFO]${NC} $*" >&2; } +log_warn() { echo -e "${YELLOW}[WARN]${NC} $*" >&2; } +log_error() { echo -e "${RED}[ERROR]${NC} $*" >&2; } +log_success() { echo -e "${GREEN}[SUCCESS]${NC} $*" >&2; } +log_step() { echo -e "${BLUE}[STEP]${NC} $*" >&2; } diff --git a/scripts/ci_build.sh b/scripts/ci_build.sh new file mode 100755 index 0000000000..9a0e970bc2 --- /dev/null +++ b/scripts/ci_build.sh @@ -0,0 +1,359 @@ +#!/bin/bash +# CI/CD agnostic wrapper for complete build pipeline +# Orchestrates all build scripts with a single command +# Usage: ./scripts/ci.sh [OPTIONS] + +set -euo pipefail + +# Constants +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(dirname "$SCRIPT_DIR")" +readonly SCRIPT_DIR +readonly PROJECT_ROOT + +# Load shared configuration (colors, logging functions, paths) +if [[ -f "${SCRIPT_DIR}/ci_00_config.sh" ]]; then + source "${SCRIPT_DIR}/ci_00_config.sh" +else + echo "ERROR: ci_00_config.sh not found" >&2 
+ exit 1 +fi + +# Function to print banner +print_banner() { + echo "╔════════════════════════════════════════╗" + echo "║ OpenMQTTGateway CI/CD Build ║" + echo "╚════════════════════════════════════════╝" + echo "" +} + +# Function to print summary +print_summary() { + local env="$1" + local version="$2" + local start_time="$3" + local end_time + end_time=$(date +%s) + local duration=$((end_time - start_time)) + + echo "" + echo "╔════════════════════════════════════════╗" + echo "║ Build Summary ║" + echo "╚════════════════════════════════════════╝" + echo " Environment: $env" + echo " Version: $version" + echo " Duration: ${duration}s" + echo " Status: SUCCESS ✓" + echo "╚════════════════════════════════════════╝" +} + +# Function to check if command exists +command_exists() { + command -v "$1" >/dev/null 2>&1 +} + +# Function to get command version +get_command_version() { + local cmd="$1" + case "$cmd" in + platformio) + platformio --version 2>&1 | head -n1 | grep -oP '\d+\.\d+\.\d+' || echo "unknown" + ;; + python|python3) + python3 --version 2>&1 | grep -oP '\d+\.\d+' || echo "unknown" + ;; + *) + echo "unknown" + ;; + esac +} + +# Function to verify required tools +verify_build_tools() { + log_info "Verifying required build tools..." + + local missing_tools=() + + # Check Python + if ! command_exists python3; then + missing_tools+=("python3") + else + local python_version + python_version=$(get_command_version python3) + log_info "✓ Python ${python_version} found" + fi + + # Check PlatformIO + if ! command_exists platformio; then + missing_tools+=("platformio") + else + local pio_version + pio_version=$(get_command_version platformio) + log_info "✓ PlatformIO ${pio_version} found" + fi + + # Check git (for version auto-generation) + if ! command_exists git; then + log_warn "git not found (optional, but recommended)" + else + log_info "✓ git found" + fi + + # Report missing tools + if [[ ${#missing_tools[@]} -gt 0 ]]; then + log_error "Missing required tools: ${missing_tools[*]}" + log_error "" + log_error "Please install missing tools:" + for tool in "${missing_tools[@]}"; do + case "$tool" in + python3) + log_error " - Python 3: https://www.python.org/downloads/" + ;; + platformio) + log_error " - PlatformIO: pip3 install platformio" + log_error " or: pip3 install ${PLATFORMIO_VERSION:-platformio}" + ;; + esac + done + log_error "" + log_error "Or skip this check with: --skip-verification" + return 1 + fi + + log_info "All required tools are available" + return 0 +} + +# Function to cleanup on error +cleanup_on_error() { + log_error "Build failed, cleaning up..." + # Restore any backups + find . -name "*.bak" -type f -exec bash -c 'mv "$1" "${1%.bak}"' _ {} \; 2>/dev/null || true +} + +# Show usage +usage() { + cat << EOF +Usage: $0 [OPTIONS] + +Complete CI/CD build pipeline wrapper. 
+ +Arguments: + environment PlatformIO environment name + +Options: + --mode MODE Build mode: 'prod' or 'dev' [default: prod] + 'dev' enables OTA and development features + --deploy-ready Prepare for deployment (renamed artifacts) + --version [TAG] Set version tag (if TAG omitted, auto-generated) + --output DIR Output directory for artifacts [default: generated/artifacts/] + --skip-verification Skip build tools verification + --clean Clean build before starting + --verbose Enable verbose output + --help Show this help message + +Environment Variables: + CI Set to 'true' in CI/CD environments + BUILD_NUMBER Build number from CI/CD + GIT_COMMIT Git commit hash for versioning + +Examples: + # Local development build + $0 esp32dev-all-test --mode dev + + # Production release build + $0 esp32dev-bt --version v1.7.0 --mode prod --deploy-ready + + # CI/CD build (auto-detects version) + $0 theengs-bridge --version --mode dev + +EOF +} + +# Main pipeline +main() { + local environment="" + local version="" + local set_version=false + local mode="" + local prepare_for_deploy=false + local output_dir="" + local skip_verification=false + local clean=false + local verbose=false + + # Parse arguments + while [[ $# -gt 0 ]]; do + case "$1" in + --version) + set_version=true + # Check if next argument is a version tag or another option + if [[ $# -gt 1 && ! "$2" =~ ^-- ]]; then + version="$2" + shift 2 + else + shift + fi + ;; + --mode) + if [[ $# -lt 2 ]]; then + log_error "--mode requires an argument: 'prod' or 'dev'" + usage + exit 1 + fi + if [[ "$2" != "prod" && "$2" != "dev" ]]; then + log_error "Invalid mode: $2. Must be 'prod' or 'dev'" + usage + exit 1 + fi + mode="$2" + shift 2 + ;; + --deploy-ready) + prepare_for_deploy=true + shift + ;; + --output) + if [[ $# -lt 2 ]]; then + log_error "--output requires a directory argument" + usage + exit 1 + fi + output_dir="$2" + shift 2 + ;; + --skip-verification) + skip_verification=true + shift + ;; + --clean) + clean=true + shift + ;; + --verbose) + verbose=true + shift + ;; + --help|-h) + usage + exit 0 + ;; + -*) + log_error "Unknown option: $1" + usage + exit 1 + ;; + *) + environment="$1" + shift + ;; + esac + done + + # Validate environment + if [[ -z "$environment" ]]; then + log_error "Environment name is required" + usage + exit 1 + fi + + # Set default mode if not specified + if [[ -z "$mode" ]]; then + mode="prod" + log_info "Mode not specified, defaulting to production" + fi + + + # Auto-generate version if --version flag is set but no tag provided + if [[ "$set_version" == "true" && -z "$version" ]]; then + if [[ "${CI:-false}" == "true" ]]; then + # CI/CD environment + version="${BUILD_NUMBER:-${GIT_COMMIT:-unknown}}" + else + # Local development + version="local-$(date +%Y%m%d-%H%M%S)" + fi + log_info "Auto-generated version: $version" + fi + + # Setup error handling + trap cleanup_on_error ERR + + # Change to project root + cd "$PROJECT_ROOT" + + # Start timer + local start_time + start_time=$(date +%s) + + # Print banner + print_banner + + # Step 1: Verify build tools + if [[ "$skip_verification" == "false" ]]; then + log_step "1/4 Verifying build tools..." 
+ verify_build_tools || exit 1 + echo "" + else + log_warn "Skipping build tools verification (--skip-verification)" + echo "" + fi + + # Step 2: Set version + # not required + + # Step 3: Build firmware + log_step "3/4 Building firmware for: $environment" + local build_opts=() + [[ "$mode" == "dev" ]] && build_opts+=(--dev-ota) + [[ "$clean" == "true" ]] && build_opts+=(--clean) + [[ "$verbose" == "true" ]] && build_opts+=(--verbose) + [[ "$set_version" == "true" ]] && build_opts+=(--version "$version") + + "${SCRIPT_DIR}/ci_build_firmware.sh" "$environment" "${build_opts[@]}" || exit 1 + echo "" + + # Step 4: Prepare artifacts + log_step "4/4 Preparing artifacts..." + + if [[ "$prepare_for_deploy" == "true" ]]; then + log_info "Preparing artifacts for deployment" + local artifact_opts=() + [[ "$clean" == "true" ]] && artifact_opts+=(--clean) + [[ -n "$output_dir" ]] && artifact_opts+=(--output "$output_dir") + [[ "$set_version" == "true" ]] && artifact_opts+=(--version "$version") + "${SCRIPT_DIR}/ci_prepare_artifacts.sh" "$environment" "${artifact_opts[@]}" || exit 1 + echo "" + # Check if site folder exists and copy built files to avoid rebuilding the site + if [[ "$mode" == "dev" ]]; then + local site_dir="${PROJECT_ROOT}/generated/site/dev" + local artifacts_dir="${output_dir:-${PROJECT_ROOT}/generated/artifacts/firmware_build}" + + if [[ -d "$site_dir" ]]; then + log_info "Site folder exists, copying built firmware files to site/dev..." + + # Copy firmware files for the current environment + for file in "${artifacts_dir}/${environment}"-*.bin "${artifacts_dir}/${environment}"-*.tgz; do + if [[ -f "$file" ]]; then + cp -v "$file" "$site_dir/" || log_warn "Failed to copy $(basename "$file")" + fi + done + + log_info "✓ Firmware files copied to site/dev (no site rebuild needed)" + else + log_warn "Site folder not found at: $site_dir" + log_info "Run 'ci.sh site ' to generate it" + fi + fi + fi + + # Print summary + print_summary "$environment" "$version" "$start_time" + + log_info "✓ Complete build pipeline finished successfully" +} + +# Run main if executed directly +if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then + main "$@" +fi diff --git a/scripts/ci_build_firmware.sh b/scripts/ci_build_firmware.sh new file mode 100755 index 0000000000..7e4d43f64b --- /dev/null +++ b/scripts/ci_build_firmware.sh @@ -0,0 +1,294 @@ +#!/bin/bash +# Builds firmware for specified PlatformIO environment +# Used by: CI/CD pipelines and local development +# Usage: ./build_firmware.sh [OPTIONS] + +set -euo pipefail + +# Constants +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(dirname "$SCRIPT_DIR")" +readonly SCRIPT_DIR +readonly PROJECT_ROOT + +# Load shared configuration (colors, logging functions, paths) +if [[ -f "${SCRIPT_DIR}/ci_00_config.sh" ]]; then + source "${SCRIPT_DIR}/ci_00_config.sh" +else + echo "ERROR: ci_00_config.sh not found" >&2 + exit 1 +fi + +# Set absolute path for BUILD_DIR +BUILD_DIR="${PROJECT_ROOT}/${BUILD_DIR}" + +# Script-specific logging function +log_build() { echo -e "${BLUE}[BUILD]${NC} $*"; } + +# Function to validate environment name +validate_environment() { + local env="$1" + + if [[ -z "$env" ]]; then + log_error "Environment name is required" + return 1 + fi + + # Check if environment exists in platformio.ini or environments.ini + if ! 
grep -q "^\[env:${env}\]" "${PROJECT_ROOT}/platformio.ini" "${PROJECT_ROOT}/environments.ini" 2>/dev/null; then + log_warn "Environment '${env}' not found in configuration files" + log_warn "Proceeding anyway (PlatformIO will validate)" + fi + + log_info "Building environment: $env" +} + + + +# Function to check PlatformIO availability +check_platformio() { + if ! command -v platformio >/dev/null 2>&1; then + log_error "PlatformIO not found. Run setup_build_env.sh first" + return 1 + fi +} + +# Function to clean build artifacts +clean_build() { + local env="$1" + local env_dir="${BUILD_DIR}/${env}" + + if [[ -d "$env_dir" ]]; then + log_info "Cleaning previous build artifacts for: $env" + rm -rf "$env_dir" + fi +} + +# Function to run PlatformIO build +run_build() { + local env="$1" + local clean="${2:-false}" + local verbose="${3:-false}" + local enable_dev_ota="${4:-false}" + local version="${5:-edge}" + + if [[ "$enable_dev_ota" == "true" ]]; then + log_info "Development OTA enabled" + export PLATFORMIO_BUILD_FLAGS='"-DDEVELOPMENTOTA=true"' + fi + + if [[ -n "$version" ]]; then + log_info "Setting firmware version to: $version" + export PLATFORMIO_BUILD_FLAGS="${PLATFORMIO_BUILD_FLAGS} -DOMG_VERSION=\\\"${version}\\\"" + fi + + log_build "Starting build for environment: $env" + + local build_cmd="platformio run -e $env" + + if [[ "$clean" == "true" ]]; then + build_cmd="platformio run -e $env --target clean && $build_cmd" + fi + + if [[ "$verbose" == "true" ]]; then + build_cmd="$build_cmd --verbose" + fi + + # Execute build with timing + local start_time + start_time=$(date +%s) + + log_info "PlatformIO Build Flags: $PLATFORMIO_BUILD_FLAGS" + log_info "Executing: $build_cmd" + + if eval "$build_cmd"; then + local end_time + end_time=$(date +%s) + local duration=$((end_time - start_time)) + + log_build "Build completed successfully in ${duration}s" + return 0 + else + log_error "Build failed for environment: $env" + return 1 + fi +} + +# Function to verify build artifacts +verify_artifacts() { + local env="$1" + local env_dir="${BUILD_DIR}/${env}" + + log_info "Verifying build artifacts..." 
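    # partitions.bin and bootloader.bin only exist for ESP32 targets, so they are treated as optional below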
+ + local artifacts_found=0 + local firmware="${env_dir}/firmware.bin" + local partitions="${env_dir}/partitions.bin" + local bootloader="${env_dir}/bootloader.bin" + + if [[ -f "$firmware" ]]; then + local size + size=$(stat -f%z "$firmware" 2>/dev/null || stat -c%s "$firmware" 2>/dev/null) + log_info "✓ firmware.bin (${size} bytes)" + ((artifacts_found++)) + else + log_warn "✗ firmware.bin not found" + fi + + if [[ -f "$partitions" ]]; then + log_info "✓ partitions.bin" + ((artifacts_found++)) + fi + + if [[ -f "$bootloader" ]]; then + log_info "✓ bootloader.bin" + ((artifacts_found++)) + fi + + if [[ $artifacts_found -eq 0 ]]; then + log_error "No build artifacts found" + return 1 + fi + + log_info "Found ${artifacts_found} artifact(s)" +} + +# Function to show build summary +show_build_summary() { + local env="$1" + local env_dir="${BUILD_DIR}/${env}" + + echo "" + echo "═══════════════════════════════════════" + echo " Build Summary: $env" + echo "═══════════════════════════════════════" + + if [[ -d "$env_dir" ]]; then + find "$env_dir" -name "*.bin" -o -name "*.elf" | while read -r file; do + local size + size=$(stat -f%z "$file" 2>/dev/null || stat -c%s "$file" 2>/dev/null) + local size_kb=$((size / 1024)) + echo " $(basename "$file"): ${size_kb} KB" + done + fi + + echo "═══════════════════════════════════════" +} + +# Show usage +usage() { + cat << EOF +Usage: $0 [OPTIONS] + +Build firmware for a specific PlatformIO environment. + +Arguments: + environment PlatformIO environment name (e.g., esp32dev-all-test) + +Options: + --dev-ota Enable development OTA build flags + --clean Clean build artifacts before building + --verbose Enable verbose build output + --no-verify Skip artifact verification + --version Set firmware version (default: edge) + --help Show this help message + +Examples: + $0 esp32dev-all-test + $0 esp32dev-bt --dev-ota + $0 theengs-bridge --clean --verbose + +EOF +} + +# Main execution +main() { + local environment="" + local enable_dev_ota=false + local clean_build_flag=false + local verbose=false + local verify=true + local version="edge" + + # Parse arguments + while [[ $# -gt 0 ]]; do + case "$1" in + --version) + version="$2" + shift 2 + ;; + --dev-ota) + enable_dev_ota=true + shift + ;; + --clean) + clean_build_flag=true + shift + ;; + --verbose) + verbose=true + shift + ;; + --no-verify) + verify=false + shift + ;; + --help|-h) + usage + exit 0 + ;; + -*) + log_error "Unknown option: $1" + usage + exit 1 + ;; + *) + environment="$1" + shift + ;; + esac + done + + # Validate inputs + if [[ -z "$environment" ]]; then + log_error "Environment name is required" + usage + exit 1 + fi + + # Change to project root + cd "$PROJECT_ROOT" + + # Check prerequisites + check_platformio || exit 1 + + # Validate environment + validate_environment "$environment" || exit 1 + + # Setup build environment + export PYTHONIOENCODING=utf-8 + export PYTHONUTF8=1 + + # Clean if requested + if [[ "$clean_build_flag" == "true" ]]; then + clean_build "$environment" + fi + + # Run build + run_build "$environment" "$clean_build_flag" "$verbose" "$enable_dev_ota" "$version" || exit 1 + + # Verify artifacts + if [[ "$verify" == "true" ]]; then + verify_artifacts "$environment" || exit 1 + fi + + # Show summary + show_build_summary "$environment" + + log_info "Build process completed successfully" +} + +# Run main if executed directly +if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then + main "$@" +fi diff --git a/scripts/ci_list-env.sh b/scripts/ci_list-env.sh new file mode 100755 index 
0000000000..052f74d633 --- /dev/null +++ b/scripts/ci_list-env.sh @@ -0,0 +1,125 @@ +#!/bin/bash +# Lists all available PlatformIO environments for OpenMQTTGateway +# Usage: ./scripts/ci_list.sh [--full] + +set -euo pipefail + +# Constants +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(dirname "$SCRIPT_DIR")" +readonly SCRIPT_DIR +readonly PROJECT_ROOT + +# Load shared configuration (colors, logging functions, paths) +if [[ -f "${SCRIPT_DIR}/ci_00_config.sh" ]]; then + source "${SCRIPT_DIR}/ci_00_config.sh" +else + echo "ERROR: ci_00_config.sh not found" >&2 + exit 1 +fi + + +## use .github/workflows/environments.json to list environments +list_environments_from_Json() { + local json_file="${PROJECT_ROOT}/.github/workflows/environments.json" + if [[ ! -f "$json_file" ]]; then + log_error "JSON file not found: $json_file" + return 1 + fi + + if ! command -v jq >/dev/null 2>&1; then + log_error "jq is required to read $json_file" + return 1 + fi + + log_info "Available PlatformIO environments from JSON:" + echo "" + local envs=() + while IFS= read -r env; do + # Skip test environments + if [[ ! "$env" =~ -test$ && ! "$env" =~ -all- ]]; then + envs+=("$env") + fi + done < <(jq -r '.environments.all[]? // empty' "$json_file") + + # Sort and display unique environments + if [[ ${#envs[@]} -gt 0 ]]; then + printf '%s\n' "${envs[@]}" | sort -u | column -c 80 + else + log_warn "No environments found in JSON file" + fi + echo "" + log_info "Total: $(printf '%s\n' "${envs[@]}" | sort -u | wc -l) environments" + +} + + +list_environments() { + log_info "Available PlatformIO environments:" + echo "" + local env_files=("${PROJECT_ROOT}/platformio.ini" "${PROJECT_ROOT}/environments.ini") + local envs=() + for file in "${env_files[@]}"; do + if [[ -f "$file" ]]; then + while IFS= read -r line; do + if [[ "$line" =~ ^\[env:([^\]]+)\] ]]; then + local env_name="${BASH_REMATCH[1]}" + envs+=("$env_name") + fi + done < "$file" + fi + done + # Sort and display unique environments + if [[ ${#envs[@]} -gt 0 ]]; then + printf '%s\n' "${envs[@]}" | sort -u | column -c 80 + else + log_warn "No environments found in configuration files" + fi + echo "" + log_info "Total: $(printf '%s\n' "${envs[@]}" | sort -u | wc -l) environments" +} + +# Main execution +usage() { + cat < [OPTIONS] + +set -euo pipefail + +# Constants +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(dirname "$SCRIPT_DIR")" +readonly SCRIPT_DIR +readonly PROJECT_ROOT + +# Load shared configuration (colors, logging functions, paths) +if [[ -f "${SCRIPT_DIR}/ci_00_config.sh" ]]; then + source "${SCRIPT_DIR}/ci_00_config.sh" +else + echo "ERROR: ci_00_config.sh not found" >&2 + exit 1 +fi + +# Set absolute paths +BUILD_DIR="${PROJECT_ROOT}/${BUILD_DIR}" +DEFAULT_OUTPUT_DIR="${PROJECT_ROOT}/${ARTIFACTS_DIR}" + +# Function to create output directory +prepare_output_dir() { + local output_dir="$1" + local clean_flag="${2:-false}" + + if [[ -d "$output_dir" ]]; then + if [[ "$clean_flag" == "true" ]]; then + log_warn "Cleaning and recreating output directory: $output_dir" + rm -rf "$output_dir" + mkdir -p "$output_dir" + else + log_warn "Output directory already exists and will be reused: $output_dir" + fi + else + mkdir -p "$output_dir" + log_info "Created output directory: $output_dir" + fi +} + +# Function to copy artifact with optional renaming +copy_artifact() { + local source="$1" + local dest="$2" + local artifact_type="$3" + + if [[ ! 
-f "$source" ]]; then + log_warn "${artifact_type} not found: $source" + return 1 + fi + + if cp "$source" "$dest"; then + local size + size=$(stat -f%z "$dest" 2>/dev/null || stat -c%s "$dest" 2>/dev/null) + local size_kb=$((size / 1024)) + log_info "✓ Copied ${artifact_type}: $(basename "$dest") (${size_kb} KB)" + return 0 + else + log_error "Failed to copy ${artifact_type}: $source" + return 1 + fi +} + + +# Function to prepare deployment artifacts (with renaming) +prepare_artifacts() { + local env="$1" + local output_dir="$2" + local env_dir="${BUILD_DIR}/${env}" + + log_info "Preparing firmware directory for: $env" + local copied=0 + + # Copy and rename firmware.bin + if copy_artifact "${env_dir}/firmware.bin" "${output_dir}/${env}-firmware.bin" "firmware"; then + ((copied++)) + fi + + # Copy and rename partitions.bin (optional) + copy_artifact "${env_dir}/partitions.bin" "${output_dir}/${env}-partitions.bin" "partitions" && ((copied++)) || true + + # Copy and rename bootloader.bin (optional) + copy_artifact "${env_dir}/bootloader.bin" "${output_dir}/${env}-bootloader.bin" "bootloader" && ((copied++)) || true + + # Copy boot_app0.bin if exists (ESP32 specific) + copy_artifact "${env_dir}/boot_app0.bin" "${output_dir}/${env}-boot_app0.bin" "boot_app0" && ((copied++)) || true + + if [[ $copied -eq 0 ]]; then + log_error "No artifacts were copied" + return 1 + fi + + log_info "Copied ${copied} artifact(s) in deployment mode" +} + +prepare_libraries() { + local env="$1" + local output_dir="$2" + local env_dir="${BUILD_DIR}/${env}" + + # Process libraries: create temp copy with renamed folders, zip, preserve originals + log_info "Processing libraries for environment: $env" + TEMP_LIBDEPS=$(mktemp -p "$output_dir" -d) || { echo "Failed to create temp directory"; return 1; } + + cp -r .pio/libdeps/"$env" "$TEMP_LIBDEPS/" || { log_error "Failed to copy libdeps for $env"; return 1; } + + ( + cd "$TEMP_LIBDEPS" + log_step "Replace space by _ in folder names (temp copy only)" + find . -type d -name "* *" | while read -r FNAME; do + mv "$FNAME" "${FNAME// /_}" + done + + log_step "Zipping libraries per board" + for i in */; do + tar -czf "${i%/}-libraries.tgz" "$i" > /dev/null + done + + mv ./*.tgz "${output_dir}" + ) + + rm -rf "$TEMP_LIBDEPS" + log_info "✓ Created library archives in: $output_dir" +} + +prepare_sources() { + local output_dir="$1" + + log_info "Preparing source code archive" + + # Create and move sources tar.gz (newly generated, safe to move) + if tar -czf "${output_dir}/OpenMQTTGateway_sources.tgz" main LICENSE.txt > /dev/null; then + log_info "✓ Created source archive: OpenMQTTGateway_sources.tgz" + else + log_error "Failed to create source archive" + return 1 + fi +} + + + + + + + + +# Function to list artifacts +list_artifacts() { + local output_dir="$1" + + echo "" + echo "═══════════════════════════════════════" + echo " Prepared Artifacts" + echo "═══════════════════════════════════════" + + if [[ -d "$output_dir" ]]; then + find "$output_dir" -type f | sort | while read -r file; do + local size + size=$(stat -f%z "$file" 2>/dev/null || stat -c%s "$file" 2>/dev/null) + local size_kb=$((size / 1024)) + echo " $(basename "$file"): ${size_kb} KB" + done + else + echo " No artifacts found" + fi + + echo "═══════════════════════════════════════" +} + +# Show usage +usage() { + cat << EOF +Usage: $0 [OPTIONS] + +Prepare artifacts for upload or deployment. + +Arguments: + environment PlatformIO environment name, if omitted will be created source archive only. 
+ +Options: + --clean Clean existing output directory before preparing artifacts + --output DIR Output directory [default: generated/artifacts/] + --help Show this help message + +Examples: + $0 esp32dev-all-test + $0 esp32dev-bt --deploy --manifest + $0 theengs-bridge --output build/artifacts --compress + +EOF +} + +# Main execution +main() { + local environment="" + local output_dir="$DEFAULT_OUTPUT_DIR" + local clean_flag=false + #local version="" ## WILL BE USED WHEN THE VERSION ITSELF AFFECTS THE ARTIFACTS NAMING + + # Parse arguments + while [[ $# -gt 0 ]]; do + case "$1" in + --output) + output_dir="$2" + shift 2 + ;; + -v|--version) + if [[ -z "${2:-}" ]]; then + log_error "-v|--version requires a version string" + return 1 + fi + #version="$2" + shift 2 + ;; + --clean) + clean_flag=true + shift + ;; + --help|-h) + usage + exit 0 + ;; + -*) + log_error "Unknown option: $1" + usage + exit 1 + ;; + *) + environment="$1" + shift + ;; + esac + done + + # Change to project root + cd "$PROJECT_ROOT" + + # TODO FOR NEXT STEP MULTI RELEASE: TAG, RC, edge + #if [[ -n "$version" ]]; then + # # Sanitize version string for directory name + # safe_version=$(echo "$version" | sed 's/[^a-zA-Z0-9._-]/_/g') + # output_dir="${output_dir}/${safe_version}" + #fi + + + # Validate inputs + if [[ -z "$environment" ]]; then + log_info "No environment specified, only preparing source archive" + + # Create output directory + prepare_output_dir "$output_dir" "$clean_flag" + + # Prepare source code archive + prepare_sources "$output_dir" || exit 1 + else + # Check if build directory exists + if [[ ! -d "${BUILD_DIR}/${environment}" ]]; then + log_error "Build directory not found for environment: $environment" + log_error "Run build_firmware.sh first" + exit 1 + fi + + #normalize output directory path + #output_dir="${output_dir}/firmware-${environment}" + output_dir="${output_dir}/firmware_build" + + + # Create output directory + prepare_output_dir "$output_dir" "$clean_flag" + + # Prepare artifacts based on mode + prepare_artifacts "$environment" "$output_dir" || exit 1 + + # Prepare libraries + prepare_libraries "$environment" "$output_dir" || exit 1 + fi + + # Show summary + list_artifacts "$output_dir" + + log_info "Artifact preparation completed successfully" +} + +# Run main if executed directly +if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then + main "$@" +fi diff --git a/scripts/ci_qa.sh b/scripts/ci_qa.sh new file mode 100755 index 0000000000..267e3df903 --- /dev/null +++ b/scripts/ci_qa.sh @@ -0,0 +1,474 @@ +#!/bin/bash +# CI/CD Quality Assurance (QA) - Code Linting and Formatting +# Checks and fixes code formatting using clang-format +# Usage: ./scripts/ci_qa.sh [OPTIONS] + +set -euo pipefail + +# Constants +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(dirname "$SCRIPT_DIR")" +readonly SCRIPT_DIR +readonly PROJECT_ROOT + +# Load shared configuration +if [[ -f "${SCRIPT_DIR}/ci_00_config.sh" ]]; then + source "${SCRIPT_DIR}/ci_00_config.sh" +else + echo "ERROR: ci_00_config.sh not found" >&2 + exit 1 +fi + +# Default values +FIX_MODE=false +FORMAT_ONLY=false +SOURCE_DIR="main" +EXTENSIONS="h,ino,cpp" +CLANG_FORMAT_VERSION="9" +VERBOSE=false + +# Function to check if clang-format is available +check_clang_format() { + local version="$1" + local cmd="clang-format-${version}" + + if command -v "$cmd" >/dev/null 2>&1; then + echo "$cmd" + return 0 + fi + + # Try without version suffix + if command -v clang-format >/dev/null 2>&1; then + echo "clang-format" + return 0 + fi + + 
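    # Neither the versioned nor the unversioned clang-format binary is available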
return 1 +} + +# Function to find files to check +find_files() { + local source="$1" + local extensions="$2" + + if [[ ! -d "${PROJECT_ROOT}/${source}" ]]; then + return 1 + fi + + local find_patterns=() + IFS=',' read -ra exts <<< "$extensions" + for ext in "${exts[@]}"; do + find_patterns+=(-name "*.${ext}" -o) + done + # Remove last -o + unset 'find_patterns[-1]' + + local files + files=$(find "${PROJECT_ROOT}/${source}" -type f \( "${find_patterns[@]}" \) 2>/dev/null || true) + + if [[ -z "$files" ]]; then + return 1 + fi + + echo "$files" +} + +# Function to check formatting +check_formatting() { + local clang_format_cmd="$1" + local files="$2" + + log_info "Checking code formatting..." + + local failed_files=() + local checked_count=0 + local has_issues=false + + while IFS= read -r file; do + if [[ -z "$file" ]]; then + continue + fi + + checked_count=$((checked_count + 1)) + + if [[ "$VERBOSE" == true ]]; then + log_info "Checking: $file" + fi + + # Check if file needs formatting and capture diff + local diff_output + diff_output=$("$clang_format_cmd" --dry-run --Werror "$file" 2>&1) + local format_result=$? + + if [[ $format_result -ne 0 ]]; then + failed_files+=("$file") + has_issues=true + + # Show the actual formatting differences + echo "" + log_warn "⚠ Formatting issues in: $file" + echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + + # Generate and show diff with colors + local actual_diff + actual_diff=$(diff -u "$file" <("$clang_format_cmd" "$file") 2>/dev/null || true) + + if [[ -n "$actual_diff" ]]; then + echo "$actual_diff" | head -50 | while IFS= read -r line; do + if [[ "$line" =~ ^-[^-] ]]; then + echo -e "\033[31m$line\033[0m" # Red for removed lines + elif [[ "$line" =~ ^+[^+] ]]; then + echo -e "\033[32m$line\033[0m" # Green for added lines + elif [[ "$line" =~ ^@@ ]]; then + echo -e "\033[36m$line\033[0m" # Cyan for line numbers + else + echo "$line" + fi + done + else + echo "$diff_output" + fi + echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + else + if [[ "$VERBOSE" == true ]]; then + log_info " ✓ OK" + fi + fi + done <<< "$files" + + echo "" + log_info "Checked ${checked_count} files" + + if [[ $has_issues == true ]]; then + echo "" + log_error "Found ${#failed_files[@]} files with formatting issues:" + for file in "${failed_files[@]}"; do + log_error " - $file" + done + echo "" + log_error "To fix these issues automatically, run:" + log_error " $0 --fix" + echo "" + return 1 + fi + + log_info "✓ All files are properly formatted" + return 0 +} + +# Function to fix formatting +fix_formatting() { + local clang_format_cmd="$1" + local files="$2" + + log_info "Fixing code formatting..." + + local fixed_count=0 + local total_count=0 + + while IFS= read -r file; do + if [[ -z "$file" ]]; then + continue + fi + + total_count=$((total_count + 1)) + + if [[ "$VERBOSE" == true ]]; then + log_info "Processing: $file" + fi + + # Apply formatting in-place + if "$clang_format_cmd" -i "$file" 2>/dev/null; then + fixed_count=$((fixed_count + 1)) + if [[ "$VERBOSE" == true ]]; then + log_info " ✓ Formatted" + fi + else + if [[ "$VERBOSE" == true ]]; then + log_warn " ✗ Failed to format" + fi + fi + done <<< "$files" + + echo "" + log_info "Processed ${total_count} files" + log_info "✓ Formatting applied to ${fixed_count} files" + + if [[ $fixed_count -gt 0 ]]; then + log_warn "" + log_warn "Files have been modified. 
Please review and commit the changes:" + log_warn " git diff" + log_warn " git add -u" + log_warn " git commit -m 'style: apply clang-format'" + fi +} + +# Function to run shellcheck on scripts +run_shellcheck() { + log_info "Checking shell scripts with ShellCheck..." + + # Check if shellcheck is installed + if ! command -v shellcheck >/dev/null 2>&1; then + log_warn "ShellCheck not found, skipping shell script checks" + log_info "To install ShellCheck:" + log_info " Ubuntu/Debian: sudo apt-get install shellcheck" + log_info " macOS: brew install shellcheck" + return 0 # Don't fail, just skip + fi + + log_info "✓ ShellCheck found: $(shellcheck --version | head -n2 | tail -n1)" + + # Find all .sh files in scripts directory + local scripts_dir="${PROJECT_ROOT}/scripts" + if [[ ! -d "$scripts_dir" ]]; then + log_warn "Scripts directory not found: $scripts_dir" + return 0 + fi + + local shell_files + shell_files=$(find "$scripts_dir" -type f -name "*.sh" 2>/dev/null) + + if [[ -z "$shell_files" ]]; then + log_warn "No shell scripts found in $scripts_dir" + return 0 + fi + + local file_count + file_count=$(echo "$shell_files" | wc -l) + log_info "Found $file_count shell script(s) to check" + + # Run shellcheck on all files at once to allow cross-file analysis + log_info "Running ShellCheck on scripts directory..." + + local shellcheck_output + # shellcheck disable=SC2086 + shellcheck_output=$(shellcheck -f gcc $shell_files 2>&1) + local shellcheck_result=$? + + if [[ $shellcheck_result -ne 0 ]]; then + echo "" + log_warn "⚠ ShellCheck found issues:" + echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + echo "$shellcheck_output" + echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + echo "" + log_error "Please fix the ShellCheck warnings/errors" + return 1 + fi + + log_success "✓ All $file_count shell script(s) passed ShellCheck" + return 0 +} + +# Function to run all QA checks +run_all_checks() { + log_info "Running all quality assurance checks..." + + local all_passed=true + + # Format check + log_info "═══ Code Formatting ═══" + if ! run_format_check; then + all_passed=false + fi + echo "" + + # ShellCheck + log_info "═══ Shell Scripts Check ═══" + if ! run_shellcheck; then + all_passed=false + fi + echo "" + + # Future: Add more checks here + # - cppcheck static analysis + # - code complexity metrics + # - TODO/FIXME detection + # - license header validation + + if [[ "$all_passed" == false ]]; then + log_error "Some QA checks failed" + return 1 + fi + + log_info "✓ All QA checks passed" + return 0 +} + +# Function to run format check +run_format_check() { + log_info "Checking for clang-format version ${CLANG_FORMAT_VERSION}..." + + local clang_format_cmd + clang_format_cmd=$(check_clang_format "$CLANG_FORMAT_VERSION") + + # shellcheck disable=SC2181 + if [[ $? 
-ne 0 ]] || [[ -z "$clang_format_cmd" ]]; then + log_error "clang-format not found" + log_error "Please install clang-format:" + log_error " Ubuntu/Debian: sudo apt-get install clang-format-${CLANG_FORMAT_VERSION}" + log_error " macOS: brew install clang-format" + return 1 + fi + + if [[ "$clang_format_cmd" == "clang-format-${CLANG_FORMAT_VERSION}" ]]; then + log_info "✓ clang-format-${CLANG_FORMAT_VERSION} found" + else + local installed_version + installed_version=$(clang-format --version | grep -oP '\d+\.\d+' | head -1 || echo "unknown") + log_warn "clang-format-${CLANG_FORMAT_VERSION} not found, using clang-format (version ${installed_version})" + fi + + log_info "Finding files in '${SOURCE_DIR}' with extensions: ${EXTENSIONS}" + + local files + files=$(find_files "$SOURCE_DIR" "$EXTENSIONS") + + # shellcheck disable=SC2181 + if [[ $? -ne 0 ]] || [[ -z "$files" ]]; then + log_error "Source directory not found: ${PROJECT_ROOT}/${SOURCE_DIR}" + return 1 + fi + + local file_count + file_count=$(echo "$files" | wc -l) + log_info "Found ${file_count} files to check" + + if [[ "$FIX_MODE" == true ]]; then + fix_formatting "$clang_format_cmd" "$files" + else + check_formatting "$clang_format_cmd" "$files" + fi +} + +# Function to show usage +usage() { + cat << EOF +Usage: $0 [OPTIONS] + +Run quality assurance checks on OpenMQTTGateway code. + +Options: + --check Check formatting only (CI mode) [default] + --fix Fix formatting issues automatically + --format Run only format checks + --all Run all QA checks [default] + --source DIR Source directory to check [default: main] + --extensions EXTS File extensions (comma-separated) [default: h,ino,cpp] + --clang-format-version V clang-format version [default: 9] + --verbose Enable verbose output + --help Show this help message + +Examples: + # Check formatting (CI mode) + $0 --check + + # Fix formatting issues + $0 --fix + + # Check specific directory + $0 --check --source lib/LEDManager + + # Check with custom extensions + $0 --check --extensions h,cpp + + # Verbose output + $0 --check --verbose + +EOF + exit 0 +} + +# Parse command line arguments +parse_args() { + while [[ $# -gt 0 ]]; do + case "$1" in + --check) + FIX_MODE=false + shift + ;; + --fix) + FIX_MODE=true + shift + ;; + --format) + FORMAT_ONLY=true + shift + ;; + --all) + FORMAT_ONLY=false + shift + ;; + --source) + SOURCE_DIR="$2" + shift 2 + ;; + --extensions) + EXTENSIONS="$2" + shift 2 + ;; + --clang-format-version) + CLANG_FORMAT_VERSION="$2" + shift 2 + ;; + --verbose) + VERBOSE=true + shift + ;; + --help) + usage + ;; + *) + log_error "Unknown option: $1" + usage + ;; + esac + done +} + +# Main execution +main() { + local start_time + start_time=$(date +%s) + + parse_args "$@" + + log_info "Starting QA pipeline..." + + if [[ "$FIX_MODE" == true ]]; then + log_info "Mode: FIX (will modify files)" + else + log_info "Mode: CHECK (read-only)" + fi + + # Run checks + local result=0 + if [[ "$FORMAT_ONLY" == true ]]; then + run_format_check || result=$? + else + run_all_checks || result=$? 
+ fi + + local end_time + end_time=$(date +%s) + local duration=$((end_time - start_time)) + + echo "" + echo "╔════════════════════════════════════════╗" + echo "║ QA Pipeline Summary ║" + echo "╚════════════════════════════════════════╝" + echo " Duration: ${duration}s" + + if [[ $result -eq 0 ]]; then + echo " Status: SUCCESS ✓" + echo "╚════════════════════════════════════════╝" + return 0 + else + echo " Status: FAILED ✗" + echo "╚════════════════════════════════════════╝" + return 1 + fi +} + +# Execute main function +main "$@" diff --git a/scripts/ci_security.sh b/scripts/ci_security.sh new file mode 100755 index 0000000000..175954cea4 --- /dev/null +++ b/scripts/ci_security.sh @@ -0,0 +1,422 @@ +#!/bin/bash +# OpenMQTTGateway Security Scan Script +# Runs Trivy vulnerability scanner and generates reports +# Based on task-security-scan.yml workflow +# Usage: ./scripts/ci_security.sh [OPTIONS] + +set -euo pipefail + +# Constants +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(dirname "$SCRIPT_DIR")" +readonly SCRIPT_DIR +readonly PROJECT_ROOT + +# Load shared configuration (colors, logging functions, paths) +if [[ -f "${SCRIPT_DIR}/ci_00_config.sh" ]]; then + source "${SCRIPT_DIR}/ci_00_config.sh" +else + echo "ERROR: ci_00_config.sh not found" >&2 + exit 1 +fi + +# Default values +SCAN_TYPE="fs" +SEVERITY="HIGH,CRITICAL" +EXIT_CODE_ON_VULN="0" +SCAN_PATH="." +EXCLUDE_PATHS="" +GENERATE_SBOM=false # Off by default + +OUTPUT_DIR="${PROJECT_ROOT}/${REPORTS_DIR}" + +# Show usage +usage() { + cat << EOF +Usage: $0 [OPTIONS] + +Security vulnerability scanning using Trivy + +Options: + --scan-type Type of scan: fs (filesystem), config, or image + Default: fs + + --severity Severity levels to report (comma-separated) + Options: UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL + Default: HIGH,CRITICAL + + --scan-path Path to scan (default: entire repository) + Default: . + + --exit-code Exit code when vulnerabilities found + 0 = continue (don't fail) + 1 = fail build + Default: 0 + + --exclude Paths to exclude from scan (comma-separated) + Example: node_modules,test,docs + Default: (none) + + --generate-sbom Generate Software Bill of Materials (SBOM) + Creates CycloneDX and SPDX JSON formats + Default: off + + --output-dir Directory to save reports + Default: ./generated/reports + + -h, --help Show this help message + +Examples: + # Scan filesystem for HIGH and CRITICAL issues + $0 + + # Scan config files and fail if vulnerabilities found + $0 --scan-type config --exit-code 1 + + # Scan specific path for all severity levels + $0 --scan-path ./main --severity UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL + + # Scan with excluded paths + $0 --exclude node_modules,test,docs + + # Scan with multiple options + $0 --scan-path ./main --exclude test --severity HIGH,CRITICAL --exit-code 1 + + # Scan with SBOM generation + $0 --generate-sbom + + # Complete scan with SBOM and custom severity + $0 --severity UNKNOWN,LOW,MEDIUM,HIGH,CRITICAL --generate-sbom + +EOF + exit 0 +} + +# Function to check if Trivy is installed +check_trivy() { + if ! command -v trivy &> /dev/null; then + log_error "Trivy is not installed. Install it from: https://github.com/aquasecurity/trivy" + return 1 + fi + log_success "Trivy found: $(trivy --version)" +} + +# Function to check if jq is installed (for JSON parsing) +check_jq() { + if ! command -v jq &> /dev/null; then + log_warn "jq is not installed. Some report features will be limited." 
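        # Callers treat a missing jq as non-fatal and fall back to a less detailed summary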
+        return 1
+    fi
+    return 0
+}
+
+# Function to run Trivy scan with specified format
+# Usage: run_trivy_scan <format> <filename> [exit_code]
+run_trivy_scan() {
+    local format="$1"
+    local filename="$2"
+    local exit_code="${3:-0}"  # Default to 0 for non-critical scans
+    local output_file="${OUTPUT_DIR}/${filename}"
+
+    log_info "Running Trivy $SCAN_TYPE scan ($format format)..."
+
+    mkdir -p "$OUTPUT_DIR"
+
+    # Build the trivy command - run every scanner (vuln, config, secret, license)
+    # with no scanner restrictions
+    local trivy_cmd="trivy $SCAN_TYPE $SCAN_PATH --format $format --output $output_file --severity $SEVERITY --exit-code $exit_code"
+
+    # Add exclude paths if provided
+    if [[ -n "$EXCLUDE_PATHS" ]]; then
+        # Convert comma-separated paths to trivy --skip-dirs format
+        local skip_dirs
+        skip_dirs=$(echo "$EXCLUDE_PATHS" | sed 's/,/ --skip-dirs /g' | sed 's/^/ --skip-dirs /')
+        trivy_cmd="$trivy_cmd $skip_dirs"
+    fi
+
+    # Log the full command
+    log_info "Executing: $trivy_cmd"
+
+    if eval "$trivy_cmd" 2>&1 | tee -a "${OUTPUT_DIR}/trivy-scan.log"; then
+        log_success "${format^^} report generated: $output_file"
+        return 0
+    else
+        local rc=$?
+        if [[ $rc -eq 1 && "$exit_code" == "1" ]]; then
+            log_error "Vulnerabilities found (exit code: $rc)"
+            return $rc
+        fi
+        log_warn "Scan completed with non-blocking exit code: $rc"
+        return 0
+    fi
+}
+
+# Function to create summary report
+create_summary_report() {
+    log_info "Creating security summary report..."
+
+    local summary_file="${OUTPUT_DIR}/security-summary.md"
+
+    {
+        echo "# 🔒 Security Scan Results"
+        echo ""
+        echo "**Scan Type**: $SCAN_TYPE"
+        echo "**Path Scanned**: $SCAN_PATH"
+        echo "**Severity Filter**: $SEVERITY"
+        echo "**Scan Date**: $(date -u '+%Y-%m-%d %H:%M:%S UTC')"
+        echo ""
+    } > "$summary_file"
+
+    # Parse SARIF if it exists
+    local sarif_file="${OUTPUT_DIR}/trivy-results.sarif"
+    if [[ -f "$sarif_file" ]]; then
+        log_info "Parsing SARIF results..."
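+        # The jq filters below derive the severity breakdown from the SARIF
+        # "level" field (error -> Critical, warning -> High, note -> Medium,
+        # none -> Low); adjust these mappings if Trivy's SARIF output changes.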
+ + if check_jq; then + # Count vulnerabilities by severity + local vuln_count + vuln_count=$(jq '[.runs[].results[]] | length' "$sarif_file" 2>/dev/null || echo "0") + + local critical high medium low + critical=$(jq '[.runs[].results[] | select(.level == "error")] | length' "$sarif_file" 2>/dev/null || echo "0") + high=$(jq '[.runs[].results[] | select(.level == "warning")] | length' "$sarif_file" 2>/dev/null || echo "0") + medium=$(jq '[.runs[].results[] | select(.level == "note")] | length' "$sarif_file" 2>/dev/null || echo "0") + low=$(jq '[.runs[].results[] | select(.level == "none")] | length' "$sarif_file" 2>/dev/null || echo "0") + + { + echo "## Vulnerability Summary" + echo "" + echo "**Total Vulnerabilities**: ${vuln_count}" + echo "" + echo "- 🔴 **Critical**: ${critical}" + echo "- 🟠 **High**: ${high}" + echo "- 🟡 **Medium**: ${medium}" + echo "- 🟢 **Low**: ${low}" + echo "" + } >> "$summary_file" + + # List vulnerability details + if [[ "$vuln_count" -gt 0 ]]; then + { + echo "## Vulnerability Details" + echo "" + } >> "$summary_file" + + jq -r '.runs[].results[] | + "### \(.level | ascii_upcase): \(.ruleId)\n" + + "**Location**: \(.locations[0].physicalLocation.artifactLocation.uri // "N/A")\n" + + "**Message**: \(.message.text)\n" + + (if .properties.CVE then "**CVE**: \(.properties.CVE)\n" else "" end) + + (if .properties.cvss then "**CVSS Score**: \(.properties.cvss)\n" else "" end) + + ""' "$sarif_file" >> "$summary_file" 2>/dev/null || \ + echo "Could not parse vulnerability details" >> "$summary_file" + fi + else + echo "⚠️ jq not available for detailed SARIF parsing" >> "$summary_file" + fi + else + echo "⚠️ SARIF file not found" >> "$summary_file" + fi + + # Add available report formats + { + echo "## Report Formats Available" + echo "" + if [[ -f "${OUTPUT_DIR}/trivy-results.sarif" ]]; then + echo "- ✅ \`trivy-results.sarif\` - SARIF format (for GitHub/IDE integration)" + fi + if [[ -f "${OUTPUT_DIR}/trivy-report.json" ]]; then + echo "- ✅ \`trivy-report.json\` - JSON format (for automation)" + fi + if [[ -f "${OUTPUT_DIR}/trivy-report.txt" ]]; then + echo "- ✅ \`trivy-report.txt\` - Human-readable table" + fi + echo "" + } >> "$summary_file" + + # Add footer + { + echo "---" + echo "" + echo "Generated by OpenMQTTGateway CI/CD Security Scan" + echo "For more information, see: https://github.com/aquasecurity/trivy" + } >> "$summary_file" + + log_success "Summary report generated: $summary_file" +} + +# Function to check for critical vulnerabilities and fail if needed +check_critical_vulnerabilities() { + if [[ "$EXIT_CODE_ON_VULN" != "1" ]]; then + return 0 + fi + + local sarif_file="${OUTPUT_DIR}/trivy-results.sarif" + + if [[ ! -f "$sarif_file" ]]; then + log_warn "SARIF file not found, skipping critical check" + return 0 + fi + + if check_jq; then + local critical + critical=$(jq '[.runs[].results[] | select(.level == "error")] | length' "$sarif_file" 2>/dev/null || echo "0") + + if [[ "$critical" -gt 0 ]]; then + log_error "❌ Found ${critical} critical vulnerabilities!" 
+ log_error "Review the security reports in: $OUTPUT_DIR" + return 1 + fi + fi + + return 0 +} + +# Main execution +main() { + # Parse arguments + while [[ $# -gt 0 ]]; do + case "$1" in + --scan-type) + if [[ -z "${2:-}" ]]; then + log_error "--scan-type requires an argument" + return 1 + fi + SCAN_TYPE="$2" + shift 2 + ;; + --severity) + if [[ -z "${2:-}" ]]; then + log_error "--severity requires an argument" + return 1 + fi + SEVERITY="$2" + shift 2 + ;; + --scan-path) + if [[ -z "${2:-}" ]]; then + log_error "--scan-path requires an argument" + return 1 + fi + SCAN_PATH="$2" + shift 2 + ;; + --exit-code) + if [[ -z "${2:-}" ]]; then + log_error "--exit-code requires an argument" + return 1 + fi + EXIT_CODE_ON_VULN="$2" + shift 2 + ;; + --exclude) + if [[ -z "${2:-}" ]]; then + log_error "--exclude requires an argument" + return 1 + fi + EXCLUDE_PATHS="$2" + shift 2 + ;; + --generate-sbom) + GENERATE_SBOM=true + shift + ;; + --output-dir) + if [[ -z "${2:-}" ]]; then + log_error "--output-dir requires an argument" + return 1 + fi + OUTPUT_DIR="$2" + shift 2 + ;; + -h|--help) + usage + ;; + *) + log_error "Unknown option: $1" + echo "" + usage + ;; + esac + done + + # Pre-flight checks + log_info "Running security scan checks..." + if ! check_trivy; then + log_error "Required tool check failed" + return 1 + fi + check_jq || true + + echo "" + log_info "═══ Security Scan Configuration ═══" + echo " Scan Type: $SCAN_TYPE" + echo " Path: $SCAN_PATH" + echo " Severity: $SEVERITY" + echo " Exit on Vulnerabilities: $EXIT_CODE_ON_VULN" + echo " Excluded Paths: ${EXCLUDE_PATHS:-none}" + echo " Generate SBOM: ${GENERATE_SBOM:-false}" + echo " Output Directory: $OUTPUT_DIR" + echo "" + + # Run scans + if ! run_trivy_scan "sarif" "trivy-results.sarif" "$EXIT_CODE_ON_VULN"; then + if [[ "$EXIT_CODE_ON_VULN" == "1" ]]; then + return 1 + fi + fi + + run_trivy_scan "json" "trivy-report.json" "0" + run_trivy_scan "table" "trivy-report.txt" "0" + + # Create summary + create_summary_report + + # Generate SBOM if requested + if [[ "$GENERATE_SBOM" == "true" ]]; then + log_info "Generating Software Bill of Materials (SBOM)..." + + local sbom_dir="${OUTPUT_DIR}/sbom" + mkdir -p "$sbom_dir" + + # Generate CycloneDX SBOM + log_info "Generating CycloneDX SBOM..." + if trivy "$SCAN_TYPE" "$SCAN_PATH" \ + --format cyclonedx \ + --output "$sbom_dir/sbom-cyclonedx.json" \ + --exit-code 0 2>&1 | grep -i "generated\|error" || true; then + if [[ -f "$sbom_dir/sbom-cyclonedx.json" ]]; then + log_success "CycloneDX SBOM generated: $sbom_dir/sbom-cyclonedx.json" + fi + fi + + # Generate SPDX SBOM + log_info "Generating SPDX SBOM..." + if trivy "$SCAN_TYPE" "$SCAN_PATH" \ + --format spdx-json \ + --output "$sbom_dir/sbom-spdx.json" \ + --exit-code 0 2>&1 | grep -i "generated\|error" || true; then + if [[ -f "$sbom_dir/sbom-spdx.json" ]]; then + log_success "SPDX SBOM generated: $sbom_dir/sbom-spdx.json" + fi + fi + + if [[ -f "$sbom_dir/sbom-cyclonedx.json" ]] || [[ -f "$sbom_dir/sbom-spdx.json" ]]; then + log_success "SBOM generation completed" + echo "" + fi + fi + # Check for critical vulnerabilities + if ! check_critical_vulnerabilities; then + return 1 + fi + + echo "" + log_success "Security scan completed successfully!" 
+    log_info "Reports saved to: $OUTPUT_DIR"
+
+    return 0
+}
+
+# Execute main function
+main "$@"
diff --git a/scripts/ci_site.sh b/scripts/ci_site.sh
new file mode 100755
index 0000000000..3d072a7e01
--- /dev/null
+++ b/scripts/ci_site.sh
@@ -0,0 +1,356 @@
+#!/bin/bash
+# CI/CD Site/Documentation Builder
+# This script builds and deploys VuePress documentation with version management.
+# Usage: ./scripts/ci_site.sh [OPTIONS]
+
+set -euo pipefail
+
+# Constants
+# Resolve the folder containing this script so relative paths work
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
+DOCS_DIR="${PROJECT_ROOT}/docs"
+readonly SCRIPT_DIR
+readonly PROJECT_ROOT
+readonly DOCS_DIR
+
+
+# Load shared configuration
+if [[ -f "${SCRIPT_DIR}/ci_00_config.sh" ]]; then
+    source "${SCRIPT_DIR}/ci_00_config.sh"
+else
+    echo "ERROR: ci_00_config.sh not found" >&2
+    exit 1
+fi
+
+# Final output directory for the site
+readonly SITE_OUTPUT_DIR="${PROJECT_ROOT}/${SITE_DIR}"
+
+# Default values
+MODE="prod"             # Default mode is production
+CURL_INSECURE=false     # Allow curl to skip TLS verification (use only when needed)
+CLEAN=false             # Clean generated/site before build
+LEGACY_OPENSSL=false    # Pass --openssl-legacy-provider to Node.js (needed on Node 17+ with OpenSSL 3)
+
+# Function to check required tools
+check_requirements() {
+    log_info "Checking required tools..."
+
+    local missing_tools=()
+
+    if ! command -v node >/dev/null 2>&1; then
+        missing_tools+=("node")  # Node.js is required
+    else
+        local node_version
+        node_version=$(node --version)
+        log_info "✓ Node.js ${node_version} found"
+    fi
+
+    if ! command -v npm >/dev/null 2>&1; then
+        missing_tools+=("npm")  # npm is required
+    else
+        local npm_version
+        npm_version=$(npm --version)
+        log_info "✓ npm ${npm_version} found"
+    fi
+
+    if ! command -v openssl >/dev/null 2>&1; then
+        missing_tools+=("openssl")  # OpenSSL is required
+    else
+        local openssl_version
+        openssl_version=$(openssl version | grep -oP '\d+\.\d+\.\d+' || echo "unknown")
+        log_info "✓ OpenSSL ${openssl_version} found"
+    fi
+
+    if [[ ${#missing_tools[@]} -gt 0 ]]; then
+        log_error "Missing required tools: ${missing_tools[*]}"
+        return 1
+    fi
+
+    log_info "All required tools are available"
+    return 0
+}
+
+# Function to install dependencies
+install_dependencies() {
+    log_info "Installing dependencies..."
+
+    # Install Node dependencies
+    log_info "Installing Node.js dependencies..."
+    cd "${PROJECT_ROOT}"
+    npm install --quiet || {
+        log_error "Failed to install Node.js dependencies"
+        return 1
+    }
+
+    log_info "Dependencies installed successfully"
+}
+
+# Function to download common config
+download_common_config() {
+    log_info "Downloading common configuration..."
+
+    local config_url="https://www.theengs.io/commonConfig.js"
+    local config_dest="${DOCS_DIR}/.vuepress/public/commonConfig.js"
+    local curl_opts="-sSf"
+
+    # Optionally disable TLS verification if explicitly requested
+    if [[ "${CURL_INSECURE}" == "true" ]]; then
+        curl_opts+="k"
+        log_warn "curl is running with --insecure; TLS verification is disabled"
+    fi
+
+    mkdir -p "$(dirname "$config_dest")"
+
+    if curl ${curl_opts} -o "$config_dest" "$config_url"; then
+        log_info "✓ Common config downloaded"
+    else
+        log_warn "Failed to download common config, continuing anyway..."
+ fi +} + +create_configuration_files() { + local url_prefix="$1" + local version="$2" + local mode="$3" + local dest_line="" + + if [[ "$mode" == "dev" ]]; then + dest_line=" \"dest\": \"generated/site/dev\"," + fi + + # download common config + download_common_config + + + ## Create a meta.json file on config folder + local meta_file="${DOCS_DIR}/.vuepress/meta.json" + cat > "$meta_file" < - $mfn - - ''') - -wu_temp_p1 = ''' - -''' - -manif_path = 'docs/.vuepress/public/firmware_build/' -vue_path = 'docs/.vuepress/components/' -cors_proxy = '' # 'https://cors.bridged.cc/' -esp32_boot = 'https://github.com/espressif/arduino-esp32/raw/2.0.7/tools/partitions/boot_app0.bin' diff --git a/scripts/compressFirmware.py b/scripts/compressFirmware.py index cd57a6d515..0616337bcf 100644 --- a/scripts/compressFirmware.py +++ b/scripts/compressFirmware.py @@ -1,3 +1,5 @@ +# Compresses firmware binaries with gzip for OTA updates during build +# Used by: PlatformIO environments (optional, commented in environments.ini) import gzip import shutil import os diff --git a/scripts/ensure_ssl_certs.js b/scripts/ensure_ssl_certs.js new file mode 100644 index 0000000000..42c33382f1 --- /dev/null +++ b/scripts/ensure_ssl_certs.js @@ -0,0 +1,31 @@ +#!/usr/bin/env node +'use strict'; + +const fs = require('fs'); +const path = require('path'); + +function ensureSSLCerts() { + const sslDir = path.join(process.cwd(), '.ssl'); + const keyFile = path.join(sslDir, 'key.pem'); + const certFile = path.join(sslDir, 'cert.pem'); + + if (fs.existsSync(keyFile) && fs.existsSync(certFile)) { + return; + } + + const { execSync } = require('child_process'); + + fs.mkdirSync(sslDir, { recursive: true }); + + try { + execSync(`openssl req -new -x509 -keyout "${keyFile}" -out "${certFile}" -days 365 -nodes -subj "/C=US/ST=State/L=City/O=OpenMQTTGateway/CN=localhost" 2>/dev/null`, { + stdio: 'inherit' + }); + console.log('✓ SSL certificate generated'); + } catch (err) { + console.error('Failed to generate SSL certificate. 
Ensure openssl is installed.'); + process.exit(1); + } +} + +ensureSSLCerts(); diff --git a/scripts/gen_wu.py b/scripts/gen_wu.py deleted file mode 100644 index f33157b010..0000000000 --- a/scripts/gen_wu.py +++ /dev/null @@ -1,147 +0,0 @@ -import os -import requests -import json -import argparse -import shutil - -from common_wu import mf_temp32, mf_temp32c3, mf_temp8266, wu_temp_opt, wu_temp_p1, wu_temp_p2, wu_temp_p3, wu_temp_p4, wu_temp_end, vue_path, manif_path, cors_proxy, esp32_boot, mf_temp32s3 - -parser = argparse.ArgumentParser() -parser.add_argument('--dev', action='store_true') -parser.add_argument('repo', nargs='?', default='1technophile/OpenMQTTGateway') -args = parser.parse_args() -repo = args.repo -dev = args.dev - -bin_path = 'toDeploy/' -manif_folder = "/firmware_build/" - -if not os.path.exists(manif_path): - os.makedirs(manif_path) - -if dev: - print('Generate Web Upload in dev mode') - manif_folder = "/dev" + manif_folder - # copy OTA latest version definition - shutil.copy("scripts/latest_version_dev.json", - manif_path + "latest_version_dev.json") - # copy the binaries frombin_path to manif_path - for name in os.listdir(bin_path): - if '.bin' in name: - shutil.copyfile(bin_path + name, (manif_path + name)) -else: - print('Generate Web Upload in release mode') - # copy OTA latest version definition - shutil.copy("scripts/latest_version.json", manif_path + "latest_version.json") - release = requests.get('https://api.github.com/repos/' + - repo + '/releases/latest') - rel_data = json.loads(release.text) - if 'assets' in rel_data: - assets = rel_data['assets'] - # Download assets into manif_path - for item in range(len(assets)): - name = assets[item]['name'] - if 'firmware.bin' in name: - fw_bin = requests.get(assets[item]['browser_download_url']) - filename = assets[item]['browser_download_url'].split('/')[-1] - with open(manif_path + filename, 'wb') as output_file: - output_file.write(fw_bin.content) - print('Downloaded: ' + filename) - if 'partitions.bin' in name: - part_bin = requests.get(assets[item]['browser_download_url']) - filename = assets[item]['browser_download_url'].split('/')[-1] - with open(manif_path + filename, 'wb') as output_file: - output_file.write(part_bin.content) - print('Downloaded: ' + filename) - if 'bootloader.bin' in name: - bl_bin = requests.get(assets[item]['browser_download_url']) - filename = assets[item]['browser_download_url'].split('/')[-1] - with open(manif_path + filename, 'wb') as output_file: - output_file.write(bl_bin.content) - print('Downloaded: ' + filename) - else: - print('Assets not found') - os._exit(1) - -if not os.path.exists(vue_path): - os.makedirs(vue_path) - -boot_bin = requests.get(esp32_boot) -filename = esp32_boot.split('/')[-1] -with open(manif_path + filename, 'wb') as output_file: - output_file.write(boot_bin.content) - -wu_file = open(vue_path + 'web-uploader.vue', 'w') -wu_file.write(wu_temp_p1) - -for name in sorted(os.listdir(manif_path)): - if 'firmware.bin' in name and ('esp32c3' not in name ) and ('esp32s3' not in name ) and ('esp32' in name or 'ttgo' in name or 'heltec' in name or 'thingpulse' in name or 'theengs' in name or 'lilygo' in name or 'shelly' in name or 'tinypico' in name): - fw = name.split('-firmware')[0] - man_file = fw + '.manifest.json' - fwp_name = name.split('-firmware')[0] + '-partitions.bin' - fwb_name = name.split('-firmware')[0] + '-bootloader.bin' - mani_str = mf_temp32.substitute({'cp': cors_proxy, 'part': manif_folder + fwp_name.split('/')[-1], 'bin': manif_folder + name, 'bl': 
manif_folder + fwb_name, 'boot': manif_folder + esp32_boot.split('/')[-1]}) - - with open(manif_path + man_file, 'w') as nf: - nf.write(mani_str) - - wu_file.write(wu_temp_opt.substitute( - {'mff': manif_folder + man_file, 'mfn': fw})) - - print('Created: ' + man_file) - -wu_file.write(wu_temp_p2) - -for name in sorted(os.listdir(manif_path)): - if 'firmware.bin' in name and ('esp32c3' in name ): - fw = name.split('-firmware')[0] - man_file = fw + '.manifest.json' - fwp_name = name.split('-firmware')[0] + '-partitions.bin' - fwb_name = name.split('-firmware')[0] + '-bootloader.bin' - mani_str = mf_temp32c3.substitute({'cp': cors_proxy, 'part': manif_folder + fwp_name, 'bin': manif_folder + name, 'bl': manif_folder + fwb_name, 'boot': manif_folder + esp32_boot.split('/')[-1]}) - - with open(manif_path + man_file, 'w') as nf: - nf.write(mani_str) - - wu_file.write(wu_temp_opt.substitute( - {'mff': manif_folder + man_file, 'mfn': fw})) - - print('Created: ' + man_file) - -wu_file.write(wu_temp_p3) - -for name in sorted(os.listdir(manif_path)): - if 'firmware.bin' in name and ('esp32s3' in name ): - fw = name.split('-firmware')[0] - man_file = fw + '.manifest.json' - fwp_name = name.split('-firmware')[0] + '-partitions.bin' - fwb_name = name.split('-firmware')[0] + '-bootloader.bin' - mani_str = mf_temp32s3.substitute({'cp': cors_proxy, 'part': manif_folder + fwp_name, 'bin': manif_folder + name, 'bl': manif_folder + fwb_name, 'boot': manif_folder + esp32_boot.split('/')[-1]}) - - with open(manif_path + man_file, 'w') as nf: - nf.write(mani_str) - - wu_file.write(wu_temp_opt.substitute( - {'mff': manif_folder + man_file, 'mfn': fw})) - - print('Created: ' + man_file) - -wu_file.write(wu_temp_p4) - -for name in sorted(os.listdir(manif_path)): - if 'firmware.bin' in name and ('nodemcu' in name or 'sonoff' in name or 'rf-wifi-gateway' in name or 'manual-wifi-test' in name or 'rfbridge' in name): - fw = name.split('-firmware')[0] - man_file = fw + '.manifest.json' - mani_str = mf_temp8266.substitute( - {'cp': cors_proxy, 'bin': manif_folder + name}) - - with open(manif_path + man_file, 'w') as nf: - nf.write(mani_str) - - wu_file.write( - manif_folder + wu_temp_opt.substitute({'mff': manif_folder + man_file, 'mfn': fw})) - - print('Created: ' + man_file) - -wu_file.write(wu_temp_end) -wu_file.close() diff --git a/scripts/generate_board_docs.py b/scripts/generate_board_docs.py deleted file mode 100644 index 19a5d2e455..0000000000 --- a/scripts/generate_board_docs.py +++ /dev/null @@ -1,88 +0,0 @@ -import pytablereader as ptr -import pandas as pd -import os -import re -import configparser -conf = configparser.ConfigParser() - -# Init the table with the columns -table_init = pd.DataFrame(columns=['Environment', 'uC', 'Hardware', 'Description', 'Modules', 'Platform', - 'Partitions', 'Libraries', 'Options']) -table = table_init - -# Parse platformio.ini to retrieve boards information -conf.read('environments.ini') -for each_section in conf.sections(): - if ("env:" in each_section and "-test" not in each_section): - env = each_section.replace("env:", "") - uc = "" - board = "" - hardware = "" - description = "" - modules = "" - platform = "" - partitions = "" - libraries = "" - options = "" - for (k, v) in conf.items(each_section): - v = v.replace('{', '').replace('}', '').replace('$', '').replace( - "env:", '').replace('\'', '').replace("-D", "") - if (k == "board"): - uc = v - if (k == "platform"): - platform = v - if (k == "board_build.partitions"): - partitions = v - if (k == "lib_deps"): - 
libraries = v - libraries = libraries.replace( - "\ncom-esp.lib_deps\n", "").replace( - "\ncom-arduino.lib_deps\n", "").replace("libraries.", "") - if (k == "build_flags"): - options = v - for o in options.split('\n'): - if ("gateway" in o or "sensor" in o or "actuator" in o): - if (modules != ""): - modules = modules + "\n" - modules = modules + o[1:o.rfind("=\"")] - options = options.replace( - "com-esp.build_flags\n", "") - if (k == "custom_description"): - description = v - if (k == "custom_hardware"): - hardware = v - table.loc[len(table.index)] = [env, uc, hardware, description, modules, platform, - partitions, libraries, options] - -# Sort rows per Environment name -table.sort_values(by=['Environment'], inplace=True, - key=lambda col: col.str.lower()) - -# Produce individual file -for ind in table.index: - table_extract = table.iloc[ind] - print(table_extract) - file = open("docs/prerequisites/boards/" + - table.iloc[ind]["Environment"] + ".md", 'w') - table_extract = table_extract.rename_axis("Board index") - table_md = table_extract.to_markdown() - n = file.write(table_md) - file.close() - -# Produce list file -# Add link to the file from the environment and replace /n with , -for ind in table.index: - table['Environment'][ind] = "[" + table['Environment'][ind] + \ - "](../prerequisites/boards/" + table['Environment'][ind] + ")" - -table = table.replace("\n", ", ", regex=True) -table = table.drop(["Partitions", "Hardware", "Platform", "Options","Modules"], axis=1) -table = table.reset_index(drop=True) -print(table) -# Convert to Markdown and save per Model_Id -table_md = table.to_markdown() -file = open("docs/prerequisites/board.md", 'a') -n = file.write("# Supported\n" + table_md) -file = open("docs/upload/web-install.md", 'a') -n = file.write(table_md) -file.close() diff --git a/scripts/prepare_deploy.sh b/scripts/prepare_deploy.sh deleted file mode 100755 index 2dda51b3b9..0000000000 --- a/scripts/prepare_deploy.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash -set -e -echo "renaming bin files with the environment name" -rename -v 's:/:-:g' .pio/build/*/*.bin -mkdir toDeploy -rename 's/.pio-build-//' .*.bin -( - cd .pio/libdeps - echo "replace space by _ in folder names" - find . -type d -name "* *" | while read FNAME; do mv "$FNAME" "${FNAME// /_}"; done - echo "zipping libraries per board" - for i in */ - do - zip -r "${i%/}-libraries.zip" "$i" - done - ls -lA - mv *.zip ../../toDeploy -) -# remove binaries for *-all*, *-test* env and only zip containing *-test* -rm -f *-all*.bin *-test*.bin *-test*.zip -echo "zipping code and licence" -zip -r OpenMQTTGateway_sources.zip main LICENSE.txt -mv *.zip toDeploy -mv *.bin toDeploy - -ls -lA toDeploy diff --git a/scripts/preview_site.js b/scripts/preview_site.js new file mode 100644 index 0000000000..6282b87eef --- /dev/null +++ b/scripts/preview_site.js @@ -0,0 +1,60 @@ +#!/usr/bin/env node +'use strict'; + +// Minimal HTTPS static server for generated/site only. 
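+// Usage: node scripts/preview_site.js [port] [keyFile] [certFile]
+// Defaults: port 8443, key .ssl/key.pem, cert .ssl/cert.pem (see ensure_ssl_certs.js)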
+
+const fs = require('fs');
+const path = require('path');
+const https = require('https');
+const mime = require('mime-types');
+
+const repoRoot = path.resolve(__dirname, '..');
+const siteRoot = path.join(repoRoot, 'generated', 'site');
+const port = parseInt(process.argv[2] || 8443, 10);
+const keyFile = process.argv[3] || path.join(repoRoot, '.ssl', 'key.pem');
+const certFile = process.argv[4] || path.join(repoRoot, '.ssl', 'cert.pem');
+
+if (!fs.existsSync(siteRoot)) {
+  console.error(`Error: folder not found: ${siteRoot}`);
+  process.exit(1);
+}
+
+if (!fs.existsSync(keyFile) || !fs.existsSync(certFile)) {
+  console.error(`Error: SSL certificates not found at ${keyFile} or ${certFile}`);
+  console.error('Use scripts/ensure_ssl_certs.js to generate them.');
+  process.exit(1);
+}
+
+const server = https.createServer({
+  key: fs.readFileSync(keyFile),
+  cert: fs.readFileSync(certFile)
+}, (req, res) => {
+  const reqPath = decodeURIComponent(req.url.split('?')[0]);
+  const safePath = reqPath.endsWith('/') ? `${reqPath}index.html` : reqPath;
+  const filePath = path.join(siteRoot, safePath);
+
+  if (!filePath.startsWith(siteRoot)) {
+    res.writeHead(403);
+    return res.end('Forbidden');
+  }
+
+  fs.stat(filePath, (err, stats) => {
+    if (err || !stats.isFile()) {
+      res.writeHead(404);
+      return res.end('Not Found');
+    }
+    const type = mime.contentType(path.extname(filePath)) || 'application/octet-stream';
+    res.writeHead(200, { 'Content-Type': type });
+    fs.createReadStream(filePath).pipe(res);
+  });
+});
+
+server.listen(port, '0.0.0.0', () => {
+  console.log(`Serving generated/site over HTTPS at https://localhost:${port}/`);
+  console.log(`Also reachable on all interfaces at https://<your-host>:${port}/`);
+  console.log(`Note: if you are testing the DEV site, browse to https://<your-host>:${port}/dev/`);
+});
+
+process.on('SIGINT', () => {
+  server.close(() => process.exit());
+});
diff --git a/scripts/replace_lib.py b/scripts/replace_lib.py
index a792cc5c6d..9e69e2dd5c 100644
--- a/scripts/replace_lib.py
+++ b/scripts/replace_lib.py
@@ -1,4 +1,5 @@
-
+# Replaces BLE library with custom version during PlatformIO build
+# Used by: Currently unused (utility script for BLE library replacement)
 import shutil
 import os
 import hashlib