diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 00000000..e6dbc5af
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,13 @@
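+# Keep databases, stacks, docs, and graph artifacts out of the build context.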
+*.db*
+/stacks
+/node_modules
+*.md
+/docker
+*.dot
+*.svg
+*.mmd
+*.lock
+src/tests
+.github
+.local-tests
\ No newline at end of file
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 00000000..7eeacf33
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1 @@
+*.ts info@itsnik.de
\ No newline at end of file
diff --git a/.github/DockStat.png b/.github/DockStat.png
new file mode 100644
index 00000000..d375bd49
Binary files /dev/null and b/.github/DockStat.png differ
diff --git a/.github/scripts/dep-graph.sh b/.github/scripts/dep-graph.sh
new file mode 100644
index 00000000..be7d8731
--- /dev/null
+++ b/.github/scripts/dep-graph.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
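+# dependency-cruiser emits a plain Mermaid flowchart; prepending this
+# front matter switches the renderer to ELK, which lays out large
+# dependency graphs more readably.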
+mermaidContent="$(cat dependency-graph.mmd)"
+
+echo "---
+config:
+ flowchart:
+ defaultRenderer: elk
+---
+
+$mermaidContent
+" > dependency-graph.mmd
+
diff --git a/.github/workflows/anchore.yml b/.github/workflows/anchore.yml
deleted file mode 100644
index 053123d0..00000000
--- a/.github/workflows/anchore.yml
+++ /dev/null
@@ -1,48 +0,0 @@
-# This workflow uses actions that are not certified by GitHub.
-# They are provided by a third-party and are governed by
-# separate terms of service, privacy policy, and support
-# documentation.
-
-# This workflow checks out code, builds an image, performs a container image
-# vulnerability scan with Anchore's Grype tool, and integrates the results with GitHub Advanced Security
-# code scanning feature. For more information on the Anchore scan action usage
-# and parameters, see https://github.com/anchore/scan-action. For more
-# information on Anchore's container image scanning tool Grype, see
-# https://github.com/anchore/grype
-name: Anchore Grype vulnerability scan
-
-on:
- push:
- branches: [ "main" ]
- pull_request:
- # The branches below must be a subset of the branches above
- branches: [ "main" ]
- schedule:
- - cron: '30 9 * * 1'
-
-permissions:
- contents: read
-
-jobs:
- Anchore-Build-Scan:
- permissions:
- contents: read # for actions/checkout to fetch code
- security-events: write # for github/codeql-action/upload-sarif to upload SARIF results
- actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
- runs-on: ubuntu-latest
- steps:
- - name: Check out the code
- uses: actions/checkout@v4
- - name: Build the Docker image
- run: docker build . --file Dockerfile --tag localbuild/testimage:latest
- - name: Run the Anchore Grype scan action
- uses: anchore/scan-action@d5aa5b6cb9414b0c7771438046ff5bcfa2854ed7
- id: scan
- with:
- image: "localbuild/testimage:latest"
- fail-build: true
- severity-cutoff: critical
- - name: Upload vulnerability report
- uses: github/codeql-action/upload-sarif@v3
- with:
- sarif_file: ${{ steps.scan.outputs.sarif }}
diff --git a/.github/workflows/build-dev.yaml b/.github/workflows/build-dev.yaml
deleted file mode 100644
index 72a370e7..00000000
--- a/.github/workflows/build-dev.yaml
+++ /dev/null
@@ -1,26 +0,0 @@
-name: Docker Image CI (dev)
-
-on:
- push:
- branches: [ "dev" ]
-
-permissions:
- packages: write
- contents: read
-
-jobs:
- build-main:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- name: Checkout repository
-
- - uses: pmorelli92/github-container-registry-build-push@2.2.1
- name: Build and Publish latest service image
- with:
- github-push-secret: ${{secrets.GITHUB_TOKEN}}
- docker-image-name: dockstatapi
- docker-image-tag: dev # optional
- dockerfile-path: Dockerfile # optional
- build-context: . # optional
- build-only: false # optional
diff --git a/.github/workflows/build-image.yml b/.github/workflows/build-image.yml
deleted file mode 100644
index 2836ac9a..00000000
--- a/.github/workflows/build-image.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-name: Docker Image CI
-
-on:
- push:
- branches: [ "main" ]
-
-permissions:
- packages: write
- contents: read
-
-jobs:
- build-main:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- name: Checkout repository
-
- - uses: pmorelli92/github-container-registry-build-push@2.2.1
- name: Build and Publish latest service image
- with:
- github-push-secret: ${{secrets.GITHUB_TOKEN}}
- docker-image-name: dockstatapi
- docker-image-tag: latest # optional
- dockerfile-path: Dockerfile # optional
- build-context: . # optional
- build-only: false # optional
diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml
new file mode 100644
index 00000000..e7061106
--- /dev/null
+++ b/.github/workflows/cd.yml
@@ -0,0 +1,65 @@
+name: "Continuous Delivery"
+
+on:
+ release:
+ types: [published, prereleased]
+
+permissions:
+ contents: read
+ packages: write
+
+jobs:
+ publish:
+ name: Publish Container Image
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Setup Docker Buildx
+ uses: docker/setup-buildx-action@v3
+
+ - name: Login to GHCR
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Determine tags
+ id: tags
+ uses: docker/metadata-action@v5
+ with:
+ images: ghcr.io/its4nik/dockstatapi
+ tags: |
+ type=semver,pattern={{version}}
+ type=semver,pattern={{major}}.{{minor}}
+ type=semver,pattern={{major}}
+ type=sha
+
+ - name: Build and push
+ uses: docker/build-push-action@v5
+ with:
+ context: .
+ file: docker/Dockerfile
+ platforms: linux/amd64,linux/arm64
+ push: true
+ tags: ${{ steps.tags.outputs.tags }}
+
+ sbom:
+ name: Generate SBOM
+ runs-on: ubuntu-latest
+ needs: publish
+ steps:
+ - name: Generate SBOM
+ uses: aquasecurity/trivy-action@0.28.0
+ with:
+ image-ref: ghcr.io/its4nik/dockstatapi:${{ github.event.release.tag_name }}
+ format: spdx-json
+ output: sbom.json
+
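+      # An SPDX SBOM is not SARIF, so it is published as a plain build
+      # artifact rather than through codeql-action/upload-sarif.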
+      - name: Upload SBOM
+        uses: actions/upload-artifact@v4
+        with:
+          path: sbom.json
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 00000000..6cf46283
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,94 @@
+name: "Continuous Integration"
+
+on:
+ push:
+ pull_request:
+
+jobs:
+ unit-test:
+ name: Unit Testing
+ runs-on: ubuntu-latest
+ permissions:
+ contents: write
+ checks: write
+ security-events: write
+
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Setup Bun
+ uses: oven-sh/setup-bun@v2
+ with:
+ bun-version: latest
+
+ - name: Install dependencies
+ run: bun install
+
+ - name: Run unit tests
+ run: |
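+          # PAD_NEW_LINES appears to control test-report formatting (an
+          # assumption from its usage here); the dev compose stack brings up
+          # the services the unit tests talk to.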
+ export PAD_NEW_LINES=false
+ docker compose -f docker/docker-compose.dev.yaml up -d
+ bun test
+
+ - name: Log unit test files
+ run: |
+ ls -lah reports/markdown
+
+ - name: Publish Test Report
+ if: always()
+ run: |
+          SUMMARY=""
+          for element in reports/markdown/*; do
+            SUMMARY="$(printf '%s\n%s' "$SUMMARY" "$(cat "$element")")"
+          done
+          echo "$SUMMARY" >> "$GITHUB_STEP_SUMMARY"
+
+  build-scan:
+ name: Build and Security Scan
+ runs-on: ubuntu-latest
+ needs: unit-test
+ permissions:
+ contents: read
+ checks: write
+ security-events: write
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+
+ - name: Build Docker image
+ uses: docker/build-push-action@v5
+ with:
+ context: .
+ file: docker/Dockerfile
+ tags: dockstatapi:ci-${{ github.sha }}
+ load: true
+
+ - name: Start and test container
+ run: |
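+          # Crude smoke test: the container must still be running after 10s.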
+ docker run --name test-container -d dockstatapi:ci-${{ github.sha }}
+ sleep 10
+ docker ps | grep test-container
+ docker logs test-container
+ docker stop test-container
+
+ - name: Trivy vulnerability scan
+ uses: aquasecurity/trivy-action@0.28.0
+ with:
+ image-ref: "dockstatapi:ci-${{ github.sha }}"
+ format: "sarif"
+ output: "trivy-results.sarif"
+ severity: "HIGH,CRITICAL"
+
+ - name: Upload security results
+ uses: github/codeql-action/upload-sarif@v3
+ with:
+ sarif_file: "trivy-results.sarif"
diff --git a/.github/workflows/contributors.yml b/.github/workflows/contributors.yml
new file mode 100644
index 00000000..9edece27
--- /dev/null
+++ b/.github/workflows/contributors.yml
@@ -0,0 +1,21 @@
+name: Update CONTRIBUTORS file
+on:
+ schedule:
+ - cron: "0 0 1 * *"
+ workflow_dispatch:
+jobs:
+ main:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: minicli/action-contributors@v3.3
+        name: "Update a project's CONTRIBUTORS file"
+ env:
+ CONTRIB_REPOSITORY: "Its4Nik/DockStatAPI"
+ CONTRIB_OUTPUT_FILE: "CONTRIBUTORS.md"
+
+ - name: Commit changes
+ uses: test-room-7/action-update-file@v1
+ with:
+ file-path: "CONTRIBUTORS.md"
+ commit-msg: Update Contributors
+ github-token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/dependency-graph.yml b/.github/workflows/dependency-graph.yml
new file mode 100644
index 00000000..9e178ce7
--- /dev/null
+++ b/.github/workflows/dependency-graph.yml
@@ -0,0 +1,47 @@
+name: "Generate Dependency Graph"
+
+on:
+ push:
+
+permissions:
+  contents: write
+
+jobs:
+ generate:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout Repository
+ uses: actions/checkout@v4
+
+      - name: Setup Bun
+ uses: oven-sh/setup-bun@v2
+
+ - name: Install dependency-cruiser and graphviz
+ run: |
+ bun add dependency-cruiser
+ sudo apt-get install -y graphviz
+
+ - name: Generate Mermaid Dependency Graph
+ run: |
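+          # --ts-pre-compilation-deps follows imports as written before TS
+          # compilation; -x node_modules keeps vendored code out of the graph.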
+ bun run dependency-cruiser --output-type mermaid src/index.ts --output-to dependency-graph.mmd --no-config -x node_modules --ts-pre-compilation-deps --ts-config tsconfig.json
+ echo "Mermaid graph generated at dependency-graph.mmd"
+
+ - name: Convert to ELK Layout
+ run: |
+ bash ./.github/scripts/dep-graph.sh
+
+ - name: Generate Dependency Graph (SVG)
+ run: |
+ bun run dependency-cruiser --output-type dot src/index.ts --output-to dependency-graph.dot --no-config -x node_modules --ts-pre-compilation-deps --ts-config tsconfig.json
+ dot -T svg -Gsplines=ortho dependency-graph.dot -o dependency-graph.svg
+ echo "SVG graph generated at dependency-graph.svg"
+
+ - name: Commit and Push Changes
+ uses: EndBug/add-and-commit@v9
+ with:
+ add: '["dependency-graph.svg", "dependency-graph.mmd"]'
+ message: "Update dependency graphs"
+ committer_name: "GitHub Action"
+ committer_email: "action@github.com"
diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml
new file mode 100644
index 00000000..5fe920a7
--- /dev/null
+++ b/.github/workflows/lint.yaml
@@ -0,0 +1,57 @@
+name: "Lint"
+
+on:
+ push:
+ pull_request:
+
+jobs:
+ lint-test:
+ name: Lint
+ runs-on: ubuntu-latest
+ permissions:
+ contents: write
+ checks: write
+ security-events: write
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Setup Bun
+ uses: oven-sh/setup-bun@v2
+ with:
+ bun-version: latest
+
+ - name: Install dependencies
+ run: bun install
+
+ - name: Knip check
+ if: ${{ github.event_name == 'pull_request' }}
+ uses: codex-/knip-reporter@v2
+
+ - name: Run linter
+ run: |
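+          # Apply formatter and lint fixes in place, then run the read-only
+          # "ci" check so the job still fails if anything is left over.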
+ bun biome format --fix
+ bun biome lint --fix
+ bun biome check --fix
+ bun biome ci
+
+ - name: Add linted files
+ run: git add src/
+
+ - name: Check for changes
+ id: check-changes
+ run: |
+ git diff --cached --quiet || echo "changes_detected=true" >> $GITHUB_OUTPUT
+
+ - name: Commit and push lint changes
+ if: |
+ steps.check-changes.outputs.changes_detected == 'true' &&
+ github.event_name == 'push'
+ run: |
+ git config --global user.name "GitHub Actions"
+ git config --global user.email "actions@github.com"
+ git commit -m "CQL: Apply lint fixes [skip ci]"
+ git push
diff --git a/.gitignore b/.gitignore
index 2e7f14aa..e34b9b1e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,11 @@
-dockstat.log
-node_modules
-.dockerignore
-apprise_config.yml
\ No newline at end of file
+*.db*
+/stacks
+/node_modules
+.test
+build
+*.xml
+dependency-*.mmd
+dependency-*.dot
+dependency-*.svg
+Knip-Report.md
+reports/**
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 00000000..71322072
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "src/typings"]
+ path = src/typings
+	url = https://github.com/Its4Nik/dockstat-types.git
diff --git a/.knip.json b/.knip.json
new file mode 100644
index 00000000..e786d748
--- /dev/null
+++ b/.knip.json
@@ -0,0 +1,5 @@
+{
+ "entry": ["src/index.ts"],
+ "project": ["src/**/*.ts"],
+ "ignore": ["src/plugins/*.plugin.ts", "src/tests/*.ts"]
+}
diff --git a/.local-tests/test-container-changes.sh b/.local-tests/test-container-changes.sh
new file mode 100644
index 00000000..5df50759
--- /dev/null
+++ b/.local-tests/test-container-changes.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+commands=("kill" "start" "restart" "start" "pause" "unpause")
+container="SQLite-web"
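+
+# Cycle the container through lifecycle events so the API's
+# container-change handling can be exercised by hand.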
+
+press(){
+ echo "Press enter to continue"
+ read -r -p ">"
+}
+
+for command in "${commands[@]}"; do
+ press
+ echo "Running $command for $container"
+ docker "$command" "$container"
+done
+
+docker start "$container"
diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md
new file mode 100644
index 00000000..e69de29b
diff --git a/Dockerfile b/Dockerfile
deleted file mode 100644
index 8c70ae68..00000000
--- a/Dockerfile
+++ /dev/null
@@ -1,34 +0,0 @@
-# Stage 1: Build stage
-FROM node:latest AS builder
-
-LABEL maintainer="https://github.com/its4nik"
-LABEL version="1.0"
-LABEL description="API for DockStat: Docker container statistics."
-LABEL license="MIT"
-LABEL repository="https://github.com/its4nik/dockstatapi"
-LABEL documentation="https://github.com/its4nik/dockstatapi"
-
-WORKDIR /api
-
-COPY package*.json ./
-
-RUN npm install --production
-
-COPY . .
-
-# Stage 2: Production stage
-FROM node:alpine
-
-WORKDIR /api
-
-COPY --from=builder /api .
-
-RUN apk add --no-cache bash curl
-
-RUN bash /api/scripts/install_apprise.sh
-
-EXPOSE 7070
-
-HEALTHCHECK CMD curl --fail http://localhost:7070/ || exit 1
-
-ENTRYPOINT [ "bash", "entrypoint.sh" ]
diff --git a/LICENSE b/LICENSE
index 0a731244..428e5953 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,28 +1,408 @@
-BSD 3-Clause License
-
-Copyright (c) 2024, ItsNik
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-1. Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
-
-3. Neither the name of the copyright holder nor the names of its
- contributors may be used to endorse or promote products derived from
- this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Attribution-NonCommercial 4.0 International
+
+=======================================================================
+
+Creative Commons Corporation ("Creative Commons") is not a law firm and
+does not provide legal services or legal advice. Distribution of
+Creative Commons public licenses does not create a lawyer-client or
+other relationship. Creative Commons makes its licenses and related
+information available on an "as-is" basis. Creative Commons gives no
+warranties regarding its licenses, any material licensed under their
+terms and conditions, or any related information. Creative Commons
+disclaims all liability for damages resulting from their use to the
+fullest extent possible.
+
+Using Creative Commons Public Licenses
+
+Creative Commons public licenses provide a standard set of terms and
+conditions that creators and other rights holders may use to share
+original works of authorship and other material subject to copyright
+and certain other rights specified in the public license below. The
+following considerations are for informational purposes only, are not
+exhaustive, and do not form part of our licenses.
+
+ Considerations for licensors: Our public licenses are
+ intended for use by those authorized to give the public
+ permission to use material in ways otherwise restricted by
+ copyright and certain other rights. Our licenses are
+ irrevocable. Licensors should read and understand the terms
+ and conditions of the license they choose before applying it.
+ Licensors should also secure all rights necessary before
+ applying our licenses so that the public can reuse the
+ material as expected. Licensors should clearly mark any
+ material not subject to the license. This includes other CC-
+ licensed material, or material used under an exception or
+ limitation to copyright. More considerations for licensors:
+ wiki.creativecommons.org/Considerations_for_licensors
+
+ Considerations for the public: By using one of our public
+ licenses, a licensor grants the public permission to use the
+ licensed material under specified terms and conditions. If
+ the licensor's permission is not necessary for any reason--for
+ example, because of any applicable exception or limitation to
+ copyright--then that use is not regulated by the license. Our
+ licenses grant only permissions under copyright and certain
+ other rights that a licensor has authority to grant. Use of
+ the licensed material may still be restricted for other
+ reasons, including because others have copyright or other
+ rights in the material. A licensor may make special requests,
+ such as asking that all changes be marked or described.
+ Although not required by our licenses, you are encouraged to
+ respect those requests where reasonable. More considerations
+ for the public:
+ wiki.creativecommons.org/Considerations_for_licensees
+
+=======================================================================
+
+Creative Commons Attribution-NonCommercial 4.0 International Public
+License
+
+By exercising the Licensed Rights (defined below), You accept and agree
+to be bound by the terms and conditions of this Creative Commons
+Attribution-NonCommercial 4.0 International Public License ("Public
+License"). To the extent this Public License may be interpreted as a
+contract, You are granted the Licensed Rights in consideration of Your
+acceptance of these terms and conditions, and the Licensor grants You
+such rights in consideration of benefits the Licensor receives from
+making the Licensed Material available under these terms and
+conditions.
+
+
+Section 1 -- Definitions.
+
+ a. Adapted Material means material subject to Copyright and Similar
+ Rights that is derived from or based upon the Licensed Material
+ and in which the Licensed Material is translated, altered,
+ arranged, transformed, or otherwise modified in a manner requiring
+ permission under the Copyright and Similar Rights held by the
+ Licensor. For purposes of this Public License, where the Licensed
+ Material is a musical work, performance, or sound recording,
+ Adapted Material is always produced where the Licensed Material is
+ synched in timed relation with a moving image.
+
+ b. Adapter's License means the license You apply to Your Copyright
+ and Similar Rights in Your contributions to Adapted Material in
+ accordance with the terms and conditions of this Public License.
+
+ c. Copyright and Similar Rights means copyright and/or similar rights
+ closely related to copyright including, without limitation,
+ performance, broadcast, sound recording, and Sui Generis Database
+ Rights, without regard to how the rights are labeled or
+ categorized. For purposes of this Public License, the rights
+ specified in Section 2(b)(1)-(2) are not Copyright and Similar
+     Rights.
+
+ d. Effective Technological Measures means those measures that, in the
+ absence of proper authority, may not be circumvented under laws
+ fulfilling obligations under Article 11 of the WIPO Copyright
+ Treaty adopted on December 20, 1996, and/or similar international
+ agreements.
+
+ e. Exceptions and Limitations means fair use, fair dealing, and/or
+ any other exception or limitation to Copyright and Similar Rights
+ that applies to Your use of the Licensed Material.
+
+ f. Licensed Material means the artistic or literary work, database,
+ or other material to which the Licensor applied this Public
+ License.
+
+ g. Licensed Rights means the rights granted to You subject to the
+ terms and conditions of this Public License, which are limited to
+ all Copyright and Similar Rights that apply to Your use of the
+ Licensed Material and that the Licensor has authority to license.
+
+ h. Licensor means the individual(s) or entity(ies) granting rights
+ under this Public License.
+
+ i. NonCommercial means not primarily intended for or directed towards
+ commercial advantage or monetary compensation. For purposes of
+ this Public License, the exchange of the Licensed Material for
+ other material subject to Copyright and Similar Rights by digital
+ file-sharing or similar means is NonCommercial provided there is
+ no payment of monetary compensation in connection with the
+ exchange.
+
+ j. Share means to provide material to the public by any means or
+ process that requires permission under the Licensed Rights, such
+ as reproduction, public display, public performance, distribution,
+ dissemination, communication, or importation, and to make material
+ available to the public including in ways that members of the
+ public may access the material from a place and at a time
+ individually chosen by them.
+
+ k. Sui Generis Database Rights means rights other than copyright
+ resulting from Directive 96/9/EC of the European Parliament and of
+ the Council of 11 March 1996 on the legal protection of databases,
+ as amended and/or succeeded, as well as other essentially
+ equivalent rights anywhere in the world.
+
+ l. You means the individual or entity exercising the Licensed Rights
+ under this Public License. Your has a corresponding meaning.
+
+
+Section 2 -- Scope.
+
+ a. License grant.
+
+ 1. Subject to the terms and conditions of this Public License,
+ the Licensor hereby grants You a worldwide, royalty-free,
+ non-sublicensable, non-exclusive, irrevocable license to
+ exercise the Licensed Rights in the Licensed Material to:
+
+ a. reproduce and Share the Licensed Material, in whole or
+ in part, for NonCommercial purposes only; and
+
+ b. produce, reproduce, and Share Adapted Material for
+ NonCommercial purposes only.
+
+ 2. Exceptions and Limitations. For the avoidance of doubt, where
+ Exceptions and Limitations apply to Your use, this Public
+ License does not apply, and You do not need to comply with
+ its terms and conditions.
+
+ 3. Term. The term of this Public License is specified in Section
+ 6(a).
+
+ 4. Media and formats; technical modifications allowed. The
+ Licensor authorizes You to exercise the Licensed Rights in
+ all media and formats whether now known or hereafter created,
+ and to make technical modifications necessary to do so. The
+ Licensor waives and/or agrees not to assert any right or
+ authority to forbid You from making technical modifications
+ necessary to exercise the Licensed Rights, including
+ technical modifications necessary to circumvent Effective
+ Technological Measures. For purposes of this Public License,
+ simply making modifications authorized by this Section 2(a)
+ (4) never produces Adapted Material.
+
+ 5. Downstream recipients.
+
+ a. Offer from the Licensor -- Licensed Material. Every
+ recipient of the Licensed Material automatically
+ receives an offer from the Licensor to exercise the
+ Licensed Rights under the terms and conditions of this
+ Public License.
+
+ b. No downstream restrictions. You may not offer or impose
+ any additional or different terms or conditions on, or
+ apply any Effective Technological Measures to, the
+ Licensed Material if doing so restricts exercise of the
+ Licensed Rights by any recipient of the Licensed
+ Material.
+
+ 6. No endorsement. Nothing in this Public License constitutes or
+ may be construed as permission to assert or imply that You
+ are, or that Your use of the Licensed Material is, connected
+ with, or sponsored, endorsed, or granted official status by,
+ the Licensor or others designated to receive attribution as
+ provided in Section 3(a)(1)(A)(i).
+
+ b. Other rights.
+
+ 1. Moral rights, such as the right of integrity, are not
+ licensed under this Public License, nor are publicity,
+ privacy, and/or other similar personality rights; however, to
+ the extent possible, the Licensor waives and/or agrees not to
+ assert any such rights held by the Licensor to the limited
+ extent necessary to allow You to exercise the Licensed
+ Rights, but not otherwise.
+
+ 2. Patent and trademark rights are not licensed under this
+ Public License.
+
+ 3. To the extent possible, the Licensor waives any right to
+ collect royalties from You for the exercise of the Licensed
+ Rights, whether directly or through a collecting society
+ under any voluntary or waivable statutory or compulsory
+ licensing scheme. In all other cases the Licensor expressly
+ reserves any right to collect such royalties, including when
+ the Licensed Material is used other than for NonCommercial
+ purposes.
+
+
+Section 3 -- License Conditions.
+
+Your exercise of the Licensed Rights is expressly made subject to the
+following conditions.
+
+ a. Attribution.
+
+ 1. If You Share the Licensed Material (including in modified
+ form), You must:
+
+ a. retain the following if it is supplied by the Licensor
+ with the Licensed Material:
+
+ i. identification of the creator(s) of the Licensed
+ Material and any others designated to receive
+ attribution, in any reasonable manner requested by
+ the Licensor (including by pseudonym if
+ designated);
+
+ ii. a copyright notice;
+
+ iii. a notice that refers to this Public License;
+
+ iv. a notice that refers to the disclaimer of
+ warranties;
+
+ v. a URI or hyperlink to the Licensed Material to the
+ extent reasonably practicable;
+
+ b. indicate if You modified the Licensed Material and
+ retain an indication of any previous modifications; and
+
+ c. indicate the Licensed Material is licensed under this
+ Public License, and include the text of, or the URI or
+ hyperlink to, this Public License.
+
+ 2. You may satisfy the conditions in Section 3(a)(1) in any
+ reasonable manner based on the medium, means, and context in
+ which You Share the Licensed Material. For example, it may be
+ reasonable to satisfy the conditions by providing a URI or
+ hyperlink to a resource that includes the required
+ information.
+
+ 3. If requested by the Licensor, You must remove any of the
+ information required by Section 3(a)(1)(A) to the extent
+ reasonably practicable.
+
+ 4. If You Share Adapted Material You produce, the Adapter's
+ License You apply must not prevent recipients of the Adapted
+ Material from complying with this Public License.
+
+
+Section 4 -- Sui Generis Database Rights.
+
+Where the Licensed Rights include Sui Generis Database Rights that
+apply to Your use of the Licensed Material:
+
+ a. for the avoidance of doubt, Section 2(a)(1) grants You the right
+ to extract, reuse, reproduce, and Share all or a substantial
+ portion of the contents of the database for NonCommercial purposes
+ only;
+
+ b. if You include all or a substantial portion of the database
+ contents in a database in which You have Sui Generis Database
+ Rights, then the database in which You have Sui Generis Database
+ Rights (but not its individual contents) is Adapted Material; and
+
+ c. You must comply with the conditions in Section 3(a) if You Share
+ all or a substantial portion of the contents of the database.
+
+For the avoidance of doubt, this Section 4 supplements and does not
+replace Your obligations under this Public License where the Licensed
+Rights include other Copyright and Similar Rights.
+
+
+Section 5 -- Disclaimer of Warranties and Limitation of Liability.
+
+ a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE
+ EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS
+ AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF
+ ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS,
+ IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION,
+ WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR
+ PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS,
+ ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT
+ KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT
+ ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU.
+
+ b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE
+ TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION,
+ NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT,
+ INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES,
+ COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR
+ USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN
+ ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR
+ DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR
+ IN PART, THIS LIMITATION MAY NOT APPLY TO YOU.
+
+ c. The disclaimer of warranties and limitation of liability provided
+ above shall be interpreted in a manner that, to the extent
+ possible, most closely approximates an absolute disclaimer and
+ waiver of all liability.
+
+
+Section 6 -- Term and Termination.
+
+ a. This Public License applies for the term of the Copyright and
+ Similar Rights licensed here. However, if You fail to comply with
+ this Public License, then Your rights under this Public License
+ terminate automatically.
+
+ b. Where Your right to use the Licensed Material has terminated under
+ Section 6(a), it reinstates:
+
+ 1. automatically as of the date the violation is cured, provided
+ it is cured within 30 days of Your discovery of the
+ violation; or
+
+ 2. upon express reinstatement by the Licensor.
+
+ For the avoidance of doubt, this Section 6(b) does not affect any
+ right the Licensor may have to seek remedies for Your violations
+ of this Public License.
+
+ c. For the avoidance of doubt, the Licensor may also offer the
+ Licensed Material under separate terms or conditions or stop
+ distributing the Licensed Material at any time; however, doing so
+ will not terminate this Public License.
+
+ d. Sections 1, 5, 6, 7, and 8 survive termination of this Public
+ License.
+
+
+Section 7 -- Other Terms and Conditions.
+
+ a. The Licensor shall not be bound by any additional or different
+ terms or conditions communicated by You unless expressly agreed.
+
+ b. Any arrangements, understandings, or agreements regarding the
+ Licensed Material not stated herein are separate from and
+ independent of the terms and conditions of this Public License.
+
+
+Section 8 -- Interpretation.
+
+ a. For the avoidance of doubt, this Public License does not, and
+ shall not be interpreted to, reduce, limit, restrict, or impose
+ conditions on any use of the Licensed Material that could lawfully
+ be made without permission under this Public License.
+
+ b. To the extent possible, if any provision of this Public License is
+ deemed unenforceable, it shall be automatically reformed to the
+ minimum extent necessary to make it enforceable. If the provision
+ cannot be reformed, it shall be severed from this Public License
+ without affecting the enforceability of the remaining terms and
+ conditions.
+
+ c. No term or condition of this Public License will be waived and no
+ failure to comply consented to unless expressly agreed to by the
+ Licensor.
+
+ d. Nothing in this Public License constitutes or may be interpreted
+ as a limitation upon, or waiver of, any privileges and immunities
+ that apply to the Licensor or You, including from the legal
+ processes of any jurisdiction or authority.
+
+=======================================================================
+
+Creative Commons is not a party to its public
+licenses. Notwithstanding, Creative Commons may elect to apply one of
+its public licenses to material it publishes and in those instances
+will be considered the "Licensor." The text of the Creative Commons
+public licenses is dedicated to the public domain under the CC0 Public
+Domain Dedication. Except for the limited purpose of indicating that
+material is shared under a Creative Commons public license or as
+otherwise permitted by the Creative Commons policies published at
+creativecommons.org/policies, Creative Commons does not authorize the
+use of the trademark "Creative Commons" or any other trademark or logo
+of Creative Commons without its prior written consent including,
+without limitation, in connection with any unauthorized modifications
+to any of its public licenses or any other arrangements,
+understandings, or agreements concerning use of licensed material. For
+the avoidance of doubt, this paragraph does not form part of the
+public licenses.
+
+Creative Commons may be contacted at creativecommons.org.
diff --git a/README.md b/README.md
index 0735e1af..36384590 100644
--- a/README.md
+++ b/README.md
@@ -1,82 +1,55 @@
-# DockstatAPI
+
+
-## This tool relies on the [DockerSocket Proxy](https://docs.linuxserver.io/images/docker-socket-proxy/), please see it's documentation for more information.
-
-This is the DockStatAPI used in [DockStat](https://github.com/its4nik/dockstat).
-
-It features an easy way to configure using a yaml file.
-
-You can specify multiple hosts, using a Docker Socket Proxy like this:
-
-## Installation:
-
-docker-compose.yaml
-```yaml
-services:
- dockstatapi:
- image: ghcr.io/its4nik/dockstatapi:latest
- container_name: dockstatapi
- environment:
- - SECRET=CHANGEME # This is required in the header 'Authorization': 'CHANGEME'
- - ALLOW_LOCALHOST="False" # Defaults to False
- ports:
- - "7070:7070"
- volumes:
- - ./dockstatapi:/api/config # Place your hosts.yaml file here
- restart: always
-```
-
-Example docker-socket onfiguration:
+---
-```yaml
-socket-proxy:
- image: lscr.io/linuxserver/socket-proxy:latest
- container_name: socket-proxy
- environment:
- - CONTAINERS=1 # Needed for the api to worrk
- - INFO=1 # Needed for the api to work
- volumes:
- - /var/run/docker.sock:/var/run/docker.sock:ro
- restart: unless-stopped
- read_only: true
- tmpfs:
- - /run
- ports:
- - 2375:2375
-```
+# DockStatAPI
-Configuration inside the mounted folder, as hosts.yaml
-```yaml
-mintimeout: 10000 # The minimum time to wait before querying the same server again, defaults to 5000 Ms
+Docker monitoring API with real-time statistics, stack management, and plugin support.
-log:
- logsize: 10 # Specify the Size of the log files in MB, default is 1MB
- LogCount: 1 # How many log files should be kept in rotation. Default is 5
+## Features
-hosts:
- YourHost1:
- url: hetzner
- port: 2375
+- Real-time container metrics via WebSocket
+- Multi-host Docker environment monitoring
+- Compose stack deployment/management
+- Plugin system for custom logic/notifications
+- Historical stats storage (SQLite)
+- Swagger API documentation
+- Web dashboard ([DockStat](https://github.com/its4nik/DockStat))
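+
+## Quick Start
+
+A minimal sketch for running the published image (assumptions: the image
+name is taken from the CD workflow; the port and compose layout are carried
+over from the previous README and may differ in current releases):
+
+```yaml
+services:
+  dockstatapi:
+    image: ghcr.io/its4nik/dockstatapi:latest
+    container_name: dockstatapi
+    ports:
+      - "7070:7070" # assumed: the legacy API listened on 7070
+    restart: always
+```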
-# This is used for DockStat
-# Please see the dockstat documentation for more information
-tags:
- raspberry: red-200
- private: violet-400
+## Tech Stack
-container:
- dozzle: # Container name
- link: https://github.com
-```
+- **Runtime**: [Bun.sh](https://bun.sh)
+- **Framework**: [Elysia.js](https://elysiajs.com/)
+- **Database**: SQLite (WAL mode)
+- **Docker**: dockerode + compose
+- **Monitoring**: Custom metrics collection
+- **Auth**: [Authentication](https://outline.itsnik.de/s/dockstat/doc/authentication-VSGhxqjtXf)
-Please see the documentation for more information on what endpoints will be provieded.
+## Documentation and Wiki
-[Documentation](https://outline.itsnik.de/s/dockstat/doc/backend-api-reference-YzcBbDvY33)
+Please see [DockStatAPI](https://dockstatapi.itsnik.de).
----
+## Project Graph
-This Api uses a "queuing" mechanism to communicate to the servers, so that we dont ask the same server multiple times without getting an answer.
+### SVG
-Feel free to use this API in any of your projects :D
+
-The `/stats` endpoint server all information that are gethered from the server in a json format.
+Click [here](./dependency-graph.mmd) for the Mermaid version.
diff --git a/biome.json b/biome.json
new file mode 100644
index 00000000..f9d82247
--- /dev/null
+++ b/biome.json
@@ -0,0 +1,29 @@
+{
+ "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json",
+ "vcs": {
+ "enabled": true,
+ "clientKind": "git",
+ "useIgnoreFile": true
+ },
+ "formatter": {
+ "enabled": true,
+ "indentStyle": "tab"
+ },
+ "organizeImports": {
+ "enabled": true
+ },
+ "linter": {
+ "enabled": true,
+ "rules": {
+ "recommended": true
+ }
+ },
+ "javascript": {
+ "formatter": {
+ "quoteStyle": "double"
+ }
+ },
+ "files": {
+ "ignore": ["./src/tests/junit-exporter.ts"]
+ }
+}
diff --git a/bun.lock b/bun.lock
new file mode 100644
index 00000000..f2cc38f1
--- /dev/null
+++ b/bun.lock
@@ -0,0 +1,444 @@
+{
+ "lockfileVersion": 1,
+ "workspaces": {
+ "": {
+ "name": "dockstatapi",
+ "dependencies": {
+ "@elysiajs/server-timing": "^1.3.0",
+ "@elysiajs/static": "^1.3.0",
+ "@elysiajs/swagger": "^1.3.0",
+ "chalk": "^5.4.1",
+ "date-fns": "^4.1.0",
+ "docker-compose": "^1.2.0",
+ "dockerode": "^4.0.6",
+ "elysia": "latest",
+ "elysia-remote-dts": "^1.0.2",
+ "knip": "latest",
+ "logestic": "^1.2.4",
+ "split2": "^4.2.0",
+ "winston": "^3.17.0",
+ "yaml": "^2.7.1",
+ },
+ "devDependencies": {
+ "@biomejs/biome": "1.9.4",
+ "@types/bun": "latest",
+ "@types/dockerode": "^3.3.38",
+ "@types/node": "^22.15.17",
+ "@types/split2": "^4.2.3",
+ "bun-types": "latest",
+ "cross-env": "^7.0.3",
+ "logform": "^2.7.0",
+ "typescript": "^5.8.3",
+ "wrap-ansi": "^9.0.0",
+ },
+ },
+ },
+ "trustedDependencies": [
+ "protobufjs",
+ ],
+ "packages": {
+ "@balena/dockerignore": ["@balena/dockerignore@1.0.2", "", {}, "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q=="],
+
+ "@biomejs/biome": ["@biomejs/biome@1.9.4", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "1.9.4", "@biomejs/cli-darwin-x64": "1.9.4", "@biomejs/cli-linux-arm64": "1.9.4", "@biomejs/cli-linux-arm64-musl": "1.9.4", "@biomejs/cli-linux-x64": "1.9.4", "@biomejs/cli-linux-x64-musl": "1.9.4", "@biomejs/cli-win32-arm64": "1.9.4", "@biomejs/cli-win32-x64": "1.9.4" }, "bin": { "biome": "bin/biome" } }, "sha512-1rkd7G70+o9KkTn5KLmDYXihGoTaIGO9PIIN2ZB7UJxFrWw04CZHPYiMRjYsaDvVV7hP1dYNRLxSANLaBFGpog=="],
+
+ "@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@1.9.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-bFBsPWrNvkdKrNCYeAp+xo2HecOGPAy9WyNyB/jKnnedgzl4W4Hb9ZMzYNbf8dMCGmUdSavlYHiR01QaYR58cw=="],
+
+ "@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@1.9.4", "", { "os": "darwin", "cpu": "x64" }, "sha512-ngYBh/+bEedqkSevPVhLP4QfVPCpb+4BBe2p7Xs32dBgs7rh9nY2AIYUL6BgLw1JVXV8GlpKmb/hNiuIxfPfZg=="],
+
+ "@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@1.9.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-fJIW0+LYujdjUgJJuwesP4EjIBl/N/TcOX3IvIHJQNsAqvV2CHIogsmA94BPG6jZATS4Hi+xv4SkBBQSt1N4/g=="],
+
+ "@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@1.9.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-v665Ct9WCRjGa8+kTr0CzApU0+XXtRgwmzIf1SeKSGAv+2scAlW6JR5PMFo6FzqqZ64Po79cKODKf3/AAmECqA=="],
+
+ "@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@1.9.4", "", { "os": "linux", "cpu": "x64" }, "sha512-lRCJv/Vi3Vlwmbd6K+oQ0KhLHMAysN8lXoCI7XeHlxaajk06u7G+UsFSO01NAs5iYuWKmVZjmiOzJ0OJmGsMwg=="],
+
+ "@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@1.9.4", "", { "os": "linux", "cpu": "x64" }, "sha512-gEhi/jSBhZ2m6wjV530Yy8+fNqG8PAinM3oV7CyO+6c3CEh16Eizm21uHVsyVBEB6RIM8JHIl6AGYCv6Q6Q9Tg=="],
+
+ "@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@1.9.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-tlbhLk+WXZmgwoIKwHIHEBZUwxml7bRJgk0X2sPyNR3S93cdRq6XulAZRQJ17FYGGzWne0fgrXBKpl7l4M87Hg=="],
+
+ "@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@1.9.4", "", { "os": "win32", "cpu": "x64" }, "sha512-8Y5wMhVIPaWe6jw2H+KlEm4wP/f7EW3810ZLmDlrEEy5KvBsb9ECEfu/kMWD484ijfQ8+nIi0giMgu9g1UAuuA=="],
+
+ "@colors/colors": ["@colors/colors@1.6.0", "", {}, "sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA=="],
+
+ "@dabh/diagnostics": ["@dabh/diagnostics@2.0.3", "", { "dependencies": { "colorspace": "1.1.x", "enabled": "2.0.x", "kuler": "^2.0.0" } }, "sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA=="],
+
+ "@elysiajs/server-timing": ["@elysiajs/server-timing@1.3.0", "", { "peerDependencies": { "elysia": ">= 1.3.0" } }, "sha512-c5Ay0Va7gIWjJ9CawHx05UtKP6UQVkMKCFnf16eBG0G/GgUkrMMGHWD/duCBaDbeRwbbb7IwHDoaFvStWrB2IQ=="],
+
+ "@elysiajs/static": ["@elysiajs/static@1.3.0", "", { "dependencies": { "node-cache": "^5.1.2" }, "peerDependencies": { "elysia": ">= 1.3.0" } }, "sha512-7mWlj2U/AZvH27IfRKqpUjDP1W9ZRldF9NmdnatFEtx0AOy7YYgyk0rt5hXrH6wPcR//2gO2Qy+k5rwswpEhJA=="],
+
+ "@elysiajs/swagger": ["@elysiajs/swagger@1.3.0", "", { "dependencies": { "@scalar/themes": "^0.9.52", "@scalar/types": "^0.0.12", "openapi-types": "^12.1.3", "pathe": "^1.1.2" }, "peerDependencies": { "elysia": ">= 1.3.0" } }, "sha512-0fo3FWkDRPNYpowJvLz3jBHe9bFe6gruZUyf+feKvUEEMG9ZHptO1jolSoPE0ffFw1BgN1/wMsP19p4GRXKdfg=="],
+
+ "@grpc/grpc-js": ["@grpc/grpc-js@1.13.3", "", { "dependencies": { "@grpc/proto-loader": "^0.7.13", "@js-sdsl/ordered-map": "^4.4.2" } }, "sha512-FTXHdOoPbZrBjlVLHuKbDZnsTxXv2BlHF57xw6LuThXacXvtkahEPED0CKMk6obZDf65Hv4k3z62eyPNpvinIg=="],
+
+ "@grpc/proto-loader": ["@grpc/proto-loader@0.7.15", "", { "dependencies": { "lodash.camelcase": "^4.3.0", "long": "^5.0.0", "protobufjs": "^7.2.5", "yargs": "^17.7.2" }, "bin": { "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" } }, "sha512-tMXdRCfYVixjuFK+Hk0Q1s38gV9zDiDJfWL3h1rv4Qc39oILCu1TRTDt7+fGUI8K4G1Fj125Hx/ru3azECWTyQ=="],
+
+ "@js-sdsl/ordered-map": ["@js-sdsl/ordered-map@4.4.2", "", {}, "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw=="],
+
+ "@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="],
+
+ "@nodelib/fs.stat": ["@nodelib/fs.stat@2.0.5", "", {}, "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="],
+
+ "@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="],
+
+ "@protobufjs/aspromise": ["@protobufjs/aspromise@1.1.2", "", {}, "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ=="],
+
+ "@protobufjs/base64": ["@protobufjs/base64@1.1.2", "", {}, "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg=="],
+
+ "@protobufjs/codegen": ["@protobufjs/codegen@2.0.4", "", {}, "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg=="],
+
+ "@protobufjs/eventemitter": ["@protobufjs/eventemitter@1.1.0", "", {}, "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q=="],
+
+ "@protobufjs/fetch": ["@protobufjs/fetch@1.1.0", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.1", "@protobufjs/inquire": "^1.1.0" } }, "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ=="],
+
+ "@protobufjs/float": ["@protobufjs/float@1.0.2", "", {}, "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ=="],
+
+ "@protobufjs/inquire": ["@protobufjs/inquire@1.1.0", "", {}, "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q=="],
+
+ "@protobufjs/path": ["@protobufjs/path@1.1.2", "", {}, "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA=="],
+
+ "@protobufjs/pool": ["@protobufjs/pool@1.1.0", "", {}, "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw=="],
+
+ "@protobufjs/utf8": ["@protobufjs/utf8@1.1.0", "", {}, "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="],
+
+ "@scalar/openapi-types": ["@scalar/openapi-types@0.1.1", "", {}, "sha512-NMy3QNk6ytcCoPUGJH0t4NNr36OWXgZhA3ormr3TvhX1NDgoF95wFyodGVH8xiHeUyn2/FxtETm8UBLbB5xEmg=="],
+
+ "@scalar/themes": ["@scalar/themes@0.9.86", "", { "dependencies": { "@scalar/types": "0.1.7" } }, "sha512-QUHo9g5oSWi+0Lm1vJY9TaMZRau8LHg+vte7q5BVTBnu6NuQfigCaN+ouQ73FqIVd96TwMO6Db+dilK1B+9row=="],
+
+ "@scalar/types": ["@scalar/types@0.0.12", "", { "dependencies": { "@scalar/openapi-types": "0.1.1", "@unhead/schema": "^1.9.5" } }, "sha512-XYZ36lSEx87i4gDqopQlGCOkdIITHHEvgkuJFrXFATQs9zHARop0PN0g4RZYWj+ZpCUclOcaOjbCt8JGe22mnQ=="],
+
+ "@sinclair/typebox": ["@sinclair/typebox@0.34.33", "", {}, "sha512-5HAV9exOMcXRUxo+9iYB5n09XxzCXnfy4VTNW4xnDv+FgjzAGY989C28BIdljKqmF+ZltUwujE3aossvcVtq6g=="],
+
+ "@tokenizer/inflate": ["@tokenizer/inflate@0.2.7", "", { "dependencies": { "debug": "^4.4.0", "fflate": "^0.8.2", "token-types": "^6.0.0" } }, "sha512-MADQgmZT1eKjp06jpI2yozxaU9uVs4GzzgSL+uEq7bVcJ9V1ZXQkeGNql1fsSI0gMy1vhvNTNbUqrx+pZfJVmg=="],
+
+ "@tokenizer/token": ["@tokenizer/token@0.3.0", "", {}, "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A=="],
+
+ "@types/bun": ["@types/bun@1.2.13", "", { "dependencies": { "bun-types": "1.2.13" } }, "sha512-u6vXep/i9VBxoJl3GjZsl/BFIsvML8DfVDO0RYLEwtSZSp981kEO1V5NwRcO1CPJ7AmvpbnDCiMKo3JvbDEjAg=="],
+
+ "@types/docker-modem": ["@types/docker-modem@3.0.6", "", { "dependencies": { "@types/node": "*", "@types/ssh2": "*" } }, "sha512-yKpAGEuKRSS8wwx0joknWxsmLha78wNMe9R2S3UNsVOkZded8UqOrV8KoeDXoXsjndxwyF3eIhyClGbO1SEhEg=="],
+
+ "@types/dockerode": ["@types/dockerode@3.3.38", "", { "dependencies": { "@types/docker-modem": "*", "@types/node": "*", "@types/ssh2": "*" } }, "sha512-nnrcfUe2iR+RyOuz0B4bZgQwD9djQa9ADEjp7OAgBs10pYT0KSCtplJjcmBDJz0qaReX5T7GbE5i4VplvzUHvA=="],
+
+ "@types/node": ["@types/node@22.15.17", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-wIX2aSZL5FE+MR0JlvF87BNVrtFWf6AE6rxSE9X7OwnVvoyCQjpzSRJ+M87se/4QCkCiebQAqrJ0y6fwIyi7nw=="],
+
+ "@types/split2": ["@types/split2@4.2.3", "", { "dependencies": { "@types/node": "*" } }, "sha512-59OXIlfUsi2k++H6CHgUQKEb2HKRokUA39HY1i1dS8/AIcqVjtAAFdf8u+HxTWK/4FUHMJQlKSZ4I6irCBJ1Zw=="],
+
+ "@types/ssh2": ["@types/ssh2@1.15.5", "", { "dependencies": { "@types/node": "^18.11.18" } }, "sha512-N1ASjp/nXH3ovBHddRJpli4ozpk6UdDYIX4RJWFa9L1YKnzdhTlVmiGHm4DZnj/jLbqZpes4aeR30EFGQtvhQQ=="],
+
+ "@types/triple-beam": ["@types/triple-beam@1.3.5", "", {}, "sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw=="],
+
+ "@unhead/schema": ["@unhead/schema@1.11.20", "", { "dependencies": { "hookable": "^5.5.3", "zhead": "^2.2.4" } }, "sha512-0zWykKAaJdm+/Y7yi/Yds20PrUK7XabLe9c3IRcjnwYmSWY6z0Cr19VIs3ozCj8P+GhR+/TI2mwtGlueCEYouA=="],
+
+ "ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="],
+
+ "ansi-styles": ["ansi-styles@6.2.1", "", {}, "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug=="],
+
+ "argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="],
+
+ "asn1": ["asn1@0.2.6", "", { "dependencies": { "safer-buffer": "~2.1.0" } }, "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ=="],
+
+ "async": ["async@3.2.6", "", {}, "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA=="],
+
+ "base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="],
+
+ "bcrypt-pbkdf": ["bcrypt-pbkdf@1.0.2", "", { "dependencies": { "tweetnacl": "^0.14.3" } }, "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w=="],
+
+ "bl": ["bl@4.1.0", "", { "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w=="],
+
+ "braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="],
+
+ "buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="],
+
+ "buildcheck": ["buildcheck@0.0.6", "", {}, "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A=="],
+
+ "bun-types": ["bun-types@1.2.13", "", { "dependencies": { "@types/node": "*" } }, "sha512-rRjA1T6n7wto4gxhAO/ErZEtOXyEZEmnIHQfl0Dt1QQSB4QV0iP6BZ9/YB5fZaHFQ2dwHFrmPaRQ9GGMX01k9Q=="],
+
+ "chalk": ["chalk@5.4.1", "", {}, "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w=="],
+
+ "chownr": ["chownr@1.1.4", "", {}, "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="],
+
+ "cliui": ["cliui@8.0.1", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" } }, "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ=="],
+
+ "clone": ["clone@2.1.2", "", {}, "sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w=="],
+
+ "color": ["color@3.2.1", "", { "dependencies": { "color-convert": "^1.9.3", "color-string": "^1.6.0" } }, "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA=="],
+
+ "color-convert": ["color-convert@1.9.3", "", { "dependencies": { "color-name": "1.1.3" } }, "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg=="],
+
+ "color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],
+
+ "color-string": ["color-string@1.9.1", "", { "dependencies": { "color-name": "^1.0.0", "simple-swizzle": "^0.2.2" } }, "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg=="],
+
+ "colorspace": ["colorspace@1.1.4", "", { "dependencies": { "color": "^3.1.3", "text-hex": "1.0.x" } }, "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w=="],
+
+ "cookie": ["cookie@1.0.2", "", {}, "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA=="],
+
+ "cpu-features": ["cpu-features@0.0.10", "", { "dependencies": { "buildcheck": "~0.0.6", "nan": "^2.19.0" } }, "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA=="],
+
+ "cross-env": ["cross-env@7.0.3", "", { "dependencies": { "cross-spawn": "^7.0.1" }, "bin": { "cross-env": "src/bin/cross-env.js", "cross-env-shell": "src/bin/cross-env-shell.js" } }, "sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw=="],
+
+ "cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="],
+
+ "date-fns": ["date-fns@4.1.0", "", {}, "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg=="],
+
+ "debug": ["debug@4.4.0", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA=="],
+
+ "docker-compose": ["docker-compose@1.2.0", "", { "dependencies": { "yaml": "^2.2.2" } }, "sha512-wIU1eHk3Op7dFgELRdmOYlPYS4gP8HhH1ZmZa13QZF59y0fblzFDFmKPhyc05phCy2hze9OEvNZAsoljrs+72w=="],
+
+ "docker-modem": ["docker-modem@5.0.6", "", { "dependencies": { "debug": "^4.1.1", "readable-stream": "^3.5.0", "split-ca": "^1.0.1", "ssh2": "^1.15.0" } }, "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ=="],
+
+ "dockerode": ["dockerode@4.0.6", "", { "dependencies": { "@balena/dockerignore": "^1.0.2", "@grpc/grpc-js": "^1.11.1", "@grpc/proto-loader": "^0.7.13", "docker-modem": "^5.0.6", "protobufjs": "^7.3.2", "tar-fs": "~2.1.2", "uuid": "^10.0.0" } }, "sha512-FbVf3Z8fY/kALB9s+P9epCpWhfi/r0N2DgYYcYpsAUlaTxPjdsitsFobnltb+lyCgAIvf9C+4PSWlTnHlJMf1w=="],
+
+ "elysia": ["elysia@1.3.1", "", { "dependencies": { "cookie": "^1.0.2", "exact-mirror": "0.1.2", "fast-decode-uri-component": "^1.0.1" }, "optionalDependencies": { "@sinclair/typebox": "^0.34.33", "openapi-types": "^12.1.3" }, "peerDependencies": { "file-type": ">= 20.0.0", "typescript": ">= 5.0.0" } }, "sha512-En41P6cDHcHtQ0nvfsn9ayB+8ahQJqG1nzvPX8FVZjOriFK/RtZPQBtXMfZDq/AsVIk7JFZGFEtAVEmztNJVhQ=="],
+
+ "elysia-remote-dts": ["elysia-remote-dts@1.0.2", "", { "dependencies": { "debug": "4.4.0", "get-tsconfig": "4.10.0" }, "peerDependencies": { "elysia": ">= 1.0.0", "typescript": ">=5" } }, "sha512-ktRxKGozPDW24d3xbUS2sMLNsRHHX/a4Pgqyzv2O0X4HsDrD+agoUYL/PvYQrGJKPSc3xzvU5uvhNHFhEql6aw=="],
+
+ "emoji-regex": ["emoji-regex@10.4.0", "", {}, "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw=="],
+
+ "enabled": ["enabled@2.0.0", "", {}, "sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ=="],
+
+ "end-of-stream": ["end-of-stream@1.4.4", "", { "dependencies": { "once": "^1.4.0" } }, "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q=="],
+
+ "enhanced-resolve": ["enhanced-resolve@5.18.1", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" } }, "sha512-ZSW3ma5GkcQBIpwZTSRAI8N71Uuwgs93IezB7mf7R60tC8ZbJideoDNKjHn2O9KIlx6rkGTTEk1xUCK2E1Y2Yg=="],
+
+ "escalade": ["escalade@3.2.0", "", {}, "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="],
+
+ "exact-mirror": ["exact-mirror@0.1.2", "", { "peerDependencies": { "@sinclair/typebox": "^0.34.15" }, "optionalPeers": ["@sinclair/typebox"] }, "sha512-wFCPCDLmHbKGUb8TOi/IS7jLsgR8WVDGtDK3CzcB4Guf/weq7G+I+DkXiRSZfbemBFOxOINKpraM6ml78vo8Zw=="],
+
+ "fast-decode-uri-component": ["fast-decode-uri-component@1.0.1", "", {}, "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg=="],
+
+ "fast-glob": ["fast-glob@3.3.3", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.8" } }, "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg=="],
+
+ "fastq": ["fastq@1.19.1", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ=="],
+
+ "fd-package-json": ["fd-package-json@1.2.0", "", { "dependencies": { "walk-up-path": "^3.0.1" } }, "sha512-45LSPmWf+gC5tdCQMNH4s9Sr00bIkiD9aN7dc5hqkrEw1geRYyDQS1v1oMHAW3ysfxfndqGsrDREHHjNNbKUfA=="],
+
+ "fecha": ["fecha@4.2.3", "", {}, "sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw=="],
+
+ "fflate": ["fflate@0.8.2", "", {}, "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A=="],
+
+ "file-type": ["file-type@20.5.0", "", { "dependencies": { "@tokenizer/inflate": "^0.2.6", "strtok3": "^10.2.0", "token-types": "^6.0.0", "uint8array-extras": "^1.4.0" } }, "sha512-BfHZtG/l9iMm4Ecianu7P8HRD2tBHLtjXinm4X62XBOYzi7CYA7jyqfJzOvXHqzVrVPYqBo2/GvbARMaaJkKVg=="],
+
+ "fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="],
+
+ "fn.name": ["fn.name@1.1.0", "", {}, "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw=="],
+
+ "formatly": ["formatly@0.2.3", "", { "dependencies": { "fd-package-json": "^1.2.0" }, "bin": { "formatly": "bin/index.mjs" } }, "sha512-WH01vbXEjh9L3bqn5V620xUAWs32CmK4IzWRRY6ep5zpa/mrisL4d9+pRVuETORVDTQw8OycSO1WC68PL51RaA=="],
+
+ "fs-constants": ["fs-constants@1.0.0", "", {}, "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow=="],
+
+ "get-caller-file": ["get-caller-file@2.0.5", "", {}, "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="],
+
+ "get-east-asian-width": ["get-east-asian-width@1.3.0", "", {}, "sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ=="],
+
+ "get-tsconfig": ["get-tsconfig@4.10.0", "", { "dependencies": { "resolve-pkg-maps": "^1.0.0" } }, "sha512-kGzZ3LWWQcGIAmg6iWvXn0ei6WDtV26wzHRMwDSzmAbcXrTEXxHy6IehI6/4eT6VRKyMP1eF1VqwrVUmE/LR7A=="],
+
+ "glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],
+
+ "graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="],
+
+ "hookable": ["hookable@5.5.3", "", {}, "sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ=="],
+
+ "ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],
+
+ "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="],
+
+ "is-arrayish": ["is-arrayish@0.3.2", "", {}, "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="],
+
+ "is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="],
+
+ "is-fullwidth-code-point": ["is-fullwidth-code-point@3.0.0", "", {}, "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="],
+
+ "is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="],
+
+ "is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="],
+
+ "is-stream": ["is-stream@2.0.1", "", {}, "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="],
+
+ "isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="],
+
+ "jiti": ["jiti@2.4.2", "", { "bin": { "jiti": "lib/jiti-cli.mjs" } }, "sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A=="],
+
+ "js-yaml": ["js-yaml@4.1.0", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA=="],
+
+ "knip": ["knip@5.55.1", "", { "dependencies": { "@nodelib/fs.walk": "^1.2.3", "enhanced-resolve": "^5.18.1", "fast-glob": "^3.3.3", "formatly": "^0.2.3", "jiti": "^2.4.2", "js-yaml": "^4.1.0", "minimist": "^1.2.8", "picocolors": "^1.1.0", "picomatch": "^4.0.1", "smol-toml": "^1.3.1", "strip-json-comments": "5.0.1", "zod": "^3.22.4", "zod-validation-error": "^3.0.3" }, "peerDependencies": { "@types/node": ">=18", "typescript": ">=5.0.4" }, "bin": { "knip": "bin/knip.js", "knip-bun": "bin/knip-bun.js" } }, "sha512-NYXjgGrXgMdabUKCP2TlBH/e83m9KnLc1VLyWHUtoRrCEJ/C15YtbafrpTvm3td+jE4VdDPgudvXT1IMtCx8lw=="],
+
+ "kuler": ["kuler@2.0.0", "", {}, "sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A=="],
+
+ "lodash.camelcase": ["lodash.camelcase@4.3.0", "", {}, "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA=="],
+
+ "logestic": ["logestic@1.2.4", "", { "dependencies": { "chalk": "^5.3.0" }, "peerDependencies": { "elysia": "^1.1.3", "typescript": "^5.0.0" } }, "sha512-Wka/xFdKgqU6JBk8yxAUsqcUjPA/aExpcnm7KnOAxlLo1U71kuWGeEjPw8XVLZzLleTWwmRqJUb2yI5XZP+vAA=="],
+
+ "logform": ["logform@2.7.0", "", { "dependencies": { "@colors/colors": "1.6.0", "@types/triple-beam": "^1.3.2", "fecha": "^4.2.0", "ms": "^2.1.1", "safe-stable-stringify": "^2.3.1", "triple-beam": "^1.3.0" } }, "sha512-TFYA4jnP7PVbmlBIfhlSe+WKxs9dklXMTEGcBCIvLhE/Tn3H6Gk1norupVW7m5Cnd4bLcr08AytbyV/xj7f/kQ=="],
+
+ "long": ["long@5.3.2", "", {}, "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA=="],
+
+ "merge2": ["merge2@1.4.1", "", {}, "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="],
+
+ "micromatch": ["micromatch@4.0.8", "", { "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" } }, "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA=="],
+
+ "minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="],
+
+ "mkdirp-classic": ["mkdirp-classic@0.5.3", "", {}, "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="],
+
+ "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
+
+ "nan": ["nan@2.22.2", "", {}, "sha512-DANghxFkS1plDdRsX0X9pm0Z6SJNN6gBdtXfanwoZ8hooC5gosGFSBGRYHUVPz1asKA/kMRqDRdHrluZ61SpBQ=="],
+
+ "nanoid": ["nanoid@5.1.5", "", { "bin": { "nanoid": "bin/nanoid.js" } }, "sha512-Ir/+ZpE9fDsNH0hQ3C68uyThDXzYcim2EqcZ8zn8Chtt1iylPT9xXJB0kPCnqzgcEGikO9RxSrh63MsmVCU7Fw=="],
+
+ "node-cache": ["node-cache@5.1.2", "", { "dependencies": { "clone": "2.x" } }, "sha512-t1QzWwnk4sjLWaQAS8CHgOJ+RAfmHpxFWmc36IWTiWHQfs0w5JDMBS1b1ZxQteo0vVVuWJvIUKHDkkeK7vIGCg=="],
+
+ "once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="],
+
+ "one-time": ["one-time@1.0.0", "", { "dependencies": { "fn.name": "1.x.x" } }, "sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g=="],
+
+ "openapi-types": ["openapi-types@12.1.3", "", {}, "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw=="],
+
+ "path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="],
+
+ "pathe": ["pathe@1.1.2", "", {}, "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ=="],
+
+ "peek-readable": ["peek-readable@7.0.0", "", {}, "sha512-nri2TO5JE3/mRryik9LlHFT53cgHfRK0Lt0BAZQXku/AW3E6XLt2GaY8siWi7dvW/m1z0ecn+J+bpDa9ZN3IsQ=="],
+
+ "picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
+
+ "picomatch": ["picomatch@4.0.2", "", {}, "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg=="],
+
+ "protobufjs": ["protobufjs@7.5.1", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", "@protobufjs/codegen": "^2.0.4", "@protobufjs/eventemitter": "^1.1.0", "@protobufjs/fetch": "^1.1.0", "@protobufjs/float": "^1.0.2", "@protobufjs/inquire": "^1.1.0", "@protobufjs/path": "^1.1.2", "@protobufjs/pool": "^1.1.0", "@protobufjs/utf8": "^1.1.0", "@types/node": ">=13.7.0", "long": "^5.0.0" } }, "sha512-3qx3IRjR9WPQKagdwrKjO3Gu8RgQR2qqw+1KnigWhoVjFqegIj1K3bP11sGqhxrO46/XL7lekuG4jmjL+4cLsw=="],
+
+ "pump": ["pump@3.0.2", "", { "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw=="],
+
+ "queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="],
+
+ "readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="],
+
+ "require-directory": ["require-directory@2.1.1", "", {}, "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="],
+
+ "resolve-pkg-maps": ["resolve-pkg-maps@1.0.0", "", {}, "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw=="],
+
+ "reusify": ["reusify@1.1.0", "", {}, "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw=="],
+
+ "run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="],
+
+ "safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],
+
+ "safe-stable-stringify": ["safe-stable-stringify@2.5.0", "", {}, "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA=="],
+
+ "safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="],
+
+ "shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="],
+
+ "shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="],
+
+ "simple-swizzle": ["simple-swizzle@0.2.2", "", { "dependencies": { "is-arrayish": "^0.3.1" } }, "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg=="],
+
+ "smol-toml": ["smol-toml@1.3.4", "", {}, "sha512-UOPtVuYkzYGee0Bd2Szz8d2G3RfMfJ2t3qVdZUAozZyAk+a0Sxa+QKix0YCwjL/A1RR0ar44nCxaoN9FxdJGwA=="],
+
+ "split-ca": ["split-ca@1.0.1", "", {}, "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ=="],
+
+ "split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="],
+
+ "ssh2": ["ssh2@1.16.0", "", { "dependencies": { "asn1": "^0.2.6", "bcrypt-pbkdf": "^1.0.2" }, "optionalDependencies": { "cpu-features": "~0.0.10", "nan": "^2.20.0" } }, "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg=="],
+
+ "stack-trace": ["stack-trace@0.0.10", "", {}, "sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg=="],
+
+ "string-width": ["string-width@7.2.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="],
+
+ "string_decoder": ["string_decoder@1.3.0", "", { "dependencies": { "safe-buffer": "~5.2.0" } }, "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="],
+
+ "strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="],
+
+ "strip-json-comments": ["strip-json-comments@5.0.1", "", {}, "sha512-0fk9zBqO67Nq5M/m45qHCJxylV/DhBlIOVExqgOMiCCrzrhU6tCibRXNqE3jwJLftzE9SNuZtYbpzcO+i9FiKw=="],
+
+ "strtok3": ["strtok3@10.2.2", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "peek-readable": "^7.0.0" } }, "sha512-Xt18+h4s7Z8xyZ0tmBoRmzxcop97R4BAh+dXouUDCYn+Em+1P3qpkUfI5ueWLT8ynC5hZ+q4iPEmGG1urvQGBg=="],
+
+ "tapable": ["tapable@2.2.1", "", {}, "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ=="],
+
+ "tar-fs": ["tar-fs@2.1.2", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA=="],
+
+ "tar-stream": ["tar-stream@2.2.0", "", { "dependencies": { "bl": "^4.0.3", "end-of-stream": "^1.4.1", "fs-constants": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^3.1.1" } }, "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ=="],
+
+ "text-hex": ["text-hex@1.0.0", "", {}, "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg=="],
+
+ "to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="],
+
+ "token-types": ["token-types@6.0.0", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "ieee754": "^1.2.1" } }, "sha512-lbDrTLVsHhOMljPscd0yitpozq7Ga2M5Cvez5AjGg8GASBjtt6iERCAJ93yommPmz62fb45oFIXHEZ3u9bfJEA=="],
+
+ "triple-beam": ["triple-beam@1.4.1", "", {}, "sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg=="],
+
+ "tweetnacl": ["tweetnacl@0.14.5", "", {}, "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="],
+
+ "type-fest": ["type-fest@4.41.0", "", {}, "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA=="],
+
+ "typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="],
+
+ "uint8array-extras": ["uint8array-extras@1.4.0", "", {}, "sha512-ZPtzy0hu4cZjv3z5NW9gfKnNLjoz4y6uv4HlelAjDK7sY/xOkKZv9xK/WQpcsBB3jEybChz9DPC2U/+cusjJVQ=="],
+
+ "undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="],
+
+ "util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="],
+
+ "uuid": ["uuid@10.0.0", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ=="],
+
+ "walk-up-path": ["walk-up-path@3.0.1", "", {}, "sha512-9YlCL/ynK3CTlrSRrDxZvUauLzAswPCrsaCgilqFevUYpeEW0/3ScEjaa3kbW/T0ghhkEr7mv+fpjqn1Y1YuTA=="],
+
+ "which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
+
+ "winston": ["winston@3.17.0", "", { "dependencies": { "@colors/colors": "^1.6.0", "@dabh/diagnostics": "^2.0.2", "async": "^3.2.3", "is-stream": "^2.0.0", "logform": "^2.7.0", "one-time": "^1.0.0", "readable-stream": "^3.4.0", "safe-stable-stringify": "^2.3.1", "stack-trace": "0.0.x", "triple-beam": "^1.3.0", "winston-transport": "^4.9.0" } }, "sha512-DLiFIXYC5fMPxaRg832S6F5mJYvePtmO5G9v9IgUFPhXm9/GkXarH/TUrBAVzhTCzAj9anE/+GjrgXp/54nOgw=="],
+
+ "winston-transport": ["winston-transport@4.9.0", "", { "dependencies": { "logform": "^2.7.0", "readable-stream": "^3.6.2", "triple-beam": "^1.3.0" } }, "sha512-8drMJ4rkgaPo1Me4zD/3WLfI/zPdA9o2IipKODunnGDcuqbHwjsbB79ylv04LCGGzU0xQ6vTznOMpQGaLhhm6A=="],
+
+ "wrap-ansi": ["wrap-ansi@9.0.0", "", { "dependencies": { "ansi-styles": "^6.2.1", "string-width": "^7.0.0", "strip-ansi": "^7.1.0" } }, "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q=="],
+
+ "wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],
+
+ "y18n": ["y18n@5.0.8", "", {}, "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="],
+
+ "yaml": ["yaml@2.7.1", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-10ULxpnOCQXxJvBgxsn9ptjq6uviG/htZKk9veJGhlqn3w/DxQ631zFF+nlQXLwmImeS5amR2dl2U8sg6U9jsQ=="],
+
+ "yargs": ["yargs@17.7.2", "", { "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", "yargs-parser": "^21.1.1" } }, "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w=="],
+
+ "yargs-parser": ["yargs-parser@21.1.1", "", {}, "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="],
+
+ "zhead": ["zhead@2.2.4", "", {}, "sha512-8F0OI5dpWIA5IGG5NHUg9staDwz/ZPxZtvGVf01j7vHqSyZ0raHY+78atOVxRqb73AotX22uV1pXt3gYSstGag=="],
+
+ "zod": ["zod@3.24.4", "", {}, "sha512-OdqJE9UDRPwWsrHjLN2F8bPxvwJBK22EHLWtanu0LSYr5YqzsaaW3RMgmjwr8Rypg5k+meEJdSPXJZXE/yqOMg=="],
+
+ "zod-validation-error": ["zod-validation-error@3.4.1", "", { "peerDependencies": { "zod": "^3.24.4" } }, "sha512-1KP64yqDPQ3rupxNv7oXhf7KdhHHgaqbKuspVoiN93TT0xrBjql+Svjkdjq/Qh/7GSMmgQs3AfvBT0heE35thw=="],
+
+ "@scalar/themes/@scalar/types": ["@scalar/types@0.1.7", "", { "dependencies": { "@scalar/openapi-types": "0.2.0", "@unhead/schema": "^1.11.11", "nanoid": "^5.1.5", "type-fest": "^4.20.0", "zod": "^3.23.8" } }, "sha512-irIDYzTQG2KLvFbuTI8k2Pz/R4JR+zUUSykVTbEMatkzMmVFnn1VzNSMlODbadycwZunbnL2tA27AXed9URVjw=="],
+
+ "@types/ssh2/@types/node": ["@types/node@18.19.100", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-ojmMP8SZBKprc3qGrGk8Ujpo80AXkrP7G2tOT4VWr5jlr5DHjsJF+emXJz+Wm0glmy4Js62oKMdZZ6B9Y+tEcA=="],
+
+ "cliui/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="],
+
+ "cliui/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="],
+
+ "cliui/wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="],
+
+ "color-string/color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="],
+
+ "micromatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
+
+ "yargs/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="],
+
+ "@scalar/themes/@scalar/types/@scalar/openapi-types": ["@scalar/openapi-types@0.2.0", "", { "dependencies": { "zod": "^3.23.8" } }, "sha512-waiKk12cRCqyUCWTOX0K1WEVX46+hVUK+zRPzAahDJ7G0TApvbNkuy5wx7aoUyEk++HHde0XuQnshXnt8jsddA=="],
+
+ "@types/ssh2/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
+
+ "cliui/string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="],
+
+ "cliui/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],
+
+ "cliui/wrap-ansi/ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="],
+
+ "yargs/string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="],
+
+ "yargs/string-width/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="],
+
+ "cliui/wrap-ansi/ansi-styles/color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="],
+
+ "yargs/string-width/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],
+
+ "cliui/wrap-ansi/ansi-styles/color-convert/color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="],
+ }
+}
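
> Note: the textual bun.lock above pins every transitive dependency to an exact version and SHA-512 integrity hash. A minimal sketch of how a CI step might exercise it (assumes the `bun` CLI is installed; nothing here is part of the diff itself):

```bash
# Fail instead of silently rewriting bun.lock when it is out of date.
bun install --frozen-lockfile

# Production-only install, matching the `bun install -p` used in the Dockerfile below.
bun install --production
```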
diff --git a/config/apprise_config_example.yml b/config/apprise_config_example.yml
deleted file mode 100644
index 88e33870..00000000
--- a/config/apprise_config_example.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-# Please see the apprise documentation
-urls:
- - tgram://bottoken/ChatID
- - rocket://user:password@hostname/RoomID/Channel
- - ntfy://topic/
diff --git a/config/hosts.yaml b/config/hosts.yaml
deleted file mode 100644
index d40e6697..00000000
--- a/config/hosts.yaml
+++ /dev/null
@@ -1,24 +0,0 @@
-mintimeout: 10000 # The minimum time to wait before querying the same server again, defaults to 5000 Ms
-
-log:
- logsize: 10 # Specify the Size of the log files in MB, default is 1MB
- LogCount: 1 # How many log files should be kept in rotation. Default is 5
-
-tags:
- raspberry: red-200
- private: violet-400
-
-hosts:
- YourHost1:
- url: hetzner
- port: 2375
-
- YourHost2:
- url: 100.78.180.21
- port: 2375
-
-container:
- dozzle: # Container name
- link: https://github.com
- icon: minecraft.png
- tags: private,raspberry
diff --git a/data/.gitignore b/data/.gitignore
new file mode 100644
index 00000000..aed31992
--- /dev/null
+++ b/data/.gitignore
@@ -0,0 +1 @@
+/dockstatapi*
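
> Note: gitignore anchors a pattern to the directory containing the `.gitignore` via a leading `/`, which is why the entry reads `/dockstatapi*` (a `./` prefix is not matched by git). A quick check, assuming a generated file such as `data/dockstatapi.db` exists:

```bash
# Prints the matching .gitignore line if (and only if) the path is ignored.
git check-ignore -v data/dockstatapi.db
```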
diff --git a/dependency-graph.mmd b/dependency-graph.mmd
new file mode 100644
index 00000000..affe0471
--- /dev/null
+++ b/dependency-graph.mmd
@@ -0,0 +1,232 @@
+---
+config:
+ flowchart:
+ defaultRenderer: elk
+---
+
+flowchart LR
+
+subgraph 0["src"]
+1["index.ts"]
+subgraph 6["routes"]
+7["live-stacks.ts"]
+X["live-logs.ts"]
+1I["api-config.ts"]
+1K["docker-manager.ts"]
+1L["docker-stats.ts"]
+1M["docker-websocket.ts"]
+1O["logs.ts"]
+1P["stacks.ts"]
+end
+subgraph 9["core"]
+subgraph A["utils"]
+B["logger.ts"]
+W["helpers.ts"]
+17["calculations.ts"]
+1B["change-me-checker.ts"]
+1C["package-json.ts"]
+1E["swagger-readme.ts"]
+1J["response-handler.ts"]
+end
+subgraph D["database"]
+E["_dbState.ts"]
+F["index.ts"]
+G["backup.ts"]
+J["database.ts"]
+N["helper.ts"]
+O["config.ts"]
+P["containerStats.ts"]
+Q["dockerHosts.ts"]
+R["hostStats.ts"]
+T["logs.ts"]
+U["stacks.ts"]
+end
+subgraph Y["docker"]
+Z["monitor.ts"]
+14["client.ts"]
+15["scheduler.ts"]
+16["store-container-stats.ts"]
+18["store-host-stats.ts"]
+end
+subgraph 10["plugins"]
+11["plugin-manager.ts"]
+1A["loader.ts"]
+end
+subgraph 1Q["stacks"]
+1R["controller.ts"]
+end
+end
+subgraph 1F["middleware"]
+1G["auth.ts"]
+end
+end
+subgraph 2["~"]
+subgraph 3["typings"]
+4["database"]
+8["websocket"]
+H["misc"]
+S["docker"]
+V["docker-compose"]
+12["plugin"]
+19["dockerode"]
+1H["elysiajs"]
+end
+end
+5["elysia-remote-dts"]
+C["path"]
+subgraph I["fs"]
+L["promises"]
+end
+K["bun:sqlite"]
+M["os"]
+13["events"]
+1D["package.json"]
+1N["stream"]
+1-->7
+1-->F
+1-->Z
+1-->15
+1-->1A
+1-->B
+1-->1C
+1-->1E
+1-->1G
+1-->1I
+1-->1K
+1-->1L
+1-->1M
+1-->X
+1-->1O
+1-->1P
+1-->4
+1-->5
+7-->B
+7-->8
+B-->E
+B-->F
+B-->X
+B-->4
+B-->C
+F-->G
+F-->O
+F-->P
+F-->J
+F-->Q
+F-->R
+F-->T
+F-->U
+G-->E
+G-->J
+G-->N
+G-->B
+G-->H
+G-->I
+J-->K
+J-->I
+J-->L
+J-->M
+J-->C
+N-->E
+N-->B
+O-->J
+O-->N
+P-->J
+P-->N
+Q-->J
+Q-->N
+R-->J
+R-->N
+R-->S
+T-->J
+T-->N
+T-->4
+U-->W
+U-->J
+U-->N
+U-->4
+U-->V
+W-->B
+X-->B
+X-->4
+Z-->11
+Z-->F
+Z-->14
+Z-->B
+Z-->S
+11-->B
+11-->S
+11-->12
+11-->13
+14-->B
+14-->S
+15-->F
+15-->16
+15-->18
+15-->B
+15-->4
+16-->B
+16-->F
+16-->14
+16-->17
+18-->F
+18-->14
+18-->W
+18-->B
+18-->S
+18-->19
+1A-->1B
+1A-->B
+1A-->11
+1A-->I
+1A-->C
+1B-->B
+1B-->L
+1C-->1D
+1G-->F
+1G-->B
+1G-->4
+1G-->1H
+1I-->F
+1I-->G
+1I-->11
+1I-->B
+1I-->1C
+1I-->1J
+1I-->1G
+1I-->4
+1I-->I
+1J-->B
+1J-->1H
+1K-->F
+1K-->B
+1K-->1J
+1K-->S
+1L-->F
+1L-->14
+1L-->17
+1L-->W
+1L-->B
+1L-->1J
+1L-->S
+1L-->19
+1M-->F
+1M-->14
+1M-->17
+1M-->B
+1M-->1J
+1M-->1N
+1O-->F
+1O-->B
+1P-->F
+1P-->1R
+1P-->B
+1P-->1J
+1P-->4
+1R-->W
+1R-->F
+1R-->B
+1R-->7
+1R-->4
+1R-->V
+1R-->L
+
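
> Note: the frontmatter at the top of this graph selects Mermaid's ELK layout engine, which copes better with large flowcharts than the default renderer. A sketch of regenerating the SVG locally (assumes the mermaid-cli package; flags per its documented interface):

```bash
# Render the dependency graph to SVG with mermaid-cli (mmdc).
npx -p @mermaid-js/mermaid-cli mmdc -i dependency-graph.mmd -o dependency-graph.svg
```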
diff --git a/dependency-graph.svg b/dependency-graph.svg
new file mode 100644
index 00000000..8e7e54b0
--- /dev/null
+++ b/dependency-graph.svg
@@ -0,0 +1,1474 @@
+<!-- 1,474 lines of generated SVG markup, not reproduced here -->
diff --git a/docker/Dockerfile b/docker/Dockerfile
new file mode 100644
index 00000000..bc09e1bf
--- /dev/null
+++ b/docker/Dockerfile
@@ -0,0 +1,41 @@
+ARG BUILD_DATE
+ARG VCS_REF
+
+FROM oven/bun:alpine AS base
+WORKDIR /base
+
+COPY package.json ./
+RUN bun install -p
+
+COPY . .
+
+FROM oven/bun:alpine AS production
+WORKDIR /DockStatAPI
+
+# Re-declare build args: ARGs declared before the first FROM are not in scope
+# inside build stages, so the labels below would otherwise expand to empty strings.
+ARG BUILD_DATE
+ARG VCS_REF
+
+LABEL org.opencontainers.image.title="DockStatAPI" \
+ org.opencontainers.image.description="A Dockerized DockStatAPI built with Bun on Alpine Linux." \
+ org.opencontainers.image.version="3.0.0" \
+ org.opencontainers.image.authors="info@itsnik.de" \
+ org.opencontainers.image.vendor="Its4Nik" \
+ org.opencontainers.image.licenses="CC BY-NC 4.0" \
+ org.opencontainers.image.created=$BUILD_DATE \
+ org.opencontainers.image.revision=$VCS_REF
+
+RUN apk add --no-cache curl
+
+HEALTHCHECK --timeout=10s --start-period=2s --retries=3 \
+ CMD curl --fail http://localhost:3000/health || exit 1
+
+VOLUME [ "/DockStatAPI/src/plugins" ]
+
+ENV NODE_ENV=production
+ENV LOG_LEVEL=info
+
+EXPOSE 3000
+
+COPY --from=base /base /DockStatAPI
+
+RUN adduser -D DockStatAPI && chown -R DockStatAPI:DockStatAPI /DockStatAPI
+USER DockStatAPI
+
+ENTRYPOINT [ "bun", "run", "src/index.ts" ]
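
> Note: BUILD_DATE and VCS_REF are expected as build arguments so the OCI labels carry real metadata. A minimal sketch of the corresponding invocation (the image tag is illustrative, not part of this diff):

```bash
docker build -f docker/Dockerfile \
  --build-arg BUILD_DATE="$(date -u +%Y-%m-%dT%H:%M:%SZ)" \
  --build-arg VCS_REF="$(git rev-parse --short HEAD)" \
  -t dockstatapi:local .

# The HEALTHCHECK above probes http://localhost:3000/health once the container is up.
docker run -d -p 3000:3000 dockstatapi:local
```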
diff --git a/docker/docker-compose.dev.yaml b/docker/docker-compose.dev.yaml
new file mode 100644
index 00000000..f302c585
--- /dev/null
+++ b/docker/docker-compose.dev.yaml
@@ -0,0 +1,52 @@
+name: "dockstatapi-dev"
+services:
+ socket-proxy:
+ container_name: socket-proxy
+ image: lscr.io/linuxserver/socket-proxy:latest
+ volumes:
+ - /var/run/docker.sock:/var/run/docker.sock:ro
+ restart: unless-stopped
+ read_only: true
+ tmpfs:
+ - /run
+ ports:
+ - 2375:2375
+ environment:
+ - ALLOW_START=1
+ - ALLOW_STOP=1
+ - ALLOW_RESTARTS=1
+ - AUTH=1
+ - BUILD=1
+ - COMMIT=1
+ - CONFIGS=1
+ - CONTAINERS=1
+ - DISABLE_IPV6=1
+ - DISTRIBUTION=1
+ - EVENTS=1
+ - EXEC=1
+ - IMAGES=1
+ - INFO=1
+ - NETWORKS=1
+ - NODES=1
+ - PING=1
+ - PLUGINS=1
+ - POST=1
+ - PROXY_READ_TIMEOUT=240
+ - SECRETS=1
+ - SERVICES=1
+ - SESSION=1
+ - SWARM=1
+ - SYSTEM=1
+ - TASKS=1
+ - VERSION=1
+ - VOLUMES=1
+
+ sqlite-web:
+ container_name: sqlite-web
+ image: ghcr.io/coleifer/sqlite-web:latest
+ ports:
+ - 8080:8080
+ volumes:
+ - ../data:/data:ro
+ environment:
+ - SQLITE_DATABASE=dockstatapi.db
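
> Note: the dev stack exposes the Docker Engine API through the socket proxy on port 2375 (read/write, since POST, START, and STOP are enabled) and serves sqlite-web against the API's database on port 8080. A quick smoke test after bringing it up, with host/port values taken from the compose file above:

```bash
docker compose -f docker/docker-compose.dev.yaml up -d

# The proxy should answer Docker Engine API calls, e.g.:
curl -s http://localhost:2375/version

# sqlite-web UI for browsing dockstatapi.db:
xdg-open http://localhost:8080   # `open` on macOS
```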
diff --git a/dockstatapi.js b/dockstatapi.js
deleted file mode 100644
index d06fb1e7..00000000
--- a/dockstatapi.js
+++ /dev/null
@@ -1,379 +0,0 @@
-const express = require('express');
-const path = require('path');
-const yaml = require('yamljs');
-const Docker = require('dockerode');
-const cors = require('cors');
-const fs = require('fs');
-const { exec } = require('child_process');
-const logger = require('./logger');
-const updateAvailable = require('./modules/updateAvailable')
-const app = express();
-const port = 7070;
-const key = process.env.SECRET || 'CHANGE-ME';
-const skipAuth = process.env.SKIP_AUTH || 'True'
-const cupUrl = process.env.CUP_URL || 'null'
-
-let config = yaml.load('./config/hosts.yaml');
-let hosts = config.hosts;
-let containerConfigs = config.container || {};
-let maxlogsize = config.log.logsize || 1;
-let LogAmount = config.log.LogCount || 5;
-let queryInterval = config.mintimeout || 5000;
-let latestStats = {};
-let hostQueues = {};
-let previousNetworkStats = {};
-let generalStats = {};
-let previousContainerStates = {};
-let previousRunningContainers = {};
-
-
-app.use(cors());
-app.use(express.json());
-
-const authenticateHeader = (req, res, next) => {
- const authHeader = req.headers['authorization'];
-
- if (skipAuth === 'True') {
- next();
- } else {
- if (!authHeader || authHeader !== key) {
- logger.error(`${authHeader} != ${key}`);
- return res.status(401).json({ error: "Unauthorized" });
- }
- else {
- next();
- }
- }
-};
-
-function createDockerClient(hostConfig) {
- return new Docker({
- host: hostConfig.url,
- port: hostConfig.port,
- });
-}
-
-function getTagColor(tag) {
- const tagsConfig = config.tags || {};
- return tagsConfig[tag] || '';
-}
-
-async function getContainerStats(docker, containerId) {
- const container = docker.getContainer(containerId);
- return new Promise((resolve, reject) => {
- container.stats({ stream: false }, (err, stats) => {
- if (err) return reject(err);
- resolve(stats);
- });
- });
-}
-
-async function handleContainerStateChanges(hostName, currentContainers) {
- const currentRunningContainers = currentContainers
- .filter(container => container.state === 'running')
- .reduce((map, container) => {
- map[container.id] = container;
- return map;
- }, {});
-
- const previousHostContainers = previousRunningContainers[hostName] || {};
-
- // Check for containers that have been removed or exited
- for (const containerId of Object.keys(previousHostContainers)) {
- const container = previousHostContainers[containerId];
- if (!currentRunningContainers[containerId]) {
- if (container.state === 'running') {
- // Container removed
- exec(`bash ./scripts/notify.sh REMOVE ${containerId} ${container.name} ${hostName} ${container.state}`, (error, stdout, stderr) => {
- if (error) {
- logger.error(`Error executing REMOVE notify.sh: ${error.message}`);
- } else {
- logger.info(`Container removed: ${container.name} (${containerId}) from host ${hostName}`);
- logger.info(stdout);
- }
- });
- }
- else if (container.state === 'exited') {
- // Container exited
- exec(`bash ./scripts/notify.sh EXIT ${containerId} ${container.name} ${hostName} ${container.state}`, (error, stdout, stderr) => {
- if (error) {
- logger.error(`Error executing EXIT notify.sh: ${error.message}`);
- } else {
- logger.info(`Container exited: ${container.name} (${containerId}) from host ${hostName}`);
- logger.info(stdout);
- }
- });
- }
- }
- }
-
- // Check for new containers or state changes
- for (const containerId of Object.keys(currentRunningContainers)) {
- const container = currentRunningContainers[containerId];
- const previousContainer = previousHostContainers[containerId];
-
- if (!previousContainer) {
- // New container added
- exec(`bash ./scripts/notify.sh ADD ${containerId} ${container.name} ${hostName} ${container.state}`, (error, stdout, stderr) => {
- if (error) {
- logger.error(`Error executing ADD notify.sh: ${error.message}`);
- } else {
- logger.info(`Container added: ${container.name} (${containerId}) to host ${hostName}`);
- logger.info(stdout);
- }
- });
- } else if (previousContainer.state !== container.state) {
- // Container state has changed
- const newState = container.state;
- if (newState === 'exited') {
- exec(`bash ./scripts/notify.sh EXIT ${containerId} ${container.name} ${hostName} ${newState}`, (error, stdout, stderr) => {
- if (error) {
- logger.error(`Error executing EXIT notify.sh: ${error.message}`);
- } else {
- logger.info(`Container exited: ${container.name} (${containerId}) from host ${hostName}`);
- logger.info(stdout);
- }
- });
- } else {
- // Any other state change
- exec(`bash ./scripts/notify.sh ANY ${containerId} ${container.name} ${hostName} ${newState}`, (error, stdout, stderr) => {
- if (error) {
- logger.error(`Error executing ANY notify.sh: ${error.message}`);
- } else {
- logger.info(`Container state changed to ${newState}: ${container.name} (${containerId}) from host ${hostName}`);
- logger.info(stdout);
- }
- });
- }
- }
- }
-
- // Update the previous state for the next comparison
- previousRunningContainers[hostName] = currentRunningContainers;
-}
-
-async function queryHostStats(hostName, hostConfig) {
- logger.debug(`Querying Docker stats for host: ${hostName} (${hostConfig.url}:${hostConfig.port})`);
-
- const docker = createDockerClient(hostConfig);
-
- try {
- const info = await docker.info();
- const totalMemory = info.MemTotal;
- const totalCPUs = info.NCPU;
- const containers = await docker.listContainers({ all: true });
-
- const statsPromises = containers.map(async (container) => {
- try {
- const containerName = container.Names[0].replace('/', '');
- const containerState = container.State;
- const updateAvailableFlag = await updateAvailable(container.Image, cupUrl);
- let networkMode = container.HostConfig.NetworkMode;
-
- // Check if network mode is in the format "container:IDXXXXXXXX"
- if (networkMode.startsWith("container:")) {
- const linkedContainerId = networkMode.split(":")[1];
- const linkedContainer = await docker.getContainer(linkedContainerId).inspect();
- const linkedContainerName = linkedContainer.Name.replace('/', ''); // Remove leading slash
-
- networkMode = `Container: ${linkedContainerName}`; // Format the network mode
- }
-
- if (containerState !== 'running') {
- previousContainerStates[container.Id] = containerState;
- return {
- name: containerName,
- id: container.Id,
- hostName: hostName,
- state: containerState,
- image: container.Image,
- update_available: updateAvailableFlag || false,
- cpu_usage: 0,
- mem_usage: 0,
- mem_limit: 0,
- net_rx: 0,
- net_tx: 0,
- current_net_rx: 0,
- current_net_tx: 0,
- networkMode: networkMode,
- link: containerConfigs[containerName]?.link || '',
- icon: containerConfigs[containerName]?.icon || '',
- tags: getTagColor(containerConfigs[containerName]?.tags || ''),
- };
- }
-
- // Fetch container stats for running containers
- const containerStats = await getContainerStats(docker, container.Id);
- const containerCpuUsage = containerStats.cpu_stats.cpu_usage.total_usage;
- const containerMemoryUsage = containerStats.memory_stats.usage;
-
- let netRx = 0, netTx = 0, currentNetRx = 0, currentNetTx = 0;
-
- if (networkMode !== 'host' && containerStats.networks?.eth0) {
- const previousStats = previousNetworkStats[container.Id] || { rx_bytes: 0, tx_bytes: 0 };
- currentNetRx = containerStats.networks.eth0.rx_bytes - previousStats.rx_bytes;
- currentNetTx = containerStats.networks.eth0.tx_bytes - previousStats.tx_bytes;
-
- previousNetworkStats[container.Id] = {
- rx_bytes: containerStats.networks.eth0.rx_bytes,
- tx_bytes: containerStats.networks.eth0.tx_bytes,
- };
-
- netRx = containerStats.networks.eth0.rx_bytes;
- netTx = containerStats.networks.eth0.tx_bytes;
- }
-
- previousContainerStates[container.Id] = containerState;
- const config = containerConfigs[containerName] || {};
-
- const tagArray = (config.tags || '')
- .split(',')
- .map(tag => {
- const color = getTagColor(tag);
- return color ? `${tag}:${color}` : tag;
- })
- .join(',');
-
- return {
- name: containerName,
- id: container.Id,
- hostName: hostName,
- image: container.Image,
- update_available: updateAvailableFlag || false,
- state: containerState,
- cpu_usage: containerCpuUsage,
- mem_usage: containerMemoryUsage,
- mem_limit: containerStats.memory_stats.limit,
- net_rx: netRx,
- net_tx: netTx,
- current_net_rx: currentNetRx,
- current_net_tx: currentNetTx,
- networkMode: networkMode,
- link: config.link || '',
- icon: config.icon || '',
- tags: tagArray,
- };
- } catch (err) {
- logger.error(`Failed to fetch stats for container ${container.Names[0]} (${container.Id}): ${err.message}`);
- return null;
- }
- });
-
- const hostStats = await Promise.all(statsPromises);
- const validStats = hostStats.filter(stat => stat !== null);
-
- const totalCpuUsage = validStats.reduce((acc, container) => acc + parseFloat(container.cpu_usage), 0);
- const totalMemoryUsage = validStats.reduce((acc, container) => acc + container.mem_usage, 0);
- const memoryUsagePercent = ((totalMemoryUsage / totalMemory) * 100).toFixed(2);
-
- generalStats[hostName] = {
- containerCount: validStats.length,
- totalCPUs: totalCPUs,
- totalMemory: totalMemory,
- cpuUsage: totalCpuUsage,
- memoryUsage: memoryUsagePercent,
- };
-
- latestStats[hostName] = validStats;
-
- logger.debug(`Fetched stats for ${validStats.length} containers from ${hostName}`);
-
- // Handle container state changes
- await handleContainerStateChanges(hostName, validStats);
- } catch (err) {
- logger.error(`Failed to fetch containers from ${hostName}: ${err.message}`);
- }
-}
-
-
-async function handleHostQueue(hostName, hostConfig) {
- while (true) {
- await queryHostStats(hostName, hostConfig);
- await new Promise(resolve => setTimeout(resolve, queryInterval));
- }
-}
-
-// Initialize the host queues
-function initializeHostQueues() {
- for (const [hostName, hostConfig] of Object.entries(hosts)) {
- hostQueues[hostName] = handleHostQueue(hostName, hostConfig);
- }
-}
-
-// Dynamically reloads the yaml file
-function reloadConfig() {
- for (const hostName in hostQueues) {
- hostQueues[hostName] = null;
- }
- try {
- config = yaml.load('./config/hosts.yaml');
- hosts = config.hosts;
- containerConfigs = config.container || {};
- maxlogsize = config.log.logsize || 1;
- LogAmount = config.log.LogCount || 5;
- queryInterval = config.mintimeout || 5000;
-
- logger.info('Configuration reloaded successfully.');
-
- initializeHostQueues();
- } catch (err) {
- logger.error(`Failed to reload configuration: ${err.message}`);
- }
-}
-
-// Watch the YAML file for changes and reload the config
-fs.watchFile('./config/hosts.yaml', (curr, prev) => {
- if (curr.mtime !== prev.mtime) {
- logger.info('Detected change in configuration file. Reloading...');
- reloadConfig();
- }
-});
-
-// Endpoint to get stats
-app.get('/stats', authenticateHeader, (req, res) => {
- res.json(latestStats);
-});
-
-// Endpoint for general Host based statistics
-app.get('/hosts', authenticateHeader, (req, res) => {
- res.json(generalStats);
-});
-
-// Read Only config endpoint
-app.get('/config', authenticateHeader, (req, res) => {
- const filePath = path.join(__dirname, './config/hosts.yaml');
- res.set('Content-Type', 'text/plain'); // Keep as plain text
- fs.readFile(filePath, 'utf8', (err, data) => {
- logger.debug('Requested config file: ' + filePath);
- if (err) {
- logger.error(err);
- res.status(500).send('Error reading file');
- } else {
- res.send(data);
- }
- });
-});
-
-app.get('/', (req, res) => {
- res.redirect(301, '/stats');
-});
-
-app.get('/status', (req, res) => {
- logger.info("Healthcheck requested");
- return res.status(200).send('UP');
-});
-
-// Start the server and log the startup message
-app.listen(port, () => {
- logger.info('=============================== DockStat ===============================')
- logger.info(`DockStatAPI is running on http://localhost:${port}/stats`);
- logger.info(`Minimum timeout between stats queries is: ${queryInterval} milliseconds`);
- logger.info(`The max size for Log files is: ${maxlogsize}MB`)
- logger.info(`The amount of log files to keep is: ${LogAmount}`);
- logger.info(`Secret Key: ${key}`)
- logger.info(`Cup URL: ${cupUrl}`)
- logger.info("Press Ctrl+C to stop the server.");
- logger.info('========================================================================')
-});
-
-initializeHostQueues();
diff --git a/entrypoint.sh b/entrypoint.sh
deleted file mode 100644
index df95b988..00000000
--- a/entrypoint.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env bash
-
-SECRET="${SECRET//\"}"
-
-export SECRET
-
-exec npm run start
\ No newline at end of file
diff --git a/knip.report.md b/knip.report.md
new file mode 100644
index 00000000..883973e8
Binary files /dev/null and b/knip.report.md differ
diff --git a/logger.js b/logger.js
deleted file mode 100644
index ebaacc38..00000000
--- a/logger.js
+++ /dev/null
@@ -1,24 +0,0 @@
-const winston = require('winston');
-const yaml = require('yamljs');
-const config = yaml.load('./config/hosts.yaml');
-
-const maxlogsize = config.log.logsize || 1;
-const LogAmount = config.log.LogCount || 5;
-
-const logger = winston.createLogger({
- level: 'debug',
- format: winston.format.combine(
- winston.format.timestamp(),
- winston.format.json()
- ),
- transports: [
- new winston.transports.Console(),
- new winston.transports.File({
- filename: './logs/dockstat.log',
- maxsize: 1024 * 1024 * maxlogsize,
- maxFiles: LogAmount
- })
- ]
-});
-
-module.exports = logger;
\ No newline at end of file
diff --git a/modules/updateAvailable.js b/modules/updateAvailable.js
deleted file mode 100644
index 1a25ce3e..00000000
--- a/modules/updateAvailable.js
+++ /dev/null
@@ -1,32 +0,0 @@
-const logger = require('../logger');
-
-async function getData(target, url) {
-
- if (url === 'null') {
- return false;
- }
- else {
- try {
- const response = await fetch(`${url}/json`, {
- method: "GET"
- });
- if (!response.ok) {
- throw new Error(`Response status: ${response.status}`);
- }
-
- const json = await response.json();
-
- const images = json.images;
-
- for (const image in images) {
- if (target === image) {
- return images.hasOwnProperty(target);
- }
- }
- } catch (error) {
- logger.error(error.message);
- }
- }
-}
-
-module.exports = getData;
diff --git a/package-lock.json b/package-lock.json
deleted file mode 100644
index 37c8cf27..00000000
--- a/package-lock.json
+++ /dev/null
@@ -1,1548 +0,0 @@
-{
- "name": "dockstatapi",
- "version": "1.0.0",
- "lockfileVersion": 3,
- "requires": true,
- "packages": {
- "": {
- "name": "dockstatapi",
- "version": "1.0.0",
- "license": "ISC",
- "dependencies": {
- "child_process": "^1.0.2",
- "cors": "^2.8.5",
- "dockerode": "^4.0.2",
- "express": "^4.21.0",
- "node-fetch": "^3.3.2",
- "winston": "^3.14.2",
- "yamljs": "^0.3.0"
- }
- },
- "node_modules/@balena/dockerignore": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/@balena/dockerignore/-/dockerignore-1.0.2.tgz",
- "integrity": "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==",
- "license": "Apache-2.0"
- },
- "node_modules/@colors/colors": {
- "version": "1.6.0",
- "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.6.0.tgz",
- "integrity": "sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==",
- "license": "MIT",
- "engines": {
- "node": ">=0.1.90"
- }
- },
- "node_modules/@dabh/diagnostics": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.3.tgz",
- "integrity": "sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==",
- "license": "MIT",
- "dependencies": {
- "colorspace": "1.1.x",
- "enabled": "2.0.x",
- "kuler": "^2.0.0"
- }
- },
- "node_modules/@types/triple-beam": {
- "version": "1.3.5",
- "resolved": "https://registry.npmjs.org/@types/triple-beam/-/triple-beam-1.3.5.tgz",
- "integrity": "sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==",
- "license": "MIT"
- },
- "node_modules/accepts": {
- "version": "1.3.8",
- "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz",
- "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==",
- "license": "MIT",
- "dependencies": {
- "mime-types": "~2.1.34",
- "negotiator": "0.6.3"
- },
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/argparse": {
- "version": "1.0.10",
- "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
- "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
- "license": "MIT",
- "dependencies": {
- "sprintf-js": "~1.0.2"
- }
- },
- "node_modules/array-flatten": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
- "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==",
- "license": "MIT"
- },
- "node_modules/asn1": {
- "version": "0.2.6",
- "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz",
- "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==",
- "license": "MIT",
- "dependencies": {
- "safer-buffer": "~2.1.0"
- }
- },
- "node_modules/async": {
- "version": "3.2.6",
- "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz",
- "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==",
- "license": "MIT"
- },
- "node_modules/balanced-match": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
- "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
- "license": "MIT"
- },
- "node_modules/base64-js": {
- "version": "1.5.1",
- "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
- "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "license": "MIT"
- },
- "node_modules/bcrypt-pbkdf": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
- "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==",
- "license": "BSD-3-Clause",
- "dependencies": {
- "tweetnacl": "^0.14.3"
- }
- },
- "node_modules/bl": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz",
- "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==",
- "license": "MIT",
- "dependencies": {
- "buffer": "^5.5.0",
- "inherits": "^2.0.4",
- "readable-stream": "^3.4.0"
- }
- },
- "node_modules/body-parser": {
- "version": "1.20.3",
- "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz",
- "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==",
- "dependencies": {
- "bytes": "3.1.2",
- "content-type": "~1.0.5",
- "debug": "2.6.9",
- "depd": "2.0.0",
- "destroy": "1.2.0",
- "http-errors": "2.0.0",
- "iconv-lite": "0.4.24",
- "on-finished": "2.4.1",
- "qs": "6.13.0",
- "raw-body": "2.5.2",
- "type-is": "~1.6.18",
- "unpipe": "1.0.0"
- },
- "engines": {
- "node": ">= 0.8",
- "npm": "1.2.8000 || >= 1.4.16"
- }
- },
- "node_modules/body-parser/node_modules/debug": {
- "version": "2.6.9",
- "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
- "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
- "dependencies": {
- "ms": "2.0.0"
- }
- },
- "node_modules/body-parser/node_modules/ms": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
- "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
- },
- "node_modules/brace-expansion": {
- "version": "1.1.11",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
- "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- }
- },
- "node_modules/buffer": {
- "version": "5.7.1",
- "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz",
- "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "license": "MIT",
- "dependencies": {
- "base64-js": "^1.3.1",
- "ieee754": "^1.1.13"
- }
- },
- "node_modules/buildcheck": {
- "version": "0.0.6",
- "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.6.tgz",
- "integrity": "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==",
- "optional": true,
- "engines": {
- "node": ">=10.0.0"
- }
- },
- "node_modules/bytes": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
- "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
- "engines": {
- "node": ">= 0.8"
- }
- },
- "node_modules/call-bind": {
- "version": "1.0.7",
- "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz",
- "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==",
- "dependencies": {
- "es-define-property": "^1.0.0",
- "es-errors": "^1.3.0",
- "function-bind": "^1.1.2",
- "get-intrinsic": "^1.2.4",
- "set-function-length": "^1.2.1"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/child_process": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/child_process/-/child_process-1.0.2.tgz",
- "integrity": "sha512-Wmza/JzL0SiWz7kl6MhIKT5ceIlnFPJX+lwUGj7Clhy5MMldsSoJR0+uvRzOS5Kv45Mq7t1PoE8TsOA9bzvb6g==",
- "license": "ISC"
- },
- "node_modules/chownr": {
- "version": "1.1.4",
- "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz",
- "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==",
- "license": "ISC"
- },
- "node_modules/color": {
- "version": "3.2.1",
- "resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz",
- "integrity": "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==",
- "license": "MIT",
- "dependencies": {
- "color-convert": "^1.9.3",
- "color-string": "^1.6.0"
- }
- },
- "node_modules/color-name": {
- "version": "1.1.4",
- "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
- "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
- "license": "MIT"
- },
- "node_modules/color-string": {
- "version": "1.9.1",
- "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz",
- "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==",
- "license": "MIT",
- "dependencies": {
- "color-name": "^1.0.0",
- "simple-swizzle": "^0.2.2"
- }
- },
- "node_modules/color/node_modules/color-convert": {
- "version": "1.9.3",
- "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
- "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
- "license": "MIT",
- "dependencies": {
- "color-name": "1.1.3"
- }
- },
- "node_modules/color/node_modules/color-name": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
- "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
- "license": "MIT"
- },
- "node_modules/colorspace": {
- "version": "1.1.4",
- "resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.4.tgz",
- "integrity": "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==",
- "license": "MIT",
- "dependencies": {
- "color": "^3.1.3",
- "text-hex": "1.0.x"
- }
- },
- "node_modules/concat-map": {
- "version": "0.0.1",
- "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
- "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
- "license": "MIT"
- },
- "node_modules/content-disposition": {
- "version": "0.5.4",
- "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz",
- "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==",
- "license": "MIT",
- "dependencies": {
- "safe-buffer": "5.2.1"
- },
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/content-type": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
- "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/cookie": {
- "version": "0.6.0",
- "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz",
- "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/cookie-signature": {
- "version": "1.0.6",
- "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
- "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==",
- "license": "MIT"
- },
- "node_modules/cors": {
- "version": "2.8.5",
- "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz",
- "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==",
- "license": "MIT",
- "dependencies": {
- "object-assign": "^4",
- "vary": "^1"
- },
- "engines": {
- "node": ">= 0.10"
- }
- },
- "node_modules/cpu-features": {
- "version": "0.0.10",
- "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz",
- "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==",
- "hasInstallScript": true,
- "optional": true,
- "dependencies": {
- "buildcheck": "~0.0.6",
- "nan": "^2.19.0"
- },
- "engines": {
- "node": ">=10.0.0"
- }
- },
- "node_modules/data-uri-to-buffer": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz",
- "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==",
- "license": "MIT",
- "engines": {
- "node": ">= 12"
- }
- },
- "node_modules/debug": {
- "version": "4.3.6",
- "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz",
- "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==",
- "license": "MIT",
- "dependencies": {
- "ms": "2.1.2"
- },
- "engines": {
- "node": ">=6.0"
- },
- "peerDependenciesMeta": {
- "supports-color": {
- "optional": true
- }
- }
- },
- "node_modules/define-data-property": {
- "version": "1.1.4",
- "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz",
- "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==",
- "dependencies": {
- "es-define-property": "^1.0.0",
- "es-errors": "^1.3.0",
- "gopd": "^1.0.1"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/depd": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
- "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==",
- "engines": {
- "node": ">= 0.8"
- }
- },
- "node_modules/destroy": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz",
- "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==",
- "engines": {
- "node": ">= 0.8",
- "npm": "1.2.8000 || >= 1.4.16"
- }
- },
- "node_modules/docker-modem": {
- "version": "5.0.3",
- "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.3.tgz",
- "integrity": "sha512-89zhop5YVhcPEt5FpUFGr3cDyceGhq/F9J+ZndQ4KfqNvfbJpPMfgeixFgUj5OjCYAboElqODxY5Z1EBsSa6sg==",
- "license": "Apache-2.0",
- "dependencies": {
- "debug": "^4.1.1",
- "readable-stream": "^3.5.0",
- "split-ca": "^1.0.1",
- "ssh2": "^1.15.0"
- },
- "engines": {
- "node": ">= 8.0"
- }
- },
- "node_modules/dockerode": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.2.tgz",
- "integrity": "sha512-9wM1BVpVMFr2Pw3eJNXrYYt6DT9k0xMcsSCjtPvyQ+xa1iPg/Mo3T/gUcwI0B2cczqCeCYRPF8yFYDwtFXT0+w==",
- "license": "Apache-2.0",
- "dependencies": {
- "@balena/dockerignore": "^1.0.2",
- "docker-modem": "^5.0.3",
- "tar-fs": "~2.0.1"
- },
- "engines": {
- "node": ">= 8.0"
- }
- },
- "node_modules/ee-first": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
- "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="
- },
- "node_modules/enabled": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/enabled/-/enabled-2.0.0.tgz",
- "integrity": "sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==",
- "license": "MIT"
- },
- "node_modules/encodeurl": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz",
- "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==",
- "engines": {
- "node": ">= 0.8"
- }
- },
- "node_modules/end-of-stream": {
- "version": "1.4.4",
- "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
- "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==",
- "license": "MIT",
- "dependencies": {
- "once": "^1.4.0"
- }
- },
- "node_modules/es-define-property": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz",
- "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==",
- "dependencies": {
- "get-intrinsic": "^1.2.4"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/es-errors": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
- "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/escape-html": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
- "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow=="
- },
- "node_modules/etag": {
- "version": "1.8.1",
- "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz",
- "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==",
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/express": {
- "version": "4.21.0",
- "resolved": "https://registry.npmjs.org/express/-/express-4.21.0.tgz",
- "integrity": "sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng==",
- "dependencies": {
- "accepts": "~1.3.8",
- "array-flatten": "1.1.1",
- "body-parser": "1.20.3",
- "content-disposition": "0.5.4",
- "content-type": "~1.0.4",
- "cookie": "0.6.0",
- "cookie-signature": "1.0.6",
- "debug": "2.6.9",
- "depd": "2.0.0",
- "encodeurl": "~2.0.0",
- "escape-html": "~1.0.3",
- "etag": "~1.8.1",
- "finalhandler": "1.3.1",
- "fresh": "0.5.2",
- "http-errors": "2.0.0",
- "merge-descriptors": "1.0.3",
- "methods": "~1.1.2",
- "on-finished": "2.4.1",
- "parseurl": "~1.3.3",
- "path-to-regexp": "0.1.10",
- "proxy-addr": "~2.0.7",
- "qs": "6.13.0",
- "range-parser": "~1.2.1",
- "safe-buffer": "5.2.1",
- "send": "0.19.0",
- "serve-static": "1.16.2",
- "setprototypeof": "1.2.0",
- "statuses": "2.0.1",
- "type-is": "~1.6.18",
- "utils-merge": "1.0.1",
- "vary": "~1.1.2"
- },
- "engines": {
- "node": ">= 0.10.0"
- }
- },
- "node_modules/express/node_modules/debug": {
- "version": "2.6.9",
- "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
- "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
- "license": "MIT",
- "dependencies": {
- "ms": "2.0.0"
- }
- },
- "node_modules/express/node_modules/ms": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
- "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
- "license": "MIT"
- },
- "node_modules/fecha": {
- "version": "4.2.3",
- "resolved": "https://registry.npmjs.org/fecha/-/fecha-4.2.3.tgz",
- "integrity": "sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw==",
- "license": "MIT"
- },
- "node_modules/fetch-blob": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz",
- "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/jimmywarting"
- },
- {
- "type": "paypal",
- "url": "https://paypal.me/jimmywarting"
- }
- ],
- "license": "MIT",
- "dependencies": {
- "node-domexception": "^1.0.0",
- "web-streams-polyfill": "^3.0.3"
- },
- "engines": {
- "node": "^12.20 || >= 14.13"
- }
- },
- "node_modules/finalhandler": {
- "version": "1.3.1",
- "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz",
- "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==",
- "dependencies": {
- "debug": "2.6.9",
- "encodeurl": "~2.0.0",
- "escape-html": "~1.0.3",
- "on-finished": "2.4.1",
- "parseurl": "~1.3.3",
- "statuses": "2.0.1",
- "unpipe": "~1.0.0"
- },
- "engines": {
- "node": ">= 0.8"
- }
- },
- "node_modules/finalhandler/node_modules/debug": {
- "version": "2.6.9",
- "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
- "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
- "dependencies": {
- "ms": "2.0.0"
- }
- },
- "node_modules/finalhandler/node_modules/ms": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
- "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
- },
- "node_modules/fn.name": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/fn.name/-/fn.name-1.1.0.tgz",
- "integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==",
- "license": "MIT"
- },
- "node_modules/formdata-polyfill": {
- "version": "4.0.10",
- "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz",
- "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==",
- "license": "MIT",
- "dependencies": {
- "fetch-blob": "^3.1.2"
- },
- "engines": {
- "node": ">=12.20.0"
- }
- },
- "node_modules/forwarded": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
- "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/fresh": {
- "version": "0.5.2",
- "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
- "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==",
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/fs-constants": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz",
- "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==",
- "license": "MIT"
- },
- "node_modules/fs.realpath": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
- "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==",
- "license": "ISC"
- },
- "node_modules/function-bind": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
- "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/get-intrinsic": {
- "version": "1.2.4",
- "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz",
- "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==",
- "dependencies": {
- "es-errors": "^1.3.0",
- "function-bind": "^1.1.2",
- "has-proto": "^1.0.1",
- "has-symbols": "^1.0.3",
- "hasown": "^2.0.0"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/glob": {
- "version": "7.2.3",
- "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
- "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
- "deprecated": "Glob versions prior to v9 are no longer supported",
- "license": "ISC",
- "dependencies": {
- "fs.realpath": "^1.0.0",
- "inflight": "^1.0.4",
- "inherits": "2",
- "minimatch": "^3.1.1",
- "once": "^1.3.0",
- "path-is-absolute": "^1.0.0"
- },
- "engines": {
- "node": "*"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/gopd": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz",
- "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==",
- "dependencies": {
- "get-intrinsic": "^1.1.3"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/has-property-descriptors": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz",
- "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==",
- "dependencies": {
- "es-define-property": "^1.0.0"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/has-proto": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz",
- "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==",
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/has-symbols": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz",
- "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==",
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/hasown": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
- "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
- "dependencies": {
- "function-bind": "^1.1.2"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/http-errors": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz",
- "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==",
- "dependencies": {
- "depd": "2.0.0",
- "inherits": "2.0.4",
- "setprototypeof": "1.2.0",
- "statuses": "2.0.1",
- "toidentifier": "1.0.1"
- },
- "engines": {
- "node": ">= 0.8"
- }
- },
- "node_modules/iconv-lite": {
- "version": "0.4.24",
- "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
- "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
- "dependencies": {
- "safer-buffer": ">= 2.1.2 < 3"
- },
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/ieee754": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
- "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "license": "BSD-3-Clause"
- },
- "node_modules/inflight": {
- "version": "1.0.6",
- "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
- "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
- "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.",
- "license": "ISC",
- "dependencies": {
- "once": "^1.3.0",
- "wrappy": "1"
- }
- },
- "node_modules/inherits": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
- "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
- "license": "ISC"
- },
- "node_modules/ipaddr.js": {
- "version": "1.9.1",
- "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
- "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.10"
- }
- },
- "node_modules/is-arrayish": {
- "version": "0.3.2",
- "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz",
- "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==",
- "license": "MIT"
- },
- "node_modules/is-stream": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
- "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
- "license": "MIT",
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/kuler": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/kuler/-/kuler-2.0.0.tgz",
- "integrity": "sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==",
- "license": "MIT"
- },
- "node_modules/logform": {
- "version": "2.6.1",
- "resolved": "https://registry.npmjs.org/logform/-/logform-2.6.1.tgz",
- "integrity": "sha512-CdaO738xRapbKIMVn2m4F6KTj4j7ooJ8POVnebSgKo3KBz5axNXRAL7ZdRjIV6NOr2Uf4vjtRkxrFETOioCqSA==",
- "license": "MIT",
- "dependencies": {
- "@colors/colors": "1.6.0",
- "@types/triple-beam": "^1.3.2",
- "fecha": "^4.2.0",
- "ms": "^2.1.1",
- "safe-stable-stringify": "^2.3.1",
- "triple-beam": "^1.3.0"
- },
- "engines": {
- "node": ">= 12.0.0"
- }
- },
- "node_modules/media-typer": {
- "version": "0.3.0",
- "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
- "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==",
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/merge-descriptors": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz",
- "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==",
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/methods": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
- "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/mime": {
- "version": "1.6.0",
- "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
- "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==",
- "bin": {
- "mime": "cli.js"
- },
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/mime-db": {
- "version": "1.52.0",
- "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
- "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/mime-types": {
- "version": "2.1.35",
- "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
- "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
- "license": "MIT",
- "dependencies": {
- "mime-db": "1.52.0"
- },
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/minimatch": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
- "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
- "license": "ISC",
- "dependencies": {
- "brace-expansion": "^1.1.7"
- },
- "engines": {
- "node": "*"
- }
- },
- "node_modules/mkdirp-classic": {
- "version": "0.5.3",
- "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz",
- "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==",
- "license": "MIT"
- },
- "node_modules/ms": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
- "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
- "license": "MIT"
- },
- "node_modules/nan": {
- "version": "2.20.0",
- "resolved": "https://registry.npmjs.org/nan/-/nan-2.20.0.tgz",
- "integrity": "sha512-bk3gXBZDGILuuo/6sKtr0DQmSThYHLtNCdSdXk9YkxD/jK6X2vmCyyXBBxyqZ4XcnzTyYEAThfX3DCEnLf6igw==",
- "license": "MIT",
- "optional": true
- },
- "node_modules/negotiator": {
- "version": "0.6.3",
- "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz",
- "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/node-domexception": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz",
- "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/jimmywarting"
- },
- {
- "type": "github",
- "url": "https://paypal.me/jimmywarting"
- }
- ],
- "license": "MIT",
- "engines": {
- "node": ">=10.5.0"
- }
- },
- "node_modules/node-fetch": {
- "version": "3.3.2",
- "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz",
- "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==",
- "license": "MIT",
- "dependencies": {
- "data-uri-to-buffer": "^4.0.0",
- "fetch-blob": "^3.1.4",
- "formdata-polyfill": "^4.0.10"
- },
- "engines": {
- "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/node-fetch"
- }
- },
- "node_modules/object-assign": {
- "version": "4.1.1",
- "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
- "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==",
- "license": "MIT",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/object-inspect": {
- "version": "1.13.2",
- "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz",
- "integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==",
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/on-finished": {
- "version": "2.4.1",
- "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz",
- "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==",
- "dependencies": {
- "ee-first": "1.1.1"
- },
- "engines": {
- "node": ">= 0.8"
- }
- },
- "node_modules/once": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
- "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
- "license": "ISC",
- "dependencies": {
- "wrappy": "1"
- }
- },
- "node_modules/one-time": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/one-time/-/one-time-1.0.0.tgz",
- "integrity": "sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==",
- "license": "MIT",
- "dependencies": {
- "fn.name": "1.x.x"
- }
- },
- "node_modules/parseurl": {
- "version": "1.3.3",
- "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
- "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==",
- "engines": {
- "node": ">= 0.8"
- }
- },
- "node_modules/path-is-absolute": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
- "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
- "license": "MIT",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/path-to-regexp": {
- "version": "0.1.10",
- "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.10.tgz",
- "integrity": "sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w=="
- },
- "node_modules/proxy-addr": {
- "version": "2.0.7",
- "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",
- "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==",
- "license": "MIT",
- "dependencies": {
- "forwarded": "0.2.0",
- "ipaddr.js": "1.9.1"
- },
- "engines": {
- "node": ">= 0.10"
- }
- },
- "node_modules/pump": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
- "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==",
- "license": "MIT",
- "dependencies": {
- "end-of-stream": "^1.1.0",
- "once": "^1.3.1"
- }
- },
- "node_modules/qs": {
- "version": "6.13.0",
- "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz",
- "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==",
- "dependencies": {
- "side-channel": "^1.0.6"
- },
- "engines": {
- "node": ">=0.6"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/range-parser": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz",
- "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==",
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/raw-body": {
- "version": "2.5.2",
- "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz",
- "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==",
- "dependencies": {
- "bytes": "3.1.2",
- "http-errors": "2.0.0",
- "iconv-lite": "0.4.24",
- "unpipe": "1.0.0"
- },
- "engines": {
- "node": ">= 0.8"
- }
- },
- "node_modules/readable-stream": {
- "version": "3.6.2",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
- "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
- "license": "MIT",
- "dependencies": {
- "inherits": "^2.0.3",
- "string_decoder": "^1.1.1",
- "util-deprecate": "^1.0.1"
- },
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/safe-buffer": {
- "version": "5.2.1",
- "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
- "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "license": "MIT"
- },
- "node_modules/safe-stable-stringify": {
- "version": "2.4.3",
- "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.4.3.tgz",
- "integrity": "sha512-e2bDA2WJT0wxseVd4lsDP4+3ONX6HpMXQa1ZhFQ7SU+GjvORCmShbCMltrtIDfkYhVHrOcPtj+KhmDBdPdZD1g==",
- "license": "MIT",
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/safer-buffer": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
- "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
- "license": "MIT"
- },
- "node_modules/send": {
- "version": "0.19.0",
- "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz",
- "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==",
- "dependencies": {
- "debug": "2.6.9",
- "depd": "2.0.0",
- "destroy": "1.2.0",
- "encodeurl": "~1.0.2",
- "escape-html": "~1.0.3",
- "etag": "~1.8.1",
- "fresh": "0.5.2",
- "http-errors": "2.0.0",
- "mime": "1.6.0",
- "ms": "2.1.3",
- "on-finished": "2.4.1",
- "range-parser": "~1.2.1",
- "statuses": "2.0.1"
- },
- "engines": {
- "node": ">= 0.8.0"
- }
- },
- "node_modules/send/node_modules/debug": {
- "version": "2.6.9",
- "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
- "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
- "dependencies": {
- "ms": "2.0.0"
- }
- },
- "node_modules/send/node_modules/debug/node_modules/ms": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
- "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
- },
- "node_modules/send/node_modules/encodeurl": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz",
- "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==",
- "engines": {
- "node": ">= 0.8"
- }
- },
- "node_modules/send/node_modules/ms": {
- "version": "2.1.3",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
- "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
- },
- "node_modules/serve-static": {
- "version": "1.16.2",
- "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz",
- "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==",
- "dependencies": {
- "encodeurl": "~2.0.0",
- "escape-html": "~1.0.3",
- "parseurl": "~1.3.3",
- "send": "0.19.0"
- },
- "engines": {
- "node": ">= 0.8.0"
- }
- },
- "node_modules/set-function-length": {
- "version": "1.2.2",
- "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz",
- "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==",
- "dependencies": {
- "define-data-property": "^1.1.4",
- "es-errors": "^1.3.0",
- "function-bind": "^1.1.2",
- "get-intrinsic": "^1.2.4",
- "gopd": "^1.0.1",
- "has-property-descriptors": "^1.0.2"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/setprototypeof": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
- "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="
- },
- "node_modules/side-channel": {
- "version": "1.0.6",
- "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz",
- "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==",
- "dependencies": {
- "call-bind": "^1.0.7",
- "es-errors": "^1.3.0",
- "get-intrinsic": "^1.2.4",
- "object-inspect": "^1.13.1"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/simple-swizzle": {
- "version": "0.2.2",
- "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz",
- "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==",
- "license": "MIT",
- "dependencies": {
- "is-arrayish": "^0.3.1"
- }
- },
- "node_modules/split-ca": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/split-ca/-/split-ca-1.0.1.tgz",
- "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==",
- "license": "ISC"
- },
- "node_modules/sprintf-js": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
- "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==",
- "license": "BSD-3-Clause"
- },
- "node_modules/ssh2": {
- "version": "1.15.0",
- "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.15.0.tgz",
- "integrity": "sha512-C0PHgX4h6lBxYx7hcXwu3QWdh4tg6tZZsTfXcdvc5caW/EMxaB4H9dWsl7qk+F7LAW762hp8VbXOX7x4xUYvEw==",
- "hasInstallScript": true,
- "dependencies": {
- "asn1": "^0.2.6",
- "bcrypt-pbkdf": "^1.0.2"
- },
- "engines": {
- "node": ">=10.16.0"
- },
- "optionalDependencies": {
- "cpu-features": "~0.0.9",
- "nan": "^2.18.0"
- }
- },
- "node_modules/stack-trace": {
- "version": "0.0.10",
- "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz",
- "integrity": "sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==",
- "license": "MIT",
- "engines": {
- "node": "*"
- }
- },
- "node_modules/statuses": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
- "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==",
- "engines": {
- "node": ">= 0.8"
- }
- },
- "node_modules/string_decoder": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
- "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
- "license": "MIT",
- "dependencies": {
- "safe-buffer": "~5.2.0"
- }
- },
- "node_modules/tar-fs": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.0.1.tgz",
- "integrity": "sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA==",
- "license": "MIT",
- "dependencies": {
- "chownr": "^1.1.1",
- "mkdirp-classic": "^0.5.2",
- "pump": "^3.0.0",
- "tar-stream": "^2.0.0"
- }
- },
- "node_modules/tar-stream": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz",
- "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==",
- "license": "MIT",
- "dependencies": {
- "bl": "^4.0.3",
- "end-of-stream": "^1.4.1",
- "fs-constants": "^1.0.0",
- "inherits": "^2.0.3",
- "readable-stream": "^3.1.1"
- },
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/text-hex": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz",
- "integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==",
- "license": "MIT"
- },
- "node_modules/toidentifier": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
- "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==",
- "engines": {
- "node": ">=0.6"
- }
- },
- "node_modules/triple-beam": {
- "version": "1.4.1",
- "resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.4.1.tgz",
- "integrity": "sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg==",
- "license": "MIT",
- "engines": {
- "node": ">= 14.0.0"
- }
- },
- "node_modules/tweetnacl": {
- "version": "0.14.5",
- "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
- "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==",
- "license": "Unlicense"
- },
- "node_modules/type-is": {
- "version": "1.6.18",
- "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
- "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==",
- "dependencies": {
- "media-typer": "0.3.0",
- "mime-types": "~2.1.24"
- },
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/unpipe": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
- "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==",
- "engines": {
- "node": ">= 0.8"
- }
- },
- "node_modules/util-deprecate": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
- "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
- "license": "MIT"
- },
- "node_modules/utils-merge": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
- "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.4.0"
- }
- },
- "node_modules/vary": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz",
- "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.8"
- }
- },
- "node_modules/web-streams-polyfill": {
- "version": "3.3.3",
- "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz",
- "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==",
- "license": "MIT",
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/winston": {
- "version": "3.14.2",
- "resolved": "https://registry.npmjs.org/winston/-/winston-3.14.2.tgz",
- "integrity": "sha512-CO8cdpBB2yqzEf8v895L+GNKYJiEq8eKlHU38af3snQBQ+sdAIUepjMSguOIJC7ICbzm0ZI+Af2If4vIJrtmOg==",
- "license": "MIT",
- "dependencies": {
- "@colors/colors": "^1.6.0",
- "@dabh/diagnostics": "^2.0.2",
- "async": "^3.2.3",
- "is-stream": "^2.0.0",
- "logform": "^2.6.0",
- "one-time": "^1.0.0",
- "readable-stream": "^3.4.0",
- "safe-stable-stringify": "^2.3.1",
- "stack-trace": "0.0.x",
- "triple-beam": "^1.3.0",
- "winston-transport": "^4.7.0"
- },
- "engines": {
- "node": ">= 12.0.0"
- }
- },
- "node_modules/winston-transport": {
- "version": "4.7.1",
- "resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.7.1.tgz",
- "integrity": "sha512-wQCXXVgfv/wUPOfb2x0ruxzwkcZfxcktz6JIMUaPLmcNhO4bZTwA/WtDWK74xV3F2dKu8YadrFv0qhwYjVEwhA==",
- "license": "MIT",
- "dependencies": {
- "logform": "^2.6.1",
- "readable-stream": "^3.6.2",
- "triple-beam": "^1.3.0"
- },
- "engines": {
- "node": ">= 12.0.0"
- }
- },
- "node_modules/wrappy": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
- "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
- "license": "ISC"
- },
- "node_modules/yamljs": {
- "version": "0.3.0",
- "resolved": "https://registry.npmjs.org/yamljs/-/yamljs-0.3.0.tgz",
- "integrity": "sha512-C/FsVVhht4iPQYXOInoxUM/1ELSf9EsgKH34FofQOp6hwCPrW4vG4w5++TED3xRUo8gD7l0P1J1dLlDYzODsTQ==",
- "license": "MIT",
- "dependencies": {
- "argparse": "^1.0.7",
- "glob": "^7.0.5"
- },
- "bin": {
- "json2yaml": "bin/json2yaml",
- "yaml2json": "bin/yaml2json"
- }
- }
- }
-}
\ No newline at end of file
diff --git a/package.json b/package.json
index 372caad0..c4e322e6 100644
--- a/package.json
+++ b/package.json
@@ -1,22 +1,60 @@
{
"name": "dockstatapi",
- "version": "1.0.0",
- "description": "API for docker hosts using dockerode",
- "main": "dockerstatsapi.js",
+ "author": {
+ "email": "info@itsnik.de",
+ "name": "ItsNik",
+ "url": "https://github.com/Its4Nik"
+ },
+ "license": "CC BY-NC 4.0",
+ "contributors": [],
+ "description": "DockStatAPI is an API backend featuring plugins and more for DockStat",
+ "version": "3.0.0",
"scripts": {
- "start": "node dockstatapi.js",
- "test": "echo \"Error: no test specified\" && exit 1"
+ "start": "cross-env NODE_ENV=production LOG_LEVEL=info bun run src/index.ts",
+ "start:docker": "bun run build:docker && docker run -p 3000:3000 --rm -d --name dockstatapi -v 'plugins:/DockStatAPI/src/plugins' dockstatapi:local",
+ "dev": "docker compose -f docker/docker-compose.dev.yaml up -d && cross-env NODE_ENV=dev bun run --watch src/index.ts",
+ "dev:clean": "bun dev ; echo '\nExiting...' ; bun clean",
+ "build": "bun build --target bun src/index.ts --outdir ./dist",
+ "build:prod": "NODE_ENV=production bun build --no-native --compile --minify-whitespace --minify-syntax --target bun --outfile server ./src/index.ts",
+ "build:docker": "docker build -f docker/Dockerfile . -t 'dockstatapi:local'",
+ "clean": "bun run clean:win || bun run clean:lin",
+ "clean:win": "node -e \"process.exit(process.platform === 'win32' ? 0 : 1)\" && cmd /c del /Q data/dockstatapi* && cmd /c del /Q reports/markdown/*.md && echo 'success'",
+ "clean:lin": "node -e \"process.exit(process.platform !== 'win32' ? 0 : 1)\" && rm -f data/dockstatapi* && rm -f reports/markdown/*.md && echo 'success'",
+ "knip": "knip",
+ "lint": "biome check --formatter-enabled=true --linter-enabled=true --organize-imports-enabled=true --fix src"
},
- "keywords": [],
- "author": "Its4Nik",
- "license": "ISC",
"dependencies": {
- "child_process": "^1.0.2",
- "cors": "^2.8.5",
- "dockerode": "^4.0.2",
- "express": "^4.21.0",
- "node-fetch": "^3.3.2",
- "winston": "^3.14.2",
- "yamljs": "^0.3.0"
- }
-}
\ No newline at end of file
+ "@elysiajs/server-timing": "^1.3.0",
+ "@elysiajs/static": "^1.3.0",
+ "@elysiajs/swagger": "^1.3.0",
+ "chalk": "^5.4.1",
+ "date-fns": "^4.1.0",
+ "docker-compose": "^1.2.0",
+ "dockerode": "^4.0.6",
+ "elysia": "latest",
+ "elysia-remote-dts": "^1.0.2",
+ "knip": "latest",
+ "logestic": "^1.2.4",
+ "split2": "^4.2.0",
+ "winston": "^3.17.0",
+ "yaml": "^2.7.1"
+ },
+ "devDependencies": {
+ "@biomejs/biome": "1.9.4",
+ "@types/bun": "latest",
+ "@types/dockerode": "^3.3.38",
+ "@types/node": "^22.15.17",
+ "@types/split2": "^4.2.3",
+ "bun-types": "latest",
+ "cross-env": "^7.0.3",
+ "logform": "^2.7.0",
+ "typescript": "^5.8.3",
+ "wrap-ansi": "^9.0.0"
+ },
+ "module": "src/index.js",
+ "trustedDependencies": [
+ "protobufjs"
+ ],
+ "type": "module",
+ "private": true
+}
diff --git a/public/404.html b/public/404.html
new file mode 100644
index 00000000..39b107d4
--- /dev/null
+++ b/public/404.html
@@ -0,0 +1,99 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+  <meta charset="UTF-8">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  <title>404 - Page Not Found</title>
+</head>
+<body>
+  <h1>404</h1>
+  <p>Oops! The page you're looking for doesn't exist.</p>
+</body>
+</html>
\ No newline at end of file
diff --git a/public/DockStat.png b/public/DockStat.png
new file mode 100644
index 00000000..d375bd49
Binary files /dev/null and b/public/DockStat.png differ
diff --git a/scripts/install_apprise.sh b/scripts/install_apprise.sh
deleted file mode 100644
index 7506d0e8..00000000
--- a/scripts/install_apprise.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/bash
-
-VENV_DIR="/api"
-
-apk update
-apk add python3 py3-pip py3-virtualenv
-
-python3 -m venv "$VENV_DIR"
-
-. "$VENV_DIR/bin/activate"
-
-pip install apprise
-
-deactivate
-
-echo "Apprise has been successfully installed in the virtual environment."
diff --git a/scripts/notify.sh b/scripts/notify.sh
deleted file mode 100755
index 54dc2262..00000000
--- a/scripts/notify.sh
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/bin/bash
-
-NOTIFY_TYPE=$1 # ADD, REMOVE, EXIT, ANY
-CONTAINER_ID=$2 # Container ID
-CONTAINER_NAME=$3 # Container Name
-HOST=$4 # Host Name
-STATE=$5 # Current State
-
-ADD_MESSAGE="${ADD_MESSAGE:-🆕 Container Added: $CONTAINER_NAME ($CONTAINER_ID) on $HOST}"
-REMOVE_MESSAGE="${REMOVE_MESSAGE:-🚫 Container Removed: $CONTAINER_NAME ($CONTAINER_ID) on $HOST}"
-EXIT_MESSAGE="${EXIT_MESSAGE:-❌ Container Exited: $CONTAINER_NAME ($CONTAINER_ID) on $HOST}"
-ANY_MESSAGE="${ANY_MESSAGE:-⚠️ Container State Changed: $CONTAINER_NAME ($CONTAINER_ID) on $HOST - New State: $STATE}"
-
-case "$NOTIFY_TYPE" in
- ADD)
- MESSAGE="$ADD_MESSAGE"
- ;;
- REMOVE)
- MESSAGE="$REMOVE_MESSAGE"
- ;;
- EXIT)
- MESSAGE="$EXIT_MESSAGE"
- ;;
- ANY)
- MESSAGE="$ANY_MESSAGE"
- ;;
- *)
- MESSAGE="Unknown action for $CONTAINER_NAME ($CONTAINER_ID) on $HOST"
- ;;
-esac
-
-if [[ ! -f ./config/apprise_config.yml ]]; then
- echo -n "No Apprise configuration found, aborting."
- exit 1
-fi
-
-# Send notification via Apprise
-
-### PYTHON ENVIRONMENT: ###
-. /api/bin/activate
-
-apprise -b "$MESSAGE" --config ./config/apprise_config.yml
-
-deactivate
-###########################
-
-exit 0
diff --git a/src/core/database/_dbState.ts b/src/core/database/_dbState.ts
new file mode 100644
index 00000000..e159ca05
--- /dev/null
+++ b/src/core/database/_dbState.ts
@@ -0,0 +1,5 @@
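+// Module-level flag shared across the database layer; helper.ts reads it to pause writes during backups.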
+export let backupInProgress = false;
+
+export function setBackupInProgress(val: boolean) {
+ backupInProgress = val;
+}
diff --git a/src/core/database/backup.ts b/src/core/database/backup.ts
new file mode 100644
index 00000000..4efa130c
--- /dev/null
+++ b/src/core/database/backup.ts
@@ -0,0 +1,163 @@
+import { copyFileSync, existsSync, readdirSync } from "node:fs";
+import { logger } from "~/core/utils/logger";
+import type { BackupInfo } from "~/typings/misc";
+import { backupInProgress, setBackupInProgress } from "./_dbState";
+import { db } from "./database";
+import { executeDbOperation } from "./helper";
+
+export const backupDir = "data/";
+
+export async function backupDatabase(): Promise<string> {
+ if (backupInProgress) {
+ logger.error("Backup attempt blocked: Another backup already in progress");
+ throw new Error("Backup already in progress");
+ }
+
+ logger.debug("Starting database backup process...");
+ setBackupInProgress(true);
+
+ try {
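+ // Checkpoint the WAL so the main .db file holds all committed data before it is copied.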
+ logger.debug("Executing WAL checkpoint...");
+ db.exec("PRAGMA wal_checkpoint(FULL);");
+ logger.debug("WAL checkpoint completed successfully");
+
+ const now = new Date();
+ const day = String(now.getDate()).padStart(2, "0");
+ const month = String(now.getMonth() + 1).padStart(2, "0");
+ const year = now.getFullYear();
+ const dateStr = `${day}-${month}-${year}`;
+ logger.debug(`Using date string for backup: ${dateStr}`);
+
+ logger.debug(`Scanning backup directory: ${backupDir}`);
+ const files = readdirSync(backupDir);
+ logger.debug(`Found ${files.length} files in backup directory`);
+
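+ // Backups are named dockstatapi-DD-MM-YYYY-N.db.bak; N increments per calendar day.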
+ const regex = new RegExp(
+ `^dockstatapi-${day}-${month}-${year}-(\\d+)\\.db\\.bak$`,
+ );
+ let maxBackupNum = 0;
+
+ for (const file of files) {
+ const match = file.match(regex);
+ if (match?.[1]) {
+ const num = Number.parseInt(match[1], 10);
+ logger.debug(`Found existing backup file: ${file} with number ${num}`);
+ if (num > maxBackupNum) {
+ maxBackupNum = num;
+ }
+ } else {
+ logger.debug(`Skipping non-matching file: ${file}`);
+ }
+ }
+
+ logger.debug(`Maximum backup number found: ${maxBackupNum}`);
+ const backupNumber = maxBackupNum + 1;
+ const backupFilename = `${backupDir}dockstatapi-${dateStr}-${backupNumber}.db.bak`;
+ logger.debug(`Generated backup filename: ${backupFilename}`);
+
+ logger.debug(`Attempting to copy database to ${backupFilename}`);
+ try {
+ copyFileSync(`${backupDir}dockstatapi.db`, backupFilename);
+ logger.info(`Backup created successfully: ${backupFilename}`);
+ logger.debug("File copy operation completed without errors");
+ } catch (e) {
+ logger.error(`Failed to create backup file: ${(e as Error).message}`);
+ throw e;
+ }
+
+ return backupFilename;
+ } finally {
+ setBackupInProgress(false);
+ logger.debug("Backup process completed, in progress flag reset");
+ }
+}
+
+export function restoreDatabase(backupFilename: string): void {
+ const backupFile = `${backupDir}${backupFilename}`;
+
+ if (backupInProgress) {
+ logger.error("Restore attempt blocked: Backup in progress");
+ throw new Error("Backup in progress. Cannot restore.");
+ }
+
+ logger.debug(`Starting database restore from ${backupFile}`);
+
+ if (!existsSync(backupFile)) {
+ logger.error(`Backup file not found: ${backupFile}`);
+ throw new Error(`Backup file ${backupFile} does not exist.`);
+ }
+
+ setBackupInProgress(true);
+ try {
+ executeDbOperation(
+ "restore",
+ () => {
+ logger.debug(`Attempting to restore database from ${backupFile}`);
+ try {
+ copyFileSync(backupFile, `${backupDir}dockstatapi.db`);
+ logger.info(`Database restored successfully from: ${backupFilename}`);
+ logger.debug("Database file replacement completed");
+ } catch (e) {
+ logger.error(`Restore failed: ${(e as Error).message}`);
+ throw e;
+ }
+ },
+ () => {
+ if (backupInProgress) {
+ logger.error("Database operation attempted during restore");
+ throw new Error("Cannot perform database operations during restore");
+ }
+ },
+ );
+ } finally {
+ setBackupInProgress(false);
+ logger.debug("Restore process completed, in progress flag reset");
+ }
+}
+
+export const findLatestBackup = (): string => {
+ logger.debug(`Searching for latest backup in directory: ${backupDir}`);
+
+ const files = readdirSync(backupDir);
+ logger.debug(`Found ${files.length} files to process`);
+
+ const backups = files
+ .map((file): BackupInfo | null => {
+ const match = file.match(
+ /^dockstatapi-(\d{2})-(\d{2})-(\d{4})-(\d+)\.db\.bak$/,
+ );
+ if (!match) {
+ logger.debug(`Skipping non-backup file: ${file}`);
+ return null;
+ }
+
+ const date = new Date(
+ Number(match[3]),
+ Number(match[2]) - 1,
+ Number(match[1]),
+ );
+ logger.debug(
+ `Found backup file: ${file} with date ${date.toISOString()}`,
+ );
+
+ return {
+ filename: file,
+ date,
+ backupNum: Number(match[4]),
+ };
+ })
+ .filter((backup): backup is BackupInfo => backup !== null)
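+ // Newest date first; same-day backups tie-break on the backup number.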
+ .sort((a, b) => {
+ const dateDiff = b.date.getTime() - a.date.getTime();
+ return dateDiff !== 0 ? dateDiff : b.backupNum - a.backupNum;
+ });
+
+ if (!backups.length) {
+ logger.error("No valid backup files found");
+ throw new Error("No backups available");
+ }
+
+ const latestBackup = backups[0].filename;
+ logger.debug(`Determined latest backup file: ${latestBackup}`);
+ return latestBackup;
+};
diff --git a/src/core/database/config.ts b/src/core/database/config.ts
new file mode 100644
index 00000000..f2460e06
--- /dev/null
+++ b/src/core/database/config.ts
@@ -0,0 +1,55 @@
+import { db } from "./database";
+import { executeDbOperation } from "./helper";
+
+const stmt = {
+ update: db.prepare(
+ "UPDATE config SET fetching_interval = ?, keep_data_for = ?, api_key = ?",
+ ),
+ select: db.prepare(
+ "SELECT keep_data_for, fetching_interval, api_key FROM config",
+ ),
+ deleteOld: db.prepare(
+ `DELETE FROM container_stats WHERE timestamp < datetime('now', '-' || ? || ' days')`,
+ ),
+ deleteOldLogs: db.prepare(
+ `DELETE FROM backend_log_entries WHERE timestamp < datetime('now', '-' || ? || ' days')`,
+ ),
+};
+
+export function updateConfig(
+ fetching_interval: number,
+ keep_data_for: number,
+ api_key: string,
+) {
+ return executeDbOperation(
+ "Update Config",
+ () => stmt.update.run(fetching_interval, keep_data_for, api_key),
+ () => {
+ if (
+ typeof fetching_interval !== "number" ||
+ typeof keep_data_for !== "number"
+ ) {
+ throw new TypeError("Invalid config parameters");
+ }
+ },
+ );
+}
+
+export function getConfig() {
+ return executeDbOperation("Get Config", () => stmt.select.all());
+}
+
+export function deleteOldData(days: number) {
+ return executeDbOperation(
+ "Delete Old Data",
+ () => {
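+ // Prune stats and logs atomically in one transaction.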
+ db.transaction(() => {
+ stmt.deleteOld.run(days);
+ stmt.deleteOldLogs.run(days);
+ })();
+ },
+ () => {
+ if (typeof days !== "number") throw new TypeError("Invalid days type");
+ },
+ );
+}
diff --git a/src/core/database/containerStats.ts b/src/core/database/containerStats.ts
new file mode 100644
index 00000000..a5d6bcf0
--- /dev/null
+++ b/src/core/database/containerStats.ts
@@ -0,0 +1,34 @@
+import { db } from "./database";
+import { executeDbOperation } from "./helper";
+
+const stmt = db.prepare(`
+ INSERT INTO container_stats (id, hostId, name, image, status, state, cpu_usage, memory_usage)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+`);
+
+export function addContainerStats(
+ id: string,
+ hostId: string,
+ name: string,
+ image: string,
+ status: string,
+ state: string,
+ cpu_usage: number,
+ memory_usage: number,
+) {
+ return executeDbOperation(
+ "Add Container Stats",
+ () =>
+ stmt.run(id, hostId, name, image, status, state, cpu_usage, memory_usage),
+ () => {
+ if (
+ typeof id !== "string" ||
+ typeof hostId !== "string" ||
+ typeof cpu_usage !== "number" ||
+ typeof memory_usage !== "number"
+ ) {
+ throw new TypeError("Invalid container stats parameters");
+ }
+ },
+ );
+}
diff --git a/src/core/database/database.ts b/src/core/database/database.ts
new file mode 100644
index 00000000..f8de7cb1
--- /dev/null
+++ b/src/core/database/database.ts
@@ -0,0 +1,117 @@
+import { Database } from "bun:sqlite";
+import { existsSync } from "node:fs";
+import { mkdir } from "node:fs/promises";
+import { userInfo } from "node:os";
+import path from "node:path";
+
+const dataFolder = path.join(process.cwd(), "data");
+
+const username = userInfo().username;
+const gid = userInfo().gid;
+const uid = userInfo().uid;
+
+export let db: Database;
+
+try {
+ const databasePath = path.join(dataFolder, "dockstatapi.db");
+ console.log("Database path:", databasePath);
+ console.log(`Running as: ${username} (${uid}:${gid})`);
+
+ if (!existsSync(dataFolder)) {
+ await mkdir(dataFolder, { recursive: true, mode: 0o777 });
+ console.log("Created data directory:", dataFolder);
+ }
+
+ db = new Database(databasePath, { create: true });
+ console.log("Database opened successfully");
+
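+ // WAL journal mode lets readers proceed while the stats collector writes.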
+ db.exec("PRAGMA journal_mode = WAL;");
+} catch (error) {
+ console.error(`Cannot start DockStatAPI: ${error}`);
+ process.exit(1); // exit codes are limited to 0-255, so 500 would wrap to 244
+}
+
+export function init() {
+ db.exec(`
+ CREATE TABLE IF NOT EXISTS backend_log_entries (
+ timestamp STRING NOT NULL,
+ level TEXT NOT NULL,
+ message TEXT NOT NULL,
+ file TEXT NOT NULL,
+ line NUMBER NOT NULL
+ );
+
+ CREATE TABLE IF NOT EXISTS stacks_config (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ name TEXT NOT NULL,
+ version INTEGER NOT NULL,
+ custom BOOLEAN NOT NULL,
+ source TEXT NOT NULL,
+ compose_spec TEXT NOT NULL
+ );
+
+ CREATE TABLE IF NOT EXISTS docker_hosts (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ name TEXT NOT NULL,
+ hostAddress TEXT NOT NULL,
+ secure BOOLEAN NOT NULL
+ );
+
+ CREATE TABLE IF NOT EXISTS host_stats (
+ hostId INTEGER PRIMARY KEY NOT NULL,
+ hostName TEXT NOT NULL,
+ dockerVersion TEXT NOT NULL,
+ apiVersion TEXT NOT NULL,
+ os TEXT NOT NULL,
+ architecture TEXT NOT NULL,
+ totalMemory INTEGER NOT NULL,
+ totalCPU INTEGER NOT NULL,
+ labels TEXT NOT NULL,
+ containers INTEGER NOT NULL,
+ containersRunning INTEGER NOT NULL,
+ containersStopped INTEGER NOT NULL,
+ containersPaused INTEGER NOT NULL,
+ images INTEGER NOT NULL
+ );
+
+ CREATE TABLE IF NOT EXISTS container_stats (
+ id TEXT NOT NULL,
+ hostId TEXT NOT NULL,
+ name TEXT NOT NULL,
+ image TEXT NOT NULL,
+ status TEXT NOT NULL,
+ state TEXT NOT NULL,
+ cpu_usage FLOAT NOT NULL,
+ memory_usage FLOAT NOT NULL,
+ timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
+ );
+
+ CREATE TABLE IF NOT EXISTS config (
+ keep_data_for NUMBER NOT NULL,
+ fetching_interval NUMBER NOT NULL,
+ api_key TEXT NOT NULL
+ );
+ `);
+
+ const configRow = db
+ .prepare("SELECT COUNT(*) AS count FROM config")
+ .get() as { count: number };
+
+ if (configRow.count === 0) {
+ db.prepare(
+ 'INSERT INTO config (keep_data_for, fetching_interval, api_key) VALUES (7, 5, "changeme")',
+ ).run();
+ }
+
+ const hostRow = db
+ .prepare("SELECT COUNT(*) AS count FROM docker_hosts")
+ .get() as { count: number };
+
+ if (hostRow.count === 0) {
+ db.prepare(
+ "INSERT INTO docker_hosts (name, hostAddress, secure) VALUES (?, ?, ?)",
+ ).run("Localhost", "localhost:2375", false);
+ }
+}
+
+init();
diff --git a/src/core/database/dockerHosts.ts b/src/core/database/dockerHosts.ts
new file mode 100644
index 00000000..2c9903db
--- /dev/null
+++ b/src/core/database/dockerHosts.ts
@@ -0,0 +1,61 @@
+import type { DockerHost } from "~/typings/docker";
+import { db } from "./database";
+import { executeDbOperation } from "./helper";
+
+const stmt = {
+ insert: db.prepare(
+ "INSERT INTO docker_hosts (name, hostAddress, secure) VALUES (?, ?, ?)",
+ ),
+ selectAll: db.prepare(
+ "SELECT id, name, hostAddress, secure FROM docker_hosts ORDER BY id DESC",
+ ),
+ update: db.prepare(
+ "UPDATE docker_hosts SET hostAddress = ?, secure = ?, name = ? WHERE id = ?",
+ ),
+ delete: db.prepare("DELETE FROM docker_hosts WHERE id = ?"),
+};
+
+export function addDockerHost(host: DockerHost) {
+ return executeDbOperation(
+ "Add Docker Host",
+ () => stmt.insert.run(host.name, host.hostAddress, host.secure),
+ () => {
+ if (!host.name || !host.hostAddress)
+ throw new Error("Missing required fields");
+ if (typeof host.secure !== "boolean")
+ throw new TypeError("Invalid secure type");
+ },
+ );
+}
+
+export function getDockerHosts(): DockerHost[] {
+ return executeDbOperation("Get Docker Hosts", () => {
+ const rows = stmt.selectAll.all() as Array<
+ Omit<DockerHost, "secure"> & { secure: number }
+ >;
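+ // SQLite stores booleans as 0/1; map the column back to a real boolean.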
+ return rows.map((row) => ({
+ ...row,
+ secure: row.secure === 1,
+ }));
+ });
+}
+export function updateDockerHost(host: DockerHost) {
+ return executeDbOperation(
+ "Update Docker Host",
+ () => stmt.update.run(host.hostAddress, host.secure, host.name, host.id),
+ () => {
+ if (!host.id || typeof host.id !== "number")
+ throw new Error("Invalid host ID");
+ },
+ );
+}
+
+export function deleteDockerHost(id: number) {
+ return executeDbOperation(
+ "Delete Docker Host",
+ () => stmt.delete.run(id),
+ () => {
+ if (typeof id !== "number") throw new TypeError("Invalid ID type");
+ },
+ );
+}
diff --git a/src/core/database/helper.ts b/src/core/database/helper.ts
new file mode 100644
index 00000000..1f1cabd9
--- /dev/null
+++ b/src/core/database/helper.ts
@@ -0,0 +1,28 @@
+import { logger } from "~/core/utils/logger";
+import { backupInProgress } from "./_dbState";
+
+export function executeDbOperation<T>(
+ label: string,
+ operation: () => T,
+ validate?: () => void,
+ dontLog?: boolean,
+): T {
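+ // Gate every operation behind the backup flag, except the backup/restore operations themselves.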
+ if (backupInProgress && label !== "backup" && label !== "restore") {
+ throw new Error(
+ `Backup in progress; database operation not allowed: ${label}`,
+ );
+ }
+ const startTime = Date.now();
+ if (dontLog !== true) {
+ logger.debug(`__task__ __db__ ${label} ⏳`);
+ }
+ if (validate) {
+ validate();
+ }
+ const result = operation();
+ const duration = Date.now() - startTime;
+ if (dontLog !== true) {
+ logger.debug(`__task__ __db__ ${label} ✔️ (${duration}ms)`);
+ }
+ return result;
+}
diff --git a/src/core/database/hostStats.ts b/src/core/database/hostStats.ts
new file mode 100644
index 00000000..3d48528d
--- /dev/null
+++ b/src/core/database/hostStats.ts
@@ -0,0 +1,45 @@
+import type { HostStats } from "~/typings/docker";
+import { db } from "./database";
+import { executeDbOperation } from "./helper";
+
+const stmt = db.prepare(`
+ INSERT INTO host_stats (
+ hostId, hostName, dockerVersion, apiVersion, os, architecture,
+ totalMemory, totalCPU, labels, containers, containersRunning,
+ containersStopped, containersPaused, images
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
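+ -- Upsert keyed on hostId: keep one always-current stats row per host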
+ ON CONFLICT(hostId) DO UPDATE SET
+ dockerVersion = excluded.dockerVersion,
+ apiVersion = excluded.apiVersion,
+ os = excluded.os,
+ architecture = excluded.architecture,
+ totalMemory = excluded.totalMemory,
+ totalCPU = excluded.totalCPU,
+ labels = excluded.labels,
+ containers = excluded.containers,
+ containersRunning = excluded.containersRunning,
+ containersStopped = excluded.containersStopped,
+ containersPaused = excluded.containersPaused,
+ images = excluded.images
+`);
+
+export function updateHostStats(stats: HostStats) {
+ return executeDbOperation("Update Host Stats", () =>
+ stmt.run(
+ stats.hostId,
+ stats.hostName,
+ stats.dockerVersion,
+ stats.apiVersion,
+ stats.os,
+ stats.architecture,
+ stats.totalMemory,
+ stats.totalCPU,
+ JSON.stringify(stats.labels),
+ stats.containers,
+ stats.containersRunning,
+ stats.containersStopped,
+ stats.containersPaused,
+ stats.images,
+ ),
+ );
+}
diff --git a/src/core/database/index.ts b/src/core/database/index.ts
new file mode 100644
index 00000000..c381e7a6
--- /dev/null
+++ b/src/core/database/index.ts
@@ -0,0 +1,23 @@
+import { init } from "~/core/database/database";
+
+init();
+
+import * as backup from "~/core/database/backup";
+import * as config from "~/core/database/config";
+import * as containerStats from "~/core/database/containerStats";
+import * as dockerHosts from "~/core/database/dockerHosts";
+import * as hostStats from "~/core/database/hostStats";
+import * as logs from "~/core/database/logs";
+import * as stacks from "~/core/database/stacks";
+
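+// Flatten all database modules into a single API surface. Function names
+// must stay unique across modules, since a later spread would silently
+// shadow an earlier one.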
+export const dbFunctions = {
+ ...dockerHosts,
+ ...logs,
+ ...config,
+ ...containerStats,
+ ...hostStats,
+ ...stacks,
+ ...backup,
+};
+
+export type dbFunctions = typeof dbFunctions;
diff --git a/src/core/database/logs.ts b/src/core/database/logs.ts
new file mode 100644
index 00000000..eb815d54
--- /dev/null
+++ b/src/core/database/logs.ts
@@ -0,0 +1,79 @@
+import type { log_message } from "~/typings/database";
+import { db } from "./database";
+import { executeDbOperation } from "./helper";
+
+const stmt = {
+ insert: db.prepare(
+ "INSERT INTO backend_log_entries (level, timestamp, message, file, line) VALUES (?, ?, ?, ?, ?)",
+ ),
+ selectAll: db.prepare(
+ "SELECT level, timestamp, message, file, line FROM backend_log_entries ORDER BY timestamp DESC",
+ ),
+ selectByLevel: db.prepare(
+ "SELECT level, timestamp, message, file, line FROM backend_log_entries WHERE level = ?",
+ ),
+ deleteAll: db.prepare("DELETE FROM backend_log_entries"),
+ deleteByLevel: db.prepare("DELETE FROM backend_log_entries WHERE level = ?"),
+};
+
+function convertToLogMessage(row: log_message): log_message {
+ return {
+ level: row.level,
+ timestamp: row.timestamp,
+ message: row.message,
+ file: row.file,
+ line: row.line,
+ };
+}
+
+export function addLogEntry(data: log_message) {
+ return executeDbOperation(
+ "Add Log Entry",
+ () =>
+ stmt.insert.run(
+ data.level,
+ data.timestamp,
+ data.message,
+ data.file,
+ data.line,
+ ),
+ () => {
+ if (
+ typeof data.level !== "string" ||
+ typeof data.timestamp !== "string" ||
+ typeof data.message !== "string" ||
+ typeof data.file !== "string" ||
+ typeof data.line !== "number"
+ ) {
+ throw new TypeError(
+ "Invalid log entry parameters ${data.file} ${data.line} ${data.message} ${data}",
+ );
+ }
+ },
+ true,
+ );
+}
+
+export function getAllLogs(): log_message[] {
+ return executeDbOperation("Get All Logs", () =>
+ stmt.selectAll.all().map((row) => convertToLogMessage(row as log_message)),
+ );
+}
+
+export function getLogsByLevel(level: string): log_message[] {
+ return executeDbOperation("Get Logs By Level", () =>
+ stmt.selectByLevel
+ .all(level)
+ .map((row) => convertToLogMessage(row as log_message)),
+ );
+}
+
+export function clearAllLogs() {
+ return executeDbOperation("Clear All Logs", () => stmt.deleteAll.run());
+}
+
+export function clearLogsByLevel(level: string) {
+ return executeDbOperation("Clear Logs By Level", () =>
+ stmt.deleteByLevel.run(level),
+ );
+}
diff --git a/src/core/database/stacks.ts b/src/core/database/stacks.ts
new file mode 100644
index 00000000..1c81e6d9
--- /dev/null
+++ b/src/core/database/stacks.ts
@@ -0,0 +1,66 @@
+import type { stacks_config } from "~/typings/database";
+import type { Stack } from "~/typings/docker-compose";
+import { findObjectByKey } from "../utils/helpers";
+import { db } from "./database";
+import { executeDbOperation } from "./helper";
+
+const stmt = {
+ insert: db.prepare(`
+ INSERT INTO stacks_config (
+ name, version, custom, source, compose_spec
+ ) VALUES (?, ?, ?, ?, ?)
+ `),
+ selectAll: db.prepare(`
+ SELECT id, name, version, custom, source, compose_spec
+ FROM stacks_config
+ ORDER BY name DESC
+ `),
+ update: db.prepare(`
+ UPDATE stacks_config
+    SET version = ?, custom = ?, source = ?, compose_spec = ?
+ WHERE name = ?
+ `),
+ delete: db.prepare("DELETE FROM stacks_config WHERE id = ?"),
+};
+
+export function addStack(stack: stacks_config) {
+ executeDbOperation("Add Stack", () =>
+ stmt.insert.run(
+ stack.name,
+ stack.version,
+ stack.custom,
+ stack.source,
+ stack.compose_spec,
+ ),
+ );
+
+ return findObjectByKey(getStacks(), "name", stack.name)?.id;
+}
+
+export function getStacks() {
+ return executeDbOperation("Get Stacks", () =>
+ stmt.selectAll.all(),
+ ) as Stack[];
+}
+
+export function deleteStack(id: number) {
+ return executeDbOperation(
+ "Delete Stack",
+ () => stmt.delete.run(id),
+ () => {
+ if (typeof id !== "number") throw new TypeError("Invalid stack ID");
+ },
+ );
+}
+
+export function updateStack(stack: stacks_config) {
+ return executeDbOperation("Update Stack", () =>
+ stmt.update.run(
+ stack.version,
+ stack.custom,
+ stack.source,
+ stack.name,
+ stack.compose_spec,
+ ),
+ );
+}
diff --git a/src/core/docker/client.ts b/src/core/docker/client.ts
new file mode 100644
index 00000000..ad65540b
--- /dev/null
+++ b/src/core/docker/client.ts
@@ -0,0 +1,33 @@
+import Docker from "dockerode";
+import { logger } from "~/core/utils/logger";
+import type { DockerHost } from "~/typings/docker";
+
+export const getDockerClient = (host: DockerHost): Docker => {
+ try {
+ const inputUrl = host.hostAddress.includes("://")
+ ? host.hostAddress
+ : `${host.secure ? "https" : "http"}://${host.hostAddress}`;
+ const parsedUrl = new URL(inputUrl);
+ const hostAddress = parsedUrl.hostname;
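+    // Fall back to the conventional Docker daemon ports when the URL has
+    // none: 2375 for plain HTTP, 2376 for TLS.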
+ const port = parsedUrl.port
+ ? Number.parseInt(parsedUrl.port)
+ : host.secure
+ ? 2376
+ : 2375;
+
+ if (Number.isNaN(port) || port < 1 || port > 65535) {
+ throw new Error("Invalid port number in Docker host URL");
+ }
+
+ return new Docker({
+ protocol: host.secure ? "https" : "http",
+ host: hostAddress,
+ port,
+ version: "v1.41",
+ // TODO: Add TLS configuration if needed
+ });
+ } catch (error) {
+ logger.error("Invalid Docker host URL configuration:", error);
+ throw new Error("Invalid Docker host configuration");
+ }
+};
diff --git a/src/core/docker/monitor.ts b/src/core/docker/monitor.ts
new file mode 100644
index 00000000..d10c3c65
--- /dev/null
+++ b/src/core/docker/monitor.ts
@@ -0,0 +1,142 @@
+import { sleep } from "bun";
+import Docker from "dockerode";
+import { dbFunctions } from "~/core/database";
+import { getDockerClient } from "~/core/docker/client";
+import { logger } from "~/core/utils/logger";
+import type { DockerHost } from "~/typings/docker";
+import type { ContainerInfo } from "~/typings/docker";
+import { pluginManager } from "../plugins/plugin-manager";
+
+export async function monitorDockerEvents() {
+ let hosts: DockerHost[];
+
+ try {
+ hosts = dbFunctions.getDockerHosts();
+ logger.debug(
+ `Retrieved ${hosts.length} Docker host(s) for event monitoring.`,
+ );
+ } catch (error: unknown) {
+ logger.error(`Error retrieving Docker hosts: ${(error as Error).message}`);
+ return;
+ }
+
+ for (const host of hosts) {
+ await startFor(host);
+ }
+}
+
+async function startFor(host: DockerHost) {
+ const docker = getDockerClient(host);
+ try {
+ await docker.ping();
+ pluginManager.handleHostReachableAgain(host.name);
+ } catch (err) {
+ logger.warn(`Restarting Stream for ${host.name} in 10 seconds...`);
+ pluginManager.handleHostUnreachable(host.name, String(err));
+ await sleep(10000);
+    return startFor(host);
+ }
+
+ try {
+ const eventsStream = await docker.getEvents();
+ logger.debug(`Started events stream for host: ${host.name}`);
+
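+    // Docker's /events endpoint streams newline-delimited JSON and a chunk
+    // can end mid-object, so the unterminated tail is buffered until the
+    // next "data" event.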
+ let buffer = "";
+
+ eventsStream.on("data", (chunk: Buffer) => {
+ buffer += chunk.toString("utf8");
+ const lines = buffer.split(/\r?\n/);
+
+ buffer = lines.pop() || "";
+
+ for (const line of lines) {
+ if (line.trim() === "") {
+ continue;
+ }
+
+ //biome-ignore lint/suspicious/noExplicitAny: Unsure what data we are receiving here
+ let event: any;
+ try {
+ event = JSON.parse(line);
+ } catch (parseErr) {
+ logger.error(
+ `Failed to parse event from host ${host.name}: ${String(parseErr)}`,
+ );
+ continue;
+ }
+
+ if (event.Type === "container") {
+ const containerInfo: ContainerInfo = {
+ id: event.Actor?.ID || event.id || "",
+ hostId: host.name,
+ name: event.Actor?.Attributes?.name || "",
+ image: event.Actor?.Attributes?.image || event.from || "",
+ status: event.status || event.Actor?.Attributes?.status || "",
+ state: event.Actor?.Attributes?.state || event.Action || "",
+ cpuUsage: 0,
+ memoryUsage: 0,
+ };
+
+ const action = event.Action;
+ logger.debug(`Triggering Action [${action}]`);
+ switch (action) {
+ case "stop":
+ pluginManager.handleContainerStop(containerInfo);
+ break;
+ case "start":
+ pluginManager.handleContainerStart(containerInfo);
+ break;
+ case "die":
+ pluginManager.handleContainerDie(containerInfo);
+ break;
+ case "kill":
+ pluginManager.handleContainerKill(containerInfo);
+ break;
+ case "create":
+ pluginManager.handleContainerCreate(containerInfo);
+ break;
+ case "destroy":
+ pluginManager.handleContainerDestroy(containerInfo);
+ break;
+ case "pause":
+ pluginManager.handleContainerPause(containerInfo);
+ break;
+ case "unpause":
+ pluginManager.handleContainerUnpause(containerInfo);
+ break;
+ case "restart":
+ pluginManager.handleContainerRestart(containerInfo);
+ break;
+ case "update":
+ pluginManager.handleContainerUpdate(containerInfo);
+ break;
+ case "health_status":
+ pluginManager.handleContainerHealthStatus(containerInfo);
+ break;
+ default:
+ logger.debug(
+ `Unhandled container event "${action}" on host ${host.name}`,
+ );
+ }
+ }
+ }
+ });
+
+ eventsStream.on("error", async (err: Error) => {
+ logger.error(`Events stream error for host ${host.name}: ${err.message}`);
+ logger.warn(`Restarting Stream for ${host.name} in 10 seconds...`);
+ await sleep(10000);
+ startFor(host);
+ });
+
+ eventsStream.on("end", () => {
+ logger.info(`Events stream ended for host ${host.name}`);
+ });
+ } catch (streamErr) {
+ logger.error(
+ `Failed to start events stream for host ${host.name}: ${String(
+ streamErr,
+ )}`,
+ );
+ }
+}
diff --git a/src/core/docker/scheduler.ts b/src/core/docker/scheduler.ts
new file mode 100644
index 00000000..8682411b
--- /dev/null
+++ b/src/core/docker/scheduler.ts
@@ -0,0 +1,115 @@
+import { dbFunctions } from "~/core/database";
+import storeContainerData from "~/core/docker/store-container-stats";
+import storeHostData from "~/core/docker/store-host-stats";
+import { logger } from "~/core/utils/logger";
+import type { config } from "~/typings/database";
+
+function convertFromMinToMs(minutes: number): number {
+ return minutes * 60 * 1000;
+}
+
+async function initialRun(
+  scheduleName: string,
+  scheduleFunction: Promise<void> | void,
+  isAsync: boolean,
+) {
+  try {
+    // Synchronous schedule functions have already executed by the time their
+    // result is passed in, so only async ones need to be awaited.
+    if (isAsync) {
+      await scheduleFunction;
+    }
+    logger.info(`Startup run success for: ${scheduleName}`);
+  } catch (error) {
+    logger.error(`Startup run failed for ${scheduleName}: ${String(error)}`);
+  }
+}
+
+async function setSchedules() {
+ try {
+ const rawConfigData: unknown[] = dbFunctions.getConfig();
+ const configData = rawConfigData[0];
+
+ if (
+ !configData ||
+ typeof (configData as config).keep_data_for !== "number" ||
+ typeof (configData as config).fetching_interval !== "number"
+ ) {
+ logger.error("Invalid configuration data:", configData);
+ throw new Error("Invalid configuration data");
+ }
+
+ const { keep_data_for, fetching_interval } = configData as config;
+
+ if (keep_data_for === undefined) {
+ const errMsg = "keep_data_for is undefined";
+ logger.error(errMsg);
+ throw new Error(errMsg);
+ }
+
+ if (fetching_interval === undefined) {
+ const errMsg = "fetching_interval is undefined";
+ logger.error(errMsg);
+ throw new Error(errMsg);
+ }
+
+ logger.info(
+ `Scheduling: Fetching container statistics every ${fetching_interval} minutes`,
+ );
+
+ logger.info(
+ `Scheduling: Updating host statistics every ${fetching_interval} minutes`,
+ );
+
+ logger.info(
+      `Scheduling: Cleaning up database every hour and deleting data older than ${keep_data_for} days`,
+ );
+
+ // Schedule container data fetching
+ await initialRun("storeContainerData", storeContainerData(), true);
+ setInterval(async () => {
+ try {
+ logger.info("Task Start: Fetching container data.");
+ await storeContainerData();
+ logger.info("Task End: Container data fetched successfully.");
+ } catch (error) {
+ logger.error("Error in fetching container data:", error);
+ }
+ }, convertFromMinToMs(fetching_interval));
+
+ // Schedule Host statistics updates
+ await initialRun("storeHostData", storeHostData(), true);
+ setInterval(async () => {
+ try {
+ logger.info("Task Start: Updating host stats.");
+ await storeHostData();
+ logger.info("Task End: Updating host stats successfully.");
+ } catch (error) {
+ logger.error("Error in updating host stats:", error);
+ }
+ }, convertFromMinToMs(fetching_interval));
+
+ // Schedule database cleanup
+ await initialRun(
+ "dbFunctions.deleteOldData",
+ dbFunctions.deleteOldData(keep_data_for),
+ false,
+ );
+ setInterval(() => {
+ try {
+ logger.info("Task Start: Cleaning up old database data.");
+ dbFunctions.deleteOldData(keep_data_for);
+ logger.info("Task End: Database cleanup completed.");
+ } catch (error) {
+ logger.error("Error in database cleanup task:", error);
+ }
+ }, convertFromMinToMs(60));
+
+ logger.info("Schedules have been set successfully.");
+ } catch (error) {
+ logger.error("Error setting schedules:", error);
+ throw error;
+ }
+}
+
+export { setSchedules };
diff --git a/src/core/docker/store-container-stats.ts b/src/core/docker/store-container-stats.ts
new file mode 100644
index 00000000..33b9c0fb
--- /dev/null
+++ b/src/core/docker/store-container-stats.ts
@@ -0,0 +1,98 @@
+import type Docker from "dockerode";
+import { dbFunctions } from "~/core/database";
+import { getDockerClient } from "~/core/docker/client";
+import {
+ calculateCpuPercent,
+ calculateMemoryUsage,
+} from "~/core/utils/calculations";
+import { logger } from "../utils/logger";
+
+async function storeContainerData() {
+ try {
+ const hosts = dbFunctions.getDockerHosts();
+ logger.debug("Retrieved docker hosts for storing container data");
+
+ // Process each host concurrently and wait for them all to finish
+ await Promise.all(
+ hosts.map(async (host) => {
+ const docker = getDockerClient(host);
+
+ // Test the connection with a ping
+ try {
+ await docker.ping();
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(
+ `Failed to ping docker host "${host.name}": ${errMsg}`,
+ );
+ }
+
+ let containers: Docker.ContainerInfo[] = [];
+ try {
+ containers = await docker.listContainers({ all: true });
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(
+ `Failed to list containers on host "${host.name}": ${errMsg}`,
+ );
+ }
+
+ // Process each container concurrently
+ await Promise.all(
+ containers.map(async (containerInfo) => {
+ const containerName = containerInfo.Names[0].replace(/^\//, "");
+ try {
+ const container = docker.getContainer(containerInfo.Id);
+
+ const stats: Docker.ContainerStats = await new Promise(
+ (resolve, reject) => {
+ container.stats({ stream: false }, (error, stats) => {
+ if (error) {
+ const errMsg =
+ error instanceof Error ? error.message : String(error);
+ return reject(
+ new Error(
+ `Failed to get stats for container "${containerName}" (ID: ${containerInfo.Id}) on host "${host.name}": ${errMsg}`,
+ ),
+ );
+ }
+ if (!stats) {
+ return reject(
+ new Error(
+ `No stats returned for container "${containerName}" (ID: ${containerInfo.Id}) on host "${host.name}".`,
+ ),
+ );
+ }
+ resolve(stats);
+ });
+ },
+ );
+
+ dbFunctions.addContainerStats(
+ containerInfo.Id,
+ host.name,
+ containerName,
+ containerInfo.Image,
+ containerInfo.Status,
+ containerInfo.State,
+ calculateCpuPercent(stats),
+ calculateMemoryUsage(stats),
+ );
+ } catch (error) {
+ const errMsg =
+ error instanceof Error ? error.message : String(error);
+ throw new Error(
+ `Error processing container "${containerName}" (ID: ${containerInfo.Id}) on host "${host.name}": ${errMsg}`,
+ );
+ }
+ }),
+ );
+ }),
+ );
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(`Failed to store container data: ${errMsg}`);
+ }
+}
+
+export default storeContainerData;
diff --git a/src/core/docker/store-host-stats.ts b/src/core/docker/store-host-stats.ts
new file mode 100644
index 00000000..053f37ea
--- /dev/null
+++ b/src/core/docker/store-host-stats.ts
@@ -0,0 +1,84 @@
+import { dbFunctions } from "~/core/database";
+import { getDockerClient } from "~/core/docker/client";
+import { findObjectByKey } from "~/core/utils/helpers";
+import { logger } from "~/core/utils/logger";
+import type { DockerHost, HostStats } from "~/typings/docker";
+import type { DockerInfo } from "~/typings/dockerode";
+
+function getHostByName(hostName: string): DockerHost {
+ const hosts = dbFunctions.getDockerHosts() as DockerHost[];
+ const foundHost = findObjectByKey(hosts, "name", hostName);
+ if (!foundHost) {
+ throw new Error(`Host ${hostName} not found`);
+ }
+ return foundHost;
+}
+
+async function storeHostData() {
+ try {
+ const hosts = dbFunctions.getDockerHosts() as DockerHost[];
+
+ await Promise.all(
+ hosts.map(async (host) => {
+ const docker = getDockerClient(host);
+
+ try {
+ await docker.ping();
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(
+ `Failed to ping docker host "${host.name}": ${errMsg}`,
+ );
+ }
+
+ let hostStats: DockerInfo;
+ try {
+ hostStats = await docker.info();
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(
+ `Failed to fetch stats for host "${host.name}": ${errMsg}`,
+ );
+ }
+
+ const hostId = getHostByName(host.name).id;
+
+ if (!hostId) {
+ throw new Error(`Host "${host.name}" not found`);
+ }
+
+ try {
+ const stats: HostStats = {
+ hostId: hostId,
+ hostName: host.name,
+ dockerVersion: hostStats.ServerVersion,
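+          // Note: `Driver` is the storage driver reported by `docker.info()`,
+          // not an API version; dockerode's `docker.version()` would expose
+          // `ApiVersion` if that is what is wanted here.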
+ apiVersion: hostStats.Driver,
+ os: hostStats.OperatingSystem,
+ architecture: hostStats.Architecture,
+ totalMemory: hostStats.MemTotal,
+ totalCPU: hostStats.NCPU,
+ labels: hostStats.Labels,
+ images: hostStats.Images,
+ containers: hostStats.Containers,
+ containersPaused: hostStats.ContainersPaused,
+ containersRunning: hostStats.ContainersRunning,
+ containersStopped: hostStats.ContainersStopped,
+ };
+
+ dbFunctions.updateHostStats(stats);
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(
+ `Failed to store stats for host "${host.name}": ${errMsg}`,
+ );
+ }
+ }),
+ );
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ logger.error(`storeHostData failed: ${errMsg}`);
+ throw new Error(`Failed to store host data: ${errMsg}`);
+ }
+}
+
+export default storeHostData;
diff --git a/src/core/plugins/loader.ts b/src/core/plugins/loader.ts
new file mode 100644
index 00000000..854bc5ac
--- /dev/null
+++ b/src/core/plugins/loader.ts
@@ -0,0 +1,53 @@
+import fs from "node:fs";
+import path from "node:path";
+import { checkFileForChangeMe } from "../utils/change-me-checker";
+import { logger } from "../utils/logger";
+import { pluginManager } from "./plugin-manager";
+
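+// A plugin is any `*.plugin.ts` file in the given directory that default-
+// exports a `Plugin` object; files still containing a "change me"
+// placeholder are rejected before import.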
+export async function loadPlugins(pluginDir: string) {
+ const pluginPath = path.join(process.cwd(), pluginDir);
+
+ logger.debug(`Loading plugins (${pluginPath})`);
+
+ if (!fs.existsSync(pluginPath)) {
+ throw new Error("Failed to check plugin directory");
+ }
+ logger.debug("Plugin directory exists");
+
+ let pluginCount = 0;
+ let files: string[];
+ try {
+ files = fs.readdirSync(pluginPath);
+ logger.debug(`Found ${files.length} files in plugin directory`);
+ } catch (error) {
+    throw new Error(`Failed to read plugin directory: ${error}`);
+ }
+
+  if (files.length === 0) {
+ logger.info(`No plugins found in ${pluginPath}`);
+ return;
+ }
+
+ for (const file of files) {
+ if (!file.endsWith(".plugin.ts")) {
+ logger.debug(`Skipping non-plugin file: ${file}`);
+ continue;
+ }
+
+ const absolutePath = path.join(pluginPath, file);
+ logger.info(`Loading plugin: ${absolutePath}`);
+ try {
+ await checkFileForChangeMe(absolutePath);
+ const module = await import(absolutePath);
+ const plugin = module.default;
+ pluginManager.register(plugin);
+ pluginCount++;
+ } catch (error) {
+ logger.error(
+ `Error while importing plugin ${absolutePath}: ${error as string}`,
+ );
+ }
+ }
+
+ logger.info(`Registered ${pluginCount} plugin(s)`);
+}
diff --git a/src/core/plugins/plugin-manager.ts b/src/core/plugins/plugin-manager.ts
new file mode 100644
index 00000000..83d623f9
--- /dev/null
+++ b/src/core/plugins/plugin-manager.ts
@@ -0,0 +1,120 @@
+import { EventEmitter } from "node:events";
+import type { ContainerInfo } from "~/typings/docker";
+import type { Plugin } from "~/typings/plugin";
+import { logger } from "../utils/logger";
+
+class PluginManager extends EventEmitter {
+  private plugins: Map<string, Plugin> = new Map();
+
+ register(plugin: Plugin) {
+ try {
+ this.plugins.set(plugin.name, plugin);
+ logger.debug(`Registered plugin: ${plugin.name}`);
+ } catch (error) {
+ logger.error(
+ `Registering plugin ${plugin.name} failed: ${error as string}`,
+ );
+ }
+ }
+
+ unregister(name: string) {
+ this.plugins.delete(name);
+ }
+
+ getLoadedPlugins(): string[] {
+ return Array.from(this.plugins.keys());
+ }
+
+ // Trigger plugin flows:
+ handleContainerStop(containerInfo: ContainerInfo) {
+ for (const [, plugin] of this.plugins) {
+ plugin.onContainerStop?.(containerInfo);
+ }
+ }
+
+ handleContainerStart(containerInfo: ContainerInfo) {
+ for (const [, plugin] of this.plugins) {
+ plugin.onContainerStart?.(containerInfo);
+ }
+ }
+
+ handleContainerExit(containerInfo: ContainerInfo) {
+ for (const [, plugin] of this.plugins) {
+ plugin.onContainerExit?.(containerInfo);
+ }
+ }
+
+ handleContainerCreate(containerInfo: ContainerInfo) {
+ for (const [, plugin] of this.plugins) {
+ plugin.onContainerCreate?.(containerInfo);
+ }
+ }
+
+ handleContainerDestroy(containerInfo: ContainerInfo) {
+ for (const [, plugin] of this.plugins) {
+ plugin.onContainerDestroy?.(containerInfo);
+ }
+ }
+
+ handleContainerPause(containerInfo: ContainerInfo) {
+ for (const [, plugin] of this.plugins) {
+ plugin.onContainerPause?.(containerInfo);
+ }
+ }
+
+ handleContainerUnpause(containerInfo: ContainerInfo) {
+ for (const [, plugin] of this.plugins) {
+ plugin.onContainerUnpause?.(containerInfo);
+ }
+ }
+
+ handleContainerRestart(containerInfo: ContainerInfo) {
+ for (const [, plugin] of this.plugins) {
+ plugin.onContainerRestart?.(containerInfo);
+ }
+ }
+
+ handleContainerUpdate(containerInfo: ContainerInfo) {
+ for (const [, plugin] of this.plugins) {
+ plugin.onContainerUpdate?.(containerInfo);
+ }
+ }
+
+ handleContainerRename(containerInfo: ContainerInfo) {
+ for (const [, plugin] of this.plugins) {
+ plugin.onContainerRename?.(containerInfo);
+ }
+ }
+
+ handleContainerHealthStatus(containerInfo: ContainerInfo) {
+ for (const [, plugin] of this.plugins) {
+ plugin.onContainerHealthStatus?.(containerInfo);
+ }
+ }
+
+ handleHostUnreachable(host: string, err: string) {
+ for (const [, plugin] of this.plugins) {
+ plugin.onHostUnreachable?.(host, err);
+ }
+ }
+
+ handleHostReachableAgain(host: string) {
+ for (const [, plugin] of this.plugins) {
+ plugin.onHostReachableAgain?.(host);
+ }
+ }
+
+ handleContainerKill(containerInfo: ContainerInfo) {
+ for (const [, plugin] of this.plugins) {
+ plugin.onContainerKill?.(containerInfo);
+ }
+ }
+
+ handleContainerDie(containerInfo: ContainerInfo) {
+ for (const [, plugin] of this.plugins) {
+ plugin.handleContainerDie?.(containerInfo);
+ }
+ }
+}
+
+export const pluginManager = new PluginManager();
diff --git a/src/core/stacks/controller.ts b/src/core/stacks/controller.ts
new file mode 100644
index 00000000..1f506bf8
--- /dev/null
+++ b/src/core/stacks/controller.ts
@@ -0,0 +1,401 @@
+import { rm } from "node:fs/promises";
+import DockerCompose from "docker-compose";
+import YAML from "yaml";
+import { dbFunctions } from "~/core/database";
+import { logger } from "~/core/utils/logger";
+import { postToClient } from "~/routes/live-stacks";
+import type { stacks_config } from "~/typings/database";
+import type { ComposeSpec, Stack } from "~/typings/docker-compose";
+import { findObjectByKey } from "../utils/helpers";
+
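+// docker-compose invokes its `callback` with raw Buffer chunks; this adapter
+// decodes them to strings before forwarding them to the caller's hook.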
+const wrapProgressCallback = (progressCallback?: (log: string) => void) => {
+ return progressCallback
+ ? (chunk: Buffer, streamSource?: "stdout" | "stderr") => {
+ const log = chunk.toString();
+ progressCallback(log);
+ }
+ : undefined;
+};
+
+async function getStackName(stack_id: number): Promise<string> {
+ logger.debug(`Fetching stack name for id ${stack_id}`);
+ const stacks = dbFunctions.getStacks();
+ const stack = findObjectByKey(stacks, "id", stack_id);
+ if (!stack) {
+ throw new Error(`Stack with id ${stack_id} not found`);
+ }
+ return stack.name;
+}
+
+async function runStackCommand<T>(
+  stack_id: number,
+  command: (
+    cwd: string,
+    progressCallback?: (log: string) => void,
+  ) => Promise<T>,
+  action: string,
+): Promise<T> {
+ try {
+ logger.debug(
+ `Starting runStackCommand for stack_id=${stack_id}, action="${action}"`,
+ );
+
+ const stackName = await getStackName(stack_id);
+ logger.debug(
+ `Retrieved stack name "${stackName}" for stack_id=${stack_id}`,
+ );
+
+ const stackPath = await getStackPath({
+ id: stack_id,
+ name: stackName,
+ } as Stack);
+ logger.debug(`Resolved stack path "${stackPath}" for stack_id=${stack_id}`);
+
+ const progressCallback = (log: string) => {
+ const message = log.trim();
+ logger.debug(
+ `Progress for stack_id=${stack_id}, action="${action}": ${message}`,
+ );
+
+ // ERROR HANDLING FOR COMPOSE ACTIONS
+ if (message.includes("Error response from daemon")) {
+ logger.error(
+ `Error response from daemon: ${
+ message.split("Error response from daemon:")[1]
+ }`,
+ );
+ }
+
+ postToClient({
+ type: "stack-progress",
+ data: {
+ stack_id,
+ action,
+ message,
+ timestamp: new Date().toISOString(),
+ },
+ });
+ };
+
+ logger.debug(
+ `Executing command for stack_id=${stack_id}, action="${action}"`,
+ );
+ const result = await command(stackPath, progressCallback);
+ logger.debug(
+ `Successfully completed command for stack_id=${stack_id}, action="${action}"`,
+ );
+
+ return result;
+ } catch (error) {
+ logger.debug(
+ `Error occurred for stack_id=${stack_id}, action="${action}": ${String(
+ error,
+ )}`,
+ );
+ postToClient({
+ type: "stack-error",
+ data: {
+ stack_id,
+ action,
+ message: String(error),
+ timestamp: new Date().toISOString(),
+ },
+ });
+ throw new Error(
+ `Error while ${action} stack "${stack_id}": ${String(error)}`,
+ );
+ }
+}
+
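+// Stacks are materialized on disk as `stacks/<id>-<name>` relative to the
+// working directory; the compose file is written into that folder.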
+async function getStackPath(stack: Stack): Promise<string> {
+ const stackName = stack.name.trim().replace(/\s+/g, "_");
+ const stackId = stack.id;
+
+ if (!stackId) {
+ logger.error("Stack could not be parsed");
+ throw new Error("Stack could not be parsed");
+ }
+
+ return `stacks/${stackId}-${stackName}`;
+}
+
+async function createStackYAML(compose_spec: Stack): Promise<void> {
+ const yaml = YAML.stringify(compose_spec.compose_spec);
+ const stackPath = await getStackPath(compose_spec);
+ await Bun.write(`${stackPath}/docker-compose.yaml`, yaml, {
+ createPath: true,
+ });
+}
+
+export async function deployStack(stack_config: stacks_config): Promise<void> {
+ try {
+ logger.debug(`Deploying Stack: ${JSON.stringify(stack_config)}`);
+
+ if (!stack_config.name) {
+ throw new Error("Stack name needed");
+ }
+
+ const jsonStringStack = {
+ ...stack_config,
+ compose_spec: JSON.stringify(stack_config.compose_spec),
+ };
+
+ const stackId = dbFunctions.addStack(jsonStringStack);
+
+ if (!stackId) {
+ throw new Error("Failed to add stack to database");
+ }
+
+ postToClient({
+ type: "stack-status",
+ data: {
+ stack_id: stackId,
+ status: "pending",
+ message: "Creating stack configuration",
+ },
+ });
+
+ const stackYaml: Stack = {
+ id: stackId,
+ name: stack_config.name,
+ source: stack_config.source,
+ version: stack_config.version,
+      compose_spec: stack_config.compose_spec as unknown as ComposeSpec, // already parsed JSON; cast through unknown to satisfy the Stack type
+ };
+
+ await createStackYAML(stackYaml);
+
+ await runStackCommand(
+ stackId,
+ (cwd, progressCallback) =>
+ DockerCompose.upAll({
+ cwd,
+ log: true,
+ callback: wrapProgressCallback(progressCallback),
+ }),
+ "deploying",
+ );
+
+ postToClient({
+ type: "stack-status",
+ data: {
+ stack_id: stackId,
+ status: "deployed",
+ message: "Stack deployed successfully",
+ },
+ });
+ } catch (error: unknown) {
+ const errorMsg = error instanceof Error ? error.message : String(error);
+ logger.error(errorMsg);
+ postToClient({
+ type: "stack-error",
+ data: {
+ stack_id: 0,
+ action: "deploying",
+ message: errorMsg,
+ timestamp: new Date().toISOString(),
+ },
+ });
+ throw new Error(errorMsg);
+ }
+}
+
+export async function stopStack(stack_id: number): Promise<void> {
+  // Await and discard the command result so the function resolves to void
+  await runStackCommand(
+ stack_id,
+ (cwd, progressCallback) =>
+ DockerCompose.downAll({
+ cwd,
+ log: true,
+ callback: wrapProgressCallback(progressCallback),
+ }),
+ "stopping",
+ );
+}
+
+export async function startStack(stack_id: number): Promise<void> {
+ await runStackCommand(
+ stack_id,
+ (cwd, progressCallback) =>
+ DockerCompose.upAll({
+ cwd,
+ log: true,
+ callback: wrapProgressCallback(progressCallback),
+ }),
+ "starting",
+ );
+}
+
+export async function pullStackImages(stack_id: number): Promise<void> {
+ await runStackCommand(
+ stack_id,
+ (cwd, progressCallback) =>
+ DockerCompose.pullAll({
+ cwd,
+ log: true,
+ callback: wrapProgressCallback(progressCallback),
+ }),
+ "pulling-images",
+ );
+}
+
+export async function restartStack(stack_id: number): Promise<void> {
+ await runStackCommand(
+ stack_id,
+ (cwd, progressCallback) =>
+ DockerCompose.restartAll({
+ cwd,
+ log: true,
+ callback: wrapProgressCallback(progressCallback),
+ }),
+ "restarting",
+ );
+}
+
+export async function getStackStatus(
+ stack_id: number,
+ //biome-ignore lint/suspicious/noExplicitAny:
+): Promise<Record<string, any>> {
+ const status = await runStackCommand(
+ stack_id,
+ async (cwd) => {
+ const rawStatus = await DockerCompose.ps({ cwd });
+ //biome-ignore lint/suspicious/noExplicitAny:
+ return rawStatus.data.services.reduce((acc: any, service: any) => {
+ acc[service.name] = service.state;
+ return acc;
+ }, {});
+ },
+ "status-check",
+ );
+ return status;
+}
+
+export async function removeStack(stack_id: number): Promise<void> {
+ try {
+ const _ = dbFunctions.deleteStack(stack_id);
+
+ await runStackCommand(
+ stack_id,
+ async (cwd, progressCallback) => {
+ await DockerCompose.down({
+ cwd,
+ log: true,
+ callback: wrapProgressCallback(progressCallback),
+ });
+ },
+ "removing",
+ );
+
+ const stackName = await getStackName(stack_id);
+ const stackPath = await getStackPath({
+ id: stack_id,
+ name: stackName,
+ } as Stack);
+
+ try {
+ await rm(stackPath, { recursive: true });
+ } catch (error) {
+ const errorMsg = error instanceof Error ? error.message : String(error);
+ logger.error(errorMsg);
+ postToClient({
+ type: "stack-error",
+ data: {
+ stack_id,
+ action: "removing",
+ message: errorMsg,
+ timestamp: new Date().toISOString(),
+ },
+ });
+ throw new Error(errorMsg);
+ }
+
+ postToClient({
+ type: "stack-removed",
+ data: {
+ stack_id,
+ message: "Stack removed successfully",
+ },
+ });
+ } catch (error: unknown) {
+ const errorMsg = error instanceof Error ? error.message : String(error);
+ logger.error(errorMsg);
+ postToClient({
+ type: "stack-error",
+ data: {
+ stack_id,
+ action: "removing",
+ message: errorMsg,
+ timestamp: new Date().toISOString(),
+ },
+ });
+ throw new Error(errorMsg);
+ }
+}
+
+interface DockerServiceStatus {
+ status: string;
+ ports: string[];
+}
+
+interface StackStatus {
+ services: Record;
+ healthy: number;
+ unhealthy: number;
+ total: number;
+}
+
+type StacksStatus = Record<string, StackStatus>;
+
+export async function getAllStacksStatus(): Promise<StacksStatus> {
+ try {
+ const stacks = dbFunctions.getStacks();
+
+ const statusResults = await Promise.all(
+ stacks.map(async (stack) => {
+ const status = await runStackCommand(
+ stack.id as number,
+ async (cwd) => {
+ const rawStatus = await DockerCompose.ps({ cwd });
+ const services = rawStatus.data.services.reduce(
+              (acc: Record<string, DockerServiceStatus>, service) => {
+ acc[service.name] = {
+ status: service.state,
+ ports: service.ports.map(
+ (port) => `${port.mapped?.address}:${port.mapped?.port}`,
+ ),
+ };
+ return acc;
+ },
+ {},
+ );
+
+ const statusValues = Object.values(services);
+ return {
+ services,
+ healthy: statusValues.filter(
+ (s) => s.status === "running" || s.status.includes("Up"),
+ ).length,
+ unhealthy: statusValues.filter(
+ (s) => s.status !== "running" && !s.status.includes("Up"),
+ ).length,
+ total: statusValues.length,
+ };
+ },
+ "status-check",
+ );
+ return { stackId: stack.id, status };
+ }),
+ );
+
+ return statusResults.reduce((acc, { stackId, status }) => {
+ acc[String(stackId)] = status;
+ return acc;
+ }, {} as StacksStatus);
+ } catch (error: unknown) {
+ const errorMsg = error instanceof Error ? error.message : String(error);
+ logger.error(errorMsg);
+ throw new Error(errorMsg);
+ }
+}
diff --git a/src/core/utils/calculations.ts b/src/core/utils/calculations.ts
new file mode 100644
index 00000000..fbb7a422
--- /dev/null
+++ b/src/core/utils/calculations.ts
@@ -0,0 +1,37 @@
+import type Docker from "dockerode";
+
+const calculateCpuPercent = (stats: Docker.ContainerStats): number => {
+  const cpuDelta =
+    stats.cpu_stats.cpu_usage.total_usage -
+    stats.precpu_stats.cpu_usage.total_usage;
+  const systemDelta =
+    stats.cpu_stats.system_cpu_usage - stats.precpu_stats.system_cpu_usage;
+
+  // Guard against division by zero and the first sample, where the
+  // pre-CPU counters are still empty.
+  if (cpuDelta <= 0 || systemDelta <= 0) {
+    return 0.0000001;
+  }
+
+  // Standard Docker formula: share of the system delta, scaled by the number
+  // of online CPUs (falling back to 1 when the field is missing).
+  return (cpuDelta / systemDelta) * (stats.cpu_stats.online_cpus || 1) * 100;
+};
+
+const calculateMemoryUsage = (stats: Docker.ContainerStats): number => {
+  // `usage` is missing for containers that are not running
+  if (stats.memory_stats.usage == null || !stats.memory_stats.limit) {
+    return 0.0000001;
+  }
+
+  return (stats.memory_stats.usage / stats.memory_stats.limit) * 100;
+};
+
+export { calculateCpuPercent, calculateMemoryUsage };
diff --git a/src/core/utils/change-me-checker.ts b/src/core/utils/change-me-checker.ts
new file mode 100644
index 00000000..d5aefb4b
--- /dev/null
+++ b/src/core/utils/change-me-checker.ts
@@ -0,0 +1,18 @@
+import { readFile } from "node:fs/promises";
+import { logger } from "~/core/utils/logger";
+
+export async function checkFileForChangeMe(filePath: string) {
+ const regex = /change[\W_]*me/i;
+ let content = "";
+ try {
+ content = await readFile(filePath, "utf-8");
+  } catch (error) {
+    logger.error("Error reading file:", error);
+    throw new Error(`Could not read ${filePath} for the change-me check`);
+  }
+
+ if (regex.test(content)) {
+ throw new Error(
+      `The file contains "${regex.exec(content)?.[0]}". Please update it.`,
+ );
+ }
+}
diff --git a/src/core/utils/helpers.ts b/src/core/utils/helpers.ts
new file mode 100644
index 00000000..6c3e79e6
--- /dev/null
+++ b/src/core/utils/helpers.ts
@@ -0,0 +1,13 @@
+import { logger } from "./logger";
+
+export function findObjectByKey<T>(
+ array: T[],
+ key: keyof T,
+ value: T[keyof T],
+): T | undefined {
+ logger.debug(
+ `Searching for key: ${String(key)} with value: ${String(value)}`,
+ );
+ const data = array.find((item) => item[key] === value);
+ return data;
+}
diff --git a/src/core/utils/logger.ts b/src/core/utils/logger.ts
new file mode 100644
index 00000000..f9304ab1
--- /dev/null
+++ b/src/core/utils/logger.ts
@@ -0,0 +1,202 @@
+import path from "node:path";
+import chalk, { type ChalkInstance } from "chalk";
+import type { TransformableInfo } from "logform";
+import { createLogger, format, transports } from "winston";
+import wrapAnsi from "wrap-ansi";
+
+import { dbFunctions } from "~/core/database";
+
+import { logToClients } from "~/routes/live-logs";
+
+import type { log_message } from "~/typings/database";
+
+import { backupInProgress } from "../database/_dbState";
+
+const padNewlines = process.env.PAD_NEW_LINES !== "false";
+
+type LogLevel =
+ | "error"
+ | "warn"
+ | "info"
+ | "debug"
+ | "verbose"
+ | "silly"
+ | "task"
+ | "ut";
+
+// biome-ignore lint/suspicious/noControlCharactersInRegex:
+const ansiRegex = /\x1B\[[0-?9;]*[mG]/g;
+
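+// Wrap long messages to the terminal width and indent continuation lines so
+// they align under the `LEVEL [ timestamp ] - ` prefix.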
+const formatTerminalMessage = (message: string, prefix: string): string => {
+ try {
+ const cleanPrefix = prefix.replace(ansiRegex, "");
+ const maxWidth = process.stdout.columns || 80;
+ const wrapWidth = Math.max(maxWidth - cleanPrefix.length - 3, 20);
+
+ if (!padNewlines) return message;
+
+ const wrapped = wrapAnsi(message, wrapWidth, {
+ trim: true,
+ hard: true,
+ wordWrap: true,
+ });
+
+ return wrapped
+ .split("\n")
+ .map((line, index) => {
+ return index === 0 ? line : `${" ".repeat(cleanPrefix.length)}${line}`;
+ })
+ .join("\n");
+ } catch (error) {
+ console.error("Error formatting terminal message:", error);
+ return message;
+ }
+};
+
+const levelColors: Record = {
+ error: chalk.red.bold,
+ warn: chalk.yellow.bold,
+ info: chalk.green.bold,
+ debug: chalk.blue.bold,
+ verbose: chalk.cyan.bold,
+ silly: chalk.magenta.bold,
+ task: chalk.cyan.bold,
+ ut: chalk.hex("#9D00FF"),
+};
+
+const parseTimestamp = (timestamp: string): string => {
+ const [datePart, timePart] = timestamp.split(" ");
+ const [day, month] = datePart.split("/");
+ const [hours, minutes, seconds] = timePart.split(":");
+ const year = new Date().getFullYear();
+ const date = new Date(
+ year,
+ Number.parseInt(month) - 1,
+ Number.parseInt(day),
+ Number.parseInt(hours),
+ Number.parseInt(minutes),
+ Number.parseInt(seconds),
+ );
+ return date.toISOString();
+};
+
+const handleWebSocketLog = (log: log_message) => {
+ try {
+ logToClients({
+ ...log,
+ timestamp: parseTimestamp(log.timestamp),
+ });
+ } catch (error) {
+ console.error(
+ `WebSocket logging failed: ${
+ error instanceof Error ? error.message : error
+ }`,
+ );
+ }
+};
+
+const handleDatabaseLog = (log: log_message): void => {
+ if (backupInProgress) {
+ return;
+ }
+ try {
+ dbFunctions.addLogEntry({
+ ...log,
+ timestamp: parseTimestamp(log.timestamp),
+ });
+ } catch (error) {
+ console.error(
+ `Database logging failed: ${
+ error instanceof Error ? error.message : error
+ }`,
+ );
+ }
+};
+
+export const logger = createLogger({
+ level: process.env.LOG_LEVEL || "debug",
+ format: format.combine(
+ format.timestamp({ format: "DD/MM HH:mm:ss" }),
+ format((info) => {
+ const stack = new Error().stack?.split("\n");
+ let file = "unknown";
+ let line = 0;
+
+ if (stack) {
+ for (let i = 2; i < stack.length; i++) {
+ const lineStr = stack[i].trim();
+ if (
+ !lineStr.includes("node_modules") &&
+ !lineStr.includes(path.basename(__filename))
+ ) {
+ const matches = lineStr.match(/\(?(.+):(\d+):(\d+)\)?$/);
+ if (matches) {
+ file = path.basename(matches[1]);
+ line = Number.parseInt(matches[2], 10);
+ break;
+ }
+ }
+ }
+ }
+ return { ...info, file, line };
+ })(),
+ format.printf((info) => {
+ const { timestamp, level, message, file, line } =
+ info as TransformableInfo & log_message;
+ let processedLevel = level as LogLevel;
+ let processedMessage = String(message);
+
+ if (processedMessage.startsWith("__task__")) {
+ processedMessage = processedMessage
+ .replace(/__task__/g, "")
+ .trimStart();
+ processedLevel = "task";
+ if (processedMessage.startsWith("__db__")) {
+ processedMessage = processedMessage
+ .replace(/__db__/g, "")
+ .trimStart();
+ processedMessage = `${chalk.magenta("DB")} ${processedMessage}`;
+ }
+ } else if (processedMessage.startsWith("__UT__")) {
+ processedMessage = processedMessage.replace(/__UT__/g, "").trimStart();
+ processedLevel = "ut";
+ }
+
+ if (file.endsWith("plugin.ts")) {
+ processedMessage = `[ ${chalk.grey(file)} ] ${processedMessage}`;
+ }
+
+ const paddedLevel = processedLevel.toUpperCase().padEnd(5);
+ const coloredLevel = (levelColors[processedLevel] || chalk.white)(
+ paddedLevel,
+ );
+ const coloredContext = chalk.cyan(`${file}:${line}`);
+ const coloredTimestamp = chalk.yellow(timestamp);
+
+ const prefix = `${paddedLevel} [ ${timestamp} ] - `;
+ const combinedContent = `${processedMessage} - ${coloredContext}`;
+
+ const formattedMessage = padNewlines
+ ? formatTerminalMessage(combinedContent, prefix)
+ : combinedContent;
+
+ handleDatabaseLog({
+ level: processedLevel,
+ timestamp: timestamp,
+ message: processedMessage,
+ file: file,
+ line: line,
+ });
+ handleWebSocketLog({
+ level: processedLevel,
+ timestamp: timestamp,
+ message: processedMessage,
+ file: file,
+ line: line,
+ });
+
+ return `${coloredLevel} [ ${coloredTimestamp} ] - ${formattedMessage}`;
+ }),
+ ),
+ transports: [new transports.Console()],
+});
diff --git a/src/core/utils/package-json.ts b/src/core/utils/package-json.ts
new file mode 100644
index 00000000..20958a4c
--- /dev/null
+++ b/src/core/utils/package-json.ts
@@ -0,0 +1,25 @@
+import packageJson from "~/../package.json";
+
+const { version, description, license, dependencies, devDependencies } =
+ packageJson;
+let { contributors } = packageJson;
+
+const authorName = packageJson.author.name;
+const authorEmail = packageJson.author.email;
+const authorWebsite = packageJson.author.url;
+
+if (contributors.length === 0) {
+ contributors = [":(" as never];
+}
+
+export {
+ version,
+ description,
+ authorName,
+ authorEmail,
+ authorWebsite,
+ license,
+ contributors,
+ dependencies,
+ devDependencies,
+};
diff --git a/src/core/utils/response-handler.ts b/src/core/utils/response-handler.ts
new file mode 100644
index 00000000..00d5b464
--- /dev/null
+++ b/src/core/utils/response-handler.ts
@@ -0,0 +1,42 @@
+import { logger } from "~/core/utils/logger";
+import type { set } from "~/typings/elysiajs";
+
+export const responseHandler = {
+ error(
+ set: set,
+ error: string,
+ response_message: string,
+ error_code?: number,
+ ) {
+ set.status = error_code || 500;
+ logger.error(`${response_message} - ${error}`);
+ return { success: false, message: response_message, error: String(error) };
+ },
+
+ ok(set: set, response_message: string) {
+ set.status = 200;
+ logger.debug(response_message);
+ return { success: true, message: response_message };
+ },
+
+ simple_error(set: set, response_message: string, status_code?: number) {
+ set.status = status_code || 502;
+ logger.warn(response_message);
+ return { success: false, message: response_message };
+ },
+
+ reject(
+ set: set,
+ reject: CallableFunction,
+ response_message: string,
+ error?: string,
+ ) {
+ set.status = 501;
+ if (error) {
+ logger.error(`${response_message} - ${error}`);
+ } else {
+ logger.error(response_message);
+ }
+ return reject(new Error(response_message));
+ },
+};
diff --git a/src/core/utils/swagger-readme.ts b/src/core/utils/swagger-readme.ts
new file mode 100644
index 00000000..c1457c68
--- /dev/null
+++ b/src/core/utils/swagger-readme.ts
@@ -0,0 +1,66 @@
+export const swaggerReadme: string = `
+[Download API type sheet](/server.d.ts)
+
+
+
+
+Docker infrastructure management API with real-time monitoring and orchestration capabilities.
+
+## Key Features
+
+- **Stack Orchestration**
+ Deploy/update Docker stacks (compose v3+) with custom configurations
+- **Container Monitoring**
+ Real-time metrics (CPU/RAM/status) across multiple Docker hosts
+- **Centralized Logging**
+ Structured log management with retention policies and filtering
+- **Host Management**
+ Multi-host configuration with connection health checks
+- **Plugin System**
+ Extensible architecture for custom monitoring integrations
+
+## Installation & Setup
+
+**Prerequisites**:
+- Node.js 18+
+- Docker Engine 23+
+- Bun runtime
+
+\`\`\`bash
+# Clone repo
+git clone https://github.com/Its4Nik/DockStatAPI.git
+cd DockStatAPI
+# Install dependencies
+bun install
+
+# Start development server
+bun run dev
+\`\`\`
+
+## Configuration
+
+**Environment Variables**:
+\`\`\`ini
+PAD_NEW_LINES=true
+NODE_ENV=production
+LOG_LEVEL=info
+\`\`\`
+
+## Security
+
+1. Always use HTTPS in production
+2. Rotate API keys regularly
+3. Restrict host connections to trusted networks
+4. Enable Docker Engine TLS authentication
+
+## Contributing
+
+1. Fork repository
+2. Create feature branch (\`feat/my-feature\`)
+3. Submit PR with detailed description
+
+**Code Style**:
+- TypeScript strict mode
+- Elysia framework conventions
+- Prettier formatting
+`;
diff --git a/src/index.ts b/src/index.ts
new file mode 100644
index 00000000..e52e8a71
--- /dev/null
+++ b/src/index.ts
@@ -0,0 +1,182 @@
+import { serverTiming } from "@elysiajs/server-timing";
+import staticPlugin from "@elysiajs/static";
+import { swagger } from "@elysiajs/swagger";
+import { Elysia } from "elysia";
+import { dts } from "elysia-remote-dts";
+import { Logestic } from "logestic";
+import { dbFunctions } from "~/core/database";
+import { monitorDockerEvents } from "~/core/docker/monitor";
+import { setSchedules } from "~/core/docker/scheduler";
+import { loadPlugins } from "~/core/plugins/loader";
+import { logger } from "~/core/utils/logger";
+import {
+ authorWebsite,
+ contributors,
+ license,
+} from "~/core/utils/package-json";
+import { swaggerReadme } from "~/core/utils/swagger-readme";
+import { validateApiKey } from "~/middleware/auth";
+import { apiConfigRoutes } from "~/routes/api-config";
+import { dockerRoutes } from "~/routes/docker-manager";
+import { dockerStatsRoutes } from "~/routes/docker-stats";
+import { dockerWebsocketRoutes } from "~/routes/docker-websocket";
+import { liveLogs } from "~/routes/live-logs";
+import { backendLogs } from "~/routes/logs";
+import { stackRoutes } from "~/routes/stacks";
+import type { config } from "~/typings/database";
+import { liveStacks } from "./routes/live-stacks";
+
+console.log("");
+
+logger.info("Starting DockStatAPI");
+
+const DockStatAPI = new Elysia({
+ normalize: true,
+ precompile: true,
+})
+ .use(Logestic.preset("fancy"))
+ .use(staticPlugin())
+ .use(serverTiming())
+ .use(
+ dts("./src/index.ts", {
+ tsconfig: "./tsconfig.json",
+ compilerOptions: {
+ strict: true,
+ },
+ }),
+ )
+ .use(
+ swagger({
+ documentation: {
+ info: {
+ title: "DockStatAPI",
+ version: "3.0.0",
+ description: swaggerReadme,
+ },
+ components: {
+ securitySchemes: {
+ apiKeyAuth: {
+ type: "apiKey" as const,
+ name: "x-api-key",
+ in: "header",
+ description: "API key for authentication",
+ },
+ },
+ },
+ security: [
+ {
+ apiKeyAuth: [],
+ },
+ ],
+ tags: [
+ {
+ name: "Statistics",
+ description:
+ "All endpoints for fetching statistics of hosts / containers",
+ },
+ {
+ name: "Management",
+ description: "Various endpoints for managing DockStatAPI",
+ },
+ {
+ name: "Stacks",
+ description: "DockStat's Stack functionality",
+ },
+ {
+ name: "Utils",
+ description: "Various utilities which might be useful",
+ },
+ ],
+ },
+ }),
+ )
+ .onBeforeHandle(async (context) => {
+ const { path, request, set } = context;
+
+ if (
+ path === "/health" ||
+ path.startsWith("/swagger") ||
+ path.startsWith("/public")
+ ) {
+ logger.info(`Requested unguarded route: ${path}`);
+ return;
+ }
+
+ const validation = await validateApiKey(request, set);
+
+ if (!validation) {
+ throw new Error("Error while checking API key");
+ }
+
+ if (!validation.success) {
+ set.status = 400;
+
+ throw new Error(validation.error);
+ }
+ })
+ .onError(({ code, set, path, error }) => {
+ if (code === "NOT_FOUND") {
+ logger.warn(`Unknown route (${path}), showing error page!`);
+ set.status = 404;
+ set.headers["Content-Type"] = "text/html";
+ return Bun.file("public/404.html");
+ }
+
+ logger.error(`Internal server error at ${path}: ${error.message}`);
+ set.status = 500;
+ set.headers["Content-Type"] = "text/html";
+ return { success: false, message: error.message };
+ })
+ .use(dockerRoutes)
+ .use(dockerStatsRoutes)
+ .use(backendLogs)
+ .use(dockerWebsocketRoutes)
+ .use(apiConfigRoutes)
+ .use(stackRoutes)
+ .use(liveLogs)
+ .use(liveStacks)
+ .get("/health", () => ({ status: "healthy" }), {
+ tags: ["Utils"],
+    response: { status: "healthy" },
+ })
+ .listen(process.env.DOCKSTATAPI_PORT || 3000, ({ hostname, port }) => {
+ console.log("----- [ ############## ]");
+ logger.info(`DockStatAPI is running at http://${hostname}:${port}`);
+ logger.info(
+ `Swagger API Documentation available at http://${hostname}:${port}/swagger`,
+ );
+ logger.info(`License: ${license}`);
+ logger.info(`Author: ${authorWebsite}`);
+ logger.info(`Contributors: ${contributors}`);
+ });
+
+const initializeServer = async () => {
+ try {
+ await loadPlugins("./src/plugins");
+ await setSchedules();
+
+ monitorDockerEvents().catch((error) => {
+ logger.error(`Monitoring Error: ${error}`);
+ });
+
+ const configData = dbFunctions.getConfig() as config[];
+ const apiKey = configData[0].api_key;
+
+ if (apiKey === "changeme") {
+ logger.warn(
+ "Default API Key of 'changeme' detected. Please change your API Key via the `/config/update` route!",
+ );
+ }
+
+ logger.info("Started server");
+ console.log("----- [ ############## ]");
+ } catch (error) {
+ logger.error("Error while starting server:", error);
+ process.exit(1);
+ }
+};
+
+await initializeServer();
+
+export type App = typeof DockStatAPI;
+export { DockStatAPI };
diff --git a/src/middleware/auth.ts b/src/middleware/auth.ts
new file mode 100644
index 00000000..3a730229
--- /dev/null
+++ b/src/middleware/auth.ts
@@ -0,0 +1,89 @@
+import { dbFunctions } from "~/core/database";
+import { logger } from "~/core/utils/logger";
+
+import type { config } from "~/typings/database";
+import type { set } from "~/typings/elysiajs";
+
+export async function hashApiKey(apiKey: string): Promise<string> {
+  logger.debug("Hashing API key");
+  try {
+    // Bun.password defaults to argon2id
+    const hash = await Bun.password.hash(apiKey);
+    logger.debug("API key hashed successfully");
+    return hash;
+ } catch (error) {
+ logger.error("Error hashing API key", error);
+ throw new Error("Failed to hash API key");
+ }
+}
+
+async function validateApiKeyHash(
+ providedKey: string,
+ storedHash: string,
+): Promise<boolean> {
+ logger.debug("Validating API key hash");
+ try {
+ const isValid = await Bun.password.verify(providedKey, storedHash);
+ logger.debug(`API key validation result: ${isValid}`);
+ return isValid;
+ } catch (error) {
+ logger.error("Error validating API key hash", error);
+ return false;
+ }
+}
+
+async function getApiKeyFromDb(
+ apiKey: string,
+): Promise<{ hash: string } | null> {
+ const dbApiKey = (dbFunctions.getConfig() as config[])[0].api_key;
+  logger.debug("Querying database for the stored API key hash");
+ return Promise.resolve({
+ hash: dbApiKey,
+ });
+}
+
+export async function validateApiKey(request: Request, set: set) {
+ const apiKey = request.headers.get("x-api-key");
+
+ if (process.env.NODE_ENV !== "production") {
+ logger.warn(
+ "API Key validation deactivated, since running in development mode",
+ );
+ return { success: true, apiKey };
+ }
+
+ if (!apiKey) {
+ logger.error(`API key missing from request ${request.url}`);
+ set.status = 401;
+ return { error: "API key required", success: false, apiKey };
+ }
+
+ logger.debug("API key validation initiated");
+
+ try {
+ const dbRecord = await getApiKeyFromDb(apiKey);
+
+ if (!dbRecord) {
+ logger.error("API key not found in database");
+ set.status = 401;
+ return { success: false, error: "Invalid API key" };
+ }
+
+ if (dbRecord.hash === "changeme") {
+ logger.error("Please change your API Key!");
+ return { success: true, apiKey };
+ }
+
+ const isValid = await validateApiKeyHash(apiKey, dbRecord.hash);
+
+ if (!isValid) {
+ logger.error("Invalid API key provided");
+ set.status = 401;
+ return { success: false, error: "Invalid API key", apiKey };
+ }
+
+ logger.info("Valid API key used");
+ } catch (error) {
+ logger.error("Error during API key validation", error);
+ set.status = 500;
+ return { success: false, error: "Internal server error", apiKey };
+ }
+}
diff --git a/src/plugins/example.plugin.ts b/src/plugins/example.plugin.ts
new file mode 100644
index 00000000..633eea41
--- /dev/null
+++ b/src/plugins/example.plugin.ts
@@ -0,0 +1,98 @@
+import { logger } from "~/core/utils/logger";
+
+import type { ContainerInfo } from "~/typings/docker";
+import type { Plugin } from "~/typings/plugin";
+
+// See https://outline.itsnik.de/s/dockstat/doc/plugin-development-3UBj9gNMKF for more info
+
+const ExamplePlugin: Plugin = {
+ name: "Example Plugin",
+
+ async onContainerStart(containerInfo: ContainerInfo) {
+ logger.info(
+ `Container ${containerInfo.name} started on ${containerInfo.hostId}`,
+ );
+ },
+
+ async onContainerStop(containerInfo: ContainerInfo) {
+ logger.info(
+ `Container ${containerInfo.name} stopped on ${containerInfo.hostId}`,
+ );
+ },
+
+ async onContainerExit(containerInfo: ContainerInfo) {
+ logger.info(
+ `Container ${containerInfo.name} exited on ${containerInfo.hostId}`,
+ );
+ },
+
+ async onContainerCreate(containerInfo: ContainerInfo) {
+ logger.info(
+ `Container ${containerInfo.name} created on ${containerInfo.hostId}`,
+ );
+ },
+
+ async onContainerDestroy(containerInfo: ContainerInfo) {
+ logger.info(
+ `Container ${containerInfo.name} destroyed on ${containerInfo.hostId}`,
+ );
+ },
+
+ async onContainerPause(containerInfo: ContainerInfo) {
+ logger.info(
+      `Container ${containerInfo.name} paused on ${containerInfo.hostId}`,
+ );
+ },
+
+ async onContainerUnpause(containerInfo: ContainerInfo) {
+ logger.info(
+ `Container ${containerInfo.name} resumed on ${containerInfo.hostId}`,
+ );
+ },
+
+ async onContainerRestart(containerInfo: ContainerInfo) {
+ logger.info(
+ `Container ${containerInfo.name} restarted on ${containerInfo.hostId}`,
+ );
+ },
+
+ async onContainerUpdate(containerInfo: ContainerInfo) {
+ logger.info(
+ `Container ${containerInfo.name} updated on ${containerInfo.hostId}`,
+ );
+ },
+
+ async onContainerRename(containerInfo: ContainerInfo) {
+ logger.info(
+ `Container ${containerInfo.name} renamed on ${containerInfo.hostId}`,
+ );
+ },
+
+ async onContainerHealthStatus(containerInfo: ContainerInfo) {
+ logger.info(
+ `Container ${containerInfo.name} changed status to ${containerInfo.status}`,
+ );
+ },
+
+ async onHostUnreachable(host: string, err: string) {
+ logger.info(`Server ${host} unreachable - ${err}`);
+ },
+
+ async onHostReachableAgain(host: string) {
+ logger.info(`Server ${host} reachable`);
+ },
+
+ async handleContainerDie(containerInfo: ContainerInfo) {
+ logger.info(
+ `Container ${containerInfo.name} died on ${containerInfo.hostId}`,
+ );
+ },
+
+ async onContainerKill(containerInfo: ContainerInfo) {
+ logger.info(
+ `Container ${containerInfo.name} killed on ${containerInfo.hostId}`,
+ );
+ },
+} satisfies Plugin;
+
+export default ExamplePlugin;
diff --git a/src/plugins/telegram.plugin.ts b/src/plugins/telegram.plugin.ts
new file mode 100644
index 00000000..0b83d434
--- /dev/null
+++ b/src/plugins/telegram.plugin.ts
@@ -0,0 +1,35 @@
+import { logger } from "~/core/utils/logger";
+
+import type { ContainerInfo } from "~/typings/docker";
+import type { Plugin } from "~/typings/plugin";
+
+const TELEGRAM_BOT_TOKEN = "CHANGE_ME"; // Replace with your bot token
+const TELEGRAM_CHAT_ID = "CHANGE_ME"; // Replace with your chat ID
+
+const TelegramNotificationPlugin: Plugin = {
+ name: "Telegram Notification Plugin",
+ async onContainerStart(containerInfo: ContainerInfo) {
+ const message = `Container Started: ${containerInfo.name} on ${containerInfo.hostId}`;
+ try {
+ const response = await fetch(
+ `https://api.telegram.org/bot${TELEGRAM_BOT_TOKEN}/sendMessage`,
+ {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({
+ chat_id: TELEGRAM_CHAT_ID,
+ text: message,
+ }),
+ },
+ );
+      if (!response.ok) {
+        logger.error(`HTTP error ${response.status}`);
+        return;
+      }
+      logger.info("Telegram notification sent.");
+ } catch (error) {
+ logger.error("Failed to send Telegram notification", error as string);
+ }
+ },
+} satisfies Plugin;
+
+export default TelegramNotificationPlugin;
diff --git a/src/routes/api-config.ts b/src/routes/api-config.ts
new file mode 100644
index 00000000..6e2de1eb
--- /dev/null
+++ b/src/routes/api-config.ts
@@ -0,0 +1,585 @@
+import { existsSync, readdirSync, unlinkSync } from "node:fs";
+import { Elysia, t } from "elysia";
+import { dbFunctions } from "~/core/database";
+import { backupDir } from "~/core/database/backup";
+import { pluginManager } from "~/core/plugins/plugin-manager";
+import { logger } from "~/core/utils/logger";
+import {
+ authorEmail,
+ authorName,
+ authorWebsite,
+ contributors,
+ dependencies,
+ description,
+ devDependencies,
+ license,
+ version,
+} from "~/core/utils/package-json";
+import { responseHandler } from "~/core/utils/response-handler";
+import { hashApiKey } from "~/middleware/auth";
+import type { config } from "~/typings/database";
+
+export const apiConfigRoutes = new Elysia({ prefix: "/config" })
+ .get(
+ "",
+ async ({ set }) => {
+ try {
+ const data = dbFunctions.getConfig() as config[];
+ const distinct = data[0];
+ set.status = 200;
+
+ logger.debug("Fetched backend config");
+ return distinct;
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(errMsg);
+ }
+ },
+ {
+ detail: {
+ tags: ["Management"],
+ description:
+ "Returns current API configuration including data retention policies and security settings",
+ responses: {
+ "200": {
+ description: "Successfully retrieved configuration",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ fetching_interval: {
+ type: "number",
+ example: 5,
+ },
+ keep_data_for: {
+ type: "number",
+ example: 7,
+ },
+ api_key: {
+ type: "string",
+ example: "hashed_api_key",
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error retrieving configuration",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Error getting the DockStatAPI config",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ )
+ .get(
+ "/plugins",
+ ({ set }) => {
+ try {
+ return pluginManager.getLoadedPlugins();
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(errMsg);
+ }
+ },
+ {
+ detail: {
+ tags: ["Management"],
+ description:
+ "Lists all active plugins with their registration details and status",
+ responses: {
+ "200": {
+ description: "Successfully retrieved plugins",
+ content: {
+ "application/json": {
+ schema: {
+ type: "array",
+ items: {
+ type: "object",
+ properties: {
+ name: {
+ type: "string",
+ example: "example-plugin",
+ },
+ version: {
+ type: "string",
+ example: "1.0.0",
+ },
+ status: {
+ type: "string",
+ example: "active",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error retrieving plugins",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Error getting all registered plugins",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ )
+ .post(
+ "/update",
+ async ({ set, body }) => {
+ try {
+ const { fetching_interval, keep_data_for, api_key } = body;
+
+ dbFunctions.updateConfig(
+ fetching_interval,
+ keep_data_for,
+ await hashApiKey(api_key),
+ );
+ return responseHandler.ok(set, "Updated DockStatAPI config");
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(errMsg);
+ }
+ },
+ {
+ detail: {
+ tags: ["Management"],
+ description:
+ "Modifies core API settings including data collection intervals, retention periods, and security credentials",
+ responses: {
+ "200": {
+ description: "Successfully updated configuration",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ message: {
+ type: "string",
+ example: "Updated DockStatAPI config",
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error updating configuration",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Error updating the DockStatAPI config",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ body: t.Object({
+ fetching_interval: t.Number(),
+ keep_data_for: t.Number(),
+ api_key: t.String(),
+ }),
+ },
+ )
+ .get(
+ "/package",
+ async () => {
+ try {
+ logger.debug("Fetching package.json");
+ const data = {
+ version: version,
+ description: description,
+ license: license,
+ authorName: authorName,
+ authorEmail: authorEmail,
+ authorWebsite: authorWebsite,
+ contributors: contributors,
+ dependencies: dependencies,
+ devDependencies: devDependencies,
+ };
+
+ logger.debug(
+ `Received: ${JSON.stringify(data).length} chars in package.json`,
+ );
+
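+        // Crude sanity check: if package.json failed to load, the object above serializes to almost nothing.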
+ if (JSON.stringify(data).length <= 10) {
+ throw new Error("Failed to read package.json");
+ }
+
+ return data;
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(errMsg);
+ }
+ },
+ {
+ detail: {
+ tags: ["Management"],
+ description:
+ "Displays package metadata including dependencies, contributors, and licensing information",
+ responses: {
+ "200": {
+ description: "Successfully retrieved package information",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ version: {
+ type: "string",
+ example: "3.0.0",
+ },
+ description: {
+ type: "string",
+ example:
+ "DockStatAPI is an API backend featuring plugins and more for DockStat",
+ },
+ license: {
+ type: "string",
+ example: "CC BY-NC 4.0",
+ },
+ authorName: {
+ type: "string",
+ example: "ItsNik",
+ },
+ authorEmail: {
+ type: "string",
+ example: "info@itsnik.de",
+ },
+ authorWebsite: {
+ type: "string",
+ example: "https://github.com/Its4Nik",
+ },
+ contributors: {
+ type: "array",
+ items: {
+ type: "string",
+ },
+ example: [],
+ },
+ dependencies: {
+ type: "object",
+ example: {
+ "@elysiajs/server-timing": "^1.2.1",
+ "@elysiajs/static": "^1.2.0",
+ },
+ },
+ devDependencies: {
+ type: "object",
+ example: {
+ "@biomejs/biome": "1.9.4",
+ "@types/dockerode": "^3.3.38",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error retrieving package information",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Error while reading package.json",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ )
+ .post(
+ "/backup",
+ async ({ set }) => {
+ try {
+ const backupFilename = await dbFunctions.backupDatabase();
+ return responseHandler.ok(set, backupFilename);
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(errMsg);
+ }
+ },
+ {
+ detail: {
+ tags: ["Management"],
+ description: "Backs up the internal database",
+ responses: {
+ "200": {
+ description: "Successfully created backup",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ message: {
+ type: "string",
+ example: "backup_2024-03-20_12-00-00.db.bak",
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error creating backup",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Error backing up",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ )
+ .get(
+ "/backup",
+ async ({ set }) => {
+ try {
+ const backupFiles = readdirSync(backupDir);
+
+ const filteredFiles = backupFiles.filter((file: string) => {
+ return !(
+ file.endsWith(".db") ||
+ file.endsWith(".db-shm") ||
+ file.endsWith(".db-wal")
+ );
+ });
+
+ return filteredFiles;
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(errMsg);
+ }
+ },
+ {
+ detail: {
+ tags: ["Management"],
+ description: "Lists all available backups",
+ responses: {
+ "200": {
+ description: "Successfully retrieved backup list",
+ content: {
+ "application/json": {
+ schema: {
+ type: "array",
+ items: {
+ type: "string",
+ },
+ example: [
+ "backup_2024-03-20_12-00-00.db.bak",
+ "backup_2024-03-19_12-00-00.db.bak",
+ ],
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error retrieving backup list",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Reading Backup directory",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ )
+
+ .get(
+ "/backup/download",
+ async ({ query, set }) => {
+ try {
+ const filename = query.filename || dbFunctions.findLatestBackup();
+ const filePath = `${backupDir}/${filename}`;
+
+ if (!existsSync(filePath)) {
+ throw new Error("Backup file not found");
+ }
+
+ set.headers["Content-Type"] = "application/octet-stream";
+ set.headers["Content-Disposition"] =
+ `attachment; filename="${filename}"`;
+ return Bun.file(filePath);
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(errMsg);
+ }
+ },
+ {
+ detail: {
+ tags: ["Management"],
+ description:
+ "Download a specific backup or the latest if no filename is provided",
+ responses: {
+ "200": {
+ description: "Successfully downloaded backup file",
+ content: {
+ "application/octet-stream": {
+ schema: {
+ type: "string",
+ format: "binary",
+ example: "Binary backup file content",
+ },
+ },
+ },
+ headers: {
+ "Content-Disposition": {
+ schema: {
+ type: "string",
+ example:
+ 'attachment; filename="backup_2024-03-20_12-00-00.db.bak"',
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error downloading backup",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Backup download failed",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ query: t.Object({
+ filename: t.Optional(t.String()),
+ }),
+ },
+ )
+ .post(
+ "/restore",
+ async ({ body, set }) => {
+ try {
+ const { file } = body;
+
+ set.headers["Content-Type"] = "text/html";
+
+ if (!file) {
+ throw new Error("No file uploaded");
+ }
+
+ if (!file.name.endsWith(".db.bak")) {
+ throw new Error("Invalid file type. Expected .db.bak");
+ }
+
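+        // Write the upload to a temporary file, restore the database from it, then remove the temp file.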
+ const tempPath = `${backupDir}/upload_${Date.now()}.db.bak`;
+ const fileBuffer = await file.arrayBuffer();
+
+ await Bun.write(tempPath, fileBuffer);
+ dbFunctions.restoreDatabase(tempPath);
+ unlinkSync(tempPath);
+
+ return responseHandler.ok(set, "Database restored successfully");
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(errMsg);
+ }
+ },
+ {
+ body: t.Object({ file: t.File() }),
+ detail: {
+ tags: ["Management"],
+ description: "Restore database from uploaded backup file",
+ responses: {
+ "200": {
+ description: "Successfully restored database",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ message: {
+ type: "string",
+ example: "Database restored successfully",
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error restoring database",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Database restoration error",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ );
diff --git a/src/routes/docker-manager.ts b/src/routes/docker-manager.ts
new file mode 100644
index 00000000..fcd877e9
--- /dev/null
+++ b/src/routes/docker-manager.ts
@@ -0,0 +1,255 @@
+import { Elysia, t } from "elysia";
+import { dbFunctions } from "~/core/database";
+import { logger } from "~/core/utils/logger";
+import { responseHandler } from "~/core/utils/response-handler";
+import type { DockerHost } from "~/typings/docker";
+
+export const dockerRoutes = new Elysia({ prefix: "/docker-config" })
+ .post(
+ "/add-host",
+ async ({ set, body }) => {
+ try {
+ dbFunctions.addDockerHost(body as DockerHost);
+ return responseHandler.ok(set, `Added docker host (${body.name})`);
+ } catch (error: unknown) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(errMsg);
+ }
+ },
+ {
+ detail: {
+ tags: ["Management"],
+ description:
+ "Registers a new Docker host to the monitoring system with connection details",
+ responses: {
+ "200": {
+ description: "Successfully added Docker host",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ message: {
+ type: "string",
+ example: "Added docker host (Localhost)",
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error adding Docker host",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Error adding docker Host",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ body: t.Object({
+ name: t.String(),
+ hostAddress: t.String(),
+ secure: t.Boolean(),
+ }),
+ },
+ )
+
+ .post(
+ "/update-host",
+ async ({ set, body }) => {
+ try {
+ set.status = 200;
+ dbFunctions.updateDockerHost(body);
+ return responseHandler.ok(set, `Updated docker host (${body.id})`);
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(errMsg);
+ }
+ },
+ {
+ detail: {
+ tags: ["Management"],
+ description:
+ "Modifies existing Docker host configuration parameters (name, address, security)",
+ responses: {
+ "200": {
+ description: "Successfully updated Docker host",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ message: {
+ type: "string",
+ example: "Updated docker host (1)",
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error updating Docker host",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Failed to update host",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ body: t.Object({
+ id: t.Number(),
+ name: t.String(),
+ hostAddress: t.String(),
+ secure: t.Boolean(),
+ }),
+ },
+ )
+
+ .get(
+ "/hosts",
+ async ({ set }) => {
+ try {
+ const dockerHosts = dbFunctions.getDockerHosts();
+
+ logger.debug("Retrieved docker hosts");
+ return dockerHosts;
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(errMsg);
+ }
+ },
+ {
+ detail: {
+ tags: ["Management"],
+ description:
+ "Lists all configured Docker hosts with their connection settings",
+ responses: {
+ "200": {
+ description: "Successfully retrieved Docker hosts",
+ content: {
+ "application/json": {
+ schema: {
+ type: "array",
+ items: {
+ type: "object",
+ properties: {
+ id: {
+ type: "number",
+ example: 1,
+ },
+ name: {
+ type: "string",
+ example: "Localhost",
+ },
+ hostAddress: {
+ type: "string",
+ example: "localhost:2375",
+ },
+ secure: {
+ type: "boolean",
+ example: false,
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error retrieving Docker hosts",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Failed to retrieve hosts",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ )
+
+ .delete(
+ "/hosts/:id",
+ async ({ set, params }) => {
+ try {
+ set.status = 200;
+ dbFunctions.deleteDockerHost(params.id);
+ return responseHandler.ok(set, `Deleted docker host (${params.id})`);
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(errMsg);
+ }
+ },
+ {
+ detail: {
+ tags: ["Management"],
+ description:
+ "Removes Docker host from monitoring system and clears associated data",
+ responses: {
+ "200": {
+ description: "Successfully deleted Docker host",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ message: {
+ type: "string",
+ example: "Deleted docker host (1)",
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error deleting Docker host",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Failed to delete host",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ params: t.Object({
+ id: t.Number(),
+ }),
+ },
+ );
diff --git a/src/routes/docker-stats.ts b/src/routes/docker-stats.ts
new file mode 100644
index 00000000..aa968d2d
--- /dev/null
+++ b/src/routes/docker-stats.ts
@@ -0,0 +1,598 @@
+import type Docker from "dockerode";
+import { Elysia } from "elysia";
+import { dbFunctions } from "~/core/database";
+import { getDockerClient } from "~/core/docker/client";
+import {
+ calculateCpuPercent,
+ calculateMemoryUsage,
+} from "~/core/utils/calculations";
+import { findObjectByKey } from "~/core/utils/helpers";
+import { logger } from "~/core/utils/logger";
+import { responseHandler } from "~/core/utils/response-handler";
+import type { ContainerInfo, DockerHost, HostStats } from "~/typings/docker";
+import type { DockerInfo } from "~/typings/dockerode";
+
+export const dockerStatsRoutes = new Elysia({ prefix: "/docker" })
+ .get(
+ "/containers",
+ async ({ set }) => {
+ try {
+ const hosts = dbFunctions.getDockerHosts() as DockerHost[];
+ const containers: ContainerInfo[] = [];
+
+ await Promise.all(
+ hosts.map(async (host) => {
+ try {
+ const docker = getDockerClient(host);
+ try {
+ await docker.ping();
+ } catch (pingError) {
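+              // Host did not answer the ping; record the failure and skip this host's containers.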
+ return responseHandler.error(
+ set,
+ pingError as string,
+ "Docker host connection failed",
+ );
+ }
+
+ const hostContainers = await docker.listContainers({ all: true });
+
+ await Promise.all(
+ hostContainers.map(async (containerInfo) => {
+ try {
+ const container = docker.getContainer(containerInfo.Id);
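+                    // Grab a single stats snapshot ({ stream: false }) rather than a live stream.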
+                    const stats = await new Promise<Docker.ContainerStats>(
+ (resolve, reject) => {
+ container.stats({ stream: false }, (error, stats) => {
+ if (error) {
+ return responseHandler.reject(
+ set,
+ reject,
+ "An error occurred",
+ error,
+ );
+ }
+ if (!stats) {
+ return responseHandler.reject(
+ set,
+ reject,
+ "No stats available",
+ );
+ }
+ resolve(stats);
+ });
+ },
+ );
+
+ containers.push({
+ id: containerInfo.Id,
+ hostId: `${host.id}`,
+ name: containerInfo.Names[0].replace(/^\//, ""),
+ image: containerInfo.Image,
+ status: containerInfo.Status,
+ state: containerInfo.State,
+ cpuUsage: calculateCpuPercent(stats),
+ memoryUsage: calculateMemoryUsage(stats),
+ stats: stats,
+ info: containerInfo,
+ });
+ } catch (containerError) {
+ logger.error(
+ "Error fetching container stats,",
+ containerError,
+ );
+ }
+ }),
+ );
+ logger.debug(`Fetched stats for ${host.name}`);
+ } catch (error) {
+ const errMsg =
+ error instanceof Error ? error.message : String(error);
+ throw new Error(errMsg);
+ }
+ }),
+ );
+
+ logger.debug("Fetched all containers across all hosts");
+ return { containers };
+ } catch (error) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new Error(errMsg);
+ }
+ },
+ {
+ detail: {
+ tags: ["Statistics"],
+ description:
+ "Collects real-time statistics for all Docker containers across monitored hosts, including CPU and memory utilization",
+ responses: {
+ "200": {
+ description: "Successfully retrieved container statistics",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ containers: {
+ type: "array",
+ items: {
+ type: "object",
+ properties: {
+ id: {
+ type: "string",
+ example: "abc123def456",
+ },
+ hostId: {
+ type: "string",
+ example: "1",
+ },
+ name: {
+ type: "string",
+ example: "example-container",
+ },
+ image: {
+ type: "string",
+ example: "nginx:latest",
+ },
+ status: {
+ type: "string",
+ example: "running",
+ },
+ state: {
+ type: "string",
+ example: "running",
+ },
+ cpuUsage: {
+ type: "number",
+ example: 0.5,
+ },
+ memoryUsage: {
+ type: "number",
+ example: 1024,
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error retrieving container statistics",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Failed to retrieve containers",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ )
+ .get(
+ "/hosts",
+ async ({ set }) => {
+ try {
+ const hosts = dbFunctions.getDockerHosts() as DockerHost[];
+
+ const stats: HostStats[] = [];
+
+ for (const host of hosts) {
+ const docker = getDockerClient(host);
+ const info: DockerInfo = await docker.info();
+
+ const config: HostStats = {
+ hostId: host.id as number,
+ hostName: host.name,
+ dockerVersion: info.ServerVersion,
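+          // Note: docker.info() has no API-version field; "Driver" below is the storage driver name (e.g. overlay2).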
+ apiVersion: info.Driver,
+ os: info.OperatingSystem,
+ architecture: info.Architecture,
+ totalMemory: info.MemTotal,
+ totalCPU: info.NCPU,
+ labels: info.Labels,
+ images: info.Images,
+ containers: info.Containers,
+ containersPaused: info.ContainersPaused,
+ containersRunning: info.ContainersRunning,
+ containersStopped: info.ContainersStopped,
+ };
+
+ stats.push(config);
+ }
+
+      logger.debug("Fetched stats for all hosts");
+ return stats;
+ } catch (error) {
+ return responseHandler.error(
+ set,
+ error as string,
+ "Failed to retrieve host config",
+ );
+ }
+ },
+ {
+ detail: {
+ tags: ["Statistics"],
+ description:
+          "Provides detailed system metrics and Docker runtime information for all monitored hosts",
+ responses: {
+ "200": {
+ description: "Successfully retrieved host statistics",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ hostId: {
+ type: "number",
+ example: 1,
+ },
+ hostName: {
+ type: "string",
+ example: "Localhost",
+ },
+ dockerVersion: {
+ type: "string",
+ example: "24.0.5",
+ },
+ apiVersion: {
+ type: "string",
+ example: "1.41",
+ },
+ os: {
+ type: "string",
+ example: "Linux",
+ },
+ architecture: {
+ type: "string",
+ example: "x86_64",
+ },
+ totalMemory: {
+ type: "number",
+ example: 16777216,
+ },
+ totalCPU: {
+ type: "number",
+ example: 4,
+ },
+ labels: {
+ type: "array",
+ items: {
+ type: "string",
+ },
+ example: ["environment=production"],
+ },
+ images: {
+ type: "number",
+ example: 10,
+ },
+ containers: {
+ type: "number",
+ example: 5,
+ },
+ containersPaused: {
+ type: "number",
+ example: 0,
+ },
+ containersRunning: {
+ type: "number",
+ example: 4,
+ },
+ containersStopped: {
+ type: "number",
+ example: 1,
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error retrieving host statistics",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Failed to retrieve host config",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ )
+ .get(
+ "/hosts/:id",
+ async ({ params, set }) => {
+ try {
+ const hosts = dbFunctions.getDockerHosts() as DockerHost[];
+
+ const host = findObjectByKey(hosts, "id", Number(params.id));
+ if (!host) {
+ return responseHandler.simple_error(
+ set,
+ `Host (${params.id}) not found`,
+ );
+ }
+
+ const docker = getDockerClient(host);
+ const info: DockerInfo = await docker.info();
+
+ const config: HostStats = {
+ hostId: host.id as number,
+ hostName: host.name,
+ dockerVersion: info.ServerVersion,
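+        // As in /docker/hosts above: "Driver" is the storage driver, not an API version.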
+ apiVersion: info.Driver,
+ os: info.OperatingSystem,
+ architecture: info.Architecture,
+ totalMemory: info.MemTotal,
+ totalCPU: info.NCPU,
+ labels: info.Labels,
+ images: info.Images,
+ containers: info.Containers,
+ containersPaused: info.ContainersPaused,
+ containersRunning: info.ContainersRunning,
+ containersStopped: info.ContainersStopped,
+ };
+
+ logger.debug(`Fetched config for ${host.name}`);
+ return config;
+ } catch (error) {
+ return responseHandler.error(
+ set,
+ error as string,
+ "Failed to retrieve host config",
+ );
+ }
+ },
+ {
+ detail: {
+ tags: ["Statistics"],
+ description:
+ "Provides detailed system metrics and Docker runtime information for specified host",
+ responses: {
+ "200": {
+ description: "Successfully retrieved host statistics",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ hostId: {
+ type: "number",
+ example: 1,
+ },
+ hostName: {
+ type: "string",
+ example: "Localhost",
+ },
+ dockerVersion: {
+ type: "string",
+ example: "24.0.5",
+ },
+ apiVersion: {
+ type: "string",
+ example: "1.41",
+ },
+ os: {
+ type: "string",
+ example: "Linux",
+ },
+ architecture: {
+ type: "string",
+ example: "x86_64",
+ },
+ totalMemory: {
+ type: "number",
+ example: 16777216,
+ },
+ totalCPU: {
+ type: "number",
+ example: 4,
+ },
+ labels: {
+ type: "array",
+ items: {
+ type: "string",
+ },
+ example: ["environment=production"],
+ },
+ images: {
+ type: "number",
+ example: 10,
+ },
+ containers: {
+ type: "number",
+ example: 5,
+ },
+ containersPaused: {
+ type: "number",
+ example: 0,
+ },
+ containersRunning: {
+ type: "number",
+ example: 4,
+ },
+ containersStopped: {
+ type: "number",
+ example: 1,
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error retrieving host statistics",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Failed to retrieve host config",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ );
diff --git a/src/routes/docker-websocket.ts b/src/routes/docker-websocket.ts
new file mode 100644
index 00000000..51aefcd8
--- /dev/null
+++ b/src/routes/docker-websocket.ts
@@ -0,0 +1,136 @@
+import type { Readable } from "node:stream";
+import { Elysia } from "elysia";
+import type { ElysiaWS } from "elysia/dist/ws";
+import split2 from "split2";
+
+import { dbFunctions } from "~/core/database";
+import { getDockerClient } from "~/core/docker/client";
+import {
+ calculateCpuPercent,
+ calculateMemoryUsage,
+} from "~/core/utils/calculations";
+import { logger } from "~/core/utils/logger";
+import { responseHandler } from "~/core/utils/response-handler";
+
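+// Bookkeeping: every open socket plus its per-container stat streams, so close() can tear them down.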
+//biome-ignore lint/suspicious/noExplicitAny:
+const activeDockerConnections = new Set<ElysiaWS<any>>();
+const connectionStreams = new Map<
+  //biome-ignore lint/suspicious/noExplicitAny:
+  ElysiaWS<any>,
+  Array<{ statsStream: Readable; splitStream: ReturnType<typeof split2> }>
+>();
+
+export const dockerWebsocketRoutes = new Elysia({ prefix: "/docker" }).ws(
+ "/stats",
+ {
+ async open(ws) {
+ activeDockerConnections.add(ws);
+ connectionStreams.set(ws, []);
+
+ ws.send(JSON.stringify({ message: "Connection established" }));
+ logger.info(`New Docker WebSocket established (${ws.id})`);
+
+ try {
+ const hosts = dbFunctions.getDockerHosts();
+ logger.debug(`Retrieved ${hosts.length} docker host(s)`);
+
+ for (const host of hosts) {
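+          // readyState 1 === OPEN; stop wiring up streams if the client already disconnected.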
+ if (ws.readyState !== 1) {
+ break;
+ }
+
+ const docker = getDockerClient(host);
+ await docker.ping();
+ const containers = await docker.listContainers({ all: true });
+ logger.debug(
+ `Found ${containers.length} containers on ${host.name} (id: ${host.id})`,
+ );
+
+ for (const containerInfo of containers) {
+ if (ws.readyState !== 1) {
+ break;
+ }
+
+ const container = docker.getContainer(containerInfo.Id);
+ const statsStream = (await container.stats({
+ stream: true,
+ })) as Readable;
+ const splitStream = split2();
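+            // Docker stats arrive as newline-delimited JSON; split2 emits one complete line per "data" event.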
+
+ connectionStreams.get(ws)?.push({ statsStream, splitStream });
+
+ statsStream
+ .on("close", () => splitStream.destroy())
+ .pipe(splitStream)
+ .on("data", (line: string) => {
+ if (ws.readyState !== 1 || !line) {
+ return;
+ }
+ try {
+ const stats = JSON.parse(line);
+ ws.send(
+ JSON.stringify({
+ id: containerInfo.Id,
+ hostId: host.id,
+ name: containerInfo.Names[0].replace(/^\//, ""),
+ image: containerInfo.Image,
+ status: containerInfo.Status,
+ state: containerInfo.State,
+ cpuUsage: calculateCpuPercent(stats) || 0,
+ memoryUsage: calculateMemoryUsage(stats) || 0,
+ }),
+ );
+ } catch (error) {
+ logger.error(`Parse error: ${error}`);
+ }
+ })
+ .on("error", (error: Error) => {
+ logger.error(`Stream error: ${error}`);
+ statsStream.destroy();
+ ws.send(
+ JSON.stringify({
+              hostId: host.id,
+ containerId: containerInfo.Id,
+ error: `Stats stream error: ${error}`,
+ }),
+ );
+ });
+ }
+ }
+ } catch (error) {
+ logger.error(`Connection error: ${error}`);
+ ws.send(
+ JSON.stringify(
+ responseHandler.error(
+ { headers: {} },
+ error as string,
+ "Docker connection failed",
+ 500,
+ ),
+ ),
+ );
+ }
+ },
+
+ message(ws, message) {
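+      // Keep-alive: acknowledge client "pong" messages with a WebSocket pong frame.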
+ if (message === "pong") ws.pong();
+ },
+
+ close(ws) {
+ logger.info(`Closing connection ${ws.id}`);
+ activeDockerConnections.delete(ws);
+
+ const streams = connectionStreams.get(ws) || [];
+ for (const { statsStream, splitStream } of streams) {
+ try {
+ statsStream.unpipe(splitStream);
+ statsStream.destroy();
+ splitStream.destroy();
+ } catch (error) {
+ logger.error(`Cleanup error: ${error}`);
+ }
+ }
+ connectionStreams.delete(ws);
+ },
+ },
+);
diff --git a/src/routes/live-logs.ts b/src/routes/live-logs.ts
new file mode 100644
index 00000000..cf6e4b5b
--- /dev/null
+++ b/src/routes/live-logs.ts
@@ -0,0 +1,38 @@
+import { Elysia } from "elysia";
+import type { ElysiaWS } from "elysia/dist/ws";
+
+import { logger } from "~/core/utils/logger";
+
+import type { log_message } from "~/typings/database";
+
+//biome-ignore lint/suspicious/noExplicitAny:
+const activeConnections = new Set<ElysiaWS<any>>();
+
+export const liveLogs = new Elysia({ prefix: "/logs" }).ws("/ws", {
+ open(ws) {
+ activeConnections.add(ws);
+ ws.send({
+ message: "Connection established",
+ level: "info",
+ timestamp: new Date().toISOString(),
+ file: "live-logs.ts",
+ line: 14,
+ });
+ logger.info(`New Logs WebSocket established (${ws.id})`);
+ },
+ close(ws) {
+ logger.info(`Logs WebSocket closed (${ws.id})`);
+ activeConnections.delete(ws);
+ },
+});
+
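+// Broadcast a structured log record to every connected socket, pruning any that fail to send.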
+export function logToClients(data: log_message) {
+ for (const ws of activeConnections) {
+ try {
+ ws.send(JSON.stringify(data));
+ } catch (error) {
+ activeConnections.delete(ws);
+ logger.error("Failed to send to WebSocket:", error);
+ }
+ }
+}
diff --git a/src/routes/live-stacks.ts b/src/routes/live-stacks.ts
new file mode 100644
index 00000000..77aa2857
--- /dev/null
+++ b/src/routes/live-stacks.ts
@@ -0,0 +1,30 @@
+import { Elysia } from "elysia";
+import type { ElysiaWS } from "elysia/dist/ws";
+import { logger } from "~/core/utils/logger";
+import type { stackSocketMessage } from "~/typings/websocket";
+
+//biome-ignore lint/suspicious/noExplicitAny: Any = Connections
+const activeConnections = new Set<ElysiaWS<any>>();
+
+export const liveStacks = new Elysia().ws("/stacks", {
+ open(ws) {
+ activeConnections.add(ws);
+ ws.send({ message: "Connection established" });
+ logger.info(`New Stacks WebSocket established (${ws.id})`);
+ },
+ close(ws) {
+ logger.info(`Stacks WebSocket closed (${ws.id})`);
+ activeConnections.delete(ws);
+ },
+});
+
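+// Push stack lifecycle updates to every subscribed client, pruning dead sockets.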
+export function postToClient(data: stackSocketMessage) {
+ for (const ws of activeConnections) {
+ try {
+ ws.send(JSON.stringify(data));
+ } catch (error) {
+ activeConnections.delete(ws);
+ logger.error("Failed to send to WebSocket:", error);
+ }
+ }
+}
diff --git a/src/routes/logs.ts b/src/routes/logs.ts
new file mode 100644
index 00000000..17da1fb7
--- /dev/null
+++ b/src/routes/logs.ts
@@ -0,0 +1,261 @@
+import { Elysia } from "elysia";
+
+import { dbFunctions } from "~/core/database";
+import { logger } from "~/core/utils/logger";
+
+export const backendLogs = new Elysia({ prefix: "/logs" })
+ .get(
+ "",
+ async ({ set }) => {
+ try {
+ const logs = dbFunctions.getAllLogs();
+ logger.debug("Retrieved all logs");
+ return logs;
+ } catch (error) {
+ set.status = 500;
+ logger.error("Failed to retrieve logs,", error);
+ return { error: "Failed to retrieve logs" };
+ }
+ },
+ {
+ detail: {
+ tags: ["Management"],
+ description:
+ "Retrieves complete application log history from persistent storage",
+ responses: {
+ "200": {
+ description: "Successfully retrieved logs",
+ content: {
+ "application/json": {
+ schema: {
+ type: "array",
+ items: {
+ type: "object",
+ properties: {
+ id: {
+ type: "number",
+ example: 1,
+ },
+ level: {
+ type: "string",
+ example: "info",
+ },
+ message: {
+ type: "string",
+ example: "Application started",
+ },
+ timestamp: {
+ type: "string",
+ example: "2024-03-20T12:00:00Z",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ "500": {
+ description: "Error retrieving logs",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Failed to retrieve logs",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ )
+
+ .get(
+ "/:level",
+ async ({ params: { level }, set }) => {
+ try {
+ const logs = dbFunctions.getLogsByLevel(level);
+
+ logger.debug(`Retrieved logs (level: ${level})`);
+ return logs;
+ } catch (error) {
+ set.status = 500;
+      logger.error("Failed to retrieve logs,", error);
+ return { error: "Failed to retrieve logs" };
+ }
+ },
+ {
+ detail: {
+ tags: ["Management"],
+ description:
+ "Filters logs by severity level (debug, info, warn, error, fatal)",
+ responses: {
+ "200": {
+ description: "Successfully retrieved logs by level",
+ content: {
+ "application/json": {
+ schema: {
+ type: "array",
+ items: {
+ type: "object",
+ properties: {
+ id: {
+ type: "number",
+ example: 1,
+ },
+ level: {
+ type: "string",
+ example: "info",
+ },
+ message: {
+ type: "string",
+ example: "Application started",
+ },
+ timestamp: {
+ type: "string",
+ example: "2024-03-20T12:00:00Z",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ "500": {
+ description: "Error retrieving logs",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Failed to retrieve logs",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ )
+
+ .delete(
+ "/",
+ async ({ set }) => {
+ try {
+ set.status = 200;
+
+ dbFunctions.clearAllLogs();
+ return { success: true };
+ } catch (error) {
+ set.status = 500;
+ logger.error("Could not delete all logs,", error);
+ return { error: "Could not delete all logs" };
+ }
+ },
+ {
+ detail: {
+ tags: ["Management"],
+ description: "Purges all historical log records from the database",
+ responses: {
+ "200": {
+ description: "Successfully cleared all logs",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ success: {
+ type: "boolean",
+ example: true,
+ },
+ },
+ },
+ },
+ },
+ },
+ "500": {
+ description: "Error clearing logs",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Could not delete all logs",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ )
+
+ .delete(
+ "/:level",
+ async ({ params: { level }, set }) => {
+ try {
+ dbFunctions.clearLogsByLevel(level);
+
+ logger.debug(`Cleared all logs with level: ${level}`);
+ return { success: true };
+ } catch (error) {
+ set.status = 500;
+      logger.error(`Could not clear logs with level ${level},`, error);
+      return { error: "Could not clear logs" };
+ }
+ },
+ {
+ detail: {
+ tags: ["Management"],
+ description: "Clears log entries matching specified severity level",
+ responses: {
+ "200": {
+ description: "Successfully cleared logs by level",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ success: {
+ type: "boolean",
+ example: true,
+ },
+ },
+ },
+ },
+ },
+ },
+ "500": {
+ description: "Error clearing logs",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+                  example: "Could not clear logs",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ );
diff --git a/src/routes/stacks.ts b/src/routes/stacks.ts
new file mode 100644
index 00000000..3ac2b86d
--- /dev/null
+++ b/src/routes/stacks.ts
@@ -0,0 +1,598 @@
+import { Elysia, t } from "elysia";
+import { dbFunctions } from "~/core/database";
+import {
+ deployStack,
+ getAllStacksStatus,
+ getStackStatus,
+ pullStackImages,
+ removeStack,
+ restartStack,
+ startStack,
+ stopStack,
+} from "~/core/stacks/controller";
+import { logger } from "~/core/utils/logger";
+import { responseHandler } from "~/core/utils/response-handler";
+import type { stacks_config } from "~/typings/database";
+
+export const stackRoutes = new Elysia({ prefix: "/stacks" })
+ .post(
+ "/deploy",
+ async ({ set, body }) => {
+ try {
+ await deployStack(body as stacks_config);
+ logger.info(`Deployed Stack (${body.name})`);
+ return responseHandler.ok(
+ set,
+ `Stack ${body.name} deployed successfully`,
+ );
+ } catch (error) {
+ const errorMsg = error instanceof Error ? error.message : String(error);
+
+ return responseHandler.error(
+ set,
+ errorMsg,
+ "Error deploying stack, please check the server logs for more information",
+ );
+ }
+ },
+ {
+ detail: {
+ tags: ["Stacks"],
+ description:
+ "Deploys a new Docker stack using a provided compose specification, allowing custom configurations and image updates",
+ responses: {
+ "200": {
+ description: "Successfully deployed stack",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ message: {
+ type: "string",
+ example: "Stack example-stack deployed successfully",
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error deploying stack",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Error deploying stack",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ body: t.Object({
+ name: t.String(),
+ version: t.Number(),
+ custom: t.Boolean(),
+ source: t.String(),
+ compose_spec: t.Any(),
+ }),
+ },
+ )
+ .post(
+ "/start",
+ async ({ set, body }) => {
+ try {
+ if (!body.stackId) {
+ throw new Error("Stack ID needed");
+ }
+ await startStack(body.stackId);
+ logger.info(`Started Stack (${body.stackId})`);
+ return responseHandler.ok(
+ set,
+ `Stack ${body.stackId} started successfully`,
+ );
+ } catch (error) {
+ const errorMsg = error instanceof Error ? error.message : String(error);
+
+ return responseHandler.error(set, errorMsg, "Error starting stack");
+ }
+ },
+ {
+ detail: {
+ tags: ["Stacks"],
+ description:
+ "Initiates a Docker stack, starting all associated containers",
+ responses: {
+ "200": {
+ description: "Successfully started stack",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ message: {
+ type: "string",
+ example: "Stack 1 started successfully",
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error starting stack",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Error starting stack",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ body: t.Object({
+ stackId: t.Number(),
+ }),
+ },
+ )
+ .post(
+ "/stop",
+ async ({ set, body }) => {
+ try {
+ if (!body.stackId) {
+        throw new Error("Stack ID needed");
+ }
+ await stopStack(body.stackId);
+ logger.info(`Stopped Stack (${body.stackId})`);
+ return responseHandler.ok(
+ set,
+ `Stack ${body.stackId} stopped successfully`,
+ );
+ } catch (error) {
+ const errorMsg = error instanceof Error ? error.message : String(error);
+
+ return responseHandler.error(set, errorMsg, "Error stopping stack");
+ }
+ },
+ {
+ detail: {
+ tags: ["Stacks"],
+ description:
+ "Halts a running Docker stack and its containers while preserving configurations",
+ responses: {
+ "200": {
+ description: "Successfully stopped stack",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ message: {
+ type: "string",
+ example: "Stack 1 stopped successfully",
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error stopping stack",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Error stopping stack",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ body: t.Object({
+ stackId: t.Number(),
+ }),
+ },
+ )
+ .post(
+ "/restart",
+ async ({ set, body }) => {
+ try {
+ if (!body.stackId) {
+        throw new Error("Stack ID needed");
+ }
+ await restartStack(body.stackId);
+ logger.info(`Restarted Stack (${body.stackId})`);
+ return responseHandler.ok(
+ set,
+ `Stack ${body.stackId} restarted successfully`,
+ );
+ } catch (error) {
+ const errorMsg = error instanceof Error ? error.message : String(error);
+
+ return responseHandler.error(set, errorMsg, "Error restarting stack");
+ }
+ },
+ {
+ detail: {
+ tags: ["Stacks"],
+ description:
+ "Performs full stack restart - stops and restarts all stack components in sequence",
+ responses: {
+ "200": {
+ description: "Successfully restarted stack",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ message: {
+ type: "string",
+ example: "Stack 1 restarted successfully",
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error restarting stack",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Error restarting stack",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ body: t.Object({
+ stackId: t.Number(),
+ }),
+ },
+ )
+ .post(
+ "/pull-images",
+ async ({ set, body }) => {
+ try {
+ if (!body.stackId) {
+        throw new Error("Stack ID needed");
+ }
+ await pullStackImages(body.stackId);
+ logger.info(`Pulled Stack images (${body.stackId})`);
+ return responseHandler.ok(
+ set,
+ `Images for stack ${body.stackId} pulled successfully`,
+ );
+ } catch (error) {
+ const errorMsg = error instanceof Error ? error.message : String(error);
+
+ return responseHandler.error(set, errorMsg, "Error pulling images");
+ }
+ },
+ {
+ detail: {
+ tags: ["Stacks"],
+ description:
+ "Updates container images for a stack using Docker's pull mechanism (requires stack ID)",
+ responses: {
+ "200": {
+ description: "Successfully pulled images",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ message: {
+ type: "string",
+ example: "Images for stack 1 pulled successfully",
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error pulling images",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Error pulling images",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ body: t.Object({
+ stackId: t.Number(),
+ }),
+ },
+ )
+ .get(
+ "/status",
+ async ({ set, query }) => {
+ try {
+ // biome-ignore lint/suspicious/noExplicitAny:
+      let status: Record<string, any>;
+ let res = {};
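+      // A specific stackId filters to one stack; otherwise every stack's status is aggregated.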
+
+ logger.debug("Entering stack status handler");
+ logger.debug(`Request body: ${JSON.stringify(query)}`);
+
+ if (query.stackId) {
+ logger.debug(`Fetching status for stackId=${query.stackId}`);
+ status = await getStackStatus(query.stackId);
+ logger.debug(
+ `Retrieved status for stackId=${query.stackId}: ${JSON.stringify(status)}`,
+ );
+
+ res = responseHandler.ok(
+ set,
+ `Stack ${query.stackId} status retrieved successfully`,
+ );
+ logger.info("Fetched Stack status");
+ } else {
+ logger.debug("Fetching status for all stacks");
+ status = await getAllStacksStatus();
+ logger.debug(
+ `Retrieved status for all stacks: ${JSON.stringify(status)}`,
+ );
+
+        res = responseHandler.ok(set, "Fetched all stacks' status");
+        logger.info("Fetched status for all stacks");
+ }
+
+ logger.debug("Returning response with status data");
+ return { ...res, status: status };
+ } catch (error) {
+ const errorMsg = error instanceof Error ? error.message : String(error);
+ logger.debug(`Error occurred while fetching stack status: ${errorMsg}`);
+
+ return responseHandler.error(
+ set,
+ errorMsg,
+ "Error getting stack status",
+ );
+ }
+ },
+ {
+ detail: {
+ tags: ["Stacks"],
+ description:
+ "Retrieves operational status for either a specific stack (by ID) or all managed stacks",
+ responses: {
+ "200": {
+ description: "Successfully retrieved stack status",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ message: {
+ type: "string",
+ example: "Stack 1 status retrieved successfully",
+ },
+ status: {
+ type: "object",
+ properties: {
+ name: {
+ type: "string",
+ example: "example-stack",
+ },
+ status: {
+ type: "string",
+ example: "running",
+ },
+ containers: {
+ type: "array",
+ items: {
+ type: "object",
+ properties: {
+ name: {
+ type: "string",
+ example: "example-stack_web_1",
+ },
+ status: {
+ type: "string",
+ example: "running",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error getting stack status",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Error getting stack status",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ query: t.Optional(
+ t.Object({
+ stackId: t.Number(),
+ }),
+ ),
+ },
+ )
+ .get(
+ "/",
+ async ({ set }) => {
+ try {
+ const stacks = dbFunctions.getStacks();
+ logger.info("Fetched Stacks");
+ return stacks;
+ } catch (error) {
+ const errorMsg = error instanceof Error ? error.message : String(error);
+
+ return responseHandler.error(set, errorMsg, "Error getting stacks");
+ }
+ },
+ {
+ detail: {
+ tags: ["Stacks"],
+ description:
+ "Lists all registered stacks with their complete configuration details",
+ responses: {
+ "200": {
+ description: "Successfully retrieved stacks",
+ content: {
+ "application/json": {
+ schema: {
+ type: "array",
+ items: {
+ type: "object",
+ properties: {
+ id: {
+ type: "number",
+ example: 1,
+ },
+ name: {
+ type: "string",
+ example: "example-stack",
+ },
+ version: {
+ type: "number",
+ example: 1,
+ },
+ source: {
+ type: "string",
+ example: "github.com/example/repo",
+ },
+ automatic_reboot_on_error: {
+ type: "boolean",
+ example: true,
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error getting stacks",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Error getting stacks",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ )
+ .delete(
+ "/",
+ async ({ set, body }) => {
+ try {
+ const { stackId } = body;
+ await removeStack(stackId);
+ logger.info(`Deleted Stack ${stackId}`);
+ return responseHandler.ok(set, `Stack ${stackId} deleted successfully`);
+ } catch (error) {
+ const errorMsg = error instanceof Error ? error.message : String(error);
+
+ return responseHandler.error(set, errorMsg, "Error deleting stack");
+ }
+ },
+ {
+ detail: {
+ tags: ["Stacks"],
+ description:
+ "Permanently removes a stack configuration and cleans up associated resources",
+ responses: {
+ "200": {
+ description: "Successfully deleted stack",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ message: {
+ type: "string",
+ example: "Stack 1 deleted successfully",
+ },
+ },
+ },
+ },
+ },
+ },
+ "400": {
+ description: "Error deleting stack",
+ content: {
+ "application/json": {
+ schema: {
+ type: "object",
+ properties: {
+ error: {
+ type: "string",
+ example: "Error deleting stack",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ body: t.Object({
+ stackId: t.Number(),
+ }),
+ },
+ );
diff --git a/src/routes/utils.ts b/src/routes/utils.ts
new file mode 100644
index 00000000..e69de29b
diff --git a/src/tests/api-config.spec.ts b/src/tests/api-config.spec.ts
new file mode 100644
index 00000000..ba3e7b32
--- /dev/null
+++ b/src/tests/api-config.spec.ts
@@ -0,0 +1,344 @@
+import { afterAll, beforeEach, describe, expect, it, mock } from "bun:test";
+import { Elysia } from "elysia";
+import { logger } from "~/core/utils/logger";
+import { apiConfigRoutes } from "~/routes/api-config";
+import { generateMarkdownReport, recordTestResult } from "./markdown-exporter";
+import type { TestContext } from "./markdown-exporter";
+
+const mockDb = {
+ updateConfig: mock(() => ({})),
+ backupDatabase: mock(
+ () => `dockstatapi-${new Date().toISOString().slice(0, 10)}.db.bak`,
+ ),
+ restoreDatabase: mock(),
+ findLatestBackup: mock(() => "dockstatapi-2025-05-06.db.bak"),
+};
+
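+// Mock node:fs so the backup endpoints never touch the real filesystem during tests.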
+mock.module("node:fs", () => ({
+ existsSync: mock((path) => path.includes("dockstatapi")),
+ readdirSync: mock(() => [
+ "dockstatapi-2025-05-06.db.bak",
+ "dockstatapi.db",
+ "dockstatapi.db-shm",
+ ]),
+ unlinkSync: mock(),
+}));
+
+const mockPlugins = [
+ {
+ name: "docker-monitor",
+ version: "1.2.0",
+ status: "active",
+ },
+];
+
+const createTestApp = () =>
+ new Elysia().use(apiConfigRoutes).decorate({
+ dbFunctions: mockDb,
+ pluginManager: {
+ getLoadedPlugins: mock(() => mockPlugins),
+ getPlugin: mock((name) => mockPlugins.find((p) => p.name === name)),
+ },
+ logger: {
+ ...logger,
+ debug: mock(),
+ error: mock(),
+ info: mock(),
+ },
+ });
+
+async function captureTestContext(
+ req: Request,
+ res: Response,
+): Promise<TestContext> {
+ const responseStatus = res.status;
+ const responseHeaders = Object.fromEntries(res.headers.entries());
+  let responseBody: unknown;
+
+ try {
+ responseBody = await res.clone().json();
+ } catch (parseError) {
+ try {
+ responseBody = await res.clone().text();
+ } catch (textError) {
+ responseBody = "Unparseable response content";
+ }
+ }
+
+ return {
+ request: {
+ method: req.method,
+ url: req.url,
+ headers: Object.fromEntries(req.headers.entries()),
+ body: req.body ? await req.clone().text() : undefined,
+ },
+ response: {
+ status: responseStatus,
+ headers: responseHeaders,
+ body: responseBody,
+ },
+ };
+}
+
+describe("API Configuration Endpoints", () => {
+ beforeEach(() => {
+ mockDb.updateConfig.mockClear();
+ });
+
+ describe("Core Configuration", () => {
+ it("should retrieve current config with hashed API key", async () => {
+ const start = Date.now();
+ let context: TestContext | undefined;
+
+ try {
+ const app = createTestApp();
+ const req = new Request("http://localhost:3000/config");
+ const res = await app.handle(req);
+ context = await captureTestContext(req, res);
+
+ expect(res.status).toBe(200);
+ expect(context.response.body).toMatchObject({
+ fetching_interval: expect.any(Number),
+ keep_data_for: expect.any(Number),
+ });
+
+ recordTestResult({
+ name: "should retrieve current config with hashed API key",
+ suite: "API Configuration Endpoints - Core Configuration",
+ time: Date.now() - start,
+ context,
+ });
+ } catch (error) {
+ recordTestResult({
+ name: "should retrieve current config with hashed API key",
+ suite: "API Configuration Endpoints - Core Configuration",
+ time: Date.now() - start,
+ error: error as Error,
+ context,
+ errorDetails: {
+ expected: "200 OK with valid config structure",
+ received: context?.response,
+ },
+ });
+ throw error;
+ }
+ });
+
+ it("should handle config update with valid payload", async () => {
+ const start = Date.now();
+ let context: TestContext | undefined;
+
+ try {
+ const app = createTestApp();
+ const requestBody = {
+ fetching_interval: 15,
+ keep_data_for: 30,
+ api_key: "new-valid-key",
+ };
+ const req = new Request("http://localhost:3000/config/update", {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify(requestBody),
+ });
+ const res = await app.handle(req);
+ context = await captureTestContext(req, res);
+
+ expect(res.status).toBe(200);
+ expect(context.response.body).toMatchObject({
+ success: true,
+ message: expect.stringContaining("Updated"),
+ });
+
+ recordTestResult({
+ name: "should handle config update with valid payload",
+ suite: "API Configuration Endpoints - Core Configuration",
+ time: Date.now() - start,
+ context,
+ });
+ } catch (error) {
+ recordTestResult({
+ name: "should handle config update with valid payload",
+ suite: "API Configuration Endpoints - Core Configuration",
+ time: Date.now() - start,
+ error: error as Error,
+ context,
+ errorDetails: {
+ expected: "200 OK with update confirmation",
+ received: context?.response,
+ },
+ });
+ throw error;
+ }
+ });
+ });
+
+ describe("Plugin Management", () => {
+ it("should list active plugins with metadata", async () => {
+ const start = Date.now();
+ let context: TestContext | undefined;
+
+ try {
+ const app = createTestApp();
+ const req = new Request("http://localhost:3000/config/plugins");
+ const res = await app.handle(req);
+ context = await captureTestContext(req, res);
+
+ expect(res.status).toBe(200);
+ expect(context.response.body).toEqual(
+ [],
+ //expect.arrayContaining([
+ // expect.objectContaining({
+ // name: expect.any(String),
+ // version: expect.any(String),
+ // status: expect.any(String),
+ // }),
+ //])
+ );
+
+ recordTestResult({
+ name: "should list active plugins with metadata",
+ suite: "API Configuration Endpoints - Plugin Management",
+ time: Date.now() - start,
+ context,
+ });
+ } catch (error) {
+ recordTestResult({
+ name: "should list active plugins with metadata",
+ suite: "API Configuration Endpoints - Plugin Management",
+ time: Date.now() - start,
+ error: error as Error,
+ context,
+ errorDetails: {
+ expected: "200 OK with plugin list",
+ received: context?.response,
+ },
+ });
+ throw error;
+ }
+ });
+ });
+
+ describe("Backup Management", () => {
+ it("should generate timestamped backup files", async () => {
+ const start = Date.now();
+ let context: TestContext | undefined;
+
+ try {
+ const app = createTestApp();
+ const req = new Request("http://localhost:3000/config/backup", {
+ method: "POST",
+ });
+ const res = await app.handle(req);
+ context = await captureTestContext(req, res);
+
+ expect(res.status).toBe(200);
+ const { message } = context.response.body as { message: string };
+ expect(message).toMatch(
+ /^data\/dockstatapi-\d{2}-\d{2}-\d{4}-1\.db\.bak$/,
+ );
+
+ recordTestResult({
+ name: "should generate timestamped backup files",
+ suite: "API Configuration Endpoints - Backup Management",
+ time: Date.now() - start,
+ context,
+ });
+ } catch (error) {
+ recordTestResult({
+ name: "should generate timestamped backup files",
+ suite: "API Configuration Endpoints - Backup Management",
+ time: Date.now() - start,
+ error: error as Error,
+ context,
+ errorDetails: {
+ expected: "200 OK with backup path",
+ received: context?.response,
+ },
+ });
+ throw error;
+ }
+ });
+
+ it("should list valid backup files", async () => {
+ const start = Date.now();
+ let context: TestContext | undefined;
+
+ try {
+ const app = createTestApp();
+ const req = new Request("http://localhost:3000/config/backup");
+ const res = await app.handle(req);
+ context = await captureTestContext(req, res);
+
+ expect(res.status).toBe(200);
+ const backups = context.response.body as string[];
+ expect(backups).toEqual(
+ expect.arrayContaining([expect.stringMatching(/\.db\.bak$/)]),
+ );
+
+ recordTestResult({
+ name: "should list valid backup files",
+ suite: "API Configuration Endpoints - Backup Management",
+ time: Date.now() - start,
+ context,
+ });
+ } catch (error) {
+ recordTestResult({
+ name: "should list valid backup files",
+ suite: "API Configuration Endpoints - Backup Management",
+ time: Date.now() - start,
+ error: error as Error,
+ context,
+ errorDetails: {
+ expected: "200 OK with backup list",
+ received: context?.response,
+ },
+ });
+ throw error;
+ }
+ });
+ });
+
+ describe("Error Handling", () => {
+ it("should return proper error format", async () => {
+ const start = Date.now();
+ let context: TestContext | undefined;
+
+ try {
+ const app = createTestApp();
+ const req = new Request("http://localhost:3000/random_link", {
+ method: "GET",
+ headers: { "Content-Type": "application/json" },
+ });
+ const res = await app.handle(req);
+ context = await captureTestContext(req, res);
+
+ expect(res.status).toBe(404);
+
+ recordTestResult({
+ name: "should return proper error format",
+ suite:
+            "API Configuration Endpoints - Error Handling of unknown routes",
+ time: Date.now() - start,
+ context,
+ });
+ } catch (error) {
+ recordTestResult({
+ name: "should return proper error format",
+ suite: "API Configuration Endpoints - Error Handling",
+ time: Date.now() - start,
+ error: error as Error,
+ context,
+ errorDetails: {
+          expected: "404 Not Found for an unknown route",
+ received: context?.response,
+ },
+ });
+ throw error;
+ }
+ });
+ });
+});
+
+afterAll(() => {
+ generateMarkdownReport();
+});
diff --git a/src/tests/docker-manager.spec.ts b/src/tests/docker-manager.spec.ts
new file mode 100644
index 00000000..865b2aa1
--- /dev/null
+++ b/src/tests/docker-manager.spec.ts
@@ -0,0 +1,482 @@
+import { afterAll, beforeEach, describe, expect, it, mock } from "bun:test";
+import { Elysia } from "elysia";
+import { dockerRoutes } from "~/routes/docker-manager";
+import {
+  generateMarkdownReport,
+  recordTestResult,
+} from "./markdown-exporter";
+import type { TestContext } from "./markdown-exporter";
+
+type DockerHost = {
+ id?: number;
+ name: string;
+ hostAddress: string;
+ secure: boolean;
+};
+
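+// In-memory stand-ins for the database helpers used by the docker-manager
+// routes; mutating calls resolve to a SQLite-style run result
+// ({ changes, lastInsertRowid }).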
+const mockDb = {
+ addDockerHost: mock(() => ({
+ changes: 1,
+ lastInsertRowid: 1,
+ })),
+ updateDockerHost: mock(() => ({
+ changes: 1,
+ lastInsertRowid: 1,
+ })),
+ getDockerHosts: mock(() => []),
+ deleteDockerHost: mock(() => ({
+ changes: 1,
+ lastInsertRowid: 1,
+ })),
+};
+
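+// Swap the real database module for the in-memory mock above so route
+// handlers never touch SQLite during these tests.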
+mock.module("~/core/database", () => ({
+ dbFunctions: mockDb,
+}));
+
+mock.module("~/core/utils/logger", () => ({
+ logger: {
+ debug: mock(),
+ info: mock(),
+ error: mock(),
+ },
+}));
+
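+// Build a fresh Elysia app per test so no route state leaks between cases.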
+const createApp = () => new Elysia().use(dockerRoutes);
+
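+// Snapshot the request and response for the markdown report. Bodies are read
+// from clones so the original streams stay consumable by the assertions.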
+async function captureTestContext(
+ req: Request,
+ res: Response,
+): Promise<TestContext> {
+ const responseStatus = res.status;
+ const responseHeaders = Object.fromEntries(res.headers.entries());
+ let responseBody: unknown;
+
+ try {
+ responseBody = await res.clone().json();
+ } catch (parseError) {
+ try {
+ responseBody = await res.clone().text();
+ } catch {
+ responseBody = "Unparseable response content";
+ }
+ }
+
+ return {
+ request: {
+ method: req.method,
+ url: req.url,
+ headers: Object.fromEntries(req.headers.entries()),
+ body: req.body ? await req.clone().text() : undefined,
+ },
+ response: {
+ status: responseStatus,
+ headers: responseHeaders,
+ body: responseBody,
+ },
+ };
+}
+
+describe("Docker Configuration Endpoints", () => {
+ beforeEach(() => {
+ mockDb.addDockerHost.mockClear();
+ mockDb.updateDockerHost.mockClear();
+ mockDb.getDockerHosts.mockClear();
+ mockDb.deleteDockerHost.mockClear();
+ });
+
+ describe("POST /docker-config/add-host", () => {
+ it("should add a docker host successfully", async () => {
+ const start = Date.now();
+ let context: TestContext | undefined;
+ const host: DockerHost = {
+ name: "Host1",
+ hostAddress: "127.0.0.1:2375",
+ secure: false,
+ };
+
+ try {
+ const app = createApp();
+ const req = new Request(
+ "http://localhost:3000/docker-config/add-host",
+ {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify(host),
+ },
+ );
+ const res = await app.handle(req);
+ context = await captureTestContext(req, res);
+
+ expect(res.status).toBe(200);
+ expect(context.response.body).toMatchObject({
+ message: `Added docker host (${host.name})`,
+ });
+ expect(mockDb.addDockerHost).toHaveBeenCalledWith(host);
+
+ recordTestResult({
+ name: "add-host success",
+ suite: "Docker Config - Add Host",
+ time: Date.now() - start,
+ context,
+ });
+ } catch (error) {
+ recordTestResult({
+ name: "add-host success",
+ suite: "Docker Config - Add Host",
+ time: Date.now() - start,
+ error: error as Error,
+ context,
+ errorDetails: {
+ expected: "200 OK with success message",
+ received: context?.response,
+ },
+ });
+ throw error;
+ }
+ });
+
+ it("should handle error when adding a docker host fails", async () => {
+ const start = Date.now();
+ let context: TestContext | undefined;
+ const host: DockerHost = {
+ name: "Host2",
+ hostAddress: "invalid",
+ secure: true,
+ };
+
+      // Force the insert to fail so the route's error path is exercised
+ mockDb.addDockerHost.mockImplementationOnce(() => {
+ throw new Error("Mock Database Error");
+ });
+
+ try {
+ const app = createApp();
+ const req = new Request(
+ "http://localhost:3000/docker-config/add-host",
+ {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify(host),
+ },
+ );
+ const res = await app.handle(req);
+ context = await captureTestContext(req, res);
+
+ expect(res.status).toBe(500);
+ expect(context.response).toMatchObject({
+ body: expect.any(String),
+ });
+
+ recordTestResult({
+ name: "add-host failure",
+ suite: "Docker Config - Add Host",
+ time: Date.now() - start,
+ context,
+ });
+ } catch (error) {
+ recordTestResult({
+ name: "add-host failure",
+ suite: "Docker Config - Add Host",
+ time: Date.now() - start,
+ error: error as Error,
+ context,
+ errorDetails: {
+            expected: "500 Error with error message",
+ received: context?.response,
+ },
+ });
+ throw error;
+ }
+ });
+ });
+
+ describe("POST /docker-config/update-host", () => {
+ it("should update a docker host successfully", async () => {
+ const start = Date.now();
+ let context: TestContext | undefined;
+ const host: DockerHost = {
+ id: 1,
+ name: "Host1-upd",
+ hostAddress: "127.0.0.1:2376",
+ secure: true,
+ };
+
+ try {
+ const app = createApp();
+ const req = new Request(
+ "http://localhost:3000/docker-config/update-host",
+ {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify(host),
+ },
+ );
+ const res = await app.handle(req);
+ context = await captureTestContext(req, res);
+
+ expect(res.status).toBe(200);
+ expect(context.response.body).toMatchObject({
+ message: `Updated docker host (${host.id})`,
+ });
+ expect(mockDb.updateDockerHost).toHaveBeenCalledWith(host);
+
+ recordTestResult({
+ name: "update-host success",
+ suite: "Docker Config - Update Host",
+ time: Date.now() - start,
+ context,
+ });
+ } catch (error) {
+ recordTestResult({
+ name: "update-host success",
+ suite: "Docker Config - Update Host",
+ time: Date.now() - start,
+ error: error as Error,
+ context,
+ errorDetails: {
+ expected: "200 OK with update confirmation",
+ received: context?.response,
+ },
+ });
+ throw error;
+ }
+ });
+
+ it("should handle error when update fails", async () => {
+ const start = Date.now();
+ let context: TestContext | undefined;
+ const host: DockerHost = {
+ id: 2,
+ name: "Host2",
+ hostAddress: "x",
+ secure: false,
+ };
+
+ mockDb.updateDockerHost.mockImplementationOnce(() => {
+ throw new Error("Update error");
+ });
+
+ try {
+ const app = createApp();
+ const req = new Request(
+ "http://localhost:3000/docker-config/update-host",
+ {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify(host),
+ },
+ );
+ const res = await app.handle(req);
+ context = await captureTestContext(req, res);
+
+ expect(res.status).toBe(500);
+ expect(context.response).toMatchObject({
+ body: expect.any(String),
+ });
+
+ recordTestResult({
+ name: "update-host failure",
+ suite: "Docker Config - Update Host",
+ time: Date.now() - start,
+ context,
+ });
+ } catch (error) {
+ recordTestResult({
+ name: "update-host failure",
+ suite: "Docker Config - Update Host",
+ time: Date.now() - start,
+ error: error as Error,
+ context,
+ errorDetails: {
+            expected: "500 Error with error details",
+ received: context?.response,
+ },
+ });
+ throw error;
+ }
+ });
+ });
+
+ describe("GET /docker-config/hosts", () => {
+ it("should retrieve list of hosts", async () => {
+ const start = Date.now();
+ let context: TestContext | undefined;
+ const hosts: DockerHost[] = [
+ { id: 1, name: "H1", hostAddress: "a", secure: false },
+ ];
+
+      mockDb.getDockerHosts.mockImplementationOnce(() => hosts as never[]);
+
+ try {
+ const app = createApp();
+ const req = new Request("http://localhost:3000/docker-config/hosts");
+ const res = await app.handle(req);
+ context = await captureTestContext(req, res);
+
+ expect(res.status).toBe(200);
+ expect(context.response.body).toEqual(hosts);
+
+ recordTestResult({
+ name: "get-hosts success",
+ suite: "Docker Config - List Hosts",
+ time: Date.now() - start,
+ context,
+ });
+ } catch (error) {
+ recordTestResult({
+ name: "get-hosts success",
+ suite: "Docker Config - List Hosts",
+ time: Date.now() - start,
+ error: error as Error,
+ context,
+ errorDetails: {
+ expected: "200 OK with hosts array",
+ received: context?.response,
+ },
+ });
+ throw error;
+ }
+ });
+
+ it("should handle error when retrieval fails", async () => {
+ const start = Date.now();
+ let context: TestContext | undefined;
+
+ mockDb.getDockerHosts.mockImplementationOnce(() => {
+ throw new Error("Fetch error");
+ });
+
+ try {
+ const app = createApp();
+ const req = new Request("http://localhost:3000/docker-config/hosts");
+ const res = await app.handle(req);
+ context = await captureTestContext(req, res);
+
+ expect(res.status).toBe(500);
+ expect(context.response).toMatchObject({
+ body: expect.any(String),
+ });
+
+ recordTestResult({
+ name: "get-hosts failure",
+ suite: "Docker Config - List Hosts",
+ time: Date.now() - start,
+ context,
+ });
+ } catch (error) {
+ recordTestResult({
+ name: "get-hosts failure",
+ suite: "Docker Config - List Hosts",
+ time: Date.now() - start,
+ error: error as Error,
+ context,
+ errorDetails: {
+            expected: "500 Error with error details",
+ received: context?.response,
+ },
+ });
+ throw error;
+ }
+ });
+ });
+
+ describe("DELETE /docker-config/hosts/:id", () => {
+ it("should delete a host successfully", async () => {
+ const start = Date.now();
+ let context: TestContext | undefined;
+ const id = 5;
+
+ try {
+ const app = createApp();
+ const req = new Request(
+ `http://localhost:3000/docker-config/hosts/${id}`,
+ {
+ method: "DELETE",
+ },
+ );
+ const res = await app.handle(req);
+ context = await captureTestContext(req, res);
+
+ expect(res.status).toBe(200);
+ expect(context.response.body).toMatchObject({
+ message: `Deleted docker host (${id})`,
+ });
+ expect(mockDb.deleteDockerHost).toHaveBeenCalledWith(id);
+
+ recordTestResult({
+ name: "delete-host success",
+ suite: "Docker Config - Delete Host",
+ time: Date.now() - start,
+ context,
+ });
+ } catch (error) {
+ recordTestResult({
+ name: "delete-host success",
+ suite: "Docker Config - Delete Host",
+ time: Date.now() - start,
+ error: error as Error,
+ context,
+ errorDetails: {
+ expected: "200 OK with deletion confirmation",
+ received: context?.response,
+ },
+ });
+ throw error;
+ }
+ });
+
+ it("should handle error when delete fails", async () => {
+ const start = Date.now();
+ let context: TestContext | undefined;
+ const id = 6;
+
+ mockDb.deleteDockerHost.mockImplementationOnce(() => {
+ throw new Error("Delete error");
+ });
+
+ try {
+ const app = createApp();
+ const req = new Request(
+ `http://localhost:3000/docker-config/hosts/${id}`,
+ {
+ method: "DELETE",
+ },
+ );
+ const res = await app.handle(req);
+ context = await captureTestContext(req, res);
+
+ expect(res.status).toBe(500);
+ expect(context.response).toMatchObject({
+ body: expect.any(String),
+ });
+
+ recordTestResult({
+ name: "delete-host failure",
+ suite: "Docker Config - Delete Host",
+ time: Date.now() - start,
+ context,
+ });
+ } catch (error) {
+ recordTestResult({
+ name: "delete-host failure",
+ suite: "Docker Config - Delete Host",
+ time: Date.now() - start,
+ error: error as Error,
+ context,
+ errorDetails: {
+            expected: "500 Error with error details",
+ received: context?.response,
+ },
+ });
+ throw error;
+ }
+ });
+ });
+});
+
+afterAll(() => {
+ generateMarkdownReport();
+});
diff --git a/src/tests/markdown-exporter.ts b/src/tests/markdown-exporter.ts
new file mode 100644
index 00000000..2d55b48e
--- /dev/null
+++ b/src/tests/markdown-exporter.ts
@@ -0,0 +1,144 @@
+import { mkdirSync, writeFileSync } from "node:fs";
+import { format } from "date-fns";
+import { logger } from "~/core/utils/logger";
+
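+// Collects per-test request/response context during a run and renders it as
+// a markdown report (one file per day) under reports/markdown/.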
+export type TestContext = {
+ request: {
+ method: string;
+ url: string;
+    headers: Record<string, string>;
+    query?: Record<string, string>;
+ body?: unknown;
+ };
+ response: {
+ status: number;
+    headers: Record<string, string>;
+ body?: unknown;
+ };
+};
+
+type ErrorDetails = {
+ expected?: unknown;
+ received?: unknown;
+};
+
+type TestResult = {
+ name: string;
+ suite: string;
+ time: number;
+ error?: Error;
+ context?: TestContext;
+ errorDetails?: ErrorDetails;
+};
+
+export const testResults: TestResult[] = [];
+
+export function recordTestResult(result: TestResult) {
+  logger.debug(`__UT__ Recording test result: ${JSON.stringify(result)}`);
+  testResults.push(result);
+}
+
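+// Render the captured request/response pair (plus optional expected/received
+// error details) as a fenced block for the report.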
+function formatContextMarkdown(
+ context?: TestContext,
+ errorDetails?: ErrorDetails,
+): string {
+ if (!context) return "";
+
+ let md = "```\n";
+ md += "=== REQUEST ===\n";
+ md += `Method: ${context.request.method}\n`;
+ md += `URL: ${context.request.url}\n`;
+ if (context.request.query) {
+ md += `Query Params: ${JSON.stringify(context.request.query, null, 2)}\n`;
+ }
+ md += `Headers: ${JSON.stringify(context.request.headers, null, 2)}\n`;
+ if (context.request.body) {
+ md += `Body: ${JSON.stringify(context.request.body, null, 2)}\n`;
+ }
+ md += "\n=== RESPONSE ===\n";
+ md += `Status: ${context.response.status}\n`;
+ md += `Headers: ${JSON.stringify(context.response.headers, null, 2)}\n`;
+ if (context.response.body) {
+ md += `Body: ${JSON.stringify(context.response.body, null, 2)}\n`;
+ }
+ if (errorDetails) {
+ md += "\n=== ERROR DETAILS ===\n";
+ md += `Expected: ${JSON.stringify(errorDetails.expected, null, 2)}\n`;
+ md += `Received: ${JSON.stringify(errorDetails.received, null, 2)}\n`;
+ }
+ md += "```\n";
+ return md;
+}
+
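+// Groups results by suite, emits one section per test with collapsible
+// <details> blocks for errors and HTTP context, then writes the daily report.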
+export function generateMarkdownReport() {
+ if (testResults.length === 0) {
+ logger.warn("No test results to generate markdown report.");
+ return;
+ }
+
+ const totalTests = testResults.length;
+ const totalErrors = testResults.filter((r) => r.error).length;
+
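+  // Group results by suite name so the report can show per-suite totals.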
+ const testSuites = testResults.reduce(
+ (suites, result) => {
+ if (!suites[result.suite]) {
+ suites[result.suite] = [];
+ }
+ suites[result.suite].push(result);
+ return suites;
+ },
+    {} as Record<string, TestResult[]>,
+ );
+
+ let md = `# Test Report - ${format(new Date(), "yyyy-MM-dd")}\n`;
+  md += `\n**Total Tests:** ${totalTests}\n`;
+ md += `**Total Failures:** ${totalErrors}\n`;
+
+ for (const [suiteName, cases] of Object.entries(testSuites)) {
+ const suiteErrors = cases.filter((c) => c.error).length;
+    md += `\n## Suite: ${suiteName}\n`;
+    md += `- Tests: ${cases.length}\n`;
+ md += `- Failures: ${suiteErrors}\n`;
+
+ for (const test of cases) {
+ const status = test.error ? "❌ Failed" : "✅ Passed";
+      md += `\n### ${test.name} (${(test.time / 1000).toFixed(2)}s)\n`;
+      md += `- Status: **${status}**\n`;
+
+ if (test.error) {
+        const msg = test.error.message
+          .replace(/</g, "&lt;")
+          .replace(/>/g, "&gt;");
+        const stack = test.error.stack
+          ?.replace(/</g, "&lt;")
+          .replace(/>/g, "&gt;");
+        md += "\n<details>\n<summary>Error Details</summary>\n\n";
+        md += `**Message:** ${msg} \n`;
+        if (stack) {
+          md += `\n\`\`\`\n${stack}\n\`\`\`\n`;
+        }
+        md += "\n</details>\n";
+ }
+
+ if (test.context) {
+        md += "\n<details>\n<summary>Request/Response Context</summary>\n\n";
+        md += formatContextMarkdown(test.context, test.errorDetails);
+        md += "\n</details>\n";
+ }
+ }
+ }
+
+ // Ensure directory exists
+ mkdirSync("reports/markdown", { recursive: true });
+ const filename = `reports/markdown/test-report-${format(
+ new Date(),
+ "yyyy-MM-dd",
+ )}.md`;
+ writeFileSync(filename, md, "utf8");
+
+ logger.debug(`__UT__ Markdown report written to ${filename}`);
+}
diff --git a/src/typings b/src/typings
new file mode 160000
index 00000000..9cae829b
--- /dev/null
+++ b/src/typings
@@ -0,0 +1 @@
+Subproject commit 9cae829bead60cd13351b757340f3225649cb11d
diff --git a/tsconfig.json b/tsconfig.json
new file mode 100644
index 00000000..dad4550b
--- /dev/null
+++ b/tsconfig.json
@@ -0,0 +1,107 @@
+{
+ "compilerOptions": {
+ /* Visit https://aka.ms/tsconfig to read more about this file */
+
+ /* Projects */
+ // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
+ "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
+ // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
+ // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
+ // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
+ // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
+
+ /* Language and Environment */
+ "target": "ES2021" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */,
+ // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
+ // "jsx": "preserve", /* Specify what JSX code is generated. */
+ // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */
+ // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
+ // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
+ // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
+ // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
+ // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
+ // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
+ // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
+ // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
+ "outDir": "build/",
+ /* Modules */
+ "module": "ES2022" /* Specify what module code is generated. */,
+ // "rootDir": "./", /* Specify the root folder within your source files. */
+ "moduleResolution": "node" /* Specify how TypeScript looks up a file from a given module specifier. */,
+ // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
+ "paths": {
+ "~/*": ["./src/*"]
+ } /* Specify a set of entries that re-map imports to additional lookup locations. */,
+ // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
+ // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
+ "types": [
+ "bun-types"
+ ] /* Specify type package names to be included without being referenced in a source file. */,
+ // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
+ // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
+ "resolveJsonModule": true /* Enable importing .json files. */,
+    // "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */
+
+ /* JavaScript Support */
+ // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
+ // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
+ // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
+
+ /* Emit */
+ // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
+ // "declarationMap": true, /* Create sourcemaps for d.ts files. */
+ // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
+ // "sourceMap": true, /* Create source map files for emitted JavaScript files. */
+ // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
+ // "outDir": "./", /* Specify an output folder for all emitted files. */
+ // "removeComments": true, /* Disable emitting comments. */
+ // "noEmit": true, /* Disable emitting files from a compilation. */
+ // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
+ // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */
+ // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
+ // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
+ // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
+ "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
+ "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
+ // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
+ // "newLine": "crlf", /* Set the newline character for emitting files. */
+ // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
+ // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
+ // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
+ // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
+ // "declarationDir": "./", /* Specify the output directory for generated declaration files. */
+ // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */
+
+ /* Interop Constraints */
+ // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
+ // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
+ "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */,
+ // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
+ "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */,
+
+ /* Type Checking */
+ "strict": true /* Enable all strict type-checking options. */,
+ // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
+ // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
+ // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
+ // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
+ // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
+ // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
+ // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
+ // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
+ // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
+ // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
+ // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
+ // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
+ // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
+ // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
+ // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
+ // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
+ // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
+ // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
+
+ /* Completeness */
+ // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
+ "skipLibCheck": true /* Skip type checking all .d.ts files. */
+ }
+}