diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index bf36febf..ebde61ca 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,43 +1,41 @@ -{ - "name": "Kadena Indexer", - "build": { - "dockerfile": "Dockerfile", - "args": { - "NODE_VERSION": "18", - "INSTALL_AWS_CLI": "true", - "INSTALL_TERRAFORM": "true", - "INSTALL_DOCKER_CLI": "true", - "TERRAFORM_VERSION": "1.7.3" - } - }, - "mounts": [ - "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" - ], - "forwardPorts": [3000, 9090, 3001], - "postCreateCommand": "yarn install", - "remoteUser": "node", - "customizations": { - "vscode": { - "settings": { - "editor.tabSize": 2, - "eslint.validate": ["javascript", "typescript"], - "prettier.enable": true, - "editor.formatOnSave": true, - "prettier.singleQuote": false, - "terminal.integrated.shell.linux": "/bin/bash", - "editor.defaultFormatter": "esbenp.prettier-vscode", - "files.exclude": { - "**/.git": true, - "**/.DS_Store": true, - "**/node_modules": true - } - }, - "extensions": [ - "esbenp.prettier-vscode", - "dbaeumer.vscode-eslint", - "ms-azuretools.vscode-docker", - "hashicorp.terraform" - ] - } - } -} +{ + "name": "Kadena Indexer", + "build": { + "dockerfile": "Dockerfile", + "args": { + "NODE_VERSION": "18", + "INSTALL_AWS_CLI": "true", + "INSTALL_TERRAFORM": "true", + "INSTALL_DOCKER_CLI": "true", + "TERRAFORM_VERSION": "1.7.3" + } + }, + "mounts": ["source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind"], + "forwardPorts": [3000, 9090, 3001], + "postCreateCommand": "yarn install", + "remoteUser": "node", + "customizations": { + "vscode": { + "settings": { + "editor.tabSize": 2, + "eslint.validate": ["javascript", "typescript"], + "prettier.enable": true, + "editor.formatOnSave": true, + "prettier.singleQuote": false, + "terminal.integrated.shell.linux": "/bin/bash", + "editor.defaultFormatter": "esbenp.prettier-vscode", + "files.exclude": { + "**/.git": true, + "**/.DS_Store": true, + "**/node_modules": true + } + }, + "extensions": [ + "esbenp.prettier-vscode", + "dbaeumer.vscode-eslint", + "ms-azuretools.vscode-docker", + "hashicorp.terraform" + ] + } + } +} diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 7f696f0f..5f958ed3 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,14 +1,15 @@ --- name: Bug Report about: Create a bug report to help us improve -title: "fix: " +title: 'fix: ' labels: bug -assignees: "" +assignees: '' --- ## Bug Report #### Version: + - Release: v1.0.0 - Tag: release-1.0.0 - Commit: abc123f @@ -23,4 +24,3 @@ assignees: "" - diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index fc53a525..9a1e887f 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,9 +1,9 @@ --- name: Feature Request about: Suggest an idea or enhancement for this project -title: "feat:" +title: 'feat:' labels: enhancement -assignees: "" +assignees: '' --- ## Feature Request diff --git a/.github/ISSUE_TEMPLATE/refactor_request.md b/.github/ISSUE_TEMPLATE/refactor_request.md index 635238f3..db99aa34 100644 --- a/.github/ISSUE_TEMPLATE/refactor_request.md +++ b/.github/ISSUE_TEMPLATE/refactor_request.md @@ -1,9 +1,9 @@ --- name: Refactor Request about: Suggest the refactoring for this project -title: "refactor:" +title: 'refactor:' labels: enhancement -assignees: "" +assignees: '' --- ## Refactor Request 
@@ -18,4 +18,3 @@ Please include if your refactor request is related to a problem. --> - diff --git a/.github/workflows/indexer.yml b/.github/workflows/indexer.yml index ac772cef..22e851bf 100644 --- a/.github/workflows/indexer.yml +++ b/.github/workflows/indexer.yml @@ -1,31 +1,31 @@ -name: "Indexer Build" - -on: - push: - branches: - - main - pull_request: - branches: - - main - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v2 - - - name: Use Node.js 18 - uses: actions/setup-node@v2 - with: - node-version: "18" - cache: "yarn" - - - name: Install dependencies - run: yarn install - working-directory: ./indexer - - - name: Build - run: tsc - working-directory: ./indexer +name: 'Indexer Build' + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Use Node.js 18 + uses: actions/setup-node@v2 + with: + node-version: '18' + cache: 'yarn' + + - name: Install dependencies + run: yarn install + working-directory: ./indexer + + - name: Build + run: tsc + working-directory: ./indexer diff --git a/.github/workflows/terraform.yml b/.github/workflows/terraform.yml index 8aa0c42b..0a032c21 100644 --- a/.github/workflows/terraform.yml +++ b/.github/workflows/terraform.yml @@ -1,60 +1,59 @@ -name: "Terraform Deployment" - -on: - workflow_dispatch: - -jobs: - terraform-plan: - runs-on: ubuntu-latest - env: - TF_VAR_AWS_ACCESS_KEY_ID: ${{ vars.TF_VAR_AWS_ACCESS_KEY_ID }} - TF_VAR_AWS_SECRET_ACCESS_KEY: ${{ vars.TF_VAR_AWS_SECRET_ACCESS_KEY }} - TF_VAR_AWS_ACCOUNT_ID: ${{ vars.TF_VAR_AWS_ACCOUNT_ID }} - TF_VAR_AWS_USER_NAME: ${{ vars.TF_VAR_AWS_USER_NAME }} - TF_VAR_AWS_DB_USERNAME: ${{ vars.TF_VAR_AWS_DB_USERNAME }} - TF_VAR_AWS_DB_PASSWORD: ${{ vars.TF_VAR_AWS_DB_PASSWORD }} - TF_VAR_db_password: ${{ vars.TF_VAR_db_password }} - - steps: - - name: Checkout code - uses: actions/checkout@v2 - - - name: Setup Terraform - uses: hashicorp/setup-terraform@v3 - with: - terraform_version: 1.7.3 - terraform_wrapper: false - - - name: Terraform Init - run: terraform init - working-directory: ./terraform - - - name: Terraform Plan - run: terraform plan -out=tfplan - working-directory: ./terraform - - - name: Upload Terraform Plan - uses: actions/upload-artifact@v2 - with: - name: terraform-plan - path: ./terraform/tfplan - - terraform-apply: - needs: terraform-plan - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v2 - - - name: Setup Terraform - uses: hashicorp/setup-terraform@v1 - - - name: Download Terraform Plan - uses: actions/download-artifact@v2 - with: - name: terraform-plan - path: ./terraform - -# - name: Terraform Apply -# run: terraform apply tfplan -# working-directory: ./terraform +name: 'Terraform Deployment' + +on: + workflow_dispatch: + +jobs: + terraform-plan: + runs-on: ubuntu-latest + env: + TF_VAR_AWS_ACCESS_KEY_ID: ${{ vars.TF_VAR_AWS_ACCESS_KEY_ID }} + TF_VAR_AWS_SECRET_ACCESS_KEY: ${{ vars.TF_VAR_AWS_SECRET_ACCESS_KEY }} + TF_VAR_AWS_ACCOUNT_ID: ${{ vars.TF_VAR_AWS_ACCOUNT_ID }} + TF_VAR_AWS_USER_NAME: ${{ vars.TF_VAR_AWS_USER_NAME }} + TF_VAR_AWS_DB_USERNAME: ${{ vars.TF_VAR_AWS_DB_USERNAME }} + TF_VAR_AWS_DB_PASSWORD: ${{ vars.TF_VAR_AWS_DB_PASSWORD }} + TF_VAR_db_password: ${{ vars.TF_VAR_db_password }} + + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Setup Terraform + uses: hashicorp/setup-terraform@v3 + with: + terraform_version: 1.7.3 + 
terraform_wrapper: false + + - name: Terraform Init + run: terraform init + working-directory: ./terraform + + - name: Terraform Plan + run: terraform plan -out=tfplan + working-directory: ./terraform + + - name: Upload Terraform Plan + uses: actions/upload-artifact@v2 + with: + name: terraform-plan + path: ./terraform/tfplan + + terraform-apply: + needs: terraform-plan + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Setup Terraform + uses: hashicorp/setup-terraform@v1 + + - name: Download Terraform Plan + uses: actions/download-artifact@v2 + with: + name: terraform-plan + path: ./terraform +# - name: Terraform Apply +# run: terraform apply tfplan +# working-directory: ./terraform diff --git a/README.md b/README.md index 552955f8..4a222d17 100644 --- a/README.md +++ b/README.md @@ -1,40 +1,40 @@ -# Kadindexer - Kadena Indexer - -[![Build](https://github.com/hack-a-chain-software/indexer-kadena/actions/workflows/indexer.yml/badge.svg)](https://github.com/hack-a-chain-software/indexer-kadena/actions/workflows/indexer.yml) - -- [`@kadena-indexer/indexer`](indexer/README.md): The indexer package, which is responsible for scanning and storing blocks for Kadena blockchain. -- [`@kadena-indexer/terraform`](terraform/README.md): The Terraform configuration for provisioning the infrastructure required to run the indexer and the node. -- [`@kadena-indexer/backfill`](backfill/README.md): The backfill package, which is responsible for backfilling the indexer data. - -## Requirements - -- Install dependencies -- See individual package READMEs for specific prerequisites - -## Installation - -Install dependencies with the following command: - -```bash -yarn install -``` - -## Quick Start - -This is the quickest way to get the indexer running. - -Install [Docker](https://www.docker.com/). - -Fill the `.env` file in the `indexer` folder. See [Environment Variables Reference](../indexer/README.md#32-environment-variables-reference). - -```bash -cp indexer/.env.template indexer/.env -``` - -To start all services: -```bash -yarn indexer dev -``` - -**NOTE:** Using the image on with the composer require the database `DB_USERNAME` to default to `postgres`. - +# Kadindexer - Kadena Indexer + +[![Build](https://github.com/hack-a-chain-software/indexer-kadena/actions/workflows/indexer.yml/badge.svg)](https://github.com/hack-a-chain-software/indexer-kadena/actions/workflows/indexer.yml) + +- [`@kadena-indexer/indexer`](indexer/README.md): The indexer package, which is responsible for scanning and storing blocks for Kadena blockchain. +- [`@kadena-indexer/terraform`](terraform/README.md): The Terraform configuration for provisioning the infrastructure required to run the indexer and the node. +- [`@kadena-indexer/backfill`](backfill/README.md): The backfill package, which is responsible for backfilling the indexer data. + +## Requirements + +- Install dependencies +- See individual package READMEs for specific prerequisites + +## Installation + +Install dependencies with the following command: + +```bash +yarn install +``` + +## Quick Start + +This is the quickest way to get the indexer running. + +Install [Docker](https://www.docker.com/). + +Fill the `.env` file in the `indexer` folder. See [Environment Variables Reference](../indexer/README.md#32-environment-variables-reference). 
+ +```bash +cp indexer/.env.template indexer/.env +``` + +To start all services: + +```bash +yarn indexer dev +``` + +**NOTE:** Using the image with Docker Compose requires the database `DB_USERNAME` to default to `postgres`. diff --git a/backfill/README.md b/backfill/README.md index f27ab31b..3a103e68 100644 --- a/backfill/README.md +++ b/backfill/README.md @@ -14,6 +14,7 @@ The Kadindexer Backfill is a utility tool designed to synchronize historical blo ## 3. Setup ### 3.1. Starting Docker + Start Docker Desktop from the command line or via the macOS application. ```bash @@ -26,21 +27,21 @@ sudo systemctl start docker ### 3.2. Environment Variables -| Variable | Description | Example | -|----------|-------------|---------| -| `CERT_PATH` | Path to SSL certificate bundle | `./global-bundle.pem` | -| `SYNC_BASE_URL` | Base URL for the Chainweb API | `https://api.chainweb.com/chainweb/0.0` | -| `CHAIN_ID` | ID of the chain to backfill | `0` | -| `NETWORK` | Kadena network to sync from | `mainnet01` | -| `SYNC_MIN_HEIGHT` | Starting block height for backfill | `5370495` | -| `SYNC_FETCH_INTERVAL_IN_BLOCKS` | Number of blocks to fetch in each interval | `100` | -| `SYNC_ATTEMPTS_MAX_RETRY` | Maximum number of retry attempts | `5` | -| `SYNC_ATTEMPTS_INTERVAL_IN_MS` | Interval between retry attempts in milliseconds | `500` | -| `DB_USERNAME` | PostgreSQL database username | `postgres` | -| `DB_PASSWORD` | PostgreSQL database password | `password` | -| `DB_NAME` | Name of the database | `indexer` | -| `DB_HOST` | Database host address | `localhost` | -| `DB_PORT` | Database port number | `5432` | +| Variable | Description | Example | +| ------------------------------- | ----------------------------------------------- | --------------------------------------- | +| `CERT_PATH` | Path to SSL certificate bundle | `./global-bundle.pem` | +| `SYNC_BASE_URL` | Base URL for the Chainweb API | `https://api.chainweb.com/chainweb/0.0` | +| `CHAIN_ID` | ID of the chain to backfill | `0` | +| `NETWORK` | Kadena network to sync from | `mainnet01` | +| `SYNC_MIN_HEIGHT` | Starting block height for backfill | `5370495` | +| `SYNC_FETCH_INTERVAL_IN_BLOCKS` | Number of blocks to fetch in each interval | `100` | +| `SYNC_ATTEMPTS_MAX_RETRY` | Maximum number of retry attempts | `5` | +| `SYNC_ATTEMPTS_INTERVAL_IN_MS` | Interval between retry attempts in milliseconds | `500` | +| `DB_USERNAME` | PostgreSQL database username | `postgres` | +| `DB_PASSWORD` | PostgreSQL database password | `password` | +| `DB_NAME` | Name of the database | `indexer` | +| `DB_HOST` | Database host address | `localhost` | +| `DB_PORT` | Database port number | `5432` | **NOTE:** The example Kadena node API from Chainweb will not work for the indexer's purposes. You will need to run your own Kadena node and set the `NODE_API_URL` to your node's API URL. @@ -53,6 +54,7 @@ Please refer to the [Kadena Indexer README](../indexer/README.md) for instructio ### 4.2. Build the backfill image Build the image: + ```bash docker build -t chainbychain -f Dockerfile . ``` @@ -60,40 +62,49 @@ docker build -t chainbychain -f Dockerfile . ### 4.3. Run the container #### Dockerfile (Chain by Chain) + This Dockerfile is designed to run the backfill process for a single chain at a time. It's useful when you need to: + - Sync data for a specific chain ID - Have more granular control over the backfill process - Debug issues with a particular chain - Manage resources more efficiently #### Dockerfile.indexes + This Dockerfile is specifically for recreating database indexes. 
Use this when you need to: + - Rebuild corrupted indexes - Optimize existing indexes - Add new indexes to improve query performance - Perform database maintenance #### Dockerfile.middle-backfill + This Dockerfile orchestrates the backfill process across all chains simultaneously. It's beneficial when you want to: + - Perform a complete system backfill - Sync data for all chains in parallel - Save time by running multiple chain syncs concurrently - Ensure consistency across all chains For single chain backfill: + ```bash docker build -t chainbychain -f Dockerfile . docker run --rm --name chainbychain --env-file .env chainbychain ``` For rebuilding indexes: + ```bash docker build -t rebuild-indexes -f Dockerfile.indexes . docker run --rm --name rebuild-indexes --env-file .env rebuild-indexes ``` For all chains backfill: + ```bash docker build -t all-chains -f Dockerfile.middle-backfill . docker run --rm --name all-chains --env-file .env all-chains -``` \ No newline at end of file +``` diff --git a/indexer/README.md b/indexer/README.md index 8bc1d6d6..fa418dd0 100644 --- a/indexer/README.md +++ b/indexer/README.md @@ -1,15 +1,18 @@ # Kadena Indexer - Infrastructure Configuration ### 🚀 Getting Started + - [Introduction](#1-introduction) - [Prerequisites](#2-prerequisites) ### ⚙️ Configuration + - [Environment Setup](#3-environment-setup) - [Configure Variables](#31-configure-environment-variables) - [Variables Reference](#32-environment-variables-reference) ### 🐳 Docker Setup + - [Starting Docker](#41-starting-docker) - [Dev Container](#42-dev-container) - [Running Options](#43-running-with-docker) @@ -18,9 +21,11 @@ - [Temporary Containers](#45-running-separately-with-temporary-containers) ## 1. Introduction + This directory contains the instructions on how to set up the Docker container for the Kadena indexer, configure the environment variables, and run the indexer. We present two options for running the indexer, by using Docker Compose or running the services separately. ## 2. Prerequisites + - [Docker](https://www.docker.com/) - [Dev Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) for VSCode or Cursor (optional) - Installed dependencies @@ -31,6 +36,7 @@ This directory contains the instructions on how to set up the Docker container f ## 3. Environment Setup ### 3.1. Configure Environment Variables + Under the `/indexer` directory, run the following command to create an `.env` file using the `.env.template` as a reference: ```bash @@ -38,28 +44,29 @@ cp indexer/.env.template indexer/.env ``` ### 3.2. 
Environment Variables Reference -| Variable | Description | Example | -|----------|-------------|---------| -| `NODE_API_URL` | Base URL for the Kadena node API | `https://api.chainweb.com` | -| `SYNC_BASE_URL` | Base URL for the Chainweb API | `https://api.chainweb.com/chainweb/0.0` | -| `SYNC_MIN_HEIGHT` | Minimum height to start syncing from | `0` | -| `SYNC_FETCH_INTERVAL_IN_BLOCKS` | Interval in blocks to fetch | `100` | -| `SYNC_NETWORK` | Network to sync | `mainnet01`, `testnet04`, `devnet` | -| `KADENA_GRAPHQL_API_URL` | GraphQL API host | `localhost` | -| `KADENA_GRAPHQL_API_PORT` | GraphQL API port | `3000` | -| `DB_USERNAME` | PostgreSQL database username | `postgres` | -| `DB_PASSWORD` | PostgreSQL database password | `your_password` | -| `DB_NAME` | PostgreSQL database name | `indexer` | -| `DB_HOST` | PostgreSQL database host | `localhost` | -| `DB_SSL_ENABLED` | Enable/disable SSL for database | `true` or `false` | + +| Variable | Description | Example | +| ------------------------------- | ------------------------------------ | --------------------------------------- | +| `NODE_API_URL` | Base URL for the Kadena node API | `https://api.chainweb.com` | +| `SYNC_BASE_URL` | Base URL for the Chainweb API | `https://api.chainweb.com/chainweb/0.0` | +| `SYNC_MIN_HEIGHT` | Minimum height to start syncing from | `0` | +| `SYNC_FETCH_INTERVAL_IN_BLOCKS` | Interval in blocks to fetch | `100` | +| `SYNC_NETWORK` | Network to sync | `mainnet01`, `testnet04`, `devnet` | +| `KADENA_GRAPHQL_API_URL` | GraphQL API host | `localhost` | +| `KADENA_GRAPHQL_API_PORT` | GraphQL API port | `3000` | +| `DB_USERNAME` | PostgreSQL database username | `postgres` | +| `DB_PASSWORD` | PostgreSQL database password | `your_password` | +| `DB_NAME` | PostgreSQL database name | `indexer` | +| `DB_HOST` | PostgreSQL database host | `localhost` | +| `DB_SSL_ENABLED` | Enable/disable SSL for database | `true` or `false` | **NOTE:** The example Kadena node API from Chainweb will not work for the indexer's purposes. You will need to run your own Kadena node and set the `NODE_API_URL` to your node's API URL. ## 4. Docker Setup ### 4.1. Starting Docker -Start Docker Desktop from command line or via IOS application. +Start Docker Desktop from the command line or via the macOS application. ```bash # MacOS - Start Docker Desktop from command line @@ -72,11 +79,13 @@ sudo systemctl start docker **NOTE:** Make sure to check the `.env` file to set the correct environment variables. ### 4.2. Dev Container + This project is configured to run in a dev container. You can use the `Dev Containers: Open Folder in Container` command in VSCode to open the project in a dev container. This will automatically install the required dependencies and set up the environment. To use the dev container, you need to have Docker installed on your machine. If you don't have Dev Containers installed, you can install it from the [VSCode Marketplace](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers). ### 4.3. Running with Docker + ```bash # Build a Docker image named 'kadena-indexer' using the Dockerfile in current directory sudo docker build -t kadena-indexer:latest . @@ -87,12 +96,14 @@ sudo docker run --env-file ./indexer/.env -p 3000:3000 kadena-indexer:latest ### 4.4. Running with Docker Compose Docker Compose provides a way to run the entire indexer stack with a single command. 
While you could run each service separately (database, migrations, GraphQL server, and streaming service), Docker Compose orchestrates all these components together, handling their dependencies and startup order automatically. The services are defined in `docker-compose.yml`, which includes: + - PostgreSQL database - Database migrations - GraphQL API server - Streaming indexer service To start all services: + ```bash yarn dev ``` @@ -100,6 +111,7 @@ yarn dev **NOTE:** Using the image with Docker Compose requires the database `DB_USERNAME` to default to `postgres`. ### 4.5. Running Postgres Container + This workflow will start the PostgreSQL database in a temporary container. Remove the `--rm` flag to keep the container running after the command is finished. ```bash @@ -118,6 +130,7 @@ docker run --rm --name postgres-indexer \ ## 5. Indexer ### 5.1. Running the Indexer + Assuming you've already started the Docker container, you can run the following commands to start the indexer: **Note**: Run each command in a separate terminal window -- with the exception of `yarn create:database`, as they are long-running processes. @@ -162,6 +175,7 @@ yarn test **NOTE:** This is not being actively maintained at the moment. Install act for local testing: + ```bash # For MacOS brew install act @@ -172,6 +186,7 @@ sudo apt-get install act ``` Then run the indexer workflow by using the following command: + ```bash yarn run-indexer-workflow ``` diff --git a/indexer/codegen.yml b/indexer/codegen.yml index 6650b5d7..52146a4e 100644 --- a/indexer/codegen.yml +++ b/indexer/codegen.yml @@ -2,8 +2,8 @@ schema: ./src/kadena-server/config/schema.graphql generates: ./src/kadena-server/config/graphql-types.ts: # Output path for the generated types plugins: - - "typescript" - - "typescript-resolvers" + - 'typescript' + - 'typescript-resolvers' config: scalars: DateTime: Date diff --git a/indexer/config/config.js b/indexer/config/config.js index 6c6890de..ae111185 100644 --- a/indexer/config/config.js +++ b/indexer/config/config.js @@ -1,23 +1,21 @@ -const fs = require("fs"); -const { Transaction } = require("sequelize"); -const isSslEnabled = process.env.DB_SSL_ENABLED === "true"; +const fs = require('fs'); +const { Transaction } = require('sequelize'); +const isSslEnabled = process.env.DB_SSL_ENABLED === 'true'; module.exports = { development: { username: process.env.DB_USERNAME, password: process.env.DB_PASSWORD, database: process.env.DB_NAME, - host: process.env.DB_HOST || "localhost", - dialect: "postgres", + host: process.env.DB_HOST || 'localhost', + dialect: 'postgres', logging: false, ...(isSslEnabled && { dialectOptions: { ssl: { require: true, rejectUnauthorized: true, - ca: fs - .readFileSync(__dirname + "/../src/config/global-bundle.pem") - .toString(), + ca: fs.readFileSync(__dirname + '/../src/config/global-bundle.pem').toString(), }, }, }), diff --git a/indexer/docker-compose.yml b/indexer/docker-compose.yml index 19f52ecc..9dd4ecfe 100644 --- a/indexer/docker-compose.yml +++ b/indexer/docker-compose.yml @@ -9,11 +9,11 @@ services: POSTGRES_PASSWORD: ${DB_PASSWORD} POSTGRES_DB: ${DB_NAME} ports: - - "5432:5432" + - '5432:5432' volumes: - ${PWD}/indexer/postgres:/var/lib/postgresql/data healthcheck: - test: ["CMD-SHELL", "pg_isready -U ${DB_USERNAME}"] + test: ['CMD-SHELL', 'pg_isready -U ${DB_USERNAME}'] interval: 10s timeout: 5s retries: 5 @@ -25,7 +25,7 @@ services: container_name: db-migration environment: DB_HOST: indexer-db - command: ["yarn", "create:database"] + command: ['yarn', 'create:database'] 
depends_on: indexer-db: condition: service_healthy @@ -37,9 +37,9 @@ services: container_name: kad-indexer-graphql environment: DB_HOST: indexer-db - command: ["yarn", "dev:graphql"] + command: ['yarn', 'dev:graphql'] ports: - - "3001:3001" + - '3001:3001' depends_on: db-migration: condition: service_completed_successfully @@ -52,7 +52,7 @@ services: environment: DB_HOST: indexer-db KADENA_GRAPHQL_API_URL: http://graphql-app - command: ["yarn", "dev:streaming"] + command: ['yarn', 'dev:streaming'] depends_on: graphql-app: condition: service_started diff --git a/indexer/jest.config.ts b/indexer/jest.config.ts index 5f65f4b4..cfd66e6b 100644 --- a/indexer/jest.config.ts +++ b/indexer/jest.config.ts @@ -1,11 +1,11 @@ export default { - preset: "ts-jest", - testEnvironment: "node", + preset: 'ts-jest', + testEnvironment: 'node', transform: { - "^.+\\.ts$": "ts-jest", + '^.+\\.ts$': 'ts-jest', }, - extensionsToTreatAsEsm: [".ts"], + extensionsToTreatAsEsm: ['.ts'], moduleNameMapper: { - "^(\\.{1,2}/.*)\\.js$": "$1", + '^(\\.{1,2}/.*)\\.js$': '$1', }, }; diff --git a/indexer/migrations/20241105002419-add-index-to-signers-pubkey.js b/indexer/migrations/20241105002419-add-index-to-signers-pubkey.js index 1e7730ec..2180549b 100644 --- a/indexer/migrations/20241105002419-add-index-to-signers-pubkey.js +++ b/indexer/migrations/20241105002419-add-index-to-signers-pubkey.js @@ -1,15 +1,15 @@ -"use strict"; +'use strict'; /** @type {import('sequelize-cli').Migration} */ module.exports = { async up(queryInterface) { - await queryInterface.addIndex("Signers", { - fields: ["pubkey"], - name: "signers_pubkey_idx", + await queryInterface.addIndex('Signers', { + fields: ['pubkey'], + name: 'signers_pubkey_idx', }); }, async down(queryInterface) { - await queryInterface.removeIndex("Signers", "signers_pubkey_idx"); + await queryInterface.removeIndex('Signers', 'signers_pubkey_idx'); }, }; diff --git a/indexer/migrations/20241213221154-add-comments-to-avoid-name-clashing-in-postgraphile.js b/indexer/migrations/20241213221154-add-comments-to-avoid-name-clashing-in-postgraphile.js index 5ebe4b32..d6f91e74 100644 --- a/indexer/migrations/20241213221154-add-comments-to-avoid-name-clashing-in-postgraphile.js +++ b/indexer/migrations/20241213221154-add-comments-to-avoid-name-clashing-in-postgraphile.js @@ -1,4 +1,4 @@ -"use strict"; +'use strict'; /** @type {import('sequelize-cli').Migration} */ module.exports = { diff --git a/indexer/src/cache/init.ts b/indexer/src/cache/init.ts index bb9807be..117c286d 100644 --- a/indexer/src/cache/init.ts +++ b/indexer/src/cache/init.ts @@ -1,11 +1,7 @@ -import { ResolverContext } from "../kadena-server/config/apollo-server-config"; -import NodeCache from "node-cache"; -import { - HASH_RATE_AND_TOTAL_DIFFICULTY_KEY, - NETWORK_STATISTICS_KEY, - NODE_INFO_KEY, -} from "./keys"; -import { HashRateAndTotalDifficulty } from "../kadena-server/repository/application/network-repository"; +import { ResolverContext } from '../kadena-server/config/apollo-server-config'; +import NodeCache from 'node-cache'; +import { HASH_RATE_AND_TOTAL_DIFFICULTY_KEY, NETWORK_STATISTICS_KEY, NODE_INFO_KEY } from './keys'; +import { HashRateAndTotalDifficulty } from '../kadena-server/repository/application/network-repository'; export const MEMORY_CACHE = new NodeCache({ stdTTL: 0 }); @@ -30,7 +26,7 @@ export default async function initCache(context: ResolverContext) { }; MEMORY_CACHE.set(HASH_RATE_AND_TOTAL_DIFFICULTY_KEY, newValue); } catch (err) { - console.log("Error getting hash rate and total 
difficulty", err); + console.log('Error getting hash rate and total difficulty', err); } } @@ -39,7 +35,7 @@ export default async function initCache(context: ResolverContext) { const networkStatistics = await networkRepository.getNetworkStatistics(); MEMORY_CACHE.set(NETWORK_STATISTICS_KEY, networkStatistics); } catch (err) { - console.log("Error getting network statistics", err); + console.log('Error getting network statistics', err); } } @@ -48,7 +44,7 @@ export default async function initCache(context: ResolverContext) { const nodeInfo = await networkRepository.getNodeInfo(); MEMORY_CACHE.set(NODE_INFO_KEY, nodeInfo); } catch (err) { - console.log("Error getting node info", err); + console.log('Error getting node info', err); } } diff --git a/indexer/src/cache/keys.ts b/indexer/src/cache/keys.ts index fbea6408..37b9a113 100644 --- a/indexer/src/cache/keys.ts +++ b/indexer/src/cache/keys.ts @@ -1,4 +1,3 @@ -export const HASH_RATE_AND_TOTAL_DIFFICULTY_KEY = - "HASH_RATE_AND_TOTAL_DIFFICULTY_KEY"; -export const NETWORK_STATISTICS_KEY = "NETWORK_STATISTICS_KEY"; -export const NODE_INFO_KEY = "NODE_INFO_KEY"; +export const HASH_RATE_AND_TOTAL_DIFFICULTY_KEY = 'HASH_RATE_AND_TOTAL_DIFFICULTY_KEY'; +export const NETWORK_STATISTICS_KEY = 'NETWORK_STATISTICS_KEY'; +export const NODE_INFO_KEY = 'NODE_INFO_KEY'; diff --git a/indexer/src/config/database.ts b/indexer/src/config/database.ts index d7cc2fc2..5b5b6ec1 100644 --- a/indexer/src/config/database.ts +++ b/indexer/src/config/database.ts @@ -1,24 +1,24 @@ -import fs from "fs"; +import fs from 'fs'; -import { Sequelize, Transaction } from "sequelize"; -import { getRequiredEnvString } from "../utils/helpers"; -import { Pool } from "pg"; +import { Sequelize, Transaction } from 'sequelize'; +import { getRequiredEnvString } from '../utils/helpers'; +import { Pool } from 'pg'; -const DB_USERNAME = getRequiredEnvString("DB_USERNAME"); -const DB_PASSWORD = getRequiredEnvString("DB_PASSWORD"); -const DB_NAME = getRequiredEnvString("DB_NAME"); -const DB_HOST = getRequiredEnvString("DB_HOST"); -const DB_SSL_ENABLED = getRequiredEnvString("DB_SSL_ENABLED"); +const DB_USERNAME = getRequiredEnvString('DB_USERNAME'); +const DB_PASSWORD = getRequiredEnvString('DB_PASSWORD'); +const DB_NAME = getRequiredEnvString('DB_NAME'); +const DB_HOST = getRequiredEnvString('DB_HOST'); +const DB_SSL_ENABLED = getRequiredEnvString('DB_SSL_ENABLED'); const DB_CONNECTION = `postgres://${DB_USERNAME}:${DB_PASSWORD}@${DB_HOST}/${DB_NAME}`; -const isSslEnabled = DB_SSL_ENABLED === "true"; +const isSslEnabled = DB_SSL_ENABLED === 'true'; export const rootPgPool = new Pool({ connectionString: DB_CONNECTION, ...(isSslEnabled && { ssl: { rejectUnauthorized: true, - ca: fs.readFileSync(__dirname + "/global-bundle.pem").toString(), + ca: fs.readFileSync(__dirname + '/global-bundle.pem').toString(), }, }), }); @@ -28,8 +28,8 @@ export const sequelize = new Sequelize( process.env.DB_USERNAME as string, process.env.DB_PASSWORD as string, { - host: process.env.DB_HOST || "localhost", - dialect: "postgres", + host: process.env.DB_HOST || 'localhost', + dialect: 'postgres', pool: { max: 20, min: 1, @@ -45,7 +45,7 @@ export const sequelize = new Sequelize( ssl: { require: true, rejectUnauthorized: true, - ca: fs.readFileSync(__dirname + "/global-bundle.pem").toString(), + ca: fs.readFileSync(__dirname + '/global-bundle.pem').toString(), }, }, }), @@ -56,9 +56,9 @@ export const sequelize = new Sequelize( export async function closeDatabase(): Promise { try { await sequelize.close(); - 
console.log("Connection has been closed successfully."); + console.log('Connection has been closed successfully.'); } catch (error) { - console.error("Unable to close the connection:", error); + console.error('Unable to close the connection:', error); throw error; } } diff --git a/indexer/src/config/init.ts b/indexer/src/config/init.ts index ab3cca46..748955e6 100644 --- a/indexer/src/config/init.ts +++ b/indexer/src/config/init.ts @@ -1,13 +1,13 @@ -import { QueryTypes } from "sequelize"; -import { sequelize } from "./database"; -import "../models/guard"; +import { QueryTypes } from 'sequelize'; +import { sequelize } from './database'; +import '../models/guard'; export async function initializeDatabase(noTrigger = true): Promise { try { await sequelize.authenticate(); - console.log("Connection has been established successfully."); + console.log('Connection has been established successfully.'); } catch (error) { - console.error("Unable to connect to the database:", error); + console.error('Unable to connect to the database:', error); throw error; } @@ -23,7 +23,7 @@ export async function initializeDatabase(noTrigger = true): Promise { ); if (row?.exists) { - console.log("Creation skipped."); + console.log('Creation skipped.'); return; } @@ -34,7 +34,7 @@ export async function initializeDatabase(noTrigger = true): Promise { `); await sequelize.sync({ force: false }); - console.log("Tables have been synchronized successfully."); + console.log('Tables have been synchronized successfully.'); if (noTrigger) return; @@ -42,7 +42,7 @@ export async function initializeDatabase(noTrigger = true): Promise { // Balances // -------------------------------- - console.log("Sync update_balances()..."); + console.log('Sync update_balances()...'); // Create the update_balances function await sequelize.query(` @@ -138,7 +138,7 @@ export async function initializeDatabase(noTrigger = true): Promise { $$ LANGUAGE plpgsql; `); - console.log("Sync update_balances_trigger()..."); + console.log('Sync update_balances_trigger()...'); // Create the trigger await sequelize.query(` @@ -222,7 +222,7 @@ export async function initializeDatabase(noTrigger = true): Promise { // Orphan blocks // -------------------------------- - console.log("Sync public.check_canonical()..."); + console.log('Sync public.check_canonical()...'); // Create the check canonical function await sequelize.query(` @@ -335,7 +335,7 @@ export async function initializeDatabase(noTrigger = true): Promise { // $function$ // ;`); - console.log("Sync public.check_upward_orphans()..."); + console.log('Sync public.check_upward_orphans()...'); await sequelize.query(` CREATE OR REPLACE FUNCTION public.check_upward_orphans() @@ -411,7 +411,7 @@ export async function initializeDatabase(noTrigger = true): Promise { $function$ ;`); - console.log("Sync blocks_propagate_canonical_function()..."); + console.log('Sync blocks_propagate_canonical_function()...'); // Propagate canonical trigger to transactions await sequelize.query(` @@ -425,7 +425,7 @@ export async function initializeDatabase(noTrigger = true): Promise { END; $$ LANGUAGE plpgsql;`); - console.log("Sync blocks_propagate_canonical..."); + console.log('Sync blocks_propagate_canonical...'); await sequelize.query(` CREATE OR REPLACE TRIGGER blocks_propagate_canonical @@ -433,7 +433,7 @@ export async function initializeDatabase(noTrigger = true): Promise { FOR EACH ROW EXECUTE FUNCTION blocks_propagate_canonical_function();`); - console.log("Sync transactions_propagate_canonical_function..."); + 
console.log('Sync transactions_propagate_canonical_function...'); // Propagate canonical trigger to transfers await sequelize.query(` @@ -447,7 +447,7 @@ export async function initializeDatabase(noTrigger = true): Promise { END; $$ LANGUAGE plpgsql;`); - console.log("Sync transactions_propagate_canonical..."); + console.log('Sync transactions_propagate_canonical...'); await sequelize.query(` CREATE OR REPLACE TRIGGER transactions_propagate_canonical @@ -464,7 +464,7 @@ export async function initializeDatabase(noTrigger = true): Promise { // FOR EACH ROW // EXECUTE FUNCTION check_backward_orphans();`); - console.log("Sync check_orphan_blocks_upward..."); + console.log('Sync check_orphan_blocks_upward...'); await sequelize.query(` CREATE OR REPLACE TRIGGER check_orphan_blocks_upward @@ -472,7 +472,7 @@ export async function initializeDatabase(noTrigger = true): Promise { FOR EACH ROW EXECUTE FUNCTION check_upward_orphans();`); - console.log("Sync public.update_transactions_count()..."); + console.log('Sync public.update_transactions_count()...'); await sequelize.query(` CREATE OR REPLACE FUNCTION public.update_transactions_count() @@ -490,7 +490,7 @@ export async function initializeDatabase(noTrigger = true): Promise { $function$ ;`); - console.log("Sync trigger_update_transactions_count..."); + console.log('Sync trigger_update_transactions_count...'); await sequelize.query(` CREATE OR REPLACE TRIGGER trigger_update_transactions_count after @@ -501,7 +501,7 @@ export async function initializeDatabase(noTrigger = true): Promise { // Update fungibles count - console.log("Sync public.update_fungibles_count()..."); + console.log('Sync public.update_fungibles_count()...'); await sequelize.query(` CREATE OR REPLACE FUNCTION public.update_fungibles_count() @@ -522,7 +522,7 @@ export async function initializeDatabase(noTrigger = true): Promise { $function$ ;`); - console.log("Sync trigger_update_fungibles_count..."); + console.log('Sync trigger_update_fungibles_count...'); await sequelize.query(` create or replace trigger trigger_update_fungibles_count after @@ -533,7 +533,7 @@ export async function initializeDatabase(noTrigger = true): Promise { // Update polyfungibles count - console.log("Sync public.update_polyfungibles_count()..."); + console.log('Sync public.update_polyfungibles_count()...'); await sequelize.query(` CREATE OR REPLACE FUNCTION public.update_polyfungibles_count() @@ -555,7 +555,7 @@ export async function initializeDatabase(noTrigger = true): Promise { ; `); - console.log("Sync trigger_update_polyfungibles_count..."); + console.log('Sync trigger_update_polyfungibles_count...'); await sequelize.query(` create or replace trigger trigger_update_polyfungibles_count after @@ -564,7 +564,7 @@ export async function initializeDatabase(noTrigger = true): Promise { public."Balances" for each row execute function update_polyfungibles_count() ;`); - console.log("Sync public.get_holders_by_module..."); + console.log('Sync public.get_holders_by_module...'); await sequelize.query(` CREATE OR REPLACE FUNCTION public.get_holders_by_module( @@ -626,9 +626,9 @@ export async function initializeDatabase(noTrigger = true): Promise { END; $$ LANGUAGE plpgsql;`); - console.log("Trigger function and trigger have been created successfully."); + console.log('Trigger function and trigger have been created successfully.'); } catch (error) { - console.error("Unable to create tables:", error); + console.error('Unable to create tables:', error); throw error; } } diff --git a/indexer/src/index.ts 
b/indexer/src/index.ts index bd355e8a..3fb47e26 100644 --- a/indexer/src/index.ts +++ b/indexer/src/index.ts @@ -1,24 +1,24 @@ -import dotenv from "dotenv"; -console.log("Loading environment variables..."); +import dotenv from 'dotenv'; +console.log('Loading environment variables...'); dotenv.config(); -import { program } from "commander"; -import { startStreaming } from "./services/sync/streaming"; -import { usePostgraphile } from "./server/metrics"; -import { useKadenaGraphqlServer } from "./kadena-server/server"; -import { closeDatabase } from "./config/database"; -import { initializeDatabase } from "./config/init"; -import { startGuardsBackfill } from "./services/sync/guards"; -import { startBackfillCoinbaseTransactions } from "./services/sync/coinbase"; +import { program } from 'commander'; +import { startStreaming } from './services/sync/streaming'; +import { usePostgraphile } from './server/metrics'; +import { useKadenaGraphqlServer } from './kadena-server/server'; +import { closeDatabase } from './config/database'; +import { initializeDatabase } from './config/init'; +import { startGuardsBackfill } from './services/sync/guards'; +import { startBackfillCoinbaseTransactions } from './services/sync/coinbase'; program - .option("-s, --streaming", "Start streaming blockchain data") - .option("-g, --oldGraphql", "Start GraphQL server based on Postgraphile") - .option("-t, --graphql", "Start GraphQL server based on kadena schema") - .option("-f, --guards", "Backfill the guards") + .option('-s, --streaming', 'Start streaming blockchain data') + .option('-g, --oldGraphql', 'Start GraphQL server based on Postgraphile') + .option('-t, --graphql', 'Start GraphQL server based on kadena schema') + .option('-f, --guards', 'Backfill the guards') // this option shouldn't be used if you initialize the indexer from the beginning - .option("-c, --coinbase", "Backfill coinbase transactions") - .option("-z, --database", "Init the database"); + .option('-c, --coinbase', 'Backfill coinbase transactions') + .option('-z, --database', 'Init the database'); program.parse(process.argv); @@ -47,10 +47,10 @@ async function main() { } else if (options.graphql) { await useKadenaGraphqlServer(); } else { - console.log("No specific task requested."); + console.log('No specific task requested.'); } } catch (error) { - console.error("An error occurred during the initialization:", error); + console.error('An error occurred during the initialization:', error); } } @@ -60,11 +60,11 @@ async function main() { */ async function handleGracefulShutdown(signal: string) { console.log(`Received ${signal}. 
Graceful shutdown start.`); - console.log("Graceful shutdown complete."); + console.log('Graceful shutdown complete.'); process.exit(0); } -process.on("SIGINT", handleGracefulShutdown); -process.on("SIGTERM", handleGracefulShutdown); +process.on('SIGINT', handleGracefulShutdown); +process.on('SIGTERM', handleGracefulShutdown); main(); diff --git a/indexer/src/jobs/publisher-job.ts b/indexer/src/jobs/publisher-job.ts index 04104c93..3d6c4878 100644 --- a/indexer/src/jobs/publisher-job.ts +++ b/indexer/src/jobs/publisher-job.ts @@ -1,8 +1,8 @@ -import zod from "zod"; -import { getRequiredEnvString } from "../utils/helpers"; +import zod from 'zod'; +import { getRequiredEnvString } from '../utils/helpers'; -const KADENA_GRAPHQL_URL = getRequiredEnvString("KADENA_GRAPHQL_API_URL"); -const KADENA_GRAPHQL_PORT = getRequiredEnvString("KADENA_GRAPHQL_API_PORT"); +const KADENA_GRAPHQL_URL = getRequiredEnvString('KADENA_GRAPHQL_API_URL'); +const KADENA_GRAPHQL_PORT = getRequiredEnvString('KADENA_GRAPHQL_API_PORT'); export const dispatchInfoSchema = zod.object({ hash: zod.string(), @@ -18,8 +18,8 @@ export const dispatch = async (dispatchInfo: DispatchInfo) => { try { const url = `${KADENA_GRAPHQL_URL}:${KADENA_GRAPHQL_PORT}/new-block`; const response = await fetch(url, { - method: "POST", - headers: { "Content-Type": "application/json" }, + method: 'POST', + headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(dispatchInfo), }); @@ -29,8 +29,8 @@ export const dispatch = async (dispatchInfo: DispatchInfo) => { } return true; } catch (err: unknown) { - const errorData = err instanceof Error ? err.message : "Unknown error"; - console.error("Dispatcher error:", errorData); + const errorData = err instanceof Error ? err.message : 'Unknown error'; + console.error('Dispatcher error:', errorData); return false; } }; diff --git a/indexer/src/kadena-server/config/apollo-server-config.ts b/indexer/src/kadena-server/config/apollo-server-config.ts index c675583e..f21d91e1 100644 --- a/indexer/src/kadena-server/config/apollo-server-config.ts +++ b/indexer/src/kadena-server/config/apollo-server-config.ts @@ -1,27 +1,25 @@ -import DataLoader from "dataloader"; -import BlockRepository, { - BlockOutput, -} from "../repository/application/block-repository"; +import DataLoader from 'dataloader'; +import BlockRepository, { BlockOutput } from '../repository/application/block-repository'; import TransactionRepository, { TransactionOutput, -} from "../repository/application/transaction-repository"; -import BalanceRepository from "../repository/application/balance-repository"; -import EventRepository from "../repository/application/event-repository"; -import TransferRepository from "../repository/application/transfer-repository"; -import NetworkRepository from "../repository/application/network-repository"; -import { PubSub } from "graphql-subscriptions"; -import BlockDbRepository from "../repository/infra/repository/block-db-repository"; -import TransactionDbRepository from "../repository/infra/repository/transaction-db-repository"; -import BalanceDbRepository from "../repository/infra/repository/balance-db-repository"; -import EventDbRepository from "../repository/infra/repository/event-db-repository"; -import TransferDbRepository from "../repository/infra/repository/transfer-db-repository"; -import NetworkDbRepository from "../repository/infra/repository/network-db-repository"; -import GasGateway from "../repository/gateway/gas-gateway"; -import GasApiGateway from 
"../repository/infra/gateway/gas-api-gateway"; -import MempoolGateway from "../repository/gateway/mempool-gateway"; -import MempoolApiGateway from "../repository/infra/gateway/mempool-api-gateway"; -import PactGateway from "../repository/gateway/pact-gateway"; -import PactApiGateway from "../repository/infra/gateway/pact-api-gateway"; +} from '../repository/application/transaction-repository'; +import BalanceRepository from '../repository/application/balance-repository'; +import EventRepository from '../repository/application/event-repository'; +import TransferRepository from '../repository/application/transfer-repository'; +import NetworkRepository from '../repository/application/network-repository'; +import { PubSub } from 'graphql-subscriptions'; +import BlockDbRepository from '../repository/infra/repository/block-db-repository'; +import TransactionDbRepository from '../repository/infra/repository/transaction-db-repository'; +import BalanceDbRepository from '../repository/infra/repository/balance-db-repository'; +import EventDbRepository from '../repository/infra/repository/event-db-repository'; +import TransferDbRepository from '../repository/infra/repository/transfer-db-repository'; +import NetworkDbRepository from '../repository/infra/repository/network-db-repository'; +import GasGateway from '../repository/gateway/gas-gateway'; +import GasApiGateway from '../repository/infra/gateway/gas-api-gateway'; +import MempoolGateway from '../repository/gateway/mempool-gateway'; +import MempoolApiGateway from '../repository/infra/gateway/mempool-api-gateway'; +import PactGateway from '../repository/gateway/pact-gateway'; +import PactApiGateway from '../repository/infra/gateway/pact-api-gateway'; export const publishSubscribe = new PubSub(); @@ -62,16 +60,16 @@ export const createGraphqlContext = () => { return Promise.resolve({ ...context, - getBlocksByHashesLoader: new DataLoader((hashes) => + getBlocksByHashesLoader: new DataLoader(hashes => blockRepository.getBlockByHashes(hashes as string[]), ), - getBlocksByEventIdsLoader: new DataLoader((eventIds) => + getBlocksByEventIdsLoader: new DataLoader(eventIds => blockRepository.getBlocksByEventIds(eventIds as string[]), ), - getTransactionsByEventIdsLoader: new DataLoader((eventIds) => + getTransactionsByEventIdsLoader: new DataLoader(eventIds => transactionRepository.getTransactionsByEventIds(eventIds as string[]), ), - getBlocksByTransactionIdsLoader: new DataLoader((hashes) => + getBlocksByTransactionIdsLoader: new DataLoader(hashes => blockRepository.getBlocksByTransactionIds(hashes as string[]), ), }); diff --git a/indexer/src/kadena-server/config/graphql-types.ts b/indexer/src/kadena-server/config/graphql-types.ts index 671bb249..50b862c2 100644 --- a/indexer/src/kadena-server/config/graphql-types.ts +++ b/indexer/src/kadena-server/config/graphql-types.ts @@ -4,23 +4,27 @@ export type InputMaybe = Maybe; export type Exact = { [K in keyof T]: T[K] }; export type MakeOptional = Omit & { [SubKey in K]?: Maybe }; export type MakeMaybe = Omit & { [SubKey in K]: Maybe }; -export type MakeEmpty = { [_ in K]?: never }; -export type Incremental = T | { [P in keyof T]?: P extends ' $fragmentName' | '__typename' ? T[P] : never }; +export type MakeEmpty = { + [_ in K]?: never; +}; +export type Incremental = + | T + | { [P in keyof T]?: P extends ' $fragmentName' | '__typename' ? 
T[P] : never }; export type Omit = Pick>; export type RequireFields = Omit & { [P in K]-?: NonNullable }; /** All built-in and custom scalars, mapped to their actual values */ export type Scalars = { - ID: { input: string; output: string; } - String: { input: string; output: string; } - Boolean: { input: boolean; output: boolean; } - Int: { input: number; output: number; } - Float: { input: number; output: number; } + ID: { input: string; output: string }; + String: { input: string; output: string }; + Boolean: { input: boolean; output: boolean }; + Int: { input: number; output: number }; + Float: { input: number; output: number }; /** The `BigInt` scalar type represents non-fractional signed whole numeric values. */ - BigInt: { input: any; output: any; } + BigInt: { input: any; output: any }; /** A date-time string at UTC, such as 2007-12-03T10:15:30Z, compliant with the `date-time` format outlined in section 5.6 of the RFC 3339 profile of the ISO 8601 standard for representation of dates and times using the Gregorian calendar. */ - DateTime: { input: Date; output: Date; } + DateTime: { input: Date; output: Date }; /** Floats that will have a value of 0 or more. */ - Decimal: { input: any; output: any; } + Decimal: { input: any; output: any }; }; /** A unit of information that stores a set of verified transactions. */ @@ -51,7 +55,6 @@ export type Block = Node & { weight: Scalars['String']['output']; }; - /** A unit of information that stores a set of verified transactions. */ export type BlockEventsArgs = { after?: InputMaybe; @@ -60,7 +63,6 @@ export type BlockEventsArgs = { last?: InputMaybe; }; - /** A unit of information that stores a set of verified transactions. */ export type BlockTransactionsArgs = { after?: InputMaybe; @@ -160,7 +162,6 @@ export type FungibleAccount = Node & { transfers: FungibleAccountTransfersConnection; }; - /** A fungible-specific account. */ export type FungibleAccountTransactionsArgs = { after?: InputMaybe; @@ -169,7 +170,6 @@ export type FungibleAccountTransactionsArgs = { last?: InputMaybe; }; - /** A fungible-specific account. */ export type FungibleAccountTransfersArgs = { after?: InputMaybe; @@ -219,7 +219,6 @@ export type FungibleChainAccount = Node & { transfers: FungibleChainAccountTransfersConnection; }; - /** A fungible specific chain-account. */ export type FungibleChainAccountTransactionsArgs = { after?: InputMaybe; @@ -228,7 +227,6 @@ export type FungibleChainAccountTransactionsArgs = { last?: InputMaybe; }; - /** A fungible specific chain-account. */ export type FungibleChainAccountTransfersArgs = { after?: InputMaybe; @@ -345,7 +343,6 @@ export type NonFungibleAccount = Node & { transactions: NonFungibleAccountTransactionsConnection; }; - /** A non-fungible-specific account. */ export type NonFungibleAccountTransactionsArgs = { after?: InputMaybe; @@ -378,7 +375,6 @@ export type NonFungibleChainAccount = Node & { transactions: NonFungibleChainAccountTransactionsConnection; }; - /** A chain and non-fungible-specific account. */ export type NonFungibleChainAccountTransactionsArgs = { after?: InputMaybe; @@ -483,16 +479,16 @@ export type Query = { fungibleChainAccountsByPublicKey: Array; /** * Estimate the gas limit for one or more transactions. Throws an error when the transaction fails or is invalid. The input accepts a JSON object and based on the parameters passed it will determine what type of format it is and return the gas limit estimation. 
The following types are supported: - *   + * * - `full-transaction`: A complete transaction object. Required parameters: `cmd`, `hash` and `sigs`. * - `stringified-command`: A JSON stringified command. Required parameters: `cmd`. It also optionally accepts `sigs`. * - `full-command`: A full command. Required parameters: `payload`, `meta` and `signers`. * - `partial-command`: A partial command. Required parameters: `payload` and either `meta` or `signers`. In case `meta` is not given, but `signers` is given, you can also add `chainId` as a parameter. * - `payload`: A just the payload of a command. Required parameters: `payload` and `chainId`. * - `code`: The code of an execution. Required parameters: `code` and `chainId`. - *   + * * Every type accepts an optional parameter called `networkId` to override the default value from the environment variables. - *   + * * Example of the input needed for a type `code` query: `gasLimitEstimate(input: "{\"code\":\"(coin.details \\\"k:1234\\\")\",\"chainId\":\"3\"}")` */ gasLimitEstimate: Array; @@ -523,12 +519,10 @@ export type Query = { transfers: QueryTransfersConnection; }; - export type QueryBlockArgs = { hash: Scalars['String']['input']; }; - export type QueryBlocksFromDepthArgs = { after?: InputMaybe; before?: InputMaybe; @@ -538,7 +532,6 @@ export type QueryBlocksFromDepthArgs = { minimumDepth: Scalars['Int']['input']; }; - export type QueryBlocksFromHeightArgs = { after?: InputMaybe; before?: InputMaybe; @@ -549,7 +542,6 @@ export type QueryBlocksFromHeightArgs = { startHeight: Scalars['Int']['input']; }; - export type QueryCompletedBlockHeightsArgs = { after?: InputMaybe; before?: InputMaybe; @@ -560,7 +552,6 @@ export type QueryCompletedBlockHeightsArgs = { last?: InputMaybe; }; - export type QueryEventsArgs = { after?: InputMaybe; before?: InputMaybe; @@ -577,78 +568,65 @@ export type QueryEventsArgs = { requestKey?: InputMaybe; }; - export type QueryFungibleAccountArgs = { accountName: Scalars['String']['input']; fungibleName?: InputMaybe; }; - export type QueryFungibleAccountsByPublicKeyArgs = { fungibleName?: InputMaybe; publicKey: Scalars['String']['input']; }; - export type QueryFungibleChainAccountArgs = { accountName: Scalars['String']['input']; chainId: Scalars['String']['input']; fungibleName?: InputMaybe; }; - export type QueryFungibleChainAccountsArgs = { accountName: Scalars['String']['input']; chainIds?: InputMaybe>; fungibleName?: InputMaybe; }; - export type QueryFungibleChainAccountsByPublicKeyArgs = { chainId: Scalars['String']['input']; fungibleName?: InputMaybe; publicKey: Scalars['String']['input']; }; - export type QueryGasLimitEstimateArgs = { input: Array; }; - export type QueryNodeArgs = { id: Scalars['ID']['input']; }; - export type QueryNodesArgs = { ids: Array; }; - export type QueryNonFungibleAccountArgs = { accountName: Scalars['String']['input']; }; - export type QueryNonFungibleChainAccountArgs = { accountName: Scalars['String']['input']; chainId: Scalars['String']['input']; }; - export type QueryPactQueryArgs = { pactQuery: Array; }; - export type QueryTransactionArgs = { blockHash?: InputMaybe; minimumDepth?: InputMaybe; requestKey: Scalars['String']['input']; }; - export type QueryTransactionsArgs = { accountName?: InputMaybe; after?: InputMaybe; @@ -664,7 +642,6 @@ export type QueryTransactionsArgs = { requestKey?: InputMaybe; }; - export type QueryTransactionsByPublicKeyArgs = { after?: InputMaybe; before?: InputMaybe; @@ -673,7 +650,6 @@ export type QueryTransactionsByPublicKeyArgs = { publicKey: 
Scalars['String']['input']; }; - export type QueryTransfersArgs = { accountName?: InputMaybe; after?: InputMaybe; @@ -801,9 +777,9 @@ export type Subscription = { __typename?: 'Subscription'; /** * Listen for events by qualifiedName (e.g. `coin.TRANSFER`). - *   + * * The parametersFilter is a stringified JSON object that matches the [JSON object property filters](https://www.prisma.io/docs/orm/prisma-client/special-fields-and-types/working-with-json-fields#filter-on-object-property) from Prisma. - *   + * * An example of such a filter parameter value: `events(parametersFilter: "{\"array_starts_with\": \"k:abcdefg\"}")` */ events?: Maybe>; @@ -815,7 +791,6 @@ export type Subscription = { transaction?: Maybe; }; - export type SubscriptionEventsArgs = { chainId?: InputMaybe; minimumDepth?: InputMaybe; @@ -823,18 +798,15 @@ export type SubscriptionEventsArgs = { qualifiedEventName: Scalars['String']['input']; }; - export type SubscriptionNewBlocksArgs = { chainIds?: InputMaybe>; }; - export type SubscriptionNewBlocksFromDepthArgs = { chainIds?: InputMaybe>; minimumDepth: Scalars['Int']['input']; }; - export type SubscriptionTransactionArgs = { chainId?: InputMaybe; requestKey: Scalars['String']['input']; @@ -919,7 +891,6 @@ export type TransactionResult = { transfers: TransactionResultTransfersConnection; }; - /** The result of a transaction. */ export type TransactionResultEventsArgs = { after?: InputMaybe; @@ -928,7 +899,6 @@ export type TransactionResultEventsArgs = { last?: InputMaybe; }; - /** The result of a transaction. */ export type TransactionResultTransfersArgs = { after?: InputMaybe; @@ -1006,38 +976,43 @@ export type UserGuard = IGuard & { raw: Scalars['String']['output']; }; - - export type ResolverTypeWrapper = Promise | T; - export type ResolverWithResolve = { resolve: ResolverFn; }; -export type Resolver = ResolverFn | ResolverWithResolve; +export type Resolver = + | ResolverFn + | ResolverWithResolve; export type ResolverFn = ( parent: TParent, args: TArgs, context: TContext, - info: GraphQLResolveInfo + info: GraphQLResolveInfo, ) => Promise | TResult; export type SubscriptionSubscribeFn = ( parent: TParent, args: TArgs, context: TContext, - info: GraphQLResolveInfo + info: GraphQLResolveInfo, ) => AsyncIterable | Promise>; export type SubscriptionResolveFn = ( parent: TParent, args: TArgs, context: TContext, - info: GraphQLResolveInfo + info: GraphQLResolveInfo, ) => TResult | Promise; -export interface SubscriptionSubscriberObject { +export interface SubscriptionSubscriberObject< + TResult, + TKey extends string, + TParent, + TContext, + TArgs, +> { subscribe: SubscriptionSubscribeFn<{ [key in TKey]: TResult }, TParent, TContext, TArgs>; resolve?: SubscriptionResolveFn; } @@ -1051,17 +1026,27 @@ export type SubscriptionObject | SubscriptionResolverObject; -export type SubscriptionResolver = +export type SubscriptionResolver< + TResult, + TKey extends string, + TParent = {}, + TContext = {}, + TArgs = {}, +> = | ((...args: any[]) => SubscriptionObject) | SubscriptionObject; export type TypeResolveFn = ( parent: TParent, context: TContext, - info: GraphQLResolveInfo + info: GraphQLResolveInfo, ) => Maybe | Promise>; -export type IsTypeOfResolverFn = (obj: T, context: TContext, info: GraphQLResolveInfo) => boolean | Promise; +export type IsTypeOfResolverFn = ( + obj: T, + context: TContext, + info: GraphQLResolveInfo, +) => boolean | Promise; export type NextResolverFn = () => Promise; @@ -1070,47 +1055,158 @@ export type DirectiveResolverFn TResult | Promise; /** Mapping of 
union types */ export type ResolversUnionTypes<_RefType extends Record> = { - TransactionInfo: ( TransactionMempoolInfo ) | ( Omit & { block: _RefType['Block'], events: _RefType['TransactionResultEventsConnection'], transfers: _RefType['TransactionResultTransfersConnection'] } ); - TransactionPayload: ( ContinuationPayload ) | ( ExecutionPayload ); + TransactionInfo: + | TransactionMempoolInfo + | (Omit & { + block: _RefType['Block']; + events: _RefType['TransactionResultEventsConnection']; + transfers: _RefType['TransactionResultTransfersConnection']; + }); + TransactionPayload: ContinuationPayload | ExecutionPayload; }; /** Mapping of interface types */ export type ResolversInterfaceTypes<_RefType extends Record> = { - IGuard: ( KeysetGuard ) | ( RawGuard ) | ( UserGuard ); - Node: ( Omit & { events: _RefType['BlockEventsConnection'], minerAccount: _RefType['FungibleChainAccount'], parent?: Maybe<_RefType['Block']>, transactions: _RefType['BlockTransactionsConnection'] } ) | ( Omit & { block: _RefType['Block'], transaction?: Maybe<_RefType['Transaction']> } ) | ( Omit & { chainAccounts: Array<_RefType['FungibleChainAccount']>, transactions: _RefType['FungibleAccountTransactionsConnection'], transfers: _RefType['FungibleAccountTransfersConnection'] } ) | ( Omit & { guard: _RefType['IGuard'], transactions: _RefType['FungibleChainAccountTransactionsConnection'], transfers: _RefType['FungibleChainAccountTransfersConnection'] } ) | ( Omit & { chainAccounts: Array<_RefType['NonFungibleChainAccount']>, nonFungibleTokenBalances: Array<_RefType['NonFungibleTokenBalance']>, transactions: _RefType['NonFungibleAccountTransactionsConnection'] } ) | ( Omit & { nonFungibleTokenBalances: Array<_RefType['NonFungibleTokenBalance']>, transactions: _RefType['NonFungibleChainAccountTransactionsConnection'] } ) | ( Omit & { guard: _RefType['IGuard'] } ) | ( Signer ) | ( Omit & { cmd: _RefType['TransactionCommand'], orphanedTransactions?: Maybe>>, result: _RefType['TransactionInfo'] } ) | ( Omit & { block: _RefType['Block'], crossChainTransfer?: Maybe<_RefType['Transfer']>, transaction?: Maybe<_RefType['Transaction']> } ); + IGuard: KeysetGuard | RawGuard | UserGuard; + Node: + | (Omit & { + events: _RefType['BlockEventsConnection']; + minerAccount: _RefType['FungibleChainAccount']; + parent?: Maybe<_RefType['Block']>; + transactions: _RefType['BlockTransactionsConnection']; + }) + | (Omit & { + block: _RefType['Block']; + transaction?: Maybe<_RefType['Transaction']>; + }) + | (Omit & { + chainAccounts: Array<_RefType['FungibleChainAccount']>; + transactions: _RefType['FungibleAccountTransactionsConnection']; + transfers: _RefType['FungibleAccountTransfersConnection']; + }) + | (Omit & { + guard: _RefType['IGuard']; + transactions: _RefType['FungibleChainAccountTransactionsConnection']; + transfers: _RefType['FungibleChainAccountTransfersConnection']; + }) + | (Omit & { + chainAccounts: Array<_RefType['NonFungibleChainAccount']>; + nonFungibleTokenBalances: Array<_RefType['NonFungibleTokenBalance']>; + transactions: _RefType['NonFungibleAccountTransactionsConnection']; + }) + | (Omit & { + nonFungibleTokenBalances: Array<_RefType['NonFungibleTokenBalance']>; + transactions: _RefType['NonFungibleChainAccountTransactionsConnection']; + }) + | (Omit & { guard: _RefType['IGuard'] }) + | Signer + | (Omit & { + cmd: _RefType['TransactionCommand']; + orphanedTransactions?: Maybe>>; + result: _RefType['TransactionInfo']; + }) + | (Omit & { + block: _RefType['Block']; + crossChainTransfer?: Maybe<_RefType['Transfer']>; 
+ transaction?: Maybe<_RefType['Transaction']>; + }); }; /** Mapping between all available schema types and the resolvers types */ export type ResolversTypes = { BigInt: ResolverTypeWrapper; - Block: ResolverTypeWrapper & { events: ResolversTypes['BlockEventsConnection'], minerAccount: ResolversTypes['FungibleChainAccount'], parent?: Maybe, transactions: ResolversTypes['BlockTransactionsConnection'] }>; - BlockEventsConnection: ResolverTypeWrapper & { edges: Array }>; - BlockEventsConnectionEdge: ResolverTypeWrapper & { node: ResolversTypes['Event'] }>; + Block: ResolverTypeWrapper< + Omit & { + events: ResolversTypes['BlockEventsConnection']; + minerAccount: ResolversTypes['FungibleChainAccount']; + parent?: Maybe; + transactions: ResolversTypes['BlockTransactionsConnection']; + } + >; + BlockEventsConnection: ResolverTypeWrapper< + Omit & { + edges: Array; + } + >; + BlockEventsConnectionEdge: ResolverTypeWrapper< + Omit & { node: ResolversTypes['Event'] } + >; BlockNeighbor: ResolverTypeWrapper; - BlockTransactionsConnection: ResolverTypeWrapper & { edges: Array }>; - BlockTransactionsConnectionEdge: ResolverTypeWrapper & { node: ResolversTypes['Transaction'] }>; + BlockTransactionsConnection: ResolverTypeWrapper< + Omit & { + edges: Array; + } + >; + BlockTransactionsConnectionEdge: ResolverTypeWrapper< + Omit & { node: ResolversTypes['Transaction'] } + >; Boolean: ResolverTypeWrapper; ContinuationPayload: ResolverTypeWrapper; DateTime: ResolverTypeWrapper; Decimal: ResolverTypeWrapper; - Event: ResolverTypeWrapper & { block: ResolversTypes['Block'], transaction?: Maybe }>; + Event: ResolverTypeWrapper< + Omit & { + block: ResolversTypes['Block']; + transaction?: Maybe; + } + >; ExecutionPayload: ResolverTypeWrapper; Float: ResolverTypeWrapper; - FungibleAccount: ResolverTypeWrapper & { chainAccounts: Array, transactions: ResolversTypes['FungibleAccountTransactionsConnection'], transfers: ResolversTypes['FungibleAccountTransfersConnection'] }>; - FungibleAccountTransactionsConnection: ResolverTypeWrapper & { edges: Array }>; - FungibleAccountTransactionsConnectionEdge: ResolverTypeWrapper & { node: ResolversTypes['Transaction'] }>; - FungibleAccountTransfersConnection: ResolverTypeWrapper & { edges: Array }>; - FungibleAccountTransfersConnectionEdge: ResolverTypeWrapper & { node: ResolversTypes['Transfer'] }>; - FungibleChainAccount: ResolverTypeWrapper & { guard: ResolversTypes['IGuard'], transactions: ResolversTypes['FungibleChainAccountTransactionsConnection'], transfers: ResolversTypes['FungibleChainAccountTransfersConnection'] }>; - FungibleChainAccountTransactionsConnection: ResolverTypeWrapper & { edges: Array }>; - FungibleChainAccountTransactionsConnectionEdge: ResolverTypeWrapper & { node: ResolversTypes['Transaction'] }>; - FungibleChainAccountTransfersConnection: ResolverTypeWrapper & { edges: Array }>; - FungibleChainAccountTransfersConnectionEdge: ResolverTypeWrapper & { node: ResolversTypes['Transfer'] }>; + FungibleAccount: ResolverTypeWrapper< + Omit & { + chainAccounts: Array; + transactions: ResolversTypes['FungibleAccountTransactionsConnection']; + transfers: ResolversTypes['FungibleAccountTransfersConnection']; + } + >; + FungibleAccountTransactionsConnection: ResolverTypeWrapper< + Omit & { + edges: Array; + } + >; + FungibleAccountTransactionsConnectionEdge: ResolverTypeWrapper< + Omit & { + node: ResolversTypes['Transaction']; + } + >; + FungibleAccountTransfersConnection: ResolverTypeWrapper< + Omit & { + edges: Array; + } + >; + 
FungibleAccountTransfersConnectionEdge: ResolverTypeWrapper< + Omit & { node: ResolversTypes['Transfer'] } + >; + FungibleChainAccount: ResolverTypeWrapper< + Omit & { + guard: ResolversTypes['IGuard']; + transactions: ResolversTypes['FungibleChainAccountTransactionsConnection']; + transfers: ResolversTypes['FungibleChainAccountTransfersConnection']; + } + >; + FungibleChainAccountTransactionsConnection: ResolverTypeWrapper< + Omit & { + edges: Array; + } + >; + FungibleChainAccountTransactionsConnectionEdge: ResolverTypeWrapper< + Omit & { + node: ResolversTypes['Transaction']; + } + >; + FungibleChainAccountTransfersConnection: ResolverTypeWrapper< + Omit & { + edges: Array; + } + >; + FungibleChainAccountTransfersConnectionEdge: ResolverTypeWrapper< + Omit & { node: ResolversTypes['Transfer'] } + >; GasLimitEstimation: ResolverTypeWrapper; GenesisHeight: ResolverTypeWrapper; GraphConfiguration: ResolverTypeWrapper; @@ -1120,80 +1216,235 @@ export type ResolversTypes = { KeysetGuard: ResolverTypeWrapper; NetworkInfo: ResolverTypeWrapper; Node: ResolverTypeWrapper['Node']>; - NonFungibleAccount: ResolverTypeWrapper & { chainAccounts: Array, nonFungibleTokenBalances: Array, transactions: ResolversTypes['NonFungibleAccountTransactionsConnection'] }>; - NonFungibleAccountTransactionsConnection: ResolverTypeWrapper & { edges: Array }>; - NonFungibleAccountTransactionsConnectionEdge: ResolverTypeWrapper & { node: ResolversTypes['Transaction'] }>; - NonFungibleChainAccount: ResolverTypeWrapper & { nonFungibleTokenBalances: Array, transactions: ResolversTypes['NonFungibleChainAccountTransactionsConnection'] }>; - NonFungibleChainAccountTransactionsConnection: ResolverTypeWrapper & { edges: Array }>; - NonFungibleChainAccountTransactionsConnectionEdge: ResolverTypeWrapper & { node: ResolversTypes['Transaction'] }>; + NonFungibleAccount: ResolverTypeWrapper< + Omit & { + chainAccounts: Array; + nonFungibleTokenBalances: Array; + transactions: ResolversTypes['NonFungibleAccountTransactionsConnection']; + } + >; + NonFungibleAccountTransactionsConnection: ResolverTypeWrapper< + Omit & { + edges: Array; + } + >; + NonFungibleAccountTransactionsConnectionEdge: ResolverTypeWrapper< + Omit & { + node: ResolversTypes['Transaction']; + } + >; + NonFungibleChainAccount: ResolverTypeWrapper< + Omit & { + nonFungibleTokenBalances: Array; + transactions: ResolversTypes['NonFungibleChainAccountTransactionsConnection']; + } + >; + NonFungibleChainAccountTransactionsConnection: ResolverTypeWrapper< + Omit & { + edges: Array; + } + >; + NonFungibleChainAccountTransactionsConnectionEdge: ResolverTypeWrapper< + Omit & { + node: ResolversTypes['Transaction']; + } + >; NonFungibleToken: ResolverTypeWrapper; - NonFungibleTokenBalance: ResolverTypeWrapper & { guard: ResolversTypes['IGuard'] }>; + NonFungibleTokenBalance: ResolverTypeWrapper< + Omit & { guard: ResolversTypes['IGuard'] } + >; PactQuery: PactQuery; PactQueryData: PactQueryData; PactQueryResponse: ResolverTypeWrapper; PageInfo: ResolverTypeWrapper; Query: ResolverTypeWrapper<{}>; - QueryBlocksFromDepthConnection: ResolverTypeWrapper & { edges: Array> }>; - QueryBlocksFromDepthConnectionEdge: ResolverTypeWrapper & { node: ResolversTypes['Block'] }>; - QueryBlocksFromHeightConnection: ResolverTypeWrapper & { edges: Array> }>; - QueryBlocksFromHeightConnectionEdge: ResolverTypeWrapper & { node: ResolversTypes['Block'] }>; - QueryCompletedBlockHeightsConnection: ResolverTypeWrapper & { edges: Array> }>; - QueryCompletedBlockHeightsConnectionEdge: 
ResolverTypeWrapper & { node: ResolversTypes['Block'] }>; - QueryEventsConnection: ResolverTypeWrapper & { edges: Array }>; - QueryEventsConnectionEdge: ResolverTypeWrapper & { node: ResolversTypes['Event'] }>; - QueryTransactionsByPublicKeyConnection: ResolverTypeWrapper & { edges: Array> }>; - QueryTransactionsByPublicKeyConnectionEdge: ResolverTypeWrapper & { node: ResolversTypes['Transaction'] }>; - QueryTransactionsConnection: ResolverTypeWrapper & { edges: Array }>; - QueryTransactionsConnectionEdge: ResolverTypeWrapper & { node: ResolversTypes['Transaction'] }>; - QueryTransfersConnection: ResolverTypeWrapper & { edges: Array }>; - QueryTransfersConnectionEdge: ResolverTypeWrapper & { node: ResolversTypes['Transfer'] }>; + QueryBlocksFromDepthConnection: ResolverTypeWrapper< + Omit & { + edges: Array>; + } + >; + QueryBlocksFromDepthConnectionEdge: ResolverTypeWrapper< + Omit & { node: ResolversTypes['Block'] } + >; + QueryBlocksFromHeightConnection: ResolverTypeWrapper< + Omit & { + edges: Array>; + } + >; + QueryBlocksFromHeightConnectionEdge: ResolverTypeWrapper< + Omit & { node: ResolversTypes['Block'] } + >; + QueryCompletedBlockHeightsConnection: ResolverTypeWrapper< + Omit & { + edges: Array>; + } + >; + QueryCompletedBlockHeightsConnectionEdge: ResolverTypeWrapper< + Omit & { node: ResolversTypes['Block'] } + >; + QueryEventsConnection: ResolverTypeWrapper< + Omit & { + edges: Array; + } + >; + QueryEventsConnectionEdge: ResolverTypeWrapper< + Omit & { node: ResolversTypes['Event'] } + >; + QueryTransactionsByPublicKeyConnection: ResolverTypeWrapper< + Omit & { + edges: Array>; + } + >; + QueryTransactionsByPublicKeyConnectionEdge: ResolverTypeWrapper< + Omit & { + node: ResolversTypes['Transaction']; + } + >; + QueryTransactionsConnection: ResolverTypeWrapper< + Omit & { + edges: Array; + } + >; + QueryTransactionsConnectionEdge: ResolverTypeWrapper< + Omit & { node: ResolversTypes['Transaction'] } + >; + QueryTransfersConnection: ResolverTypeWrapper< + Omit & { + edges: Array; + } + >; + QueryTransfersConnectionEdge: ResolverTypeWrapper< + Omit & { node: ResolversTypes['Transfer'] } + >; RawGuard: ResolverTypeWrapper; Signer: ResolverTypeWrapper; String: ResolverTypeWrapper; Subscription: ResolverTypeWrapper<{}>; - Transaction: ResolverTypeWrapper & { cmd: ResolversTypes['TransactionCommand'], orphanedTransactions?: Maybe>>, result: ResolversTypes['TransactionInfo'] }>; + Transaction: ResolverTypeWrapper< + Omit & { + cmd: ResolversTypes['TransactionCommand']; + orphanedTransactions?: Maybe>>; + result: ResolversTypes['TransactionInfo']; + } + >; TransactionCapability: ResolverTypeWrapper; - TransactionCommand: ResolverTypeWrapper & { meta: ResolversTypes['TransactionMeta'], payload: ResolversTypes['TransactionPayload'] }>; + TransactionCommand: ResolverTypeWrapper< + Omit & { + meta: ResolversTypes['TransactionMeta']; + payload: ResolversTypes['TransactionPayload']; + } + >; TransactionInfo: ResolverTypeWrapper['TransactionInfo']>; TransactionMempoolInfo: ResolverTypeWrapper; TransactionMeta: ResolverTypeWrapper; - TransactionPayload: ResolverTypeWrapper['TransactionPayload']>; - TransactionResult: ResolverTypeWrapper & { block: ResolversTypes['Block'], events: ResolversTypes['TransactionResultEventsConnection'], transfers: ResolversTypes['TransactionResultTransfersConnection'] }>; - TransactionResultEventsConnection: ResolverTypeWrapper & { edges: Array> }>; - TransactionResultEventsConnectionEdge: ResolverTypeWrapper & { node: ResolversTypes['Event'] }>; - 
TransactionResultTransfersConnection: ResolverTypeWrapper & { edges: Array> }>; - TransactionResultTransfersConnectionEdge: ResolverTypeWrapper & { node: ResolversTypes['Transfer'] }>; + TransactionPayload: ResolverTypeWrapper< + ResolversUnionTypes['TransactionPayload'] + >; + TransactionResult: ResolverTypeWrapper< + Omit & { + block: ResolversTypes['Block']; + events: ResolversTypes['TransactionResultEventsConnection']; + transfers: ResolversTypes['TransactionResultTransfersConnection']; + } + >; + TransactionResultEventsConnection: ResolverTypeWrapper< + Omit & { + edges: Array>; + } + >; + TransactionResultEventsConnectionEdge: ResolverTypeWrapper< + Omit & { node: ResolversTypes['Event'] } + >; + TransactionResultTransfersConnection: ResolverTypeWrapper< + Omit & { + edges: Array>; + } + >; + TransactionResultTransfersConnectionEdge: ResolverTypeWrapper< + Omit & { node: ResolversTypes['Transfer'] } + >; TransactionSignature: ResolverTypeWrapper; - Transfer: ResolverTypeWrapper & { block: ResolversTypes['Block'], crossChainTransfer?: Maybe, transaction?: Maybe }>; + Transfer: ResolverTypeWrapper< + Omit & { + block: ResolversTypes['Block']; + crossChainTransfer?: Maybe; + transaction?: Maybe; + } + >; UserGuard: ResolverTypeWrapper; }; /** Mapping between all available schema types and the resolvers parents */ export type ResolversParentTypes = { BigInt: Scalars['BigInt']['output']; - Block: Omit & { events: ResolversParentTypes['BlockEventsConnection'], minerAccount: ResolversParentTypes['FungibleChainAccount'], parent?: Maybe, transactions: ResolversParentTypes['BlockTransactionsConnection'] }; - BlockEventsConnection: Omit & { edges: Array }; - BlockEventsConnectionEdge: Omit & { node: ResolversParentTypes['Event'] }; + Block: Omit & { + events: ResolversParentTypes['BlockEventsConnection']; + minerAccount: ResolversParentTypes['FungibleChainAccount']; + parent?: Maybe; + transactions: ResolversParentTypes['BlockTransactionsConnection']; + }; + BlockEventsConnection: Omit & { + edges: Array; + }; + BlockEventsConnectionEdge: Omit & { + node: ResolversParentTypes['Event']; + }; BlockNeighbor: BlockNeighbor; - BlockTransactionsConnection: Omit & { edges: Array }; - BlockTransactionsConnectionEdge: Omit & { node: ResolversParentTypes['Transaction'] }; + BlockTransactionsConnection: Omit & { + edges: Array; + }; + BlockTransactionsConnectionEdge: Omit & { + node: ResolversParentTypes['Transaction']; + }; Boolean: Scalars['Boolean']['output']; ContinuationPayload: ContinuationPayload; DateTime: Scalars['DateTime']['output']; Decimal: Scalars['Decimal']['output']; - Event: Omit & { block: ResolversParentTypes['Block'], transaction?: Maybe }; + Event: Omit & { + block: ResolversParentTypes['Block']; + transaction?: Maybe; + }; ExecutionPayload: ExecutionPayload; Float: Scalars['Float']['output']; - FungibleAccount: Omit & { chainAccounts: Array, transactions: ResolversParentTypes['FungibleAccountTransactionsConnection'], transfers: ResolversParentTypes['FungibleAccountTransfersConnection'] }; - FungibleAccountTransactionsConnection: Omit & { edges: Array }; - FungibleAccountTransactionsConnectionEdge: Omit & { node: ResolversParentTypes['Transaction'] }; - FungibleAccountTransfersConnection: Omit & { edges: Array }; - FungibleAccountTransfersConnectionEdge: Omit & { node: ResolversParentTypes['Transfer'] }; - FungibleChainAccount: Omit & { guard: ResolversParentTypes['IGuard'], transactions: ResolversParentTypes['FungibleChainAccountTransactionsConnection'], transfers: 
ResolversParentTypes['FungibleChainAccountTransfersConnection'] }; - FungibleChainAccountTransactionsConnection: Omit & { edges: Array }; - FungibleChainAccountTransactionsConnectionEdge: Omit & { node: ResolversParentTypes['Transaction'] }; - FungibleChainAccountTransfersConnection: Omit & { edges: Array }; - FungibleChainAccountTransfersConnectionEdge: Omit & { node: ResolversParentTypes['Transfer'] }; + FungibleAccount: Omit & { + chainAccounts: Array; + transactions: ResolversParentTypes['FungibleAccountTransactionsConnection']; + transfers: ResolversParentTypes['FungibleAccountTransfersConnection']; + }; + FungibleAccountTransactionsConnection: Omit & { + edges: Array; + }; + FungibleAccountTransactionsConnectionEdge: Omit< + FungibleAccountTransactionsConnectionEdge, + 'node' + > & { node: ResolversParentTypes['Transaction'] }; + FungibleAccountTransfersConnection: Omit & { + edges: Array; + }; + FungibleAccountTransfersConnectionEdge: Omit & { + node: ResolversParentTypes['Transfer']; + }; + FungibleChainAccount: Omit & { + guard: ResolversParentTypes['IGuard']; + transactions: ResolversParentTypes['FungibleChainAccountTransactionsConnection']; + transfers: ResolversParentTypes['FungibleChainAccountTransfersConnection']; + }; + FungibleChainAccountTransactionsConnection: Omit< + FungibleChainAccountTransactionsConnection, + 'edges' + > & { edges: Array }; + FungibleChainAccountTransactionsConnectionEdge: Omit< + FungibleChainAccountTransactionsConnectionEdge, + 'node' + > & { node: ResolversParentTypes['Transaction'] }; + FungibleChainAccountTransfersConnection: Omit< + FungibleChainAccountTransfersConnection, + 'edges' + > & { edges: Array }; + FungibleChainAccountTransfersConnectionEdge: Omit< + FungibleChainAccountTransfersConnectionEdge, + 'node' + > & { node: ResolversParentTypes['Transfer'] }; GasLimitEstimation: GasLimitEstimation; GenesisHeight: GenesisHeight; GraphConfiguration: GraphConfiguration; @@ -1203,51 +1454,132 @@ export type ResolversParentTypes = { KeysetGuard: KeysetGuard; NetworkInfo: NetworkInfo; Node: ResolversInterfaceTypes['Node']; - NonFungibleAccount: Omit & { chainAccounts: Array, nonFungibleTokenBalances: Array, transactions: ResolversParentTypes['NonFungibleAccountTransactionsConnection'] }; - NonFungibleAccountTransactionsConnection: Omit & { edges: Array }; - NonFungibleAccountTransactionsConnectionEdge: Omit & { node: ResolversParentTypes['Transaction'] }; - NonFungibleChainAccount: Omit & { nonFungibleTokenBalances: Array, transactions: ResolversParentTypes['NonFungibleChainAccountTransactionsConnection'] }; - NonFungibleChainAccountTransactionsConnection: Omit & { edges: Array }; - NonFungibleChainAccountTransactionsConnectionEdge: Omit & { node: ResolversParentTypes['Transaction'] }; + NonFungibleAccount: Omit< + NonFungibleAccount, + 'chainAccounts' | 'nonFungibleTokenBalances' | 'transactions' + > & { + chainAccounts: Array; + nonFungibleTokenBalances: Array; + transactions: ResolversParentTypes['NonFungibleAccountTransactionsConnection']; + }; + NonFungibleAccountTransactionsConnection: Omit< + NonFungibleAccountTransactionsConnection, + 'edges' + > & { edges: Array }; + NonFungibleAccountTransactionsConnectionEdge: Omit< + NonFungibleAccountTransactionsConnectionEdge, + 'node' + > & { node: ResolversParentTypes['Transaction'] }; + NonFungibleChainAccount: Omit< + NonFungibleChainAccount, + 'nonFungibleTokenBalances' | 'transactions' + > & { + nonFungibleTokenBalances: Array; + transactions: 
ResolversParentTypes['NonFungibleChainAccountTransactionsConnection']; + }; + NonFungibleChainAccountTransactionsConnection: Omit< + NonFungibleChainAccountTransactionsConnection, + 'edges' + > & { edges: Array }; + NonFungibleChainAccountTransactionsConnectionEdge: Omit< + NonFungibleChainAccountTransactionsConnectionEdge, + 'node' + > & { node: ResolversParentTypes['Transaction'] }; NonFungibleToken: NonFungibleToken; - NonFungibleTokenBalance: Omit & { guard: ResolversParentTypes['IGuard'] }; + NonFungibleTokenBalance: Omit & { + guard: ResolversParentTypes['IGuard']; + }; PactQuery: PactQuery; PactQueryData: PactQueryData; PactQueryResponse: PactQueryResponse; PageInfo: PageInfo; Query: {}; - QueryBlocksFromDepthConnection: Omit & { edges: Array> }; - QueryBlocksFromDepthConnectionEdge: Omit & { node: ResolversParentTypes['Block'] }; - QueryBlocksFromHeightConnection: Omit & { edges: Array> }; - QueryBlocksFromHeightConnectionEdge: Omit & { node: ResolversParentTypes['Block'] }; - QueryCompletedBlockHeightsConnection: Omit & { edges: Array> }; - QueryCompletedBlockHeightsConnectionEdge: Omit & { node: ResolversParentTypes['Block'] }; - QueryEventsConnection: Omit & { edges: Array }; - QueryEventsConnectionEdge: Omit & { node: ResolversParentTypes['Event'] }; - QueryTransactionsByPublicKeyConnection: Omit & { edges: Array> }; - QueryTransactionsByPublicKeyConnectionEdge: Omit & { node: ResolversParentTypes['Transaction'] }; - QueryTransactionsConnection: Omit & { edges: Array }; - QueryTransactionsConnectionEdge: Omit & { node: ResolversParentTypes['Transaction'] }; - QueryTransfersConnection: Omit & { edges: Array }; - QueryTransfersConnectionEdge: Omit & { node: ResolversParentTypes['Transfer'] }; + QueryBlocksFromDepthConnection: Omit & { + edges: Array>; + }; + QueryBlocksFromDepthConnectionEdge: Omit & { + node: ResolversParentTypes['Block']; + }; + QueryBlocksFromHeightConnection: Omit & { + edges: Array>; + }; + QueryBlocksFromHeightConnectionEdge: Omit & { + node: ResolversParentTypes['Block']; + }; + QueryCompletedBlockHeightsConnection: Omit & { + edges: Array>; + }; + QueryCompletedBlockHeightsConnectionEdge: Omit< + QueryCompletedBlockHeightsConnectionEdge, + 'node' + > & { node: ResolversParentTypes['Block'] }; + QueryEventsConnection: Omit & { + edges: Array; + }; + QueryEventsConnectionEdge: Omit & { + node: ResolversParentTypes['Event']; + }; + QueryTransactionsByPublicKeyConnection: Omit & { + edges: Array>; + }; + QueryTransactionsByPublicKeyConnectionEdge: Omit< + QueryTransactionsByPublicKeyConnectionEdge, + 'node' + > & { node: ResolversParentTypes['Transaction'] }; + QueryTransactionsConnection: Omit & { + edges: Array; + }; + QueryTransactionsConnectionEdge: Omit & { + node: ResolversParentTypes['Transaction']; + }; + QueryTransfersConnection: Omit & { + edges: Array; + }; + QueryTransfersConnectionEdge: Omit & { + node: ResolversParentTypes['Transfer']; + }; RawGuard: RawGuard; Signer: Signer; String: Scalars['String']['output']; Subscription: {}; - Transaction: Omit & { cmd: ResolversParentTypes['TransactionCommand'], orphanedTransactions?: Maybe>>, result: ResolversParentTypes['TransactionInfo'] }; + Transaction: Omit & { + cmd: ResolversParentTypes['TransactionCommand']; + orphanedTransactions?: Maybe>>; + result: ResolversParentTypes['TransactionInfo']; + }; TransactionCapability: TransactionCapability; - TransactionCommand: Omit & { meta: ResolversParentTypes['TransactionMeta'], payload: ResolversParentTypes['TransactionPayload'] }; + TransactionCommand: 
Omit & { + meta: ResolversParentTypes['TransactionMeta']; + payload: ResolversParentTypes['TransactionPayload']; + }; TransactionInfo: ResolversUnionTypes['TransactionInfo']; TransactionMempoolInfo: TransactionMempoolInfo; TransactionMeta: TransactionMeta; TransactionPayload: ResolversUnionTypes['TransactionPayload']; - TransactionResult: Omit & { block: ResolversParentTypes['Block'], events: ResolversParentTypes['TransactionResultEventsConnection'], transfers: ResolversParentTypes['TransactionResultTransfersConnection'] }; - TransactionResultEventsConnection: Omit & { edges: Array> }; - TransactionResultEventsConnectionEdge: Omit & { node: ResolversParentTypes['Event'] }; - TransactionResultTransfersConnection: Omit & { edges: Array> }; - TransactionResultTransfersConnectionEdge: Omit & { node: ResolversParentTypes['Transfer'] }; + TransactionResult: Omit & { + block: ResolversParentTypes['Block']; + events: ResolversParentTypes['TransactionResultEventsConnection']; + transfers: ResolversParentTypes['TransactionResultTransfersConnection']; + }; + TransactionResultEventsConnection: Omit & { + edges: Array>; + }; + TransactionResultEventsConnectionEdge: Omit & { + node: ResolversParentTypes['Event']; + }; + TransactionResultTransfersConnection: Omit & { + edges: Array>; + }; + TransactionResultTransfersConnectionEdge: Omit< + TransactionResultTransfersConnectionEdge, + 'node' + > & { node: ResolversParentTypes['Transfer'] }; TransactionSignature: TransactionSignature; - Transfer: Omit & { block: ResolversParentTypes['Block'], crossChainTransfer?: Maybe, transaction?: Maybe }; + Transfer: Omit & { + block: ResolversParentTypes['Block']; + crossChainTransfer?: Maybe; + transaction?: Maybe; + }; UserGuard: UserGuard; }; @@ -1255,12 +1587,20 @@ export interface BigIntScalarConfig extends GraphQLScalarTypeConfig = { +export type BlockResolvers< + ContextType = any, + ParentType extends ResolversParentTypes['Block'] = ResolversParentTypes['Block'], +> = { chainId?: Resolver; creationTime?: Resolver; difficulty?: Resolver; epoch?: Resolver; - events?: Resolver>; + events?: Resolver< + ResolversTypes['BlockEventsConnection'], + ParentType, + ContextType, + Partial + >; flags?: Resolver; hash?: Resolver; height?: Resolver; @@ -1272,44 +1612,76 @@ export type BlockResolvers; powHash?: Resolver; target?: Resolver; - transactions?: Resolver>; + transactions?: Resolver< + ResolversTypes['BlockTransactionsConnection'], + ParentType, + ContextType, + Partial + >; weight?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type BlockEventsConnectionResolvers = { +export type BlockEventsConnectionResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['BlockEventsConnection'] = ResolversParentTypes['BlockEventsConnection'], +> = { edges?: Resolver, ParentType, ContextType>; pageInfo?: Resolver; totalCount?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type BlockEventsConnectionEdgeResolvers = { +export type BlockEventsConnectionEdgeResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['BlockEventsConnectionEdge'] = ResolversParentTypes['BlockEventsConnectionEdge'], +> = { cursor?: Resolver; node?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type BlockNeighborResolvers = { +export type BlockNeighborResolvers< + ContextType = any, + ParentType extends ResolversParentTypes['BlockNeighbor'] = ResolversParentTypes['BlockNeighbor'], +> = { chainId?: Resolver; hash?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type 
BlockTransactionsConnectionResolvers = { - edges?: Resolver, ParentType, ContextType>; +export type BlockTransactionsConnectionResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['BlockTransactionsConnection'] = ResolversParentTypes['BlockTransactionsConnection'], +> = { + edges?: Resolver< + Array, + ParentType, + ContextType + >; pageInfo?: Resolver; totalCount?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type BlockTransactionsConnectionEdgeResolvers = { +export type BlockTransactionsConnectionEdgeResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['BlockTransactionsConnectionEdge'] = ResolversParentTypes['BlockTransactionsConnectionEdge'], +> = { cursor?: Resolver; node?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type ContinuationPayloadResolvers = { +export type ContinuationPayloadResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['ContinuationPayload'] = ResolversParentTypes['ContinuationPayload'], +> = { data?: Resolver; pactId?: Resolver, ParentType, ContextType>; proof?: Resolver, ParentType, ContextType>; @@ -1318,15 +1690,20 @@ export type ContinuationPayloadResolvers; }; -export interface DateTimeScalarConfig extends GraphQLScalarTypeConfig { +export interface DateTimeScalarConfig + extends GraphQLScalarTypeConfig { name: 'DateTime'; } -export interface DecimalScalarConfig extends GraphQLScalarTypeConfig { +export interface DecimalScalarConfig + extends GraphQLScalarTypeConfig { name: 'Decimal'; } -export type EventResolvers = { +export type EventResolvers< + ContextType = any, + ParentType extends ResolversParentTypes['Event'] = ResolversParentTypes['Event'], +> = { block?: Resolver; chainId?: Resolver; height?: Resolver; @@ -1342,88 +1719,172 @@ export type EventResolvers; }; -export type ExecutionPayloadResolvers = { +export type ExecutionPayloadResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['ExecutionPayload'] = ResolversParentTypes['ExecutionPayload'], +> = { code?: Resolver, ParentType, ContextType>; data?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type FungibleAccountResolvers = { +export type FungibleAccountResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['FungibleAccount'] = ResolversParentTypes['FungibleAccount'], +> = { accountName?: Resolver; chainAccounts?: Resolver, ParentType, ContextType>; fungibleName?: Resolver; id?: Resolver; totalBalance?: Resolver; - transactions?: Resolver>; - transfers?: Resolver>; + transactions?: Resolver< + ResolversTypes['FungibleAccountTransactionsConnection'], + ParentType, + ContextType, + Partial + >; + transfers?: Resolver< + ResolversTypes['FungibleAccountTransfersConnection'], + ParentType, + ContextType, + Partial + >; __isTypeOf?: IsTypeOfResolverFn; }; -export type FungibleAccountTransactionsConnectionResolvers = { - edges?: Resolver, ParentType, ContextType>; +export type FungibleAccountTransactionsConnectionResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['FungibleAccountTransactionsConnection'] = ResolversParentTypes['FungibleAccountTransactionsConnection'], +> = { + edges?: Resolver< + Array, + ParentType, + ContextType + >; pageInfo?: Resolver; totalCount?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type FungibleAccountTransactionsConnectionEdgeResolvers = { +export type FungibleAccountTransactionsConnectionEdgeResolvers< + ContextType = any, + ParentType extends + 
ResolversParentTypes['FungibleAccountTransactionsConnectionEdge'] = ResolversParentTypes['FungibleAccountTransactionsConnectionEdge'], +> = { cursor?: Resolver; node?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type FungibleAccountTransfersConnectionResolvers = { - edges?: Resolver, ParentType, ContextType>; +export type FungibleAccountTransfersConnectionResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['FungibleAccountTransfersConnection'] = ResolversParentTypes['FungibleAccountTransfersConnection'], +> = { + edges?: Resolver< + Array, + ParentType, + ContextType + >; pageInfo?: Resolver; totalCount?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type FungibleAccountTransfersConnectionEdgeResolvers = { +export type FungibleAccountTransfersConnectionEdgeResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['FungibleAccountTransfersConnectionEdge'] = ResolversParentTypes['FungibleAccountTransfersConnectionEdge'], +> = { cursor?: Resolver; node?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type FungibleChainAccountResolvers = { +export type FungibleChainAccountResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['FungibleChainAccount'] = ResolversParentTypes['FungibleChainAccount'], +> = { accountName?: Resolver; balance?: Resolver; chainId?: Resolver; fungibleName?: Resolver; guard?: Resolver; id?: Resolver; - transactions?: Resolver>; - transfers?: Resolver>; + transactions?: Resolver< + ResolversTypes['FungibleChainAccountTransactionsConnection'], + ParentType, + ContextType, + Partial + >; + transfers?: Resolver< + ResolversTypes['FungibleChainAccountTransfersConnection'], + ParentType, + ContextType, + Partial + >; __isTypeOf?: IsTypeOfResolverFn; }; -export type FungibleChainAccountTransactionsConnectionResolvers = { - edges?: Resolver, ParentType, ContextType>; +export type FungibleChainAccountTransactionsConnectionResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['FungibleChainAccountTransactionsConnection'] = ResolversParentTypes['FungibleChainAccountTransactionsConnection'], +> = { + edges?: Resolver< + Array, + ParentType, + ContextType + >; pageInfo?: Resolver; totalCount?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type FungibleChainAccountTransactionsConnectionEdgeResolvers = { +export type FungibleChainAccountTransactionsConnectionEdgeResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['FungibleChainAccountTransactionsConnectionEdge'] = ResolversParentTypes['FungibleChainAccountTransactionsConnectionEdge'], +> = { cursor?: Resolver; node?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type FungibleChainAccountTransfersConnectionResolvers = { - edges?: Resolver, ParentType, ContextType>; +export type FungibleChainAccountTransfersConnectionResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['FungibleChainAccountTransfersConnection'] = ResolversParentTypes['FungibleChainAccountTransfersConnection'], +> = { + edges?: Resolver< + Array, + ParentType, + ContextType + >; pageInfo?: Resolver; totalCount?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type FungibleChainAccountTransfersConnectionEdgeResolvers = { +export type FungibleChainAccountTransfersConnectionEdgeResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['FungibleChainAccountTransfersConnectionEdge'] = ResolversParentTypes['FungibleChainAccountTransfersConnectionEdge'], +> = { cursor?: Resolver; 
node?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type GasLimitEstimationResolvers = { +export type GasLimitEstimationResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['GasLimitEstimation'] = ResolversParentTypes['GasLimitEstimation'], +> = { amount?: Resolver; inputType?: Resolver; transaction?: Resolver; @@ -1432,33 +1893,49 @@ export type GasLimitEstimationResolvers; }; -export type GenesisHeightResolvers = { +export type GenesisHeightResolvers< + ContextType = any, + ParentType extends ResolversParentTypes['GenesisHeight'] = ResolversParentTypes['GenesisHeight'], +> = { chainId?: Resolver; height?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type GraphConfigurationResolvers = { +export type GraphConfigurationResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['GraphConfiguration'] = ResolversParentTypes['GraphConfiguration'], +> = { minimumBlockHeight?: Resolver, ParentType, ContextType>; version?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type IGuardResolvers = { +export type IGuardResolvers< + ContextType = any, + ParentType extends ResolversParentTypes['IGuard'] = ResolversParentTypes['IGuard'], +> = { __resolveType: TypeResolveFn<'KeysetGuard' | 'RawGuard' | 'UserGuard', ParentType, ContextType>; keys?: Resolver, ParentType, ContextType>; predicate?: Resolver; raw?: Resolver; }; -export type KeysetGuardResolvers = { +export type KeysetGuardResolvers< + ContextType = any, + ParentType extends ResolversParentTypes['KeysetGuard'] = ResolversParentTypes['KeysetGuard'], +> = { keys?: Resolver, ParentType, ContextType>; predicate?: Resolver; raw?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type NetworkInfoResolvers = { +export type NetworkInfoResolvers< + ContextType = any, + ParentType extends ResolversParentTypes['NetworkInfo'] = ResolversParentTypes['NetworkInfo'], +> = { apiVersion?: Resolver; coinsInCirculation?: Resolver; genesisHeights?: Resolver, ParentType, ContextType>; @@ -1476,63 +1953,141 @@ export type NetworkInfoResolvers; }; -export type NodeResolvers = { - __resolveType: TypeResolveFn<'Block' | 'Event' | 'FungibleAccount' | 'FungibleChainAccount' | 'NonFungibleAccount' | 'NonFungibleChainAccount' | 'NonFungibleTokenBalance' | 'Signer' | 'Transaction' | 'Transfer', ParentType, ContextType>; +export type NodeResolvers< + ContextType = any, + ParentType extends ResolversParentTypes['Node'] = ResolversParentTypes['Node'], +> = { + __resolveType: TypeResolveFn< + | 'Block' + | 'Event' + | 'FungibleAccount' + | 'FungibleChainAccount' + | 'NonFungibleAccount' + | 'NonFungibleChainAccount' + | 'NonFungibleTokenBalance' + | 'Signer' + | 'Transaction' + | 'Transfer', + ParentType, + ContextType + >; id?: Resolver; }; -export type NonFungibleAccountResolvers = { +export type NonFungibleAccountResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['NonFungibleAccount'] = ResolversParentTypes['NonFungibleAccount'], +> = { accountName?: Resolver; - chainAccounts?: Resolver, ParentType, ContextType>; + chainAccounts?: Resolver< + Array, + ParentType, + ContextType + >; id?: Resolver; - nonFungibleTokenBalances?: Resolver, ParentType, ContextType>; - transactions?: Resolver>; + nonFungibleTokenBalances?: Resolver< + Array, + ParentType, + ContextType + >; + transactions?: Resolver< + ResolversTypes['NonFungibleAccountTransactionsConnection'], + ParentType, + ContextType, + Partial + >; __isTypeOf?: IsTypeOfResolverFn; }; -export type 
NonFungibleAccountTransactionsConnectionResolvers = { - edges?: Resolver, ParentType, ContextType>; +export type NonFungibleAccountTransactionsConnectionResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['NonFungibleAccountTransactionsConnection'] = ResolversParentTypes['NonFungibleAccountTransactionsConnection'], +> = { + edges?: Resolver< + Array, + ParentType, + ContextType + >; pageInfo?: Resolver; totalCount?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type NonFungibleAccountTransactionsConnectionEdgeResolvers = { +export type NonFungibleAccountTransactionsConnectionEdgeResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['NonFungibleAccountTransactionsConnectionEdge'] = ResolversParentTypes['NonFungibleAccountTransactionsConnectionEdge'], +> = { cursor?: Resolver; node?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type NonFungibleChainAccountResolvers = { +export type NonFungibleChainAccountResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['NonFungibleChainAccount'] = ResolversParentTypes['NonFungibleChainAccount'], +> = { accountName?: Resolver; chainId?: Resolver; id?: Resolver; - nonFungibleTokenBalances?: Resolver, ParentType, ContextType>; - transactions?: Resolver>; + nonFungibleTokenBalances?: Resolver< + Array, + ParentType, + ContextType + >; + transactions?: Resolver< + ResolversTypes['NonFungibleChainAccountTransactionsConnection'], + ParentType, + ContextType, + Partial + >; __isTypeOf?: IsTypeOfResolverFn; }; -export type NonFungibleChainAccountTransactionsConnectionResolvers = { - edges?: Resolver, ParentType, ContextType>; +export type NonFungibleChainAccountTransactionsConnectionResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['NonFungibleChainAccountTransactionsConnection'] = ResolversParentTypes['NonFungibleChainAccountTransactionsConnection'], +> = { + edges?: Resolver< + Array, + ParentType, + ContextType + >; pageInfo?: Resolver; totalCount?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type NonFungibleChainAccountTransactionsConnectionEdgeResolvers = { +export type NonFungibleChainAccountTransactionsConnectionEdgeResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['NonFungibleChainAccountTransactionsConnectionEdge'] = ResolversParentTypes['NonFungibleChainAccountTransactionsConnectionEdge'], +> = { cursor?: Resolver; node?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type NonFungibleTokenResolvers = { +export type NonFungibleTokenResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['NonFungibleToken'] = ResolversParentTypes['NonFungibleToken'], +> = { precision?: Resolver; supply?: Resolver; uri?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type NonFungibleTokenBalanceResolvers = { +export type NonFungibleTokenBalanceResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['NonFungibleTokenBalance'] = ResolversParentTypes['NonFungibleTokenBalance'], +> = { accountName?: Resolver; balance?: Resolver; chainId?: Resolver; @@ -1544,7 +2099,11 @@ export type NonFungibleTokenBalanceResolvers; }; -export type PactQueryResponseResolvers = { +export type PactQueryResponseResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['PactQueryResponse'] = ResolversParentTypes['PactQueryResponse'], +> = { chainId?: Resolver; code?: Resolver; error?: Resolver, ParentType, ContextType>; @@ -1553,7 +2112,10 @@ export type PactQueryResponseResolvers; 
}; -export type PageInfoResolvers = { +export type PageInfoResolvers< + ContextType = any, + ParentType extends ResolversParentTypes['PageInfo'] = ResolversParentTypes['PageInfo'], +> = { endCursor?: Resolver, ParentType, ContextType>; hasNextPage?: Resolver; hasPreviousPage?: Resolver; @@ -1561,128 +2123,316 @@ export type PageInfoResolvers; }; -export type QueryResolvers = { - block?: Resolver, ParentType, ContextType, RequireFields>; - blocksFromDepth?: Resolver, ParentType, ContextType, RequireFields>; - blocksFromHeight?: Resolver>; - completedBlockHeights?: Resolver>; - events?: Resolver>; - fungibleAccount?: Resolver, ParentType, ContextType, RequireFields>; - fungibleAccountsByPublicKey?: Resolver, ParentType, ContextType, RequireFields>; - fungibleChainAccount?: Resolver, ParentType, ContextType, RequireFields>; - fungibleChainAccounts?: Resolver>, ParentType, ContextType, RequireFields>; - fungibleChainAccountsByPublicKey?: Resolver, ParentType, ContextType, RequireFields>; - gasLimitEstimate?: Resolver, ParentType, ContextType, RequireFields>; +export type QueryResolvers< + ContextType = any, + ParentType extends ResolversParentTypes['Query'] = ResolversParentTypes['Query'], +> = { + block?: Resolver< + Maybe, + ParentType, + ContextType, + RequireFields + >; + blocksFromDepth?: Resolver< + Maybe, + ParentType, + ContextType, + RequireFields + >; + blocksFromHeight?: Resolver< + ResolversTypes['QueryBlocksFromHeightConnection'], + ParentType, + ContextType, + RequireFields + >; + completedBlockHeights?: Resolver< + ResolversTypes['QueryCompletedBlockHeightsConnection'], + ParentType, + ContextType, + RequireFields + >; + events?: Resolver< + ResolversTypes['QueryEventsConnection'], + ParentType, + ContextType, + RequireFields + >; + fungibleAccount?: Resolver< + Maybe, + ParentType, + ContextType, + RequireFields + >; + fungibleAccountsByPublicKey?: Resolver< + Array, + ParentType, + ContextType, + RequireFields + >; + fungibleChainAccount?: Resolver< + Maybe, + ParentType, + ContextType, + RequireFields + >; + fungibleChainAccounts?: Resolver< + Maybe>, + ParentType, + ContextType, + RequireFields + >; + fungibleChainAccountsByPublicKey?: Resolver< + Array, + ParentType, + ContextType, + RequireFields< + QueryFungibleChainAccountsByPublicKeyArgs, + 'chainId' | 'fungibleName' | 'publicKey' + > + >; + gasLimitEstimate?: Resolver< + Array, + ParentType, + ContextType, + RequireFields + >; graphConfiguration?: Resolver; lastBlockHeight?: Resolver, ParentType, ContextType>; networkInfo?: Resolver, ParentType, ContextType>; - node?: Resolver, ParentType, ContextType, RequireFields>; - nodes?: Resolver>, ParentType, ContextType, RequireFields>; - nonFungibleAccount?: Resolver, ParentType, ContextType, RequireFields>; - nonFungibleChainAccount?: Resolver, ParentType, ContextType, RequireFields>; - pactQuery?: Resolver, ParentType, ContextType, RequireFields>; - transaction?: Resolver, ParentType, ContextType, RequireFields>; - transactions?: Resolver>; - transactionsByPublicKey?: Resolver>; - transfers?: Resolver>; -}; - -export type QueryBlocksFromDepthConnectionResolvers = { - edges?: Resolver>, ParentType, ContextType>; + node?: Resolver< + Maybe, + ParentType, + ContextType, + RequireFields + >; + nodes?: Resolver< + Array>, + ParentType, + ContextType, + RequireFields + >; + nonFungibleAccount?: Resolver< + Maybe, + ParentType, + ContextType, + RequireFields + >; + nonFungibleChainAccount?: Resolver< + Maybe, + ParentType, + ContextType, + RequireFields + >; + pactQuery?: 
Resolver< + Array, + ParentType, + ContextType, + RequireFields + >; + transaction?: Resolver< + Maybe, + ParentType, + ContextType, + RequireFields + >; + transactions?: Resolver< + ResolversTypes['QueryTransactionsConnection'], + ParentType, + ContextType, + Partial + >; + transactionsByPublicKey?: Resolver< + ResolversTypes['QueryTransactionsByPublicKeyConnection'], + ParentType, + ContextType, + RequireFields + >; + transfers?: Resolver< + ResolversTypes['QueryTransfersConnection'], + ParentType, + ContextType, + Partial + >; +}; + +export type QueryBlocksFromDepthConnectionResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['QueryBlocksFromDepthConnection'] = ResolversParentTypes['QueryBlocksFromDepthConnection'], +> = { + edges?: Resolver< + Array>, + ParentType, + ContextType + >; pageInfo?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type QueryBlocksFromDepthConnectionEdgeResolvers = { +export type QueryBlocksFromDepthConnectionEdgeResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['QueryBlocksFromDepthConnectionEdge'] = ResolversParentTypes['QueryBlocksFromDepthConnectionEdge'], +> = { cursor?: Resolver; node?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type QueryBlocksFromHeightConnectionResolvers = { - edges?: Resolver>, ParentType, ContextType>; +export type QueryBlocksFromHeightConnectionResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['QueryBlocksFromHeightConnection'] = ResolversParentTypes['QueryBlocksFromHeightConnection'], +> = { + edges?: Resolver< + Array>, + ParentType, + ContextType + >; pageInfo?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type QueryBlocksFromHeightConnectionEdgeResolvers = { +export type QueryBlocksFromHeightConnectionEdgeResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['QueryBlocksFromHeightConnectionEdge'] = ResolversParentTypes['QueryBlocksFromHeightConnectionEdge'], +> = { cursor?: Resolver; node?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type QueryCompletedBlockHeightsConnectionResolvers = { - edges?: Resolver>, ParentType, ContextType>; +export type QueryCompletedBlockHeightsConnectionResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['QueryCompletedBlockHeightsConnection'] = ResolversParentTypes['QueryCompletedBlockHeightsConnection'], +> = { + edges?: Resolver< + Array>, + ParentType, + ContextType + >; pageInfo?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type QueryCompletedBlockHeightsConnectionEdgeResolvers = { +export type QueryCompletedBlockHeightsConnectionEdgeResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['QueryCompletedBlockHeightsConnectionEdge'] = ResolversParentTypes['QueryCompletedBlockHeightsConnectionEdge'], +> = { cursor?: Resolver; node?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type QueryEventsConnectionResolvers = { +export type QueryEventsConnectionResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['QueryEventsConnection'] = ResolversParentTypes['QueryEventsConnection'], +> = { edges?: Resolver, ParentType, ContextType>; pageInfo?: Resolver; totalCount?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type QueryEventsConnectionEdgeResolvers = { +export type QueryEventsConnectionEdgeResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['QueryEventsConnectionEdge'] = ResolversParentTypes['QueryEventsConnectionEdge'], +> = { cursor?: Resolver; node?: 
Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type QueryTransactionsByPublicKeyConnectionResolvers = { - edges?: Resolver>, ParentType, ContextType>; +export type QueryTransactionsByPublicKeyConnectionResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['QueryTransactionsByPublicKeyConnection'] = ResolversParentTypes['QueryTransactionsByPublicKeyConnection'], +> = { + edges?: Resolver< + Array>, + ParentType, + ContextType + >; pageInfo?: Resolver; totalCount?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type QueryTransactionsByPublicKeyConnectionEdgeResolvers = { +export type QueryTransactionsByPublicKeyConnectionEdgeResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['QueryTransactionsByPublicKeyConnectionEdge'] = ResolversParentTypes['QueryTransactionsByPublicKeyConnectionEdge'], +> = { cursor?: Resolver; node?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type QueryTransactionsConnectionResolvers = { - edges?: Resolver, ParentType, ContextType>; +export type QueryTransactionsConnectionResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['QueryTransactionsConnection'] = ResolversParentTypes['QueryTransactionsConnection'], +> = { + edges?: Resolver< + Array, + ParentType, + ContextType + >; pageInfo?: Resolver; totalCount?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type QueryTransactionsConnectionEdgeResolvers = { +export type QueryTransactionsConnectionEdgeResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['QueryTransactionsConnectionEdge'] = ResolversParentTypes['QueryTransactionsConnectionEdge'], +> = { cursor?: Resolver; node?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type QueryTransfersConnectionResolvers = { +export type QueryTransfersConnectionResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['QueryTransfersConnection'] = ResolversParentTypes['QueryTransfersConnection'], +> = { edges?: Resolver, ParentType, ContextType>; pageInfo?: Resolver; totalCount?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type QueryTransfersConnectionEdgeResolvers = { +export type QueryTransfersConnectionEdgeResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['QueryTransfersConnectionEdge'] = ResolversParentTypes['QueryTransfersConnectionEdge'], +> = { cursor?: Resolver; node?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type RawGuardResolvers = { +export type RawGuardResolvers< + ContextType = any, + ParentType extends ResolversParentTypes['RawGuard'] = ResolversParentTypes['RawGuard'], +> = { keys?: Resolver, ParentType, ContextType>; predicate?: Resolver; raw?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type SignerResolvers = { +export type SignerResolvers< + ContextType = any, + ParentType extends ResolversParentTypes['Signer'] = ResolversParentTypes['Signer'], +> = { address?: Resolver, ParentType, ContextType>; clist?: Resolver, ParentType, ContextType>; id?: Resolver; @@ -1692,30 +2442,72 @@ export type SignerResolvers; }; -export type SubscriptionResolvers = { - events?: SubscriptionResolver>, "events", ParentType, ContextType, RequireFields>; - newBlocks?: SubscriptionResolver>, "newBlocks", ParentType, ContextType, Partial>; - newBlocksFromDepth?: SubscriptionResolver>, "newBlocksFromDepth", ParentType, ContextType, RequireFields>; - transaction?: SubscriptionResolver, "transaction", ParentType, ContextType, RequireFields>; -}; - -export type TransactionResolvers = { 
+export type SubscriptionResolvers< + ContextType = any, + ParentType extends ResolversParentTypes['Subscription'] = ResolversParentTypes['Subscription'], +> = { + events?: SubscriptionResolver< + Maybe>, + 'events', + ParentType, + ContextType, + RequireFields + >; + newBlocks?: SubscriptionResolver< + Maybe>, + 'newBlocks', + ParentType, + ContextType, + Partial + >; + newBlocksFromDepth?: SubscriptionResolver< + Maybe>, + 'newBlocksFromDepth', + ParentType, + ContextType, + RequireFields + >; + transaction?: SubscriptionResolver< + Maybe, + 'transaction', + ParentType, + ContextType, + RequireFields + >; +}; + +export type TransactionResolvers< + ContextType = any, + ParentType extends ResolversParentTypes['Transaction'] = ResolversParentTypes['Transaction'], +> = { cmd?: Resolver; hash?: Resolver; id?: Resolver; - orphanedTransactions?: Resolver>>, ParentType, ContextType>; + orphanedTransactions?: Resolver< + Maybe>>, + ParentType, + ContextType + >; result?: Resolver; sigs?: Resolver, ParentType, ContextType>; __isTypeOf?: IsTypeOfResolverFn; }; -export type TransactionCapabilityResolvers = { +export type TransactionCapabilityResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['TransactionCapability'] = ResolversParentTypes['TransactionCapability'], +> = { args?: Resolver; name?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type TransactionCommandResolvers = { +export type TransactionCommandResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['TransactionCommand'] = ResolversParentTypes['TransactionCommand'], +> = { meta?: Resolver; networkId?: Resolver; nonce?: Resolver; @@ -1724,16 +2516,32 @@ export type TransactionCommandResolvers; }; -export type TransactionInfoResolvers = { - __resolveType: TypeResolveFn<'TransactionMempoolInfo' | 'TransactionResult', ParentType, ContextType>; +export type TransactionInfoResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['TransactionInfo'] = ResolversParentTypes['TransactionInfo'], +> = { + __resolveType: TypeResolveFn< + 'TransactionMempoolInfo' | 'TransactionResult', + ParentType, + ContextType + >; }; -export type TransactionMempoolInfoResolvers = { +export type TransactionMempoolInfoResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['TransactionMempoolInfo'] = ResolversParentTypes['TransactionMempoolInfo'], +> = { status?: Resolver, ParentType, ContextType>; __isTypeOf?: IsTypeOfResolverFn; }; -export type TransactionMetaResolvers = { +export type TransactionMetaResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['TransactionMeta'] = ResolversParentTypes['TransactionMeta'], +> = { chainId?: Resolver; creationTime?: Resolver; gasLimit?: Resolver; @@ -1743,58 +2551,107 @@ export type TransactionMetaResolvers; }; -export type TransactionPayloadResolvers = { +export type TransactionPayloadResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['TransactionPayload'] = ResolversParentTypes['TransactionPayload'], +> = { __resolveType: TypeResolveFn<'ContinuationPayload' | 'ExecutionPayload', ParentType, ContextType>; }; -export type TransactionResultResolvers = { +export type TransactionResultResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['TransactionResult'] = ResolversParentTypes['TransactionResult'], +> = { badResult?: Resolver, ParentType, ContextType>; block?: Resolver; continuation?: Resolver, ParentType, ContextType>; eventCount?: Resolver, ParentType, 
ContextType>; - events?: Resolver>; + events?: Resolver< + ResolversTypes['TransactionResultEventsConnection'], + ParentType, + ContextType, + Partial + >; gas?: Resolver; goodResult?: Resolver, ParentType, ContextType>; height?: Resolver; logs?: Resolver, ParentType, ContextType>; metadata?: Resolver; transactionId?: Resolver, ParentType, ContextType>; - transfers?: Resolver>; + transfers?: Resolver< + ResolversTypes['TransactionResultTransfersConnection'], + ParentType, + ContextType, + Partial + >; __isTypeOf?: IsTypeOfResolverFn; }; -export type TransactionResultEventsConnectionResolvers = { - edges?: Resolver>, ParentType, ContextType>; +export type TransactionResultEventsConnectionResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['TransactionResultEventsConnection'] = ResolversParentTypes['TransactionResultEventsConnection'], +> = { + edges?: Resolver< + Array>, + ParentType, + ContextType + >; pageInfo?: Resolver; totalCount?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type TransactionResultEventsConnectionEdgeResolvers = { +export type TransactionResultEventsConnectionEdgeResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['TransactionResultEventsConnectionEdge'] = ResolversParentTypes['TransactionResultEventsConnectionEdge'], +> = { cursor?: Resolver; node?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type TransactionResultTransfersConnectionResolvers = { - edges?: Resolver>, ParentType, ContextType>; +export type TransactionResultTransfersConnectionResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['TransactionResultTransfersConnection'] = ResolversParentTypes['TransactionResultTransfersConnection'], +> = { + edges?: Resolver< + Array>, + ParentType, + ContextType + >; pageInfo?: Resolver; totalCount?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type TransactionResultTransfersConnectionEdgeResolvers = { +export type TransactionResultTransfersConnectionEdgeResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['TransactionResultTransfersConnectionEdge'] = ResolversParentTypes['TransactionResultTransfersConnectionEdge'], +> = { cursor?: Resolver; node?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type TransactionSignatureResolvers = { +export type TransactionSignatureResolvers< + ContextType = any, + ParentType extends + ResolversParentTypes['TransactionSignature'] = ResolversParentTypes['TransactionSignature'], +> = { sig?: Resolver; __isTypeOf?: IsTypeOfResolverFn; }; -export type TransferResolvers = { +export type TransferResolvers< + ContextType = any, + ParentType extends ResolversParentTypes['Transfer'] = ResolversParentTypes['Transfer'], +> = { amount?: Resolver; block?: Resolver; blockHash?: Resolver; @@ -1813,7 +2670,10 @@ export type TransferResolvers; }; -export type UserGuardResolvers = { +export type UserGuardResolvers< + ContextType = any, + ParentType extends ResolversParentTypes['UserGuard'] = ResolversParentTypes['UserGuard'], +> = { args?: Resolver, ParentType, ContextType>; fun?: Resolver; keys?: Resolver, ParentType, ContextType>; @@ -1896,4 +2756,3 @@ export type Resolvers = { Transfer?: TransferResolvers; UserGuard?: UserGuardResolvers; }; - diff --git a/indexer/src/kadena-server/domain/gas/input-checker.gas.ts b/indexer/src/kadena-server/domain/gas/input-checker.gas.ts index ce94acd0..295f3358 100644 --- a/indexer/src/kadena-server/domain/gas/input-checker.gas.ts +++ 
b/indexer/src/kadena-server/domain/gas/input-checker.gas.ts
@@ -1,5 +1,5 @@
-import { GasLimitEstimationError } from "../../errors/gas-limit-estimation-error";
-import { IGasLimitEstimationInput } from "./parser.gas";
+import { GasLimitEstimationError } from '../../errors/gas-limit-estimation-error';
+import { IGasLimitEstimationInput } from './parser.gas';
 import {
   CodeInput,
   FullCommandInput,
@@ -8,50 +8,50 @@ import {
   PayloadInput,
   StringifiedCommandInput,
   UserInput,
-} from "./types.gas";
+} from './types.gas';
 
 export function determineInputType(input: IGasLimitEstimationInput): UserInput {
-  if ("cmd" in input && "hash" in input && "sigs" in input) {
+  if ('cmd' in input && 'hash' in input && 'sigs' in input) {
     return {
-      type: "full-transaction",
+      type: 'full-transaction',
       preflight: true,
       signatureVerification: true,
       ...input,
     } as FullTransactionInput;
-  } else if ("cmd" in input) {
+  } else if ('cmd' in input) {
     return {
-      type: "stringified-command",
+      type: 'stringified-command',
       preflight: true,
       signatureVerification: false,
       ...input,
     } as StringifiedCommandInput;
-  } else if ("payload" in input && "meta" in input && "signers" in input) {
+  } else if ('payload' in input && 'meta' in input && 'signers' in input) {
     return {
-      type: "full-command",
-      preflight: "networkId" in input ? true : false,
+      type: 'full-command',
+      preflight: 'networkId' in input ? true : false,
       signatureVerification: false,
       ...input,
     } as FullCommandInput;
   } else if (
-    "payload" in input &&
-    ("meta" in input || ("signers" in input && "chainId" in input))
+    'payload' in input &&
+    ('meta' in input || ('signers' in input && 'chainId' in input))
   ) {
     return {
-      type: "partial-command",
-      preflight: "networkId" in input ? true : false,
+      type: 'partial-command',
+      preflight: 'networkId' in input ? true : false,
       signatureVerification: false,
       ...input,
     } as PartialCommandInput;
-  } else if ("payload" in input && "chainId" in input) {
+  } else if ('payload' in input && 'chainId' in input) {
     return {
-      type: "payload",
+      type: 'payload',
       preflight: false,
       signatureVerification: false,
       ...input,
     } as PayloadInput;
-  } else if ("code" in input && "chainId" in input) {
+  } else if ('code' in input && 'chainId' in input) {
     return {
-      type: "code",
+      type: 'code',
       preflight: false,
       signatureVerification: false,
       ...input,
@@ -59,6 +59,6 @@ export function determineInputType(input: IGasLimitEstimationInput): UserInput {
   }
 
   throw new GasLimitEstimationError(
-    "Unknown input type. Please see the README for the accepted input format.",
+    'Unknown input type. Please see the README for the accepted input format.',
   );
 }
diff --git a/indexer/src/kadena-server/domain/gas/parser.gas.ts b/indexer/src/kadena-server/domain/gas/parser.gas.ts
index 5acdd2d1..bc0643f0 100644
--- a/indexer/src/kadena-server/domain/gas/parser.gas.ts
+++ b/indexer/src/kadena-server/domain/gas/parser.gas.ts
@@ -1,5 +1,5 @@
-import zod from "zod";
-import { GasLimitEstimationError } from "../../errors/gas-limit-estimation-error";
+import zod from 'zod';
+import { GasLimitEstimationError } from '../../errors/gas-limit-estimation-error';
 
 const schema = zod.object({
   cmd: zod.string().optional(),
@@ -20,7 +20,7 @@ export function parseInput(input: string): IGasLimitEstimationInput {
     return schema.parse(parsed);
   } catch (e) {
     throw new GasLimitEstimationError(
-      "Unable to parse input as JSON. Please see the README for the accepted input format.",
+      'Unable to parse input as JSON. 
Please see the README for the accepted input format.', ); } } diff --git a/indexer/src/kadena-server/domain/gas/transaction.gas.ts b/indexer/src/kadena-server/domain/gas/transaction.gas.ts index 1f57ae07..d0c398b5 100644 --- a/indexer/src/kadena-server/domain/gas/transaction.gas.ts +++ b/indexer/src/kadena-server/domain/gas/transaction.gas.ts @@ -1,31 +1,28 @@ -import { createTransaction, IUnsignedCommand } from "@kadena/client"; -import { composePactCommand } from "@kadena/client/fp"; -import { UserInput } from "./types.gas"; -import { hash as hashFunction } from "@kadena/cryptography-utils"; -import { GasLimitEstimationError } from "../../errors/gas-limit-estimation-error"; +import { createTransaction, IUnsignedCommand } from '@kadena/client'; +import { composePactCommand } from '@kadena/client/fp'; +import { UserInput } from './types.gas'; +import { hash as hashFunction } from '@kadena/cryptography-utils'; +import { GasLimitEstimationError } from '../../errors/gas-limit-estimation-error'; -export const buildTransactionPayload = ( - input: UserInput, - networkId: string, -): IUnsignedCommand => { +export const buildTransactionPayload = (input: UserInput, networkId: string): IUnsignedCommand => { let transaction: IUnsignedCommand; switch (input.type) { - case "full-transaction": + case 'full-transaction': transaction = { cmd: input.cmd, hash: input.hash, - sigs: input.sigs.map((s) => ({ sig: s })), + sigs: input.sigs.map(s => ({ sig: s })), }; break; - case "stringified-command": + case 'stringified-command': transaction = { cmd: input.cmd, hash: hashFunction(input.cmd), - sigs: input.sigs?.map((s) => ({ sig: s })) || [], + sigs: input.sigs?.map(s => ({ sig: s })) || [], }; break; - case "full-command": + case 'full-command': transaction = createTransaction( composePactCommand( { payload: input.payload }, @@ -40,8 +37,8 @@ export const buildTransactionPayload = ( }), ); break; - case "partial-command": - if (!input.meta && "chainId" in input) { + case 'partial-command': + if (!input.meta && 'chainId' in input) { input.meta = { chainId: input.chainId }; } @@ -59,7 +56,7 @@ export const buildTransactionPayload = ( }), ); break; - case "payload": + case 'payload': transaction = createTransaction( composePactCommand( { payload: input.payload }, @@ -72,7 +69,7 @@ export const buildTransactionPayload = ( }), ); break; - case "code": + case 'code': transaction = createTransaction( composePactCommand( { @@ -93,9 +90,7 @@ export const buildTransactionPayload = ( ); break; default: - throw new GasLimitEstimationError( - "Something went wrong generating the transaction.", - ); + throw new GasLimitEstimationError('Something went wrong generating the transaction.'); } return transaction; diff --git a/indexer/src/kadena-server/domain/gas/types.gas.ts b/indexer/src/kadena-server/domain/gas/types.gas.ts index b8c22901..067ffef9 100644 --- a/indexer/src/kadena-server/domain/gas/types.gas.ts +++ b/indexer/src/kadena-server/domain/gas/types.gas.ts @@ -1,4 +1,4 @@ -import { ChainId } from "@kadena/types"; +import { ChainId } from '@kadena/types'; export interface IBaseInput { preflight: boolean; @@ -6,7 +6,7 @@ export interface IBaseInput { } export type FullTransactionInput = IBaseInput & { - type: "full-transaction"; + type: 'full-transaction'; cmd: string; hash: string; sigs: string[]; @@ -14,14 +14,14 @@ export type FullTransactionInput = IBaseInput & { }; export type StringifiedCommandInput = IBaseInput & { - type: "stringified-command"; + type: 'stringified-command'; cmd: string; sigs?: string[]; 
networkId?: string; }; export type FullCommandInput = IBaseInput & { - type: "full-command"; + type: 'full-command'; payload: any; meta: any; signers: any[]; @@ -29,7 +29,7 @@ export type FullCommandInput = IBaseInput & { }; export type PartialCommandInput = IBaseInput & { - type: "partial-command"; + type: 'partial-command'; payload: any; meta?: any; signers?: any[]; @@ -38,14 +38,14 @@ export type PartialCommandInput = IBaseInput & { }; export type PayloadInput = IBaseInput & { - type: "payload"; + type: 'payload'; payload: any; chainId: ChainId; networkId?: string; }; export type CodeInput = IBaseInput & { - type: "code"; + type: 'code'; code: string; chainId: ChainId; networkId?: string; diff --git a/indexer/src/kadena-server/errors/gas-limit-estimation-error.ts b/indexer/src/kadena-server/errors/gas-limit-estimation-error.ts index a998d9ce..41ac18ee 100644 --- a/indexer/src/kadena-server/errors/gas-limit-estimation-error.ts +++ b/indexer/src/kadena-server/errors/gas-limit-estimation-error.ts @@ -1,4 +1,4 @@ -import zod from "zod"; +import zod from 'zod'; const errorSchema = zod.object({ message: zod.string(), diff --git a/indexer/src/kadena-server/errors/pact-command-error.ts b/indexer/src/kadena-server/errors/pact-command-error.ts index c74b6df6..90dc388a 100644 --- a/indexer/src/kadena-server/errors/pact-command-error.ts +++ b/indexer/src/kadena-server/errors/pact-command-error.ts @@ -1,4 +1,4 @@ -import zod from "zod"; +import zod from 'zod'; const errorSchema = zod.object({ message: zod.string(), diff --git a/indexer/src/kadena-server/repository/application/balance-repository.ts b/indexer/src/kadena-server/repository/application/balance-repository.ts index 6292d417..09be0f68 100644 --- a/indexer/src/kadena-server/repository/application/balance-repository.ts +++ b/indexer/src/kadena-server/repository/application/balance-repository.ts @@ -1,7 +1,4 @@ -import { - FungibleAccount, - FungibleChainAccount, -} from "../../config/graphql-types"; +import { FungibleAccount, FungibleChainAccount } from '../../config/graphql-types'; export interface INonFungibleTokenBalance { id: string; @@ -27,19 +24,13 @@ export interface INonFungibleChainAccount { export type FungibleAccountOutput = Omit< FungibleAccount, - "chainAccounts" | "transactions" | "transfers" + 'chainAccounts' | 'transactions' | 'transfers' >; -export type FungibleChainAccountOutput = Omit< - FungibleChainAccount, - "transactions" | "transfers" ->; +export type FungibleChainAccountOutput = Omit; export default interface BalanceRepository { - getAccountInfo( - accountName: string, - fungibleName?: string | null, - ): Promise; + getAccountInfo(accountName: string, fungibleName?: string | null): Promise; getChainsAccountInfo( accountName: string, @@ -47,10 +38,7 @@ export default interface BalanceRepository { chainIds?: string[], ): Promise; - getAccountsByPublicKey( - publicKey: string, - fungibleName: string, - ): Promise; + getAccountsByPublicKey(publicKey: string, fungibleName: string): Promise; getChainAccountsByPublicKey( publicKey: string, @@ -58,9 +46,7 @@ export default interface BalanceRepository { chainId: string, ): Promise; - getNonFungibleAccountInfo( - accountName: string, - ): Promise; + getNonFungibleAccountInfo(accountName: string): Promise; getNonFungibleChainAccountInfo( accountName: string, diff --git a/indexer/src/kadena-server/repository/application/block-repository.ts b/indexer/src/kadena-server/repository/application/block-repository.ts index 600a5b62..0eba6e1f 100644 --- 
a/indexer/src/kadena-server/repository/application/block-repository.ts +++ b/indexer/src/kadena-server/repository/application/block-repository.ts @@ -1,12 +1,7 @@ -import { - Block, - FungibleChainAccount, - InputMaybe, - PageInfo, -} from "../../config/graphql-types"; -import { PaginationsParams } from "../pagination"; -import { ConnectionEdge } from "../types"; -import { TransactionOutput } from "./transaction-repository"; +import { Block, FungibleChainAccount, InputMaybe, PageInfo } from '../../config/graphql-types'; +import { PaginationsParams } from '../pagination'; +import { ConnectionEdge } from '../types'; +import { TransactionOutput } from './transaction-repository'; export interface GetBlocksFromDepthParams extends PaginationsParams { chainIds?: InputMaybe; @@ -31,14 +26,13 @@ export interface GetLatestBlocksParams { chainIds?: string[]; } -export type BlockOutput = Omit< - Block, - "parent" | "events" | "minerAccount" | "transactions" -> & { parentHash: string }; +export type BlockOutput = Omit & { + parentHash: string; +}; export type FungibleChainAccountOutput = Omit< FungibleChainAccount, - "transactions" | "transfers" | "hash" + 'transactions' | 'transfers' | 'hash' >; export default interface BlockRepository { @@ -55,10 +49,7 @@ export default interface BlockRepository { pageInfo: PageInfo; edges: ConnectionEdge[]; }>; - getMinerData( - hash: string, - chainId: string, - ): Promise; + getMinerData(hash: string, chainId: string): Promise; getLowestBlockHeight(): Promise; diff --git a/indexer/src/kadena-server/repository/application/event-repository.ts b/indexer/src/kadena-server/repository/application/event-repository.ts index 66f7fae3..b2e13f28 100644 --- a/indexer/src/kadena-server/repository/application/event-repository.ts +++ b/indexer/src/kadena-server/repository/application/event-repository.ts @@ -1,13 +1,12 @@ -import { Event, PageInfo } from "../../config/graphql-types"; -import { PaginationsParams } from "../pagination"; -import { ConnectionEdge } from "../types"; +import { Event, PageInfo } from '../../config/graphql-types'; +import { PaginationsParams } from '../pagination'; +import { ConnectionEdge } from '../types'; export interface GetBlockEventsParams extends PaginationsParams { hash: string; } -export type GetTransactionEventsParams = GetTotalTransactionEventsCount & - PaginationsParams; +export type GetTransactionEventsParams = GetTotalTransactionEventsCount & PaginationsParams; export type GetEventsParams = GetTotalEventsCount & PaginationsParams; @@ -25,7 +24,7 @@ export interface GetTotalTransactionEventsCount { transactionId: string; } -export type EventOutput = Omit & { +export type EventOutput = Omit & { eventId: string; }; @@ -57,9 +56,7 @@ export default interface EventRepository { edges: ConnectionEdge[]; }>; getTotalEventsCount(hash: GetTotalEventsCount): Promise; - getTotalTransactionEventsCount( - hash: GetTotalTransactionEventsCount, - ): Promise; + getTotalTransactionEventsCount(hash: GetTotalTransactionEventsCount): Promise; getTotalCountOfBlockEvents(hash: string): Promise; getLastEventId(): Promise; getLastEvents(params: GetLastEventsParams): Promise; diff --git a/indexer/src/kadena-server/repository/application/network-repository.ts b/indexer/src/kadena-server/repository/application/network-repository.ts index a4f308ae..7f1c55ba 100644 --- a/indexer/src/kadena-server/repository/application/network-repository.ts +++ b/indexer/src/kadena-server/repository/application/network-repository.ts @@ -25,9 +25,7 @@ type AllInfo = NetworkStatistics & 
HashRateAndTotalDifficulty & GetNodeInfo; export default interface NetworkRepository { getNetworkStatistics(): Promise; - getHashRateAndTotalDifficulty( - chainIds: number[], - ): Promise; + getHashRateAndTotalDifficulty(chainIds: number[]): Promise; getNodeInfo(): Promise; getAllInfo(): Promise; } diff --git a/indexer/src/kadena-server/repository/application/transaction-repository.ts b/indexer/src/kadena-server/repository/application/transaction-repository.ts index ea9d6402..7dde245f 100644 --- a/indexer/src/kadena-server/repository/application/transaction-repository.ts +++ b/indexer/src/kadena-server/repository/application/transaction-repository.ts @@ -1,14 +1,8 @@ -import { - PageInfo, - Signer, - Transaction, - TransactionMeta, -} from "../../config/graphql-types"; -import { PaginationsParams } from "../pagination"; -import { ConnectionEdge } from "../types"; +import { PageInfo, Signer, Transaction, TransactionMeta } from '../../config/graphql-types'; +import { PaginationsParams } from '../pagination'; +import { ConnectionEdge } from '../types'; -export type GetTransactionsParams = GetTransactionsCountParams & - PaginationsParams; +export type GetTransactionsParams = GetTransactionsCountParams & PaginationsParams; export interface GetTransactionsByPublicKeyParams extends PaginationsParams { publicKey: string; @@ -32,8 +26,8 @@ export interface GetTransactionsByRequestKey { minimumDepth?: number | null; } -export type TransactionOutput = Omit & { - cmd: Omit; +export type TransactionOutput = Omit & { + cmd: Omit; } & { databaseTransactionId: string; blockHash: string; blockHeight: number }; export type TransactionMetaOutput = TransactionMeta; @@ -45,23 +39,14 @@ export default interface TransactionRepository { edges: ConnectionEdge[]; }>; getTransactionsCount(params: GetTransactionsCountParams): Promise; - getTransactionsByRequestKey( - params: GetTransactionsByRequestKey, - ): Promise; + getTransactionsByRequestKey(params: GetTransactionsByRequestKey): Promise; getTransactionByTransferId(transferId: string): Promise; - getTransactionMetaInfoById( - transactionId: string, - ): Promise; - getTransactionsByPublicKey( - params: GetTransactionsByPublicKeyParams, - ): Promise<{ + getTransactionMetaInfoById(transactionId: string): Promise; + getTransactionsByPublicKey(params: GetTransactionsByPublicKeyParams): Promise<{ pageInfo: PageInfo; edges: ConnectionEdge[]; }>; getTransactionsByPublicKeyCount(publicKey: string): Promise; getTransactionsByEventIds(eventIds: string[]): Promise; - getSigners( - transactionId: string, - orderIndex?: number, - ): Promise; + getSigners(transactionId: string, orderIndex?: number): Promise; } diff --git a/indexer/src/kadena-server/repository/application/transfer-repository.ts b/indexer/src/kadena-server/repository/application/transfer-repository.ts index 715c263f..238e9504 100644 --- a/indexer/src/kadena-server/repository/application/transfer-repository.ts +++ b/indexer/src/kadena-server/repository/application/transfer-repository.ts @@ -1,6 +1,6 @@ -import { PageInfo, Transfer } from "../../config/graphql-types"; -import { PaginationsParams } from "../pagination"; -import { ConnectionEdge } from "../types"; +import { PageInfo, Transfer } from '../../config/graphql-types'; +import { PaginationsParams } from '../pagination'; +import { ConnectionEdge } from '../types'; export type GetTransfersParams = GetTotalCountParams & PaginationsParams & { @@ -26,19 +26,18 @@ export interface GetCrossChainTransferByPactIdParams { amount: string; } -export type TransferOutput 
= Omit< - Transfer, - "block" | "transaction" | "crossChainTransfer" -> & { transferId: string; pactId: string | null; blockHash: string }; +export type TransferOutput = Omit & { + transferId: string; + pactId: string | null; + blockHash: string; +}; export default interface TransferRepository { getTransfers(params: GetTransfersParams): Promise<{ pageInfo: PageInfo; edges: ConnectionEdge[]; }>; - getTransfersByTransactionId( - params: GetTransfersByTransactionIdParams, - ): Promise<{ + getTransfersByTransactionId(params: GetTransfersByTransactionIdParams): Promise<{ pageInfo: PageInfo; edges: ConnectionEdge[]; }>; diff --git a/indexer/src/kadena-server/repository/gateway/gas-gateway.ts b/indexer/src/kadena-server/repository/gateway/gas-gateway.ts index d4ff4e9b..370cf760 100644 --- a/indexer/src/kadena-server/repository/gateway/gas-gateway.ts +++ b/indexer/src/kadena-server/repository/gateway/gas-gateway.ts @@ -1,6 +1,6 @@ -import { IUnsignedCommand } from "@kadena/types"; -import { GasLimitEstimation } from "../../config/graphql-types"; -import { UserInput } from "../../domain/gas/types.gas"; +import { IUnsignedCommand } from '@kadena/types'; +import { GasLimitEstimation } from '../../config/graphql-types'; +import { UserInput } from '../../domain/gas/types.gas'; export type EstimeGasOutput = GasLimitEstimation; diff --git a/indexer/src/kadena-server/repository/gateway/mempool-gateway.ts b/indexer/src/kadena-server/repository/gateway/mempool-gateway.ts index b2544997..25b7e9a6 100644 --- a/indexer/src/kadena-server/repository/gateway/mempool-gateway.ts +++ b/indexer/src/kadena-server/repository/gateway/mempool-gateway.ts @@ -1,13 +1,7 @@ -import { Transaction } from "../../config/graphql-types"; +import { Transaction } from '../../config/graphql-types'; -export type TransactionOutput = Omit< - Transaction, - "orphanedTransactions" | "result" ->; +export type TransactionOutput = Omit; export default interface MempoolGateway { - getPendingTransaction( - requestKey: string, - chainId: string, - ): Promise; + getPendingTransaction(requestKey: string, chainId: string): Promise; } diff --git a/indexer/src/kadena-server/repository/infra/base64-id-generators.ts b/indexer/src/kadena-server/repository/infra/base64-id-generators.ts index be1b6542..54859db6 100644 --- a/indexer/src/kadena-server/repository/infra/base64-id-generators.ts +++ b/indexer/src/kadena-server/repository/infra/base64-id-generators.ts @@ -1,6 +1,6 @@ export const getNonFungibleAccountBase64ID = (accountName: string): string => { const inputString = `NonFungibleAccount:${accountName}`; - const base64ID = Buffer.from(inputString, "utf-8").toString("base64"); + const base64ID = Buffer.from(inputString, 'utf-8').toString('base64'); return base64ID; }; @@ -9,6 +9,6 @@ export const getNonFungibleChainAccountBase64ID = ( accountName: string, ): string => { const inputString = `NonFungibleChainAccount:[\"${chainId}\",\"${accountName}\"]`; - const base64ID = Buffer.from(inputString, "utf-8").toString("base64"); + const base64ID = Buffer.from(inputString, 'utf-8').toString('base64'); return base64ID; }; diff --git a/indexer/src/kadena-server/repository/infra/gateway/gas-api-gateway.ts b/indexer/src/kadena-server/repository/infra/gateway/gas-api-gateway.ts index 2a6fe2ac..3a518c38 100644 --- a/indexer/src/kadena-server/repository/infra/gateway/gas-api-gateway.ts +++ b/indexer/src/kadena-server/repository/infra/gateway/gas-api-gateway.ts @@ -1,11 +1,11 @@ -import { createClient, IUnsignedCommand } from "@kadena/client"; -import { 
GasLimitEstimation } from "../../../config/graphql-types"; -import { GasLimitEstimationError } from "../../../errors/gas-limit-estimation-error"; -import GasGateway, { EstimeGasOutput } from "../../gateway/gas-gateway"; -import { UserInput } from "../../../domain/gas/types.gas"; -import { getRequiredEnvString } from "../../../../utils/helpers"; +import { createClient, IUnsignedCommand } from '@kadena/client'; +import { GasLimitEstimation } from '../../../config/graphql-types'; +import { GasLimitEstimationError } from '../../../errors/gas-limit-estimation-error'; +import GasGateway, { EstimeGasOutput } from '../../gateway/gas-gateway'; +import { UserInput } from '../../../domain/gas/types.gas'; +import { getRequiredEnvString } from '../../../../utils/helpers'; -const SYNC_BASE_URL = getRequiredEnvString("SYNC_BASE_URL"); +const SYNC_BASE_URL = getRequiredEnvString('SYNC_BASE_URL'); export default class GasApiGateway implements GasGateway { async estimateGas( @@ -24,7 +24,7 @@ export default class GasApiGateway implements GasGateway { }); const result = await client.local(transaction, configuration); - if (result.result.status === "failure") { + if (result.result.status === 'failure') { throw result.result.error; } @@ -39,7 +39,7 @@ export default class GasApiGateway implements GasGateway { return response; } catch (error) { throw new GasLimitEstimationError( - "Chainweb Node was unable to estimate the gas limit", + 'Chainweb Node was unable to estimate the gas limit', error, ); } diff --git a/indexer/src/kadena-server/repository/infra/gateway/mempool-api-gateway.ts b/indexer/src/kadena-server/repository/infra/gateway/mempool-api-gateway.ts index bb140ffa..10d97365 100644 --- a/indexer/src/kadena-server/repository/infra/gateway/mempool-api-gateway.ts +++ b/indexer/src/kadena-server/repository/infra/gateway/mempool-api-gateway.ts @@ -1,9 +1,9 @@ -import { getRequiredEnvString } from "../../../../utils/helpers"; -import MempoolGateway from "../../gateway/mempool-gateway"; -import zod from "zod"; +import { getRequiredEnvString } from '../../../../utils/helpers'; +import MempoolGateway from '../../gateway/mempool-gateway'; +import zod from 'zod'; -const SYNC_BASE_URL = getRequiredEnvString("SYNC_BASE_URL"); -const NETWORK_ID = getRequiredEnvString("SYNC_NETWORK"); +const SYNC_BASE_URL = getRequiredEnvString('SYNC_BASE_URL'); +const NETWORK_ID = getRequiredEnvString('SYNC_NETWORK'); const ZodSignature = zod.object({ sig: zod.string(), @@ -29,7 +29,7 @@ const ZodMeta = zod.object({ }); const ZodExecData = zod.object({ - "account-keyset": zod.object({ + 'account-keyset': zod.object({ pred: zod.string(), keys: zod.array(zod.string()), }), @@ -64,16 +64,13 @@ const ZodSchema = zod.object({ export type MempoolResponse = zod.infer; export default class MempoolApiGateway implements MempoolGateway { - async getPendingTransaction( - requestKey: string, - chainId: string, - ): Promise { + async getPendingTransaction(requestKey: string, chainId: string): Promise { const url = `${SYNC_BASE_URL}/${NETWORK_ID}/chain/${chainId}/mempool/lookup`; const res = await fetch(url, { - method: "POST", + method: 'POST', headers: { - "Content-Type": "application/json", + 'Content-Type': 'application/json', }, // use base64 body: JSON.stringify({ requestKey }), diff --git a/indexer/src/kadena-server/repository/infra/gateway/pact-api-gateway.ts b/indexer/src/kadena-server/repository/infra/gateway/pact-api-gateway.ts index 6aca590b..e04ba94b 100644 --- a/indexer/src/kadena-server/repository/infra/gateway/pact-api-gateway.ts 
+++ b/indexer/src/kadena-server/repository/infra/gateway/pact-api-gateway.ts @@ -1,19 +1,19 @@ -import { handleSingleQuery } from "../../../utils/raw-query"; -import PactGateway, { GetNftsInfoParams } from "../../gateway/pact-gateway"; +import { handleSingleQuery } from '../../../utils/raw-query'; +import PactGateway, { GetNftsInfoParams } from '../../gateway/pact-gateway'; export default class PactApiGateway implements PactGateway { async getNftsInfo(data: GetNftsInfoParams) { - const promises = data.map(async (nft) => { + const promises = data.map(async nft => { const query = { chainId: nft.chainId, code: `(marmalade-v2.ledger.get-token-info (token-id))`, - data: [{ key: "token-id", value: nft.tokenId }], + data: [{ key: 'token-id', value: nft.tokenId }], }; const res = await handleSingleQuery(query); - const result = JSON.parse(res.result ?? "{}"); + const result = JSON.parse(res.result ?? '{}'); return { - version: result.version ?? "unknown", - uri: result.uri ?? "unknown", + version: result.version ?? 'unknown', + uri: result.uri ?? 'unknown', supply: result?.supply ? Number(result.supply) : 0, precision: result.precision ? Number(result.precision) : 0, }; diff --git a/indexer/src/kadena-server/repository/infra/repository/balance-db-repository.ts b/indexer/src/kadena-server/repository/infra/repository/balance-db-repository.ts index 33acfed9..b996a6af 100644 --- a/indexer/src/kadena-server/repository/infra/repository/balance-db-repository.ts +++ b/indexer/src/kadena-server/repository/infra/repository/balance-db-repository.ts @@ -1,28 +1,25 @@ -import { rootPgPool } from "../../../../config/database"; -import BalanceModel from "../../../../models/balance"; -import { - formatBalance_NODE, - formatGuard_NODE, -} from "../../../../utils/chainweb-node"; -import { handleSingleQuery } from "../../../utils/raw-query"; +import { rootPgPool } from '../../../../config/database'; +import BalanceModel from '../../../../models/balance'; +import { formatBalance_NODE, formatGuard_NODE } from '../../../../utils/chainweb-node'; +import { handleSingleQuery } from '../../../utils/raw-query'; import BalanceRepository, { FungibleAccountOutput, FungibleChainAccountOutput, INonFungibleAccount, INonFungibleChainAccount, INonFungibleTokenBalance, -} from "../../application/balance-repository"; +} from '../../application/balance-repository'; import { getNonFungibleAccountBase64ID, getNonFungibleChainAccountBase64ID, -} from "../base64-id-generators"; -import { fungibleAccountValidator } from "../schema-validator/fungible-account-validator"; -import { fungibleChainAccountValidator } from "../schema-validator/fungible-chain-account-validator"; -import { nonFungibleTokenBalanceValidator } from "../schema-validator/non-fungible-token-balance-validator"; +} from '../base64-id-generators'; +import { fungibleAccountValidator } from '../schema-validator/fungible-account-validator'; +import { fungibleChainAccountValidator } from '../schema-validator/fungible-chain-account-validator'; +import { nonFungibleTokenBalanceValidator } from '../schema-validator/non-fungible-token-balance-validator'; export default class BalanceDbRepository implements BalanceRepository { // TODO: waiting for orphan blocks mechanism to be ready - async getAccountInfo(accountName: string, fungibleName = "coin") { + async getAccountInfo(accountName: string, fungibleName = 'coin') { const account = await BalanceModel.findOne({ where: { account: accountName, @@ -38,13 +35,10 @@ export default class BalanceDbRepository implements BalanceRepository { 
group by b.account `; - const { rows } = await rootPgPool.query(totalBalanceQuery, [ - accountName, - fungibleName, - ]); + const { rows } = await rootPgPool.query(totalBalanceQuery, [accountName, fungibleName]); if (!account || !rows?.length) { - throw new Error("Account not found."); + throw new Error('Account not found.'); } const accountInfo = fungibleAccountValidator.mapFromSequelize(account); @@ -74,7 +68,7 @@ export default class BalanceDbRepository implements BalanceRepository { const { rows } = await rootPgPool.query(query, queryParams); - const output = rows.map((r) => fungibleChainAccountValidator.validate(r)); + const output = rows.map(r => fungibleChainAccountValidator.validate(r)); return output; } @@ -101,10 +95,10 @@ export default class BalanceDbRepository implements BalanceRepository { group by b.account `; - const { rows: totalBalanceRows } = await rootPgPool.query( - totalBalanceQuery, - [rows.map((a) => a.account), fungibleName], - ); + const { rows: totalBalanceRows } = await rootPgPool.query(totalBalanceQuery, [ + rows.map(a => a.account), + fungibleName, + ]); const balanceMapping = totalBalanceRows.reduce( (acum, cur) => ({ @@ -113,7 +107,7 @@ export default class BalanceDbRepository implements BalanceRepository { }), {}, ); - const output = rows.map((r) => { + const output = rows.map(r => { return { ...fungibleAccountValidator.validate(r), totalBalance: balanceMapping[r.account], @@ -144,7 +138,7 @@ export default class BalanceDbRepository implements BalanceRepository { const balanceQueryParams: any = []; const placeholders = rows - .map((r) => ({ + .map(r => ({ chainId: r.chainId, module: r.module, account: r.account, @@ -154,7 +148,7 @@ export default class BalanceDbRepository implements BalanceRepository { const startIndex = index * 3 + 1; return `($${startIndex}, $${startIndex + 1}, $${startIndex + 2})`; }) - .join(", "); + .join(', '); const balanceQuery = ` SELECT @@ -167,20 +161,13 @@ export default class BalanceDbRepository implements BalanceRepository { WHERE (b."chainId", b.module, b.account) IN (${placeholders}) `; - const { rows: accountRows } = await rootPgPool.query( - balanceQuery, - balanceQueryParams, - ); + const { rows: accountRows } = await rootPgPool.query(balanceQuery, balanceQueryParams); - const output = accountRows.map((r) => - fungibleChainAccountValidator.validate(r), - ); + const output = accountRows.map(r => fungibleChainAccountValidator.validate(r)); return output; } - async getNonFungibleAccountInfo( - accountName: string, - ): Promise { + async getNonFungibleAccountInfo(accountName: string): Promise { const queryParams = [accountName]; let query = ` SELECT b.id, b."chainId", b.balance, b."tokenId", b.account @@ -193,7 +180,7 @@ export default class BalanceDbRepository implements BalanceRepository { if (rows.length === 0) return null; - const nonFungibleTokenBalances = rows.map((row) => { + const nonFungibleTokenBalances = rows.map(row => { return nonFungibleTokenBalanceValidator.validate(row); }); @@ -223,7 +210,7 @@ export default class BalanceDbRepository implements BalanceRepository { if (rows.length === 0) return null; - const nonFungibleTokenBalances = rows.map((row) => { + const nonFungibleTokenBalances = rows.map(row => { return nonFungibleTokenBalanceValidator.validate(row); }); @@ -261,7 +248,7 @@ export default class BalanceDbRepository implements BalanceRepository { async getAccountInfo_NODE( accountName: string, - fungibleName = "coin", + fungibleName = 'coin', ): Promise { const query = ` SELECT DISTINCT b."chainId" @@ 
-271,9 +258,9 @@ export default class BalanceDbRepository implements BalanceRepository { `; const { rows } = await rootPgPool.query(query, [accountName, fungibleName]); - const chainIds = rows.map((r) => Number(r.chainId)); + const chainIds = rows.map(r => Number(r.chainId)); - const balancePromises = chainIds.map((c) => { + const balancePromises = chainIds.map(c => { const query = { chainId: c.toString(), code: `(${fungibleName}.details \"${accountName}\")`, @@ -281,10 +268,8 @@ export default class BalanceDbRepository implements BalanceRepository { return handleSingleQuery(query); }); - const balances = (await Promise.all(balancePromises)).filter( - (b) => b.status === "success", - ); - const balancesNumber = balances.map((b) => formatBalance_NODE(b)); + const balances = (await Promise.all(balancePromises)).filter(b => b.status === 'success'); + const balancesNumber = balances.map(b => formatBalance_NODE(b)); const totalBalance = balancesNumber.reduce((acc, cur) => acc + cur, 0); const accountInfo = fungibleAccountValidator.mapFromSequelize({ @@ -310,17 +295,14 @@ export default class BalanceDbRepository implements BalanceRepository { WHERE b.account = $1 AND b.module = $2 `; - const { rows } = await rootPgPool.query(query, [ - accountName, - fungibleName, - ]); - const chainIds = rows.map((r) => Number(r.chainId)); + const { rows } = await rootPgPool.query(query, [accountName, fungibleName]); + const chainIds = rows.map(r => Number(r.chainId)); chainIdsParam.push(...chainIds); } else { - chainIdsParam.push(...chainIds.map((c) => Number(c))); + chainIdsParam.push(...chainIds.map(c => Number(c))); } - const balancePromises = chainIdsParam.map((c) => { + const balancePromises = chainIdsParam.map(c => { const query = { chainId: c.toString(), code: `(${fungibleName}.details \"${accountName}\")`, @@ -328,9 +310,7 @@ export default class BalanceDbRepository implements BalanceRepository { return handleSingleQuery(query); }); - const rows = (await Promise.all(balancePromises)).filter( - (b) => b.status === "success", - ); + const rows = (await Promise.all(balancePromises)).filter(b => b.status === 'success'); const rowsMapped = rows.map((row, index) => { const balance = formatBalance_NODE(row); @@ -345,9 +325,7 @@ export default class BalanceDbRepository implements BalanceRepository { }; }); - const output = rowsMapped.map((r) => - fungibleChainAccountValidator.validate(r), - ); + const output = rowsMapped.map(r => fungibleChainAccountValidator.validate(r)); return output; } @@ -362,9 +340,7 @@ export default class BalanceDbRepository implements BalanceRepository { WHERE g."publicKey" = $1 `; - const { rows: guardRows } = await rootPgPool.query(guardsQuery, [ - publicKey, - ]); + const { rows: guardRows } = await rootPgPool.query(guardsQuery, [publicKey]); if (!guardRows?.length) { const params = [`k:${publicKey}`, fungibleName]; @@ -378,7 +354,7 @@ export default class BalanceDbRepository implements BalanceRepository { if (!rows.length) return []; - const balancePromises = rows.map((r) => { + const balancePromises = rows.map(r => { const query = { chainId: r.chainId.toString(), code: `(${fungibleName}.details \"${r.account}\")`, @@ -386,10 +362,8 @@ export default class BalanceDbRepository implements BalanceRepository { return handleSingleQuery(query); }); - const balances = (await Promise.all(balancePromises)).filter( - (b) => b.status === "success", - ); - const balancesNumber = balances.map((q) => formatBalance_NODE(q)); + const balances = (await Promise.all(balancePromises)).filter(b => 
b.status === 'success'); + const balancesNumber = balances.map(q => formatBalance_NODE(q)); const totalBalance = balancesNumber.reduce((acc, cur) => acc + cur, 0); const accountInfo = fungibleAccountValidator.mapFromSequelize({ @@ -424,7 +398,7 @@ export default class BalanceDbRepository implements BalanceRepository { const accountsPromises = Object.entries(groupedByAccount).map( async ([account, chainIds], index) => { - const balances = chainIds.map(async (c) => { + const balances = chainIds.map(async c => { const query = { chainId: c.toString(), code: `(${fungibleName}.details \"${account}\")`, @@ -434,10 +408,7 @@ export default class BalanceDbRepository implements BalanceRepository { }); const balancesNumber = await Promise.all(balances); - const totalBalance: Number = balancesNumber.reduce( - (acc, cur) => acc + cur, - 0, - ); + const totalBalance: Number = balancesNumber.reduce((acc, cur) => acc + cur, 0); return { id: index.toString(), @@ -464,16 +435,10 @@ export default class BalanceDbRepository implements BalanceRepository { WHERE g."publicKey" = $1 `; - const { rows: guardRows } = await rootPgPool.query(guardsQuery, [ - publicKey, - ]); + const { rows: guardRows } = await rootPgPool.query(guardsQuery, [publicKey]); - const params = [ - guardRows?.length ? publicKey : `k:${publicKey}`, - fungibleName, - chainId, - ]; - let query = ""; + const params = [guardRows?.length ? publicKey : `k:${publicKey}`, fungibleName, chainId]; + let query = ''; if (!guardRows?.length) { query = ` SELECT b.id, b.account, b."chainId", b.module @@ -494,7 +459,7 @@ export default class BalanceDbRepository implements BalanceRepository { } const { rows } = await rootPgPool.query(query, params); - const balancesWithQuery = rows.map(async (r) => { + const balancesWithQuery = rows.map(async r => { const query = { chainId: r.chainId.toString(), code: `(${fungibleName}.details \"${r.account}\")`, @@ -510,10 +475,10 @@ export default class BalanceDbRepository implements BalanceRepository { }); const queries = (await Promise.all(balancesWithQuery)).filter( - (b) => b.balanceQuery.status === "success", + b => b.balanceQuery.status === 'success', ); - const balances = queries.map((b) => { + const balances = queries.map(b => { const balance = formatBalance_NODE(b.balanceQuery).toString(); return { ...b, @@ -522,9 +487,7 @@ export default class BalanceDbRepository implements BalanceRepository { }; }); - const output = balances.map((r) => - fungibleChainAccountValidator.validate(r), - ); + const output = balances.map(r => fungibleChainAccountValidator.validate(r)); return output; } } diff --git a/indexer/src/kadena-server/repository/infra/repository/block-db-repository.ts b/indexer/src/kadena-server/repository/infra/repository/block-db-repository.ts index b82d547b..b1e20c57 100644 --- a/indexer/src/kadena-server/repository/infra/repository/block-db-repository.ts +++ b/indexer/src/kadena-server/repository/infra/repository/block-db-repository.ts @@ -1,22 +1,22 @@ -import { FindOptions, Op, QueryTypes } from "sequelize"; -import { rootPgPool, sequelize } from "../../../../config/database"; -import BlockModel, { BlockAttributes } from "../../../../models/block"; +import { FindOptions, Op, QueryTypes } from 'sequelize'; +import { rootPgPool, sequelize } from '../../../../config/database'; +import BlockModel, { BlockAttributes } from '../../../../models/block'; import BlockRepository, { BlockOutput, GetBlocksBetweenHeightsParams, GetBlocksFromDepthParams, GetCompletedBlocksParams, GetLatestBlocksParams, -} from 
"../../application/block-repository"; -import { getPageInfo, getPaginationParams } from "../../pagination"; -import { blockValidator } from "../schema-validator/block-schema-validator"; -import Balance from "../../../../models/balance"; -import { handleSingleQuery } from "../../../utils/raw-query"; -import { formatGuard_NODE } from "../../../../utils/chainweb-node"; -import { MEMORY_CACHE } from "../../../../cache/init"; -import { NODE_INFO_KEY } from "../../../../cache/keys"; -import { GetNodeInfo } from "../../application/network-repository"; -import { TransactionOutput } from "../../application/transaction-repository"; +} from '../../application/block-repository'; +import { getPageInfo, getPaginationParams } from '../../pagination'; +import { blockValidator } from '../schema-validator/block-schema-validator'; +import Balance from '../../../../models/balance'; +import { handleSingleQuery } from '../../../utils/raw-query'; +import { formatGuard_NODE } from '../../../../utils/chainweb-node'; +import { MEMORY_CACHE } from '../../../../cache/init'; +import { NODE_INFO_KEY } from '../../../../cache/keys'; +import { GetNodeInfo } from '../../application/network-repository'; +import { TransactionOutput } from '../../application/transaction-repository'; export default class BlockDbRepository implements BlockRepository { async getBlockByHash(hash: string) { @@ -25,7 +25,7 @@ export default class BlockDbRepository implements BlockRepository { }); if (!block) { - throw new Error("Block not found."); + throw new Error('Block not found.'); } return blockValidator.mapFromSequelize(block); @@ -56,12 +56,12 @@ export default class BlockDbRepository implements BlockRepository { ...(!!chainIds?.length && { chainId: { [Op.in]: chainIds } }), }, limit, - order: [["id", order]], + order: [['id', order]], }; const rows = await BlockModel.findAll(query); - const edges = rows.map((row) => ({ + const edges = rows.map(row => ({ cursor: row.id.toString(), node: blockValidator.mapFromSequelize(row), })); @@ -89,7 +89,7 @@ export default class BlockDbRepository implements BlockRepository { }); const queryParams: (string | number | string[])[] = [limit, startHeight]; - let conditions = ""; + let conditions = ''; if (before) { queryParams.push(before); @@ -134,7 +134,7 @@ export default class BlockDbRepository implements BlockRepository { const { rows: blockRows } = await rootPgPool.query(query, queryParams); - const edges = blockRows.map((row) => ({ + const edges = blockRows.map(row => ({ cursor: row.id.toString(), node: blockValidator.validate(row), })); @@ -185,7 +185,7 @@ export default class BlockDbRepository implements BlockRepository { async getChainIds() { const nodeInfo = MEMORY_CACHE.get(NODE_INFO_KEY) as GetNodeInfo; - return nodeInfo.nodeChains.map((chainId) => Number(chainId)); + return nodeInfo.nodeChains.map(chainId => Number(chainId)); } async getCompletedBlocks(params: GetCompletedBlocksParams) { @@ -206,9 +206,7 @@ export default class BlockDbRepository implements BlockRepository { last, }); - const chainIds = chainIdsParam?.length - ? chainIdsParam - : await this.getChainIds(); + const chainIds = chainIdsParam?.length ? 
chainIdsParam : await this.getChainIds(); if (completedHeights) { const query = ` @@ -220,12 +218,9 @@ export default class BlockDbRepository implements BlockRepository { LIMIT $2; `; - const { rows: heightRows } = await rootPgPool.query(query, [ - chainIds.length, - heightCount, - ]); + const { rows: heightRows } = await rootPgPool.query(query, [chainIds.length, heightCount]); - const totalCompletedHeights = heightRows.map((r) => r.height) as number[]; + const totalCompletedHeights = heightRows.map(r => r.height) as number[]; if (totalCompletedHeights.length > 0) { const queryParams: any[] = [ @@ -235,16 +230,16 @@ export default class BlockDbRepository implements BlockRepository { totalCompletedHeights[0], ]; - let conditions = ""; + let conditions = ''; if (after) { queryParams.push(after); - conditions += "\nAND id < $5"; + conditions += '\nAND id < $5'; } if (before) { queryParams.push(before); - conditions += "\nAND id > $5"; + conditions += '\nAND id > $5'; } let queryOne = ` @@ -257,12 +252,9 @@ export default class BlockDbRepository implements BlockRepository { LIMIT $1 `; - const { rows: blockRows } = await rootPgPool.query( - queryOne, - queryParams, - ); + const { rows: blockRows } = await rootPgPool.query(queryOne, queryParams); - const edges = blockRows.map((row) => ({ + const edges = blockRows.map(row => ({ cursor: row.id.toString(), node: blockValidator.validate(row), })); @@ -281,24 +273,22 @@ export default class BlockDbRepository implements BlockRepository { LIMIT $1 `; - const { rows: heightRows } = await rootPgPool.query(queryTwo, [ - heightCount, - ]); + const { rows: heightRows } = await rootPgPool.query(queryTwo, [heightCount]); - const totalCompletedHeights = heightRows.map((r) => r.height) as number[]; + const totalCompletedHeights = heightRows.map(r => r.height) as number[]; const queryParams: any[] = [limit, chainIds, totalCompletedHeights]; - let conditions = ""; + let conditions = ''; if (after) { queryParams.push(after); - conditions += "\nAND id < $4"; + conditions += '\nAND id < $4'; } if (before) { queryParams.push(before); - conditions += "\nAND id > $4"; + conditions += '\nAND id > $4'; } let queryThree = ` @@ -313,7 +303,7 @@ export default class BlockDbRepository implements BlockRepository { const { rows: blockRows } = await rootPgPool.query(queryThree, queryParams); - const edges = blockRows.map((row) => ({ + const edges = blockRows.map(row => ({ cursor: row.id.toString(), node: blockValidator.validate(row), })); @@ -323,7 +313,7 @@ export default class BlockDbRepository implements BlockRepository { } async getBlocksByEventIds(eventIds: readonly string[]) { - console.log("Batching for event IDs:", eventIds); + console.log('Batching for event IDs:', eventIds); const { rows: blockRows } = await rootPgPool.query( `SELECT b.*, e.id as "eventId" @@ -335,7 +325,7 @@ export default class BlockDbRepository implements BlockRepository { ); if (blockRows.length !== eventIds.length) { - throw new Error("There was an issue fetching blocks for event IDs."); + throw new Error('There was an issue fetching blocks for event IDs.'); } const blockMap = blockRows.reduce( @@ -346,11 +336,11 @@ export default class BlockDbRepository implements BlockRepository { {}, ); - return eventIds.map((eventId) => blockMap[eventId]) as BlockOutput[]; + return eventIds.map(eventId => blockMap[eventId]) as BlockOutput[]; } async getBlocksByTransactionIds(transactionIds: string[]) { - console.log("Batching for transactionIds IDs:", transactionIds); + console.log('Batching for 
transactionIds IDs:', transactionIds); const { rows: blockRows } = await rootPgPool.query( `SELECT b.id, @@ -374,9 +364,7 @@ export default class BlockDbRepository implements BlockRepository { ); if (blockRows.length !== transactionIds.length) { - throw new Error( - "There was an issue fetching blocks for transaction IDs.", - ); + throw new Error('There was an issue fetching blocks for transaction IDs.'); } const blockMap = blockRows.reduce( @@ -387,11 +375,11 @@ export default class BlockDbRepository implements BlockRepository { {}, ); - return transactionIds.map((id) => blockMap[id]) as BlockOutput[]; + return transactionIds.map(id => blockMap[id]) as BlockOutput[]; } async getBlockByHashes(hashes: string[]): Promise { - console.log("Batching for hashes:", hashes); + console.log('Batching for hashes:', hashes); const { rows: blockRows } = await rootPgPool.query( `SELECT b.id, @@ -413,9 +401,7 @@ export default class BlockDbRepository implements BlockRepository { ); if (blockRows.length !== hashes.length) { - throw new Error( - "There was an issue fetching blocks for transaction IDs.", - ); + throw new Error('There was an issue fetching blocks for transaction IDs.'); } const blockMap = blockRows.reduce( @@ -426,13 +412,13 @@ export default class BlockDbRepository implements BlockRepository { {}, ); - return hashes.map((hash) => blockMap[hash]) as BlockOutput[]; + return hashes.map(hash => blockMap[hash]) as BlockOutput[]; } async getLowestBlockHeight(): Promise { const block = await BlockModel.findOne({ - order: [["height", "ASC"]], - attributes: ["height"], + order: [['height', 'ASC']], + attributes: ['height'], }); return block?.height || 0; @@ -440,8 +426,8 @@ export default class BlockDbRepository implements BlockRepository { async getLastBlockHeight(): Promise { const block = await BlockModel.findOne({ - order: [["height", "DESC"]], - attributes: ["height"], + order: [['height', 'DESC']], + attributes: ['height'], }); return block?.height || 0; @@ -450,7 +436,7 @@ export default class BlockDbRepository implements BlockRepository { async getTotalCountOfBlockEvents(blockHash: string): Promise { const block = await BlockModel.findOne({ where: { hash: blockHash }, - attributes: ["transactionsCount"], + attributes: ['transactionsCount'], }); return block?.transactionsCount || 0; @@ -465,10 +451,10 @@ export default class BlockDbRepository implements BlockRepository { ...(chainIds.length && { chainId: { [Op.in]: chainIds } }), }, limit: 100, - order: [["id", "DESC"]], + order: [['id', 'DESC']], }); - const output = blocks.map((b) => blockValidator.mapFromSequelize(b)); + const output = blocks.map(b => blockValidator.mapFromSequelize(b)); return output; } @@ -493,15 +479,11 @@ export default class BlockDbRepository implements BlockRepository { GROUP BY root_hash; `; - const { rows } = await rootPgPool.query(query, [ - transactions.map((t) => t.blockHash), - ]); + const { rows } = await rootPgPool.query(query, [transactions.map(t => t.blockHash)]); rows.sort((a, b) => b.depth - a.depth); - const output = rows.map((r) => - transactions.find((t) => t.blockHash === r.root_hash), - ) as any; + const output = rows.map(r => transactions.find(t => t.blockHash === r.root_hash)) as any; return output; } diff --git a/indexer/src/kadena-server/repository/infra/repository/event-db-repository.ts b/indexer/src/kadena-server/repository/infra/repository/event-db-repository.ts index 16994663..99a34f99 100644 --- a/indexer/src/kadena-server/repository/infra/repository/event-db-repository.ts +++ 
b/indexer/src/kadena-server/repository/infra/repository/event-db-repository.ts @@ -1,5 +1,5 @@ -import { rootPgPool } from "../../../../config/database"; -import { PageInfo } from "../../../config/graphql-types"; +import { rootPgPool } from '../../../../config/database'; +import { PageInfo } from '../../../config/graphql-types'; import EventRepository, { EventOutput, GetBlockEventsParams, @@ -9,10 +9,10 @@ import EventRepository, { GetTotalEventsCount, GetTotalTransactionEventsCount, GetTransactionEventsParams, -} from "../../application/event-repository"; -import { getPageInfo, getPaginationParams } from "../../pagination"; -import { ConnectionEdge } from "../../types"; -import { eventValidator } from "../schema-validator/event-schema-validator"; +} from '../../application/event-repository'; +import { getPageInfo, getPaginationParams } from '../../pagination'; +import { ConnectionEdge } from '../../types'; +import { eventValidator } from '../schema-validator/event-schema-validator'; export default class EventDbRepository implements EventRepository { async getEvent(params: GetEventParams): Promise { @@ -44,13 +44,7 @@ export default class EventDbRepository implements EventRepository { return output; } async getBlockEvents(params: GetBlockEventsParams) { - const { - hash, - after: afterEncoded, - before: beforeEncoded, - first, - last, - } = params; + const { hash, after: afterEncoded, before: beforeEncoded, first, last } = params; const { limit, order, after, before } = getPaginationParams({ after: afterEncoded, @@ -61,7 +55,7 @@ export default class EventDbRepository implements EventRepository { const queryParams = [limit, hash]; - let conditions = ""; + let conditions = ''; if (before) { queryParams.push(before); @@ -94,7 +88,7 @@ export default class EventDbRepository implements EventRepository { const { rows } = await rootPgPool.query(query, queryParams); - const edges = rows.map((row) => ({ + const edges = rows.map(row => ({ cursor: row.id.toString(), node: eventValidator.validate(row), })); @@ -112,9 +106,7 @@ export default class EventDbRepository implements EventRepository { WHERE b.hash = $1 `; - const { rows: countResult } = await rootPgPool.query(totalCountQuery, [ - hash, - ]); + const { rows: countResult } = await rootPgPool.query(totalCountQuery, [hash]); const totalCount = parseInt(countResult[0].count, 10); return totalCount; @@ -142,12 +134,12 @@ export default class EventDbRepository implements EventRepository { last, }); - const splitted = qualifiedEventName.split("."); - const name = splitted.pop() ?? ""; - const module = splitted.join("."); + const splitted = qualifiedEventName.split('.'); + const name = splitted.pop() ?? ''; + const module = splitted.join('.'); const queryParams: (string | number)[] = [limit]; - let conditions = ""; + let conditions = ''; queryParams.push(module); conditions += `WHERE e.module = $${queryParams.length}`; @@ -214,7 +206,7 @@ export default class EventDbRepository implements EventRepository { const { rows } = await rootPgPool.query(query, queryParams); - const edges = rows.map((row) => ({ + const edges = rows.map(row => ({ cursor: row.id.toString(), node: eventValidator.validate(row), })); @@ -234,12 +226,12 @@ export default class EventDbRepository implements EventRepository { requestKey, } = params; - const splitted = qualifiedEventName.split("."); - const name = splitted.pop() ?? ""; - const module = splitted.join("."); + const splitted = qualifiedEventName.split('.'); + const name = splitted.pop() ?? 
''; + const module = splitted.join('.'); const queryParams: (string | number)[] = []; - let conditions = ""; + let conditions = ''; queryParams.push(module); conditions += `WHERE e.module = $${queryParams.length}`; @@ -284,10 +276,7 @@ export default class EventDbRepository implements EventRepository { ${conditions} `; - const { rows: countResult } = await rootPgPool.query( - totalCountQuery, - queryParams, - ); + const { rows: countResult } = await rootPgPool.query(totalCountQuery, queryParams); const totalCount = parseInt(countResult[0].count, 10); return totalCount; } @@ -295,13 +284,7 @@ export default class EventDbRepository implements EventRepository { async getTransactionEvents( params: GetTransactionEventsParams, ): Promise<{ pageInfo: PageInfo; edges: ConnectionEdge[] }> { - const { - transactionId, - after: afterEncoded, - before: beforeEncoded, - first, - last, - } = params; + const { transactionId, after: afterEncoded, before: beforeEncoded, first, last } = params; const { limit, order, after, before } = getPaginationParams({ after: afterEncoded, @@ -311,7 +294,7 @@ export default class EventDbRepository implements EventRepository { }); const queryParams: (string | number)[] = [limit, transactionId]; - let conditions = ""; + let conditions = ''; if (after) { queryParams.push(after); @@ -344,7 +327,7 @@ export default class EventDbRepository implements EventRepository { const { rows } = await rootPgPool.query(query, queryParams); - const edges = rows.map((row) => ({ + const edges = rows.map(row => ({ cursor: row.id.toString(), node: eventValidator.validate(row), })); @@ -364,10 +347,7 @@ export default class EventDbRepository implements EventRepository { WHERE t.id = $1 `; - const { rows: countResult } = await rootPgPool.query( - totalCountQuery, - queryParams, - ); + const { rows: countResult } = await rootPgPool.query(totalCountQuery, queryParams); const totalCount = parseInt(countResult[0].count, 10); return totalCount; } @@ -386,12 +366,12 @@ export default class EventDbRepository implements EventRepository { minimumDepth, }: GetLastEventsParams) { const queryParams = []; - let conditions = ""; - let limitCondition = lastEventId ? "LIMIT 5" : "LIMIT 100"; + let conditions = ''; + let limitCondition = lastEventId ? 'LIMIT 5' : 'LIMIT 100'; - const splitted = qualifiedEventName.split("."); - const name = splitted.pop() ?? ""; - const module = splitted.join("."); + const splitted = qualifiedEventName.split('.'); + const name = splitted.pop() ?? 
''; + const module = splitted.join('.'); queryParams.push(module); conditions += `WHERE e.module = $${queryParams.length}`; @@ -429,7 +409,7 @@ export default class EventDbRepository implements EventRepository { const { rows } = await rootPgPool.query(query, queryParams); const events = rows - .map((e) => eventValidator.validate(e)) + .map(e => eventValidator.validate(e)) .sort((a, b) => Number(b.id) - Number(a.id)); return events; diff --git a/indexer/src/kadena-server/repository/infra/repository/network-db-repository.ts b/indexer/src/kadena-server/repository/infra/repository/network-db-repository.ts index ca5a3c2b..48ef579a 100644 --- a/indexer/src/kadena-server/repository/infra/repository/network-db-repository.ts +++ b/indexer/src/kadena-server/repository/infra/repository/network-db-repository.ts @@ -1,39 +1,36 @@ -import { Op } from "sequelize"; -import BlockModel from "../../../../models/block"; +import { Op } from 'sequelize'; +import BlockModel from '../../../../models/block'; import NetworkRepository, { GetNodeInfo, HashRateAndTotalDifficulty, NetworkStatistics, -} from "../../application/network-repository"; +} from '../../application/network-repository'; import { BlockWithDifficulty, calculateBlockDifficulty, calculateTotalDifficulty, -} from "../../../utils/difficulty"; -import { calculateNetworkHashRate } from "../../../utils/hashrate"; -import { rootPgPool } from "../../../../config/database"; -import { nodeInfoValidator } from "../schema-validator/node-info-validator"; -import { getRequiredEnvString } from "../../../../utils/helpers"; -import { MEMORY_CACHE } from "../../../../cache/init"; -import { - HASH_RATE_AND_TOTAL_DIFFICULTY_KEY, - NETWORK_STATISTICS_KEY, -} from "../../../../cache/keys"; -import { getCirculationNumber } from "../../../utils/coin-circulation"; +} from '../../../utils/difficulty'; +import { calculateNetworkHashRate } from '../../../utils/hashrate'; +import { rootPgPool } from '../../../../config/database'; +import { nodeInfoValidator } from '../schema-validator/node-info-validator'; +import { getRequiredEnvString } from '../../../../utils/helpers'; +import { MEMORY_CACHE } from '../../../../cache/init'; +import { HASH_RATE_AND_TOTAL_DIFFICULTY_KEY, NETWORK_STATISTICS_KEY } from '../../../../cache/keys'; +import { getCirculationNumber } from '../../../utils/coin-circulation'; -const HOST_URL = getRequiredEnvString("NODE_API_URL"); -const SYNC_BASE_URL = getRequiredEnvString("SYNC_BASE_URL"); -const NETWORK_ID = getRequiredEnvString("SYNC_NETWORK"); +const HOST_URL = getRequiredEnvString('NODE_API_URL'); +const SYNC_BASE_URL = getRequiredEnvString('SYNC_BASE_URL'); +const NETWORK_ID = getRequiredEnvString('SYNC_NETWORK'); -const NODE_INFO_KEY = "NODE_INFO_KEY"; +const NODE_INFO_KEY = 'NODE_INFO_KEY'; export default class NetworkDbRepository implements NetworkRepository { async getCut(): Promise { const response = await fetch(`${SYNC_BASE_URL}/${NETWORK_ID}/cut`, { - method: "GET", + method: 'GET', headers: { - accept: "application/json;charset=utf-8, application/json", - "cache-control": "no-cache", + accept: 'application/json;charset=utf-8, application/json', + 'cache-control': 'no-cache', }, }); const data = await response.json(); @@ -71,10 +68,7 @@ export default class NetworkDbRepository implements NetworkRepository { const { rows: totalTransactionsCountRows } = await rootPgPool.query( totalTransactionsCountQuery, ); - const transactionCount = parseInt( - totalTransactionsCountRows[0].totalTransactionsCount, - 10, - ); + const transactionCount = 
parseInt(totalTransactionsCountRows[0].totalTransactionsCount, 10); const coinsInCirculation = await this.getCoinsInCirculation(); @@ -88,8 +82,8 @@ export default class NetworkDbRepository implements NetworkRepository { async getHashRateAndTotalDifficulty(chainIds: number[]) { const lastBlock = await BlockModel.findOne({ - order: [["height", "DESC"]], - attributes: ["height"], + order: [['height', 'DESC']], + attributes: ['height'], }); const currentHeight = lastBlock?.height ?? 0; @@ -100,7 +94,7 @@ export default class NetworkDbRepository implements NetworkRepository { [Op.gte]: Number(currentHeight) - 4, }, }, - attributes: ["creationTime", "target", "height", "chainId"], + attributes: ['creationTime', 'target', 'height', 'chainId'], }); const blocksWithDifficulty: BlockWithDifficulty[] = []; @@ -120,11 +114,7 @@ export default class NetworkDbRepository implements NetworkRepository { const output = { networkHashRate: Number(calculateNetworkHashRate(blocksWithDifficulty)), totalDifficulty: Number( - calculateTotalDifficulty( - BigInt(currentHeight), - blocksWithDifficulty, - chainIds, - ), + calculateTotalDifficulty(BigInt(currentHeight), blocksWithDifficulty, chainIds), ), }; @@ -133,10 +123,10 @@ export default class NetworkDbRepository implements NetworkRepository { async getNodeInfo(): Promise { const response = await fetch(`${HOST_URL}/info`, { - method: "GET", + method: 'GET', headers: { - accept: "application/json;charset=utf-8, application/json", - "cache-control": "no-cache", + accept: 'application/json;charset=utf-8, application/json', + 'cache-control': 'no-cache', }, }); const data = await response.json(); @@ -147,9 +137,7 @@ export default class NetworkDbRepository implements NetworkRepository { async getAllInfo() { const nodeInfo = MEMORY_CACHE.get(NODE_INFO_KEY) as GetNodeInfo; - const networkStatistics = MEMORY_CACHE.get( - NETWORK_STATISTICS_KEY, - ) as NetworkStatistics; + const networkStatistics = MEMORY_CACHE.get(NETWORK_STATISTICS_KEY) as NetworkStatistics; const HashRateAndTotalDifficulty = MEMORY_CACHE.get( HASH_RATE_AND_TOTAL_DIFFICULTY_KEY, ) as HashRateAndTotalDifficulty; diff --git a/indexer/src/kadena-server/repository/infra/repository/transaction-db-repository.ts b/indexer/src/kadena-server/repository/infra/repository/transaction-db-repository.ts index 0630cbda..ae33ed5d 100644 --- a/indexer/src/kadena-server/repository/infra/repository/transaction-db-repository.ts +++ b/indexer/src/kadena-server/repository/infra/repository/transaction-db-repository.ts @@ -1,21 +1,20 @@ -import { rootPgPool } from "../../../../config/database"; +import { rootPgPool } from '../../../../config/database'; import TransactionRepository, { GetTransactionsByPublicKeyParams, GetTransactionsByRequestKey, GetTransactionsCountParams, GetTransactionsParams, TransactionOutput, -} from "../../application/transaction-repository"; -import { getPageInfo, getPaginationParams } from "../../pagination"; -import { transactionMetaValidator } from "../schema-validator/transaction-meta-schema-validator"; -import { transactionValidator } from "../schema-validator/transaction-schema-validator"; -import { signerMetaValidator } from "../schema-validator/signer-schema-validator"; -import { MEMORY_CACHE } from "../../../../cache/init"; -import { NETWORK_STATISTICS_KEY } from "../../../../cache/keys"; -import { NetworkStatistics } from "../../application/network-repository"; - -const operator = (paramsLength: number) => - paramsLength > 2 ? 
`\nAND` : "WHERE"; +} from '../../application/transaction-repository'; +import { getPageInfo, getPaginationParams } from '../../pagination'; +import { transactionMetaValidator } from '../schema-validator/transaction-meta-schema-validator'; +import { transactionValidator } from '../schema-validator/transaction-schema-validator'; +import { signerMetaValidator } from '../schema-validator/signer-schema-validator'; +import { MEMORY_CACHE } from '../../../../cache/init'; +import { NETWORK_STATISTICS_KEY } from '../../../../cache/keys'; +import { NetworkStatistics } from '../../application/network-repository'; + +const operator = (paramsLength: number) => (paramsLength > 2 ? `\nAND` : 'WHERE'); export default class TransactionDbRepository implements TransactionRepository { private createBlockConditions( @@ -23,7 +22,7 @@ export default class TransactionDbRepository implements TransactionRepository { queryParams: Array, ) { const { blockHash, chainId, maxHeight, minHeight, minimumDepth } = params; - let blocksConditions = ""; + let blocksConditions = ''; const blockParams: (string | number)[] = [...queryParams]; if (blockHash) { @@ -63,15 +62,8 @@ export default class TransactionDbRepository implements TransactionRepository { params: GetTransactionsParams, queryParams: Array, ) { - const { - accountName, - after, - before, - requestKey, - fungibleName, - hasTokenId = false, - } = params; - let conditions = ""; + const { accountName, after, before, requestKey, fungibleName, hasTokenId = false } = params; + let conditions = ''; const transactionParams: (string | number)[] = [...queryParams]; if (accountName) { transactionParams.push(accountName); @@ -145,36 +137,37 @@ export default class TransactionDbRepository implements TransactionRepository { first, last, }); - const isBlockQueryFirst = - blockHash || minHeight || maxHeight || minimumDepth || chainId; + const isBlockQueryFirst = blockHash || minHeight || maxHeight || minimumDepth || chainId; const queryParams: (string | number)[] = []; - let blocksConditions = ""; - let transactionsConditions = ""; + let blocksConditions = ''; + let transactionsConditions = ''; if (isBlockQueryFirst) { - const { blockParams, blocksConditions: bConditions } = - this.createBlockConditions(params, [limit]); + const { blockParams, blocksConditions: bConditions } = this.createBlockConditions(params, [ + limit, + ]); - const { params: txParams, conditions: txConditions } = - this.createTransactionConditions(params, blockParams); + const { params: txParams, conditions: txConditions } = this.createTransactionConditions( + params, + blockParams, + ); queryParams.push(...txParams); transactionsConditions = txConditions; blocksConditions = bConditions; } else { - const { conditions, params: txParams } = this.createTransactionConditions( + const { conditions, params: txParams } = this.createTransactionConditions(params, [limit]); + const { blocksConditions: bConditions, blockParams } = this.createBlockConditions( params, - [limit], + txParams, ); - const { blocksConditions: bConditions, blockParams } = - this.createBlockConditions(params, txParams); queryParams.push(...blockParams); transactionsConditions = conditions; blocksConditions = bConditions; } - let query = ""; + let query = ''; if (isBlockQueryFirst) { query = ` WITH filtered_block AS ( @@ -247,7 +240,7 @@ export default class TransactionDbRepository implements TransactionRepository { const { rows } = await rootPgPool.query(query, queryParams); - const edges = rows.map((row) => ({ + const edges = rows.map(row => 
({ cursor: row.id.toString(), node: transactionValidator.validate(row), })); @@ -319,7 +312,7 @@ export default class TransactionDbRepository implements TransactionRepository { async getTransactionsByRequestKey(params: GetTransactionsByRequestKey) { const { requestKey, blockHash, minimumDepth } = params; const queryParams: (string | number)[] = [requestKey]; - let conditions = ""; + let conditions = ''; if (blockHash) { queryParams.push(blockHash); @@ -360,7 +353,7 @@ export default class TransactionDbRepository implements TransactionRepository { const { rows } = await rootPgPool.query(query, queryParams); - const output = rows.map((row) => transactionValidator.validate(row)); + const output = rows.map(row => transactionValidator.validate(row)); return output; } @@ -380,7 +373,7 @@ export default class TransactionDbRepository implements TransactionRepository { }); const queryParams: (string | number)[] = [limit, publicKey]; - let cursorCondition = ""; + let cursorCondition = ''; if (after) { cursorCondition = `\nAND t.id < $3`; @@ -428,7 +421,7 @@ export default class TransactionDbRepository implements TransactionRepository { const { rows } = await rootPgPool.query(query, queryParams); - const edges = rows.map((row) => ({ + const edges = rows.map(row => ({ cursor: row.id.toString(), node: transactionValidator.validate(row), })); @@ -448,19 +441,15 @@ export default class TransactionDbRepository implements TransactionRepository { `; const { rows } = await rootPgPool.query(query, [publicKey]); - const totalCount = parseInt(rows?.[0]?.count ?? "0", 10); + const totalCount = parseInt(rows?.[0]?.count ?? '0', 10); return totalCount; } - async getTransactionsCount( - params: GetTransactionsCountParams, - ): Promise<number> { - const hasNoParams = Object.values(params).every((v) => !v); + async getTransactionsCount(params: GetTransactionsCountParams): Promise<number> { + const hasNoParams = Object.values(params).every(v => !v); if (hasNoParams) { - const cachedData = MEMORY_CACHE.get<NetworkStatistics>( - NETWORK_STATISTICS_KEY, - ); + const cachedData = MEMORY_CACHE.get<NetworkStatistics>(NETWORK_STATISTICS_KEY); return cachedData?.transactionCount ?? 0; } @@ -478,11 +467,10 @@ export default class TransactionDbRepository implements TransactionRepository { const transactionsParams: (string | number)[] = []; const blockParams: (string | number)[] = []; - let transactionsConditions = ""; - let blocksConditions = ""; + let transactionsConditions = ''; + let blocksConditions = ''; - const localOperator = (paramsLength: number) => - paramsLength > 1 ? `\nAND` : "WHERE"; + const localOperator = (paramsLength: number) => (paramsLength > 1 ? `\nAND` : 'WHERE'); if (accountName) { transactionsParams.push(accountName); @@ -561,7 +549,7 @@ export default class TransactionDbRepository implements TransactionRepository { ) SELECT COUNT(*) as count FROM filtered_transactions t - ${blocksConditions ? `JOIN "Blocks" b ON b.id = t."blockId"` : ""} + ${blocksConditions ? 
`JOIN "Blocks" b ON b.id = t."blockId"` : ''} ${blocksConditions} `; @@ -574,10 +562,8 @@ export default class TransactionDbRepository implements TransactionRepository { return totalCount; } - async getTransactionsByEventIds( - eventIds: readonly string[], - ): Promise<TransactionOutput[]> { - console.log("Batching for event IDs:", eventIds); + async getTransactionsByEventIds(eventIds: readonly string[]): Promise<TransactionOutput[]> { + console.log('Batching for event IDs:', eventIds); const { rows } = await rootPgPool.query( `SELECT t.id as id, @@ -609,7 +595,7 @@ export default class TransactionDbRepository implements TransactionRepository { ); if (rows.length !== eventIds.length) { - throw new Error("There was an issue fetching blocks for event IDs."); + throw new Error('There was an issue fetching blocks for event IDs.'); } const transactionMap = rows.reduce( @@ -620,9 +606,7 @@ export default class TransactionDbRepository implements TransactionRepository { {}, ); - return eventIds.map( - (eventId) => transactionMap[eventId], - ) as TransactionOutput[]; + return eventIds.map(eventId => transactionMap[eventId]) as TransactionOutput[]; } async getSigners(transactionId: string, orderIndex?: number) { @@ -645,7 +629,7 @@ export default class TransactionDbRepository implements TransactionRepository { const { rows } = await rootPgPool.query(query, queryParams); - const output = rows.map((row) => signerMetaValidator.validate(row)); + const output = rows.map(row => signerMetaValidator.validate(row)); return output; } diff --git a/indexer/src/kadena-server/repository/infra/repository/transfer-db-repository.ts b/indexer/src/kadena-server/repository/infra/repository/transfer-db-repository.ts index 74708faa..3de10b0d 100644 --- a/indexer/src/kadena-server/repository/infra/repository/transfer-db-repository.ts +++ b/indexer/src/kadena-server/repository/infra/repository/transfer-db-repository.ts @@ -1,14 +1,14 @@ -import { rootPgPool } from "../../../../config/database"; +import { rootPgPool } from '../../../../config/database'; import TransferRepository, { GetCrossChainTransferByPactIdParams, GetTotalCountParams, GetTransfersByTransactionIdParams, GetTransfersParams, -} from "../../application/transfer-repository"; -import { getPageInfo, getPaginationParams } from "../../pagination"; -import { transferSchemaValidator } from "../schema-validator/transfer-schema-validator"; +} from '../../application/transfer-repository'; +import { getPageInfo, getPaginationParams } from '../../pagination'; +import { transferSchemaValidator } from '../schema-validator/transfer-schema-validator'; -const operator = (paramsLength: number) => (paramsLength > 2 ? `AND` : "WHERE"); +const operator = (paramsLength: number) => (paramsLength > 2 ? 
`AND` : 'WHERE'); export default class TransferDbRepository implements TransferRepository { async getTransfers(params: GetTransfersParams) { @@ -33,7 +33,7 @@ export default class TransferDbRepository implements TransferRepository { last, }); const queryParams: (string | number)[] = [limit]; - let conditions = ""; + let conditions = ''; if (accountName) { queryParams.push(accountName); @@ -77,7 +77,7 @@ export default class TransferDbRepository implements TransferRepository { conditions += `\n${op} transfers.modulehash = $${queryParams.length}`; } - let query = ""; + let query = ''; if (blockHash) { queryParams.push(blockHash); @@ -197,7 +197,7 @@ export default class TransferDbRepository implements TransferRepository { const { rows } = await rootPgPool.query(query, queryParams); - const edges = rows.map((row) => ({ + const edges = rows.map(row => ({ cursor: row.id.toString(), node: transferSchemaValidator.validate(row), })); @@ -206,10 +206,7 @@ export default class TransferDbRepository implements TransferRepository { return pageInfo; } - async getCrossChainTransferByPactId({ - amount, - pactId, - }: GetCrossChainTransferByPactIdParams) { + async getCrossChainTransferByPactId({ amount, pactId }: GetCrossChainTransferByPactIdParams) { const query = ` select transfers.id as id, transfers.amount as "transferAmount", @@ -240,7 +237,7 @@ export default class TransferDbRepository implements TransferRepository { } async getTotalCountOfTransfers(params: GetTotalCountParams): Promise<number> { - const hasNoParams = Object.values(params).every((v) => !v); + const hasNoParams = Object.values(params).every(v => !v); if (hasNoParams) { const totalTransfersCountQuery = ` @@ -251,18 +248,11 @@ export default class TransferDbRepository implements TransferRepository { return transfersCount; } - const { - blockHash, - accountName, - chainId, - transactionId, - fungibleName, - requestKey, - } = params; + const { blockHash, accountName, chainId, transactionId, fungibleName, requestKey } = params; const queryParams: (string | number)[] = []; - let conditions = ""; + let conditions = ''; - const localOperator = (length: number) => (length > 1 ? `\nAND` : "WHERE"); + const localOperator = (length: number) => (length > 1 ? 
`\nAND` : 'WHERE'); if (accountName) { queryParams.push(accountName); @@ -308,23 +298,14 @@ export default class TransferDbRepository implements TransferRepository { ${conditions} `; - const { rows: countResult } = await rootPgPool.query( - totalCountQuery, - queryParams, - ); + const { rows: countResult } = await rootPgPool.query(totalCountQuery, queryParams); const totalCount = parseInt(countResult[0].count, 10); return totalCount; } async getTransfersByTransactionId(params: GetTransfersByTransactionIdParams) { - const { - transactionId, - after: afterEncoded, - before: beforeEncoded, - first, - last, - } = params; + const { transactionId, after: afterEncoded, before: beforeEncoded, first, last } = params; const { limit, order, after, before } = getPaginationParams({ after: afterEncoded, @@ -334,7 +315,7 @@ export default class TransferDbRepository implements TransferRepository { }); const queryParams: (string | number)[] = [limit, transactionId]; - let conditions = ""; + let conditions = ''; if (before) { queryParams.push(before); @@ -371,7 +352,7 @@ export default class TransferDbRepository implements TransferRepository { const { rows } = await rootPgPool.query(query, queryParams); - const edges = rows.map((row) => ({ + const edges = rows.map(row => ({ cursor: row.id.toString(), node: transferSchemaValidator.validate(row), })); diff --git a/indexer/src/kadena-server/repository/infra/schema-validator/block-schema-validator.ts b/indexer/src/kadena-server/repository/infra/schema-validator/block-schema-validator.ts index 061d2a1a..3c6d81b4 100644 --- a/indexer/src/kadena-server/repository/infra/schema-validator/block-schema-validator.ts +++ b/indexer/src/kadena-server/repository/infra/schema-validator/block-schema-validator.ts @@ -1,9 +1,9 @@ -import { BlockAttributes } from "../../../../models/block"; -import zod from "zod"; -import { BlockOutput } from "../../application/block-repository"; -import { convertStringToDate } from "../../../utils/date"; -import { calculateBlockDifficulty } from "../../../utils/difficulty"; -import { int64ToUint64String } from "../../../../utils/int-uint-64"; +import { BlockAttributes } from '../../../../models/block'; +import zod from 'zod'; +import { BlockOutput } from '../../application/block-repository'; +import { convertStringToDate } from '../../../utils/date'; +import { calculateBlockDifficulty } from '../../../utils/difficulty'; +import { int64ToUint64String } from '../../../../utils/int-uint-64'; const schema = zod.object({ id: zod.number(), @@ -23,7 +23,7 @@ const schema = zod.object({ const getBase64ID = (hash: string): string => { const inputString = `Block:${hash.toString()}`; - const base64ID = Buffer.from(inputString, "utf-8").toString("base64"); + const base64ID = Buffer.from(inputString, 'utf-8').toString('base64'); return base64ID; }; @@ -35,7 +35,7 @@ const validate = (row: any): BlockOutput => { creationTime: convertStringToDate(res.creationTime), epoch: convertStringToDate(res.epochStart), flags: int64ToUint64String(res.featureFlags), - powHash: "...", // TODO (STREAMING) + powHash: '...', // TODO (STREAMING) hash: res.hash, height: res.height, nonce: res.nonce, @@ -58,7 +58,7 @@ const mapFromSequelize = (blockModel: BlockAttributes): BlockOutput => { parentHash: blockModel.parent, chainId: blockModel.chainId, creationTime: convertStringToDate(blockModel.creationTime), - powHash: "...", // TODO (STREAMING) + powHash: '...', // TODO (STREAMING) difficulty: Number(calculateBlockDifficulty(blockModel.target)), epoch: 
convertStringToDate(blockModel.epochStart), flags: int64ToUint64String(blockModel.featureFlags), diff --git a/indexer/src/kadena-server/repository/infra/schema-validator/event-schema-validator.ts b/indexer/src/kadena-server/repository/infra/schema-validator/event-schema-validator.ts index f286dee0..a7412b5a 100644 --- a/indexer/src/kadena-server/repository/infra/schema-validator/event-schema-validator.ts +++ b/indexer/src/kadena-server/repository/infra/schema-validator/event-schema-validator.ts @@ -1,5 +1,5 @@ -import { EventOutput } from "../../application/event-repository"; -import zod from "zod"; +import { EventOutput } from '../../application/event-repository'; +import zod from 'zod'; const schema = zod.object({ id: zod.number(), @@ -13,13 +13,9 @@ const schema = zod.object({ parameters: zod.array(zod.any()), }); -const getBase64ID = ( - hash: string, - orderIndex: number, - requestKey: string, -): string => { +const getBase64ID = (hash: string, orderIndex: number, requestKey: string): string => { const inputString = `Event:[\"${hash}\",\"${orderIndex}\",\"${requestKey}\"]`; - const base64ID = Buffer.from(inputString, "utf-8").toString("base64"); + const base64ID = Buffer.from(inputString, 'utf-8').toString('base64'); return base64ID; }; diff --git a/indexer/src/kadena-server/repository/infra/schema-validator/fungible-account-validator.ts b/indexer/src/kadena-server/repository/infra/schema-validator/fungible-account-validator.ts index cc5d002a..44814dde 100644 --- a/indexer/src/kadena-server/repository/infra/schema-validator/fungible-account-validator.ts +++ b/indexer/src/kadena-server/repository/infra/schema-validator/fungible-account-validator.ts @@ -1,6 +1,6 @@ -import { FungibleAccountOutput } from "../../application/balance-repository"; -import zod from "zod"; -import { BalanceAttributes } from "../../../../models/balance"; +import { FungibleAccountOutput } from '../../application/balance-repository'; +import zod from 'zod'; +import { BalanceAttributes } from '../../../../models/balance'; const fungibleSchema = zod.object({ id: zod.number(), @@ -14,11 +14,11 @@ const totalBalanceSchema = zod.object({ const getBase64ID = (fungibleName: string, accountName: string): string => { const inputString = `FungibleAccount:[\"${fungibleName}\",\"${accountName}\"]`; - const base64ID = Buffer.from(inputString, "utf-8").toString("base64"); + const base64ID = Buffer.from(inputString, 'utf-8').toString('base64'); return base64ID; }; -const validate = (row: any): Omit => { +const validate = (row: any): Omit => { const res = fungibleSchema.parse(row); return { id: getBase64ID(res.module, res.account), @@ -34,7 +34,7 @@ const validateTotalBalance = (row: any): string => { const mapFromSequelize = ( balanceModel: BalanceAttributes, -): Omit => { +): Omit => { return { id: getBase64ID(balanceModel.module, balanceModel.account), accountName: balanceModel.account, diff --git a/indexer/src/kadena-server/repository/infra/schema-validator/fungible-chain-account-validator.ts b/indexer/src/kadena-server/repository/infra/schema-validator/fungible-chain-account-validator.ts index de2cc15e..fe9770de 100644 --- a/indexer/src/kadena-server/repository/infra/schema-validator/fungible-chain-account-validator.ts +++ b/indexer/src/kadena-server/repository/infra/schema-validator/fungible-chain-account-validator.ts @@ -1,13 +1,9 @@ -import zod from "zod"; -import { FungibleChainAccountOutput } from "../../application/block-repository"; +import zod from 'zod'; +import { FungibleChainAccountOutput } from 
'../../application/block-repository'; -const getBase64IDChain = ( - chainId: number, - fungibleName: string, - accountName: string, -): string => { +const getBase64IDChain = (chainId: number, fungibleName: string, accountName: string): string => { const inputString = `FungibleChainAccount:[\"${chainId}\",\"${fungibleName}\",\"${accountName}\"]`; - const base64ID = Buffer.from(inputString, "utf-8").toString("base64"); + const base64ID = Buffer.from(inputString, 'utf-8').toString('base64'); return base64ID; }; diff --git a/indexer/src/kadena-server/repository/infra/schema-validator/node-info-validator.ts b/indexer/src/kadena-server/repository/infra/schema-validator/node-info-validator.ts index d2efe2a1..ac5565f9 100644 --- a/indexer/src/kadena-server/repository/infra/schema-validator/node-info-validator.ts +++ b/indexer/src/kadena-server/repository/infra/schema-validator/node-info-validator.ts @@ -1,6 +1,6 @@ -import zod from "zod"; -import { GetNodeInfo } from "../../application/network-repository"; -import { getRequiredEnvString } from "../../../../utils/helpers"; +import zod from 'zod'; +import { GetNodeInfo } from '../../application/network-repository'; +import { getRequiredEnvString } from '../../../../utils/helpers'; const schema = zod.object({ nodeApiVersion: zod.string(), @@ -16,7 +16,7 @@ const schema = zod.object({ nodeHistoricalChains: zod.any(), }); -const HOST_URL = getRequiredEnvString("NODE_API_URL"); +const HOST_URL = getRequiredEnvString('NODE_API_URL'); function validate(row: any): GetNodeInfo { const res = schema.parse(row); diff --git a/indexer/src/kadena-server/repository/infra/schema-validator/non-fungible-token-balance-validator.ts b/indexer/src/kadena-server/repository/infra/schema-validator/non-fungible-token-balance-validator.ts index 260e07fe..e3b8593d 100644 --- a/indexer/src/kadena-server/repository/infra/schema-validator/non-fungible-token-balance-validator.ts +++ b/indexer/src/kadena-server/repository/infra/schema-validator/non-fungible-token-balance-validator.ts @@ -1,5 +1,5 @@ -import zod from "zod"; -import { INonFungibleTokenBalance } from "../../application/balance-repository"; +import zod from 'zod'; +import { INonFungibleTokenBalance } from '../../application/balance-repository'; const nonFungibleTokenBalanceSchema = zod.object({ id: zod.number(), @@ -9,13 +9,9 @@ const nonFungibleTokenBalanceSchema = zod.object({ balance: zod.string(), }); -const getBase64ID = ( - tokenId: string, - accountName: string, - chainId: number, -): string => { +const getBase64ID = (tokenId: string, accountName: string, chainId: number): string => { const inputString = `NonFungibleTokenBalance:[\"${tokenId}\",\"${accountName}\",\"${chainId}\"]`; - const base64ID = Buffer.from(inputString, "utf-8").toString("base64"); + const base64ID = Buffer.from(inputString, 'utf-8').toString('base64'); return base64ID; }; diff --git a/indexer/src/kadena-server/repository/infra/schema-validator/signer-schema-validator.ts b/indexer/src/kadena-server/repository/infra/schema-validator/signer-schema-validator.ts index 48e2bd50..80bcbb5f 100644 --- a/indexer/src/kadena-server/repository/infra/schema-validator/signer-schema-validator.ts +++ b/indexer/src/kadena-server/repository/infra/schema-validator/signer-schema-validator.ts @@ -1,22 +1,17 @@ -import zod from "zod"; -import { SignerOutput } from "../../application/transaction-repository"; +import zod from 'zod'; +import { SignerOutput } from '../../application/transaction-repository'; const schema = zod.object({ requestKey: zod.string(), 
publicKey: zod.string(), address: zod.string().nullable(), signerOrderIndex: zod.number(), - clist: zod - .array(zod.object({ args: zod.array(zod.any()), name: zod.string() })) - .nullable(), + clist: zod.array(zod.object({ args: zod.array(zod.any()), name: zod.string() })).nullable(), }); -export const getBase64SignerID = ( - requestKey: string, - orderIndex: number, -): string => { +export const getBase64SignerID = (requestKey: string, orderIndex: number): string => { const inputString = `Signer:[\"${requestKey}\",\"${orderIndex}\"]`; - const base64ID = Buffer.from(inputString, "utf-8").toString("base64"); + const base64ID = Buffer.from(inputString, 'utf-8').toString('base64'); return base64ID; }; @@ -27,8 +22,8 @@ function validate(row: any): SignerOutput { pubkey: res.publicKey, address: res.address, orderIndex: res.signerOrderIndex, - scheme: "", - clist: (res.clist ?? []).map((c) => ({ + scheme: '', + clist: (res.clist ?? []).map(c => ({ args: JSON.stringify(c.args), name: c.name, })), diff --git a/indexer/src/kadena-server/repository/infra/schema-validator/transaction-meta-schema-validator.ts b/indexer/src/kadena-server/repository/infra/schema-validator/transaction-meta-schema-validator.ts index a960eb34..2bff1256 100644 --- a/indexer/src/kadena-server/repository/infra/schema-validator/transaction-meta-schema-validator.ts +++ b/indexer/src/kadena-server/repository/infra/schema-validator/transaction-meta-schema-validator.ts @@ -1,5 +1,5 @@ -import zod from "zod"; -import { TransactionMetaOutput } from "../../application/transaction-repository"; +import zod from 'zod'; +import { TransactionMetaOutput } from '../../application/transaction-repository'; const schema = zod.object({ chainId: zod.number(), diff --git a/indexer/src/kadena-server/repository/infra/schema-validator/transaction-schema-validator.ts b/indexer/src/kadena-server/repository/infra/schema-validator/transaction-schema-validator.ts index c9d6ef1d..0b4e4e02 100644 --- a/indexer/src/kadena-server/repository/infra/schema-validator/transaction-schema-validator.ts +++ b/indexer/src/kadena-server/repository/infra/schema-validator/transaction-schema-validator.ts @@ -1,12 +1,12 @@ -import { getRequiredEnvString } from "../../../../utils/helpers"; -import { TransactionOutput } from "../../application/transaction-repository"; -import zod from "zod"; +import { getRequiredEnvString } from '../../../../utils/helpers'; +import { TransactionOutput } from '../../application/transaction-repository'; +import zod from 'zod'; -const NETWORK_ID = getRequiredEnvString("SYNC_NETWORK"); +const NETWORK_ID = getRequiredEnvString('SYNC_NETWORK'); const getBase64ID = (blockHash: string, requestKey: string): string => { const inputString = `Transaction:[\"${blockHash}\",\"${requestKey}\"]`; - const base64ID = Buffer.from(inputString, "utf-8").toString("base64"); + const base64ID = Buffer.from(inputString, 'utf-8').toString('base64'); return base64ID; }; @@ -34,7 +34,7 @@ const schema = zod.object({ function validate(row: any): TransactionOutput { const res = schema.parse(row); - const isSuccess = res.result.status === "success"; + const isSuccess = res.result.status === 'success'; const continuation = JSON.stringify(res.continuation); return { id: getBase64ID(res.blockHash, res.requestKey), @@ -45,11 +45,11 @@ function validate(row: any): TransactionOutput { sigs: res.sigs, result: { // TransactionMempoolInfo - status: "", // TODO + status: '', // TODO // TransactionResult badResult: !isSuccess ? 
res.result.data : null, - continuation: continuation === "{}" ? null : continuation, + continuation: continuation === '{}' ? null : continuation, eventCount: res.eventCount, transactionId: res.txid ? res.txid : null, height: res.height, diff --git a/indexer/src/kadena-server/repository/infra/schema-validator/transfer-schema-validator.ts b/indexer/src/kadena-server/repository/infra/schema-validator/transfer-schema-validator.ts index 76e8b9a6..a28b96f6 100644 --- a/indexer/src/kadena-server/repository/infra/schema-validator/transfer-schema-validator.ts +++ b/indexer/src/kadena-server/repository/infra/schema-validator/transfer-schema-validator.ts @@ -1,5 +1,5 @@ -import zod from "zod"; -import { TransferOutput } from "../../application/transfer-repository"; +import zod from 'zod'; +import { TransferOutput } from '../../application/transfer-repository'; const schema = zod.object({ id: zod.number(), @@ -25,20 +25,14 @@ const getBase64ID = ( requestKey: string, ): string => { const inputString = `Transfer:[\"${blockHash}\",\"${chainId}\",\"${orderIndex}\",\"${moduleHash}\",\"${requestKey}\"]`; - const base64ID = Buffer.from(inputString, "utf-8").toString("base64"); + const base64ID = Buffer.from(inputString, 'utf-8').toString('base64'); return base64ID; }; function validate(row: any): TransferOutput { const res = schema.parse(row); return { - id: getBase64ID( - res.blockHash, - res.chainId, - res.orderIndex, - res.moduleHash, - res.requestKey, - ), + id: getBase64ID(res.blockHash, res.chainId, res.orderIndex, res.moduleHash, res.requestKey), creationTime: new Date(Number(res.creationTime) * 1000), moduleHash: res.moduleHash, requestKey: res.requestKey, diff --git a/indexer/src/kadena-server/repository/pagination.ts b/indexer/src/kadena-server/repository/pagination.ts index e4ca832f..d43ddddc 100644 --- a/indexer/src/kadena-server/repository/pagination.ts +++ b/indexer/src/kadena-server/repository/pagination.ts @@ -1,5 +1,5 @@ -import { InputMaybe, PageInfo } from "../config/graphql-types"; -import { ConnectionEdge } from "./types"; +import { InputMaybe, PageInfo } from '../config/graphql-types'; +import { ConnectionEdge } from './types'; export interface PaginationsParams { after?: InputMaybe; @@ -12,21 +12,20 @@ const DEFAULT_LIMIT = 20; const LIMIT_NEXT_PAGE_CHECK = 1; interface Params { - order: "ASC" | "DESC"; + order: 'ASC' | 'DESC'; limit: number; edges: ConnectionEdge[]; after?: string | null; before?: string | null; } -export const encodeCursor = (cursor: string): string => - Buffer.from(cursor).toString("base64"); +export const encodeCursor = (cursor: string): string => Buffer.from(cursor).toString('base64'); export const decodeCursor = (cursor: string): string => - Buffer.from(cursor, "base64").toString("utf8"); + Buffer.from(cursor, 'base64').toString('utf8'); export const getPageInfo = ({ - order = "DESC", + order = 'DESC', limit: limitParam, edges, after, @@ -66,7 +65,7 @@ export const getPageInfo = ({ let endCursor = null; let newEdges = null; const idx = Math.min(length, limit); - if (order === "DESC") { + if (order === 'DESC') { hasNextPage = length > limit; hasPreviousPage = !!after; startCursor = encodeCursor(edges[0].cursor); @@ -81,7 +80,7 @@ export const getPageInfo = ({ newEdges = [...reversed]; } - const edgesWithCursorEncoded = newEdges.map((e) => ({ + const edgesWithCursorEncoded = newEdges.map(e => ({ cursor: encodeCursor(e.cursor), node: e.node, })); @@ -105,7 +104,7 @@ type PaginationInput = { type PaginationOutput = { limit: number; - order: "ASC" | "DESC"; + order: 
'ASC' | 'DESC'; after: string | null; before: string | null; }; @@ -119,7 +118,7 @@ export function getPaginationParams({ if (after) { return { limit: (first ?? DEFAULT_LIMIT) + LIMIT_NEXT_PAGE_CHECK, - order: "DESC", + order: 'DESC', after: decodeCursor(after), before: null, }; @@ -128,7 +127,7 @@ export function getPaginationParams({ if (before) { return { limit: (last ?? DEFAULT_LIMIT) + LIMIT_NEXT_PAGE_CHECK, - order: "ASC", + order: 'ASC', after: null, before: decodeCursor(before), }; @@ -137,7 +136,7 @@ export function getPaginationParams({ if (first) { return { limit: first + LIMIT_NEXT_PAGE_CHECK, - order: "DESC", + order: 'DESC', after: null, before: null, }; @@ -145,7 +144,7 @@ export function getPaginationParams({ if (last) { return { limit: last + LIMIT_NEXT_PAGE_CHECK, - order: "ASC", + order: 'ASC', after: null, before: null, }; @@ -153,7 +152,7 @@ export function getPaginationParams({ return { limit: DEFAULT_LIMIT + LIMIT_NEXT_PAGE_CHECK, - order: "DESC", + order: 'DESC', after: null, before: null, }; diff --git a/indexer/src/kadena-server/resolvers/fields/block-events-connection/total-count-block-events-connection.ts b/indexer/src/kadena-server/resolvers/fields/block-events-connection/total-count-block-events-connection.ts index 14dce215..ec1cab32 100644 --- a/indexer/src/kadena-server/resolvers/fields/block-events-connection/total-count-block-events-connection.ts +++ b/indexer/src/kadena-server/resolvers/fields/block-events-connection/total-count-block-events-connection.ts @@ -1,18 +1,16 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { BlockEventsConnectionResolvers } from "../../../config/graphql-types"; -import zod from "zod"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { BlockEventsConnectionResolvers } from '../../../config/graphql-types'; +import zod from 'zod'; const schema = zod.object({ blockHash: zod.string() }); -export const totalCountBlockEventsConnectionResolver: BlockEventsConnectionResolvers["totalCount"] = +export const totalCountBlockEventsConnectionResolver: BlockEventsConnectionResolvers['totalCount'] = async (parent, _args, context) => { - console.log("totalCountBlockEventsConnectionResolver"); + console.log('totalCountBlockEventsConnectionResolver'); const { blockHash } = schema.parse(parent); - const total = await context.eventRepository.getTotalCountOfBlockEvents( - blockHash - ); + const total = await context.eventRepository.getTotalCountOfBlockEvents(blockHash); return total; }; diff --git a/indexer/src/kadena-server/resolvers/fields/block-transactions-connection/total-count-block-transactions-connection-resolver.ts b/indexer/src/kadena-server/resolvers/fields/block-transactions-connection/total-count-block-transactions-connection-resolver.ts index 81b5b1e3..15932d22 100644 --- a/indexer/src/kadena-server/resolvers/fields/block-transactions-connection/total-count-block-transactions-connection-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/block-transactions-connection/total-count-block-transactions-connection-resolver.ts @@ -1,19 +1,18 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { BlockTransactionsConnectionResolvers } from "../../../config/graphql-types"; -import zod from "zod"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { BlockTransactionsConnectionResolvers } from '../../../config/graphql-types'; +import zod from 'zod'; const schema = zod.object({ blockHash: zod.string(), }); 
-export const totalCountBlockTransactionsConnectionResolver: BlockTransactionsConnectionResolvers["totalCount"] = +export const totalCountBlockTransactionsConnectionResolver: BlockTransactionsConnectionResolvers['totalCount'] = async (parent, _args, context) => { - console.log("totalCountBlockTransactionsConnectionResolver"); + console.log('totalCountBlockTransactionsConnectionResolver'); const { blockHash } = schema.parse(parent); - const total = - await context.blockRepository.getTotalCountOfBlockEvents(blockHash); + const total = await context.blockRepository.getTotalCountOfBlockEvents(blockHash); return total; }; diff --git a/indexer/src/kadena-server/resolvers/fields/block/events-block-resolver.ts b/indexer/src/kadena-server/resolvers/fields/block/events-block-resolver.ts index b00efe4b..52297ab5 100644 --- a/indexer/src/kadena-server/resolvers/fields/block/events-block-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/block/events-block-resolver.ts @@ -1,31 +1,34 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { BlockResolvers } from "../../../config/graphql-types"; -import { buildEventOutput } from "../../output/build-event-output"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { BlockResolvers } from '../../../config/graphql-types'; +import { buildEventOutput } from '../../output/build-event-output'; -export const eventsBlockResolver: BlockResolvers["events"] = - async (parent, args, context) => { - console.log("eventsBlockResolver"); - const { hash } = parent; - const { first, after, before, last } = args; +export const eventsBlockResolver: BlockResolvers['events'] = async ( + parent, + args, + context, +) => { + console.log('eventsBlockResolver'); + const { hash } = parent; + const { first, after, before, last } = args; - const output = await context.eventRepository.getBlockEvents({ - hash, - first, - after, - before, - last, - }); + const output = await context.eventRepository.getBlockEvents({ + hash, + first, + after, + before, + last, + }); - const edges = output.edges.map((e) => ({ - cursor: e.cursor, - node: buildEventOutput(e.node), - })); + const edges = output.edges.map(e => ({ + cursor: e.cursor, + node: buildEventOutput(e.node), + })); - return { - edges, - pageInfo: output.pageInfo, - // for resolvers - blockHash: hash, - totalCount: -1, - }; + return { + edges, + pageInfo: output.pageInfo, + // for resolvers + blockHash: hash, + totalCount: -1, }; +}; diff --git a/indexer/src/kadena-server/resolvers/fields/block/miner-account-block-resolver.ts b/indexer/src/kadena-server/resolvers/fields/block/miner-account-block-resolver.ts index 4b8cf24a..2992a754 100644 --- a/indexer/src/kadena-server/resolvers/fields/block/miner-account-block-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/block/miner-account-block-resolver.ts @@ -1,15 +1,15 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { BlockResolvers } from "../../../config/graphql-types"; -import { buildFungibleChainAccount } from "../../output/build-fungible-chain-account-output"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { BlockResolvers } from '../../../config/graphql-types'; +import { buildFungibleChainAccount } from '../../output/build-fungible-chain-account-output'; -export const minerAccountBlockResolver: BlockResolvers["minerAccount"] = - async (parent, _args, context) => { - console.log("minerAccountBlockResolver"); +export const 
minerAccountBlockResolver: BlockResolvers['minerAccount'] = async ( + parent, + _args, + context, +) => { + console.log('minerAccountBlockResolver'); - const output = await context.blockRepository.getMinerData( - parent.hash, - parent.chainId, - ); + const output = await context.blockRepository.getMinerData(parent.hash, parent.chainId); - return buildFungibleChainAccount(output); - }; + return buildFungibleChainAccount(output); +}; diff --git a/indexer/src/kadena-server/resolvers/fields/block/parent-block-resolver.ts b/indexer/src/kadena-server/resolvers/fields/block/parent-block-resolver.ts index 61b4a941..c46e187b 100644 --- a/indexer/src/kadena-server/resolvers/fields/block/parent-block-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/block/parent-block-resolver.ts @@ -1,21 +1,24 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { Block, BlockResolvers } from "../../../config/graphql-types"; -import zod from "zod"; -import { buildBlockOutput } from "../../output/build-block-output"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { Block, BlockResolvers } from '../../../config/graphql-types'; +import zod from 'zod'; +import { buildBlockOutput } from '../../output/build-block-output'; const schema = zod.object({ parentHash: zod.string() }); -export const parentBlockResolver: BlockResolvers["parent"] = - async (parent, _args, context): Promise => { - console.log("parentBlockResolver"); +export const parentBlockResolver: BlockResolvers['parent'] = async ( + parent, + _args, + context, +): Promise => { + console.log('parentBlockResolver'); - const { parentHash } = schema.parse(parent); + const { parentHash } = schema.parse(parent); - const output = await context.getBlocksByHashesLoader.load(parentHash); + const output = await context.getBlocksByHashesLoader.load(parentHash); - if (!output) { - return null; - } + if (!output) { + return null; + } - return buildBlockOutput(output); - }; + return buildBlockOutput(output); +}; diff --git a/indexer/src/kadena-server/resolvers/fields/block/pow-hash-block-resolver.ts b/indexer/src/kadena-server/resolvers/fields/block/pow-hash-block-resolver.ts index fe1f766d..ddc9a55f 100644 --- a/indexer/src/kadena-server/resolvers/fields/block/pow-hash-block-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/block/pow-hash-block-resolver.ts @@ -1,39 +1,37 @@ -import { getRequiredEnvString } from "../../../../utils/helpers"; -import { ResolverContext } from "../../../config/apollo-server-config"; -import { BlockResolvers } from "../../../config/graphql-types"; -import crypto from "crypto"; +import { getRequiredEnvString } from '../../../../utils/helpers'; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { BlockResolvers } from '../../../config/graphql-types'; +import crypto from 'crypto'; -const SYNC_BASE_URL = getRequiredEnvString("SYNC_BASE_URL"); -const NETWORK_ID = getRequiredEnvString("SYNC_NETWORK"); +const SYNC_BASE_URL = getRequiredEnvString('SYNC_BASE_URL'); +const NETWORK_ID = getRequiredEnvString('SYNC_NETWORK'); function base64UrlToBase64(base64url: any) { // Convert Base64 URL format to standard Base64 return ( - base64url.replace(/-/g, "+").replace(/_/g, "/") + - "=".repeat((4 - (base64url.length % 4)) % 4) + base64url.replace(/-/g, '+').replace(/_/g, '/') + '='.repeat((4 - (base64url.length % 4)) % 4) ); } async function hashWithBlake2s(input: any) { const normalizedBase64 = base64UrlToBase64(input); - const buffer = 
Buffer.from(normalizedBase64, "base64"); + const buffer = Buffer.from(normalizedBase64, 'base64'); const truncatedBuffer = buffer.subarray(0, -32); - const hash = crypto.createHash("blake2s256").update(truncatedBuffer).digest(); - return Buffer.from(hash).reverse().toString("hex"); + const hash = crypto.createHash('blake2s256').update(truncatedBuffer).digest(); + return Buffer.from(hash).reverse().toString('hex'); } -export const powHashBlockResolver: BlockResolvers["powHash"] = - async (parent) => { - console.log("powHashBlockResolver"); +export const powHashBlockResolver: BlockResolvers['powHash'] = async parent => { + console.log('powHashBlockResolver'); - const url = `${SYNC_BASE_URL}/${NETWORK_ID}/chain/${parent.chainId}/header/${parent.hash}`; - const res = await fetch(url, { - method: "GET", - headers: { - "Content-Type": "application/json", - }, - }); + const url = `${SYNC_BASE_URL}/${NETWORK_ID}/chain/${parent.chainId}/header/${parent.hash}`; + const res = await fetch(url, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + }, + }); - const output = await res.json(); - return hashWithBlake2s(output); - }; + const output = await res.json(); + return hashWithBlake2s(output); +}; diff --git a/indexer/src/kadena-server/resolvers/fields/block/transactions-block-resolver.ts b/indexer/src/kadena-server/resolvers/fields/block/transactions-block-resolver.ts index f3ea580a..aacfed6e 100644 --- a/indexer/src/kadena-server/resolvers/fields/block/transactions-block-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/block/transactions-block-resolver.ts @@ -1,32 +1,35 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { BlockResolvers } from "../../../config/graphql-types"; -import { buildTransactionOutput } from "../../output/build-transaction-output"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { BlockResolvers } from '../../../config/graphql-types'; +import { buildTransactionOutput } from '../../output/build-transaction-output'; -export const transactionsBlockResolver: BlockResolvers["transactions"] = - async (parent, args, context) => { - console.log("transactionsBlockResolver"); - const { hash } = parent; - const { first, last, before, after } = args; - const output = await context.transactionRepository.getTransactions({ - blockHash: hash, - first, - last, - before, - after, - }); - - const edges = output.edges.map((e) => { - return { - cursor: e.cursor, - node: buildTransactionOutput(e.node), - }; - }); +export const transactionsBlockResolver: BlockResolvers['transactions'] = async ( + parent, + args, + context, +) => { + console.log('transactionsBlockResolver'); + const { hash } = parent; + const { first, last, before, after } = args; + const output = await context.transactionRepository.getTransactions({ + blockHash: hash, + first, + last, + before, + after, + }); + const edges = output.edges.map(e => { return { - edges, - pageInfo: output.pageInfo, - // for resolvers - blockHash: hash, - totalCount: -1, + cursor: e.cursor, + node: buildTransactionOutput(e.node), }; + }); + + return { + edges, + pageInfo: output.pageInfo, + // for resolvers + blockHash: hash, + totalCount: -1, }; +}; diff --git a/indexer/src/kadena-server/resolvers/fields/event/block-event-resolver.ts b/indexer/src/kadena-server/resolvers/fields/event/block-event-resolver.ts index 90756891..e09a6707 100644 --- a/indexer/src/kadena-server/resolvers/fields/event/block-event-resolver.ts +++ 
b/indexer/src/kadena-server/resolvers/fields/event/block-event-resolver.ts @@ -1,18 +1,21 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { EventResolvers } from "../../../config/graphql-types"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { EventResolvers } from '../../../config/graphql-types'; -import zod from "zod"; -import { buildBlockOutput } from "../../output/build-block-output"; +import zod from 'zod'; +import { buildBlockOutput } from '../../output/build-block-output'; const schema = zod.object({ eventId: zod.string() }); -export const blockEventResolver: EventResolvers["block"] = - async (parent, _args, context) => { - console.log("blockEventResolver"); +export const blockEventResolver: EventResolvers['block'] = async ( + parent, + _args, + context, +) => { + console.log('blockEventResolver'); - const { eventId } = schema.parse(parent); + const { eventId } = schema.parse(parent); - const output = await context.getBlocksByEventIdsLoader.load(eventId); + const output = await context.getBlocksByEventIdsLoader.load(eventId); - return buildBlockOutput(output); - }; + return buildBlockOutput(output); +}; diff --git a/indexer/src/kadena-server/resolvers/fields/event/transaction-event-resolver.ts b/indexer/src/kadena-server/resolvers/fields/event/transaction-event-resolver.ts index 4f69b04a..3ec81cdd 100644 --- a/indexer/src/kadena-server/resolvers/fields/event/transaction-event-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/event/transaction-event-resolver.ts @@ -1,17 +1,20 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { EventResolvers } from "../../../config/graphql-types"; -import zod from "zod"; -import { buildTransactionOutput } from "../../output/build-transaction-output"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { EventResolvers } from '../../../config/graphql-types'; +import zod from 'zod'; +import { buildTransactionOutput } from '../../output/build-transaction-output'; const schema = zod.object({ eventId: zod.string() }); -export const transactionEventResolver: EventResolvers["transaction"] = - async (parent, _args, context) => { - console.log("transactionEventResolver"); +export const transactionEventResolver: EventResolvers['transaction'] = async ( + parent, + _args, + context, +) => { + console.log('transactionEventResolver'); - const { eventId } = schema.parse(parent); + const { eventId } = schema.parse(parent); - const output = await context.getTransactionsByEventIdsLoader.load(eventId); + const output = await context.getTransactionsByEventIdsLoader.load(eventId); - return buildTransactionOutput(output); - }; + return buildTransactionOutput(output); +}; diff --git a/indexer/src/kadena-server/resolvers/fields/fungible-account/chain-accounts-fungible-account-resolver.ts b/indexer/src/kadena-server/resolvers/fields/fungible-account/chain-accounts-fungible-account-resolver.ts index cc803656..7941fd23 100644 --- a/indexer/src/kadena-server/resolvers/fields/fungible-account/chain-accounts-fungible-account-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/fungible-account/chain-accounts-fungible-account-resolver.ts @@ -1,14 +1,14 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { FungibleAccountResolvers } from "../../../config/graphql-types"; -import { buildFungibleChainAccount } from "../../output/build-fungible-chain-account-output"; +import { ResolverContext } from 
'../../../config/apollo-server-config'; +import { FungibleAccountResolvers } from '../../../config/graphql-types'; +import { buildFungibleChainAccount } from '../../output/build-fungible-chain-account-output'; -export const chainAccountsFungibleAccountResolver: FungibleAccountResolvers["chainAccounts"] = +export const chainAccountsFungibleAccountResolver: FungibleAccountResolvers['chainAccounts'] = async (parent, _args, context) => { - console.log("chainAccountsFungibleAccountResolver"); + console.log('chainAccountsFungibleAccountResolver'); const accounts = await context.balanceRepository.getChainsAccountInfo_NODE( parent.accountName, parent.fungibleName, ); - return accounts.map((acc) => buildFungibleChainAccount(acc)); + return accounts.map(acc => buildFungibleChainAccount(acc)); }; diff --git a/indexer/src/kadena-server/resolvers/fields/fungible-account/transactions-connection/total-count-fungible-account-transactions-connection-resolver.ts b/indexer/src/kadena-server/resolvers/fields/fungible-account/transactions-connection/total-count-fungible-account-transactions-connection-resolver.ts index 22524080..f7c26046 100644 --- a/indexer/src/kadena-server/resolvers/fields/fungible-account/transactions-connection/total-count-fungible-account-transactions-connection-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/fungible-account/transactions-connection/total-count-fungible-account-transactions-connection-resolver.ts @@ -1,15 +1,15 @@ -import { ResolverContext } from "../../../../config/apollo-server-config"; -import { FungibleAccountTransactionsConnectionResolvers } from "../../../../config/graphql-types"; -import zod from "zod"; +import { ResolverContext } from '../../../../config/apollo-server-config'; +import { FungibleAccountTransactionsConnectionResolvers } from '../../../../config/graphql-types'; +import zod from 'zod'; const schema = zod.object({ accountName: zod.string(), fungibleName: zod.string(), }); -export const totalCountFungibleAccountTransactionsConnectionResolver: FungibleAccountTransactionsConnectionResolvers["totalCount"] = +export const totalCountFungibleAccountTransactionsConnectionResolver: FungibleAccountTransactionsConnectionResolvers['totalCount'] = async (parent, _args, context) => { - console.log("totalCountFungibleAccountTransactionsConnectionResolver"); + console.log('totalCountFungibleAccountTransactionsConnectionResolver'); const { accountName, fungibleName } = schema.parse(parent); diff --git a/indexer/src/kadena-server/resolvers/fields/fungible-account/transactions-fungible-account-resolver.ts b/indexer/src/kadena-server/resolvers/fields/fungible-account/transactions-fungible-account-resolver.ts index 75f9203a..b1d03cfd 100644 --- a/indexer/src/kadena-server/resolvers/fields/fungible-account/transactions-fungible-account-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/fungible-account/transactions-fungible-account-resolver.ts @@ -1,10 +1,10 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { FungibleAccountResolvers } from "../../../config/graphql-types"; -import { buildTransactionOutput } from "../../output/build-transaction-output"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { FungibleAccountResolvers } from '../../../config/graphql-types'; +import { buildTransactionOutput } from '../../output/build-transaction-output'; -export const transactionsFungibleAccountResolver: FungibleAccountResolvers["transactions"] = +export const 
transactionsFungibleAccountResolver: FungibleAccountResolvers['transactions'] = async (parent, args, context) => { - console.log("transactionsFungibleAccountResolver"); + console.log('transactionsFungibleAccountResolver'); const { first, last, after, before } = args; const output = await context.transactionRepository.getTransactions({ @@ -16,7 +16,7 @@ export const transactionsFungibleAccountResolver: FungibleAccountResolvers - const edges = output.edges.map((e) => ({ + const edges = output.edges.map(e => ({ cursor: e.cursor, node: buildTransactionOutput(e.node), })); diff --git a/indexer/src/kadena-server/resolvers/fields/fungible-account/transfers-connection/total-count-fungible-account-transfers-connection-resolver.ts b/indexer/src/kadena-server/resolvers/fields/fungible-account/transfers-connection/total-count-fungible-account-transfers-connection-resolver.ts index 9098dfca..f641a0e0 100644 --- a/indexer/src/kadena-server/resolvers/fields/fungible-account/transfers-connection/total-count-fungible-account-transfers-connection-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/fungible-account/transfers-connection/total-count-fungible-account-transfers-connection-resolver.ts @@ -1,15 +1,15 @@ -import { ResolverContext } from "../../../../config/apollo-server-config"; -import { FungibleAccountTransfersConnectionResolvers } from "../../../../config/graphql-types"; -import zod from "zod"; +import { ResolverContext } from '../../../../config/apollo-server-config'; +import { FungibleAccountTransfersConnectionResolvers } from '../../../../config/graphql-types'; +import zod from 'zod'; const schema = zod.object({ accountName: zod.string(), fungibleName: zod.string(), }); -export const totalCountFungibleAccountTransfersConnectionResolver: FungibleAccountTransfersConnectionResolvers["totalCount"] = +export const totalCountFungibleAccountTransfersConnectionResolver: FungibleAccountTransfersConnectionResolvers['totalCount'] = async (parent, _args, context) => { - console.log("totalCountFungibleAccountTransfersConnection"); + console.log('totalCountFungibleAccountTransfersConnection'); const { accountName, fungibleName } = schema.parse(parent); diff --git a/indexer/src/kadena-server/resolvers/fields/fungible-account/transfers-fungible-account-resolver.ts b/indexer/src/kadena-server/resolvers/fields/fungible-account/transfers-fungible-account-resolver.ts index 747e96e8..3e443883 100644 --- a/indexer/src/kadena-server/resolvers/fields/fungible-account/transfers-fungible-account-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/fungible-account/transfers-fungible-account-resolver.ts @@ -1,10 +1,10 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { FungibleAccountResolvers } from "../../../config/graphql-types"; -import { buildTransferOutput } from "../../output/build-transfer-output"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { FungibleAccountResolvers } from '../../../config/graphql-types'; +import { buildTransferOutput } from '../../output/build-transfer-output'; -export const transfersFungibleAccountResolver: FungibleAccountResolvers["transfers"] = +export const transfersFungibleAccountResolver: FungibleAccountResolvers['transfers'] = async (parent, args, context) => { - console.log("transfersFungibleAccountResolver"); + console.log('transfersFungibleAccountResolver'); const { first, after, before, last } = args; const output = await context.transferRepository.getTransfers({ @@ -16,7 +16,7 @@ export const transfersFungibleAccountResolver:
FungibleAccountResolvers - const edges = output.edges.map((e) => ({ + const edges = output.edges.map(e => ({ cursor: e.cursor, node: buildTransferOutput(e.node), })); diff --git a/indexer/src/kadena-server/resolvers/fields/fungible-chain-account/transactions-connection/total-count-fungible-chain-account-transactions-connection-resolver.ts b/indexer/src/kadena-server/resolvers/fields/fungible-chain-account/transactions-connection/total-count-fungible-chain-account-transactions-connection-resolver.ts index f311c684..16ad70c0 100644 --- a/indexer/src/kadena-server/resolvers/fields/fungible-chain-account/transactions-connection/total-count-fungible-chain-account-transactions-connection-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/fungible-chain-account/transactions-connection/total-count-fungible-chain-account-transactions-connection-resolver.ts @@ -1,6 +1,6 @@ -import { ResolverContext } from "../../../../config/apollo-server-config"; -import { FungibleChainAccountTransactionsConnectionResolvers } from "../../../../config/graphql-types"; -import zod from "zod"; +import { ResolverContext } from '../../../../config/apollo-server-config'; +import { FungibleChainAccountTransactionsConnectionResolvers } from '../../../../config/graphql-types'; +import zod from 'zod'; const schema = zod.object({ accountName: zod.string(), @@ -8,9 +8,9 @@ const schema = zod.object({ fungibleName: zod.string(), }); -export const totalCountFungibleChainAccountTransactionsConnectionResolver: FungibleChainAccountTransactionsConnectionResolvers["totalCount"] = +export const totalCountFungibleChainAccountTransactionsConnectionResolver: FungibleChainAccountTransactionsConnectionResolvers['totalCount'] = async (parent, _args, context) => { - console.log("totalCountFungibleChainAccountTransactionsConnectionResolver"); + console.log('totalCountFungibleChainAccountTransactionsConnectionResolver'); const { accountName, chainId, fungibleName } = schema.parse(parent); diff --git a/indexer/src/kadena-server/resolvers/fields/fungible-chain-account/transactions-fungible-chain-account-resolver.ts b/indexer/src/kadena-server/resolvers/fields/fungible-chain-account/transactions-fungible-chain-account-resolver.ts index 44f3d5c6..a92b0b9f 100644 --- a/indexer/src/kadena-server/resolvers/fields/fungible-chain-account/transactions-fungible-chain-account-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/fungible-chain-account/transactions-fungible-chain-account-resolver.ts @@ -1,10 +1,10 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { FungibleChainAccountResolvers } from "../../../config/graphql-types"; -import { buildTransactionOutput } from "../../output/build-transaction-output"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { FungibleChainAccountResolvers } from '../../../config/graphql-types'; +import { buildTransactionOutput } from '../../output/build-transaction-output'; -export const transactionsFungibleChainAccountResolver: FungibleChainAccountResolvers["transactions"] = +export const transactionsFungibleChainAccountResolver: FungibleChainAccountResolvers['transactions'] = async (parent, args, context) => { - console.log("transactionsFungibleChainAccountResolver"); + console.log('transactionsFungibleChainAccountResolver'); const { first, after, last, before } = args; const output = await context.transactionRepository.getTransactions({ @@ -17,7 +17,7 @@ export const transactionsFungibleChainAccountResolver: FungibleChainAccountResol before, }); - const edges =
output.edges.map((e) => ({ + const edges = output.edges.map(e => ({ cursor: e.cursor, node: buildTransactionOutput(e.node), })); diff --git a/indexer/src/kadena-server/resolvers/fields/fungible-chain-account/transfers-connection/total-count-fungible-chain-account-transfers-connection-resolver.ts b/indexer/src/kadena-server/resolvers/fields/fungible-chain-account/transfers-connection/total-count-fungible-chain-account-transfers-connection-resolver.ts index 9add578f..eda51bf8 100644 --- a/indexer/src/kadena-server/resolvers/fields/fungible-chain-account/transfers-connection/total-count-fungible-chain-account-transfers-connection-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/fungible-chain-account/transfers-connection/total-count-fungible-chain-account-transfers-connection-resolver.ts @@ -1,6 +1,6 @@ -import { ResolverContext } from "../../../../config/apollo-server-config"; -import { FungibleChainAccountTransfersConnectionResolvers } from "../../../../config/graphql-types"; -import zod from "zod"; +import { ResolverContext } from '../../../../config/apollo-server-config'; +import { FungibleChainAccountTransfersConnectionResolvers } from '../../../../config/graphql-types'; +import zod from 'zod'; const schema = zod.object({ accountName: zod.string(), @@ -8,9 +8,9 @@ const schema = zod.object({ fungibleName: zod.string(), }); -export const totalCountFungibleChainAccountTransfersConnectionResolver: FungibleChainAccountTransfersConnectionResolvers["totalCount"] = +export const totalCountFungibleChainAccountTransfersConnectionResolver: FungibleChainAccountTransfersConnectionResolvers['totalCount'] = async (parent, _args, context) => { - console.log("totalCountFungibleChainAccountTransfersConnection"); + console.log('totalCountFungibleChainAccountTransfersConnection'); const { accountName, chainId, fungibleName } = schema.parse(parent); diff --git a/indexer/src/kadena-server/resolvers/fields/fungible-chain-account/transfers-fungible-chain-account-resolver.ts b/indexer/src/kadena-server/resolvers/fields/fungible-chain-account/transfers-fungible-chain-account-resolver.ts index d09eac00..d0cd1388 100644 --- a/indexer/src/kadena-server/resolvers/fields/fungible-chain-account/transfers-fungible-chain-account-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/fungible-chain-account/transfers-fungible-chain-account-resolver.ts @@ -1,10 +1,10 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { FungibleChainAccountResolvers } from "../../../config/graphql-types"; -import { buildTransferOutput } from "../../output/build-transfer-output"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { FungibleChainAccountResolvers } from '../../../config/graphql-types'; +import { buildTransferOutput } from '../../output/build-transfer-output'; -export const transfersFungibleChainAccountResolver: FungibleChainAccountResolvers["transfers"] = +export const transfersFungibleChainAccountResolver: FungibleChainAccountResolvers['transfers'] = async (parent, args, context) => { - console.log("transfersFungibleAccountResolver"); + console.log('transfersFungibleAccountResolver'); const { first, after, last, before } = args; const output = await context.transferRepository.getTransfers({ @@ -17,7 +17,7 @@ export const transfersFungibleChainAccountResolver: FungibleChainAccountResolver before, }); - const edges = output.edges.map((e) => ({ + const edges = output.edges.map(e => ({ cursor: e.cursor, node: buildTransferOutput(e.node), })); diff 
--git a/indexer/src/kadena-server/resolvers/fields/non-fungible-account/transactions-connection/total-count-non-fungible-account-transactions-connection-resolver.ts b/indexer/src/kadena-server/resolvers/fields/non-fungible-account/transactions-connection/total-count-non-fungible-account-transactions-connection-resolver.ts index bb26361d..bee62fe8 100644 --- a/indexer/src/kadena-server/resolvers/fields/non-fungible-account/transactions-connection/total-count-non-fungible-account-transactions-connection-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/non-fungible-account/transactions-connection/total-count-non-fungible-account-transactions-connection-resolver.ts @@ -1,14 +1,14 @@ -import { ResolverContext } from "../../../../config/apollo-server-config"; -import { NonFungibleAccountTransactionsConnectionResolvers } from "../../../../config/graphql-types"; -import zod from "zod"; +import { ResolverContext } from '../../../../config/apollo-server-config'; +import { NonFungibleAccountTransactionsConnectionResolvers } from '../../../../config/graphql-types'; +import zod from 'zod'; const schema = zod.object({ accountName: zod.string(), }); -export const totalCountNonFungibleAccountTransactionsConnectionResolver: NonFungibleAccountTransactionsConnectionResolvers["totalCount"] = +export const totalCountNonFungibleAccountTransactionsConnectionResolver: NonFungibleAccountTransactionsConnectionResolvers['totalCount'] = async (parent, _args, context) => { - console.log("totalCountNonFungibleAccountTransactionsConnectionResolver"); + console.log('totalCountNonFungibleAccountTransactionsConnectionResolver'); const { accountName } = schema.parse(parent); diff --git a/indexer/src/kadena-server/resolvers/fields/non-fungible-account/transactions-non-fungible-account-resolver.ts b/indexer/src/kadena-server/resolvers/fields/non-fungible-account/transactions-non-fungible-account-resolver.ts index 3b416c9d..ceb86afe 100644 --- a/indexer/src/kadena-server/resolvers/fields/non-fungible-account/transactions-non-fungible-account-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/non-fungible-account/transactions-non-fungible-account-resolver.ts @@ -1,10 +1,10 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { NonFungibleAccountResolvers } from "../../../config/graphql-types"; -import { buildTransactionOutput } from "../../output/build-transaction-output"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { NonFungibleAccountResolvers } from '../../../config/graphql-types'; +import { buildTransactionOutput } from '../../output/build-transaction-output'; -export const transactionsNonFungibleAccountResolver: NonFungibleAccountResolvers["transactions"] = +export const transactionsNonFungibleAccountResolver: NonFungibleAccountResolvers['transactions'] = async (parent, args, context) => { - console.log("transactionsNonFungibleAccountResolver"); + console.log('transactionsNonFungibleAccountResolver'); const { first, after, last, before } = args; const output = await context.transactionRepository.getTransactions({ @@ -16,7 +16,7 @@ export const transactionsNonFungibleAccountResolver: NonFungibleAccountResolvers hasTokenId: true, }); - const edges = output.edges.map((e) => ({ + const edges = output.edges.map(e => ({ cursor: e.cursor, node: buildTransactionOutput(e.node), })); diff --git 
a/indexer/src/kadena-server/resolvers/fields/non-fungible-chain-account/transactions-connection/total-count-non-fungible-chain-account-transactions-connection-resolver.ts b/indexer/src/kadena-server/resolvers/fields/non-fungible-chain-account/transactions-connection/total-count-non-fungible-chain-account-transactions-connection-resolver.ts index 1008148e..1dda6491 100644 --- a/indexer/src/kadena-server/resolvers/fields/non-fungible-chain-account/transactions-connection/total-count-non-fungible-chain-account-transactions-connection-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/non-fungible-chain-account/transactions-connection/total-count-non-fungible-chain-account-transactions-connection-resolver.ts @@ -1,17 +1,15 @@ -import { ResolverContext } from "../../../../config/apollo-server-config"; -import { NonFungibleChainAccountTransactionsConnectionResolvers } from "../../../../config/graphql-types"; -import zod from "zod"; +import { ResolverContext } from '../../../../config/apollo-server-config'; +import { NonFungibleChainAccountTransactionsConnectionResolvers } from '../../../../config/graphql-types'; +import zod from 'zod'; const schema = zod.object({ accountName: zod.string(), chainId: zod.string(), }); -export const totalCountNonFungibleChainAccountTransactionsConnectionResolver: NonFungibleChainAccountTransactionsConnectionResolvers["totalCount"] = +export const totalCountNonFungibleChainAccountTransactionsConnectionResolver: NonFungibleChainAccountTransactionsConnectionResolvers['totalCount'] = async (parent, _args, context) => { - console.log( - "totalCountNonFungibleChainAccountTransactionsConnectionResolver", - ); + console.log('totalCountNonFungibleChainAccountTransactionsConnectionResolver'); const { accountName, chainId } = schema.parse(parent); diff --git a/indexer/src/kadena-server/resolvers/fields/non-fungible-chain-account/transactions-non-fungible-chain-account.ts b/indexer/src/kadena-server/resolvers/fields/non-fungible-chain-account/transactions-non-fungible-chain-account.ts index c3937152..870f3917 100644 --- a/indexer/src/kadena-server/resolvers/fields/non-fungible-chain-account/transactions-non-fungible-chain-account.ts +++ b/indexer/src/kadena-server/resolvers/fields/non-fungible-chain-account/transactions-non-fungible-chain-account.ts @@ -1,10 +1,10 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { NonFungibleChainAccountResolvers } from "../../../config/graphql-types"; -import { buildTransactionOutput } from "../../output/build-transaction-output"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { NonFungibleChainAccountResolvers } from '../../../config/graphql-types'; +import { buildTransactionOutput } from '../../output/build-transaction-output'; -export const transactionsNonFungibleChainAccountResolver: NonFungibleChainAccountResolvers["transactions"] = +export const transactionsNonFungibleChainAccountResolver: NonFungibleChainAccountResolvers['transactions'] = async (parent, args, context) => { - console.log("transactionsNonFungibleChainAccountResolver"); + console.log('transactionsNonFungibleChainAccountResolver'); const { first, after, last, before } = args; const output = await context.transactionRepository.getTransactions({ @@ -17,7 +17,7 @@ export const transactionsNonFungibleChainAccountResolver: NonFungibleChainAccoun hasTokenId: true, }); - const edges = output.edges.map((e) => ({ + const edges = output.edges.map(e => ({ cursor: e.cursor, node: 
buildTransactionOutput(e.node), })); diff --git a/indexer/src/kadena-server/resolvers/fields/query-events-connection/total-count-query-events-connection-resolver.ts b/indexer/src/kadena-server/resolvers/fields/query-events-connection/total-count-query-events-connection-resolver.ts index 9b437a45..0797e0b7 100644 --- a/indexer/src/kadena-server/resolvers/fields/query-events-connection/total-count-query-events-connection-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/query-events-connection/total-count-query-events-connection-resolver.ts @@ -1,6 +1,6 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { QueryEventsConnectionResolvers } from "../../../config/graphql-types"; -import zod from "zod"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { QueryEventsConnectionResolvers } from '../../../config/graphql-types'; +import zod from 'zod'; const schema = zod.object({ blockHash: zod.string().nullable().optional(), @@ -12,9 +12,9 @@ const schema = zod.object({ qualifiedEventName: zod.string(), }); -export const totalCountQueryEventsConnectionResolver: QueryEventsConnectionResolvers["totalCount"] = +export const totalCountQueryEventsConnectionResolver: QueryEventsConnectionResolvers['totalCount'] = async (parent, _args, context) => { - console.log("totalCountQueryEventsConnectionResolver"); + console.log('totalCountQueryEventsConnectionResolver'); const { blockHash, chainId, diff --git a/indexer/src/kadena-server/resolvers/fields/query-transactions-by-public-key-connection/total-count-query-transactions-by-public-key-connection-resolver.ts b/indexer/src/kadena-server/resolvers/fields/query-transactions-by-public-key-connection/total-count-query-transactions-by-public-key-connection-resolver.ts index 36b04eba..9449da62 100644 --- a/indexer/src/kadena-server/resolvers/fields/query-transactions-by-public-key-connection/total-count-query-transactions-by-public-key-connection-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/query-transactions-by-public-key-connection/total-count-query-transactions-by-public-key-connection-resolver.ts @@ -1,20 +1,17 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { QueryTransactionsByPublicKeyConnectionResolvers } from "../../../config/graphql-types"; -import zod from "zod"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { QueryTransactionsByPublicKeyConnectionResolvers } from '../../../config/graphql-types'; +import zod from 'zod'; const schema = zod.object({ publicKey: zod.string(), }); -export const totalCountQueryTransactionsByPublicKeyConnectionResolver: QueryTransactionsByPublicKeyConnectionResolvers["totalCount"] = +export const totalCountQueryTransactionsByPublicKeyConnectionResolver: QueryTransactionsByPublicKeyConnectionResolvers['totalCount'] = async (parent, _args, context) => { - console.log("totalCountQueryTransactionsByPublicKeyConnectionResolver"); + console.log('totalCountQueryTransactionsByPublicKeyConnectionResolver'); const { publicKey } = schema.parse(parent); - const output = - await context.transactionRepository.getTransactionsByPublicKeyCount( - publicKey, - ); + const output = await context.transactionRepository.getTransactionsByPublicKeyCount(publicKey); return output; }; diff --git a/indexer/src/kadena-server/resolvers/fields/query-transactions-connection/total-count-query-transactions-connection-resolver.ts 
b/indexer/src/kadena-server/resolvers/fields/query-transactions-connection/total-count-query-transactions-connection-resolver.ts index ed8d628c..868dbbda 100644 --- a/indexer/src/kadena-server/resolvers/fields/query-transactions-connection/total-count-query-transactions-connection-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/query-transactions-connection/total-count-query-transactions-connection-resolver.ts @@ -1,6 +1,6 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { QueryTransactionsConnectionResolvers } from "../../../config/graphql-types"; -import zod from "zod"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { QueryTransactionsConnectionResolvers } from '../../../config/graphql-types'; +import zod from 'zod'; const schema = zod.object({ accountName: zod.string().nullable().optional(), @@ -13,9 +13,9 @@ const schema = zod.object({ requestKey: zod.string().nullable().optional(), }); -export const totalCountQueryTransactionsConnectionResolver: QueryTransactionsConnectionResolvers["totalCount"] = +export const totalCountQueryTransactionsConnectionResolver: QueryTransactionsConnectionResolvers['totalCount'] = async (parent, _args, context) => { - console.log("totalCountQueryTransactionsConnectionResolver"); + console.log('totalCountQueryTransactionsConnectionResolver'); const { accountName, blockHash, diff --git a/indexer/src/kadena-server/resolvers/fields/query-transfers-connection/total-count-query-transfers-connection-resolver.ts b/indexer/src/kadena-server/resolvers/fields/query-transfers-connection/total-count-query-transfers-connection-resolver.ts index 3182bba3..71c8c4a1 100644 --- a/indexer/src/kadena-server/resolvers/fields/query-transfers-connection/total-count-query-transfers-connection-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/query-transfers-connection/total-count-query-transfers-connection-resolver.ts @@ -1,6 +1,6 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { QueryTransfersConnectionResolvers } from "../../../config/graphql-types"; -import zod from "zod"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { QueryTransfersConnectionResolvers } from '../../../config/graphql-types'; +import zod from 'zod'; const schema = zod.object({ accountName: zod.string().nullable().optional(), @@ -10,21 +10,19 @@ const schema = zod.object({ requestKey: zod.string().nullable().optional(), }); -export const totalCountQueryTransfersConnectionResolver: QueryTransfersConnectionResolvers["totalCount"] = +export const totalCountQueryTransfersConnectionResolver: QueryTransfersConnectionResolvers['totalCount'] = async (parent, _args, context) => { - console.log("totalCountQueryTransfersConnectionResolver"); + console.log('totalCountQueryTransfersConnectionResolver'); - const { accountName, blockHash, chainId, fungibleName, requestKey } = - schema.parse(parent); + const { accountName, blockHash, chainId, fungibleName, requestKey } = schema.parse(parent); - const transactions = - await context.transferRepository.getTotalCountOfTransfers({ - accountName, - blockHash, - chainId, - fungibleName, - requestKey, - }); + const transactions = await context.transferRepository.getTotalCountOfTransfers({ + accountName, + blockHash, + chainId, + fungibleName, + requestKey, + }); return transactions; }; diff --git a/indexer/src/kadena-server/resolvers/fields/transaction-command/meta-transaction-command-resolver.ts 
b/indexer/src/kadena-server/resolvers/fields/transaction-command/meta-transaction-command-resolver.ts index 4e25e221..3617dda6 100644 --- a/indexer/src/kadena-server/resolvers/fields/transaction-command/meta-transaction-command-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/transaction-command/meta-transaction-command-resolver.ts @@ -1,18 +1,17 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { TransactionCommandResolvers } from "../../../config/graphql-types"; -import zod from "zod"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { TransactionCommandResolvers } from '../../../config/graphql-types'; +import zod from 'zod'; const schema = zod.object({ databaseTransactionId: zod.string() }); -export const metaTransactionCommandResolver: TransactionCommandResolvers["meta"] = +export const metaTransactionCommandResolver: TransactionCommandResolvers['meta'] = async (parent, _args, context) => { - console.log("metaTransactionCommandResolver"); + console.log('metaTransactionCommandResolver'); const parentArgs = schema.parse(parent); - const transactionMeta = - await context.transactionRepository.getTransactionMetaInfoById( - parentArgs.databaseTransactionId, - ); + const transactionMeta = await context.transactionRepository.getTransactionMetaInfoById( + parentArgs.databaseTransactionId, + ); return transactionMeta; }; diff --git a/indexer/src/kadena-server/resolvers/fields/transaction-command/signers-transaction-command-resolver.ts b/indexer/src/kadena-server/resolvers/fields/transaction-command/signers-transaction-command-resolver.ts index c72bf632..f04abd68 100644 --- a/indexer/src/kadena-server/resolvers/fields/transaction-command/signers-transaction-command-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/transaction-command/signers-transaction-command-resolver.ts @@ -1,17 +1,15 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { TransactionCommandResolvers } from "../../../config/graphql-types"; -import zod from "zod"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { TransactionCommandResolvers } from '../../../config/graphql-types'; +import zod from 'zod'; const schema = zod.object({ databaseTransactionId: zod.string() }); -export const signersTransactionCommandResolver: TransactionCommandResolvers["signers"] = +export const signersTransactionCommandResolver: TransactionCommandResolvers['signers'] = async (parent, _args, context) => { - console.log("signersTransactionCommandResolver"); + console.log('signersTransactionCommandResolver'); const parentArgs = schema.parse(parent); - const output = await context.transactionRepository.getSigners( - parentArgs.databaseTransactionId, - ); + const output = await context.transactionRepository.getSigners(parentArgs.databaseTransactionId); return output; }; diff --git a/indexer/src/kadena-server/resolvers/fields/transaction-result/block-transaction-result-resolver.ts b/indexer/src/kadena-server/resolvers/fields/transaction-result/block-transaction-result-resolver.ts index 188b1d4c..a7388d7e 100644 --- a/indexer/src/kadena-server/resolvers/fields/transaction-result/block-transaction-result-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/transaction-result/block-transaction-result-resolver.ts @@ -1,13 +1,13 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { TransactionResultResolvers } from "../../../config/graphql-types"; -import zod from "zod"; 
-import { buildBlockOutput } from "../../output/build-block-output"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { TransactionResultResolvers } from '../../../config/graphql-types'; +import zod from 'zod'; +import { buildBlockOutput } from '../../output/build-block-output'; const schema = zod.object({ databaseTransactionId: zod.string() }); -export const blockTransactionResultResolver: TransactionResultResolvers["block"] = +export const blockTransactionResultResolver: TransactionResultResolvers['block'] = async (parent, _args, context) => { - console.log("blockTransactionResultResolver"); + console.log('blockTransactionResultResolver'); const parentArgs = schema.parse(parent); diff --git a/indexer/src/kadena-server/resolvers/fields/transaction-result/events-connection/total-count-transaction-result-events-connection-resolver.ts b/indexer/src/kadena-server/resolvers/fields/transaction-result/events-connection/total-count-transaction-result-events-connection-resolver.ts index 5a43d1ac..6487d91b 100644 --- a/indexer/src/kadena-server/resolvers/fields/transaction-result/events-connection/total-count-transaction-result-events-connection-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/transaction-result/events-connection/total-count-transaction-result-events-connection-resolver.ts @@ -1,21 +1,19 @@ -import { ResolverContext } from "../../../../config/apollo-server-config"; -import { TransactionResultEventsConnectionResolvers } from "../../../../config/graphql-types"; -import zod from "zod"; +import { ResolverContext } from '../../../../config/apollo-server-config'; +import { TransactionResultEventsConnectionResolvers } from '../../../../config/graphql-types'; +import zod from 'zod'; const schema = zod.object({ databaseTransactionId: zod.string(), }); -export const totalCountTransactionResultEventsConnectionResolver: TransactionResultEventsConnectionResolvers["totalCount"] = +export const totalCountTransactionResultEventsConnectionResolver: TransactionResultEventsConnectionResolvers['totalCount'] = async (parent, _args, context) => { - console.log("totalCountTransactionResultEventsConnectionResolver"); + console.log('totalCountTransactionResultEventsConnectionResolver'); const { databaseTransactionId } = schema.parse(parent); - const output = await context.eventRepository.getTotalTransactionEventsCount( - { - transactionId: databaseTransactionId, - }, - ); + const output = await context.eventRepository.getTotalTransactionEventsCount({ + transactionId: databaseTransactionId, + }); return output; }; diff --git a/indexer/src/kadena-server/resolvers/fields/transaction-result/events-transaction-result-resolver.ts b/indexer/src/kadena-server/resolvers/fields/transaction-result/events-transaction-result-resolver.ts index f2aaaa91..a513b3aa 100644 --- a/indexer/src/kadena-server/resolvers/fields/transaction-result/events-transaction-result-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/transaction-result/events-transaction-result-resolver.ts @@ -1,13 +1,13 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { TransactionResultResolvers } from "../../../config/graphql-types"; -import { buildEventOutput } from "../../output/build-event-output"; -import zod from "zod"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { TransactionResultResolvers } from '../../../config/graphql-types'; +import { buildEventOutput } from '../../output/build-event-output'; +import zod from 'zod'; const 
schema = zod.object({ databaseTransactionId: zod.string() }); -export const eventsTransactionResultResolver: TransactionResultResolvers["events"] = +export const eventsTransactionResultResolver: TransactionResultResolvers['events'] = async (parent, args, context) => { - console.log("eventsTransactionResultResolver"); + console.log('eventsTransactionResultResolver'); const parentArgs = schema.parse(parent); @@ -20,7 +20,7 @@ export const eventsTransactionResultResolver: TransactionResultResolvers - const edges = output.edges.map((e) => ({ + const edges = output.edges.map(e => ({ cursor: e.cursor, node: buildEventOutput(e.node), })); diff --git a/indexer/src/kadena-server/resolvers/fields/transaction-result/transfers-connection/total-count-transaction-result-transfers-connection-resolver.ts b/indexer/src/kadena-server/resolvers/fields/transaction-result/transfers-connection/total-count-transaction-result-transfers-connection-resolver.ts index 6603d507..db0cd439 100644 --- a/indexer/src/kadena-server/resolvers/fields/transaction-result/transfers-connection/total-count-transaction-result-transfers-connection-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/transaction-result/transfers-connection/total-count-transaction-result-transfers-connection-resolver.ts @@ -1,14 +1,14 @@ -import { ResolverContext } from "../../../../config/apollo-server-config"; -import { TransactionResultTransfersConnectionResolvers } from "../../../../config/graphql-types"; -import zod from "zod"; +import { ResolverContext } from '../../../../config/apollo-server-config'; +import { TransactionResultTransfersConnectionResolvers } from '../../../../config/graphql-types'; +import zod from 'zod'; const schema = zod.object({ databaseTransactionId: zod.string(), }); -export const totalCountTransactionResultTransfersConnectionResolver: TransactionResultTransfersConnectionResolvers["totalCount"] = +export const totalCountTransactionResultTransfersConnectionResolver: TransactionResultTransfersConnectionResolvers['totalCount'] = async (parent, _args, context) => { - console.log("totalCountTransactionResultTransfersConnectionResolver"); + console.log('totalCountTransactionResultTransfersConnectionResolver'); const { databaseTransactionId } = schema.parse(parent); diff --git a/indexer/src/kadena-server/resolvers/fields/transaction-result/transfers-transaction-result-tesolver.ts b/indexer/src/kadena-server/resolvers/fields/transaction-result/transfers-transaction-result-tesolver.ts index c3384b00..88bc34a2 100644 --- a/indexer/src/kadena-server/resolvers/fields/transaction-result/transfers-transaction-result-tesolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/transaction-result/transfers-transaction-result-tesolver.ts @@ -1,28 +1,26 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { TransactionResultResolvers } from "../../../config/graphql-types"; -import { buildTransferOutput } from "../../output/build-transfer-output"; -import zod from "zod"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { TransactionResultResolvers } from '../../../config/graphql-types'; +import { buildTransferOutput } from '../../output/build-transfer-output'; +import zod from 'zod'; const schema = zod.object({ databaseTransactionId: zod.string() }); -export const transfersTransactionResultResolver: TransactionResultResolvers["transfers"] = +export const transfersTransactionResultResolver: TransactionResultResolvers['transfers'] = async (parent, args, context) => { -
console.log("transfersTransactionResultResolver"); + console.log('transfersTransactionResultResolver'); const parentArgs = schema.parse(parent); const { first, after, before, last } = args; - const output = await context.transferRepository.getTransfersByTransactionId( - { - transactionId: parentArgs.databaseTransactionId, - first, - after, - before, - last, - }, - ); - const edges = output.edges.map((e) => ({ + const output = await context.transferRepository.getTransfersByTransactionId({ + transactionId: parentArgs.databaseTransactionId, + first, + after, + before, + last, + }); + const edges = output.edges.map(e => ({ cursor: e.cursor, node: buildTransferOutput(e.node), })); diff --git a/indexer/src/kadena-server/resolvers/fields/transfer/block-transfer-resolver.ts b/indexer/src/kadena-server/resolvers/fields/transfer/block-transfer-resolver.ts index b63e2985..3fdacfef 100644 --- a/indexer/src/kadena-server/resolvers/fields/transfer/block-transfer-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/transfer/block-transfer-resolver.ts @@ -1,18 +1,21 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { TransferResolvers } from "../../../config/graphql-types"; -import { buildBlockOutput } from "../../output/build-block-output"; -import zod from "zod"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { TransferResolvers } from '../../../config/graphql-types'; +import { buildBlockOutput } from '../../output/build-block-output'; +import zod from 'zod'; const schema = zod.object({ blockHash: zod.string(), }); -export const blockTransferResolver: TransferResolvers["block"] = - async (parent, _args, context) => { - console.log("blockTransferResolver"); +export const blockTransferResolver: TransferResolvers['block'] = async ( + parent, + _args, + context, +) => { + console.log('blockTransferResolver'); - const { blockHash } = schema.parse(parent); - const output = await context.blockRepository.getBlockByHash(blockHash); + const { blockHash } = schema.parse(parent); + const output = await context.blockRepository.getBlockByHash(blockHash); - return buildBlockOutput(output); - }; + return buildBlockOutput(output); +}; diff --git a/indexer/src/kadena-server/resolvers/fields/transfer/cross-chain-transfer-transfer-resolver.ts b/indexer/src/kadena-server/resolvers/fields/transfer/cross-chain-transfer-transfer-resolver.ts index eec62893..2cc30357 100644 --- a/indexer/src/kadena-server/resolvers/fields/transfer/cross-chain-transfer-transfer-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/transfer/cross-chain-transfer-transfer-resolver.ts @@ -1,24 +1,23 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { TransferResolvers } from "../../../config/graphql-types"; -import zod from "zod"; -import { buildTransferOutput } from "../../output/build-transfer-output"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { TransferResolvers } from '../../../config/graphql-types'; +import zod from 'zod'; +import { buildTransferOutput } from '../../output/build-transfer-output'; const schema = zod.object({ pactId: zod.string().nullable(), amount: zod.string(), }); -export const crossChainTransferTransferResolver: TransferResolvers["crossChainTransfer"] = +export const crossChainTransferTransferResolver: TransferResolvers['crossChainTransfer'] = async (parent, _args, context) => { - console.log("crossChainTransferTransferResolver"); + 
console.log('crossChainTransferTransferResolver'); const { pactId, amount } = schema.parse(parent); if (!pactId) return null; - const output = - await context.transferRepository.getCrossChainTransferByPactId({ - pactId, - amount, - }); + const output = await context.transferRepository.getCrossChainTransferByPactId({ + pactId, + amount, + }); return buildTransferOutput(output); }; diff --git a/indexer/src/kadena-server/resolvers/fields/transfer/transaction-transfer-resolver.ts b/indexer/src/kadena-server/resolvers/fields/transfer/transaction-transfer-resolver.ts index f60e6e72..234c37e1 100644 --- a/indexer/src/kadena-server/resolvers/fields/transfer/transaction-transfer-resolver.ts +++ b/indexer/src/kadena-server/resolvers/fields/transfer/transaction-transfer-resolver.ts @@ -1,20 +1,17 @@ -import { ResolverContext } from "../../../config/apollo-server-config"; -import { TransferResolvers } from "../../../config/graphql-types"; -import zod from "zod"; -import { buildTransactionOutput } from "../../output/build-transaction-output"; +import { ResolverContext } from '../../../config/apollo-server-config'; +import { TransferResolvers } from '../../../config/graphql-types'; +import zod from 'zod'; +import { buildTransactionOutput } from '../../output/build-transaction-output'; const schema = zod.object({ transferId: zod.string() }); -export const transactionTransferResolver: TransferResolvers["transaction"] = +export const transactionTransferResolver: TransferResolvers['transaction'] = async (parent, _args, context) => { - console.log("transactionTransferResolver"); + console.log('transactionTransferResolver'); const { transferId } = schema.parse(parent); - const transaction = - await context.transactionRepository.getTransactionByTransferId( - transferId - ); + const transaction = await context.transactionRepository.getTransactionByTransferId(transferId); return buildTransactionOutput(transaction); }; diff --git a/indexer/src/kadena-server/resolvers/index.ts b/indexer/src/kadena-server/resolvers/index.ts index be132e7a..ca24aad7 100644 --- a/indexer/src/kadena-server/resolvers/index.ts +++ b/indexer/src/kadena-server/resolvers/index.ts @@ -1,69 +1,69 @@ -import { eventsBlockResolver } from "./fields/block/events-block-resolver"; -import { blockQueryResolver } from "./query/block-query-resolver"; -import { Resolvers } from "../config/graphql-types"; -import { parentBlockResolver } from "./fields/block/parent-block-resolver"; -import { minerAccountBlockResolver } from "./fields/block/miner-account-block-resolver"; -import { transactionsBlockResolver } from "./fields/block/transactions-block-resolver"; -import { transactionsQueryResolver } from "./query/transactions-query-resolver"; -import { transfersQueryResolver } from "./query/transfers-query-resolver"; -import { blockTransactionResultResolver } from "./fields/transaction-result/block-transaction-result-resolver"; -import { transactionTransferResolver } from "./fields/transfer/transaction-transfer-resolver"; -import { metaTransactionCommandResolver } from "./fields/transaction-command/meta-transaction-command-resolver"; -import { crossChainTransferTransferResolver } from "./fields/transfer/cross-chain-transfer-transfer-resolver"; -import { blocksFromDepthQueryResolver } from "./query/blocks-from-depth-query-resolver"; -import { blocksFromHeightQueryResolver } from "./query/blocks-from-height-query-resolver"; -import { totalCountQueryTransfersConnectionResolver } from 
"./fields/query-transfers-connection/total-count-query-transfers-connection-resolver"; -import { totalCountBlockEventsConnectionResolver } from "./fields/block-events-connection/total-count-block-events-connection"; -import { transactionQueryResolver } from "./query/transaction-query-resolver"; -import { transactionsByPublicKeyQueryResolver } from "./query/transactions-by-public-key-query-resolver"; -import { totalCountQueryTransactionsByPublicKeyConnectionResolver } from "./fields/query-transactions-by-public-key-connection/total-count-query-transactions-by-public-key-connection-resolver"; -import { eventsQueryResolver } from "./query/events-query-resolver"; -import { totalCountQueryEventsConnectionResolver } from "./fields/query-events-connection/total-count-query-events-connection-resolver"; -import { blockEventResolver } from "./fields/event/block-event-resolver"; -import { totalCountBlockTransactionsConnectionResolver } from "./fields/block-transactions-connection/total-count-block-transactions-connection-resolver"; -import { fungibleAccountQueryResolver } from "./query/fungible-account-query-resolver"; -import { transactionsFungibleAccountResolver } from "./fields/fungible-account/transactions-fungible-account-resolver"; -import { transfersFungibleAccountResolver } from "./fields/fungible-account/transfers-fungible-account-resolver"; -import { graphConfigurationQueryResolver } from "./query/graph-configuration-query-resolver"; -import { lastBlockHeightQueryResolver } from "./query/last-block-height-query-resolver"; -import { networkInfoQueryResolver } from "./query/network-info-query-resolver"; -import { chainAccountsFungibleAccountResolver } from "./fields/fungible-account/chain-accounts-fungible-account-resolver"; -import { nodeQueryResolver } from "./query/node-query-resolver"; -import { fungibleChainAccountsQueryResolver } from "./query/fungible-chain-accounts-query-resolver"; -import { transactionsFungibleChainAccountResolver } from "./fields/fungible-chain-account/transactions-fungible-chain-account-resolver"; -import { transfersFungibleChainAccountResolver } from "./fields/fungible-chain-account/transfers-fungible-chain-account-resolver"; -import { totalCountFungibleAccountTransfersConnectionResolver } from "./fields/fungible-account/transfers-connection/total-count-fungible-account-transfers-connection-resolver"; -import { totalCountFungibleAccountTransactionsConnectionResolver } from "./fields/fungible-account/transactions-connection/total-count-fungible-account-transactions-connection-resolver"; -import { totalCountFungibleChainAccountTransfersConnectionResolver } from "./fields/fungible-chain-account/transfers-connection/total-count-fungible-chain-account-transfers-connection-resolver"; -import { totalCountFungibleChainAccountTransactionsConnectionResolver } from "./fields/fungible-chain-account/transactions-connection/total-count-fungible-chain-account-transactions-connection-resolver"; -import { fungibleAccountsByPublicKeyQueryResolver } from "./query/fungible-accounts-by-public-key-query-resolver"; -import { transfersTransactionResultResolver } from "./fields/transaction-result/transfers-transaction-result-tesolver"; -import { totalCountTransactionResultTransfersConnectionResolver } from "./fields/transaction-result/transfers-connection/total-count-transaction-result-transfers-connection-resolver"; -import { eventsTransactionResultResolver } from "./fields/transaction-result/events-transaction-result-resolver"; -import { 
totalCountTransactionResultEventsConnectionResolver } from "./fields/transaction-result/events-connection/total-count-transaction-result-events-connection-resolver"; -import { transactionEventResolver } from "./fields/event/transaction-event-resolver"; -import { ResolverContext } from "../config/apollo-server-config"; -import { nodesQueryResolver } from "./query/nodes-query-resolver"; -import { totalCountQueryTransactionsConnectionResolver } from "./fields/query-transactions-connection/total-count-query-transactions-connection-resolver"; -import { transactionSubscriptionResolver } from "./subscription/transaction-subscription-resolver"; -import { newBlocksSubscriptionResolver } from "./subscription/new-blocks-subscription-resolver"; -import { eventsSubscriptionResolver } from "./subscription/events-subscription-resolver"; -import { newBlocksFromDepthSubscriptionResolver } from "./subscription/new-blocks-from-depth-subscription-resolver"; -import { blockTransferResolver } from "./fields/transfer/block-transfer-resolver"; -import { DateTimeResolver } from "graphql-scalars"; -import { fungibleChainAccountsByPublicKeyQueryResolver } from "./query/fungible-chain-accounts-by-public-key-query-resolver"; -import { completedBlockHeightsQueryResolver } from "./query/completed-block-heights-query-resolver"; -import { pactQueryResolver } from "./query/pact-query-resolver"; -import { gasLimitEstimateQueryResolver } from "./query/gas-limit-estimate-query-resolver"; -import { nonFungibleAccountQueryResolver } from "./query/non-fungible-account-query-resolver"; -import { signersTransactionCommandResolver } from "./fields/transaction-command/signers-transaction-command-resolver"; -import { transactionsNonFungibleAccountResolver } from "./fields/non-fungible-account/transactions-non-fungible-account-resolver"; -import { nonFungibleChainAccountQueryResolver } from "./query/non-fungible-chain-account-query-resolver"; -import { transactionsNonFungibleChainAccountResolver } from "./fields/non-fungible-chain-account/transactions-non-fungible-chain-account"; -import { totalCountNonFungibleAccountTransactionsConnectionResolver } from "./fields/non-fungible-account/transactions-connection/total-count-non-fungible-account-transactions-connection-resolver"; -import { totalCountNonFungibleChainAccountTransactionsConnectionResolver } from "./fields/non-fungible-chain-account/transactions-connection/total-count-non-fungible-chain-account-transactions-connection-resolver"; -import { fungibleChainAccountQueryResolver } from "./query/fungible-chain-account-query-resolver"; -import { powHashBlockResolver } from "./fields/block/pow-hash-block-resolver"; +import { eventsBlockResolver } from './fields/block/events-block-resolver'; +import { blockQueryResolver } from './query/block-query-resolver'; +import { Resolvers } from '../config/graphql-types'; +import { parentBlockResolver } from './fields/block/parent-block-resolver'; +import { minerAccountBlockResolver } from './fields/block/miner-account-block-resolver'; +import { transactionsBlockResolver } from './fields/block/transactions-block-resolver'; +import { transactionsQueryResolver } from './query/transactions-query-resolver'; +import { transfersQueryResolver } from './query/transfers-query-resolver'; +import { blockTransactionResultResolver } from './fields/transaction-result/block-transaction-result-resolver'; +import { transactionTransferResolver } from './fields/transfer/transaction-transfer-resolver'; +import { metaTransactionCommandResolver } from 
'./fields/transaction-command/meta-transaction-command-resolver'; +import { crossChainTransferTransferResolver } from './fields/transfer/cross-chain-transfer-transfer-resolver'; +import { blocksFromDepthQueryResolver } from './query/blocks-from-depth-query-resolver'; +import { blocksFromHeightQueryResolver } from './query/blocks-from-height-query-resolver'; +import { totalCountQueryTransfersConnectionResolver } from './fields/query-transfers-connection/total-count-query-transfers-connection-resolver'; +import { totalCountBlockEventsConnectionResolver } from './fields/block-events-connection/total-count-block-events-connection'; +import { transactionQueryResolver } from './query/transaction-query-resolver'; +import { transactionsByPublicKeyQueryResolver } from './query/transactions-by-public-key-query-resolver'; +import { totalCountQueryTransactionsByPublicKeyConnectionResolver } from './fields/query-transactions-by-public-key-connection/total-count-query-transactions-by-public-key-connection-resolver'; +import { eventsQueryResolver } from './query/events-query-resolver'; +import { totalCountQueryEventsConnectionResolver } from './fields/query-events-connection/total-count-query-events-connection-resolver'; +import { blockEventResolver } from './fields/event/block-event-resolver'; +import { totalCountBlockTransactionsConnectionResolver } from './fields/block-transactions-connection/total-count-block-transactions-connection-resolver'; +import { fungibleAccountQueryResolver } from './query/fungible-account-query-resolver'; +import { transactionsFungibleAccountResolver } from './fields/fungible-account/transactions-fungible-account-resolver'; +import { transfersFungibleAccountResolver } from './fields/fungible-account/transfers-fungible-account-resolver'; +import { graphConfigurationQueryResolver } from './query/graph-configuration-query-resolver'; +import { lastBlockHeightQueryResolver } from './query/last-block-height-query-resolver'; +import { networkInfoQueryResolver } from './query/network-info-query-resolver'; +import { chainAccountsFungibleAccountResolver } from './fields/fungible-account/chain-accounts-fungible-account-resolver'; +import { nodeQueryResolver } from './query/node-query-resolver'; +import { fungibleChainAccountsQueryResolver } from './query/fungible-chain-accounts-query-resolver'; +import { transactionsFungibleChainAccountResolver } from './fields/fungible-chain-account/transactions-fungible-chain-account-resolver'; +import { transfersFungibleChainAccountResolver } from './fields/fungible-chain-account/transfers-fungible-chain-account-resolver'; +import { totalCountFungibleAccountTransfersConnectionResolver } from './fields/fungible-account/transfers-connection/total-count-fungible-account-transfers-connection-resolver'; +import { totalCountFungibleAccountTransactionsConnectionResolver } from './fields/fungible-account/transactions-connection/total-count-fungible-account-transactions-connection-resolver'; +import { totalCountFungibleChainAccountTransfersConnectionResolver } from './fields/fungible-chain-account/transfers-connection/total-count-fungible-chain-account-transfers-connection-resolver'; +import { totalCountFungibleChainAccountTransactionsConnectionResolver } from './fields/fungible-chain-account/transactions-connection/total-count-fungible-chain-account-transactions-connection-resolver'; +import { fungibleAccountsByPublicKeyQueryResolver } from './query/fungible-accounts-by-public-key-query-resolver'; +import { transfersTransactionResultResolver } from 
'./fields/transaction-result/transfers-transaction-result-tesolver'; +import { totalCountTransactionResultTransfersConnectionResolver } from './fields/transaction-result/transfers-connection/total-count-transaction-result-transfers-connection-resolver'; +import { eventsTransactionResultResolver } from './fields/transaction-result/events-transaction-result-resolver'; +import { totalCountTransactionResultEventsConnectionResolver } from './fields/transaction-result/events-connection/total-count-transaction-result-events-connection-resolver'; +import { transactionEventResolver } from './fields/event/transaction-event-resolver'; +import { ResolverContext } from '../config/apollo-server-config'; +import { nodesQueryResolver } from './query/nodes-query-resolver'; +import { totalCountQueryTransactionsConnectionResolver } from './fields/query-transactions-connection/total-count-query-transactions-connection-resolver'; +import { transactionSubscriptionResolver } from './subscription/transaction-subscription-resolver'; +import { newBlocksSubscriptionResolver } from './subscription/new-blocks-subscription-resolver'; +import { eventsSubscriptionResolver } from './subscription/events-subscription-resolver'; +import { newBlocksFromDepthSubscriptionResolver } from './subscription/new-blocks-from-depth-subscription-resolver'; +import { blockTransferResolver } from './fields/transfer/block-transfer-resolver'; +import { DateTimeResolver } from 'graphql-scalars'; +import { fungibleChainAccountsByPublicKeyQueryResolver } from './query/fungible-chain-accounts-by-public-key-query-resolver'; +import { completedBlockHeightsQueryResolver } from './query/completed-block-heights-query-resolver'; +import { pactQueryResolver } from './query/pact-query-resolver'; +import { gasLimitEstimateQueryResolver } from './query/gas-limit-estimate-query-resolver'; +import { nonFungibleAccountQueryResolver } from './query/non-fungible-account-query-resolver'; +import { signersTransactionCommandResolver } from './fields/transaction-command/signers-transaction-command-resolver'; +import { transactionsNonFungibleAccountResolver } from './fields/non-fungible-account/transactions-non-fungible-account-resolver'; +import { nonFungibleChainAccountQueryResolver } from './query/non-fungible-chain-account-query-resolver'; +import { transactionsNonFungibleChainAccountResolver } from './fields/non-fungible-chain-account/transactions-non-fungible-chain-account'; +import { totalCountNonFungibleAccountTransactionsConnectionResolver } from './fields/non-fungible-account/transactions-connection/total-count-non-fungible-account-transactions-connection-resolver'; +import { totalCountNonFungibleChainAccountTransactionsConnectionResolver } from './fields/non-fungible-chain-account/transactions-connection/total-count-non-fungible-chain-account-transactions-connection-resolver'; +import { fungibleChainAccountQueryResolver } from './query/fungible-chain-account-query-resolver'; +import { powHashBlockResolver } from './fields/block/pow-hash-block-resolver'; export const resolvers: Resolvers = { DateTime: DateTimeResolver, @@ -83,8 +83,7 @@ export const resolvers: Resolvers = { fungibleAccountsByPublicKey: fungibleAccountsByPublicKeyQueryResolver, fungibleChainAccount: fungibleChainAccountQueryResolver, fungibleChainAccounts: fungibleChainAccountsQueryResolver, - fungibleChainAccountsByPublicKey: - fungibleChainAccountsByPublicKeyQueryResolver, + fungibleChainAccountsByPublicKey: fungibleChainAccountsByPublicKeyQueryResolver, gasLimitEstimate: 
gasLimitEstimateQueryResolver, graphConfiguration: graphConfigurationQueryResolver, lastBlockHeight: lastBlockHeightQueryResolver, @@ -184,31 +183,27 @@ export const resolvers: Resolvers = { Node: { __resolveType(obj: any) { if (obj.difficulty && obj.powHash) { - return "Block"; + return 'Block'; } if (obj.name && obj.qualifiedName) { - return "Event"; + return 'Event'; } if (obj.tokenId !== undefined && obj.version) { - return "NonFungibleTokenBalance"; + return 'NonFungibleTokenBalance'; } if (obj.chainId !== undefined && obj.nonFungibleTokenBalances) { - return "NonFungibleChainAccount"; + return 'NonFungibleChainAccount'; } if (obj.nonFungibleTokenBalances) { - return "NonFungibleAccount"; + return 'NonFungibleAccount'; } - if ( - obj.accountName && - obj.totalBalance !== undefined && - obj.totalBalance !== null - ) { - return "FungibleAccount"; + if (obj.accountName && obj.totalBalance !== undefined && obj.totalBalance !== null) { + return 'FungibleAccount'; } if ( @@ -217,22 +212,19 @@ export const resolvers: Resolvers = { obj.balance !== undefined && obj.balance !== null ) { - return "FungibleChainAccount"; + return 'FungibleChainAccount'; } if (obj.pubkey) { - return "Signer"; + return 'Signer'; } if (obj.cmd && obj.result) { - return "Transaction"; + return 'Transaction'; } - if ( - obj.senderAccount !== undefined && - obj.receiverAccount !== undefined - ) { - return "Transfer"; + if (obj.senderAccount !== undefined && obj.receiverAccount !== undefined) { + return 'Transfer'; } return null; @@ -243,26 +235,26 @@ export const resolvers: Resolvers = { // if (obj.status) { // return "TransactionMempoolInfo"; // } - return "TransactionResult"; + return 'TransactionResult'; }, }, TransactionPayload: { __resolveType: (obj: any) => { if (obj.code) { - return "ExecutionPayload"; + return 'ExecutionPayload'; } - return "ContinuationPayload"; + return 'ContinuationPayload'; }, }, IGuard: { __resolveType: (obj: any) => { if (obj.fun) { - return "UserGuard"; + return 'UserGuard'; } if (obj.keys?.length) { - return "KeysetGuard"; + return 'KeysetGuard'; } - return "RawGuard"; + return 'RawGuard'; }, }, }; diff --git a/indexer/src/kadena-server/resolvers/node-utils.ts b/indexer/src/kadena-server/resolvers/node-utils.ts index ddee736e..0e9111b0 100644 --- a/indexer/src/kadena-server/resolvers/node-utils.ts +++ b/indexer/src/kadena-server/resolvers/node-utils.ts @@ -1,25 +1,25 @@ -import { ResolverContext } from "../config/apollo-server-config"; -import { buildBlockOutput } from "./output/build-block-output"; -import { buildEventOutput } from "./output/build-event-output"; -import { buildFungibleAccount } from "./output/build-fungible-account-output"; -import { buildFungibleChainAccount } from "./output/build-fungible-chain-account-output"; -import { buildNonFungibleAccount } from "./output/build-non-fungible-account-output"; -import { buildNonFungibleChainAccount } from "./output/build-non-fungible-chain-account-output"; -import { buildTransactionOutput } from "./output/build-transaction-output"; -import { buildTransferOutput } from "./output/build-transfer-output"; +import { ResolverContext } from '../config/apollo-server-config'; +import { buildBlockOutput } from './output/build-block-output'; +import { buildEventOutput } from './output/build-event-output'; +import { buildFungibleAccount } from './output/build-fungible-account-output'; +import { buildFungibleChainAccount } from './output/build-fungible-chain-account-output'; +import { buildNonFungibleAccount } from 
'./output/build-non-fungible-account-output'; +import { buildNonFungibleChainAccount } from './output/build-non-fungible-chain-account-output'; +import { buildTransactionOutput } from './output/build-transaction-output'; +import { buildTransferOutput } from './output/build-transfer-output'; export const getNode = async (context: ResolverContext, id: string) => { - const decodedString = Buffer.from(id, "base64").toString("utf-8"); + const decodedString = Buffer.from(id, 'base64').toString('utf-8'); const [type, params] = decodedString.split(/:(.+)/); - if (type === "Block") { + if (type === 'Block') { const output = await context.blockRepository.getBlockByHash(params); return buildBlockOutput(output); } - if (type === "Event") { + if (type === 'Event') { const [blockHash, orderIndex, requestKey] = JSON.parse(params); const output = await context.eventRepository.getEvent({ hash: blockHash, @@ -30,14 +30,13 @@ export const getNode = async (context: ResolverContext, id: string) => { return buildEventOutput(output); } - if (type === "FungibleAccount") { + if (type === 'FungibleAccount') { const [_fungible, accountName] = JSON.parse(params); - const output = - await context.balanceRepository.getAccountInfo_NODE(accountName); + const output = await context.balanceRepository.getAccountInfo_NODE(accountName); return buildFungibleAccount(output); } - if (type === "FungibleChainAccount") { + if (type === 'FungibleChainAccount') { const [chainId, fungibleName, accountName] = JSON.parse(params); const output = await context.balanceRepository.getChainsAccountInfo_NODE( accountName, @@ -47,24 +46,22 @@ export const getNode = async (context: ResolverContext, id: string) => { return buildFungibleChainAccount(output[0]); } - if (type === "Transaction") { + if (type === 'Transaction') { const [blockHash, requestKey] = JSON.parse(params); - const output = - await context.transactionRepository.getTransactionsByRequestKey({ - requestKey, - blockHash, - }); + const output = await context.transactionRepository.getTransactionsByRequestKey({ + requestKey, + blockHash, + }); - const outputs = output.map((t) => buildTransactionOutput(t)); + const outputs = output.map(t => buildTransactionOutput(t)); return { ...outputs[0], orphanedTransactions: outputs.slice(1), }; } - if (type === "Transfer") { - const [blockHash, chainId, orderIndex, moduleHash, requestKey] = - JSON.parse(params); + if (type === 'Transfer') { + const [blockHash, chainId, orderIndex, moduleHash, requestKey] = JSON.parse(params); const output = await context.transferRepository.getTransfers({ blockHash, chainId, @@ -77,54 +74,41 @@ export const getNode = async (context: ResolverContext, id: string) => { return buildTransferOutput(output.edges[0].node); } - if (type === "Signer") { + if (type === 'Signer') { const [requestKey, orderIndex] = JSON.parse(params); - const [output] = await context.transactionRepository.getSigners( - requestKey, - orderIndex, - ); + const [output] = await context.transactionRepository.getSigners(requestKey, orderIndex); return output; } - if (type === "NonFungibleAccount") { - const account = - await context.balanceRepository.getNonFungibleAccountInfo(params); - const nftsInfoParams = (account?.nonFungibleTokenBalances ?? []).map( - (n) => ({ - tokenId: n.tokenId, - chainId: n.chainId, - }), - ); + if (type === 'NonFungibleAccount') { + const account = await context.balanceRepository.getNonFungibleAccountInfo(params); + const nftsInfoParams = (account?.nonFungibleTokenBalances ?? 
[]).map(n => ({ + tokenId: n.tokenId, + chainId: n.chainId, + })); - const nftsInfo = await context.pactGateway.getNftsInfo( - nftsInfoParams ?? [], - ); + const nftsInfo = await context.pactGateway.getNftsInfo(nftsInfoParams ?? []); const output = buildNonFungibleAccount(account, nftsInfo); return output; } - if (type === "NonFungibleChainAccount") { + if (type === 'NonFungibleChainAccount') { const [chainId, accountName] = JSON.parse(params); - const account = - await context.balanceRepository.getNonFungibleChainAccountInfo( - accountName, - chainId, - ); - - const nftsInfoParams = (account?.nonFungibleTokenBalances ?? []).map( - (n) => ({ - tokenId: n.tokenId, - chainId: n.chainId, - }), + const account = await context.balanceRepository.getNonFungibleChainAccountInfo( + accountName, + chainId, ); - const nftsInfo = await context.pactGateway.getNftsInfo( - nftsInfoParams ?? [], - ); + const nftsInfoParams = (account?.nonFungibleTokenBalances ?? []).map(n => ({ + tokenId: n.tokenId, + chainId: n.chainId, + })); + + const nftsInfo = await context.pactGateway.getNftsInfo(nftsInfoParams ?? []); return buildNonFungibleChainAccount(account, nftsInfo); } - if (type === "NonFungibleTokenBalance") { + if (type === 'NonFungibleTokenBalance') { const [tokenId, accountName, chainId] = JSON.parse(params); const account = await context.balanceRepository.getNonFungibleTokenBalance( accountName, @@ -136,9 +120,7 @@ export const getNode = async (context: ResolverContext, id: string) => { const nftsInfoParams = [{ tokenId, chainId }]; - const [nftsInfo] = await context.pactGateway.getNftsInfo( - nftsInfoParams ?? [], - ); + const [nftsInfo] = await context.pactGateway.getNftsInfo(nftsInfoParams ?? []); return { id: account.id, @@ -150,8 +132,8 @@ export const getNode = async (context: ResolverContext, id: string) => { // TODO guard: { keys: [], - predicate: "", - raw: JSON.stringify("{}"), + predicate: '', + raw: JSON.stringify('{}'), }, info: { precision: nftsInfo.precision, diff --git a/indexer/src/kadena-server/resolvers/output/build-block-output.ts b/indexer/src/kadena-server/resolvers/output/build-block-output.ts index d2e0499c..ff9457ac 100644 --- a/indexer/src/kadena-server/resolvers/output/build-block-output.ts +++ b/indexer/src/kadena-server/resolvers/output/build-block-output.ts @@ -3,8 +3,8 @@ import { BlockEventsConnection, BlockTransactionsConnection, FungibleChainAccount, -} from "../../config/graphql-types"; -import { BlockOutput } from "../../repository/application/block-repository"; +} from '../../config/graphql-types'; +import { BlockOutput } from '../../repository/application/block-repository'; export const buildBlockOutput = (output: BlockOutput) => { return { diff --git a/indexer/src/kadena-server/resolvers/output/build-event-output.ts b/indexer/src/kadena-server/resolvers/output/build-event-output.ts index a4cf7b88..9339b31c 100644 --- a/indexer/src/kadena-server/resolvers/output/build-event-output.ts +++ b/indexer/src/kadena-server/resolvers/output/build-event-output.ts @@ -1,5 +1,5 @@ -import { Block } from "../../config/graphql-types"; -import { EventOutput } from "../../repository/application/event-repository"; +import { Block } from '../../config/graphql-types'; +import { EventOutput } from '../../repository/application/event-repository'; export const buildEventOutput = (event: EventOutput) => { return { diff --git a/indexer/src/kadena-server/resolvers/output/build-fungible-account-output.ts b/indexer/src/kadena-server/resolvers/output/build-fungible-account-output.ts index 
a7d40371..657ba42c 100644 --- a/indexer/src/kadena-server/resolvers/output/build-fungible-account-output.ts +++ b/indexer/src/kadena-server/resolvers/output/build-fungible-account-output.ts @@ -2,8 +2,8 @@ import { FungibleAccountTransactionsConnection, FungibleAccountTransfersConnection, FungibleChainAccount, -} from "../../config/graphql-types"; -import { FungibleAccountOutput } from "../../repository/application/balance-repository"; +} from '../../config/graphql-types'; +import { FungibleAccountOutput } from '../../repository/application/balance-repository'; export const buildFungibleAccount = (account: FungibleAccountOutput) => { return { diff --git a/indexer/src/kadena-server/resolvers/output/build-fungible-chain-account-output.ts b/indexer/src/kadena-server/resolvers/output/build-fungible-chain-account-output.ts index c767ad6a..57e1141d 100644 --- a/indexer/src/kadena-server/resolvers/output/build-fungible-chain-account-output.ts +++ b/indexer/src/kadena-server/resolvers/output/build-fungible-chain-account-output.ts @@ -1,8 +1,8 @@ import { FungibleChainAccountTransactionsConnection, FungibleChainAccountTransfersConnection, -} from "../../config/graphql-types"; -import { FungibleChainAccountOutput } from "../../repository/application/block-repository"; +} from '../../config/graphql-types'; +import { FungibleChainAccountOutput } from '../../repository/application/block-repository'; export const buildFungibleChainAccount = (acc: FungibleChainAccountOutput) => { return { diff --git a/indexer/src/kadena-server/resolvers/output/build-non-fungible-account-output.ts b/indexer/src/kadena-server/resolvers/output/build-non-fungible-account-output.ts index 8ff1cc68..c768ec89 100644 --- a/indexer/src/kadena-server/resolvers/output/build-non-fungible-account-output.ts +++ b/indexer/src/kadena-server/resolvers/output/build-non-fungible-account-output.ts @@ -1,9 +1,9 @@ import { NonFungibleAccount, NonFungibleAccountTransactionsConnection, -} from "../../config/graphql-types"; -import { INonFungibleAccount } from "../../repository/application/balance-repository"; -import { NftInfo } from "../../repository/gateway/pact-gateway"; +} from '../../config/graphql-types'; +import { INonFungibleAccount } from '../../repository/application/balance-repository'; +import { NftInfo } from '../../repository/gateway/pact-gateway'; export const buildNonFungibleAccount = ( acc: INonFungibleAccount | null, @@ -21,8 +21,8 @@ export const buildNonFungibleAccount = ( // TODO guard: { keys: [], - predicate: "", - raw: JSON.stringify("{}"), + predicate: '', + raw: JSON.stringify('{}'), }, info: { precision: nft.precision, diff --git a/indexer/src/kadena-server/resolvers/output/build-non-fungible-chain-account-output.ts b/indexer/src/kadena-server/resolvers/output/build-non-fungible-chain-account-output.ts index 48bf0673..78d1dda8 100644 --- a/indexer/src/kadena-server/resolvers/output/build-non-fungible-chain-account-output.ts +++ b/indexer/src/kadena-server/resolvers/output/build-non-fungible-chain-account-output.ts @@ -1,6 +1,6 @@ -import { NonFungibleChainAccountTransactionsConnection } from "../../config/graphql-types"; -import { INonFungibleChainAccount } from "../../repository/application/balance-repository"; -import { NftInfo } from "../../repository/gateway/pact-gateway"; +import { NonFungibleChainAccountTransactionsConnection } from '../../config/graphql-types'; +import { INonFungibleChainAccount } from '../../repository/application/balance-repository'; +import { NftInfo } from 
'../../repository/gateway/pact-gateway'; export const buildNonFungibleChainAccount = ( acc: INonFungibleChainAccount | null, @@ -18,8 +18,8 @@ export const buildNonFungibleChainAccount = ( // TODO guard: { keys: [], - predicate: "", - raw: JSON.stringify("{}"), + predicate: '', + raw: JSON.stringify('{}'), }, info: { precision: nft.precision, diff --git a/indexer/src/kadena-server/resolvers/output/build-transaction-output.ts b/indexer/src/kadena-server/resolvers/output/build-transaction-output.ts index bdc3b9f7..1e84ee81 100644 --- a/indexer/src/kadena-server/resolvers/output/build-transaction-output.ts +++ b/indexer/src/kadena-server/resolvers/output/build-transaction-output.ts @@ -1,5 +1,5 @@ -import { Signer, TransactionMeta } from "../../config/graphql-types"; -import { TransactionOutput } from "../../repository/application/transaction-repository"; +import { Signer, TransactionMeta } from '../../config/graphql-types'; +import { TransactionOutput } from '../../repository/application/transaction-repository'; export const buildTransactionOutput = (tx: TransactionOutput) => { return { diff --git a/indexer/src/kadena-server/resolvers/output/build-transfer-output.ts b/indexer/src/kadena-server/resolvers/output/build-transfer-output.ts index 5cf2ffed..616b29d2 100644 --- a/indexer/src/kadena-server/resolvers/output/build-transfer-output.ts +++ b/indexer/src/kadena-server/resolvers/output/build-transfer-output.ts @@ -1,5 +1,5 @@ -import { Block, Transaction, Transfer } from "../../config/graphql-types"; -import { TransferOutput } from "../../repository/application/transfer-repository"; +import { Block, Transaction, Transfer } from '../../config/graphql-types'; +import { TransferOutput } from '../../repository/application/transfer-repository'; export const buildTransferOutput = (transfer: TransferOutput) => { return { diff --git a/indexer/src/kadena-server/resolvers/query/block-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/block-query-resolver.ts index e5586985..5936a41d 100644 --- a/indexer/src/kadena-server/resolvers/query/block-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/block-query-resolver.ts @@ -1,12 +1,15 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { Block, QueryResolvers } from "../../config/graphql-types"; -import { buildBlockOutput } from "../output/build-block-output"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { Block, QueryResolvers } from '../../config/graphql-types'; +import { buildBlockOutput } from '../output/build-block-output'; -export const blockQueryResolver: QueryResolvers["block"] = - async (_parent, args, context): Promise => { - console.log("blockQueryResolver"); - const { hash } = args; - const output = await context.blockRepository.getBlockByHash(hash); +export const blockQueryResolver: QueryResolvers['block'] = async ( + _parent, + args, + context, +): Promise => { + console.log('blockQueryResolver'); + const { hash } = args; + const output = await context.blockRepository.getBlockByHash(hash); - return buildBlockOutput(output); - }; + return buildBlockOutput(output); +}; diff --git a/indexer/src/kadena-server/resolvers/query/blocks-from-depth-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/blocks-from-depth-query-resolver.ts index fade2bcf..cfc6aaa1 100644 --- a/indexer/src/kadena-server/resolvers/query/blocks-from-depth-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/blocks-from-depth-query-resolver.ts @@ -1,10 +1,10 @@ 
-import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { buildBlockOutput } from "../output/build-block-output"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { buildBlockOutput } from '../output/build-block-output'; -export const blocksFromDepthQueryResolver: QueryResolvers["blocksFromDepth"] = +export const blocksFromDepthQueryResolver: QueryResolvers['blocksFromDepth'] = async (_parent, args, context) => { - console.log("blocksFromDepthQueryResolver", args); + console.log('blocksFromDepthQueryResolver', args); const { minimumDepth, after, before, chainIds, first, last } = args; const output = await context.blockRepository.getBlocksFromDepth({ minimumDepth, @@ -15,7 +15,7 @@ export const blocksFromDepthQueryResolver: QueryResolvers["bloc chainIds, }); - const edges = output.edges.map((e) => ({ + const edges = output.edges.map(e => ({ cursor: e.cursor, node: buildBlockOutput(e.node), })); diff --git a/indexer/src/kadena-server/resolvers/query/blocks-from-height-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/blocks-from-height-query-resolver.ts index 19a3b81d..70ead182 100644 --- a/indexer/src/kadena-server/resolvers/query/blocks-from-height-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/blocks-from-height-query-resolver.ts @@ -1,13 +1,13 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { buildBlockOutput } from "../output/build-block-output"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { buildBlockOutput } from '../output/build-block-output'; -export const blocksFromHeightQueryResolver: QueryResolvers["blocksFromHeight"] = +export const blocksFromHeightQueryResolver: QueryResolvers['blocksFromHeight'] = async (_parent, args, context) => { - console.log("blocksFromHeightQueryResolver"); + console.log('blocksFromHeightQueryResolver'); const output = await context.blockRepository.getBlocksBetweenHeights(args); - const edges = output.edges.map((e) => ({ + const edges = output.edges.map(e => ({ cursor: e.cursor, node: buildBlockOutput(e.node), })); diff --git a/indexer/src/kadena-server/resolvers/query/completed-block-heights-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/completed-block-heights-query-resolver.ts index d59adb35..2e1b667b 100644 --- a/indexer/src/kadena-server/resolvers/query/completed-block-heights-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/completed-block-heights-query-resolver.ts @@ -1,19 +1,11 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { buildBlockOutput } from "../output/build-block-output"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { buildBlockOutput } from '../output/build-block-output'; -export const completedBlockHeightsQueryResolver: QueryResolvers["completedBlockHeights"] = +export const completedBlockHeightsQueryResolver: QueryResolvers['completedBlockHeights'] = async (_parent, args, context) => { - console.log("completedBlockHeightsQueryResolver"); - const { - completedHeights, - heightCount, - chainIds, - first, - after, - before, - 
last, - } = args; + console.log('completedBlockHeightsQueryResolver'); + const { completedHeights, heightCount, chainIds, first, after, before, last } = args; const output = await context.blockRepository.getCompletedBlocks({ completedHeights, heightCount, @@ -24,7 +16,7 @@ export const completedBlockHeightsQueryResolver: QueryResolvers last, }); - const edges = output.edges.map((e) => ({ + const edges = output.edges.map(e => ({ cursor: e.cursor, node: buildBlockOutput(e.node), })); diff --git a/indexer/src/kadena-server/resolvers/query/events-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/events-query-resolver.ts index 6272d966..95117122 100644 --- a/indexer/src/kadena-server/resolvers/query/events-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/events-query-resolver.ts @@ -1,53 +1,56 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { buildEventOutput } from "../output/build-event-output"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { buildEventOutput } from '../output/build-event-output'; -export const eventsQueryResolver: QueryResolvers["events"] = - async (_parent, args, context) => { - console.log("eventsQueryResolver"); - const { - after, - first, - last, - before, - blockHash, - chainId, - maxHeight, - minHeight, - minimumDepth, - requestKey, - qualifiedEventName, - } = args; - const output = await context.eventRepository.getEventsWithQualifiedName({ - qualifiedEventName, - after, - before, - first, - last, - blockHash, - chainId, - maxHeight, - minHeight, - minimumDepth, - requestKey, - }); +export const eventsQueryResolver: QueryResolvers['events'] = async ( + _parent, + args, + context, +) => { + console.log('eventsQueryResolver'); + const { + after, + first, + last, + before, + blockHash, + chainId, + maxHeight, + minHeight, + minimumDepth, + requestKey, + qualifiedEventName, + } = args; + const output = await context.eventRepository.getEventsWithQualifiedName({ + qualifiedEventName, + after, + before, + first, + last, + blockHash, + chainId, + maxHeight, + minHeight, + minimumDepth, + requestKey, + }); - const edges = output.edges.map((e) => ({ - cursor: e.cursor, - node: buildEventOutput(e.node), - })); + const edges = output.edges.map(e => ({ + cursor: e.cursor, + node: buildEventOutput(e.node), + })); - return { - edges, - pageInfo: output.pageInfo, - // for resolvers - totalCount: -1, - blockHash, - chainId, - maxHeight, - minHeight, - minimumDepth, - requestKey, - qualifiedEventName, - }; + return { + edges, + pageInfo: output.pageInfo, + // for resolvers + totalCount: -1, + blockHash, + chainId, + maxHeight, + minHeight, + minimumDepth, + requestKey, + qualifiedEventName, }; +}; diff --git a/indexer/src/kadena-server/resolvers/query/fungible-account-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/fungible-account-query-resolver.ts index 1ba6e6da..6fa677d4 100644 --- a/indexer/src/kadena-server/resolvers/query/fungible-account-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/fungible-account-query-resolver.ts @@ -1,10 +1,10 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { buildFungibleAccount } from "../output/build-fungible-account-output"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { 
QueryResolvers } from '../../config/graphql-types'; +import { buildFungibleAccount } from '../output/build-fungible-account-output'; -export const fungibleAccountQueryResolver: QueryResolvers["fungibleAccount"] = +export const fungibleAccountQueryResolver: QueryResolvers['fungibleAccount'] = async (_parent, args, context) => { - console.log("fungibleAccountQueryResolver"); + console.log('fungibleAccountQueryResolver'); const account = await context.balanceRepository.getAccountInfo_NODE( args.accountName, args.fungibleName, diff --git a/indexer/src/kadena-server/resolvers/query/fungible-accounts-by-public-key-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/fungible-accounts-by-public-key-query-resolver.ts index 8c70ea89..5aea7d2d 100644 --- a/indexer/src/kadena-server/resolvers/query/fungible-accounts-by-public-key-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/fungible-accounts-by-public-key-query-resolver.ts @@ -1,18 +1,17 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { buildFungibleAccount } from "../output/build-fungible-account-output"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { buildFungibleAccount } from '../output/build-fungible-account-output'; -export const fungibleAccountsByPublicKeyQueryResolver: QueryResolvers["fungibleAccountsByPublicKey"] = +export const fungibleAccountsByPublicKeyQueryResolver: QueryResolvers['fungibleAccountsByPublicKey'] = async (_parent, args, context) => { - console.log("fungibleAccountsByPublicKeyQueryResolver"); + console.log('fungibleAccountsByPublicKeyQueryResolver'); const { publicKey, fungibleName } = args; - const accounts = - await context.balanceRepository.getAccountsByPublicKey_NODE( - publicKey, - fungibleName, - ); + const accounts = await context.balanceRepository.getAccountsByPublicKey_NODE( + publicKey, + fungibleName, + ); - const output = accounts.map((acc) => buildFungibleAccount(acc)); + const output = accounts.map(acc => buildFungibleAccount(acc)); return output; }; diff --git a/indexer/src/kadena-server/resolvers/query/fungible-chain-account-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/fungible-chain-account-query-resolver.ts index 4bc4d65b..0de32248 100644 --- a/indexer/src/kadena-server/resolvers/query/fungible-chain-account-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/fungible-chain-account-query-resolver.ts @@ -1,11 +1,11 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { buildFungibleChainAccount } from "../output/build-fungible-chain-account-output"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { buildFungibleChainAccount } from '../output/build-fungible-chain-account-output'; -export const fungibleChainAccountQueryResolver: QueryResolvers["fungibleChainAccount"] = +export const fungibleChainAccountQueryResolver: QueryResolvers['fungibleChainAccount'] = async (_parent, args, context) => { const { accountName, chainId, fungibleName } = args; - console.log("fungibleChainAccountQueryResolver"); + console.log('fungibleChainAccountQueryResolver'); const [account] = await context.balanceRepository.getChainsAccountInfo_NODE( accountName, fungibleName, diff --git 
a/indexer/src/kadena-server/resolvers/query/fungible-chain-accounts-by-public-key-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/fungible-chain-accounts-by-public-key-query-resolver.ts index 33acf031..14b70c3e 100644 --- a/indexer/src/kadena-server/resolvers/query/fungible-chain-accounts-by-public-key-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/fungible-chain-accounts-by-public-key-query-resolver.ts @@ -1,19 +1,18 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { buildFungibleChainAccount } from "../output/build-fungible-chain-account-output"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { buildFungibleChainAccount } from '../output/build-fungible-chain-account-output'; -export const fungibleChainAccountsByPublicKeyQueryResolver: QueryResolvers["fungibleChainAccountsByPublicKey"] = +export const fungibleChainAccountsByPublicKeyQueryResolver: QueryResolvers['fungibleChainAccountsByPublicKey'] = async (_parent, args, context) => { - console.log("fungibleChainAccountsByPublicKeyQueryResolver"); + console.log('fungibleChainAccountsByPublicKeyQueryResolver'); const { publicKey, fungibleName, chainId } = args; - const output = - await context.balanceRepository.getChainAccountsByPublicKey_NODE( - publicKey, - fungibleName, - chainId, - ); + const output = await context.balanceRepository.getChainAccountsByPublicKey_NODE( + publicKey, + fungibleName, + chainId, + ); - const res = output.map((acc) => buildFungibleChainAccount(acc)); + const res = output.map(acc => buildFungibleChainAccount(acc)); return res; }; diff --git a/indexer/src/kadena-server/resolvers/query/fungible-chain-accounts-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/fungible-chain-accounts-query-resolver.ts index 96801e8d..1c7f7842 100644 --- a/indexer/src/kadena-server/resolvers/query/fungible-chain-accounts-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/fungible-chain-accounts-query-resolver.ts @@ -1,17 +1,17 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { buildFungibleChainAccount } from "../output/build-fungible-chain-account-output"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { buildFungibleChainAccount } from '../output/build-fungible-chain-account-output'; -export const fungibleChainAccountsQueryResolver: QueryResolvers["fungibleChainAccounts"] = +export const fungibleChainAccountsQueryResolver: QueryResolvers['fungibleChainAccounts'] = async (_parent, args, context) => { const { accountName, chainIds, fungibleName } = args; - console.log("fungibleChainAccountsQueryResolver"); + console.log('fungibleChainAccountsQueryResolver'); const accounts = await context.balanceRepository.getChainsAccountInfo_NODE( accountName, fungibleName, - chainIds?.map((c) => c.toString()), + chainIds?.map(c => c.toString()), ); - const output = accounts.map((r) => buildFungibleChainAccount(r)); + const output = accounts.map(r => buildFungibleChainAccount(r)); return output; }; diff --git a/indexer/src/kadena-server/resolvers/query/gas-limit-estimate-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/gas-limit-estimate-query-resolver.ts index f718c0ba..73f16351 100644 --- 
a/indexer/src/kadena-server/resolvers/query/gas-limit-estimate-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/gas-limit-estimate-query-resolver.ts @@ -1,29 +1,23 @@ -import { getRequiredEnvString } from "../../../utils/helpers"; -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { determineInputType } from "../../domain/gas/input-checker.gas"; -import { parseInput } from "../../domain/gas/parser.gas"; -import { buildTransactionPayload } from "../../domain/gas/transaction.gas"; +import { getRequiredEnvString } from '../../../utils/helpers'; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { determineInputType } from '../../domain/gas/input-checker.gas'; +import { parseInput } from '../../domain/gas/parser.gas'; +import { buildTransactionPayload } from '../../domain/gas/transaction.gas'; -const NETWORK_ID = getRequiredEnvString("SYNC_NETWORK"); +const NETWORK_ID = getRequiredEnvString('SYNC_NETWORK'); -export const gasLimitEstimateQueryResolver: QueryResolvers["gasLimitEstimate"] = +export const gasLimitEstimateQueryResolver: QueryResolvers['gasLimitEstimate'] = async (_parent, args, context) => { - console.log("gasLimitEstimateQueryResolver"); + console.log('gasLimitEstimateQueryResolver'); const res = await Promise.all( - args.input.map((input) => { + args.input.map(input => { const parsedInput = parseInput(input); const userInput = determineInputType(parsedInput); - const networkId = userInput.networkId - ? userInput.networkId - : NETWORK_ID; + const networkId = userInput.networkId ? userInput.networkId : NETWORK_ID; const transaction = buildTransactionPayload(userInput, networkId); - return context.gasGateway.estimateGas( - userInput, - transaction, - networkId, - ); + return context.gasGateway.estimateGas(userInput, transaction, networkId); }), ); diff --git a/indexer/src/kadena-server/resolvers/query/graph-configuration-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/graph-configuration-query-resolver.ts index 68dea98b..f5e82379 100644 --- a/indexer/src/kadena-server/resolvers/query/graph-configuration-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/graph-configuration-query-resolver.ts @@ -1,15 +1,14 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; -export const graphConfigurationQueryResolver: QueryResolvers["graphConfiguration"] = +export const graphConfigurationQueryResolver: QueryResolvers['graphConfiguration'] = async (_args, _parent, context) => { - console.log("graphConfigurationQueryResolver"); + console.log('graphConfigurationQueryResolver'); - const minimumBlockHeight = - await context.blockRepository.getLowestBlockHeight(); + const minimumBlockHeight = await context.blockRepository.getLowestBlockHeight(); return { minimumBlockHeight, - version: "0.1.0", + version: '0.1.0', }; }; diff --git a/indexer/src/kadena-server/resolvers/query/last-block-height-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/last-block-height-query-resolver.ts index 1ab4055e..02a5a0f6 100644 --- a/indexer/src/kadena-server/resolvers/query/last-block-height-query-resolver.ts +++ 
b/indexer/src/kadena-server/resolvers/query/last-block-height-query-resolver.ts @@ -1,9 +1,9 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; -export const lastBlockHeightQueryResolver: QueryResolvers["lastBlockHeight"] = +export const lastBlockHeightQueryResolver: QueryResolvers['lastBlockHeight'] = async (_args, _parent, context) => { - console.log("lastBlockHeightQueryResolver"); + console.log('lastBlockHeightQueryResolver'); const lastBlockHeight = await context.blockRepository.getLastBlockHeight(); diff --git a/indexer/src/kadena-server/resolvers/query/network-info-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/network-info-query-resolver.ts index a5020ce3..35300a2b 100644 --- a/indexer/src/kadena-server/resolvers/query/network-info-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/network-info-query-resolver.ts @@ -1,9 +1,12 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; -export const networkInfoQueryResolver: QueryResolvers["networkInfo"] = - async (_args, _parent, context) => { - console.log("networkInfoQueryResolver"); - const output = await context.networkRepository.getAllInfo(); - return output; - }; +export const networkInfoQueryResolver: QueryResolvers['networkInfo'] = async ( + _args, + _parent, + context, +) => { + console.log('networkInfoQueryResolver'); + const output = await context.networkRepository.getAllInfo(); + return output; +}; diff --git a/indexer/src/kadena-server/resolvers/query/node-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/node-query-resolver.ts index ceba76f3..13bc1695 100644 --- a/indexer/src/kadena-server/resolvers/query/node-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/node-query-resolver.ts @@ -1,9 +1,12 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { getNode } from "../node-utils"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { getNode } from '../node-utils'; -export const nodeQueryResolver: QueryResolvers["node"] = - async (_args, parent, context) => { - console.log("nodeQueryResolver"); - return getNode(context, parent.id); - }; +export const nodeQueryResolver: QueryResolvers['node'] = async ( + _args, + parent, + context, +) => { + console.log('nodeQueryResolver'); + return getNode(context, parent.id); +}; diff --git a/indexer/src/kadena-server/resolvers/query/nodes-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/nodes-query-resolver.ts index 55c632c6..1256b633 100644 --- a/indexer/src/kadena-server/resolvers/query/nodes-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/nodes-query-resolver.ts @@ -1,11 +1,12 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { getNode } from "../node-utils"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { getNode } 
from '../node-utils'; -export const nodesQueryResolver: QueryResolvers["nodes"] = - async (_args, parent, context) => { - const nodes = await Promise.all( - parent.ids.map((id) => getNode(context, id)) - ); - return nodes; - }; +export const nodesQueryResolver: QueryResolvers['nodes'] = async ( + _args, + parent, + context, +) => { + const nodes = await Promise.all(parent.ids.map(id => getNode(context, id))); + return nodes; +}; diff --git a/indexer/src/kadena-server/resolvers/query/non-fungible-account-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/non-fungible-account-query-resolver.ts index 8acaaac6..f6a23ffa 100644 --- a/indexer/src/kadena-server/resolvers/query/non-fungible-account-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/non-fungible-account-query-resolver.ts @@ -1,16 +1,14 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { buildNonFungibleAccount } from "../output/build-non-fungible-account-output"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { buildNonFungibleAccount } from '../output/build-non-fungible-account-output'; -export const nonFungibleAccountQueryResolver: QueryResolvers["nonFungibleAccount"] = +export const nonFungibleAccountQueryResolver: QueryResolvers['nonFungibleAccount'] = async (_parent, args, context) => { - console.log("nonFungibleAccountQueryResolver"); + console.log('nonFungibleAccountQueryResolver'); - const account = await context.balanceRepository.getNonFungibleAccountInfo( - args.accountName, - ); + const account = await context.balanceRepository.getNonFungibleAccountInfo(args.accountName); - const params = (account?.nonFungibleTokenBalances ?? []).map((n) => ({ + const params = (account?.nonFungibleTokenBalances ?? []).map(n => ({ tokenId: n.tokenId, chainId: n.chainId, })); diff --git a/indexer/src/kadena-server/resolvers/query/non-fungible-chain-account-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/non-fungible-chain-account-query-resolver.ts index 412040f1..8a5752e0 100644 --- a/indexer/src/kadena-server/resolvers/query/non-fungible-chain-account-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/non-fungible-chain-account-query-resolver.ts @@ -1,18 +1,17 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { buildNonFungibleChainAccount } from "../output/build-non-fungible-chain-account-output"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { buildNonFungibleChainAccount } from '../output/build-non-fungible-chain-account-output'; -export const nonFungibleChainAccountQueryResolver: QueryResolvers["nonFungibleChainAccount"] = +export const nonFungibleChainAccountQueryResolver: QueryResolvers['nonFungibleChainAccount'] = async (_parent, args, context) => { - console.log("nonFungibleChainAccountQueryResolver"); + console.log('nonFungibleChainAccountQueryResolver'); - const account = - await context.balanceRepository.getNonFungibleChainAccountInfo( - args.accountName, - args.chainId, - ); + const account = await context.balanceRepository.getNonFungibleChainAccountInfo( + args.accountName, + args.chainId, + ); - const params = (account?.nonFungibleTokenBalances ?? 
[]).map((n) => ({ + const params = (account?.nonFungibleTokenBalances ?? []).map(n => ({ tokenId: n.tokenId, chainId: n.chainId, })); diff --git a/indexer/src/kadena-server/resolvers/query/pact-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/pact-query-resolver.ts index 279cf7fa..124b6dd9 100644 --- a/indexer/src/kadena-server/resolvers/query/pact-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/pact-query-resolver.ts @@ -1,12 +1,14 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { handleSingleQuery } from "../../utils/raw-query"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { handleSingleQuery } from '../../utils/raw-query'; -export const pactQueryResolver: QueryResolvers["pactQuery"] = - async (_parent, args) => { - console.log("pactQueryResolver"); +export const pactQueryResolver: QueryResolvers['pactQuery'] = async ( + _parent, + args, +) => { + console.log('pactQueryResolver'); - const res = await Promise.all(args.pactQuery.map(handleSingleQuery)); + const res = await Promise.all(args.pactQuery.map(handleSingleQuery)); - return res; - }; + return res; +}; diff --git a/indexer/src/kadena-server/resolvers/query/transaction-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/transaction-query-resolver.ts index c5b48aed..cffcbfe7 100644 --- a/indexer/src/kadena-server/resolvers/query/transaction-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/transaction-query-resolver.ts @@ -1,27 +1,27 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { buildTransactionOutput } from "../output/build-transaction-output"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { buildTransactionOutput } from '../output/build-transaction-output'; -export const transactionQueryResolver: QueryResolvers["transaction"] = - async (_parent, args, context) => { - console.log("transactionQueryResolver"); - const { requestKey, blockHash, minimumDepth } = args; - const transactions = - await context.transactionRepository.getTransactionsByRequestKey({ - requestKey, - blockHash, - minimumDepth, - }); +export const transactionQueryResolver: QueryResolvers['transaction'] = async ( + _parent, + args, + context, +) => { + console.log('transactionQueryResolver'); + const { requestKey, blockHash, minimumDepth } = args; + const transactions = await context.transactionRepository.getTransactionsByRequestKey({ + requestKey, + blockHash, + minimumDepth, + }); - if (transactions.length === 0) return null; + if (transactions.length === 0) return null; - const [first, ...rest] = - await context.blockRepository.getTransactionsOrderedByBlockDepth( - transactions, - ); + const [first, ...rest] = + await context.blockRepository.getTransactionsOrderedByBlockDepth(transactions); - return { - ...buildTransactionOutput(first), - orphanedTransactions: rest.map((r) => buildTransactionOutput(r)), - }; + return { + ...buildTransactionOutput(first), + orphanedTransactions: rest.map(r => buildTransactionOutput(r)), }; +}; diff --git a/indexer/src/kadena-server/resolvers/query/transactions-by-public-key-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/transactions-by-public-key-query-resolver.ts index 
3e73806c..26189d0a 100644 --- a/indexer/src/kadena-server/resolvers/query/transactions-by-public-key-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/transactions-by-public-key-query-resolver.ts @@ -1,21 +1,20 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { buildTransactionOutput } from "../output/build-transaction-output"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { buildTransactionOutput } from '../output/build-transaction-output'; -export const transactionsByPublicKeyQueryResolver: QueryResolvers["transactionsByPublicKey"] = +export const transactionsByPublicKeyQueryResolver: QueryResolvers['transactionsByPublicKey'] = async (_parent, args, context) => { - console.log("transactionsByPublicKeyQueryResolver"); + console.log('transactionsByPublicKeyQueryResolver'); const { publicKey, first, after, before, last } = args; - const output = - await context.transactionRepository.getTransactionsByPublicKey({ - publicKey, - first, - after, - before, - last, - }); + const output = await context.transactionRepository.getTransactionsByPublicKey({ + publicKey, + first, + after, + before, + last, + }); - const edges = output.edges.map((e) => ({ + const edges = output.edges.map(e => ({ cursor: e.cursor, node: buildTransactionOutput(e.node), })); diff --git a/indexer/src/kadena-server/resolvers/query/transactions-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/transactions-query-resolver.ts index 7c3efae4..d17716b4 100644 --- a/indexer/src/kadena-server/resolvers/query/transactions-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/transactions-query-resolver.ts @@ -1,63 +1,66 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { buildTransactionOutput } from "../output/build-transaction-output"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { buildTransactionOutput } from '../output/build-transaction-output'; -export const transactionsQueryResolver: QueryResolvers["transactions"] = - async (_parent, args, context) => { - console.log("transactionsQueryResolver"); - const { - after, - before, - first, - last, - accountName, - blockHash, - chainId, - fungibleName, - requestKey, - maxHeight, - minHeight, - minimumDepth, - } = args; +export const transactionsQueryResolver: QueryResolvers['transactions'] = async ( + _parent, + args, + context, +) => { + console.log('transactionsQueryResolver'); + const { + after, + before, + first, + last, + accountName, + blockHash, + chainId, + fungibleName, + requestKey, + maxHeight, + minHeight, + minimumDepth, + } = args; - if (!accountName && !fungibleName && !blockHash && !requestKey) { - throw new Error( - "At least one of accountName, fungibleName, blockHash, or requestKey must be provided", - ); - } + if (!accountName && !fungibleName && !blockHash && !requestKey) { + throw new Error( + 'At least one of accountName, fungibleName, blockHash, or requestKey must be provided', + ); + } - const output = await context.transactionRepository.getTransactions({ - blockHash, - accountName, - chainId, - fungibleName, - requestKey, - maxHeight, - minHeight, - minimumDepth, - first, - last, - before, - after, - }); + const output = await 
context.transactionRepository.getTransactions({ + blockHash, + accountName, + chainId, + fungibleName, + requestKey, + maxHeight, + minHeight, + minimumDepth, + first, + last, + before, + after, + }); - const edges = output.edges.map((e) => ({ - cursor: e.cursor, - node: buildTransactionOutput(e.node), - })); + const edges = output.edges.map(e => ({ + cursor: e.cursor, + node: buildTransactionOutput(e.node), + })); - return { - edges, - pageInfo: output.pageInfo, - // for resolvers - totalCount: -1, - accountName, - blockHash, - chainId, - maxHeight, - minHeight, - minimumDepth, - fungibleName, - requestKey, - }; + return { + edges, + pageInfo: output.pageInfo, + // for resolvers + totalCount: -1, + accountName, + blockHash, + chainId, + maxHeight, + minHeight, + minimumDepth, + fungibleName, + requestKey, }; +}; diff --git a/indexer/src/kadena-server/resolvers/query/transfers-query-resolver.ts b/indexer/src/kadena-server/resolvers/query/transfers-query-resolver.ts index d53b8fa8..9c8a3fa9 100644 --- a/indexer/src/kadena-server/resolvers/query/transfers-query-resolver.ts +++ b/indexer/src/kadena-server/resolvers/query/transfers-query-resolver.ts @@ -1,47 +1,41 @@ -import { ResolverContext } from "../../config/apollo-server-config"; -import { QueryResolvers } from "../../config/graphql-types"; -import { buildTransferOutput } from "../output/build-transfer-output"; +import { ResolverContext } from '../../config/apollo-server-config'; +import { QueryResolvers } from '../../config/graphql-types'; +import { buildTransferOutput } from '../output/build-transfer-output'; -export const transfersQueryResolver: QueryResolvers["transfers"] = - async (_parent, args, context) => { - console.log("transfersQueryResolver"); - const { - after, - before, - first, - last, - accountName, - blockHash, - chainId, - fungibleName, - requestKey, - } = args; - const output = await context.transferRepository.getTransfers({ - blockHash, - accountName, - chainId, - fungibleName, - requestKey, - first, - last, - before, - after, - }); +export const transfersQueryResolver: QueryResolvers['transfers'] = async ( + _parent, + args, + context, +) => { + console.log('transfersQueryResolver'); + const { after, before, first, last, accountName, blockHash, chainId, fungibleName, requestKey } = + args; + const output = await context.transferRepository.getTransfers({ + blockHash, + accountName, + chainId, + fungibleName, + requestKey, + first, + last, + before, + after, + }); - const edges = output.edges.map((e) => ({ - cursor: e.cursor, - node: buildTransferOutput(e.node), - })); + const edges = output.edges.map(e => ({ + cursor: e.cursor, + node: buildTransferOutput(e.node), + })); - return { - edges, - pageInfo: output.pageInfo, - // for resolvers - totalCount: -1, - accountName, - blockHash, - chainId, - fungibleName, - requestKey, - }; + return { + edges, + pageInfo: output.pageInfo, + // for resolvers + totalCount: -1, + accountName, + blockHash, + chainId, + fungibleName, + requestKey, }; +}; diff --git a/indexer/src/kadena-server/resolvers/subscription/consts.ts b/indexer/src/kadena-server/resolvers/subscription/consts.ts index acb9a066..1b1d74cb 100644 --- a/indexer/src/kadena-server/resolvers/subscription/consts.ts +++ b/indexer/src/kadena-server/resolvers/subscription/consts.ts @@ -1,4 +1,4 @@ -export const TRANSACTION_EVENT = "NEW_TRANSACTION"; -export const NEW_BLOCKS_EVENT = "NEW_BLOCKS"; -export const NEW_BLOCKS_FROM_DEPTH_EVENT = "NEW_BLOCKS_FROM_DEPTH_EVENT"; -export const EVENTS_EVENT = "NEW_EVENTS"; +export 
const TRANSACTION_EVENT = 'NEW_TRANSACTION';
+export const NEW_BLOCKS_EVENT = 'NEW_BLOCKS';
+export const NEW_BLOCKS_FROM_DEPTH_EVENT = 'NEW_BLOCKS_FROM_DEPTH_EVENT';
+export const EVENTS_EVENT = 'NEW_EVENTS';
diff --git a/indexer/src/kadena-server/resolvers/subscription/events-subscription-resolver.ts b/indexer/src/kadena-server/resolvers/subscription/events-subscription-resolver.ts
index 9cc3b1ce..d81aeb6c 100644
--- a/indexer/src/kadena-server/resolvers/subscription/events-subscription-resolver.ts
+++ b/indexer/src/kadena-server/resolvers/subscription/events-subscription-resolver.ts
@@ -1,7 +1,7 @@
-import { ResolverContext } from "../../config/apollo-server-config";
-import { SubscriptionResolvers } from "../../config/graphql-types";
-import { EventOutput } from "../../repository/application/event-repository";
-import { buildEventOutput } from "../output/build-event-output";
+import { ResolverContext } from '../../config/apollo-server-config';
+import { SubscriptionResolvers } from '../../config/graphql-types';
+import { EventOutput } from '../../repository/application/event-repository';
+import { buildEventOutput } from '../output/build-event-output';
async function* iteratorFn(
context: ResolverContext,
@@ -20,22 +20,16 @@ async function* iteratorFn(
if (newEvents.length > 1) {
lastEventId = Number(newEvents[0].id);
- yield newEvents.map((e) => buildEventOutput(e));
+ yield newEvents.map(e => buildEventOutput(e));
}
- await new Promise((resolve) => setTimeout(resolve, 1000));
+ await new Promise(resolve => setTimeout(resolve, 1000));
}
}
-export const eventsSubscriptionResolver: SubscriptionResolvers["events"] =
- {
- subscribe: (__root, args, context) => {
- return iteratorFn(
- context,
- args.qualifiedEventName,
- args.chainId,
- args.minimumDepth,
- );
- },
- resolve: (payload: any) => payload,
- };
+export const eventsSubscriptionResolver: SubscriptionResolvers['events'] = {
+ subscribe: (__root, args, context) => {
+ return iteratorFn(context, args.qualifiedEventName, args.chainId, args.minimumDepth);
+ },
+ resolve: (payload: any) => payload,
+};
diff --git a/indexer/src/kadena-server/resolvers/subscription/new-blocks-from-depth-subscription-resolver.ts b/indexer/src/kadena-server/resolvers/subscription/new-blocks-from-depth-subscription-resolver.ts
index feaf438e..54378a76 100644
--- a/indexer/src/kadena-server/resolvers/subscription/new-blocks-from-depth-subscription-resolver.ts
+++ b/indexer/src/kadena-server/resolvers/subscription/new-blocks-from-depth-subscription-resolver.ts
@@ -1,12 +1,12 @@
-import { withFilter } from "graphql-subscriptions";
-import { ResolverContext } from "../../config/apollo-server-config";
+import { withFilter } from 'graphql-subscriptions';
+import { ResolverContext } from '../../config/apollo-server-config';
import {
SubscriptionNewBlocksFromDepthArgs,
SubscriptionResolvers,
-} from "../../config/graphql-types";
-import { NEW_BLOCKS_FROM_DEPTH_EVENT } from "./consts";
-import zod from "zod";
-import { blockQueryResolver } from "../query/block-query-resolver";
+} from '../../config/graphql-types';
+import { NEW_BLOCKS_FROM_DEPTH_EVENT } from './consts';
+import zod from 'zod';
+import { blockQueryResolver } from '../query/block-query-resolver';
const newBlocksFromDepthSubscriptionSchema = zod.object({
chainId: zod.string(),
@@ -14,33 +14,25 @@ const newBlocksFromDepthSubscriptionSchema = zod.object({
hash: zod.string(),
});
-export const newBlocksFromDepthSubscriptionResolver: SubscriptionResolvers["newBlocksFromDepth"] =
+export const newBlocksFromDepthSubscriptionResolver: SubscriptionResolvers['newBlocksFromDepth'] =
{
resolve: async (payload: any, _args: any, context: ResolverContext) => {
- const res = await (blockQueryResolver as any)(
- {},
- { hash: payload.hash },
- context,
- );
+ const res = await (blockQueryResolver as any)({}, { hash: payload.hash }, context);
return [res];
},
subscribe: (_parent, args: SubscriptionNewBlocksFromDepthArgs, context) => {
return {
[Symbol.asyncIterator]: withFilter(
() => context.pubSub.asyncIterator(NEW_BLOCKS_FROM_DEPTH_EVENT),
- (payload) => {
+ payload => {
const res = newBlocksFromDepthSubscriptionSchema.safeParse(payload);
if (!res.success) {
- console.info(
- "Invalid payload on newBlocksFromDepthSubscription",
- payload,
- );
+ console.info('Invalid payload on newBlocksFromDepthSubscription', payload);
return false;
}
const { chainId, height } = res.data;
return (
- (!args.chainIds || args.chainIds.includes(chainId)) &&
- height >= args.minimumDepth
+ (!args.chainIds || args.chainIds.includes(chainId)) && height >= args.minimumDepth
);
},
),
diff --git a/indexer/src/kadena-server/resolvers/subscription/new-blocks-subscription-resolver.ts b/indexer/src/kadena-server/resolvers/subscription/new-blocks-subscription-resolver.ts
index add79d64..596ce73b 100644
--- a/indexer/src/kadena-server/resolvers/subscription/new-blocks-subscription-resolver.ts
+++ b/indexer/src/kadena-server/resolvers/subscription/new-blocks-subscription-resolver.ts
@@ -1,7 +1,7 @@
-import { ResolverContext } from "../../config/apollo-server-config";
-import { SubscriptionResolvers } from "../../config/graphql-types";
-import { BlockOutput } from "../../repository/application/block-repository";
-import { buildBlockOutput } from "../output/build-block-output";
+import { ResolverContext } from '../../config/apollo-server-config';
+import { SubscriptionResolvers } from '../../config/graphql-types';
+import { BlockOutput } from '../../repository/application/block-repository';
+import { buildBlockOutput } from '../output/build-block-output';
async function* iteratorFn(
chainIds: string[],
@@ -20,17 +20,16 @@ async function* iteratorFn(
if (newBlocks.length > 0) {
lastBlockId = Number(newBlocks[0].id);
- yield newBlocks.map((b) => buildBlockOutput(b));
+ yield newBlocks.map(b => buildBlockOutput(b));
}
- await new Promise((resolve) => setTimeout(resolve, 1000));
+ await new Promise(resolve => setTimeout(resolve, 1000));
}
}
-export const newBlocksSubscriptionResolver: SubscriptionResolvers["newBlocks"] =
- {
- subscribe: (_root, args, context) => {
- return iteratorFn(args.chainIds ?? [], context);
- },
- resolve: (payload: any) => payload,
- };
+export const newBlocksSubscriptionResolver: SubscriptionResolvers['newBlocks'] = {
+ subscribe: (_root, args, context) => {
+ return iteratorFn(args.chainIds ?? [], context);
},
+ resolve: (payload: any) => payload,
+};
diff --git a/indexer/src/kadena-server/resolvers/subscription/transaction-subscription-resolver.ts b/indexer/src/kadena-server/resolvers/subscription/transaction-subscription-resolver.ts
index 474dc5df..985941e5 100644
--- a/indexer/src/kadena-server/resolvers/subscription/transaction-subscription-resolver.ts
+++ b/indexer/src/kadena-server/resolvers/subscription/transaction-subscription-resolver.ts
@@ -1,7 +1,7 @@
-import { ResolverContext } from "../../config/apollo-server-config";
-import { SubscriptionResolvers } from "../../config/graphql-types";
-import { TransactionOutput } from "../../repository/application/transaction-repository";
-import { buildTransactionOutput } from "../output/build-transaction-output";
+import { ResolverContext } from '../../config/apollo-server-config';
+import { SubscriptionResolvers } from '../../config/graphql-types';
+import { TransactionOutput } from '../../repository/application/transaction-repository';
+import { buildTransactionOutput } from '../output/build-transaction-output';
async function* iteratorFn(
requestKey: string,
@@ -13,17 +13,15 @@ async function* iteratorFn(
requestKey,
chainId,
});
- const transactions = edges.map((e) => e.node);
+ const transactions = edges.map(e => e.node);
if (transactions.length > 0) {
const [first, ...rest] =
- await context.blockRepository.getTransactionsOrderedByBlockDepth(
- transactions,
- );
+ await context.blockRepository.getTransactionsOrderedByBlockDepth(transactions);
const result = {
...buildTransactionOutput(first),
- orphanedTransactions: rest.map((r) => buildTransactionOutput(r)),
+ orphanedTransactions: rest.map(r => buildTransactionOutput(r)),
};
yield result;
@@ -32,7 +30,7 @@ async function* iteratorFn(
}
}
-export const transactionSubscriptionResolver: SubscriptionResolvers["transaction"] =
+export const transactionSubscriptionResolver: SubscriptionResolvers['transaction'] =
{
subscribe: (_root, args, context) => {
return iteratorFn(args.requestKey, context, args.chainId);
diff --git a/indexer/src/kadena-server/server.ts b/indexer/src/kadena-server/server.ts
index c1e2e8cc..18e3e2e9 100644
--- a/indexer/src/kadena-server/server.ts
+++ b/indexer/src/kadena-server/server.ts
@@ -1,38 +1,35 @@
-import { ApolloServer, ApolloServerPlugin } from "@apollo/server";
-import { expressMiddleware } from "@apollo/server/express4";
-import { ApolloServerPluginDrainHttpServer } from "@apollo/server/plugin/drainHttpServer";
-import express, { NextFunction, Request, Response } from "express";
-import http from "http";
-import cors from "cors";
-import { resolvers } from "./resolvers";
-import { readFileSync } from "fs";
-import { join } from "path";
+import { ApolloServer, ApolloServerPlugin } from '@apollo/server';
+import { expressMiddleware } from '@apollo/server/express4';
+import { ApolloServerPluginDrainHttpServer } from '@apollo/server/plugin/drainHttpServer';
+import express, { NextFunction, Request, Response } from 'express';
+import http from 'http';
+import cors from 'cors';
+import { resolvers } from './resolvers';
+import { readFileSync } from 'fs';
+import { join } from 'path';
import {
createGraphqlContext,
publishSubscribe,
ResolverContext,
-} from "./config/apollo-server-config";
-import { WebSocketServer } from "ws";
-import { useServer } from "graphql-ws/lib/use/ws";
-import { makeExecutableSchema } from "@graphql-tools/schema";
-import { ArgumentNode, ASTNode, GraphQLError, Kind } from "graphql";
+} from
'./config/apollo-server-config'; +import { WebSocketServer } from 'ws'; +import { useServer } from 'graphql-ws/lib/use/ws'; +import { makeExecutableSchema } from '@graphql-tools/schema'; +import { ArgumentNode, ASTNode, GraphQLError, Kind } from 'graphql'; import { EVENTS_EVENT, NEW_BLOCKS_EVENT, NEW_BLOCKS_FROM_DEPTH_EVENT, TRANSACTION_EVENT, -} from "./resolvers/subscription/consts"; -import { dispatchInfoSchema } from "../jobs/publisher-job"; -import initCache from "../cache/init"; -import { getRequiredEnvString } from "../utils/helpers"; -import ipRangeCheck from "ip-range-check"; +} from './resolvers/subscription/consts'; +import { dispatchInfoSchema } from '../jobs/publisher-job'; +import initCache from '../cache/init'; +import { getRequiredEnvString } from '../utils/helpers'; +import ipRangeCheck from 'ip-range-check'; -const typeDefs = readFileSync( - join(__dirname, "./config/schema.graphql"), - "utf-8", -); +const typeDefs = readFileSync(join(__dirname, './config/schema.graphql'), 'utf-8'); -const KADENA_GRAPHQL_API_PORT = getRequiredEnvString("KADENA_GRAPHQL_API_PORT"); +const KADENA_GRAPHQL_API_PORT = getRequiredEnvString('KADENA_GRAPHQL_API_PORT'); const validatePaginationParamsPlugin: ApolloServerPlugin = { requestDidStart: async () => ({ @@ -56,17 +53,14 @@ const validatePaginationParamsPlugin: ApolloServerPlugin = { }); } if (node.kind === Kind.SELECTION_SET) { - node.selections.forEach((selection) => extractArguments(selection)); + node.selections.forEach(selection => extractArguments(selection)); } }; // Traverse the query AST to extract inline arguments if (document) { - document.definitions.forEach((definition) => { - if ( - definition.kind === Kind.OPERATION_DEFINITION && - definition.selectionSet - ) { + document.definitions.forEach(definition => { + if (definition.kind === Kind.OPERATION_DEFINITION && definition.selectionSet) { extractArguments(definition.selectionSet); } }); @@ -103,17 +97,13 @@ const validatePaginationParamsPlugin: ApolloServerPlugin = { }, }), }; -const allowedCIDRs = ["10.0.2.0/24", "10.0.3.0/24"]; +const allowedCIDRs = ['10.0.2.0/24', '10.0.3.0/24']; -const ipFilterMiddleware = ( - req: Request, - res: Response, - next: NextFunction, -) => { +const ipFilterMiddleware = (req: Request, res: Response, next: NextFunction) => { if (req.ip && ipRangeCheck(req.ip, allowedCIDRs)) { next(); // Allow access } else { - res.status(403).json({ message: "Access denied: IP not allowed" }); + res.status(403).json({ message: 'Access denied: IP not allowed' }); } }; @@ -145,16 +135,16 @@ export async function useKadenaGraphqlServer() { const wsServer = new WebSocketServer({ server: httpServer, - path: "/graphql", + path: '/graphql', }); const serverCleanup = useServer( { schema, - context: async (ctx) => { + context: async ctx => { const abortController = new AbortController(); - ctx.extra.socket.addEventListener("close", () => { + ctx.extra.socket.addEventListener('close', () => { abortController.abort(); // Only aborts this specific subscription }); @@ -169,20 +159,19 @@ export async function useKadenaGraphqlServer() { app.use(express.json()); app.use( - "/graphql", + '/graphql', cors(), expressMiddleware(server, { context: createGraphqlContext, }), ); - app.post("/new-block", ipFilterMiddleware, async (req, res) => { + app.post('/new-block', ipFilterMiddleware, async (req, res) => { const payload = await dispatchInfoSchema.safeParseAsync(req.body); if (!payload.success) { - return res.status(400).json({ message: "Invalid input" }); + return 
res.status(400).json({ message: 'Invalid input' }); } - const { hash, chainId, height, requestKeys, qualifiedEventNames } = - payload.data; + const { hash, chainId, height, requestKeys, qualifiedEventNames } = payload.data; publishSubscribe.publish(NEW_BLOCKS_EVENT, { hash, @@ -195,7 +184,7 @@ export async function useKadenaGraphqlServer() { hash, }); - const eventPromises = qualifiedEventNames.map((qualifiedEventName) => { + const eventPromises = qualifiedEventNames.map(qualifiedEventName => { return publishSubscribe.publish(EVENTS_EVENT, { qualifiedEventName, height, @@ -204,7 +193,7 @@ export async function useKadenaGraphqlServer() { }); }); - const transactionPromises = requestKeys.map((requestKey) => { + const transactionPromises = requestKeys.map(requestKey => { return publishSubscribe.publish(TRANSACTION_EVENT, { chainId, requestKey, @@ -214,13 +203,11 @@ export async function useKadenaGraphqlServer() { await Promise.all([...eventPromises, ...transactionPromises]); res.json({ - message: "New block published.", + message: 'New block published.', }); }); await initCache(context); - await new Promise((resolve) => - httpServer.listen({ port: KADENA_GRAPHQL_API_PORT }, resolve), - ); + await new Promise(resolve => httpServer.listen({ port: KADENA_GRAPHQL_API_PORT }, resolve)); console.log(`Server running on port ${KADENA_GRAPHQL_API_PORT}.`); } diff --git a/indexer/src/kadena-server/utils/coin-circulation.ts b/indexer/src/kadena-server/utils/coin-circulation.ts index b0025697..c2475086 100644 --- a/indexer/src/kadena-server/utils/coin-circulation.ts +++ b/indexer/src/kadena-server/utils/coin-circulation.ts @@ -1,16 +1,15 @@ -import Papa from "papaparse"; -import fs from "fs"; -import path from "path"; +import Papa from 'papaparse'; +import fs from 'fs'; +import path from 'path'; type RewardRow = [number, number]; function calculateReward(csvContent: string, cutHeight: number): number { const parsed = Papa.parse(csvContent, { - delimiter: ",", + delimiter: ',', skipEmptyLines: true, transformHeader: undefined, - transform: (value, fieldIndex) => - fieldIndex === 0 ? parseInt(value, 10) : parseFloat(value), + transform: (value, fieldIndex) => (fieldIndex === 0 ? 
parseInt(value, 10) : parseFloat(value)), }); const averageHeight = cutHeight / 20; // number of chains @@ -40,12 +39,9 @@ function calculateReward(csvContent: string, cutHeight: number): number { type CsvRow = [string, string, string, number, number]; -function calculateTokenPayments( - csvContent: string, - targetTimestamp: number, -): number { +function calculateTokenPayments(csvContent: string, targetTimestamp: number): number { const parsed = Papa.parse(csvContent, { - delimiter: ",", + delimiter: ',', skipEmptyLines: true, transform: (value, fieldIndex) => { if (fieldIndex === 3 || fieldIndex === 4) { @@ -82,7 +78,7 @@ function calculateTokenPayments( async function getCsvContent(filePath: string): Promise { return new Promise((resolve, reject) => { - fs.readFile(filePath, "utf-8", (err, data) => { + fs.readFile(filePath, 'utf-8', (err, data) => { if (err) { reject(err); } else { @@ -93,29 +89,20 @@ async function getCsvContent(filePath: string): Promise { } async function getMinerRewards(cutHeight: number) { - const filePath = path.resolve( - __dirname, - "../../circulating-coins/miner_rewards.csv", - ); + const filePath = path.resolve(__dirname, '../../circulating-coins/miner_rewards.csv'); const csvContent = await getCsvContent(filePath); const reward = calculateReward(csvContent, cutHeight); return reward; } async function getTokenPayments(latestCreationTime: number) { - const filePath = path.resolve( - __dirname, - "../../circulating-coins/token_payments.csv", - ); + const filePath = path.resolve(__dirname, '../../circulating-coins/token_payments.csv'); const csvContent = await getCsvContent(filePath); const tokenPayments = calculateTokenPayments(csvContent, latestCreationTime); return tokenPayments; } -export async function getCirculationNumber( - cutHeight: number, - latestCreationTime: number, -) { +export async function getCirculationNumber(cutHeight: number, latestCreationTime: number) { const minerRewards = await getMinerRewards(cutHeight); const tokenPayments = await getTokenPayments(latestCreationTime); return minerRewards + tokenPayments; diff --git a/indexer/src/kadena-server/utils/date.ts b/indexer/src/kadena-server/utils/date.ts index b672764f..6dd26d55 100644 --- a/indexer/src/kadena-server/utils/date.ts +++ b/indexer/src/kadena-server/utils/date.ts @@ -1,7 +1,7 @@ export function convertStringToDate(timestampInMicrosecondsString: any) { // Ensure the input is a string - if (typeof timestampInMicrosecondsString !== "string") { - throw new Error("The input timestamp must be a string."); + if (typeof timestampInMicrosecondsString !== 'string') { + throw new Error('The input timestamp must be a string.'); } // Convert the string to a BigInt @@ -22,8 +22,6 @@ export function convertStringToDate(timestampInMicrosecondsString: any) { return new Date(timestampInMilliseconds); } else { // Handle the case where the BigInt is too large to convert - throw new Error( - "The timestamp is too large to safely convert to a JavaScript number.", - ); + throw new Error('The timestamp is too large to safely convert to a JavaScript number.'); } } diff --git a/indexer/src/kadena-server/utils/difficulty.ts b/indexer/src/kadena-server/utils/difficulty.ts index df2ac85b..fdf9808e 100644 --- a/indexer/src/kadena-server/utils/difficulty.ts +++ b/indexer/src/kadena-server/utils/difficulty.ts @@ -16,37 +16,28 @@ export function calculateTotalDifficulty( chainIds: number[], ): bigint | undefined { for (let i = currentHeight; i > currentHeight - 4n; i--) { - const blocksOfThisHeight = 
blocks.filter((block) => block.height === i); + const blocksOfThisHeight = blocks.filter(block => block.height === i); if (blocksOfThisHeight.length === chainIds.length) { - const totalDifficulty = blocksOfThisHeight.reduce( - (acc, block) => acc + block.difficulty, - 0n, - ); + const totalDifficulty = blocksOfThisHeight.reduce((acc, block) => acc + block.difficulty, 0n); return totalDifficulty; // Deal with the case where we have orphan blocks. } else if (blocksOfThisHeight.length > chainIds.length) { - const blocksGroupedByChainId = blocksOfThisHeight.reduce( - (acc, block) => { - const chainIdKey = block.chainId.toString(); - if (!acc[chainIdKey]) { - acc[chainIdKey] = []; - } - acc[chainIdKey].push(block); - return acc; - }, - {}, - ); + const blocksGroupedByChainId = blocksOfThisHeight.reduce((acc, block) => { + const chainIdKey = block.chainId.toString(); + if (!acc[chainIdKey]) { + acc[chainIdKey] = []; + } + acc[chainIdKey].push(block); + return acc; + }, {}); let totalDifficulty = 0n; for (const chainId of chainIds) { const blocks = blocksGroupedByChainId[chainId.toString()]; if (blocks) { - const chainDifficulty = blocks.reduce( - (acc, block) => acc + block.difficulty, - 0n, - ); + const chainDifficulty = blocks.reduce((acc, block) => acc + block.difficulty, 0n); // If there are multiple blocks, we average their difficulties. totalDifficulty += chainDifficulty / BigInt(blocks.length); @@ -59,13 +50,13 @@ export function calculateTotalDifficulty( function base64UrlToBigIntLittleEndian(base64UrlString: any) { // Replace base64url specific characters with standard base64 characters - const base64 = base64UrlString.replace(/-/g, "+").replace(/_/g, "/"); + const base64 = base64UrlString.replace(/-/g, '+').replace(/_/g, '/'); // Pad the base64 string with '=' to make it valid - const paddedBase64 = base64.padEnd(Math.ceil(base64.length / 4) * 4, "="); + const paddedBase64 = base64.padEnd(Math.ceil(base64.length / 4) * 4, '='); // Decode the base64 string into a Buffer (Node.js compatible) - const byteArray = Buffer.from(paddedBase64, "base64"); + const byteArray = Buffer.from(paddedBase64, 'base64'); // Reverse the byte array to handle little-endian encoding const reversedByteArray = Uint8Array.from(byteArray).reverse(); diff --git a/indexer/src/kadena-server/utils/hashrate.ts b/indexer/src/kadena-server/utils/hashrate.ts index 94ac7d70..9b7be1fc 100644 --- a/indexer/src/kadena-server/utils/hashrate.ts +++ b/indexer/src/kadena-server/utils/hashrate.ts @@ -1,8 +1,9 @@ -import { BlockWithDifficulty } from "./difficulty"; +import { BlockWithDifficulty } from './difficulty'; -function aggregateBlockData( - blocks: { creationTimeDate: Date; difficulty: bigint }[], -): { earliestTime: number; totalDifficulty: bigint } { +function aggregateBlockData(blocks: { creationTimeDate: Date; difficulty: bigint }[]): { + earliestTime: number; + totalDifficulty: bigint; +} { let earliestTime = Number.MAX_SAFE_INTEGER; let totalDifficulty = 0n; @@ -17,15 +18,10 @@ function aggregateBlockData( return { earliestTime, totalDifficulty }; } -export function calculateNetworkHashRate( - blocksWithDifficulty: BlockWithDifficulty[], -): bigint { - const { earliestTime, totalDifficulty } = - aggregateBlockData(blocksWithDifficulty); +export function calculateNetworkHashRate(blocksWithDifficulty: BlockWithDifficulty[]): bigint { + const { earliestTime, totalDifficulty } = aggregateBlockData(blocksWithDifficulty); const timeDifference = Date.now() - earliestTime; - return timeDifference < 1000 - ? 
0n - : totalDifficulty / (BigInt(timeDifference) / 1000n); + return timeDifference < 1000 ? 0n : totalDifficulty / (BigInt(timeDifference) / 1000n); } diff --git a/indexer/src/kadena-server/utils/normalize-errors.ts b/indexer/src/kadena-server/utils/normalize-errors.ts index 25502f63..f51a9b88 100644 --- a/indexer/src/kadena-server/utils/normalize-errors.ts +++ b/indexer/src/kadena-server/utils/normalize-errors.ts @@ -1,6 +1,6 @@ -import { GraphQLFormattedError } from "graphql"; -import { ZodError } from "zod"; -import { unwrapResolverError } from "@apollo/server/errors"; +import { GraphQLFormattedError } from 'graphql'; +import { ZodError } from 'zod'; +import { unwrapResolverError } from '@apollo/server/errors'; interface CustomFormattedError extends GraphQLFormattedError { type?: string; @@ -10,20 +10,19 @@ interface CustomFormattedError extends GraphQLFormattedError { export const formatError = ( formattedError: GraphQLFormattedError, - error: unknown + error: unknown, ): CustomFormattedError => { const unwrappedError = unwrapResolverError(error); if (unwrappedError instanceof ZodError) { if (unwrappedError.issues.length > 0) { return { - message: "Input Validation Error", - type: "ZodError", - description: - "The input provided is invalid. Check the input and try again.", - data: unwrappedError.issues.map((issue) => ({ + message: 'Input Validation Error', + type: 'ZodError', + description: 'The input provided is invalid. Check the input and try again.', + data: unwrappedError.issues.map(issue => ({ message: issue.message, - path: issue.path.join("."), + path: issue.path.join('.'), })), }; } @@ -31,15 +30,15 @@ export const formatError = ( if ( error instanceof Error && - "code" in error && - "type" in error && - (error as any).code === "ECONNREFUSED" + 'code' in error && + 'type' in error && + (error as any).code === 'ECONNREFUSED' ) { return { - message: "Chainweb Node Connection Refused", + message: 'Chainweb Node Connection Refused', type: (error as any).type, description: - "Chainweb Node connection refused. Are you sure the Chainweb Node is running and reachable?", + 'Chainweb Node connection refused. Are you sure the Chainweb Node is running and reachable?', data: error.stack ?? 
[], }; } diff --git a/indexer/src/kadena-server/utils/raw-query.ts b/indexer/src/kadena-server/utils/raw-query.ts index cdf16df6..b1142c73 100644 --- a/indexer/src/kadena-server/utils/raw-query.ts +++ b/indexer/src/kadena-server/utils/raw-query.ts @@ -1,15 +1,11 @@ -import { dirtyReadClient } from "@kadena/client-utils/core"; -import type { ChainId } from "@kadena/types"; -import { - PactQuery, - PactQueryData, - PactQueryResponse, -} from "../config/graphql-types"; -import { getRequiredEnvString } from "../../utils/helpers"; -import { PactCommandError } from "../errors/pact-command-error"; +import { dirtyReadClient } from '@kadena/client-utils/core'; +import type { ChainId } from '@kadena/types'; +import { PactQuery, PactQueryData, PactQueryResponse } from '../config/graphql-types'; +import { getRequiredEnvString } from '../../utils/helpers'; +import { PactCommandError } from '../errors/pact-command-error'; -const HOST_URL = getRequiredEnvString("NODE_API_URL"); -const NETWORK_ID = getRequiredEnvString("SYNC_NETWORK"); +const HOST_URL = getRequiredEnvString('NODE_API_URL'); +const NETWORK_ID = getRequiredEnvString('SYNC_NETWORK'); async function sendRawQuery( code: string, @@ -40,7 +36,7 @@ async function sendRawQuery( return JSON.stringify(result); } catch (error) { - throw new PactCommandError("Pact Command failed with error", error); + throw new PactCommandError('Pact Command failed with error', error); } } @@ -49,7 +45,7 @@ async function sendQuery(query: PactQuery): Promise { try { const result = await sendRawQuery(code, chainId, data); return { - status: "success", + status: 'success', result, error: null, chainId: chainId, @@ -57,11 +53,10 @@ async function sendQuery(query: PactQuery): Promise { }; } catch (error: unknown) { const err = error as PactCommandError; - const pactErrorMessage = - err.pactError?.message || JSON.stringify(err.pactError || error); + const pactErrorMessage = err.pactError?.message || JSON.stringify(err.pactError || error); return { - status: "error", + status: 'error', result: null, error: pactErrorMessage, chainId: chainId, @@ -70,16 +65,13 @@ async function sendQuery(query: PactQuery): Promise { } } -function createTimeout( - query: PactQuery, - timeoutMs: number, -): Promise { - return new Promise((resolve) => +function createTimeout(query: PactQuery, timeoutMs: number): Promise { + return new Promise(resolve => setTimeout(() => { resolve({ - status: "timeout", + status: 'timeout', result: null, - error: "The query took too long to execute and was aborted", + error: 'The query took too long to execute and was aborted', chainId: query.chainId, code: query.code, }); @@ -87,9 +79,7 @@ function createTimeout( ); } -export async function handleSingleQuery( - query: PactQuery, -): Promise { +export async function handleSingleQuery(query: PactQuery): Promise { const timeoutPromise = createTimeout(query, 10000); const sendQueryPromise = sendQuery(query); diff --git a/indexer/src/models/balance.ts b/indexer/src/models/balance.ts index ede12c90..1ef24430 100644 --- a/indexer/src/models/balance.ts +++ b/indexer/src/models/balance.ts @@ -1,7 +1,7 @@ -import { Model, DataTypes, Optional } from "sequelize"; -import { sequelize } from "../config/database"; -import Contract from "./contract"; -import { gql, makeExtendSchemaPlugin } from "postgraphile"; +import { Model, DataTypes, Optional } from 'sequelize'; +import { sequelize } from '../config/database'; +import Contract from './contract'; +import { gql, makeExtendSchemaPlugin } from 'postgraphile'; export interface 
BalanceAttributes { id?: number; @@ -17,8 +17,7 @@ export interface BalanceAttributes { polyfungiblesCount?: number; } -interface BalanceCreationAttributes - extends Optional {} +interface BalanceCreationAttributes extends Optional {} /** * Represents a balance in the system. @@ -67,7 +66,7 @@ Balance.init( type: DataTypes.INTEGER, autoIncrement: true, primaryKey: true, - comment: "The unique identifier for the balance record (e.g., 45690).", + comment: 'The unique identifier for the balance record (e.g., 45690).', }, account: { type: DataTypes.STRING, @@ -78,13 +77,13 @@ Balance.init( chainId: { type: DataTypes.INTEGER, allowNull: false, - comment: "The ID of the blockchain network (e.g., 2).", + comment: 'The ID of the blockchain network (e.g., 2).', }, balance: { type: DataTypes.DECIMAL, allowNull: false, defaultValue: 0, - comment: "The balance amount (e.g., 25).", + comment: 'The balance amount (e.g., 25).', }, module: { type: DataTypes.STRING, @@ -94,71 +93,70 @@ Balance.init( tokenId: { type: DataTypes.STRING, allowNull: true, - comment: - "The token ID associated with the balance (e.g., 'boxing-badger #1443').", + comment: "The token ID associated with the balance (e.g., 'boxing-badger #1443').", }, hasTokenId: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false, - comment: "Whether the balance has a token ID (e.g., false).", + comment: 'Whether the balance has a token ID (e.g., false).', }, contractId: { type: DataTypes.INTEGER, allowNull: true, - comment: "The ID of the associated contract (e.g., 204).", + comment: 'The ID of the associated contract (e.g., 204).', }, transactionsCount: { type: DataTypes.INTEGER, defaultValue: 0, - comment: "The number of transactions in the block.", + comment: 'The number of transactions in the block.', }, fungiblesCount: { type: DataTypes.INTEGER, defaultValue: 0, - comment: "The number of fungibles in the block.", + comment: 'The number of fungibles in the block.', }, polyfungiblesCount: { type: DataTypes.INTEGER, defaultValue: 0, - comment: "The number of polyfungibles in the block.", + comment: 'The number of polyfungibles in the block.', }, }, { sequelize, - modelName: "Balance", + modelName: 'Balance', indexes: [ { - name: "balances_unique_constraint", + name: 'balances_unique_constraint', unique: true, - fields: ["chainId", "account", "module", "tokenId"], + fields: ['chainId', 'account', 'module', 'tokenId'], }, { - name: "balances_account_index", - fields: ["account"], + name: 'balances_account_index', + fields: ['account'], }, { - name: "balances_tokenid_index", - fields: ["tokenId"], + name: 'balances_tokenid_index', + fields: ['tokenId'], }, { - name: "balances_contractid_index", - fields: ["contractId"], + name: 'balances_contractid_index', + fields: ['contractId'], }, { - name: "balances_search_idx", - fields: [sequelize.fn("LOWER", sequelize.col("account"))], + name: 'balances_search_idx', + fields: [sequelize.fn('LOWER', sequelize.col('account'))], }, ], }, ); Balance.belongsTo(Contract, { - foreignKey: "contractId", - as: "contract", + foreignKey: 'contractId', + as: 'contract', }); -export const getHoldersPlugin = makeExtendSchemaPlugin((build) => { +export const getHoldersPlugin = makeExtendSchemaPlugin(build => { return { typeDefs: gql` extend type Query { @@ -200,7 +198,7 @@ export const getHoldersPlugin = makeExtendSchemaPlugin((build) => { ); const holders = rows.map((row: any) => ({ - cursor: Buffer.from(row.row_id.toString()).toString("base64"), + cursor: Buffer.from(row.row_id.toString()).toString('base64'), 
node: { address: row.address, quantity: row.quantity, @@ -211,9 +209,7 @@ export const getHoldersPlugin = makeExtendSchemaPlugin((build) => { const hasNextPage = first ? holders.length === first : false; const hasPreviousPage = last ? holders.length === last : !!after; - const endCursor = hasNextPage - ? holders[holders.length - 1].cursor - : null; + const endCursor = hasNextPage ? holders[holders.length - 1].cursor : null; const startCursor = holders.length > 0 ? holders[0].cursor : null; const totalCount = holders.length; diff --git a/indexer/src/models/block.ts b/indexer/src/models/block.ts index ae649456..bf19f42d 100644 --- a/indexer/src/models/block.ts +++ b/indexer/src/models/block.ts @@ -1,6 +1,6 @@ -import { Model, DataTypes } from "sequelize"; -import { sequelize } from "../config/database"; -import { gql, makeExtendSchemaPlugin } from "postgraphile"; +import { Model, DataTypes } from 'sequelize'; +import { sequelize } from '../config/database'; +import { gql, makeExtendSchemaPlugin } from 'postgraphile'; export interface BlockAttributes { id: number; @@ -93,7 +93,7 @@ Block.init( type: DataTypes.INTEGER, autoIncrement: true, primaryKey: true, - comment: "The unique identifier for the block record.", + comment: 'The unique identifier for the block record.', }, nonce: { type: DataTypes.STRING, @@ -101,12 +101,11 @@ Block.init( }, creationTime: { type: DataTypes.BIGINT, - comment: "The creation time of the block (e.g., 1718887955748100).", + comment: 'The creation time of the block (e.g., 1718887955748100).', }, parent: { type: DataTypes.STRING, - comment: - "The parent block hash (e.g., '2Zw0pONGUoyYmlKi-F0o_-ak2hKKlg1Mmc9ab6BjATY').", + comment: "The parent block hash (e.g., '2Zw0pONGUoyYmlKi-F0o_-ak2hKKlg1Mmc9ab6BjATY').", }, adjacents: { type: DataTypes.JSONB, @@ -115,8 +114,7 @@ Block.init( }, target: { type: DataTypes.STRING, - comment: - "The target of the block (e.g., 'o2YaicN3y58DkvsmCDKR88KqPwLPG5EADwAAAAAAAAA').", + comment: "The target of the block (e.g., 'o2YaicN3y58DkvsmCDKR88KqPwLPG5EADwAAAAAAAAA').", }, payloadHash: { type: DataTypes.STRING, @@ -125,16 +123,15 @@ Block.init( }, chainId: { type: DataTypes.INTEGER, - comment: "The ID of the blockchain network (e.g., 16).", + comment: 'The ID of the blockchain network (e.g., 16).', }, weight: { type: DataTypes.STRING, - comment: - "The weight of the block (e.g., 'WNim1Xw26HgDNwEAAAAAAAAAAAAAAAAAAAAAAAAAAAA').", + comment: "The weight of the block (e.g., 'WNim1Xw26HgDNwEAAAAAAAAAAAAAAAAAAAAAAAAAAAA').", }, height: { type: DataTypes.INTEGER, - comment: "The height of the block (e.g., 4881163).", + comment: 'The height of the block (e.g., 4881163).', }, chainwebVersion: { type: DataTypes.STRING, @@ -142,16 +139,15 @@ Block.init( }, epochStart: { type: DataTypes.BIGINT, - comment: "The epoch start time of the block (e.g., 1718886629458176).", + comment: 'The epoch start time of the block (e.g., 1718886629458176).', }, featureFlags: { type: DataTypes.BIGINT, - comment: "The feature flags of the block (e.g., 56646198189039183).", + comment: 'The feature flags of the block (e.g., 56646198189039183).', }, hash: { type: DataTypes.STRING, - comment: - "The hash of the block (e.g., 'XZXKrN7DzWnzEX2oZp5HOjr6R0zapn-XxtsYOdtfYFY').", + comment: "The hash of the block (e.g., 'XZXKrN7DzWnzEX2oZp5HOjr6R0zapn-XxtsYOdtfYFY').", }, minerData: { type: DataTypes.JSONB, @@ -175,68 +171,64 @@ Block.init( }, canonical: { type: DataTypes.BOOLEAN, - comment: "Indicates whether the transaction is canonical.", + comment: 'Indicates whether the 
transaction is canonical.', }, transactionsCount: { type: DataTypes.INTEGER, defaultValue: 0, - comment: "The number of transactions in the block.", + comment: 'The number of transactions in the block.', }, }, { sequelize, - modelName: "Block", + modelName: 'Block', indexes: [ { - name: "blocks_chainwebVersion_chainid_hash_unique_idx", + name: 'blocks_chainwebVersion_chainid_hash_unique_idx', unique: true, - fields: ["chainwebVersion", "chainId", "hash"], + fields: ['chainwebVersion', 'chainId', 'hash'], }, { - name: "blocks_height_idx", - fields: ["height"], + name: 'blocks_height_idx', + fields: ['height'], }, { - name: "blocks_hash_idx", - fields: ["hash"], + name: 'blocks_hash_idx', + fields: ['hash'], }, { - name: "blocks_chainid_height_idx", - fields: ["chainId", "height"], + name: 'blocks_chainid_height_idx', + fields: ['chainId', 'height'], }, { - name: "blocks_chainid_idx", - fields: ["chainId"], + name: 'blocks_chainid_idx', + fields: ['chainId'], }, { - name: "blocks_canonical_idx", - fields: ["canonical"], + name: 'blocks_canonical_idx', + fields: ['canonical'], }, { - name: "blocks_height_id_idx", - fields: ["height", "id"], + name: 'blocks_height_id_idx', + fields: ['height', 'id'], }, // Search indexes { - name: "blocks_trgm_parent_idx", - fields: [sequelize.fn("LOWER", sequelize.col("parent"))], - using: "gin", - operator: "gin_trgm_ops", + name: 'blocks_trgm_parent_idx', + fields: [sequelize.fn('LOWER', sequelize.col('parent'))], + using: 'gin', + operator: 'gin_trgm_ops', }, ], }, ); -export const blockQueryPlugin = makeExtendSchemaPlugin((build) => { +export const blockQueryPlugin = makeExtendSchemaPlugin(build => { return { typeDefs: gql` extend type Query { blockByHeight(height: Int!, chainId: Int!): Block - searchAll( - searchTerm: String! - limit: Int! 
- heightFilter: Int - ): SearchAllResult + searchAll(searchTerm: String!, limit: Int!, heightFilter: Int): SearchAllResult } type SearchAllResult { @@ -304,11 +296,7 @@ export const blockQueryPlugin = makeExtendSchemaPlugin((build) => { `; const [blocks, transactions, addresses, tokens] = await Promise.all([ - rootPgPool.query(blocksQuery, [ - `${searchTerm}`, - limit, - heightFilter, - ]), + rootPgPool.query(blocksQuery, [`${searchTerm}`, limit, heightFilter]), rootPgPool.query(transactionsQuery, [`${searchTerm}`, limit]), rootPgPool.query(addressesQuery, [`${searchTerm}`, limit]), rootPgPool.query(tokensQuery, [`%${searchTerm}%`, limit]), diff --git a/indexer/src/models/contract.ts b/indexer/src/models/contract.ts index b2185b62..036faa5a 100644 --- a/indexer/src/models/contract.ts +++ b/indexer/src/models/contract.ts @@ -1,6 +1,6 @@ -import { Model, DataTypes } from "sequelize"; -import { sequelize } from "../config/database"; -import Balance from "./balance"; +import { Model, DataTypes } from 'sequelize'; +import { sequelize } from '../config/database'; +import Balance from './balance'; export interface ContractAttributes { id: number; @@ -44,12 +44,12 @@ Contract.init( type: DataTypes.INTEGER, autoIncrement: true, primaryKey: true, - comment: "The unique identifier for the contract record (e.g., 1).", + comment: 'The unique identifier for the contract record (e.g., 1).', }, chainId: { type: DataTypes.INTEGER, allowNull: false, - comment: "The ID of the blockchain network (e.g., 8).", + comment: 'The ID of the blockchain network (e.g., 8).', }, type: { type: DataTypes.STRING, @@ -59,8 +59,7 @@ Contract.init( module: { type: DataTypes.STRING, allowNull: false, - comment: - "The module associated with the contract (e.g., 'marmalade.ledger').", + comment: "The module associated with the contract (e.g., 'marmalade.ledger').", }, metadata: { type: DataTypes.JSON, @@ -77,21 +76,21 @@ Contract.init( precision: { type: DataTypes.INTEGER, allowNull: true, - comment: "The precision of the contract (e.g., 12).", + comment: 'The precision of the contract (e.g., 12).', }, }, { sequelize, - modelName: "Contract", + modelName: 'Contract', indexes: [ { - name: "contract_unique_constraint", + name: 'contract_unique_constraint', unique: true, - fields: ["chainId", "module", "tokenId"], + fields: ['chainId', 'module', 'tokenId'], }, { - name: "contracts_search_idx", - fields: [sequelize.fn("LOWER", sequelize.col("module"))], + name: 'contracts_search_idx', + fields: [sequelize.fn('LOWER', sequelize.col('module'))], }, ], }, diff --git a/indexer/src/models/event.ts b/indexer/src/models/event.ts index e0fe3847..7b3fef72 100644 --- a/indexer/src/models/event.ts +++ b/indexer/src/models/event.ts @@ -1,6 +1,6 @@ -import { Model, DataTypes } from "sequelize"; -import { sequelize } from "../config/database"; -import Transaction, { TransactionAttributes } from "./transaction"; +import { Model, DataTypes } from 'sequelize'; +import { sequelize } from '../config/database'; +import Transaction, { TransactionAttributes } from './transaction'; export interface EventAttributes { id: number; @@ -52,17 +52,17 @@ Event.init( type: DataTypes.INTEGER, autoIncrement: true, primaryKey: true, - comment: "The unique identifier for the event record (e.g., 5985644).", + comment: 'The unique identifier for the event record (e.g., 5985644).', }, transactionId: { type: DataTypes.INTEGER, allowNull: true, - comment: "The ID of the associated transaction (e.g., 4134355).", + comment: 'The ID of the associated transaction (e.g., 
4134355).', }, chainId: { type: DataTypes.INTEGER, allowNull: false, - comment: "The ID of the blockchain network (e.g., 0).", + comment: 'The ID of the blockchain network (e.g., 0).', }, module: { type: DataTypes.STRING, @@ -94,28 +94,28 @@ Event.init( orderIndex: { type: DataTypes.INTEGER, allowNull: true, - comment: "The event order.", + comment: 'The event order.', }, }, { sequelize, - modelName: "Event", + modelName: 'Event', indexes: [ { - name: "events_transactionid_idx", - fields: ["transactionId"], + name: 'events_transactionid_idx', + fields: ['transactionId'], }, { - name: "events_module_name_idx", - fields: ["module", "name"], + name: 'events_module_name_idx', + fields: ['module', 'name'], }, ], }, ); Event.belongsTo(Transaction, { - foreignKey: "transactionId", - as: "transaction", + foreignKey: 'transactionId', + as: 'transaction', }); export default Event; diff --git a/indexer/src/models/guard.ts b/indexer/src/models/guard.ts index ed4e14f1..dc20e94e 100644 --- a/indexer/src/models/guard.ts +++ b/indexer/src/models/guard.ts @@ -1,6 +1,6 @@ -import { Model, DataTypes, Optional } from "sequelize"; -import { sequelize } from "../config/database"; -import Balance from "./balance"; +import { Model, DataTypes, Optional } from 'sequelize'; +import { sequelize } from '../config/database'; +import Balance from './balance'; export interface GuardAttributes { id: number; @@ -9,12 +9,9 @@ export interface GuardAttributes { balanceId: number; } -interface GuardCreationAttributes extends Optional {} +interface GuardCreationAttributes extends Optional {} -class Guard - extends Model - implements GuardAttributes -{ +class Guard extends Model implements GuardAttributes { public id!: number; public publicKey!: string; public predicate!: string; @@ -27,33 +24,32 @@ Guard.init( type: DataTypes.INTEGER, autoIncrement: true, primaryKey: true, - comment: "The unique identifier for the signer", + comment: 'The unique identifier for the signer', }, publicKey: { type: DataTypes.STRING, allowNull: false, - comment: "The public key associated with the account", + comment: 'The public key associated with the account', }, predicate: { type: DataTypes.STRING, allowNull: false, - comment: - "The predicate associated with the account, public key and chain", + comment: 'The predicate associated with the account, public key and chain', }, balanceId: { type: DataTypes.INTEGER, allowNull: true, - comment: "The ID of the associated balance (e.g., 204).", + comment: 'The ID of the associated balance (e.g., 204).', }, }, { sequelize, - modelName: "Guard", - tableName: "Guards", + modelName: 'Guard', + tableName: 'Guards', indexes: [ { - name: "guards_publickey_predicate_balanceid_idx", - fields: ["publicKey", "predicate", "balanceId"], + name: 'guards_publickey_predicate_balanceid_idx', + fields: ['publicKey', 'predicate', 'balanceId'], unique: true, }, ], @@ -61,8 +57,8 @@ Guard.init( ); Guard.belongsTo(Balance, { - foreignKey: "balanceId", - as: "balance", + foreignKey: 'balanceId', + as: 'balance', }); export default Guard; diff --git a/indexer/src/models/signer.ts b/indexer/src/models/signer.ts index 8423f0bd..b623dc45 100644 --- a/indexer/src/models/signer.ts +++ b/indexer/src/models/signer.ts @@ -1,6 +1,6 @@ -import { Model, DataTypes, Optional } from "sequelize"; -import { sequelize } from "../config/database"; -import Transaction from "./transaction"; +import { Model, DataTypes, Optional } from 'sequelize'; +import { sequelize } from '../config/database'; +import Transaction from './transaction'; export 
interface SignerAttributes { id: number; @@ -12,12 +12,9 @@ export interface SignerAttributes { transactionId: number; } -interface SignerCreationAttributes extends Optional {} +interface SignerCreationAttributes extends Optional {} -class Signer - extends Model - implements SignerAttributes -{ +class Signer extends Model implements SignerAttributes { public id!: number; public address?: string; public orderIndex?: number; @@ -33,56 +30,56 @@ Signer.init( type: DataTypes.INTEGER, autoIncrement: true, primaryKey: true, - comment: "The unique identifier for the signer", + comment: 'The unique identifier for the signer', }, address: { type: DataTypes.STRING, allowNull: true, - comment: "The address of the signer", + comment: 'The address of the signer', }, orderIndex: { type: DataTypes.INTEGER, allowNull: true, - comment: "The order index for the signer", + comment: 'The order index for the signer', }, pubkey: { type: DataTypes.STRING, allowNull: false, - comment: "The public key of the signer", + comment: 'The public key of the signer', }, clist: { type: DataTypes.JSONB, allowNull: true, - comment: "The capabilities list (clist) associated with the signer", + comment: 'The capabilities list (clist) associated with the signer', }, scheme: { type: DataTypes.STRING, allowNull: true, - comment: "The scheme associated with the signer, eg. ED25519", + comment: 'The scheme associated with the signer, eg. ED25519', }, transactionId: { type: DataTypes.INTEGER, allowNull: false, references: { - model: "Transactions", - key: "id", + model: 'Transactions', + key: 'id', }, - comment: "Foreign key referencing the related transaction ID", + comment: 'Foreign key referencing the related transaction ID', }, }, { sequelize, - modelName: "Signer", - tableName: "Signers", + modelName: 'Signer', + tableName: 'Signers', timestamps: true, indexes: [ { - name: "signers_pubkey_transactionid_idx", - fields: ["pubkey", "transactionId"], + name: 'signers_pubkey_transactionid_idx', + fields: ['pubkey', 'transactionId'], }, { - name: "signers_transaction_id_idx", - fields: ["transactionId"], + name: 'signers_transaction_id_idx', + fields: ['transactionId'], }, ], }, @@ -90,8 +87,8 @@ Signer.init( // Set up association with Transaction Signer.belongsTo(Transaction, { - foreignKey: "transactionId", - as: "transaction", + foreignKey: 'transactionId', + as: 'transaction', }); export default Signer; diff --git a/indexer/src/models/streaming-error.ts b/indexer/src/models/streaming-error.ts index 753eb3ba..e7c025ed 100644 --- a/indexer/src/models/streaming-error.ts +++ b/indexer/src/models/streaming-error.ts @@ -1,5 +1,5 @@ -import { Model, DataTypes, Optional } from "sequelize"; -import { sequelize } from "../config/database"; +import { Model, DataTypes, Optional } from 'sequelize'; +import { sequelize } from '../config/database'; interface StreamingErrorAttributes { id: number; @@ -7,8 +7,7 @@ interface StreamingErrorAttributes { hash: string; } -interface StreamingErrorCreationAttributes - extends Optional {} +interface StreamingErrorCreationAttributes extends Optional {} class StreamingError extends Model @@ -27,8 +26,8 @@ StreamingError.init( }, { sequelize, - modelName: "StreamingError", - tableName: "StreamingErrors", + modelName: 'StreamingError', + tableName: 'StreamingErrors', timestamps: true, }, ); diff --git a/indexer/src/models/transaction.ts b/indexer/src/models/transaction.ts index 84f5e64b..116f7ece 100644 --- a/indexer/src/models/transaction.ts +++ b/indexer/src/models/transaction.ts @@ -1,7 +1,7 @@ -import { 
Model, DataTypes } from "sequelize"; -import { sequelize } from "../config/database"; -import Block from "./block"; -import { gql, makeExtendSchemaPlugin } from "postgraphile"; +import { Model, DataTypes } from 'sequelize'; +import { sequelize } from '../config/database'; +import Block from './block'; +import { gql, makeExtendSchemaPlugin } from 'postgraphile'; export interface TransactionAttributes { id: number; @@ -34,10 +34,7 @@ export interface TransactionAttributes { /** * Represents a transaction in the blockchain. */ -class Transaction - extends Model - implements TransactionAttributes -{ +class Transaction extends Model implements TransactionAttributes { /** The unique identifier for the transaction record (e.g., 53411). */ declare id: number; @@ -121,17 +118,16 @@ Transaction.init( type: DataTypes.INTEGER, autoIncrement: true, primaryKey: true, - comment: - "The unique identifier for the transaction record (e.g., 53411).", + comment: 'The unique identifier for the transaction record (e.g., 53411).', }, blockId: { type: DataTypes.INTEGER, allowNull: true, - comment: "The ID of the associated block (e.g., 40515).", + comment: 'The ID of the associated block (e.g., 40515).', }, chainId: { type: DataTypes.INTEGER, - comment: "The ID of the blockchain network (e.g., 0).", + comment: 'The ID of the blockchain network (e.g., 0).', }, code: { type: DataTypes.JSONB, @@ -165,8 +161,7 @@ Transaction.init( }, hash: { type: DataTypes.STRING, - comment: - "The hash of the transaction (e.g., 'S7v5RXHKgYAWAsnRfYWU_SUh6Jc4g4TU2HOEALj_JSU').", + comment: "The hash of the transaction (e.g., 'S7v5RXHKgYAWAsnRfYWU_SUh6Jc4g4TU2HOEALj_JSU').", }, result: { type: DataTypes.JSONB, @@ -180,18 +175,17 @@ Transaction.init( }, nonce: { type: DataTypes.STRING, - comment: - "The nonce of the transaction (e.g., '2024-05-15T04:36:52.657Z').", + comment: "The nonce of the transaction (e.g., '2024-05-15T04:36:52.657Z').", }, num_events: { type: DataTypes.INTEGER, - comment: "The number of events generated by the transaction (e.g., 1).", + comment: 'The number of events generated by the transaction (e.g., 1).', }, pactid: { type: DataTypes.STRING, - comment: "The pact ID of the transaction.", + comment: 'The pact ID of the transaction.', }, - proof: { type: DataTypes.TEXT, comment: "The proof of the transactio." }, + proof: { type: DataTypes.TEXT, comment: 'The proof of the transactio.' 
}, requestkey: { type: DataTypes.STRING, comment: @@ -199,7 +193,7 @@ Transaction.init( }, rollback: { type: DataTypes.BOOLEAN, - comment: "Indicates whether the transaction is a rollback (e.g., false).", + comment: 'Indicates whether the transaction is a rollback (e.g., false).', }, sender: { type: DataTypes.STRING, @@ -213,7 +207,7 @@ Transaction.init( }, step: { type: DataTypes.INTEGER, - comment: "The step of the transaction (e.g., 0).", + comment: 'The step of the transaction (e.g., 0).', }, ttl: { type: DataTypes.STRING, @@ -225,82 +219,82 @@ Transaction.init( }, canonical: { type: DataTypes.BOOLEAN, - comment: "Indicates whether the transaction is canonical.", + comment: 'Indicates whether the transaction is canonical.', }, }, { sequelize, - modelName: "Transaction", + modelName: 'Transaction', indexes: [ { - name: "transactions_requestkey_idx", - fields: ["requestkey"], + name: 'transactions_requestkey_idx', + fields: ['requestkey'], }, { - name: "transactions_blockId_idx", - fields: ["blockId"], + name: 'transactions_blockId_idx', + fields: ['blockId'], }, { - name: "transactions_sender_idx", - fields: ["sender"], + name: 'transactions_sender_idx', + fields: ['sender'], }, { - name: "transactions_chainId_idx", - fields: ["chainId"], + name: 'transactions_chainId_idx', + fields: ['chainId'], }, { - name: "transactions_chainid_blockid_idx", - fields: ["chainId", "blockId"], + name: 'transactions_chainid_blockid_idx', + fields: ['chainId', 'blockId'], }, { - name: "transactions_hash_idx", - fields: ["hash"], + name: 'transactions_hash_idx', + fields: ['hash'], }, { - name: "transactions_canonical_idx", - fields: ["canonical"], + name: 'transactions_canonical_idx', + fields: ['canonical'], }, { - name: "transactions_sender_id_idx", - fields: ["sender", "id"], + name: 'transactions_sender_id_idx', + fields: ['sender', 'id'], }, // Search indexes { - name: "transactions_trgm_requestkey_idx", - fields: [sequelize.fn("LOWER", sequelize.col("requestkey"))], - using: "gin", - operator: "gin_trgm_ops", + name: 'transactions_trgm_requestkey_idx', + fields: [sequelize.fn('LOWER', sequelize.col('requestkey'))], + using: 'gin', + operator: 'gin_trgm_ops', }, { - name: "transactions_trgm_hash_idx", - fields: [sequelize.fn("LOWER", sequelize.col("hash"))], - using: "gin", - operator: "gin_trgm_ops", + name: 'transactions_trgm_hash_idx', + fields: [sequelize.fn('LOWER', sequelize.col('hash'))], + using: 'gin', + operator: 'gin_trgm_ops', }, { - name: "transactions_trgm_txid_idx", - fields: [sequelize.fn("LOWER", sequelize.col("txid"))], - using: "gin", - operator: "gin_trgm_ops", + name: 'transactions_trgm_txid_idx', + fields: [sequelize.fn('LOWER', sequelize.col('txid'))], + using: 'gin', + operator: 'gin_trgm_ops', }, { - name: "transactions_trgm_pactid_idx", - fields: [sequelize.fn("LOWER", sequelize.col("pactid"))], - using: "gin", - operator: "gin_trgm_ops", + name: 'transactions_trgm_pactid_idx', + fields: [sequelize.fn('LOWER', sequelize.col('pactid'))], + using: 'gin', + operator: 'gin_trgm_ops', }, { - name: "transactions_trgm_sender_idx", - fields: [sequelize.fn("LOWER", sequelize.col("sender"))], - using: "gin", - operator: "gin_trgm_ops", + name: 'transactions_trgm_sender_idx', + fields: [sequelize.fn('LOWER', sequelize.col('sender'))], + using: 'gin', + operator: 'gin_trgm_ops', }, ], }, ); Transaction.belongsTo(Block, { - foreignKey: "blockId", + foreignKey: 'blockId', }); export interface Transaction_ { @@ -530,7 +524,7 @@ export interface PageInfo { startCursor: string; } -export 
const kadenaExtensionPlugin = makeExtendSchemaPlugin((build) => { +export const kadenaExtensionPlugin = makeExtendSchemaPlugin(build => { return { typeDefs: gql` extend type Query { @@ -684,12 +678,7 @@ export const kadenaExtensionPlugin = makeExtendSchemaPlugin((build) => { creationTime: Datetime! difficulty: BigInt! epoch: Datetime! - events( - after: String - before: String - first: Int - last: Int - ): BlockEventsConnection! + events(after: String, before: String, first: Int, last: Int): BlockEventsConnection! flags: BigFloat! hash: String! height: BigInt! @@ -789,9 +778,9 @@ export const kadenaExtensionPlugin = makeExtendSchemaPlugin((build) => { TransactionPayload: { __resolveType(obj) { if (obj.code) { - return "ExecutionPayload"; + return 'ExecutionPayload'; } - return "ContinuationPayload"; + return 'ContinuationPayload'; }, }, Query: { @@ -815,24 +804,18 @@ export const kadenaExtensionPlugin = makeExtendSchemaPlugin((build) => { cmd: { meta: { chainId: BigInt(transaction.chainId), - creationTime: new Date( - parseInt(transaction.creationtime) * 1000, - ), + creationTime: new Date(parseInt(transaction.creationtime) * 1000), gasLimit: BigInt(transaction.gaslimit), gasPrice: parseFloat(transaction.gasprice), sender: transaction.sender, ttl: BigInt(transaction.ttl), }, - networkId: transaction.chainId - ? transaction.chainId.toString() - : "", + networkId: transaction.chainId ? transaction.chainId.toString() : '', nonce: transaction.nonce, payload: - transaction.continuation.toString() == "" + transaction.continuation.toString() == '' ? ({ - code: transaction.code - ? transaction.code.toString() - : "", + code: transaction.code ? transaction.code.toString() : '', data: JSON.stringify(transaction.data), } as ExecutionPayload) : ({ @@ -847,21 +830,15 @@ export const kadenaExtensionPlugin = makeExtendSchemaPlugin((build) => { hash: transaction.hash, id: transaction.id, result: { - badResult: transaction.rollback - ? transaction.result.toString() - : "", + badResult: transaction.rollback ? transaction.result.toString() : '', block: null, - continuation: transaction.continuation - ? transaction.continuation.toString() - : "", + continuation: transaction.continuation ? transaction.continuation.toString() : '', eventCount: BigInt(transaction.num_events), events: null, gas: BigInt(transaction.gas), - goodResult: transaction.rollback - ? "" - : transaction.result.toString(), + goodResult: transaction.rollback ? '' : transaction.result.toString(), height: BigInt(0), - logs: transaction.logs ? transaction.logs.toString() : "", + logs: transaction.logs ? transaction.logs.toString() : '', transactionId: BigInt(transaction.id), transfers: null, }, @@ -881,143 +858,127 @@ export const kadenaExtensionPlugin = makeExtendSchemaPlugin((build) => { }; }); -export const transactionByRequestKeyQueryPlugin = makeExtendSchemaPlugin( - (build) => { - return { - typeDefs: gql` - extend type Query { - transactionByRequestKey( - requestkey: String! 
- eventLimit: Int - transferLimit: Int - ): TransactionData - } - - type TransactionData { - transaction: Transaction - events: [Event] - transfers: [TransferData] - } - - type TransferData { - transfer: Transfer - contract: Contract - } - `, - resolvers: { - Query: { - transactionByRequestKey: async ( - _query, - args, - context, - resolveInfo, - ) => { - const { requestkey, eventLimit, transferLimit } = args; - const { rootPgPool } = context; - - const { rows: transactions } = await rootPgPool.query( - `SELECT * FROM public."Transactions" WHERE requestkey = $1`, - [requestkey], - ); - - if (transactions.length === 0) { - return null; - } +export const transactionByRequestKeyQueryPlugin = makeExtendSchemaPlugin(build => { + return { + typeDefs: gql` + extend type Query { + transactionByRequestKey( + requestkey: String! + eventLimit: Int + transferLimit: Int + ): TransactionData + } - const transaction = transactions[0]; - - const eventLimitClause = eventLimit ? `LIMIT $2` : ""; - const eventQueryParams = eventLimit - ? [transaction.id, eventLimit] - : [transaction.id]; - const { rows: events } = await rootPgPool.query( - `SELECT * FROM public."Events" WHERE "transactionId" = $1 ${eventLimitClause}`, - eventQueryParams, - ); - - const transferLimitClause = transferLimit ? `LIMIT $2` : ""; - const transferQueryParams = transferLimit - ? [transaction.id, transferLimit] - : [transaction.id]; - const { rows: transfers } = await rootPgPool.query( - `SELECT * FROM public."Transfers" WHERE "transactionId" = $1 ${transferLimitClause}`, - transferQueryParams, - ); - - const transferDataPromises = transfers.map( - async (transfer: any) => { - let contract = null; - transfer.toAcct = transfer.to_acct; - transfer.fromAcct = transfer.from_acct; - if (transfer.contractId) { - const { rows: contracts } = await rootPgPool.query( - `SELECT * FROM public."Contracts" WHERE id = $1`, - [transfer.contractId], - ); - contract = contracts.length > 0 ? contracts[0] : null; - } - - return { - transfer, - contract, - }; - }, - ); + type TransactionData { + transaction: Transaction + events: [Event] + transfers: [TransferData] + } - const transferData = await Promise.all(transferDataPromises); + type TransferData { + transfer: Transfer + contract: Contract + } + `, + resolvers: { + Query: { + transactionByRequestKey: async (_query, args, context, resolveInfo) => { + const { requestkey, eventLimit, transferLimit } = args; + const { rootPgPool } = context; - transaction.numEvents = events.length; + const { rows: transactions } = await rootPgPool.query( + `SELECT * FROM public."Transactions" WHERE requestkey = $1`, + [requestkey], + ); + + if (transactions.length === 0) { + return null; + } + + const transaction = transactions[0]; + + const eventLimitClause = eventLimit ? `LIMIT $2` : ''; + const eventQueryParams = eventLimit ? [transaction.id, eventLimit] : [transaction.id]; + const { rows: events } = await rootPgPool.query( + `SELECT * FROM public."Events" WHERE "transactionId" = $1 ${eventLimitClause}`, + eventQueryParams, + ); + + const transferLimitClause = transferLimit ? `LIMIT $2` : ''; + const transferQueryParams = transferLimit + ? 
[transaction.id, transferLimit] + : [transaction.id]; + const { rows: transfers } = await rootPgPool.query( + `SELECT * FROM public."Transfers" WHERE "transactionId" = $1 ${transferLimitClause}`, + transferQueryParams, + ); + + const transferDataPromises = transfers.map(async (transfer: any) => { + let contract = null; + transfer.toAcct = transfer.to_acct; + transfer.fromAcct = transfer.from_acct; + if (transfer.contractId) { + const { rows: contracts } = await rootPgPool.query( + `SELECT * FROM public."Contracts" WHERE id = $1`, + [transfer.contractId], + ); + contract = contracts.length > 0 ? contracts[0] : null; + } return { - transaction, - events, - transfers: transferData, + transfer, + contract, }; - }, + }); + + const transferData = await Promise.all(transferDataPromises); + + transaction.numEvents = events.length; + + return { + transaction, + events, + transfers: transferData, + }; }, }, - }; - }, -); + }, + }; +}); -export const transactionsByBlockIdQueryPlugin = makeExtendSchemaPlugin( - (build) => { - return { - typeDefs: gql` - extend type Query { - transactionsByBlockId( - blockId: Int! - first: Int - after: String - ): TransactionConnection - } - - type TransactionConnection { - edges: [TransactionEdge] - pageInfo: PageInfo - } - - type TransactionEdge { - node: Transaction - cursor: String - } - `, - resolvers: { - Query: { - transactionsByBlockId: async (_query, args, context, resolveInfo) => { - const { blockId, first, after } = args; - const { rootPgPool } = context; - - let cursorCondition = ""; - const limit = first || 10; - const values = [blockId, limit + 1]; - - if (after) { - cursorCondition = "AND id > $3"; - values.push(Buffer.from(after, "base64").toString("ascii")); - } +export const transactionsByBlockIdQueryPlugin = makeExtendSchemaPlugin(build => { + return { + typeDefs: gql` + extend type Query { + transactionsByBlockId(blockId: Int!, first: Int, after: String): TransactionConnection + } + + type TransactionConnection { + edges: [TransactionEdge] + pageInfo: PageInfo + } + + type TransactionEdge { + node: Transaction + cursor: String + } + `, + resolvers: { + Query: { + transactionsByBlockId: async (_query, args, context, resolveInfo) => { + const { blockId, first, after } = args; + const { rootPgPool } = context; + + let cursorCondition = ''; + const limit = first || 10; + const values = [blockId, limit + 1]; - const query = ` + if (after) { + cursorCondition = 'AND id > $3'; + values.push(Buffer.from(after, 'base64').toString('ascii')); + } + + const query = ` SELECT * FROM public."Transactions" WHERE "blockId" = $1 ${cursorCondition} @@ -1025,35 +986,31 @@ export const transactionsByBlockIdQueryPlugin = makeExtendSchemaPlugin( LIMIT $2 `; - const { rows } = await rootPgPool.query(query, values); + const { rows } = await rootPgPool.query(query, values); - const hasNextPage = rows.length > limit; - if (hasNextPage) { - rows.pop(); - } + const hasNextPage = rows.length > limit; + if (hasNextPage) { + rows.pop(); + } - const edges = rows.map((row: any) => ({ - node: row, - cursor: Buffer.from(row.id.toString(), "ascii").toString( - "base64", - ), - })); + const edges = rows.map((row: any) => ({ + node: row, + cursor: Buffer.from(row.id.toString(), 'ascii').toString('base64'), + })); - const endCursor = - edges.length > 0 ? edges[edges.length - 1].cursor : null; + const endCursor = edges.length > 0 ? 
edges[edges.length - 1].cursor : null; - return { - edges, - pageInfo: { - endCursor, - hasNextPage, - }, - }; - }, + return { + edges, + pageInfo: { + endCursor, + hasNextPage, + }, + }; }, }, - }; - }, -); + }, + }; +}); export default Transaction; diff --git a/indexer/src/models/transfer.ts b/indexer/src/models/transfer.ts index bfd3e853..21e44725 100644 --- a/indexer/src/models/transfer.ts +++ b/indexer/src/models/transfer.ts @@ -1,8 +1,8 @@ -import { Model, DataTypes } from "sequelize"; -import { sequelize } from "../config/database"; -import Transaction from "./transaction"; -import Contract from "./contract"; -import { gql, makeExtendSchemaPlugin } from "postgraphile"; +import { Model, DataTypes } from 'sequelize'; +import { sequelize } from '../config/database'; +import Transaction from './transaction'; +import Contract from './contract'; +import { gql, makeExtendSchemaPlugin } from 'postgraphile'; export interface TransferAttributes { id: number; @@ -78,12 +78,12 @@ Transfer.init( type: DataTypes.INTEGER, autoIncrement: true, primaryKey: true, - comment: "The unique identifier for the transfer record (e.g., 1799984).", + comment: 'The unique identifier for the transfer record (e.g., 1799984).', }, transactionId: { type: DataTypes.INTEGER, allowNull: true, - comment: "The ID of the associated transaction (e.g., 2022215).", + comment: 'The ID of the associated transaction (e.g., 2022215).', }, type: { type: DataTypes.STRING, @@ -93,12 +93,12 @@ Transfer.init( amount: { type: DataTypes.DECIMAL, allowNull: false, - comment: "The amount transferred (e.g., 0.0003112).", + comment: 'The amount transferred (e.g., 0.0003112).', }, chainId: { type: DataTypes.INTEGER, allowNull: false, - comment: "The ID of the blockchain network (e.g., 0).", + comment: 'The ID of the blockchain network (e.g., 0).', }, from_acct: { type: DataTypes.STRING, @@ -109,8 +109,7 @@ Transfer.init( modulehash: { type: DataTypes.STRING, allowNull: false, - comment: - "The hash of the module (e.g., 'klFkrLfpyLW-M3xjVPSdqXEMgxPPJibRt_D6qiBws6s').", + comment: "The hash of the module (e.g., 'klFkrLfpyLW-M3xjVPSdqXEMgxPPJibRt_D6qiBws6s').", }, modulename: { type: DataTypes.STRING, @@ -132,7 +131,7 @@ Transfer.init( hasTokenId: { type: DataTypes.BOOLEAN, allowNull: true, - comment: "Whether the transfer has a token ID (e.g., true).", + comment: 'Whether the transfer has a token ID (e.g., true).', }, tokenId: { type: DataTypes.STRING, @@ -143,86 +142,82 @@ Transfer.init( contractId: { type: DataTypes.INTEGER, allowNull: true, - comment: "The ID of the associated contract (optional, e.g., 1).", + comment: 'The ID of the associated contract (optional, e.g., 1).', }, canonical: { type: DataTypes.BOOLEAN, allowNull: true, - comment: "Whether the transfer is canonical", + comment: 'Whether the transfer is canonical', }, orderIndex: { type: DataTypes.INTEGER, allowNull: true, - comment: "The transfer order", + comment: 'The transfer order', }, }, { sequelize, - modelName: "Transfer", + modelName: 'Transfer', indexes: [ { - name: "transfers_type_idx", - fields: ["type"], + name: 'transfers_type_idx', + fields: ['type'], }, { - name: "transfers_transactionid_idx", - fields: ["transactionId"], + name: 'transfers_transactionid_idx', + fields: ['transactionId'], }, { - name: "transfers_hasTokenId_idx", - fields: ["hasTokenId"], + name: 'transfers_hasTokenId_idx', + fields: ['hasTokenId'], }, { - name: "transfers_contractid_idx", - fields: ["contractId"], + name: 'transfers_contractid_idx', + fields: ['contractId'], }, { - name: 
"transfers_modulename_idx", - fields: ["modulename"], + name: 'transfers_modulename_idx', + fields: ['modulename'], }, { - name: "transfers_from_acct_modulename_idx", - fields: ["from_acct", "modulename"], + name: 'transfers_from_acct_modulename_idx', + fields: ['from_acct', 'modulename'], }, { - name: "transfers_chainid_from_acct_modulename_idx", - fields: ["chainId", "from_acct", "modulename"], + name: 'transfers_chainid_from_acct_modulename_idx', + fields: ['chainId', 'from_acct', 'modulename'], }, { - name: "transfers_chainid_to_acct_modulename_idx", - fields: ["chainId", "to_acct", "modulename"], + name: 'transfers_chainid_to_acct_modulename_idx', + fields: ['chainId', 'to_acct', 'modulename'], }, { - name: "from_acct_idx", - fields: ["from_acct"], + name: 'from_acct_idx', + fields: ['from_acct'], }, { - name: "to_acct_idx", - fields: ["to_acct"], + name: 'to_acct_idx', + fields: ['to_acct'], }, ], }, ); Transfer.belongsTo(Transaction, { - foreignKey: "transactionId", - as: "transaction", + foreignKey: 'transactionId', + as: 'transaction', }); Transfer.belongsTo(Contract, { - foreignKey: "contractId", - as: "contract", + foreignKey: 'contractId', + as: 'contract', }); -export const transfersByTypeQueryPlugin = makeExtendSchemaPlugin((build) => { +export const transfersByTypeQueryPlugin = makeExtendSchemaPlugin(build => { return { typeDefs: gql` extend type Query { - transfersByType( - type: String! - first: Int - after: String - ): TransferConnection + transfersByType(type: String!, first: Int, after: String): TransferConnection } type TransferConnection { @@ -241,13 +236,13 @@ export const transfersByTypeQueryPlugin = makeExtendSchemaPlugin((build) => { const { type, first, after } = args; const { rootPgPool } = context; - let cursorCondition = ""; + let cursorCondition = ''; const limit = first || 10; const values = [type, limit + 1]; if (after) { - cursorCondition = "AND id > $3"; - values.push(Buffer.from(after, "base64").toString("ascii")); + cursorCondition = 'AND id > $3'; + values.push(Buffer.from(after, 'base64').toString('ascii')); } const query = ` @@ -267,11 +262,10 @@ export const transfersByTypeQueryPlugin = makeExtendSchemaPlugin((build) => { const edges = rows.map((row: any) => ({ node: row, - cursor: Buffer.from(row.id.toString(), "ascii").toString("base64"), + cursor: Buffer.from(row.id.toString(), 'ascii').toString('base64'), })); - const endCursor = - edges.length > 0 ? edges[edges.length - 1].cursor : null; + const endCursor = edges.length > 0 ? 
edges[edges.length - 1].cursor : null; return { edges, diff --git a/indexer/src/server/metrics.ts b/indexer/src/server/metrics.ts index f4a75a7b..341b921a 100644 --- a/indexer/src/server/metrics.ts +++ b/indexer/src/server/metrics.ts @@ -1,19 +1,19 @@ -import express from "express"; -import { collectDefaultMetrics, Registry } from "prom-client"; -import { postgraphile } from "postgraphile"; -import { getRequiredEnvString } from "../utils/helpers"; -import path from "path"; -import cors from "cors"; -import ConnectionFilterPlugin from "postgraphile-plugin-connection-filter"; -import { blockQueryPlugin } from "../models/block"; +import express from 'express'; +import { collectDefaultMetrics, Registry } from 'prom-client'; +import { postgraphile } from 'postgraphile'; +import { getRequiredEnvString } from '../utils/helpers'; +import path from 'path'; +import cors from 'cors'; +import ConnectionFilterPlugin from 'postgraphile-plugin-connection-filter'; +import { blockQueryPlugin } from '../models/block'; import { transactionByRequestKeyQueryPlugin, transactionsByBlockIdQueryPlugin, kadenaExtensionPlugin, -} from "../models/transaction"; -import { transfersByTypeQueryPlugin } from "../models/transfer"; -import { getHoldersPlugin } from "../models/balance"; -import { Pool } from "pg"; +} from '../models/transaction'; +import { transfersByTypeQueryPlugin } from '../models/transfer'; +import { getHoldersPlugin } from '../models/balance'; +import { Pool } from 'pg'; const register = new Registry(); @@ -21,20 +21,20 @@ collectDefaultMetrics({ register }); const app = express(); const PORT = 3000; -const DB_USERNAME = getRequiredEnvString("DB_USERNAME"); -const DB_PASSWORD = getRequiredEnvString("DB_PASSWORD"); -const DB_NAME = getRequiredEnvString("DB_NAME"); -const DB_HOST = getRequiredEnvString("DB_HOST"); +const DB_USERNAME = getRequiredEnvString('DB_USERNAME'); +const DB_PASSWORD = getRequiredEnvString('DB_PASSWORD'); +const DB_NAME = getRequiredEnvString('DB_NAME'); +const DB_HOST = getRequiredEnvString('DB_HOST'); -const SSL_CERT_PATH = path.resolve(__dirname, "../config/global-bundle.pem"); -const DB_SSL_ENABLED = getRequiredEnvString("DB_SSL_ENABLED"); -const isSslEnabled = DB_SSL_ENABLED === "true"; +const SSL_CERT_PATH = path.resolve(__dirname, '../config/global-bundle.pem'); +const DB_SSL_ENABLED = getRequiredEnvString('DB_SSL_ENABLED'); +const isSslEnabled = DB_SSL_ENABLED === 'true'; const DB_CONNECTION = `postgres://${DB_USERNAME}:${DB_PASSWORD}@${DB_HOST}/${DB_NAME}?sslmode=${ - isSslEnabled ? "require" : "disable" -}${isSslEnabled ? `&sslrootcert=${SSL_CERT_PATH}` : ""}`; + isSslEnabled ? 'require' : 'disable' +}${isSslEnabled ? 
`&sslrootcert=${SSL_CERT_PATH}` : ''}`; -const SCHEMAS: Array = ["public"]; +const SCHEMAS: Array = ['public']; const rootPgPool = new Pool({ connectionString: DB_CONNECTION, @@ -45,14 +45,14 @@ const rootPgPool = new Pool({ // }); export async function usePostgraphile() { - console.log("Starting GraphQL server..."); + console.log('Starting GraphQL server...'); app.use(cors()); - app.get("/metrics", async (_req, res) => { + app.get('/metrics', async (_req, res) => { try { const metrics = await register.metrics(); - res.set("Content-Type", register.contentType); + res.set('Content-Type', register.contentType); res.end(metrics); } catch (err) { res.status(500).end(err); @@ -86,9 +86,7 @@ export async function usePostgraphile() { ); app.listen(PORT, () => { console.log(`Metrics server listening at http://localhost:${PORT}/metrics`); - console.log( - `Postgraphile server listening at http://localhost:${PORT}/graphiql`, - ); + console.log(`Postgraphile server listening at http://localhost:${PORT}/graphiql`); }); } diff --git a/indexer/src/server/prometheus.yml b/indexer/src/server/prometheus.yml index bd1e773f..4a215202 100644 --- a/indexer/src/server/prometheus.yml +++ b/indexer/src/server/prometheus.yml @@ -2,7 +2,7 @@ global: scrape_interval: 15s scrape_configs: - - job_name: "kadena-indexer" - metrics_path: "/metrics" + - job_name: 'kadena-indexer' + metrics_path: '/metrics' static_configs: - targets: ['host.docker.internal:3000'] diff --git a/indexer/src/services/sync/coinbase.ts b/indexer/src/services/sync/coinbase.ts index da761157..5ffe6096 100644 --- a/indexer/src/services/sync/coinbase.ts +++ b/indexer/src/services/sync/coinbase.ts @@ -1,12 +1,12 @@ -import { closeDatabase, rootPgPool, sequelize } from "../../config/database"; -import TransactionModel from "../../models/transaction"; -import Transfer from "../../models/transfer"; -import { Transaction } from "sequelize"; -import Event, { EventAttributes } from "../../models/event"; -import { getCoinTransfers } from "./transfers"; +import { closeDatabase, rootPgPool, sequelize } from '../../config/database'; +import TransactionModel from '../../models/transaction'; +import Transfer from '../../models/transfer'; +import { Transaction } from 'sequelize'; +import Event, { EventAttributes } from '../../models/event'; +import { getCoinTransfers } from './transfers'; export async function startBackfillCoinbaseTransactions() { - console.log("Starting coinbase backfill ..."); + console.log('Starting coinbase backfill ...'); const limit = 1000; // Number of rows to process in one batch let offset = 0; @@ -20,7 +20,7 @@ export async function startBackfillCoinbaseTransactions() { const rows = res.rows; if (rows.length === 0) { - console.log("No more rows to process."); + console.log('No more rows to process.'); break; } @@ -36,7 +36,7 @@ export async function startBackfillCoinbaseTransactions() { await tx.rollback(); console.log(`Transaction for batch at offset ${offset} rolled back.`); } catch (rollbackError) { - console.error("Error during rollback:", rollbackError); + console.error('Error during rollback:', rollbackError); } break; } @@ -56,33 +56,29 @@ async function addCoinbaseTransactions(rows: Array, tx: Transaction) { return output; }); - const allData = (await Promise.all(fetchPromises)).filter( - (f) => f !== undefined, - ); + const allData = (await Promise.all(fetchPromises)).filter(f => f !== undefined); const transactionsAdded = await TransactionModel.bulkCreate( - allData.map((o) => o?.transactionAttributes ?? 
[]), + allData.map(o => o?.transactionAttributes ?? []), { transaction: tx, - returning: ["id"], + returning: ['id'], }, ); const transfersToAdd = allData .map((d, index) => { - const transfersWithTransactionId = (d?.transfersCoinAttributes ?? []).map( - (t) => ({ - ...t, - transactionId: transactionsAdded[index].id, - }), - ); + const transfersWithTransactionId = (d?.transfersCoinAttributes ?? []).map(t => ({ + ...t, + transactionId: transactionsAdded[index].id, + })); return transfersWithTransactionId; }) .flat(); const eventsToAdd = allData .map((d, index) => { - const eventsWithTransactionId = (d?.eventsAttributes ?? []).map((t) => ({ + const eventsWithTransactionId = (d?.eventsAttributes ?? []).map(t => ({ ...t, transactionId: transactionsAdded[index].id, })); @@ -112,30 +108,27 @@ export async function processCoinbaseTransaction( data: {}, chainId: block.chainId, creationtime: block.creationTime, - gaslimit: "0", - gasprice: "0", + gaslimit: '0', + gasprice: '0', hash: coinbase.reqKey, - nonce: "", + nonce: '', pactid: null, continuation: {}, - gas: "0", + gas: '0', result: coinbase.result, logs: coinbase.logs, num_events: eventsData ? eventsData.length : 0, requestkey: coinbase.reqKey, rollback: null, - sender: "coinbase", + sender: 'coinbase', sigs: [], step: null, proof: null, - ttl: "0", + ttl: '0', txid: coinbase.txId.toString(), } as any; - const transfersCoinAttributes = await getCoinTransfers( - eventsData, - transactionAttributes, - ); + const transfersCoinAttributes = await getCoinTransfers(eventsData, transactionAttributes); const eventsAttributes = eventsData.map((eventData: any) => { return { diff --git a/indexer/src/services/sync/contract.ts b/indexer/src/services/sync/contract.ts index c027a912..75b82a2f 100644 --- a/indexer/src/services/sync/contract.ts +++ b/indexer/src/services/sync/contract.ts @@ -1,27 +1,23 @@ -import { handleSingleQuery } from "../../kadena-server/utils/raw-query"; -import Contract, { ContractAttributes } from "../../models/contract"; +import { handleSingleQuery } from '../../kadena-server/utils/raw-query'; +import Contract, { ContractAttributes } from '../../models/contract'; -export async function syncContract( - chainId: number, - modulename: any, - tokenId: any, -) { +export async function syncContract(chainId: number, modulename: any, tokenId: any) { const manifestData = await handleSingleQuery({ chainId: chainId.toString(), code: `(${modulename}.get-manifest "${tokenId}")`, }); - console.log("manifestData", manifestData); + console.log('manifestData', manifestData); let contractId; if (!manifestData.error) { contractId = await saveContract( chainId, modulename, - "poly-fungible", + 'poly-fungible', tokenId, manifestData.result, ); } else { - console.log("No manifest URI found for token ID:", tokenId); + console.log('No manifest URI found for token ID:', tokenId); } return contractId; } diff --git a/indexer/src/services/sync/guards.ts b/indexer/src/services/sync/guards.ts index 5163c0a9..a3aea876 100644 --- a/indexer/src/services/sync/guards.ts +++ b/indexer/src/services/sync/guards.ts @@ -1,14 +1,14 @@ -import pLimit from "p-limit"; -import { closeDatabase, rootPgPool, sequelize } from "../../config/database"; -import { getGuardsFromBalances } from "./payload"; -import Guard from "../../models/guard"; +import pLimit from 'p-limit'; +import { closeDatabase, rootPgPool, sequelize } from '../../config/database'; +import { getGuardsFromBalances } from './payload'; +import Guard from '../../models/guard'; const CONCURRENCY_LIMIT = 4; // Number 
of concurrent fetches allowed const limitFetch = pLimit(CONCURRENCY_LIMIT); export async function startGuardsBackfill() { await sequelize.authenticate(); - console.log("Connected to the database."); + console.log('Connected to the database.'); await rootPgPool.query( ` @@ -32,8 +32,8 @@ export async function startGuardsBackfill() { `, ); - console.log("Balances backfilled successfully."); - console.log("Starting guards backfill ..."); + console.log('Balances backfilled successfully.'); + console.log('Starting guards backfill ...'); const limit = 10000; // Number of rows to process in one batch let offset = 0; @@ -47,12 +47,12 @@ export async function startGuardsBackfill() { const rows = res.rows; if (rows.length === 0) { - console.log("No more rows to process."); + console.log('No more rows to process.'); break; } // Use p-limit to ensure controlled concurrency for fetch requests - const fetchPromises = rows.map((row) => + const fetchPromises = rows.map(row => limitFetch(() => getGuardsFromBalances([ { @@ -80,7 +80,7 @@ export async function startGuardsBackfill() { await tx.rollback(); console.log(`Transaction for batch at offset ${offset} rolled back.`); } catch (rollbackError) { - console.error("Error during rollback:", rollbackError); + console.error('Error during rollback:', rollbackError); } break; } diff --git a/indexer/src/services/sync/payload.ts b/indexer/src/services/sync/payload.ts index c6f16a53..3454a1a3 100644 --- a/indexer/src/services/sync/payload.ts +++ b/indexer/src/services/sync/payload.ts @@ -1,16 +1,14 @@ -import { BlockAttributes } from "../../models/block"; -import TransactionModel, { - TransactionAttributes, -} from "../../models/transaction"; -import Event, { EventAttributes } from "../../models/event"; -import Transfer, { TransferAttributes } from "../../models/transfer"; -import { getNftTransfers, getCoinTransfers } from "./transfers"; -import { QueryTypes, Transaction } from "sequelize"; -import Signer from "../../models/signer"; -import Guard from "../../models/guard"; -import { handleSingleQuery } from "../../kadena-server/utils/raw-query"; -import { sequelize } from "../../config/database"; -import { processCoinbaseTransaction } from "./coinbase"; +import { BlockAttributes } from '../../models/block'; +import TransactionModel, { TransactionAttributes } from '../../models/transaction'; +import Event, { EventAttributes } from '../../models/event'; +import Transfer, { TransferAttributes } from '../../models/transfer'; +import { getNftTransfers, getCoinTransfers } from './transfers'; +import { QueryTypes, Transaction } from 'sequelize'; +import Signer from '../../models/signer'; +import Guard from '../../models/guard'; +import { handleSingleQuery } from '../../kadena-server/utils/raw-query'; +import { sequelize } from '../../config/database'; +import { processCoinbaseTransaction } from './coinbase'; const TRANSACTION_INDEX = 0; const RECEIPT_INDEX = 1; @@ -55,14 +53,12 @@ export async function processTransaction( try { cmdData = JSON.parse(transactionInfo.cmd); } catch (error) { - console.error( - `Error parsing cmd JSON for key ${transactionInfo.cmd}: ${error}`, - ); + console.error(`Error parsing cmd JSON for key ${transactionInfo.cmd}: ${error}`); throw error; } - let nonce = (cmdData.nonce || "").replace(/\\"/g, ""); - nonce = nonce.replace(/"/g, ""); + let nonce = (cmdData.nonce || '').replace(/\\"/g, ''); + nonce = nonce.replace(/"/g, ''); const eventsData = receiptInfo.events || []; const transactionAttributes = { blockId: block.id, @@ -81,9 +77,7 @@ 
export async function processTransaction( logs: receiptInfo.logs || null, num_events: eventsData ? eventsData.length : 0, requestkey: receiptInfo.reqKey, - rollback: receiptInfo.result - ? receiptInfo.result.status != "success" - : true, + rollback: receiptInfo.result ? receiptInfo.result.status != 'success' : true, sender: cmdData?.meta?.sender || null, sigs: sigsData, step: cmdData?.payload?.cont?.step || 0, @@ -107,10 +101,7 @@ export async function processTransaction( } as EventAttributes; }) as EventAttributes[]; - const transfersCoinAttributes = await getCoinTransfers( - eventsData, - transactionAttributes, - ); + const transfersCoinAttributes = await getCoinTransfers(eventsData, transactionAttributes); const transfersNftAttributes = await getNftTransfers( transactionAttributes.chainId, @@ -120,35 +111,30 @@ export async function processTransaction( const transfersAttributes = [transfersCoinAttributes, transfersNftAttributes] .flat() - .filter((transfer) => transfer.amount !== undefined); + .filter(transfer => transfer.amount !== undefined); try { - const { id: transactionId } = await TransactionModel.create( - transactionAttributes, - { - transaction: tx, - }, - ); - - const eventsWithTransactionId = eventsAttributes.map((event) => ({ + const { id: transactionId } = await TransactionModel.create(transactionAttributes, { + transaction: tx, + }); + + const eventsWithTransactionId = eventsAttributes.map(event => ({ ...event, transactionId, })) as EventAttributes[]; await Event.bulkCreate(eventsWithTransactionId, { transaction: tx }); - const signers = (cmdData.signers ?? []).map( - (signer: any, index: number) => ({ - address: signer.address, - orderIndex: index, - pubkey: signer.pubKey, - clist: signer.clist, - scheme: signer.scheme, - transactionId, - }), - ); + const signers = (cmdData.signers ?? []).map((signer: any, index: number) => ({ + address: signer.address, + orderIndex: index, + pubkey: signer.pubKey, + clist: signer.clist, + scheme: signer.scheme, + transactionId, + })); await Signer.bulkCreate(signers, { transaction: tx }); - const transfersWithTransactionId = transfersAttributes.map((transfer) => ({ + const transfersWithTransactionId = transfersAttributes.map(transfer => ({ ...transfer, transactionId, })) as TransferAttributes[]; @@ -157,33 +143,33 @@ export async function processTransaction( }); const balancesFrom = transfersAttributes - .filter((t) => t.from_acct !== "") - .map((t) => ({ + .filter(t => t.from_acct !== '') + .map(t => ({ account: t.from_acct, chainId: t.chainId, module: t.modulename, hasTokenId: t.hasTokenId, - tokenId: t.tokenId ?? "", // Normalize tokenId + tokenId: t.tokenId ?? '', // Normalize tokenId })); const balancesTo = transfersAttributes - .filter((t) => t.to_acct !== "") - .map((t) => ({ + .filter(t => t.to_acct !== '') + .map(t => ({ account: t.to_acct, chainId: t.chainId, module: t.modulename, hasTokenId: t.hasTokenId, - tokenId: t.tokenId ?? "", // Normalize tokenId + tokenId: t.tokenId ?? 
'', // Normalize tokenId })); const balances = [...balancesFrom, ...balancesTo]; const values = balances .map( - (balance) => + balance => `(${balance.chainId}, '${balance.account}', '${balance.module}', '${balance.tokenId}', ${balance.hasTokenId}, NOW(), NOW())`, ) - .join(", "); + .join(', '); const query = ` INSERT INTO "Balances" ("chainId", account, module, "tokenId", "hasTokenId", "createdAt", "updatedAt") @@ -207,36 +193,34 @@ export async function processTransaction( } export async function getGuardsFromBalances(balances: BalanceInsertResult[]) { - const guardPromises: Array> = balances.map( - async (balance) => { - const res = await handleSingleQuery({ - chainId: balance.chainId.toString(), - code: `(${balance.module}.details \"${balance.account}\")`, - }); - - if (res.status !== "success" || !res.result) return null; - - const result = JSON.parse(res.result ?? "{}"); - const keys = result?.guard?.keys ?? []; - const pred = result?.guard?.pred; - if (!keys?.length || !pred) return null; - - const withKeys = keys.map((key: any) => ({ - balanceId: balance.id, - account: balance.account, - publicKey: key, - predicate: pred, - })); + const guardPromises: Array> = balances.map(async balance => { + const res = await handleSingleQuery({ + chainId: balance.chainId.toString(), + code: `(${balance.module}.details \"${balance.account}\")`, + }); - return withKeys; - }, - ); + if (res.status !== 'success' || !res.result) return null; + + const result = JSON.parse(res.result ?? '{}'); + const keys = result?.guard?.keys ?? []; + const pred = result?.guard?.pred; + if (!keys?.length || !pred) return null; + + const withKeys = keys.map((key: any) => ({ + balanceId: balance.id, + account: balance.account, + publicKey: key, + predicate: pred, + })); + + return withKeys; + }); const guards = await Promise.all(guardPromises); const filteredGuards = guards .flat() - .filter((g) => g !== null && `k:${g.publicKey}` !== g.account) - .map((g) => ({ + .filter(g => g !== null && `k:${g.publicKey}` !== g.account) + .map(g => ({ balanceId: g.balanceId, publicKey: g.publicKey, predicate: g.predicate, diff --git a/indexer/src/services/sync/streaming.ts b/indexer/src/services/sync/streaming.ts index 2fce5e5a..b9f12ad8 100644 --- a/indexer/src/services/sync/streaming.ts +++ b/indexer/src/services/sync/streaming.ts @@ -1,29 +1,27 @@ -import { processPayloadKey } from "./payload"; -import { getDecoded, getRequiredEnvString } from "../../utils/helpers"; -import EventSource from "eventsource"; -import { DispatchInfo } from "../../jobs/publisher-job"; -import { uint64ToInt64 } from "../../utils/int-uint-64"; -import Block, { BlockAttributes } from "../../models/block"; -import { sequelize } from "../../config/database"; -import StreamingError from "../../models/streaming-error"; - -const SYNC_BASE_URL = getRequiredEnvString("SYNC_BASE_URL"); -const SYNC_NETWORK = getRequiredEnvString("SYNC_NETWORK"); +import { processPayloadKey } from './payload'; +import { getDecoded, getRequiredEnvString } from '../../utils/helpers'; +import EventSource from 'eventsource'; +import { DispatchInfo } from '../../jobs/publisher-job'; +import { uint64ToInt64 } from '../../utils/int-uint-64'; +import Block, { BlockAttributes } from '../../models/block'; +import { sequelize } from '../../config/database'; +import StreamingError from '../../models/streaming-error'; + +const SYNC_BASE_URL = getRequiredEnvString('SYNC_BASE_URL'); +const SYNC_NETWORK = getRequiredEnvString('SYNC_NETWORK'); export async function startStreaming() { - 
console.log("Starting streaming..."); + console.log('Starting streaming...'); const blocksAlreadyReceived = new Set(); - const eventSource = new EventSource( - `${SYNC_BASE_URL}/${SYNC_NETWORK}/block/updates`, - ); + const eventSource = new EventSource(`${SYNC_BASE_URL}/${SYNC_NETWORK}/block/updates`); eventSource.onerror = (error: any) => { - console.error("Connection error:", error); + console.error('Connection error:', error); }; - eventSource.addEventListener("BlockHeader", async (event: any) => { + eventSource.addEventListener('BlockHeader', async (event: any) => { try { const block = JSON.parse(event.data); const payload = processPayload(block.payloadWithOutputs); @@ -46,7 +44,7 @@ export async function startStreaming() { setInterval( () => { - console.log("Clearing blocks already received."); + console.log('Clearing blocks already received.'); blocksAlreadyReceived.clear(); }, 1000 * 60 * 10, @@ -110,18 +108,12 @@ async function saveBlock(parsedData: any): Promise { transaction: tx, }); - const eventsCreated = await processPayloadKey( - createdBlock, - payloadData, - tx, - ); + const eventsCreated = await processPayloadKey(createdBlock, payloadData, tx); - const uniqueRequestKeys = new Set( - eventsCreated.map((t) => t.requestkey).filter(Boolean), - ); + const uniqueRequestKeys = new Set(eventsCreated.map(t => t.requestkey).filter(Boolean)); const uniqueQualifiedEventNames = new Set( - eventsCreated.map((t) => `${t.module}.${t.name}`).filter(Boolean), + eventsCreated.map(t => `${t.module}.${t.name}`).filter(Boolean), ); await tx.commit(); diff --git a/indexer/src/services/sync/transfers.ts b/indexer/src/services/sync/transfers.ts index a11f2c30..fa01564d 100644 --- a/indexer/src/services/sync/transfers.ts +++ b/indexer/src/services/sync/transfers.ts @@ -1,7 +1,7 @@ -import { handleSingleQuery } from "../../kadena-server/utils/raw-query"; -import { TransactionAttributes } from "../../models/transaction"; -import { TransferAttributes } from "../../models/transfer"; -import { getContract, saveContract, syncContract } from "./contract"; +import { handleSingleQuery } from '../../kadena-server/utils/raw-query'; +import { TransactionAttributes } from '../../models/transaction'; +import { TransferAttributes } from '../../models/transfer'; +import { getContract, saveContract, syncContract } from './contract'; /** * Filters and processes NFT transfer events from a payload's event data. 
It identifies NFT transfer events based on @@ -18,16 +18,16 @@ export function getNftTransfers( eventsData: any, transactionAttributes: TransactionAttributes, ) { - const TRANSFER_NFT_SIGNATURE = "TRANSFER"; + const TRANSFER_NFT_SIGNATURE = 'TRANSFER'; const TRANSFER_NFT_PARAMS_LENGTH = 4; const transferNftSignature = (eventData: any) => eventData.name == TRANSFER_NFT_SIGNATURE && eventData.params.length == TRANSFER_NFT_PARAMS_LENGTH && - typeof eventData.params[0] == "string" && - typeof eventData.params[1] == "string" && - typeof eventData.params[2] == "string" && - typeof eventData.params[3] == "number"; + typeof eventData.params[0] == 'string' && + typeof eventData.params[1] == 'string' && + typeof eventData.params[2] == 'string' && + typeof eventData.params[3] == 'number'; const transferPromises = eventsData .filter(transferNftSignature) @@ -54,7 +54,7 @@ export function getNftTransfers( to_acct: to_acct, hasTokenId: true, tokenId: tokenId, - type: "poly-fungible", + type: 'poly-fungible', contractId: contractId, orderIndex: index, } as TransferAttributes; @@ -73,19 +73,16 @@ const requests: Record = {}; * @param {any} requestKey - Associated to the T. * @returns {Promise} A Promise that resolves to an array of transfer attributes specifically for coin transfers. */ -export function getCoinTransfers( - eventsData: any, - transactionAttributes: TransactionAttributes, -) { - const TRANSFER_COIN_SIGNATURE = "TRANSFER"; +export function getCoinTransfers(eventsData: any, transactionAttributes: TransactionAttributes) { + const TRANSFER_COIN_SIGNATURE = 'TRANSFER'; const TRANSFER_COIN_PARAMS_LENGTH = 3; const transferCoinSignature = (eventData: any) => eventData.name == TRANSFER_COIN_SIGNATURE && eventData.params.length == TRANSFER_COIN_PARAMS_LENGTH && - typeof eventData.params[0] == "string" && - typeof eventData.params[1] == "string" && - typeof eventData.params[2] == "number"; + typeof eventData.params[0] == 'string' && + typeof eventData.params[1] == 'string' && + typeof eventData.params[2] == 'number'; const transferPromises = eventsData .filter(transferCoinSignature) @@ -110,7 +107,7 @@ export function getCoinTransfers( contractId = await saveContract( chainId, modulename, - "fungible", + 'fungible', null, null, Number(JSON.parse(precisionData.result).int), @@ -136,7 +133,7 @@ export function getCoinTransfers( to_acct: to_acct, hasTokenId: false, tokenId: undefined, - type: "fungible", + type: 'fungible', contractId: contractId, orderIndex: index, } as TransferAttributes; diff --git a/indexer/src/utils/chainweb-node.ts b/indexer/src/utils/chainweb-node.ts index bbf477f0..eda7a774 100644 --- a/indexer/src/utils/chainweb-node.ts +++ b/indexer/src/utils/chainweb-node.ts @@ -1,7 +1,7 @@ -import { PactQueryResponse } from "../kadena-server/config/graphql-types"; +import { PactQueryResponse } from '../kadena-server/config/graphql-types'; export const formatBalance_NODE = (queryResult: PactQueryResponse) => { - const resultParsed = JSON.parse(queryResult.result ?? "{}"); + const resultParsed = JSON.parse(queryResult.result ?? '{}'); if (resultParsed?.balance?.decimal) { return Number(resultParsed.balance.decimal); } else if (resultParsed?.balance) { @@ -12,7 +12,7 @@ export const formatBalance_NODE = (queryResult: PactQueryResponse) => { }; export const formatGuard_NODE = (queryResult: PactQueryResponse) => { - const resultParsed = JSON.parse(queryResult.result ?? "{}"); + const resultParsed = JSON.parse(queryResult.result ?? 
'{}'); if (resultParsed.guard?.fun) { return { @@ -20,7 +20,7 @@ export const formatGuard_NODE = (queryResult: PactQueryResponse) => { fun: resultParsed.guard.fun, raw: JSON.stringify(resultParsed.guard), keys: [], - predicate: "", + predicate: '', }; } @@ -32,5 +32,5 @@ export const formatGuard_NODE = (queryResult: PactQueryResponse) => { }; } - return { raw: JSON.stringify(resultParsed.guard), keys: [], predicate: "" }; + return { raw: JSON.stringify(resultParsed.guard), keys: [], predicate: '' }; }; diff --git a/indexer/src/utils/helpers.ts b/indexer/src/utils/helpers.ts index 5bc1d734..5975fbf7 100644 --- a/indexer/src/utils/helpers.ts +++ b/indexer/src/utils/helpers.ts @@ -5,11 +5,11 @@ * @returns The decoded and parsed JSON object, or null if decoding or parsing fails. */ export function getDecoded(encodedData: string): any { - const decodedData = Buffer.from(encodedData, "base64").toString("utf-8"); + const decodedData = Buffer.from(encodedData, 'base64').toString('utf-8'); try { return JSON.parse(decodedData); } catch (error) { - console.error("Error decoding data:", error); + console.error('Error decoding data:', error); return null; } } @@ -21,7 +21,7 @@ export function getDecoded(encodedData: string): any { * @returns A promise that resolves after the specified delay. */ export function delay(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); + return new Promise(resolve => setTimeout(resolve, ms)); } /** @@ -34,11 +34,7 @@ export function delay(ms: number): Promise { * @param rangeSize The size of each chunk. * @returns An array of arrays, where each inner array represents a chunk with a start and end value. */ -export function splitIntoChunks( - min: number, - max: number, - rangeSize: number, -): number[][] { +export function splitIntoChunks(min: number, max: number, rangeSize: number): number[][] { const chunks = []; let current = max; if (max - min <= rangeSize) { @@ -59,7 +55,7 @@ export function splitIntoChunks( * @returns The size of the data in bytes. */ export function calculateDataSize(data: any) { - return Buffer.byteLength(JSON.stringify(data), "utf8"); + return Buffer.byteLength(JSON.stringify(data), 'utf8'); } /** diff --git a/indexer/src/utils/int-uint-64.ts b/indexer/src/utils/int-uint-64.ts index 8d8f0b1b..0e9d5dd1 100644 --- a/indexer/src/utils/int-uint-64.ts +++ b/indexer/src/utils/int-uint-64.ts @@ -3,16 +3,16 @@ * signed 64-bit integers. 
* **/ -const INT64_MIN = BigInt("-9223372036854775808"); -const INT64_MAX = BigInt("9223372036854775807"); -const UINT64_MAX = BigInt("18446744073709551615"); // 2^64 - 1 +const INT64_MIN = BigInt('-9223372036854775808'); +const INT64_MAX = BigInt('9223372036854775807'); +const UINT64_MAX = BigInt('18446744073709551615'); // 2^64 - 1 export function uint64ToInt64(uint64Value: any): bigint { const bigIntValue = BigInt(uint64Value); // Ensure the value is in the valid uint64 range if (bigIntValue < 0n || bigIntValue > UINT64_MAX) { - throw new Error("Value is out of range for uint64"); + throw new Error('Value is out of range for uint64'); } if (bigIntValue <= INT64_MAX) { @@ -23,18 +23,17 @@ export function uint64ToInt64(uint64Value: any): bigint { } export function int64ToUint64String(bigintString: any) { - const UINT64_WRAPAROUND = BigInt("18446744073709551616"); // 2^64 + const UINT64_WRAPAROUND = BigInt('18446744073709551616'); // 2^64 const int64Value = BigInt(bigintString); // Validate the range for int64 if (int64Value < INT64_MIN || int64Value > INT64_MAX) { - throw new RangeError("Value is out of range for int64"); + throw new RangeError('Value is out of range for int64'); } // Convert int64 to uint64 - const uint64Value = - int64Value < 0n ? int64Value + UINT64_WRAPAROUND : int64Value; + const uint64Value = int64Value < 0n ? int64Value + UINT64_WRAPAROUND : int64Value; return uint64Value.toString(); } diff --git a/indexer/tests/database/docker-compose.yml b/indexer/tests/database/docker-compose.yml index aefd7f97..2bb820ce 100644 --- a/indexer/tests/database/docker-compose.yml +++ b/indexer/tests/database/docker-compose.yml @@ -7,7 +7,7 @@ services: POSTGRES_PASSWORD: test_password POSTGRES_DB: test_db ports: - - "5432:5432" + - '5432:5432' networks: - test-network volumes: diff --git a/indexer/tests/database/orphans.test.ts b/indexer/tests/database/orphans.test.ts index 78612714..a3282fb4 100644 --- a/indexer/tests/database/orphans.test.ts +++ b/indexer/tests/database/orphans.test.ts @@ -3,107 +3,122 @@ import { Client } from 'pg'; import { execSync } from 'child_process'; describe('check_backward_orphans Function with Dockerized Database', () => { - let client: Client; - - before(async function() { - this.timeout(120000); // 2 minutes - - const dockerComposeDir = __dirname; - console.log('Starting Docker container in directory:', dockerComposeDir); - try { - execSync('docker-compose up -d test-db', { cwd: dockerComposeDir }); - console.log('Docker container started successfully.'); - } catch (error) { - console.error('Failed to start Docker container:', error); - throw error; - } - - console.log('Waiting for the database to be ready...'); - - await new Promise(resolve => setTimeout(resolve, 20000)); // Wait 20 seconds for the database to be ready - console.log('Database should be ready now.'); - - console.log('Connecting to the test database...'); - try { - client = new Client({ - user: 'test_user', - host: 'localhost', - database: 'test_db', - password: 'test_password', - port: 5432, - }); - await client.connect(); - console.log('Connected to the test database successfully.'); - } catch (error) { - console.error('Failed to connect to the test database:', error); - throw error; - } - }); - - after(async function() { - if (client) { - console.log('Closing the database connection...'); - await client.end(); - console.log('Database connection closed.'); - } - - const dockerComposeDir = __dirname; - console.log('Stopping and removing Docker container...'); - execSync('docker-compose 
down --volumes', { cwd: dockerComposeDir }); - console.log('Docker container stopped and removed.'); - }); - - it('should correctly identify orphan blocks', async () => { - - /* GIVEN a block that is in the buffer zone + let client: Client; + + before(async function () { + this.timeout(120000); // 2 minutes + + const dockerComposeDir = __dirname; + console.log('Starting Docker container in directory:', dockerComposeDir); + try { + execSync('docker-compose up -d test-db', { cwd: dockerComposeDir }); + console.log('Docker container started successfully.'); + } catch (error) { + console.error('Failed to start Docker container:', error); + throw error; + } + + console.log('Waiting for the database to be ready...'); + + await new Promise(resolve => setTimeout(resolve, 20000)); // Wait 20 seconds for the database to be ready + console.log('Database should be ready now.'); + + console.log('Connecting to the test database...'); + try { + client = new Client({ + user: 'test_user', + host: 'localhost', + database: 'test_db', + password: 'test_password', + port: 5432, + }); + await client.connect(); + console.log('Connected to the test database successfully.'); + } catch (error) { + console.error('Failed to connect to the test database:', error); + throw error; + } + }); + + after(async function () { + if (client) { + console.log('Closing the database connection...'); + await client.end(); + console.log('Database connection closed.'); + } + + const dockerComposeDir = __dirname; + console.log('Stopping and removing Docker container...'); + execSync('docker-compose down --volumes', { cwd: dockerComposeDir }); + console.log('Docker container stopped and removed.'); + }); + + it('should correctly identify orphan blocks', async () => { + /* GIVEN a block that is in the buffer zone WHEN the block is analyzed for canonical status THEN the block should not be marked as canonical */ - const res1 = await getBlock(5028930, 1, 'mainnet01'); + const res1 = await getBlock(5028930, 1, 'mainnet01'); - assert(res1.rows[0].canonical == null, 'Block 5028930 should not have been analyzed for canonical status yet, because it is in buffer zone'); + assert( + res1.rows[0].canonical == null, + 'Block 5028930 should not have been analyzed for canonical status yet, because it is in buffer zone', + ); - await client.query(` + await client.query(` INSERT INTO public."Blocks" (nonce,"creationTime",parent,adjacents,target,"payloadHash","chainId",weight,height,"chainwebVersion","epochStart","featureFlags",hash,"minerData","transactionsHash","outputsHash",coinbase,canonical,"createdAt","updatedAt") VALUES ('10022758274919128455',1723326500041787,'7NXUqUY3AI6I070BN8u7o6OMNdDwwS0kZZXYZviGUjk','{"6": "CcU_QgckldBQTHDEheeX4ht_GljxUAR6DCyXUrICYYY", "11": "Ns7WjS4Q4tYz7Gbe4oh7Y7J9SHma65D0exW36LjrIxI", "16": "zmG3SHVzZHFCNUlxw3kNigCJ1-kcinU1kw2u8mLtQFM"}','jVwwSVNSFOea-OCLJty6HJcrg2xbX-v8EwAAAAAAAAA','J8i-GvdqQeWwRxk9mqWOLtK1FZfqoPTaMOVnovWoR84',1,'iQ9yYAhhZmgrWgEAAAAAAAAAAAAAAAAAAAAAAAAAAAA',5028937,'mainnet01',1723323640709427,0,'CX0nyqXxIkLa66m-kv1wEHYtQ8ak5Ko-sh1o5etTvM8','{"account": "k:e7f7130f359fb1f8c87873bf858a0e9cbc3c1059f62ae715ec72e760b055e9f3", "predicate": "keys-all", "public-keys": ["e7f7130f359fb1f8c87873bf858a0e9cbc3c1059f62ae715ec72e760b055e9f3"]}','9yNSeh7rTW_j1ziKYyubdYUCefnO5K63d5RfPkHQXiM','GAWcRP0tOrk2OlapaDDTqr6oNEHi5XhlDrX4xrgdUFY','{"gas": 0, "logs": "34ed4akeGyx1MyEhuBY7M7YNjEwwfbaKbt3VxViH-cQ", "txId": 11365414, "events": [{"name": "TRANSFER", "module": {"name": "coin", "namespace": null}, "params": ["", 
"k:e7f7130f359fb1f8c87873bf858a0e9cbc3c1059f62ae715ec72e760b055e9f3", 0.9773645], "moduleHash": "klFkrLfpyLW-M3xjVPSdqXEMgxPPJibRt_D6qiBws6s"}], "reqKey": "IjdOWFVxVVkzQUk2STA3MEJOOHU3bzZPTU5kRHd3UzBrWlpYWVp2aUdVamsi", "result": {"data": "Write succeeded", "status": "success"}, "metaData": null, "continuation": null}',NULL,'2024-08-10 18:48:26.982-03','2024-08-10 18:48:26.982-03') `); - /* GIVEN a block that is not in the buffer zone + /* GIVEN a block that is not in the buffer zone WHEN the block is analyzed for canonical status and is not an orphan THEN the block should be marked as canonical */ - const validatedBlock = await getBlock(5028930, 1, 'mainnet01'); + const validatedBlock = await getBlock(5028930, 1, 'mainnet01'); - assert(validatedBlock.rows[0].canonical == true, 'Block 5028930 should be marked as canonical'); + assert(validatedBlock.rows[0].canonical == true, 'Block 5028930 should be marked as canonical'); - /* GIVEN a block that is in the buffer zone + /* GIVEN a block that is in the buffer zone WHEN the block is analyzed for canonical status THEN the block should not be marked as canonical */ - const nonValidatedBlock = await getBlock(5028931, 1, 'mainnet01'); + const nonValidatedBlock = await getBlock(5028931, 1, 'mainnet01'); - assert(nonValidatedBlock.rows[0].canonical == null, 'Block 5028931 should not have been analyzed for canonical status yet, because it is in buffer zone'); + assert( + nonValidatedBlock.rows[0].canonical == null, + 'Block 5028931 should not have been analyzed for canonical status yet, because it is in buffer zone', + ); - /* GIVEN a block that is not in the buffer zone + /* GIVEN a block that is not in the buffer zone WHEN the block is analyzed for canonical status and is an orphan THEN the block should not be marked as canonical */ - const orphanBlock = await getBlock(5028903, 1, 'mainnet01', 'YdFEVJdW77DwN1Lp8WyH7mGWF5iRRn-XIz_ENRPMozw'); + const orphanBlock = await getBlock( + 5028903, + 1, + 'mainnet01', + 'YdFEVJdW77DwN1Lp8WyH7mGWF5iRRn-XIz_ENRPMozw', + ); - assert(orphanBlock.rows[0].canonical == false, 'Block 5028903 should be an orphan block'); + assert(orphanBlock.rows[0].canonical == false, 'Block 5028903 should be an orphan block'); - /* GIVEN a block that is not in the buffer zone + /* GIVEN a block that is not in the buffer zone WHEN the block is analyzed for canonical status and is not an orphan THEN the block should be marked as canonical */ - const nonOrphanBlock = await getBlock(5028903, 1, 'mainnet01', 't-OSh9Zu0YCpwOAEsbD3pIHTEwO0c031O-hpqeSXwQc'); + const nonOrphanBlock = await getBlock( + 5028903, + 1, + 'mainnet01', + 't-OSh9Zu0YCpwOAEsbD3pIHTEwO0c031O-hpqeSXwQc', + ); - assert(nonOrphanBlock.rows[0].canonical == true, 'Block 5028903 should not be an orphan block'); - }); + assert(nonOrphanBlock.rows[0].canonical == true, 'Block 5028903 should not be an orphan block'); + }); - function getBlock(height: number, chainId: number, chainwebVersion: string, hash?: string) { - var query = ` + function getBlock(height: number, chainId: number, chainwebVersion: string, hash?: string) { + var query = ` SELECT * from public."Blocks" WHERE "height" = ${height} and "chainId" = ${chainId} and "chainwebVersion" = '${chainwebVersion}' ${hash ? 
` and "hash" = '${hash}'` : ''}; `; - return client.query(query); - } + return client.query(query); + } }); diff --git a/indexer/tests/unit/pagination.test.ts b/indexer/tests/unit/pagination.test.ts index bbb4c311..8a227027 100644 --- a/indexer/tests/unit/pagination.test.ts +++ b/indexer/tests/unit/pagination.test.ts @@ -1,21 +1,18 @@ -import { - encodeCursor, - getPageInfo, -} from "../../src/kadena-server/repository/pagination"; +import { encodeCursor, getPageInfo } from '../../src/kadena-server/repository/pagination'; const PAGE_SIZE = 5 + 1; -describe("Pagination - DESC", () => { - it("ROWS_LENGTH = PAGE_SIZE + 1", async () => { +describe('Pagination - DESC', () => { + it('ROWS_LENGTH = PAGE_SIZE + 1', async () => { const limit = PAGE_SIZE; - const order = "DESC"; + const order = 'DESC'; - const edges = ["6", "5", "4", "3", "2", "1"].map((cursor) => ({ + const edges = ['6', '5', '4', '3', '2', '1'].map(cursor => ({ cursor, - node: { id: "_" }, + node: { id: '_' }, })); const expectEdges = edges - .map((e) => ({ cursor: encodeCursor(e.cursor), node: e.node })) + .map(e => ({ cursor: encodeCursor(e.cursor), node: e.node })) .slice(0, 5); const output = getPageInfo({ @@ -28,22 +25,22 @@ describe("Pagination - DESC", () => { pageInfo: { hasNextPage: true, hasPreviousPage: false, - startCursor: "Ng==", - endCursor: "Mg==", + startCursor: 'Ng==', + endCursor: 'Mg==', }, edges: expectEdges, }); }); - it("ROWS_LENGTH = PAGE_SIZE", async () => { + it('ROWS_LENGTH = PAGE_SIZE', async () => { const limit = PAGE_SIZE; - const order = "DESC"; + const order = 'DESC'; - const edges = ["5", "4", "3", "2", "1"].map((cursor) => ({ + const edges = ['5', '4', '3', '2', '1'].map(cursor => ({ cursor, - node: { id: "_" }, + node: { id: '_' }, })); - const expectEdges = edges.map((e) => ({ + const expectEdges = edges.map(e => ({ cursor: encodeCursor(e.cursor), node: e.node, })); @@ -58,22 +55,22 @@ describe("Pagination - DESC", () => { pageInfo: { hasNextPage: false, hasPreviousPage: false, - startCursor: "NQ==", - endCursor: "MQ==", + startCursor: 'NQ==', + endCursor: 'MQ==', }, edges: expectEdges, }); }); - it("ROWS_LENGTH < PAGE_SIZE", async () => { + it('ROWS_LENGTH < PAGE_SIZE', async () => { const limit = PAGE_SIZE; - const order = "DESC"; + const order = 'DESC'; - const edges = ["4", "3", "2", "1"].map((cursor) => ({ + const edges = ['4', '3', '2', '1'].map(cursor => ({ cursor, - node: { id: "_" }, + node: { id: '_' }, })); - const expectEdges = edges.map((e) => ({ + const expectEdges = edges.map(e => ({ cursor: encodeCursor(e.cursor), node: e.node, })); @@ -88,22 +85,22 @@ describe("Pagination - DESC", () => { pageInfo: { hasNextPage: false, hasPreviousPage: false, - startCursor: "NA==", - endCursor: "MQ==", + startCursor: 'NA==', + endCursor: 'MQ==', }, edges: expectEdges, }); }); - it("hasPreviousPage = true", async () => { + it('hasPreviousPage = true', async () => { const limit = PAGE_SIZE; - const order = "DESC"; + const order = 'DESC'; - const edges = ["5", "4", "3", "2", "1"].map((cursor) => ({ + const edges = ['5', '4', '3', '2', '1'].map(cursor => ({ cursor, - node: { id: "_" }, + node: { id: '_' }, })); - const expectEdges = edges.map((e) => ({ + const expectEdges = edges.map(e => ({ cursor: encodeCursor(e.cursor), node: e.node, })); @@ -112,29 +109,29 @@ describe("Pagination - DESC", () => { edges, limit, order, - after: "6", + after: '6', }); expect(output).toEqual({ pageInfo: { hasNextPage: false, hasPreviousPage: true, - startCursor: "NQ==", - endCursor: "MQ==", + startCursor: 'NQ==', + 
endCursor: 'MQ==', }, edges: expectEdges, }); }); - it("hasPreviousPage = false", async () => { + it('hasPreviousPage = false', async () => { const limit = PAGE_SIZE; - const order = "DESC"; + const order = 'DESC'; - const edges = ["5", "4", "3", "2", "1"].map((cursor) => ({ + const edges = ['5', '4', '3', '2', '1'].map(cursor => ({ cursor, - node: { id: "_" }, + node: { id: '_' }, })); - const expectEdges = edges.map((e) => ({ + const expectEdges = edges.map(e => ({ cursor: encodeCursor(e.cursor), node: e.node, })); @@ -149,26 +146,26 @@ describe("Pagination - DESC", () => { pageInfo: { hasNextPage: false, hasPreviousPage: false, - startCursor: "NQ==", - endCursor: "MQ==", + startCursor: 'NQ==', + endCursor: 'MQ==', }, edges: expectEdges, }); }); }); -describe("Pagination - ASC", () => { - it("ROWS_LENGTH = PAGE_SIZE + 1", async () => { +describe('Pagination - ASC', () => { + it('ROWS_LENGTH = PAGE_SIZE + 1', async () => { const limit = PAGE_SIZE; - const order = "ASC"; + const order = 'ASC'; - const edges = ["1", "2", "3", "4", "5", "6"].map((cursor) => ({ + const edges = ['1', '2', '3', '4', '5', '6'].map(cursor => ({ cursor, - node: { id: "_" }, + node: { id: '_' }, })); - const edgesExpected = ["5", "4", "3", "2", "1"].map((cursor) => ({ + const edgesExpected = ['5', '4', '3', '2', '1'].map(cursor => ({ cursor: encodeCursor(cursor), - node: { id: "_" }, + node: { id: '_' }, })); const output = getPageInfo({ @@ -181,24 +178,24 @@ describe("Pagination - ASC", () => { pageInfo: { hasNextPage: false, hasPreviousPage: true, - startCursor: "NQ==", - endCursor: "MQ==", + startCursor: 'NQ==', + endCursor: 'MQ==', }, edges: edgesExpected, }); }); - it("ROWS_LENGTH = PAGE_SIZE", async () => { + it('ROWS_LENGTH = PAGE_SIZE', async () => { const limit = PAGE_SIZE; - const order = "ASC"; + const order = 'ASC'; - const edges = ["1", "2", "3", "4", "5"].map((cursor) => ({ + const edges = ['1', '2', '3', '4', '5'].map(cursor => ({ cursor, - node: { id: "_" }, + node: { id: '_' }, })); - const edgesExpected = ["5", "4", "3", "2", "1"].map((cursor) => ({ + const edgesExpected = ['5', '4', '3', '2', '1'].map(cursor => ({ cursor: encodeCursor(cursor), - node: { id: "_" }, + node: { id: '_' }, })); const output = getPageInfo({ @@ -211,24 +208,24 @@ describe("Pagination - ASC", () => { pageInfo: { hasNextPage: false, hasPreviousPage: false, - startCursor: "NQ==", - endCursor: "MQ==", + startCursor: 'NQ==', + endCursor: 'MQ==', }, edges: edgesExpected, }); }); - it("ROWS_LENGTH < PAGE_SIZE", async () => { + it('ROWS_LENGTH < PAGE_SIZE', async () => { const limit = PAGE_SIZE; - const order = "ASC"; + const order = 'ASC'; - const edges = ["1", "2", "3", "4"].map((cursor) => ({ + const edges = ['1', '2', '3', '4'].map(cursor => ({ cursor, - node: { id: "_" }, + node: { id: '_' }, })); - const edgesExpected = ["4", "3", "2", "1"].map((cursor) => ({ + const edgesExpected = ['4', '3', '2', '1'].map(cursor => ({ cursor: encodeCursor(cursor), - node: { id: "_" }, + node: { id: '_' }, })); const output = getPageInfo({ @@ -241,55 +238,55 @@ describe("Pagination - ASC", () => { pageInfo: { hasNextPage: false, hasPreviousPage: false, - startCursor: "NA==", - endCursor: "MQ==", + startCursor: 'NA==', + endCursor: 'MQ==', }, edges: edgesExpected, }); }); - it("hasPreviousPage = true", async () => { + it('hasPreviousPage = true', async () => { const limit = PAGE_SIZE; - const order = "ASC"; + const order = 'ASC'; - const edges = ["1", "2", "3", "4", "5"].map((cursor) => ({ + const edges = ['1', '2', '3', '4', 
'5'].map(cursor => ({ cursor, - node: { id: "_" }, + node: { id: '_' }, })); - const edgesExpected = ["5", "4", "3", "2", "1"].map((cursor) => ({ + const edgesExpected = ['5', '4', '3', '2', '1'].map(cursor => ({ cursor: encodeCursor(cursor), - node: { id: "_" }, + node: { id: '_' }, })); const output = getPageInfo({ edges, limit, order, - before: "6", + before: '6', }); expect(output).toEqual({ pageInfo: { hasNextPage: true, hasPreviousPage: false, - startCursor: "NQ==", - endCursor: "MQ==", + startCursor: 'NQ==', + endCursor: 'MQ==', }, edges: edgesExpected, }); }); - it("hasPreviousPage = false", async () => { + it('hasPreviousPage = false', async () => { const limit = PAGE_SIZE; - const order = "ASC"; + const order = 'ASC'; - const edges = ["1", "2", "3", "4", "5"].map((cursor) => ({ + const edges = ['1', '2', '3', '4', '5'].map(cursor => ({ cursor, - node: { id: "_" }, + node: { id: '_' }, })); - const edgesExpected = ["5", "4", "3", "2", "1"].map((cursor) => ({ + const edgesExpected = ['5', '4', '3', '2', '1'].map(cursor => ({ cursor: encodeCursor(cursor), - node: { id: "_" }, + node: { id: '_' }, })); const output = getPageInfo({ @@ -302,8 +299,8 @@ describe("Pagination - ASC", () => { pageInfo: { hasNextPage: false, hasPreviousPage: false, - startCursor: "NQ==", - endCursor: "MQ==", + startCursor: 'NQ==', + endCursor: 'MQ==', }, edges: edgesExpected, }); diff --git a/package.json b/package.json index 86b324ea..326da9c0 100644 --- a/package.json +++ b/package.json @@ -19,6 +19,8 @@ "terraform": "yarn workspace @kadena-indexer/terraform", "run-terraform-workflow": "act -W .github/workflows/terraform.yml -P ubuntu-latest=-self-hosted --artifact-server-path ./.github/workflows/.artifacts/ --secret-file ./terraform/.env", "run-indexer-workflow": "act -W .github/workflows/indexer.yml --secret-file ./indexer/.env", + "format": "prettier --write \"**/*.{yml,yaml,json,md,js,ts}\"", + "format:check": "prettier --check \"**/*.{yml,yaml,json,md,js,ts}\"", "prepare": "husky" }, "dependencies": {}, @@ -28,7 +30,7 @@ "prettier": "^3.5.1" }, "lint-staged": { - "*.{yml,yaml,go,json,md,js,ts,env.*}": [ + "*.{yml,yaml,go,json,md,js,ts}": [ "prettier --write" ] } diff --git a/terraform/README.md b/terraform/README.md index 806feea9..6282150e 100644 --- a/terraform/README.md +++ b/terraform/README.md @@ -1,15 +1,18 @@ # Kadena Indexer - Terraform Configuration ### 🚀 Getting Started + - [Introduction](#1-introduction) - [Prerequisites](#2-prerequisites) ### ⚙️ Configuration + - [Environment Setup](#3-environment-setup) - [Configure AWS Credentials](#31-configure-aws-credentials) - [Environment Variables](#32-environment-variables) ### 🛠️ Infrastructure Management + - [Terraform Operations](#4-terraform-operations) - [Initialize](#41-initialize-terraform) - [Deploy](#42-deploy-infrastructure) @@ -17,9 +20,11 @@ - [Local Testing](#44-local-workflow-testing) ## 1. Introduction + This directory contains the infrastructure configuration for running the Kadena indexer assuming that you have already set up your Kadena node. ## 2. Prerequisites + - [Terraform](https://www.terraform.io/downloads.html) - [AWS CLI](https://aws.amazon.com/cli/) - [AWS Account](https://aws.amazon.com/) @@ -28,13 +33,17 @@ This directory contains the infrastructure configuration for running the Kadena ## 3. Environment Setup ### 3.1. Configure AWS Credentials + Create an `.env` file using the `.env.template` as a reference: + ```bash cp .env.template .env ``` ### 3.2. 
Environment Variables + Required variables: + - `TF_VAR_AWS_ACCESS_KEY_ID`: Your AWS access key ID - `TF_VAR_AWS_SECRET_ACCESS_KEY`: Your AWS secret access key - `TF_VAR_AWS_ACCOUNT_ID`: Your AWS account ID @@ -47,17 +56,20 @@ Don't forget to define the remaining variables. Their values are described in [E ## 4. Terraform Operations ### 4.1. Initialize Terraform + ```bash terraform init ``` ### 4.2. Deploy Infrastructure + ```bash yarn terraform plan yarn terraform apply ``` ### 4.3. Destroy Infrastructure + ```bash yarn terraform destroy ``` @@ -67,6 +79,7 @@ yarn terraform destroy **NOTE:** This is not being actively maintained at the moment. Install act for local testing: + ```bash # For MacOS brew install act @@ -77,6 +90,7 @@ sudo apt-get install act ``` Run the terraform workflow: + ```bash yarn run-terraform-workflow ``` diff --git a/terraform/assets/chainweb-data/README.md b/terraform/assets/chainweb-data/README.md index bff459fb..bfef4c5d 100644 --- a/terraform/assets/chainweb-data/README.md +++ b/terraform/assets/chainweb-data/README.md @@ -1,40 +1,45 @@ -## Kadena Chainweb Data - -Chainweb Data serves as a data source that provides real-time information about chains, transactions, and other elements of the Chainweb blockchain. By connecting the Chainweb Node to the Chainweb Data, our library allows the node to access real-time data to facilitate its functions, such as transaction execution, block validation, and updating the blockchain state. - -### Run -```shell script -$ docker compose up chainweb-data --build -d -``` - -### Vars -The Chainweb-data container receives all database and node parameters via the .env file. Here are the variables: -``` -# Chainweb-data vars -CWD_NODE="" -CWD_DB_PORT="" -CWD_DB_USER="" -CWD_DB_NAME="" -CWD_DB_PASS="" -CWD_DB_HOST="" -``` - -### Chainweb-data complex solution -- server -- fill - -```shell script -Node info: service-port=31351 --p2p-port=31350 -``` - -### Health check -```shell script -docker inspect --format "{{json .State.Health }}" KadenaChainWebData | jq -``` - -### Endpoints -- /txs/recent gets a list of recent transactions -- /txs/search?search=foo&limit=20&offset=40 searches for transactions containing the string foo -- /stats returns a few stats such as transaction count and coins in circulation -- /coins returns just the coins in circulation - +## Kadena Chainweb Data + +Chainweb Data serves as a data source that provides real-time information about chains, transactions, and other elements of the Chainweb blockchain. By connecting the Chainweb Node to the Chainweb Data, our library allows the node to access real-time data to facilitate its functions, such as transaction execution, block validation, and updating the blockchain state. + +### Run + +```shell script +$ docker compose up chainweb-data --build -d +``` + +### Vars + +The Chainweb-data container receives all database and node parameters via the .env file. 
Here are the variables: + +``` +# Chainweb-data vars +CWD_NODE="" +CWD_DB_PORT="" +CWD_DB_USER="" +CWD_DB_NAME="" +CWD_DB_PASS="" +CWD_DB_HOST="" +``` + +### Chainweb-data complex solution + +- server +- fill + +```shell script +Node info: service-port=31351 --p2p-port=31350 +``` + +### Health check + +```shell script +docker inspect --format "{{json .State.Health }}" KadenaChainWebData | jq +``` + +### Endpoints + +- /txs/recent gets a list of recent transactions +- /txs/search?search=foo&limit=20&offset=40 searches for transactions containing the string foo +- /stats returns a few stats such as transaction count and coins in circulation +- /coins returns just the coins in circulation diff --git a/terraform/assets/chainweb-data/docker-compose.yaml b/terraform/assets/chainweb-data/docker-compose.yaml index be40c1c9..0cba56c2 100644 --- a/terraform/assets/chainweb-data/docker-compose.yaml +++ b/terraform/assets/chainweb-data/docker-compose.yaml @@ -1,46 +1,46 @@ -version: '3.9' - -# For usage instructions please take a look at the README.md file. -networks: - indexer: {} - -volumes: - pg_data: - -services: - chainweb-data: - expose: - - "8888" - ports: - - "8888:8888" - build: - dockerfile: ./src/Dockerfile - env_file: - - .env - networks: - - indexer - - postgresql: - image: postgres:14.5-bullseye - user: postgres - expose: - - "5432" - ports: - - "5432:5432" - env_file: - - .env - volumes: - - pg_data:/var/lib/postgresql/data - - ./src/scripts/init-database.sh:/docker-entrypoint-initdb.d/init.sh - networks: - - indexer - - # ########################################################################## # - # Debugging - - debug: - image: alpine:latest - entrypoint: - - /bin/sh - profiles: - - debug +version: '3.9' + +# For usage instructions please take a look at the README.md file. 
+networks: + indexer: {} + +volumes: + pg_data: + +services: + chainweb-data: + expose: + - '8888' + ports: + - '8888:8888' + build: + dockerfile: ./src/Dockerfile + env_file: + - .env + networks: + - indexer + + postgresql: + image: postgres:14.5-bullseye + user: postgres + expose: + - '5432' + ports: + - '5432:5432' + env_file: + - .env + volumes: + - pg_data:/var/lib/postgresql/data + - ./src/scripts/init-database.sh:/docker-entrypoint-initdb.d/init.sh + networks: + - indexer + + # ########################################################################## # + # Debugging + + debug: + image: alpine:latest + entrypoint: + - /bin/sh + profiles: + - debug diff --git a/terraform/assets/chainweb-data/package.json b/terraform/assets/chainweb-data/package.json index 15ad4490..1f023120 100644 --- a/terraform/assets/chainweb-data/package.json +++ b/terraform/assets/chainweb-data/package.json @@ -1,12 +1,12 @@ -{ - "name": "indexer-cwd", - "version": "1.0.0", - "license": "UNLICENSED", - "private": true, - "scripts": { - "docker:stop:cwd": "docker compose stop chainweb-data", - "docker:up:pgl": "docker compose up postgresql --build -d", - "docker:up:cwd": "docker compose up chainweb-data --build -d", - "docker:logs:cwd": "docker compose logs chainweb-data --follow" - } -} +{ + "name": "indexer-cwd", + "version": "1.0.0", + "license": "UNLICENSED", + "private": true, + "scripts": { + "docker:stop:cwd": "docker compose stop chainweb-data", + "docker:up:pgl": "docker compose up postgresql --build -d", + "docker:up:cwd": "docker compose up chainweb-data --build -d", + "docker:logs:cwd": "docker compose logs chainweb-data --follow" + } +} diff --git a/terraform/assets/chainweb-node/README.md b/terraform/assets/chainweb-node/README.md index 70bed64e..931e47b1 100644 --- a/terraform/assets/chainweb-node/README.md +++ b/terraform/assets/chainweb-node/README.md @@ -1,81 +1,76 @@ -Kadena Chainweb Node -===== - -The Chainweb Node is a central component in the Chainweb ecosystem. It plays a crucial role in executing and maintaining the operations of the Chainweb blockchain network. In our specific library, the Chainweb Node is configured to connect to a Chainweb Data. - -Initialize Database -------------------- - -``` -docker compose up -d chainweb-initialize-db -docker compose logs chainweb-initialize-db --follow -``` - -The resulting database is *untrusted*. It is fine for use in testing and -non-critical applications. - -The command can be skipped if the database has been initialized already. - -Validate Database ------------------ - -For production applications it is highly recommended to validate the database -after initialization. - -``` -docker compose up -d chainweb-validate-db -docker compose logs chainweb-validate-db --follow -``` - -The second command can take several hours depending on available hardware. -Currently, it takes about 6 hours on a cloud VM with eight CPU cores and eight -GB of RAM. Adding more CPU cores will speed up the process. - -NOTE: The chainweb database validation step does not work with testnet. - -Run Chainweb Node ------------------ - -Prerequisite: an initialized and possibly validated database. - -``` -docker compose up -d -``` - -The service API of the node is available on the docker host at port 1848. - -Options -------- - -By default the node runs in the Kadena mainnet. 
To run a node in the Kadena -testnet define the `KADENA_NETWORK` variable in an `.env` file: - -``` -cat >> .env <> .env <> .env <> .env <> .env <> .env <&2 - exit 1 - esac - - mkdir -p "/db/0" - rsync -avz --delete "rsync://$${SERVER}/db/0/rocksDb" "/db/0/" - rsync -avz --delete "rsync://$${SERVER}/db/0/rocksDb" "/db/0/" - rsync -avz --delete "rsync://$${SERVER}/db/0/rocksDb" "/db/0/" - rsync -avz --delete "rsync://$${SERVER}/db/0/rocksDb" "/db/0/" - for i in 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ; do - rsync -avz --delete "rsync://$${SERVER}/db/0/sqlite/pact-v1-chain-$${i}.*" "/db/0/sqlite/" - rsync -avz --delete "rsync://$${SERVER}/db/0/sqlite/pact-v1-chain-$${i}.*" "/db/0/sqlite/" - rsync -avz --delete "rsync://$${SERVER}/db/0/sqlite/pact-v1-chain-$${i}.*" "/db/0/sqlite/" - rsync -avz --delete "rsync://$${SERVER}/db/0/sqlite/pact-v1-chain-$${i}.*" "/db/0/sqlite/" - done - - # ########################################################################## # - # Database Validation - - chainweb-validate-db-config: - container_name: chainweb-validate-db-config - image: alpine:latest - environment: - KADENA_NETWORK: ${KADENA_NETWORK:-mainnet01} - volumes: - - chainweb-config:/config:rw - entrypoint: "/bin/sh" - command: - - "-c" - - | - cat > /config/chainweb-validate-db-config.yaml <&2; exit 1; } - rm -rf "/db/0/sqlite/*" - /chainweb/chainweb-node \ - +RTS \ - -T \ - -H1G \ - -A64M \ - -RTS \ - --config-file=/config/chainweb-validate-db-config.yaml \ - --log-handle=stderr - - # ########################################################################## # - # Chainweb Node - - chainweb-node: - container_name: chainweb-node - image: ${IMAGE:-ghcr.io/kadena-io/chainweb-node/ubuntu:latest} - environment: - KADENA_NETWORK: ${KADENA_NETWORK:-mainnet01} - restart: unless-stopped - stop_grace_period: "20s" - stop_signal: SIGINT - depends_on: - chainweb-validate-db: - condition: "service_completed_successfully" - ulimits: - nofile: - hard: 65535 - soft: 65535 - ports: - - mode: host - protocol: tcp - published: ${P2P_PORT:-1789} - target: 1789 - - mode: host - protocol: tcp - published: ${SERVICE_PORT:-1848} - target: 1848 - volumes: - - chainweb-db:/chainweb/db:rw - deploy: - restart_policy: - condition: on-failure - delay: "5s" - max_attempts: 3 - window: "120s" - update_config: - delay: "60s" - order: stop-first - entrypoint: - - /chainweb/chainweb-node - command: - - +RTS - - -T - - -H1G - - -A64M - - -RTS - - --header-stream - - --database-directory=/chainweb/db - - --chainweb-version=${KADENA_NETWORK:-mainnet01} - - --log-level=warn - - --log-handle=stdout - - --log-format=json - - --telemetry-log-handle=stderr - - --telemetry-log-format=json - - --rosetta - - --p2p-port=1789 - healthcheck: - interval: "1m" - retries: 3 - start_period: "30s" - test: - - CMD - - /bin/bash - - "-c" - - "set -e; while : ; do /bin/bash -c \"$$1\" ; shift || break ; done" - - '--' - - | - # check health endpoint - exec 3<>/dev/tcp/localhost/1848 - printf "GET /health-check HTTP/1.1\r\nhost: http://localhost:1848\r\nConnection: close\r\n\r\n" >&3 - grep -q "200 OK" <&3 || exit 1 - timeout: "10s" - - # ########################################################################## # - # Container Monitor - - container-monitor: - container_name: container-monitor - environment: - AUTOHEAL_CONTAINER_LABEL: all - image: willfarrell/autoheal - restart: unless-stopped - volumes: - - /var/run/docker.sock:/var/run/docker.sock - - # ########################################################################## # - # Debugging 
- - debug: - image: alpine:latest - entrypoint: - - /bin/sh - profiles: - - debug +version: '3.9' + +networks: + indexer: {} + +volumes: + chainweb-config: {} + chainweb-db: {} + +services: + # ########################################################################## # + # Database Initialization + chainweb-initialize-db: + container_name: chainweb-initialize-db + image: larsk/rsync + volumes: + - chainweb-db:/db:rw + env_file: + - .env + environment: + KADENA_NETWORK: ${KADENA_NETWORK:-mainnet01} + SERVER: ${DB_SYNC_SERVER:-us-w1.chainweb.com} + entrypoint: '/bin/sh' + command: + - '-c' + - | + case "$$KADENA_NETWORK" in + mainnet01) + echo "SERVER=$${SERVER:=us-w1.chainweb.com}" + ;; + testnet04) + echo "SERVER=$${SERVER:=us1.testnet.chainweb.com}" + ;; + *) + echo "unknown or unsupported chainweb version: $$KADENA_NETWORK" 1>&2 + exit 1 + esac + + mkdir -p "/db/0" + rsync -avz --delete "rsync://$${SERVER}/db/0/rocksDb" "/db/0/" + rsync -avz --delete "rsync://$${SERVER}/db/0/rocksDb" "/db/0/" + rsync -avz --delete "rsync://$${SERVER}/db/0/rocksDb" "/db/0/" + rsync -avz --delete "rsync://$${SERVER}/db/0/rocksDb" "/db/0/" + for i in 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ; do + rsync -avz --delete "rsync://$${SERVER}/db/0/sqlite/pact-v1-chain-$${i}.*" "/db/0/sqlite/" + rsync -avz --delete "rsync://$${SERVER}/db/0/sqlite/pact-v1-chain-$${i}.*" "/db/0/sqlite/" + rsync -avz --delete "rsync://$${SERVER}/db/0/sqlite/pact-v1-chain-$${i}.*" "/db/0/sqlite/" + rsync -avz --delete "rsync://$${SERVER}/db/0/sqlite/pact-v1-chain-$${i}.*" "/db/0/sqlite/" + done + + # ########################################################################## # + # Database Validation + + chainweb-validate-db-config: + container_name: chainweb-validate-db-config + image: alpine:latest + environment: + KADENA_NETWORK: ${KADENA_NETWORK:-mainnet01} + volumes: + - chainweb-config:/config:rw + entrypoint: '/bin/sh' + command: + - '-c' + - | + cat > /config/chainweb-validate-db-config.yaml <&2; exit 1; } + rm -rf "/db/0/sqlite/*" + /chainweb/chainweb-node \ + +RTS \ + -T \ + -H1G \ + -A64M \ + -RTS \ + --config-file=/config/chainweb-validate-db-config.yaml \ + --log-handle=stderr + + # ########################################################################## # + # Chainweb Node + + chainweb-node: + container_name: chainweb-node + image: ${IMAGE:-ghcr.io/kadena-io/chainweb-node/ubuntu:latest} + environment: + KADENA_NETWORK: ${KADENA_NETWORK:-mainnet01} + restart: unless-stopped + stop_grace_period: '20s' + stop_signal: SIGINT + depends_on: + chainweb-validate-db: + condition: 'service_completed_successfully' + ulimits: + nofile: + hard: 65535 + soft: 65535 + ports: + - mode: host + protocol: tcp + published: ${P2P_PORT:-1789} + target: 1789 + - mode: host + protocol: tcp + published: ${SERVICE_PORT:-1848} + target: 1848 + volumes: + - chainweb-db:/chainweb/db:rw + deploy: + restart_policy: + condition: on-failure + delay: '5s' + max_attempts: 3 + window: '120s' + update_config: + delay: '60s' + order: stop-first + entrypoint: + - /chainweb/chainweb-node + command: + - +RTS + - -T + - -H1G + - -A64M + - -RTS + - --header-stream + - --database-directory=/chainweb/db + - --chainweb-version=${KADENA_NETWORK:-mainnet01} + - --log-level=warn + - --log-handle=stdout + - --log-format=json + - --telemetry-log-handle=stderr + - --telemetry-log-format=json + - --rosetta + - --p2p-port=1789 + healthcheck: + interval: '1m' + retries: 3 + start_period: '30s' + test: + - CMD + - /bin/bash + - '-c' + - 'set -e; while : ; do 
/bin/bash -c "$$1" ; shift || break ; done' + - '--' + - | + # check health endpoint + exec 3<>/dev/tcp/localhost/1848 + printf "GET /health-check HTTP/1.1\r\nhost: http://localhost:1848\r\nConnection: close\r\n\r\n" >&3 + grep -q "200 OK" <&3 || exit 1 + timeout: '10s' + + # ########################################################################## # + # Container Monitor + + container-monitor: + container_name: container-monitor + environment: + AUTOHEAL_CONTAINER_LABEL: all + image: willfarrell/autoheal + restart: unless-stopped + volumes: + - /var/run/docker.sock:/var/run/docker.sock + + # ########################################################################## # + # Debugging + + debug: + image: alpine:latest + entrypoint: + - /bin/sh + profiles: + - debug diff --git a/terraform/assets/chainweb-node/package.json b/terraform/assets/chainweb-node/package.json index 7872c465..26bce699 100644 --- a/terraform/assets/chainweb-node/package.json +++ b/terraform/assets/chainweb-node/package.json @@ -1,24 +1,22 @@ -{ - "name": "indexer-cwn", - "version": "1.0.0", - "description": "", - "main": "index.js", - "scripts": { - "init-node": "./src/init-cwnode.sh", - "docker:stop:cwn": "docker compose stop chainweb-node", - "docker:pull:cwn": "docker compose pull chainweb-node", - "docker:up:cwn": "docker compose up chainweb-node --build -d", - "docker:logs:cwd": "docker compose logs chainweb-node --follow", - - "docker:up:initdb": "docker compose up chainweb-initialize-db", - "docker:stop:initdb": "docker compose stop chainweb-initialize-db", - "docker:logs:initdb": "docker compose logs chainweb-initialize-db --follow", - - "docker:up:validatedb": "docker compose up chainweb-validate-db", - "docker:stop:validatedb": "docker compose stop chainweb-validate-db", - "docker:logs:validatedb": "docker compose logs chainweb-validate-db --follow" - }, - "keywords": [], - "author": "", - "license": "ISC" -} +{ + "name": "indexer-cwn", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "init-node": "./src/init-cwnode.sh", + "docker:stop:cwn": "docker compose stop chainweb-node", + "docker:pull:cwn": "docker compose pull chainweb-node", + "docker:up:cwn": "docker compose up chainweb-node --build -d", + "docker:logs:cwd": "docker compose logs chainweb-node --follow", + "docker:up:initdb": "docker compose up chainweb-initialize-db", + "docker:stop:initdb": "docker compose stop chainweb-initialize-db", + "docker:logs:initdb": "docker compose logs chainweb-initialize-db --follow", + "docker:up:validatedb": "docker compose up chainweb-validate-db", + "docker:stop:validatedb": "docker compose stop chainweb-validate-db", + "docker:logs:validatedb": "docker compose logs chainweb-validate-db --follow" + }, + "keywords": [], + "author": "", + "license": "ISC" +} diff --git a/web/README.md b/web/README.md index 3cca7455..37b984d3 100644 --- a/web/README.md +++ b/web/README.md @@ -3,12 +3,13 @@ ========== ## Guidelines -* Use the present tense ("Add feature" not "Added feature") and the imperative mood ("Move class to..." not "Moves class to...") on commits and use the name issue on pull requests. -* Pull requests must be reviewed before merged. -* Done is better than perfect. Does it work as expected? Ship now, iterate later. -* All contributions must have tests. Remember to verify the [Github Actions CI status](https://github.com/hack-a-chain-software/indexer-kadena/actions/workflows/CI.yaml). 
-* Every commit is checked using [Github Actions](https://github.com/hack-a-chain-software/indexer-kadena/actions). -* If the CI status are not passing, the deploy will not work. + +- Use the present tense ("Add feature" not "Added feature") and the imperative mood ("Move class to..." not "Moves class to...") on commits, and use the issue name on pull requests. +- Pull requests must be reviewed before being merged. +- Done is better than perfect. Does it work as expected? Ship now, iterate later. +- All contributions must have tests. Remember to verify the [GitHub Actions CI status](https://github.com/hack-a-chain-software/indexer-kadena/actions/workflows/CI.yaml). +- Every commit is checked using [GitHub Actions](https://github.com/hack-a-chain-software/indexer-kadena/actions). +- If the CI status is not passing, the deploy will not work. ## Coding Style @@ -17,11 +18,13 @@ - Vue: https://vuejs.org/style-guide/ ## Task Management -* GitHub Issues is used to track all tasks that needed to be done. -* Kadscan board is used to get a decent look on what's going on wright now. -* Every two weeks all done tasks are put together in a Milestone and the current Sprint is closed. + +- GitHub Issues is used to track all tasks that need to be done. +- The Kadscan board is used to get a clear view of what's going on right now. +- Every two weeks, all completed tasks are grouped into a Milestone and the current Sprint is closed. ## Directory Structure + Here's a brief overview of the structure: ```bash @@ -36,29 +39,35 @@ Here's a brief overview of the structure: ``` ## Features + - [Nuxt 3](https://v3.nuxtjs.org/) - [Tailwind CSS](https://tailwindcss.com/) ## Installation + Kadscan is powered by [**Nuxt**](https://nuxt.com/). If you have any problems configuring your environment, remember to read the [Nuxt Documentation](https://nuxt.com/docs). ------------------ +--- #### Steps -1) Clone the repository: + +1. Clone the repository: + ```bash $ gh repo clone hack-a-chain-software/indexer-kadena $ cd indexer-kadena ``` -2) Check all packages and copy the .env.example file and edit it with your environment config: +2. Check all packages and copy the .env.example file and edit it with your environment config: + ```bash $ cp ./front/.env.example ./front/.env ``` -3) Install frontend dependencies via PNPM +3. Install frontend dependencies via PNPM: + ```bash $ pnpm install ``` diff --git a/web/app.config.ts b/web/app.config.ts index 28b1d729..f9cccd33 100644 --- a/web/app.config.ts +++ b/web/app.config.ts @@ -1,5 +1,5 @@ -import * as config from './config' +import * as config from './config'; export default defineAppConfig({ - ...config -}) + ...config, +}); diff --git a/web/composables/balances.ts b/web/composables/balances.ts index 3b6e3d5e..f676d8e7 100644 --- a/web/composables/balances.ts +++ b/web/composables/balances.ts @@ -1,26 +1,20 @@ -import { unknownToken, staticTokens } from '../constants/tokens' +import { unknownToken, staticTokens } from '../constants/tokens'; -export const transformRawBalances = ({ - prices, - allBalances, -}: any) => { +export const transformRawBalances = ({ prices, allBalances }: any) => { if (!allBalances) { - return [] + return []; } const balancesObj = allBalances.nodes .sort((a: any, b: any) => a.chainId - b.chainId) .reduce((prev: any, current: any) => { - const { - balance, - module = '', - } = current || {} + const { balance, module = '' } = current || {}; - const formatedModule = current.module === 'coin' ?
'kadena' : current.module + const formatedModule = current.module === 'coin' ? 'kadena' : current.module; - const metadata = staticTokens.find(({ module }) => current.module === module) || unknownToken + const metadata = staticTokens.find(({ module }) => current.module === module) || unknownToken; - const etl = prices?.find(({ id }: any) => formatedModule.includes(id)) + const etl = prices?.find(({ id }: any) => formatedModule.includes(id)); if (!prev[module]) { prev[module] = { @@ -30,20 +24,20 @@ export const transformRawBalances = ({ balances: [], ...etl, - } + }; } - prev[module].balance = prev[module].balance + Number(balance) + prev[module].balance = prev[module].balance + Number(balance); prev[module].balances.push({ ...etl, ...current, - }) + }); - return prev - }, {}) + return prev; + }, {}); return Object.values(balancesObj).sort((a: any, b: any) => { - return (b.current_price || 0) - (a.current_price || 0) - }) -} + return (b.current_price || 0) - (a.current_price || 0); + }); +}; diff --git a/web/composables/blocks.ts b/web/composables/blocks.ts index c5a56dca..ed8c3486 100644 --- a/web/composables/blocks.ts +++ b/web/composables/blocks.ts @@ -1,3 +1,3 @@ -export const useBlockMiner = (minerData: string) => JSON.parse(minerData) +export const useBlockMiner = (minerData: string) => JSON.parse(minerData); -export const useBlockCoinbase = (coinbase: string) => JSON.parse(coinbase) +export const useBlockCoinbase = (coinbase: string) => JSON.parse(coinbase); diff --git a/web/composables/csv.ts b/web/composables/csv.ts index b2c044a3..5ca15990 100644 --- a/web/composables/csv.ts +++ b/web/composables/csv.ts @@ -1,37 +1,44 @@ -import { convertArrayToCSV } from 'convert-array-to-csv' +import { convertArrayToCSV } from 'convert-array-to-csv'; function flatObjectToString(obj) { - var s = ""; + var s = ''; Object.keys(obj).map(key => { if (obj[key] === null) { - s += key + ":"; + s += key + ':'; } else if (obj[key].toLocaleDateString) { - s += key + ": " + obj[key].toLocaleDateString() + "\n"; + s += key + ': ' + obj[key].toLocaleDateString() + '\n'; } else if (obj[key] instanceof Array) { - s += key + ":\n" + listToFlatString(obj[key]); - } else if (typeof obj[key] == "object") { - s += key + ":\n" + flatObjectToString(obj[key]); + s += key + ':\n' + listToFlatString(obj[key]); + } else if (typeof obj[key] == 'object') { + s += key + ':\n' + flatObjectToString(obj[key]); } else { - s += key + ":" + obj[key]; + s += key + ':' + obj[key]; } - s += "\n"; + s += '\n'; }); return s; } function listToFlatString(list) { - var s = ""; + var s = ''; list.map(item => { Object.keys(item).map(key => { - s += ""; + s += ''; if (item[key] instanceof Array) { - s += key + "\n" + listToFlatString(item[key]); - } else if (typeof item[key] == "object" && item[key] !== null) { - s += key + ": " + flatObjectToString(item[key]); + s += key + '\n' + listToFlatString(item[key]); + } else if (typeof item[key] == 'object' && item[key] !== null) { + s += key + ': ' + flatObjectToString(item[key]); } else { - s += key + ": " + (item[key] === null ? "" : item[key].toLocaleDateString ? item[key].toLocaleDateString : item[key].toString()); + s += + key + + ': ' + + (item[key] === null + ? '' + : item[key].toLocaleDateString + ? 
item[key].toLocaleDateString + : item[key].toString()); } - s += "\n"; + s += '\n'; }); }); return s; @@ -39,29 +46,32 @@ function listToFlatString(list) { function flatten(object, addToList, prefix) { Object.keys(object).map(key => { - if (object[key] === null) { - addToList[prefix + key] = ""; - } else - if (object[key] instanceof Array) { - // addToList[prefix + key] = listToFlatString(object[key]); - for (const i in object[key]) { - flatten(object[key][i], addToList, prefix + key + "." + i + '.') - } - } else if (typeof object[key] == 'object' && !object[key].toLocaleDateString) { - flatten(object[key], addToList, prefix + key + '.'); - } else { - addToList[prefix + key] = object[key]; + if (object[key] === null) { + addToList[prefix + key] = ''; + } else if (object[key] instanceof Array) { + // addToList[prefix + key] = listToFlatString(object[key]); + for (const i in object[key]) { + flatten(object[key][i], addToList, prefix + key + '.' + i + '.'); } + } else if (typeof object[key] == 'object' && !object[key].toLocaleDateString) { + flatten(object[key], addToList, prefix + key + '.'); + } else { + addToList[prefix + key] = object[key]; + } }); return addToList; } export function transactionToCsv({ transaction, transfers, events }: any) { - const flattenedData = flatten({ - ...transaction, - events, - transfers: transfers.map(({ transfer }: any) => transfer), - }, {}, ''); + const flattenedData = flatten( + { + ...transaction, + events, + transfers: transfers.map(({ transfer }: any) => transfer), + }, + {}, + '', + ); // transfers.forEach(({ transfer }: any, index: number) => { // const flatTransfer = flatten(transfer, flattenedData, `transfer_${index}_`); @@ -73,7 +83,7 @@ export function transactionToCsv({ transaction, transfers, events }: any) { // Object.assign(flattenedData, flatEvent); // }); - const csv = convertArrayToCSV([flattenedData]) + const csv = convertArrayToCSV([flattenedData]); return csv; } @@ -81,8 +91,7 @@ export function transactionToCsv({ transaction, transfers, events }: any) { export function blockToCsv(block: any) { const flattenedData = flatten(block, {}, ''); - - const csv = convertArrayToCSV([flattenedData]) + const csv = convertArrayToCSV([flattenedData]); return csv; } @@ -90,7 +99,8 @@ export function blockToCsv(block: any) { export function downloadCSV(csv: string, filename: string) { const blob = new Blob([csv], { type: 'text/csv;charset=utf-8;' }); const link = document.createElement('a'); - if ((navigator as any).msSaveBlob) { // IE 10+ + if ((navigator as any).msSaveBlob) { + // IE 10+ (navigator as any).msSaveBlob(blob, filename); } else { link.href = URL.createObjectURL(blob); diff --git a/web/composables/nfts.ts b/web/composables/nfts.ts index cbd713d3..8f46a1f9 100644 --- a/web/composables/nfts.ts +++ b/web/composables/nfts.ts @@ -1,13 +1,13 @@ const formatNftDatum = (datum: any) => { return { - attributes: (datum.attributes && typeof datum.attributes !== 'string') ? datum.attributes : [], - collection: datum.artistName || "Unknown collection", - name: datum.title || datum.name || "Unknown name", - description: datum.description || "No description available.", + attributes: datum.attributes && typeof datum.attributes !== 'string' ? 
datum.attributes : [], + collection: datum.artistName || 'Unknown collection', + name: datum.title || datum.name || 'Unknown name', + description: datum.description || 'No description available.', createdAt: datum.creationDate || datum['create-date'] || null, image: datum.assetUrl || datum.thumbnailUrl || null, - } -} + }; +}; export const useNft = (contract?: any) => { if (!contract) { @@ -15,15 +15,13 @@ export const useNft = (contract?: any) => { image: null, attributes: [], createdAt: null, - name: "Unknown name", - collection: "Unknown collection", - description: "No description available.", - } + name: 'Unknown name', + collection: 'Unknown collection', + description: 'No description available.', + }; } - const { - data, - } = JSON.parse(contract?.metadata); + const { data } = JSON.parse(contract?.metadata); return formatNftDatum(data[0].datum); -} +}; diff --git a/web/composables/number.ts b/web/composables/number.ts index 86b6640f..57023106 100644 --- a/web/composables/number.ts +++ b/web/composables/number.ts @@ -1,21 +1,21 @@ export const money = new Intl.NumberFormat('en-US', { - style: "currency", - currency: "USD", + style: 'currency', + currency: 'USD', minimumFractionDigits: 2, - maximumFractionDigits: 2 -}) + maximumFractionDigits: 2, +}); export const moneyCompact = new Intl.NumberFormat('en-US', { - style: "currency", - currency: "USD", - notation: "compact", + style: 'currency', + currency: 'USD', + notation: 'compact', minimumFractionDigits: 2, - maximumFractionDigits: 2 -}) + maximumFractionDigits: 2, +}); export const integer = new Intl.NumberFormat('en-US', { minimumFractionDigits: 0, -}) +}); export const customInteger = (value: any) => { const integerDigits = value === 0 ? 1 : Math.floor(Math.log10(Math.abs(value))) + 1; @@ -24,12 +24,11 @@ export const customInteger = (value: any) => { const formatter = new Intl.NumberFormat('en-US', { minimumFractionDigits: fractionDigits, - maximumFractionDigits: fractionDigits + maximumFractionDigits: fractionDigits, }); return formatter.format(value); -} - +}; export const customMoney = (value: any) => { const integerDigits = value === 0 ? 1 : Math.floor(Math.log10(Math.abs(value))) + 1; @@ -37,11 +36,11 @@ export const customMoney = (value: any) => { const fractionDigits = integerDigits < 1 ? 2 - integerDigits : 2; const formatter = new Intl.NumberFormat('en-US', { - style: "currency", - currency: "USD", + style: 'currency', + currency: 'USD', minimumFractionDigits: fractionDigits, - maximumFractionDigits: fractionDigits + maximumFractionDigits: fractionDigits, }); return formatter.format(value); -} +}; diff --git a/web/composables/paginate.ts b/web/composables/paginate.ts index 067c8fcb..340ea534 100644 --- a/web/composables/paginate.ts +++ b/web/composables/paginate.ts @@ -1,16 +1,16 @@ -import { ref, watch, onMounted } from 'vue' -import { useRoute, useRouter } from 'vue-router' +import { ref, watch, onMounted } from 'vue'; +import { useRoute, useRouter } from 'vue-router'; export interface PaginateInterface { - key: string - query: string + key: string; + query: string; } export function usePagination(defaultLimit = 20) { - const route = useRoute() - const router = useRouter() + const route = useRoute(); + const router = useRouter(); - const page = ref(isNaN(Number(route.query.page)) ? 1 : Number(route.query.page)) + const page = ref(isNaN(Number(route.query.page)) ? 
1 : Number(route.query.page)); const limit = ref(defaultLimit); const cursor = ref(undefined); @@ -27,34 +27,34 @@ export function usePagination(defaultLimit = 20) { function getLastPageParams() { return { last: limit.value, - } + }; } function getFirstPageParams() { return { first: limit.value, - } + }; } function getNextPageParams(pageInfo: any) { return { after: pageInfo.endCursor, first: limit.value, - } + }; } function getPrevPageParams(pageInfo: any) { return { before: pageInfo.startCursor, last: limit.value, - } + }; } - function updateURL (newPage: number, newCursor: string | null) { + function updateURL(newPage: number, newCursor: string | null) { if (!newPage || !newCursor) { const { page, cursor, ...rest } = route.query; - router.replace({ query: rest }) + router.replace({ query: rest }); return; } @@ -68,18 +68,18 @@ export function usePagination(defaultLimit = 20) { query.cursor = newCursor; } - router.replace({ query }) + router.replace({ query }); } - function updatePage (newPage: number, pageInfo: any, totalCount: any, totalPages: any) { + function updatePage(newPage: number, pageInfo: any, totalCount: any, totalPages: any) { if (newPage === page.value) { - return + return; } if (newPage === totalPages) { const newParams = getLastPageParams(); - page.value = newPage + page.value = newPage; params.value = newParams; return; @@ -88,7 +88,7 @@ export function usePagination(defaultLimit = 20) { if (newPage === 1) { const newParams = getFirstPageParams(); - page.value = newPage + page.value = newPage; params.value = newParams; return; @@ -97,7 +97,7 @@ export function usePagination(defaultLimit = 20) { if (newPage > page.value) { const newParams = getNextPageParams(pageInfo); - page.value = newPage + page.value = newPage; params.value = newParams; return; @@ -106,7 +106,7 @@ export function usePagination(defaultLimit = 20) { if (newPage < page.value) { const newParams = getPrevPageParams(pageInfo); - page.value = newPage + page.value = newPage; params.value = newParams; return; @@ -114,8 +114,8 @@ export function usePagination(defaultLimit = 20) { } watch([page, cursor], ([newPage, newCursor]) => { - updateURL(newPage, newCursor) - }) + updateURL(newPage, newCursor); + }); return { page, @@ -124,5 +124,5 @@ export function usePagination(defaultLimit = 20) { params, updatePage, updateCursor, - } + }; } diff --git a/web/composables/search.ts b/web/composables/search.ts index 9d4c4efc..a00369be 100644 --- a/web/composables/search.ts +++ b/web/composables/search.ts @@ -1,4 +1,4 @@ -import debounce from 'lodash/debounce' +import debounce from 'lodash/debounce'; import { gql } from 'nuxt-graphql-request/utils'; const allQuery = gql` @@ -28,7 +28,7 @@ const allQuery = gql` } } } -` +`; const searchBlocksQuery = gql` query SearchBlocks($searchTerm: String!, $limit: Int!, $heightFilter: Int) { @@ -108,7 +108,7 @@ const filters = [ }, ]; -export function useSearch () { +export function useSearch() { const data = reactive({ query: '', open: false, @@ -164,11 +164,11 @@ export function useSearch () { function shouldRedirectBeforeSearch(search: any) { if (kadenaAddressRegex.test(search)) { - return "account"; + return 'account'; } if (requestKeyRegex.test(search)) { - return "transactions"; + return 'transactions'; } } @@ -241,14 +241,14 @@ export function useSearch () { }; const close = () => { - data.open = false - } + data.open = false; + }; const cleanup = () => { - data.query = '' - data.searched = null - close() - } + data.query = ''; + data.searched = null; + close(); + }; return { 
data, diff --git a/web/composables/string.ts b/web/composables/string.ts index 0b1eb3b5..9be05cae 100644 --- a/web/composables/string.ts +++ b/web/composables/string.ts @@ -1,29 +1,19 @@ -export const shortenAddress = ( - address: string, - chars = 4 -): string => { +export const shortenAddress = (address: string, chars = 4): string => { if (!address) { - return '' + return ''; } if (!address.includes('k:') && address.length <= 20) { - return address + return address; } - return `${address.slice(0, chars)}...${address.slice( - -chars - )}` -} + return `${address.slice(0, chars)}...${address.slice(-chars)}`; +}; -export const shortenString = ( - string: string, - chars = 4 -): string => { +export const shortenString = (string: string, chars = 4): string => { if (!string) { - return '' + return ''; } - return `${string.slice(0, chars)}...${string.slice( - -chars - )}` -} + return `${string.slice(0, chars)}...${string.slice(-chars)}`; +}; diff --git a/web/composables/tokens.ts b/web/composables/tokens.ts index 5b57ecbc..99b627a4 100644 --- a/web/composables/tokens.ts +++ b/web/composables/tokens.ts @@ -3,173 +3,173 @@ export const unknownNft = { symbol: 'ukn', id: 'unknown', name: 'unknown', -} +}; export const unknownToken = { image: '', symbol: 'ukn', id: 'unknown', name: 'unknown', -} +}; export const staticTokens = [ { - name: "ARKD", - symbol: "ARKD", - coingeckoId: "", - module: "arkade.token", - icon: "https://swap.ecko.finance/images/crypto/ark.png", + name: 'ARKD', + symbol: 'ARKD', + coingeckoId: '', + module: 'arkade.token', + icon: 'https://swap.ecko.finance/images/crypto/ark.png', }, { - id: "kadena", - module: "coin", - symbol: "KDA", - coingeckoId: "", - name: "Kadena", - icon: "https://swap.ecko.finance/images/crypto/kda-crypto.svg", + id: 'kadena', + module: 'coin', + symbol: 'KDA', + coingeckoId: '', + name: 'Kadena', + icon: 'https://swap.ecko.finance/images/crypto/kda-crypto.svg', }, { - name: "KAYC", - symbol: "KAYC", - coingeckoId: "", - module: "free.KAYC", - icon: "https://swap.ecko.finance/images/crypto/kayc.svg", + name: 'KAYC', + symbol: 'KAYC', + coingeckoId: '', + module: 'free.KAYC', + icon: 'https://swap.ecko.finance/images/crypto/kayc.svg', }, { - symbol: "BKA", - coingeckoId: "", - name: "Backalley", - module: "free.backalley", - icon: "https://swap.ecko.finance/images/crypto/bka.svg", + symbol: 'BKA', + coingeckoId: '', + name: 'Backalley', + module: 'free.backalley', + icon: 'https://swap.ecko.finance/images/crypto/bka.svg', }, { - name: "CRNA", - symbol: "CRNA", - coingeckoId: "", - module: "free.corona-token", - icon: "https://swap.ecko.finance/images/crypto/corona.svg", + name: 'CRNA', + symbol: 'CRNA', + coingeckoId: '', + module: 'free.corona-token', + icon: 'https://swap.ecko.finance/images/crypto/corona.svg', }, { - name: "Crankk", - symbol: "CRKK", - coingeckoId: "", - module: "free.crankk01", - icon: "https://swap.ecko.finance/images/crypto/crankk.png", + name: 'Crankk', + symbol: 'CRKK', + coingeckoId: '', + module: 'free.crankk01', + icon: 'https://swap.ecko.finance/images/crypto/crankk.png', }, { - name: "CFLY", - symbol: "CFLY", - coingeckoId: "", - module: "free.cyberfly_token", - icon: "https://swap.ecko.finance/images/crypto/cfly.svg", + name: 'CFLY', + symbol: 'CFLY', + coingeckoId: '', + module: 'free.cyberfly_token', + icon: 'https://swap.ecko.finance/images/crypto/cfly.svg', }, { - name: "DOC", - symbol: "DOC", - coingeckoId: "", - module: "free.docu", - icon: "https://swap.ecko.finance/images/crypto/docushield.svg", + name: 'DOC', + 
symbol: 'DOC', + coingeckoId: '', + module: 'free.docu', + icon: 'https://swap.ecko.finance/images/crypto/docushield.svg', }, { - name: "FINUX", - symbol: "FINUX", - coingeckoId: "", - module: "free.finux", - icon: "https://swap.ecko.finance/images/crypto/finux.png", + name: 'FINUX', + symbol: 'FINUX', + coingeckoId: '', + module: 'free.finux', + icon: 'https://swap.ecko.finance/images/crypto/finux.png', }, { - symbol: "JDE", - name: "Jodie", - coingeckoId: "", - module: "free.jodie-token", - icon: "https://swap.ecko.finance/images/crypto/jodie.svg", + symbol: 'JDE', + name: 'Jodie', + coingeckoId: '', + module: 'free.jodie-token', + icon: 'https://swap.ecko.finance/images/crypto/jodie.svg', }, { - name: "Kapy", - symbol: "KAPY", - coingeckoId: "", - module: "free.kapybara-token", - icon: "https://swap.ecko.finance/images/crypto/kapy.svg", + name: 'Kapy', + symbol: 'KAPY', + coingeckoId: '', + module: 'free.kapybara-token', + icon: 'https://swap.ecko.finance/images/crypto/kapy.svg', }, { - id: "kishu-ken", - symbol: "KISHK", - coingeckoId: "", - name: "Kishu Ken", - module: "free.kishu-ken", - icon: "https://swap.ecko.finance/images/crypto/kishk.svg", + id: 'kishu-ken', + symbol: 'KISHK', + coingeckoId: '', + name: 'Kishu Ken', + module: 'free.kishu-ken', + icon: 'https://swap.ecko.finance/images/crypto/kishk.svg', }, { - name: "MAGA", - symbol: "MAGA", - coingeckoId: "", - module: "free.maga", - icon: "https://swap.ecko.finance/images/crypto/maga.png", + name: 'MAGA', + symbol: 'MAGA', + coingeckoId: '', + module: 'free.maga', + icon: 'https://swap.ecko.finance/images/crypto/maga.png', }, { - name: "Hype", - symbol: "HYPE", - coingeckoId: "", - module: "hypercent.prod-hype-coin", - icon: "https://swap.ecko.finance/images/crypto/hypercent-crypto.svg", + name: 'Hype', + symbol: 'HYPE', + coingeckoId: '', + module: 'hypercent.prod-hype-coin', + icon: 'https://swap.ecko.finance/images/crypto/hypercent-crypto.svg', }, { - id: "kaddex", - symbol: "KDX", - name: "Kaddex", - coingeckoId: "", - module: "kaddex.kdx", - icon: "https://assets.coingecko.com/coins/images/27325/standard/kdx.png?1696526373", + id: 'kaddex', + symbol: 'KDX', + name: 'Kaddex', + coingeckoId: '', + module: 'kaddex.kdx', + icon: 'https://assets.coingecko.com/coins/images/27325/standard/kdx.png?1696526373', }, { - id: "kdswap", - symbol: "KDS", - name: "KDSwap", - coingeckoId: "", - module: "kdlaunch.kdswap-token", - icon: "https://swap.ecko.finance/images/crypto/kds.svg", + id: 'kdswap', + symbol: 'KDS', + name: 'KDSwap', + coingeckoId: '', + module: 'kdlaunch.kdswap-token', + icon: 'https://swap.ecko.finance/images/crypto/kds.svg', }, { - id: "kdlaunch", - symbol: "KDL", - name: "KDLaunch", - coingeckoId: "", - module: "kdlaunch.token", - icon: "https://swap.ecko.finance/images/crypto/kdl.svg", + id: 'kdlaunch', + symbol: 'KDL', + name: 'KDLaunch', + coingeckoId: '', + module: 'kdlaunch.token', + icon: 'https://swap.ecko.finance/images/crypto/kdl.svg', }, { - name: "Mok", - symbol: "MOK", - coingeckoId: "", - module: "mok.token", - icon: "https://swap.ecko.finance/images/crypto/mok.svg", + name: 'Mok', + symbol: 'MOK', + coingeckoId: '', + module: 'mok.token', + icon: 'https://swap.ecko.finance/images/crypto/mok.svg', }, { - name: "BRO", - symbol: "BRO", - coingeckoId: "", - module: "n_582fed11af00dc626812cd7890bb88e72067f28c.bro", - icon: "https://swap.ecko.finance/images/crypto/bro.png", + name: 'BRO', + symbol: 'BRO', + coingeckoId: '', + module: 'n_582fed11af00dc626812cd7890bb88e72067f28c.bro', + icon: 
'https://swap.ecko.finance/images/crypto/bro.png', }, { - name: "zUSD", - symbol: "zUSD", - coingeckoId: "", - module: "n_b742b4e9c600892af545afb408326e82a6c0c6ed.zUSD", - icon: "https://swap.ecko.finance/images/crypto/zUSD.svg", + name: 'zUSD', + symbol: 'zUSD', + coingeckoId: '', + module: 'n_b742b4e9c600892af545afb408326e82a6c0c6ed.zUSD', + icon: 'https://swap.ecko.finance/images/crypto/zUSD.svg', }, { - name: "HERON", - symbol: "HERON", - coingeckoId: "", - module: "n_e309f0fa7cf3a13f93a8da5325cdad32790d2070.heron", - icon: "https://swap.ecko.finance/images/crypto/heron.png", + name: 'HERON', + symbol: 'HERON', + coingeckoId: '', + module: 'n_e309f0fa7cf3a13f93a8da5325cdad32790d2070.heron', + icon: 'https://swap.ecko.finance/images/crypto/heron.png', }, { - name: "Flux", - symbol: "FLUX", - coingeckoId: "", - module: "runonflux.flux", - icon: "https://swap.ecko.finance/images/crypto/flux-crypto.svg", - } -] + name: 'Flux', + symbol: 'FLUX', + coingeckoId: '', + module: 'runonflux.flux', + icon: 'https://swap.ecko.finance/images/crypto/flux-crypto.svg', + }, +]; diff --git a/web/composables/transactions.ts b/web/composables/transactions.ts index 916a4760..6526a0a5 100644 --- a/web/composables/transactions.ts +++ b/web/composables/transactions.ts @@ -1,17 +1,18 @@ -export const useTransactionStatus = (result: string) => result.includes('\"status\":\"success\"') ? 'success' : 'error' +export const useTransactionStatus = (result: string) => + result.includes('\"status\":\"success\"') ? 'success' : 'error'; export const useTransactionPubkey = (data: string) => { if (!data) { - return null + return null; } - const parsedData = JSON.parse(data || '{}') + const parsedData = JSON.parse(data || '{}'); - const [ first ] = parsedData.keyset?.keys || parsedData.ks?.keys || [] + const [first] = parsedData.keyset?.keys || parsedData.ks?.keys || []; - return first -} + return first; +}; -export const useTransactionSigs = (sigs: string) => JSON.parse(sigs) +export const useTransactionSigs = (sigs: string) => JSON.parse(sigs); -export const useTransactionGas = (transfers: any []) => transfers[0] +export const useTransactionGas = (transfers: any[]) => transfers[0]; diff --git a/web/composables/transfer.ts b/web/composables/transfer.ts index 0ef16adf..4a83aa6f 100644 --- a/web/composables/transfer.ts +++ b/web/composables/transfer.ts @@ -1,7 +1,7 @@ export const useLatestTransfer = (transfers: any[]) => { - const nodeLength = transfers.length || 0 + const nodeLength = transfers.length || 0; - const transferIndex = Math.max(nodeLength - 1, 0) + const transferIndex = Math.max(nodeLength - 1, 0); - return transfers[transferIndex] -} + return transfers[transferIndex]; +}; diff --git a/web/composables/usePopper.ts b/web/composables/usePopper.ts index 61af6209..5a2ab042 100644 --- a/web/composables/usePopper.ts +++ b/web/composables/usePopper.ts @@ -1,103 +1,122 @@ -import { ref, onMounted, watchEffect } from 'vue' -import type { Ref } from 'vue' -import { popperGenerator, defaultModifiers } from '@popperjs/core/lib/popper-lite' -import type { VirtualElement } from '@popperjs/core/lib/popper-lite' -import type { Instance } from '@popperjs/core' -import flip from '@popperjs/core/lib/modifiers/flip' -import offset from '@popperjs/core/lib/modifiers/offset' -import preventOverflow from '@popperjs/core/lib/modifiers/preventOverflow' -import computeStyles from '@popperjs/core/lib/modifiers/computeStyles' -import eventListeners from '@popperjs/core/lib/modifiers/eventListeners' -import arrowModifier from 
'@popperjs/core/lib/modifiers/arrow' -import { unrefElement } from '@vueuse/core' -import type { MaybeElement } from '@vueuse/core' +import { ref, onMounted, watchEffect } from 'vue'; +import type { Ref } from 'vue'; +import { popperGenerator, defaultModifiers } from '@popperjs/core/lib/popper-lite'; +import type { VirtualElement } from '@popperjs/core/lib/popper-lite'; +import type { Instance } from '@popperjs/core'; +import flip from '@popperjs/core/lib/modifiers/flip'; +import offset from '@popperjs/core/lib/modifiers/offset'; +import preventOverflow from '@popperjs/core/lib/modifiers/preventOverflow'; +import computeStyles from '@popperjs/core/lib/modifiers/computeStyles'; +import eventListeners from '@popperjs/core/lib/modifiers/eventListeners'; +import arrowModifier from '@popperjs/core/lib/modifiers/arrow'; +import { unrefElement } from '@vueuse/core'; +import type { MaybeElement } from '@vueuse/core'; -type PopperOptions = any +type PopperOptions = any; export const createPopper = popperGenerator({ - defaultModifiers: [...defaultModifiers, offset, flip, preventOverflow, computeStyles, eventListeners, arrowModifier] -}) + defaultModifiers: [ + ...defaultModifiers, + offset, + flip, + preventOverflow, + computeStyles, + eventListeners, + arrowModifier, + ], +}); -export function usePopper ({ - locked = false, - overflowPadding = 8, - offsetDistance = 8, - offsetSkid = 0, - gpuAcceleration = true, - adaptive = true, - scroll = true, - resize = true, - arrow = false, - placement, - strategy -}: PopperOptions, virtualReference?: Ref) { - const reference = ref(null) - const popper = ref(null) - const instance = ref(null) +export function usePopper( + { + locked = false, + overflowPadding = 8, + offsetDistance = 8, + offsetSkid = 0, + gpuAcceleration = true, + adaptive = true, + scroll = true, + resize = true, + arrow = false, + placement, + strategy, + }: PopperOptions, + virtualReference?: Ref, +) { + const reference = ref(null); + const popper = ref(null); + const instance = ref(null); onMounted(() => { - watchEffect((onInvalidate) => { - if (!popper.value) { return } - if (!reference.value && !virtualReference?.value) { return } + watchEffect(onInvalidate => { + if (!popper.value) { + return; + } + if (!reference.value && !virtualReference?.value) { + return; + } - const popperEl = unrefElement(popper) - const referenceEl = virtualReference?.value || unrefElement(reference) + const popperEl = unrefElement(popper); + const referenceEl = virtualReference?.value || unrefElement(reference); // if (!(referenceEl instanceof HTMLElement)) { return } - if (!(popperEl instanceof HTMLElement)) { return } - if (!referenceEl) { return } + if (!(popperEl instanceof HTMLElement)) { + return; + } + if (!referenceEl) { + return; + } const config: Record = { modifiers: [ { name: 'flip', - enabled: !locked + enabled: !locked, }, { name: 'preventOverflow', options: { - padding: overflowPadding - } + padding: overflowPadding, + }, }, { name: 'offset', options: { - offset: [offsetSkid, offsetDistance] - } + offset: [offsetSkid, offsetDistance], + }, }, { name: 'computeStyles', options: { adaptive, - gpuAcceleration - } + gpuAcceleration, + }, }, { name: 'eventListeners', options: { scroll, - resize - } + resize, + }, }, { name: 'arrow', - enabled: arrow - } - ] - } + enabled: arrow, + }, + ], + }; if (placement) { - config.placement = placement + config.placement = placement; } if (strategy) { - config.strategy = strategy + config.strategy = strategy; } - instance.value = createPopper(referenceEl, 
popperEl, config) + instance.value = createPopper(referenceEl, popperEl, config); - onInvalidate(instance.value.destroy) - }) - }) + onInvalidate(instance.value.destroy); + }); + }); - return [reference, popper, instance] as const + return [reference, popper, instance] as const; } diff --git a/web/config/account.ts b/web/config/account.ts index 80f618fe..2d550f55 100644 --- a/web/config/account.ts +++ b/web/config/account.ts @@ -1,35 +1,35 @@ -import { blockchainTooltipData } from "./tooltips" +import { blockchainTooltipData } from './tooltips'; export const assetsTableColumns = [ { cols: 5, key: 'asset', label: 'Asset', - description: blockchainTooltipData.account.tabAssets.asset + description: blockchainTooltipData.account.tabAssets.asset, }, { cols: 5, key: 'module', label: 'Module', - description: blockchainTooltipData.account.tabAssets.symbol + description: blockchainTooltipData.account.tabAssets.symbol, }, { cols: 4, key: 'balance', label: 'Quantity', - description: blockchainTooltipData.account.tabAssets.quantity + description: blockchainTooltipData.account.tabAssets.quantity, }, { cols: 4, key: 'price', label: 'Price', - description: blockchainTooltipData.account.tabAssets.price + description: blockchainTooltipData.account.tabAssets.price, }, { cols: 4, key: 'value', label: 'Value', - description: blockchainTooltipData.account.tabAssets.value + description: blockchainTooltipData.account.tabAssets.value, }, { cols: 2, @@ -37,7 +37,7 @@ export const assetsTableColumns = [ key: 'distribution', label: 'Distribution', }, -] +]; export const assetsTableSubColumns = [ { @@ -52,54 +52,53 @@ export const assetsTableSubColumns = [ key: 'value', label: 'Value', }, -] +]; export const statementTableColumns = [ { cols: 6, key: 'createdAt', label: 'Date', - description: blockchainTooltipData.account.tabAccountStatement.date + description: blockchainTooltipData.account.tabAccountStatement.date, }, { cols: 6, key: 'description', label: 'Transaction description', - description: blockchainTooltipData.account.tabAccountStatement.transactionDescription + description: blockchainTooltipData.account.tabAccountStatement.transactionDescription, }, { cols: 6, key: 'amount', label: 'Amount', - description: blockchainTooltipData.account.tabAccountStatement.amount + description: blockchainTooltipData.account.tabAccountStatement.amount, }, { cols: 6, key: 'balance', label: 'Running Balance', - description: blockchainTooltipData.account.tabAccountStatement.runningBalance + description: blockchainTooltipData.account.tabAccountStatement.runningBalance, }, -] - +]; export const accountTransactionsTableColumns = [ { cols: 2, key: 'status', label: 'Status', - description: blockchainTooltipData.transaction.status + description: blockchainTooltipData.transaction.status, }, { cols: 6, key: 'requestKey', label: 'Request Key', - description: blockchainTooltipData.transaction.requestKey + description: blockchainTooltipData.transaction.requestKey, }, { cols: 3, key: 'sender', label: 'Sender', - description: blockchainTooltipData.transaction.overview.from + description: blockchainTooltipData.transaction.overview.from, }, { cols: 2, @@ -112,20 +111,20 @@ export const accountTransactionsTableColumns = [ center: true, key: 'chainId', label: 'Chain', - description: blockchainTooltipData.transaction.chain + description: blockchainTooltipData.transaction.chain, }, { cols: 3, center: true, key: 'block', label: 'Block Height', - description: blockchainTooltipData.transaction.blockHeight + description: 
blockchainTooltipData.transaction.blockHeight, }, { cols: 5, key: 'createdAt', label: 'Date', - description: blockchainTooltipData.transaction.meta.creationTime + description: blockchainTooltipData.transaction.meta.creationTime, }, { isFixed: true, @@ -133,4 +132,4 @@ export const accountTransactionsTableColumns = [ key: 'icon', label: '', }, -] +]; diff --git a/web/config/blocks.ts b/web/config/blocks.ts index 1a99c5ce..8c4ff430 100644 --- a/web/config/blocks.ts +++ b/web/config/blocks.ts @@ -1,17 +1,17 @@ -import { blockchainTooltipData } from "./tooltips" +import { blockchainTooltipData } from './tooltips'; export const blocksTableColumns = [ { cols: 3, key: 'height', label: 'Block Height', - description: blockchainTooltipData.block.blockHeight + description: blockchainTooltipData.block.blockHeight, }, { cols: 5, key: 'hash', label: 'Hash', - description: blockchainTooltipData.block.overview.hash + description: blockchainTooltipData.block.overview.hash, }, { cols: 3, @@ -29,20 +29,20 @@ export const blocksTableColumns = [ center: true, key: 'chainId', label: 'Chain', - description: blockchainTooltipData.block.chain + description: blockchainTooltipData.block.chain, }, { cols: 3, key: 'fees', center: true, label: 'Total Fees (KDA)', - description: blockchainTooltipData.block.overview.totalFees + description: blockchainTooltipData.block.overview.totalFees, }, { cols: 5, key: 'createdAt', label: 'Date', - description: blockchainTooltipData.block.creationTime + description: blockchainTooltipData.block.creationTime, }, { isFixed: true, @@ -50,25 +50,25 @@ export const blocksTableColumns = [ key: 'icon', label: '', }, -] +]; export const blockTransactionsTableColumns = [ { cols: 2, key: 'status', label: 'Status', - description: blockchainTooltipData.block.transactions.status + description: blockchainTooltipData.block.transactions.status, }, { cols: 8, key: 'requestkey', label: 'Request Key', - description: blockchainTooltipData.block.transactions.requestKey + description: blockchainTooltipData.block.transactions.requestKey, }, { cols: 14, key: 'code', label: 'Code', - description: blockchainTooltipData.block.transactions.code + description: blockchainTooltipData.block.transactions.code, }, -] +]; diff --git a/web/config/collection.ts b/web/config/collection.ts index c6e5fa74..cb5e1d74 100644 --- a/web/config/collection.ts +++ b/web/config/collection.ts @@ -1,41 +1,41 @@ -import { blockchainTooltipData } from "./tooltips" +import { blockchainTooltipData } from './tooltips'; export const collectionActivityTableColumns = [ { cols: 4, key: 'item', label: 'Item', - description: blockchainTooltipData.block.blockHeight + description: blockchainTooltipData.block.blockHeight, }, { cols: 2, key: 'price', label: 'Price', - description: blockchainTooltipData.block.blockHeight + description: blockchainTooltipData.block.blockHeight, }, { cols: 4, key: 'hash', label: 'Hash', - description: blockchainTooltipData.block.blockHeight + description: blockchainTooltipData.block.blockHeight, }, { cols: 4, key: 'from', label: 'From', - description: blockchainTooltipData.block.blockHeight + description: blockchainTooltipData.block.blockHeight, }, { cols: 4, key: 'to', label: 'To', - description: blockchainTooltipData.block.blockHeight + description: blockchainTooltipData.block.blockHeight, }, { cols: 5, key: 'date', label: 'Date', - description: blockchainTooltipData.block.blockHeight + description: blockchainTooltipData.block.blockHeight, }, { @@ -44,12 +44,12 @@ export const collectionActivityTableColumns = [ key: 
'icon', label: '', }, -] +]; export const mockCollectionTxs = [ { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', createdAt: 1712954633979, name: 'The Angel Doggos', collection: 'Mock', @@ -59,8 +59,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'Kadena Bears', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -70,8 +70,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'Brawler Bears', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -81,8 +81,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'Boxing Badger', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -92,8 +92,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'Kadena Mining Club', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -103,8 +103,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'Kadena Komodos', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -114,8 +114,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'JODIE INU', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -125,8 +125,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'INU Crew', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -136,8 +136,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'Eighties Bulls', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -147,8 +147,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'Dadbod #999999', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -158,8 +158,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'Dadbod #999999', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -169,8 +169,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'Dadbod #999999', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -180,8 +180,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', 
name: 'Dadbod #999999', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -191,8 +191,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'Dadbod #999999', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -202,8 +202,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'Dadbod #999999', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -213,8 +213,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'Dadbod #999999', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -224,8 +224,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'Dadbod #999999', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -235,8 +235,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'Dadbod #999999', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -246,8 +246,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'Dadbod #999999', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -257,8 +257,8 @@ export const mockCollectionTxs = [ from: 'k:0000000000000000000000000000000000000000000000000000000000000', }, { - amount: "00.00", - dollar: "$ 0,00", + amount: '00.00', + dollar: '$ 0,00', name: 'Dadbod #999999', image: '/nft/mock.webp', createdAt: 1712954633979, @@ -266,8 +266,8 @@ export const mockCollectionTxs = [ hash: '0000000000000000000000000000000000000000000000', to: 'k:0000000000000000000000000000000000000000000000000000000000000', from: 'k:0000000000000000000000000000000000000000000000000000000000000', - } -] + }, +]; export const mockCollectionNFTs = [ { @@ -390,7 +390,7 @@ export const mockCollectionNFTs = [ collection: 'Mock', image: '/nft/mock.webp', }, -] +]; export const mockCollection = [ { @@ -398,7 +398,7 @@ export const mockCollection = [ amount: '00.00', currency: 'KDA', delta: -0.401, - floor: "10", + floor: '10', owners: '5,270', transfers: '10,000', assets: '197,719', @@ -409,7 +409,7 @@ export const mockCollection = [ amount: '00.00', currency: 'KDA', delta: 0.0383, - floor: "2.326", + floor: '2.326', owners: '2,270', transfers: '10,000', assets: '17,719', @@ -420,7 +420,7 @@ export const mockCollection = [ amount: '00.00', currency: 'KDA', delta: 0.498, - floor: "0.000", + floor: '0.000', owners: '270', transfers: '20,000', assets: '17,719', @@ -431,7 +431,7 @@ export const mockCollection = [ amount: '00.00', currency: 'KDA', delta: 0.1864, - floor: "10", + floor: '10', owners: '70', transfers: '1000', assets: '7,719', @@ -442,7 +442,7 @@ export const mockCollection = [ amount: '00.00', currency: 'KDA', delta: 0.5936, - floor: "1.4", + floor: '1.4', owners: '5,470', transfers: '10,000', assets: '197,719', @@ -453,7 +453,7 @@ export const mockCollection = [ amount: '00.00', currency: 
'KDA', delta: 0.3715, - floor: "0.", + floor: '0.', owners: '570', transfers: '10000', assets: '1,719', @@ -464,7 +464,7 @@ export const mockCollection = [ amount: '00.00', currency: 'KDA', delta: 0.401, - floor: "10", + floor: '10', owners: '50', transfers: '100', assets: '119', @@ -475,7 +475,7 @@ export const mockCollection = [ amount: '00.00 ', currency: 'KDA', delta: 0.401, - floor: "1.385", + floor: '1.385', owners: '170', transfers: '14,000', assets: '19', @@ -486,7 +486,7 @@ export const mockCollection = [ amount: '00.00 ', currency: 'KDA', delta: 0.18877, - floor: "10", + floor: '10', owners: '8,270', transfers: '100', assets: '19', @@ -497,7 +497,7 @@ export const mockCollection = [ amount: '00.00 ', currency: 'KDA', delta: 0.401, - floor: "1.385", + floor: '1.385', owners: '670', transfers: '18,000', assets: '87,719', @@ -508,7 +508,7 @@ export const mockCollection = [ amount: '00.00 ', currency: 'KDA', delta: 0.401, - floor: "10", + floor: '10', owners: '12,270', transfers: '14,000', assets: '197,719', @@ -519,7 +519,7 @@ export const mockCollection = [ amount: '00.00 ', currency: 'KDA', delta: -0.401, - floor: "11.3852", + floor: '11.3852', owners: '530', transfers: '10,000', assets: '27,719', @@ -530,7 +530,7 @@ export const mockCollection = [ amount: '00.00 ', currency: 'KDA', delta: -0.401, - floor: "10", + floor: '10', owners: '60', transfers: '100', assets: '197,719', @@ -541,7 +541,7 @@ export const mockCollection = [ amount: '00.00 ', currency: 'KDA', delta: 0.401, - floor: "11.3852", + floor: '11.3852', owners: '5,270', transfers: '10,000', assets: '197,719', @@ -552,7 +552,7 @@ export const mockCollection = [ amount: '00.00 ', currency: 'KDA', delta: -0.401, - floor: "10", + floor: '10', owners: '270', transfers: '10', assets: '197,719', @@ -563,7 +563,7 @@ export const mockCollection = [ amount: '00.00 ', currency: 'KDA', delta: 0.401, - floor: "10", + floor: '10', owners: '1,270', transfers: '10,000', assets: '197,719', @@ -574,7 +574,7 @@ export const mockCollection = [ amount: '00.00 ', currency: 'KDA', delta: 0.401, - floor: "1.385", + floor: '1.385', owners: '5,270', transfers: '10,000', assets: '197,719', @@ -585,7 +585,7 @@ export const mockCollection = [ amount: '00.00 ', currency: 'KDA', delta: -0.401, - floor: "10", + floor: '10', owners: '5,470', transfers: '10,000', assets: '197,719', @@ -596,7 +596,7 @@ export const mockCollection = [ amount: '00.00 ', currency: 'KDA', delta: 0.401, - floor: "10", + floor: '10', owners: '5,470', transfers: '10,000', assets: '197,719', @@ -607,10 +607,10 @@ export const mockCollection = [ amount: '00.00 ', currency: 'KDA', delta: 0.401, - floor: "10", + floor: '10', owners: '5,270', transfers: '10,000', assets: '197,719', icon: '/collection/profile.webp', - } -] + }, +]; diff --git a/web/config/index.ts b/web/config/index.ts index 72561181..5e856a10 100644 --- a/web/config/index.ts +++ b/web/config/index.ts @@ -1,9 +1,9 @@ -export * from './nft' -export * from './routes' -export * from './blocks' -export * from './account' -export * from './tokens' -export * from './networks' -export * from './collection' -export * from './transactions' -export * from './tooltips' +export * from './nft'; +export * from './routes'; +export * from './blocks'; +export * from './account'; +export * from './tokens'; +export * from './networks'; +export * from './collection'; +export * from './transactions'; +export * from './tooltips'; diff --git a/web/config/networks.ts b/web/config/networks.ts index 41ceafed..a75cc362 100644 --- 
a/web/config/networks.ts +++ b/web/config/networks.ts @@ -2,18 +2,18 @@ export const networks = [ { icon: 'github', title: 'Github', - to: 'https://github.com/opact-protocol' + to: 'https://github.com/opact-protocol', }, { icon: 'discord', title: 'Discord', - to: 'https://docs.tickets.opact.io/' + to: 'https://docs.tickets.opact.io/', }, { icon: 'twitter', title: 'Twitter', - to: 'https://twitter.com/opactzk' + to: 'https://twitter.com/opactzk', }, -] +]; -export default networks +export default networks; diff --git a/web/config/nft.ts b/web/config/nft.ts index 9891fe9f..0947f38b 100644 --- a/web/config/nft.ts +++ b/web/config/nft.ts @@ -1,11 +1,11 @@ -import { blockchainTooltipData } from "./tooltips" +import { blockchainTooltipData } from './tooltips'; export const nftCollectionsTableColumns = [ { cols: 1, center: true, key: 'position', - label: '#' + label: '#', }, { cols: 5, @@ -16,37 +16,37 @@ export const nftCollectionsTableColumns = [ cols: 3, key: 'volume', label: 'Volume', - description: blockchainTooltipData.trendingCollections.volume + description: blockchainTooltipData.trendingCollections.volume, }, { cols: 3, key: 'delta', label: 'Volume Change', - description: blockchainTooltipData.trendingCollections.volumeChange + description: blockchainTooltipData.trendingCollections.volumeChange, }, { cols: 2, key: 'floorPrice', label: 'Floor Price', - description: blockchainTooltipData.trendingCollections.floorPrice + description: blockchainTooltipData.trendingCollections.floorPrice, }, { cols: 3, key: 'owners', label: 'Owners', - description: blockchainTooltipData.trendingCollections.owners + description: blockchainTooltipData.trendingCollections.owners, }, { cols: 3, key: 'transfers', label: 'Transfers', - description: blockchainTooltipData.trendingCollections.transfers + description: blockchainTooltipData.trendingCollections.transfers, }, { cols: 3, key: 'assets', label: 'Total Assets', - description: blockchainTooltipData.trendingCollections.totalAssets + description: blockchainTooltipData.trendingCollections.totalAssets, }, { cols: 1, @@ -54,33 +54,33 @@ export const nftCollectionsTableColumns = [ key: 'icon', label: '', }, -] +]; export const nftTransfersTableColumns = [ { cols: 4, key: 'hash', label: 'Hash', - description: blockchainTooltipData.nft.transfers.hash + description: blockchainTooltipData.nft.transfers.hash, }, { center: true, cols: 2, key: 'method', label: 'Method', - description: blockchainTooltipData.nft.transfers.method + description: blockchainTooltipData.nft.transfers.method, }, { cols: 4, key: 'from', label: 'From', - description: blockchainTooltipData.nft.transfers.from + description: blockchainTooltipData.nft.transfers.from, }, { cols: 4, key: 'to', label: 'To', - description: blockchainTooltipData.nft.transfers.to + description: blockchainTooltipData.nft.transfers.to, }, { cols: 5, @@ -98,13 +98,13 @@ export const nftTransfersTableColumns = [ key: 'icon', label: '', }, -] +]; export const nftActivityTableColumns = [ { cols: 2, key: 'activity', - label: 'Activity' + label: 'Activity', }, { cols: 2, @@ -115,19 +115,19 @@ export const nftActivityTableColumns = [ cols: 6, key: 'hash', label: 'Hash', - description: blockchainTooltipData.nftDetails.activity.hash + description: blockchainTooltipData.nftDetails.activity.hash, }, { cols: 4, key: 'from', label: 'From', - description: blockchainTooltipData.nftDetails.activity.from + description: blockchainTooltipData.nftDetails.activity.from, }, { cols: 4, key: 'to', label: 'To', - description: 
blockchainTooltipData.nftDetails.activity.to + description: blockchainTooltipData.nftDetails.activity.to, }, { cols: 5, @@ -141,4 +141,4 @@ export const nftActivityTableColumns = [ key: 'icon', label: '', }, -] +]; diff --git a/web/config/routes.ts b/web/config/routes.ts index 54fe8a4b..5c5a1fc2 100644 --- a/web/config/routes.ts +++ b/web/config/routes.ts @@ -4,10 +4,10 @@ export interface BaseRoute { path?: string; tag: string; label: string; - disabled?: boolean + disabled?: boolean; } -export interface LinkRoute extends BaseRoute{ +export interface LinkRoute extends BaseRoute { type: 'link'; } @@ -55,7 +55,7 @@ export const routes: Route[] = [ label: 'Trending tokens', tag: 'route:tokens:trending', }, - ] + ], }, // { // label: 'NFT', @@ -75,6 +75,6 @@ export const routes: Route[] = [ // } // ] // } -] +]; -export default routes +export default routes; diff --git a/web/config/tokens.ts b/web/config/tokens.ts index 57366f99..63e78aea 100644 --- a/web/config/tokens.ts +++ b/web/config/tokens.ts @@ -1,4 +1,4 @@ -import { blockchainTooltipData } from "./tooltips" +import { blockchainTooltipData } from './tooltips'; export const trendingTokensTableColumns = [ { @@ -21,59 +21,59 @@ export const trendingTokensTableColumns = [ center: true, key: 'change', label: 'Change (%)', - description: blockchainTooltipData.trendingTokens.change + description: blockchainTooltipData.trendingTokens.change, }, { cols: 4, key: 'volume', label: 'Volume (24h)', - description: blockchainTooltipData.trendingTokens.volume + description: blockchainTooltipData.trendingTokens.volume, }, { cols: 3, key: 'marketCap', label: 'Market Cap', - description: blockchainTooltipData.trendingTokens.marketCap + description: blockchainTooltipData.trendingTokens.marketCap, }, { cols: 3, key: 'supply', label: 'Circulating Supply', - description: blockchainTooltipData.trendingTokens.circulatingSupply + description: blockchainTooltipData.trendingTokens.circulatingSupply, }, -] +]; export const tokenTransfersTableColumns = [ { cols: 2, key: 'requestkey', label: 'Hash', - description: blockchainTooltipData.tokenTransfers.hash + description: blockchainTooltipData.tokenTransfers.hash, }, { center: true, cols: 3, key: 'method', label: 'Method', - description: blockchainTooltipData.tokenTransfers.method + description: blockchainTooltipData.tokenTransfers.method, }, { cols: 4, key: 'from', label: 'From', - description: blockchainTooltipData.tokenTransfers.from + description: blockchainTooltipData.tokenTransfers.from, }, { cols: 4, key: 'to', label: 'To', - description: blockchainTooltipData.tokenTransfers.to + description: blockchainTooltipData.tokenTransfers.to, }, { cols: 2, key: 'amount', label: 'Amount', - description: blockchainTooltipData.tokenTransfers.amount + description: blockchainTooltipData.tokenTransfers.amount, }, { cols: 3, @@ -91,33 +91,33 @@ export const tokenTransfersTableColumns = [ key: 'icon', label: '', }, -] +]; export const tokenDetailTransferTableColumns = [ { cols: 5, key: 'requestKey', label: 'Request Key', - description: blockchainTooltipData.tokenDetails.transfers.hash + description: blockchainTooltipData.tokenDetails.transfers.hash, }, { center: true, cols: 2, key: 'method', label: 'Method', - description: blockchainTooltipData.tokenDetails.transfers.method + description: blockchainTooltipData.tokenDetails.transfers.method, }, { cols: 4, key: 'from', label: 'From', - description: blockchainTooltipData.tokenDetails.transfers.from + description: blockchainTooltipData.tokenDetails.transfers.from, }, { cols: 4, key: 
'to', label: 'To', - description: blockchainTooltipData.tokenDetails.transfers.to + description: blockchainTooltipData.tokenDetails.transfers.to, }, { cols: 4, @@ -129,13 +129,13 @@ export const tokenDetailTransferTableColumns = [ key: 'date', label: 'Date', }, -] +]; export const holdersTableColumns = [ { cols: 1, key: 'ranking', - label: '#' + label: '#', }, { cols: 7, @@ -151,12 +151,12 @@ export const holdersTableColumns = [ cols: 5, key: 'value', label: 'Value', - description: blockchainTooltipData.tokenDetails.holders.valueUSD + description: blockchainTooltipData.tokenDetails.holders.valueUSD, }, { cols: 6, key: 'percentage', label: 'Percentage', - description: blockchainTooltipData.tokenDetails.holders.percentage + description: blockchainTooltipData.tokenDetails.holders.percentage, }, -] +]; diff --git a/web/config/tooltips.ts b/web/config/tooltips.ts index 8cd22c3b..ae4812c5 100644 --- a/web/config/tooltips.ts +++ b/web/config/tooltips.ts @@ -1,207 +1,219 @@ export const blockchainTooltipData = { transactions: { - status: "Indicates the current status of the transaction, such as pending, confirmed, or failed.", - requestKey: "A unique identifier assigned to each transaction for tracking and reference purposes.", - chain: "Specifies the blockchain network on which the transaction occurred.", - from: "The sender or origin address of the transaction.", - to: "The recipient or destination address of the transaction.", + status: + 'Indicates the current status of the transaction, such as pending, confirmed, or failed.', + requestKey: + 'A unique identifier assigned to each transaction for tracking and reference purposes.', + chain: 'Specifies the blockchain network on which the transaction occurred.', + from: 'The sender or origin address of the transaction.', + to: 'The recipient or destination address of the transaction.', }, transaction: { - requestKey: "A unique identifier assigned to the transaction request.", - status: "The current status or outcome of the transaction.", - chain: "The blockchain network on which the transaction occurred.", - blockHeight: "The numerical position of the block in the blockchain where the transaction is recorded.", - timestamp: "The date and time at which the transaction was validated.", + requestKey: 'A unique identifier assigned to the transaction request.', + status: 'The current status or outcome of the transaction.', + chain: 'The blockchain network on which the transaction occurred.', + blockHeight: + 'The numerical position of the block in the blockchain where the transaction is recorded.', + timestamp: 'The date and time at which the transaction was validated.', overview: { - from: "The address or account from which the transaction originated.", - to: "The address or account to which the transaction was sent.", - amount: "The quantity of tokens or cryptocurrency transferred in the transaction.", - transactionFee: "The fee paid for processing the transaction.", - paidBy: "The entity responsible for paying the transaction fee.", - gasPrice: "The cost per unit of gas used in the transaction.", - code: "Additional information or code associated with the transaction.", + from: 'The address or account from which the transaction originated.', + to: 'The address or account to which the transaction was sent.', + amount: 'The quantity of tokens or cryptocurrency transferred in the transaction.', + transactionFee: 'The fee paid for processing the transaction.', + paidBy: 'The entity responsible for paying the transaction fee.', + gasPrice: 'The cost per 
unit of gas used in the transaction.', + code: 'Additional information or code associated with the transaction.', }, meta: { - sender: "The entity or account initiating the transaction.", - chain: "The blockchain network on which the transaction occurred.", - gasLimit: "The maximum amount of gas allowed for the transaction.", - ttl: "Time to Live, indicating the duration for which the transaction remains valid.", - creationTime: "The timestamp indicating when the transaction was created.", - publicKey: "The public key associated with the transaction.", - nonce: "A sequential number assigned to each transaction initiated by an account.", - data: "Additional data or information included with the transaction.", + sender: 'The entity or account initiating the transaction.', + chain: 'The blockchain network on which the transaction occurred.', + gasLimit: 'The maximum amount of gas allowed for the transaction.', + ttl: 'Time to Live, indicating the duration for which the transaction remains valid.', + creationTime: 'The timestamp indicating when the transaction was created.', + publicKey: 'The public key associated with the transaction.', + nonce: 'A sequential number assigned to each transaction initiated by an account.', + data: 'Additional data or information included with the transaction.', }, output: { - transactionId: "The unique identifier assigned to the transaction.", - result: "The outcome or status of the transaction.", - logs: "Additional information or logs generated during the transaction.", - signatures: "Digital signatures associated with the transaction.", - continuation: "Any continuation or follow-up actions related to the transaction.", + transactionId: 'The unique identifier assigned to the transaction.', + result: 'The outcome or status of the transaction.', + logs: 'Additional information or logs generated during the transaction.', + signatures: 'Digital signatures associated with the transaction.', + continuation: 'Any continuation or follow-up actions related to the transaction.', }, events: { - coinTransfer: "This event signifies a transfer of cryptocurrency or tokens.", + coinTransfer: 'This event signifies a transfer of cryptocurrency or tokens.', }, }, blocks: { - status: "Indicates the current status of the transaction, such as pending, confirmed, or failed.", - requestKey: "A unique identifier assigned to each transaction for tracking and reference purposes.", - chain: "Specifies the blockchain network on which the transaction occurred.", + status: + 'Indicates the current status of the transaction, such as pending, confirmed, or failed.', + requestKey: + 'A unique identifier assigned to each transaction for tracking and reference purposes.', + chain: 'Specifies the blockchain network on which the transaction occurred.', }, block: { - chain: "The specific blockchain or chain to which the block belongs.", - blockHeight: "The numerical height or position of the block within the blockchain.", - creationTime: "The timestamp indicating when the block was created or mined.", - parent: "The parent block from which the current block was derived or built upon.", - powHash: "The Proof of Work (POW) hash associated with the block, indicating the computational work done to validate it.", + chain: 'The specific blockchain or chain to which the block belongs.', + blockHeight: 'The numerical height or position of the block within the blockchain.', + creationTime: 'The timestamp indicating when the block was created or mined.', + parent: 'The parent block from which the current 
block was derived or built upon.', + powHash: + 'The Proof of Work (POW) hash associated with the block, indicating the computational work done to validate it.', overview: { - target: "The intended recipient or target of the block.", - hash: "A unique identifier for the block, generated using a cryptographic hash function.", - totalFees: "The total fees collected by miner for including transactions in the block.", - nonce: "A value used in the mining process to vary the block hash.", - weight: "The weight assigned to the block within the blockchain network.", - epochStart: "The start time of the epoch in which the block was mined.", - flags: "Any special indicators or flags associated with the block.", - chainwebVersion: "The version of the Chainweb protocol used.", - neighbors: "Other blocks connected or adjacent to this block in the blockchain network.", + target: 'The intended recipient or target of the block.', + hash: 'A unique identifier for the block, generated using a cryptographic hash function.', + totalFees: 'The total fees collected by miner for including transactions in the block.', + nonce: 'A value used in the mining process to vary the block hash.', + weight: 'The weight assigned to the block within the blockchain network.', + epochStart: 'The start time of the epoch in which the block was mined.', + flags: 'Any special indicators or flags associated with the block.', + chainwebVersion: 'The version of the Chainweb protocol used.', + neighbors: 'Other blocks connected or adjacent to this block in the blockchain network.', }, payload: { - minerAccount: "The account of the miner who mined the block.", + minerAccount: 'The account of the miner who mined the block.', minerPublicKeys: "The public keys associated with the miner's account.", minerPredicate: "Any specific conditions or predicates associated with the miner's account.", - transactionsHash: "A hash of all transactions included in the block.", - outputsHash: "A hash of all outputs produced by transactions in the block.", + transactionsHash: 'A hash of all transactions included in the block.', + outputsHash: 'A hash of all outputs produced by transactions in the block.', payloadHash: "A hash of the block's payload data.", }, coinbase: { - gas: "The amount of gas consumed by the block.", + gas: 'The amount of gas consumed by the block.', result: "The result of executing the block's transactions.", requestKey: "The unique identifier for the block's request.", logs: "Any logs or messages generated by the block's execution.", - metadata: "Additional metadata associated with the block.", + metadata: 'Additional metadata associated with the block.', transactionId: "The identifier for the block's transaction.", }, transactions: { - blockHash: "The hash of the block containing the transaction.", - status: "The status of the transaction. Indicates whether the transaction is confirmed, pending, or failed.", - requestKey: "The unique request key associated with the transaction. It is used to uniquely identify the transaction on the blockchain.", - code: "The code or type of transaction executed. It may include information about the type of transaction, such as fund transfer, smart contract execution, among others.", + blockHash: 'The hash of the block containing the transaction.', + status: + 'The status of the transaction. Indicates whether the transaction is confirmed, pending, or failed.', + requestKey: + 'The unique request key associated with the transaction. 
It is used to uniquely identify the transaction on the blockchain.', + code: 'The code or type of transaction executed. It may include information about the type of transaction, such as fund transfer, smart contract execution, among others.', }, }, account: { - address: "The account identifier or address associated with the account.", - balance: "The current balance of the account.", - assets: "The total number of assets held in the account.", - transactions: "The total number of transactions associated with the account.", - createdAt: "The date and time when the account was created.", + address: 'The account identifier or address associated with the account.', + balance: 'The current balance of the account.', + assets: 'The total number of assets held in the account.', + transactions: 'The total number of transactions associated with the account.', + createdAt: 'The date and time when the account was created.', tabAssets: { - asset: "The name or identifier of the asset held in the account.", - symbol: "The symbol or abbreviation representing the asset.", - quantity: "The quantity or amount of the asset held.", - price: "The current price of the asset, if applicable.", - value: "The total value of the asset in the account, calculated based on quantity and price.", + asset: 'The name or identifier of the asset held in the account.', + symbol: 'The symbol or abbreviation representing the asset.', + quantity: 'The quantity or amount of the asset held.', + price: 'The current price of the asset, if applicable.', + value: 'The total value of the asset in the account, calculated based on quantity and price.', }, tabTransactions: { - status: "The status of the transaction associated with the account.", - requestKey: "The unique identifier associated with the transaction.", - chain: "The blockchain network or chain where the transaction occurred.", - blockHeight: "The height of the block where the transaction was recorded.", - from: "The sender or origin of the transaction.", - amount: "The amount transacted in the transaction.", - date: "The date and time when the transaction occurred.", + status: 'The status of the transaction associated with the account.', + requestKey: 'The unique identifier associated with the transaction.', + chain: 'The blockchain network or chain where the transaction occurred.', + blockHeight: 'The height of the block where the transaction was recorded.', + from: 'The sender or origin of the transaction.', + amount: 'The amount transacted in the transaction.', + date: 'The date and time when the transaction occurred.', }, tabAccountStatement: { - date: "The date and time of the transaction.", - transactionDescription: "A brief description or summary of the transaction.", - amount: "The amount involved in the transaction.", - runningBalance: "The updated balance of the account after the transaction.", + date: 'The date and time of the transaction.', + transactionDescription: 'A brief description or summary of the transaction.', + amount: 'The amount involved in the transaction.', + runningBalance: 'The updated balance of the account after the transaction.', }, }, trendingCollections: { - volume: "The total trading volume of the collection over the specified time period.", - volumeChange: "The percentage change in trading volume compared to the previous period.", - floorPrice: "The lowest price at which an NFT from this collection is currently listed for sale.", - owners: "The total number of unique owners or holders of NFTs from this collection.", - transfers: "The total 
number of transfers or transactions involving NFTs from this collection.", - totalAssets: "The total number of NFTs or assets within this collection.", + volume: 'The total trading volume of the collection over the specified time period.', + volumeChange: 'The percentage change in trading volume compared to the previous period.', + floorPrice: + 'The lowest price at which an NFT from this collection is currently listed for sale.', + owners: 'The total number of unique owners or holders of NFTs from this collection.', + transfers: 'The total number of transfers or transactions involving NFTs from this collection.', + totalAssets: 'The total number of NFTs or assets within this collection.', }, nft: { transfers: { - hash: "The unique identifier for the transaction involving the NFT.", - method: "The method or action performed in the transaction, such as transfer or mint.", - from: "The address or entity from which the NFT was transferred or originated.", - to: "The address or entity to which the NFT was transferred.", + hash: 'The unique identifier for the transaction involving the NFT.', + method: 'The method or action performed in the transaction, such as transfer or mint.', + from: 'The address or entity from which the NFT was transferred or originated.', + to: 'The address or entity to which the NFT was transferred.', }, collection: { - totalItems: "The total number of NFTs present in the collection.", - owners: "The number of unique individuals or addresses holding NFTs from this collection.", - forSale: "The number of NFTs from this collection currently listed for sale.", - floorPrice: "The lowest listed price for any NFT within this collection.", - averagePrice: "The average price of all NFTs within this collection.", - volume: "The total trading volume of NFTs from this collection over a specified period.", + totalItems: 'The total number of NFTs present in the collection.', + owners: 'The number of unique individuals or addresses holding NFTs from this collection.', + forSale: 'The number of NFTs from this collection currently listed for sale.', + floorPrice: 'The lowest listed price for any NFT within this collection.', + averagePrice: 'The average price of all NFTs within this collection.', + volume: 'The total trading volume of NFTs from this collection over a specified period.', tabActivity: { - hash: "A unique identifier for each transaction involving NFTs from this collection.", - from: "The address or entity initiating the transfer of NFTs.", - to: "The address or entity receiving the transferred NFTs.", + hash: 'A unique identifier for each transaction involving NFTs from this collection.', + from: 'The address or entity initiating the transfer of NFTs.', + to: 'The address or entity receiving the transferred NFTs.', }, }, }, nftDetails: { - id: "The indexer NFT ID.", - account: "The account identifier or address associated with the account.", - price: "The current price of the NFT in the marketplace.", - floorPrice: "The minimum price set for the NFT in the marketplace.", - owner: "The current owner of the NFT.", - creator: "The creator or original issuer of the NFT.", - timestamp: "The date and time when the NFT was created or last modified.", - marketplace: "The platform or marketplace where the NFT is listed for sale or auction.", + id: 'The indexer NFT ID.', + account: 'The account identifier or address associated with the account.', + price: 'The current price of the NFT in the marketplace.', + floorPrice: 'The minimum price set for the NFT in the marketplace.', + 
owner: 'The current owner of the NFT.', + creator: 'The creator or original issuer of the NFT.', + timestamp: 'The date and time when the NFT was created or last modified.', + marketplace: 'The platform or marketplace where the NFT is listed for sale or auction.', activity: { - account: "The account identifier or address associated with the account.", - hash: "A unique identifier for each transaction involving the NFT.", - from: "The address or entity initiating the transfer of the NFT.", - to: "The address or entity receiving the transferred NFT.", + account: 'The account identifier or address associated with the account.', + hash: 'A unique identifier for each transaction involving the NFT.', + from: 'The address or entity initiating the transfer of the NFT.', + to: 'The address or entity receiving the transferred NFT.', }, }, trendingTokens: { - change: "The percentage change in the token's price over a specific period, typically the last 24 hours.", - volume: "The total trading volume of the token over the past 24 hours.", - marketCap: "The total market capitalization of the token, calculated by multiplying the current price by the circulating supply.", - circulatingSupply: "The total number of tokens currently in circulation and available for trading.", + change: + "The percentage change in the token's price over a specific period, typically the last 24 hours.", + volume: 'The total trading volume of the token over the past 24 hours.', + marketCap: + 'The total market capitalization of the token, calculated by multiplying the current price by the circulating supply.', + circulatingSupply: + 'The total number of tokens currently in circulation and available for trading.', }, tokenTransfers: { - hash: "A unique identifier for the token transfer transaction.", + hash: 'A unique identifier for the token transfer transaction.', method: "Indicates the type or method of token transfer, such as 'transfer' or 'approve'.", - from: "The address from which the tokens were transferred.", - to: "The address to which the tokens were transferred.", - amount: "The quantity or amount of tokens transferred in the transaction.", + from: 'The address from which the tokens were transferred.', + to: 'The address to which the tokens were transferred.', + amount: 'The quantity or amount of tokens transferred in the transaction.', }, tokenDetails: { overview: { - price: "Current price of the token.", - maxTotalSupply: "Maximum total supply of the token.", - holders: "Total number of token holders.", - totalTransfers: "Total number of transfers involving the token.", + price: 'Current price of the token.', + maxTotalSupply: 'Maximum total supply of the token.', + holders: 'Total number of token holders.', + totalTransfers: 'Total number of transfers involving the token.', }, summary: { - contract: "Address of the token smart contract.", - decimals: "Number of decimal places used for the token.", - website: "Official website of the token project.", + contract: 'Address of the token smart contract.', + decimals: 'Number of decimal places used for the token.', + website: 'Official website of the token project.', }, transfers: { - hash: "Unique identifier for the token transfer transaction.", - blockHeight: "The block number in which the transaction occurred.", + hash: 'Unique identifier for the token transfer transaction.', + blockHeight: 'The block number in which the transaction occurred.', method: "Type or method of token transfer (e.g., 'transfer' or 'approve').", - from: "Address from which the tokens were 
transferred.", - to: "Address to which the tokens were transferred.", + from: 'Address from which the tokens were transferred.', + to: 'Address to which the tokens were transferred.', }, holders: { - quantity: "Quantity of tokens held by the holder.", - valueUSD: "Value of the tokens held by the holder in USD.", - percentage: "Percentage of total tokens held by the holder.", + quantity: 'Quantity of tokens held by the holder.', + valueUSD: 'Value of the tokens held by the holder in USD.', + percentage: 'Percentage of total tokens held by the holder.', }, information: { - marketCapitalization: "Total market capitalization of the token.", - volume24H: "Total trading volume of the token in the last 24 hours.", - circulatingSupply: "Total circulating supply of the token.", + marketCapitalization: 'Total market capitalization of the token.', + volume24H: 'Total trading volume of the token in the last 24 hours.', + circulatingSupply: 'Total circulating supply of the token.', }, }, }; diff --git a/web/config/transactions.ts b/web/config/transactions.ts index cc0dc5d4..fab69601 100644 --- a/web/config/transactions.ts +++ b/web/config/transactions.ts @@ -1,23 +1,23 @@ -import { blockchainTooltipData } from "./tooltips" +import { blockchainTooltipData } from './tooltips'; export const transactionTableColumns = [ { cols: 2, key: 'status', label: 'Status', - description: blockchainTooltipData.transaction.status + description: blockchainTooltipData.transaction.status, }, { cols: 6, key: 'requestKey', label: 'Request Key', - description: blockchainTooltipData.transaction.requestKey + description: blockchainTooltipData.transaction.requestKey, }, { cols: 3, key: 'sender', label: 'Sender', - description: blockchainTooltipData.transaction.overview.from + description: blockchainTooltipData.transaction.overview.from, }, { cols: 2, @@ -38,7 +38,7 @@ export const transactionTableColumns = [ center: true, key: 'block', label: 'Block Height', - description: blockchainTooltipData.transaction.blockHeight + description: blockchainTooltipData.transaction.blockHeight, }, // { // cols: 4, @@ -50,7 +50,7 @@ export const transactionTableColumns = [ cols: 5, key: 'createdAt', label: 'Date', - description: blockchainTooltipData.transaction.meta.creationTime + description: blockchainTooltipData.transaction.meta.creationTime, }, { isFixed: true, @@ -58,4 +58,4 @@ export const transactionTableColumns = [ key: 'icon', label: '', }, -] +]; diff --git a/web/constants/tokens.ts b/web/constants/tokens.ts index f0215d01..c2a2526a 100644 --- a/web/constants/tokens.ts +++ b/web/constants/tokens.ts @@ -3,172 +3,172 @@ export const unknownNft = { symbol: 'ukn', id: 'unknown', name: 'unknown', -} +}; export const unknownToken = { image: '', symbol: 'ukn', id: 'unknown', name: 'unknown', -} +}; export const staticTokens = [ { - name: "ARKD", - symbol: "ARKD", - coingeckoId: "", - module: "arkade.token", - icon: "https://swap.ecko.finance/images/crypto/ark.png", + name: 'ARKD', + symbol: 'ARKD', + coingeckoId: '', + module: 'arkade.token', + icon: 'https://swap.ecko.finance/images/crypto/ark.png', }, { - module: "coin", - symbol: "KDA", - coingeckoId: "kadena", - name: "Kadena", - icon: "https://swap.ecko.finance/images/crypto/kda-crypto.svg", + module: 'coin', + symbol: 'KDA', + coingeckoId: 'kadena', + name: 'Kadena', + icon: 'https://swap.ecko.finance/images/crypto/kda-crypto.svg', }, { - name: "KAYC", - symbol: "KAYC", - coingeckoId: "", - module: "free.KAYC", - icon: "https://swap.ecko.finance/images/crypto/kayc.svg", + name: 'KAYC', 
+ symbol: 'KAYC', + coingeckoId: '', + module: 'free.KAYC', + icon: 'https://swap.ecko.finance/images/crypto/kayc.svg', }, { - symbol: "BKA", - coingeckoId: "", - name: "Backalley", - module: "free.backalley", - icon: "https://swap.ecko.finance/images/crypto/bka.svg", + symbol: 'BKA', + coingeckoId: '', + name: 'Backalley', + module: 'free.backalley', + icon: 'https://swap.ecko.finance/images/crypto/bka.svg', }, { - name: "CRNA", - symbol: "CRNA", - coingeckoId: "", - module: "free.corona-token", - icon: "https://swap.ecko.finance/images/crypto/corona.svg", + name: 'CRNA', + symbol: 'CRNA', + coingeckoId: '', + module: 'free.corona-token', + icon: 'https://swap.ecko.finance/images/crypto/corona.svg', }, { - name: "CRKK", - symbol: "CRKK", - coingeckoId: "", - module: "free.crankk01", - icon: "https://swap.ecko.finance/images/crypto/crankk.png", + name: 'CRKK', + symbol: 'CRKK', + coingeckoId: '', + module: 'free.crankk01', + icon: 'https://swap.ecko.finance/images/crypto/crankk.png', }, { - name: "CFLY", - symbol: "CFLY", - coingeckoId: "", - module: "free.cyberfly_token", - icon: "https://swap.ecko.finance/images/crypto/cfly.svg", + name: 'CFLY', + symbol: 'CFLY', + coingeckoId: '', + module: 'free.cyberfly_token', + icon: 'https://swap.ecko.finance/images/crypto/cfly.svg', }, { - name: "DOC", - symbol: "DOC", - coingeckoId: "", - module: "free.docu", - icon: "https://swap.ecko.finance/images/crypto/docushield.svg", + name: 'DOC', + symbol: 'DOC', + coingeckoId: '', + module: 'free.docu', + icon: 'https://swap.ecko.finance/images/crypto/docushield.svg', }, { - name: "FINUX", - symbol: "FINUX", - coingeckoId: "", - module: "free.finux", - icon: "https://swap.ecko.finance/images/crypto/finux.png", + name: 'FINUX', + symbol: 'FINUX', + coingeckoId: '', + module: 'free.finux', + icon: 'https://swap.ecko.finance/images/crypto/finux.png', }, { - symbol: "JDE", - name: "Jodie", - coingeckoId: "", - module: "free.jodie-token", - icon: "https://swap.ecko.finance/images/crypto/jodie.svg", + symbol: 'JDE', + name: 'Jodie', + coingeckoId: '', + module: 'free.jodie-token', + icon: 'https://swap.ecko.finance/images/crypto/jodie.svg', }, { - name: "Kapy", - symbol: "KAPY", - coingeckoId: "", - module: "free.kapybara-token", - icon: "https://swap.ecko.finance/images/crypto/kapy.svg", + name: 'Kapy', + symbol: 'KAPY', + coingeckoId: '', + module: 'free.kapybara-token', + icon: 'https://swap.ecko.finance/images/crypto/kapy.svg', }, { - id: "kishu-ken", - symbol: "KISHK", - coingeckoId: "", - name: "Kishu Ken", - module: "free.kishu-ken", - icon: "https://swap.ecko.finance/images/crypto/kishk.svg", + id: 'kishu-ken', + symbol: 'KISHK', + coingeckoId: '', + name: 'Kishu Ken', + module: 'free.kishu-ken', + icon: 'https://swap.ecko.finance/images/crypto/kishk.svg', }, { - name: "MAGA", - symbol: "MAGA", - coingeckoId: "", - module: "free.maga", - icon: "https://swap.ecko.finance/images/crypto/maga.png", + name: 'MAGA', + symbol: 'MAGA', + coingeckoId: '', + module: 'free.maga', + icon: 'https://swap.ecko.finance/images/crypto/maga.png', }, { - name: "Hype", - symbol: "HYPE", - coingeckoId: "", - module: "hypercent.prod-hype-coin", - icon: "https://swap.ecko.finance/images/crypto/hypercent-crypto.svg", + name: 'Hype', + symbol: 'HYPE', + coingeckoId: '', + module: 'hypercent.prod-hype-coin', + icon: 'https://swap.ecko.finance/images/crypto/hypercent-crypto.svg', }, { - id: "kaddex", - symbol: "KDX", - name: "Kaddex", - coingeckoId: "", - module: "kaddex.kdx", - icon: 
"https://swap.ecko.finance/images/crypto/kaddex-crypto.svg", + id: 'kaddex', + symbol: 'KDX', + name: 'Kaddex', + coingeckoId: '', + module: 'kaddex.kdx', + icon: 'https://swap.ecko.finance/images/crypto/kaddex-crypto.svg', }, { - id: "kdswap", - symbol: "KDS", - name: "KDSwap", - coingeckoId: "", - module: "kdlaunch.kdswap-token", - icon: "https://swap.ecko.finance/images/crypto/kds.svg", + id: 'kdswap', + symbol: 'KDS', + name: 'KDSwap', + coingeckoId: '', + module: 'kdlaunch.kdswap-token', + icon: 'https://swap.ecko.finance/images/crypto/kds.svg', }, { - id: "kdlaunch", - symbol: "KDL", - name: "KDLaunch", - coingeckoId: "", - module: "kdlaunch.token", - icon: "https://swap.ecko.finance/images/crypto/kdl.svg", + id: 'kdlaunch', + symbol: 'KDL', + name: 'KDLaunch', + coingeckoId: '', + module: 'kdlaunch.token', + icon: 'https://swap.ecko.finance/images/crypto/kdl.svg', }, { - name: "Mok", - symbol: "MOK", - coingeckoId: "", - module: "mok.token", - icon: "https://swap.ecko.finance/images/crypto/mok.svg", + name: 'Mok', + symbol: 'MOK', + coingeckoId: '', + module: 'mok.token', + icon: 'https://swap.ecko.finance/images/crypto/mok.svg', }, { - name: "BRO", - symbol: "BRO", - coingeckoId: "", - module: "n_582fed11af00dc626812cd7890bb88e72067f28c.bro", - icon: "https://swap.ecko.finance/images/crypto/bro.png", + name: 'BRO', + symbol: 'BRO', + coingeckoId: '', + module: 'n_582fed11af00dc626812cd7890bb88e72067f28c.bro', + icon: 'https://swap.ecko.finance/images/crypto/bro.png', }, { - name: "zUSD", - symbol: "zUSD", - coingeckoId: "", - module: "n_b742b4e9c600892af545afb408326e82a6c0c6ed.zUSD", - icon: "https://swap.ecko.finance/images/crypto/zUSD.svg", + name: 'zUSD', + symbol: 'zUSD', + coingeckoId: '', + module: 'n_b742b4e9c600892af545afb408326e82a6c0c6ed.zUSD', + icon: 'https://swap.ecko.finance/images/crypto/zUSD.svg', }, { - name: "HERON", - symbol: "HERON", - coingeckoId: "", - module: "n_e309f0fa7cf3a13f93a8da5325cdad32790d2070.heron", - icon: "https://swap.ecko.finance/images/crypto/heron.png", + name: 'HERON', + symbol: 'HERON', + coingeckoId: '', + module: 'n_e309f0fa7cf3a13f93a8da5325cdad32790d2070.heron', + icon: 'https://swap.ecko.finance/images/crypto/heron.png', }, { - name: "Flux", - symbol: "FLUX", - coingeckoId: "", - module: "runonflux.flux", - icon: "https://swap.ecko.finance/images/crypto/flux-crypto.svg", - } -] + name: 'Flux', + symbol: 'FLUX', + coingeckoId: '', + module: 'runonflux.flux', + icon: 'https://swap.ecko.finance/images/crypto/flux-crypto.svg', + }, +]; diff --git a/web/nuxt.config.ts b/web/nuxt.config.ts index 8af45573..2959f372 100644 --- a/web/nuxt.config.ts +++ b/web/nuxt.config.ts @@ -7,24 +7,20 @@ export default defineNuxtConfig({ }, }, - modules: [ - "nuxt-headlessui", - "@nuxtjs/tailwindcss", - "nuxt-graphql-request", - ], + modules: ['nuxt-headlessui', '@nuxtjs/tailwindcss', 'nuxt-graphql-request'], build: { transpile: ['@popperjs/core', 'nuxt-graphql-request', '@venegrad/vue3-click-outside'], }, alias: { - "cross-fetch": "cross-fetch/dist/browser-ponyfill.js", + 'cross-fetch': 'cross-fetch/dist/browser-ponyfill.js', }, runtimeConfig: { public: { CG_KEY: process.env.CG_KEY, - CG_URL: process.env.CG_URL || "https://api.coingecko.com/api/v3", + CG_URL: process.env.CG_URL || 'https://api.coingecko.com/api/v3', API_URL: process.env.API_URL, }, }, @@ -54,4 +50,4 @@ export default defineNuxtConfig({ headlessui: { prefix: '', }, -}) +}); diff --git a/web/plugins/chartjs.ts b/web/plugins/chartjs.ts index a02102da..a242b7ae 100644 --- 
a/web/plugins/chartjs.ts +++ b/web/plugins/chartjs.ts @@ -7,10 +7,20 @@ import { Title, Filler, Tooltip, - Legend -} from 'chart.js' + Legend, +} from 'chart.js'; import gradient from 'chartjs-plugin-gradient'; export default defineNuxtPlugin(() => { - Chart.register(gradient, CategoryScale, Filler, LinearScale, Title, Tooltip, Legend, PointElement, LineElement) -}) + Chart.register( + gradient, + CategoryScale, + Filler, + LinearScale, + Title, + Tooltip, + Legend, + PointElement, + LineElement, + ); +}); diff --git a/web/plugins/coingecko.ts b/web/plugins/coingecko.ts index 8262fb68..29ca0d75 100644 --- a/web/plugins/coingecko.ts +++ b/web/plugins/coingecko.ts @@ -1,8 +1,5 @@ export default defineNuxtPlugin(async () => { - const { - CG_KEY: apiKey, - CG_URL: baseUrl, - } = useRuntimeConfig().public; + const { CG_KEY: apiKey, CG_URL: baseUrl } = useRuntimeConfig().public; const request = async (endpoint: string, params = {}) => { const url = new URL(`${baseUrl}/${endpoint}`) as any; @@ -12,8 +9,8 @@ export default defineNuxtPlugin(async () => { try { const response = await fetch(url, { headers: { - 'x-cg-pro-api-key': apiKey - } + 'x-cg-pro-api-key': apiKey, + }, }); if (!response.ok) { diff --git a/web/plugins/outside.ts b/web/plugins/outside.ts index 95d8f3b5..af9e3d54 100644 --- a/web/plugins/outside.ts +++ b/web/plugins/outside.ts @@ -1,5 +1,5 @@ -import outside from "@venegrad/vue3-click-outside" +import outside from '@venegrad/vue3-click-outside'; -export default defineNuxtPlugin(async (nuxtApp) => { - nuxtApp.vueApp.directive('outside', outside) -}) +export default defineNuxtPlugin(async nuxtApp => { + nuxtApp.vueApp.directive('outside', outside); +}); diff --git a/web/tailwind.config.js b/web/tailwind.config.js index 8ecb120a..f93f70b3 100644 --- a/web/tailwind.config.js +++ b/web/tailwind.config.js @@ -1,31 +1,27 @@ /** @type {import('tailwindcss').Config} */ -const defaultTheme = require('tailwindcss/defaultTheme') +const defaultTheme = require('tailwindcss/defaultTheme'); export default { content: [ - "./components/**/*.{js,vue,ts}", - "./layouts/**/*.vue", - "./pages/**/*.vue", - "./plugins/**/*.{js,ts}", - "./app.vue", - "./error.vue", + './components/**/*.{js,vue,ts}', + './layouts/**/*.vue', + './pages/**/*.vue', + './plugins/**/*.{js,ts}', + './app.vue', + './error.vue', ], theme: { extend: { fontFamily: { - sans: [ - 'Inter', ...defaultTheme.fontFamily.sans - ], - title: [ - 'Roboto', ...defaultTheme.fontFamily.sans - ] + sans: ['Inter', ...defaultTheme.fontFamily.sans], + title: ['Roboto', ...defaultTheme.fontFamily.sans], }, fontSize: { - 'xs': ['0.75rem', '140%'], - 'sm': ['0.875rem', '140%'], - 'base': ['1rem', '140%'], - 'lg': ['1.125rem', '140%'], - 'xl': ['1.25rem', '140%'], + xs: ['0.75rem', '140%'], + sm: ['0.875rem', '140%'], + base: ['1rem', '140%'], + lg: ['1.125rem', '140%'], + xl: ['1.25rem', '140%'], '2xl': ['1.5rem', '140%'], }, colors: { @@ -37,7 +33,6 @@ export default { red: { DEFAULT: '#C6454B', }, - }, kadscan: { @@ -53,7 +48,6 @@ export default { 500: '#939393', }, - gray: { 900: '#010101', 800: '#1A1C1D', @@ -65,7 +59,7 @@ export default { 200: '#555757', 100: '#5E6060', }, - } + }, }, screens: { sm: '640px', @@ -75,5 +69,4 @@ export default { bazk: '1352px', }, }, -} - +};
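Note: taken together, the changes in this patch (double quotes converted to single quotes, trailing commas added to multi-line literals, semicolons appended to statements, long strings re-wrapped at roughly 100 characters, and parentheses dropped around single arrow-function parameters such as `async nuxtApp =>`) are consistent with running Prettier over the repository. The configuration below is an inferred sketch only; no Prettier config file appears in this diff, so the option values are assumptions reconstructed from the formatting visible above and may differ from the project's actual settings.

    // prettier.config.js — inferred sketch, NOT part of this diff.
    // Every value here is an assumption derived from the reformatting
    // pattern in the patch; adjust to match the repository's real config.
    module.exports = {
      semi: true,            // statements now end with semicolons
      singleQuote: true,     // "double" quotes rewritten as 'single'
      trailingComma: 'all',  // trailing commas in multi-line arrays/objects/calls
      printWidth: 100,       // long tooltip strings wrap onto their own line
      arrowParens: 'avoid',  // `async nuxtApp =>` instead of `async (nuxtApp) =>`
    };

If these assumptions hold, re-running `npx prettier --write .` (or the project's own format script) after future edits should keep new code consistent with the style introduced by this patch.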