diff --git a/.gitignore b/.gitignore
index dc281e26..584e59ce 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,7 +2,7 @@ node_modules/
 dist/
 data/
 draft/
-.env
+.env*
 .terraform
 .terraform.lock.hcl
 .terraform.tfstate.lock.info
diff --git a/Dockerfile b/Dockerfile
index 6d241cae..efc3525e 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -15,4 +15,7 @@ COPY --from=builder /app/src/kadena-server/config/schema.graphql ./dist/kadena-s
 COPY --from=builder /app/src/circulating-coins/ ./dist/circulating-coins/
 
 EXPOSE 3001
-CMD ["node", "dist/index.js", "--graphql"]
+ARG NODE_MODE=graphql
+
+# Use the build argument in CMD
+CMD ["sh", "-c", "node dist/index.js --${NODE_MODE}"]
diff --git a/deploy.sh b/deploy.sh
new file mode 100644
index 00000000..949a4581
--- /dev/null
+++ b/deploy.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+# Check if mode parameter is provided
+if [ -z "$1" ]; then
+  echo "Error: Please provide a mode (graphql, streaming, or missing)"
+  echo "Usage: ./deploy.sh <mode>"
+  exit 1
+fi
+
+MODE=$1
+
+# Build the image with the specified mode
+docker build --build-arg NODE_MODE=$MODE --no-cache -t kadindexer-ecr:$MODE .
+
+# Tag the image
+docker tag kadindexer-ecr:$MODE 325501467038.dkr.ecr.us-east-1.amazonaws.com/kadindexer-ecr:$MODE
+
+# Push to ECR
+docker push 325501467038.dkr.ecr.us-east-1.amazonaws.com/kadindexer-ecr:$MODE
\ No newline at end of file
diff --git a/indexer/.env.template b/indexer/.env.template
index 62be3523..13e77fc5 100644
--- a/indexer/.env.template
+++ b/indexer/.env.template
@@ -1,17 +1,15 @@
-NODE_API_URL=https://api.chainweb.com
-SYNC_BASE_URL="https://api.chainweb.com/chainweb/0.0"
-
-SYNC_MIN_HEIGHT=0
-SYNC_FETCH_INTERVAL_IN_BLOCKS=100
-SYNC_NETWORK="mainnet01"
-KADENA_GRAPHQL_API_URL=localhost
-KADENA_GRAPHQL_API_PORT=3001
-
-API_GATEWAY_URL=https://api.mainnet.kadindexer.io
-API_KADENA_URL=https://kadena.io
-
 DB_USERNAME=postgres
 DB_PASSWORD=password
 DB_NAME=indexer
-DB_HOST="YOUR_DB_HOST"
-DB_SSL_ENABLED=false
\ No newline at end of file
+DB_HOST=localhost
+DB_SSL_ENABLED=false
+
+NODE_API_URL=http://localhost:1848
+SYNC_BASE_URL=http://localhost:1848/chainweb/0.0
+SYNC_NETWORK=mainnet01
+
+# GraphQL Server Configuration
+KADENA_GRAPHQL_API_URL=http://localhost
+KADENA_GRAPHQL_API_PORT=3001
+API_GATEWAY_URL=http://localhost:3001
+API_KADENA_URL=kadena.io
diff --git a/indexer/src/index.ts b/indexer/src/index.ts
index 4960d420..831563b8 100644
--- a/indexer/src/index.ts
+++ b/indexer/src/index.ts
@@ -8,7 +8,6 @@ import { usePostgraphile } from './server/metrics';
 import { useKadenaGraphqlServer } from './kadena-server/server';
 import { closeDatabase } from './config/database';
 import { initializeDatabase } from './config/init';
-import { startBackfillCoinbaseTransactions } from './services/sync/coinbase';
 import { backfillBalances } from './services/sync/balances';
 import { startMissingBlocks } from './services/sync/missing';
 
@@ -42,8 +41,6 @@ async function main() {
     await backfillBalances();
     await closeDatabase();
     process.exit(0);
-  } else if (options.coinbase) {
-    await startBackfillCoinbaseTransactions();
   } else if (options.missing) {
     await startMissingBlocks();
     process.exit(0);
diff --git a/indexer/src/services/sync/coinbase.ts b/indexer/src/services/sync/coinbase.ts
index cc8e5cc8..90e2a975 100644
--- a/indexer/src/services/sync/coinbase.ts
+++ b/indexer/src/services/sync/coinbase.ts
@@ -1,4 +1,3 @@
-import { closeDatabase, rootPgPool, sequelize } from '../../config/database';
 import TransactionModel, { TransactionCreationAttributes } from '../../models/transaction';
 import Transfer, { TransferAttributes } from '../../models/transfer';
 import { Transaction } from 'sequelize';
@@ -6,8 +5,6 @@ import Event, { EventAttributes } from '../../models/event';
 import { getCoinTransfers } from './transfers';
 import Signer from '../../models/signer';
 import Guard from '../../models/guard';
-import { handleSingleQuery } from '../../kadena-server/utils/raw-query';
-import { TransactionDetailsCreationAttributes } from '../../models/transaction-details';
 
 interface CoinbaseTransactionData {
   transactionAttributes: TransactionCreationAttributes;
@@ -15,49 +12,6 @@
   transfersCoinAttributes: TransferAttributes[];
 }
 
-export async function startBackfillCoinbaseTransactions() {
-  console.info('[INFO][SYNC][COINBASE] Starting coinbase backfill ...');
-
-  const limit = 1000; // Number of rows to process in one batch
-  let offset = 0;
-
-  while (true) {
-    console.info(`[INFO][SYNC][COINBASE] Fetching rows from offset: ${offset}, limit: ${limit}`);
-    const res = await rootPgPool.query(
-      `SELECT b.id, b.coinbase, b."chainId", b."creationTime" FROM "Blocks" b ORDER BY b.id LIMIT $1 OFFSET $2`,
-      [limit, offset],
-    );
-
-    const rows = res.rows;
-    if (rows.length === 0) {
-      console.info('[INFO][SYNC][COINBASE] No more rows to process.');
-      break;
-    }
-
-    const tx = await sequelize.transaction();
-    try {
-      await addCoinbaseTransactions(rows, tx);
-      await tx.commit();
-      console.info(`[INFO][SYNC][COINBASE] Batch at offset ${offset} processed successfully.`);
-      offset += limit;
-    } catch (batchError) {
-      console.error(`[ERROR][SYNC][COINBASE] Processing batch at offset ${offset}:`, batchError);
-      try {
-        await tx.rollback();
-        console.info(
-          `[INFO][SYNC][COINBASE] Transaction for batch at offset ${offset} rolled back.`,
-        );
-      } catch (rollbackError) {
-        console.error('[ERROR][SYNC][COINBASE] Error during rollback:', rollbackError);
-      }
-      break;
-    }
-  }
-
-  await closeDatabase();
-  process.exit(0);
-}
-
 export async function addCoinbaseTransactions(
   rows: Array<any>,
   tx: Transaction,
diff --git a/indexer/src/utils/helpers.ts b/indexer/src/utils/helpers.ts
index 53b23a8b..2bf76257 100644
--- a/indexer/src/utils/helpers.ts
+++ b/indexer/src/utils/helpers.ts
@@ -24,40 +24,6 @@ export function delay(ms: number): Promise<void> {
   return new Promise(resolve => setTimeout(resolve, ms));
 }
 
-/**
- * Splits a range into smaller chunks based on a specified size.
- * This function is useful for breaking down a large range of numbers into manageable parts,
- * for example, when processing data in batches.
- *
- * @param min The minimum value of the range to split.
- * @param max The maximum value of the range to split.
- * @param rangeSize The size of each chunk.
- * @returns An array of arrays, where each inner array represents a chunk with a start and end value.
- */
-export function splitIntoChunks(min: number, max: number, rangeSize: number): number[][] {
-  const chunks = [];
-  let current = max;
-  if (max - min <= rangeSize) {
-    return [[min, max]];
-  }
-  while (current > min) {
-    const next = Math.max(current - rangeSize, min);
-    chunks.push([next, current]);
-    current = next - 1;
-  }
-  return chunks;
-}
-
-/**
- * Calculates the size of a data object in bytes.
- *
- * @param data The data object to size.
- * @returns The size of the data in bytes.
- */
-export function calculateDataSize(data: any) {
-  return Buffer.byteLength(JSON.stringify(data), 'utf8');
-}
-
 /**
  * Retrieves a required environment variable as a string.
  * Throws an error if the variable is not found, ensuring that the application configuration is correctly defined.
@@ -73,46 +39,3 @@ export function getRequiredEnvString(key: string): string {
   }
   return value;
 }
-
-/**
- * Retrieves a required environment variable as a number.
- * Parses the variable value as an integer and throws an error if the variable is not found or if it cannot be parsed as a number.
- * This ensures that numeric environment configurations are valid and available before proceeding.
- *
- * @param key - The name of the environment variable to retrieve and parse as a number.
- * @returns The parsed value of the environment variable as a number.
- * @throws {Error} If the environment variable is not set or cannot be parsed as a valid number.
- */
-export function getRequiredEnvNumber(key: string): number {
-  const value = process.env[key];
-  if (!value) {
-    throw new Error(`[ERROR][ENV][MISSING] Environment variable ${key} is required`);
-  }
-  const parsed = parseInt(value, 10);
-  if (isNaN(parsed)) {
-    throw new Error(`[ERROR][ENV][INVALID] Environment variable ${key} must be a valid number`);
-  }
-  return parsed;
-}
-
-/**
- * Creates a signal object that can be used to manage shutdown or interrupt signals in asynchronous operations.
- * It provides a mechanism to gracefully exit from a loop or terminate a process when an external signal is received.
- * The signal object contains a boolean flag that is initially set to false and can be toggled to true using the
- * trigger method. This flag can be checked periodically in asynchronous loops to determine if the process should
- * continue running or begin shutdown procedures.
- *
- * @returns An object with properties 'isTriggered' to check the current state of the signal,
- * and 'trigger' to change the state to triggered, indicating that a shutdown or interrupt has been requested.
- */
-export function createSignal() {
-  let isTriggered = false;
-  return {
-    get isTriggered() {
-      return isTriggered;
-    },
-    trigger() {
-      isTriggered = true;
-    },
-  };
-}
diff --git a/package.json b/package.json
index 699b1390..31aa4256 100644
--- a/package.json
+++ b/package.json
@@ -21,7 +21,11 @@
     "run-indexer-workflow": "act -W .github/workflows/indexer.yml --secret-file ./indexer/.env",
     "format": "prettier --write \"**/*.{yml,yaml,json,md,js,ts}\"",
     "format:check": "prettier --check \"**/*.{yml,yaml,json,md,js,ts}\"",
-    "prepare": "husky"
+    "prepare": "husky",
+    "docker:login": "aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin 325501467038.dkr.ecr.us-east-1.amazonaws.com",
+    "deploy:missing": "sh deploy.sh missing",
+    "deploy:streaming": "sh deploy.sh streaming",
+    "deploy:graphql": "sh deploy.sh graphql"
   },
   "dependencies": {},
   "devDependencies": {
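Usage sketch (not part of the diff itself): with the deploy.sh script and the package.json scripts added above, a build-and-push for one runtime mode would look roughly like the following, assuming locally configured AWS credentials that can push to the 325501467038 ECR registry and a running Docker daemon.

    # Log Docker in to ECR via the new npm helper
    npm run docker:login

    # Build, tag, and push the image for one mode (graphql, streaming, or missing)
    npm run deploy:graphql

    # Equivalent direct invocation of the script
    ./deploy.sh graphql

Note that Docker build arguments are not visible inside a running container unless they are also exported with ENV, so with the CMD above the container still needs NODE_MODE supplied at run time (for example, docker run -e NODE_MODE=graphql ...) for the --${NODE_MODE} expansion to resolve.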