diff --git a/backfill/Dockerfile.create-coinbase b/backfill/Dockerfile.create-coinbase new file mode 100644 index 00000000..6b65c8fb --- /dev/null +++ b/backfill/Dockerfile.create-coinbase @@ -0,0 +1,11 @@ +FROM golang:1.23.7 AS builder +WORKDIR /app +COPY . . +RUN go mod download +RUN CGO_ENABLED=0 GOOS=linux GOARCH=arm64 go build -o create-coinbase ./create-coinbase/create-coinbase.go + +FROM scratch +WORKDIR /app +COPY ./global-bundle.pem ./global-bundle.pem +COPY --from=builder /app/create-coinbase . +CMD ["./create-coinbase"] diff --git a/backfill/Dockerfile.creationtime b/backfill/Dockerfile.creationtime new file mode 100644 index 00000000..bce23fdd --- /dev/null +++ b/backfill/Dockerfile.creationtime @@ -0,0 +1,11 @@ +FROM golang:1.23.7 AS builder +WORKDIR /app +COPY . . +RUN go mod download +RUN CGO_ENABLED=0 GOOS=linux GOARCH=arm64 go build -o fix-creationtime ./fix-creationtime/fix-creationtime.go + +FROM scratch +WORKDIR /app +COPY ./global-bundle.pem ./global-bundle.pem +COPY --from=builder /app/fix-creationtime . +CMD ["./fix-creationtime"] diff --git a/backfill/Dockerfile.migrate-coinbase b/backfill/Dockerfile.migrate-coinbase new file mode 100644 index 00000000..de4a82e6 --- /dev/null +++ b/backfill/Dockerfile.migrate-coinbase @@ -0,0 +1,11 @@ +FROM golang:1.23.7 AS builder +WORKDIR /app +COPY . . +RUN go mod download +RUN CGO_ENABLED=0 GOOS=linux GOARCH=arm64 go build -o migrate-coinbase ./migrate-coinbase/migrate-coinbase.go + +FROM scratch +WORKDIR /app +COPY ./global-bundle.pem ./global-bundle.pem +COPY --from=builder /app/migrate-coinbase . +CMD ["./migrate-coinbase"] diff --git a/backfill/create-coinbase/create-coinbase.go b/backfill/create-coinbase/create-coinbase.go new file mode 100644 index 00000000..d175cc94 --- /dev/null +++ b/backfill/create-coinbase/create-coinbase.go @@ -0,0 +1,179 @@ +package main + +import ( + "context" + "flag" + "fmt" + "go-backfill/config" + "go-backfill/fetch" + "go-backfill/process" + "go-backfill/repository" + "log" + "strconv" + "time" + + "github.com/jackc/pgx/v5" +) + +const ( + coinbaseBatchSize = 1000 +) + +type CoinbaseData struct { + ID int64 `json:"id"` + Coinbase string `json:"coinbase"` + ChainId int `json:"chainId"` + CreationTime string `json:"creationTime"` +} + +func createBatchCoinbase(conn *pgx.Conn, lastId int64, network string) (bool, int64, error) { + startTime := time.Now() + + // Start transaction for writes + tx, err := conn.Begin(context.Background()) + if err != nil { + return false, lastId, fmt.Errorf("failed to begin transaction: %v", err) + } + defer tx.Rollback(context.Background()) + + // Fetch blocks with coinbase data using cursor pagination + query := ` + SELECT id, coinbase, "chainId", "creationTime" + FROM "Blocks" + WHERE id > $1 + ORDER BY id ASC + LIMIT $2 + ` + + rows, err := conn.Query(context.Background(), query, lastId, coinbaseBatchSize) + if err != nil { + return false, lastId, fmt.Errorf("failed to execute query: %v", err) + } + defer rows.Close() + + var blocks []CoinbaseData + for rows.Next() { + var block CoinbaseData + if err := rows.Scan(&block.ID, &block.Coinbase, &block.ChainId, &block.CreationTime); err != nil { + return false, lastId, fmt.Errorf("failed to scan row: %v", err) + } + blocks = append(blocks, block) + } + + if len(blocks) == 0 { + return false, lastId, nil + } + + // Process each block's coinbase transaction + var transactions []repository.TransactionAttributes + var transactionIds []int64 + for _, block := range blocks { + creationTime, err := strconv.ParseInt(block.CreationTime, 10, 64) + if err != nil { + return false, lastId, fmt.Errorf("failed to parse creation time for block %d: %v", block.ID, err) + } + tx, err := process.ProcessCoinbaseTransaction(block.Coinbase, block.ID, creationTime, int64(block.ChainId)) + if err != nil { + return false, lastId, fmt.Errorf("failed to process coinbase for block %d: %v", block.ID, err) + } + transactions = append(transactions, tx) + } + + // Save transactions to database + if len(transactions) > 0 { + ids, err := repository.SaveTransactions(tx, transactions, repository.TransactionAttributes{}) + if err != nil { + return false, lastId, fmt.Errorf("failed to save transactions: %v", err) + } + transactionIds = ids + + // Process and save events and transfers for each coinbase transaction + for i, block := range blocks { + transactionId := transactionIds[i] + + // Create a ProcessedPayload structure for the coinbase events + processedPayload := fetch.ProcessedPayload{ + Header: fetch.Header{ + ChainId: block.ChainId, + }, + Coinbase: []byte(block.Coinbase), + } + + // Prepare and save events + events, err := process.PrepareEvents(network, processedPayload, []int64{transactionId}) + if err != nil { + return false, lastId, fmt.Errorf("failed to prepare events for block %d: %v", block.ID, err) + } + + if err := repository.SaveEventsToDatabase(events, tx); err != nil { + return false, lastId, fmt.Errorf("failed to save events for block %d: %v", block.ID, err) + } + + // Prepare and save transfers + transfers, err := process.PrepareTransfers(network, processedPayload, []int64{transactionId}) + if err != nil { + return false, lastId, fmt.Errorf("failed to prepare transfers for block %d: %v", block.ID, err) + } + + if err := repository.SaveTransfersToDatabase(transfers, tx); err != nil { + return false, lastId, fmt.Errorf("failed to save transfers for block %d: %v", block.ID, err) + } + } + } + + if err := tx.Commit(context.Background()); err != nil { + return false, lastId, fmt.Errorf("failed to commit transaction: %v", err) + } + + elapsed := time.Since(startTime) + log.Printf("Processed %d coinbase transactions, their events, and transfers. Batch time: %.2fs", len(transactions), elapsed.Seconds()) + + // Return the last processed ID as the new cursor + lastProcessedId := blocks[len(blocks)-1].ID + return len(blocks) == coinbaseBatchSize, lastProcessedId, nil +} + +func main() { + envFile := flag.String("env", ".env", "Path to the .env file") + flag.Parse() + + config.InitEnv(*envFile) + env := config.GetConfig() + + // Database connection + connStr := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s sslmode=disable", + env.DbHost, env.DbPort, env.DbUser, env.DbPassword, env.DbName) + + conn, err := pgx.Connect(context.Background(), connStr) + if err != nil { + log.Fatalf("Failed to connect to database: %v", err) + } + defer conn.Close(context.Background()) + + log.Println("Connected to database") + + lastId := int64(0) + hasMore := true + totalBlocks := int64(104813544) + processedBlocks := int64(0) + + for hasMore { + var err error + hasMore, lastId, err = createBatchCoinbase(conn, lastId, env.Network) + if err != nil { + log.Fatalf("Error during batch processing: %v", err) + } + processedBlocks += coinbaseBatchSize + progress := float64(processedBlocks) / float64(totalBlocks) * 100 + + if hasMore { + log.Printf("Progress: %.2f%% (%d/%d blocks processed)", progress, processedBlocks, totalBlocks) + } else { + log.Printf("Progress: 100.00%%") + } + + time.Sleep(100 * time.Millisecond) + } + + log.Println("Coinbase creation completed successfully") +} diff --git a/backfill/fetch/process_payloads.go b/backfill/fetch/process_payloads.go index 08d0a6dd..340301ea 100644 --- a/backfill/fetch/process_payloads.go +++ b/backfill/fetch/process_payloads.go @@ -17,8 +17,8 @@ type Event struct { } type Module struct { - Namespace string `json:"namespace"` - Name string `json:"name"` + Namespace *string `json:"namespace"` + Name string `json:"name"` } type Result struct { diff --git a/backfill/fix-creationtime/fix-creationtime.go b/backfill/fix-creationtime/fix-creationtime.go new file mode 100644 index 00000000..8206c626 --- /dev/null +++ b/backfill/fix-creationtime/fix-creationtime.go @@ -0,0 +1,114 @@ +package main + +import ( + "context" + "flag" + "fmt" + "go-backfill/config" + "log" + "time" + + "github.com/jackc/pgx/v5" +) + +const ( + batchSize = 1000 + lastId = 105126770 +) + +func fixBatchCreationTime(conn *pgx.Conn, lastId int64) (bool, int64, error) { + startTime := time.Now() + + // Start transaction for writes + tx, err := conn.Begin(context.Background()) + if err != nil { + return false, lastId, fmt.Errorf("failed to begin transaction: %v", err) + } + defer tx.Rollback(context.Background()) + + // Update creation time for coinbase transactions in batch + updateQuery := ` + WITH OrderedUpdates AS ( + SELECT id + FROM "Transactions" + WHERE id > $1 AND sender = 'coinbase' + ORDER BY id ASC + LIMIT $2 + ) + UPDATE "Transactions" + SET creationtime = (CAST(creationtime AS BIGINT) / 1000000)::TEXT + FROM OrderedUpdates + WHERE "Transactions".id = OrderedUpdates.id + RETURNING "Transactions".id, "Transactions".creationtime + ` + + rows, err := tx.Query(context.Background(), updateQuery, lastId, batchSize) + if err != nil { + return false, lastId, fmt.Errorf("failed to execute update: %v", err) + } + defer rows.Close() + + var updatedCount int + var lastProcessedId int64 + var lastCreationTime string + for rows.Next() { + if err := rows.Scan(&lastProcessedId, &lastCreationTime); err != nil { + return false, lastId, fmt.Errorf("failed to scan row: %v", err) + } + updatedCount++ + + } + + if err := tx.Commit(context.Background()); err != nil { + return false, lastId, fmt.Errorf("failed to commit transaction: %v", err) + } + + elapsed := time.Since(startTime) + log.Printf("Fixed creation time for %d coinbase transactions. Batch time: %.2fs", updatedCount, elapsed.Seconds()) + + return updatedCount == batchSize, lastProcessedId, nil +} + +func main() { + envFile := flag.String("env", ".env", "Path to the .env file") + flag.Parse() + + config.InitEnv(*envFile) + env := config.GetConfig() + + // Database connection + connStr := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s sslmode=disable", + env.DbHost, env.DbPort, env.DbUser, env.DbPassword, env.DbName) + + conn, err := pgx.Connect(context.Background(), connStr) + if err != nil { + log.Fatalf("Failed to connect to database: %v", err) + } + defer conn.Close(context.Background()) + + log.Println("Connected to database") + + lastId := int64(0) + hasMore := true + processedTransactions := int64(0) + + for hasMore { + var err error + hasMore, lastId, err = fixBatchCreationTime(conn, lastId) + if err != nil { + log.Fatalf("Error during batch processing: %v", err) + } + processedTransactions += batchSize + progress := float64(processedTransactions) / float64(105126770) * 100 + + if hasMore { + log.Printf("Progress: %.2f%% (%d/%d transactions processed)", progress, processedTransactions, lastId) + } else { + log.Printf("Progress: 100.00%%") + } + + time.Sleep(100 * time.Millisecond) + } + + log.Println("Creation time fix completed successfully") +} diff --git a/backfill/middle-backfill/middle-backfill.go b/backfill/middle-backfill/middle-backfill.go index 8df44f29..53bc883e 100644 --- a/backfill/middle-backfill/middle-backfill.go +++ b/backfill/middle-backfill/middle-backfill.go @@ -37,7 +37,6 @@ func main() { ChainIdStr := strconv.FormatInt(maxHeight.ChainID, 10) Height := cuts.Hashes[ChainIdStr].Height Hash := cuts.Hashes[ChainIdStr].Hash - // fmt.Println("Hash: ", Hash, " ChainId: ", ChainIdStr, " Height: ", Height, " MaxHeight: ", maxHeight.MaxHeight) process.StartBackfill(Height, Hash, int(maxHeight.ChainID), int(maxHeight.MaxHeight+1), pool) } } diff --git a/backfill/migrate-coinbase/migrate-coinbase.go b/backfill/migrate-coinbase/migrate-coinbase.go new file mode 100644 index 00000000..f8b7bc9c --- /dev/null +++ b/backfill/migrate-coinbase/migrate-coinbase.go @@ -0,0 +1,124 @@ +package main + +import ( + "database/sql" + "flag" + "fmt" + "go-backfill/config" + "log" + "time" + + _ "github.com/lib/pq" // PostgreSQL driver +) + +const ( + batchSize = 1000 + stopID = 173921979 +) + +func migrateBatchTransactions(db *sql.DB, lastID int) (int, bool, error) { + startTime := time.Now() + + tx, err := db.Begin() + if err != nil { + return 0, false, fmt.Errorf("failed to begin transaction: %v", err) + } + defer tx.Rollback() + + // Insert batch into TransactionDetails + insertQuery := ` + INSERT INTO "TransactionDetails" ( + "transactionId", code, continuation, data, gas, gaslimit, gasprice, + nonce, pactid, proof, rollback, sigs, step, ttl, "createdAt", "updatedAt" + ) + SELECT + id, code, continuation, data, gas, gaslimit, gasprice, + nonce, pactid, proof, rollback, sigs, step, ttl, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP + FROM "Transactions" + WHERE id > $1 + ORDER BY id ASC + LIMIT $2 + RETURNING "transactionId"; + ` + + rows, err := tx.Query(insertQuery, lastID, batchSize) + if err != nil { + return 0, false, fmt.Errorf("failed to execute query: %v", err) + } + defer rows.Close() + + var newLastID int + var insertedCount int + + // Scan through all rows and keep the last ID + for rows.Next() { + if err := rows.Scan(&newLastID); err != nil { + return 0, false, fmt.Errorf("failed to scan row: %v", err) + } + insertedCount++ + } + + if err := tx.Commit(); err != nil { + return 0, false, fmt.Errorf("failed to commit transaction: %v", err) + } + + elapsed := time.Since(startTime) + log.Printf("Inserted %d non-coinbase transactions. Last ID: %d. Batch time: %.2fs", + insertedCount, newLastID, elapsed.Seconds()) + + hasNext := insertedCount == batchSize + + return newLastID, hasNext, nil +} + +func migrateTransactionDbInformation(envFile string) error { + config.InitEnv(envFile) + env := config.GetConfig() + + connStr := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s sslmode=disable", + env.DbHost, env.DbPort, env.DbUser, env.DbPassword, env.DbName) + + db, err := sql.Open("postgres", connStr) + if err != nil { + return fmt.Errorf("failed to connect to database: %v", err) + } + defer db.Close() + + if err := db.Ping(); err != nil { + return fmt.Errorf("failed to ping database: %v", err) + } + + log.Println("Connected to database") + + lastIdInserted := 0 + + hasNext := true + for hasNext { + newLastId, newHasNext, err := migrateBatchTransactions(db, lastIdInserted) + if err != nil { + return fmt.Errorf("error during batch insert: %v", err) + } + + lastIdInserted = newLastId + hasNext = newHasNext + time.Sleep(100 * time.Millisecond) + if hasNext { + log.Printf("Progress: %.2f%%", float64(lastIdInserted)/float64(stopID)*100) + } else { + log.Printf("Progress: 100.00%%") + } + } + + log.Println("Migration completed successfully") + return nil +} + +func main() { + envFile := flag.String("env", ".env", "Path to the .env file") + flag.Parse() + + // Run the migration + if err := migrateTransactionDbInformation(*envFile); err != nil { + log.Fatalf("Migration failed: %v", err) + } +} diff --git a/backfill/process/coinbase.go b/backfill/process/coinbase.go index f019b489..f2b7719d 100644 --- a/backfill/process/coinbase.go +++ b/backfill/process/coinbase.go @@ -9,55 +9,44 @@ import ( ) type Coinbase struct { - ReqKey string `json:"reqKey"` - TxID int `json:"txId"` - Events []fetch.Event `json:"events"` - Result json.RawMessage `json:"result"` - Logs string `json:"logs"` + Gas int `json:"gas"` + Logs string `json:"logs"` + TxID int `json:"txId"` + Events []fetch.Event `json:"events"` + ReqKey string `json:"reqKey"` + Result json.RawMessage `json:"result"` + MetaData *json.RawMessage `json:"metaData"` + Continuation *json.RawMessage `json:"continuation"` } func decodeCoinbase(jsonStr string) (*Coinbase, error) { + if jsonStr == "" { + return nil, fmt.Errorf("empty JSON string provided") + } var coinbase Coinbase err := json.Unmarshal([]byte(jsonStr), &coinbase) if err != nil { - return nil, err + return nil, fmt.Errorf("failed to unmarshal coinbase JSON: %v, input: %s", err, jsonStr) } return &coinbase, nil } -func processCoinbaseTransaction(coinbase string, blockId int64, creationTime int64, chainId int64) (repository.TransactionAttributes, error) { - +func ProcessCoinbaseTransaction(coinbase string, blockId int64, creationTime int64, chainId int64) (repository.TransactionAttributes, error) { coinbaseDecoded, err := decodeCoinbase(coinbase) if err != nil { return repository.TransactionAttributes{}, fmt.Errorf("decoding Coinbase JSON of block %d: %w", blockId, err) } - emptyJSON, _ := json.Marshal(map[string]interface{}{}) - emptyArray, _ := json.Marshal([]interface{}{}) - txAttribute := repository.TransactionAttributes{ BlockId: blockId, - Code: emptyJSON, - Data: emptyJSON, ChainId: int(chainId), - CreationTime: strconv.FormatInt(creationTime, 10), - GasLimit: "0", - GasPrice: "0", + CreationTime: strconv.FormatInt(creationTime/1000000, 10), Hash: coinbaseDecoded.ReqKey, - Nonce: "", - PactId: nil, - Continuation: emptyJSON, - Gas: "0", Result: coinbaseDecoded.Result, Logs: coinbaseDecoded.Logs, NumEvents: len(coinbaseDecoded.Events), RequestKey: coinbaseDecoded.ReqKey, - Rollback: false, Sender: "coinbase", - Sigs: emptyArray, - Step: 0, - Proof: nil, - TTL: "0", TxId: fmt.Sprintf("%d", coinbaseDecoded.TxID), } diff --git a/backfill/process/process_events.go b/backfill/process/process_events.go index 5363ab60..3368d44e 100644 --- a/backfill/process/process_events.go +++ b/backfill/process/process_events.go @@ -14,11 +14,8 @@ func PrepareEvents(network string, payload fetch.ProcessedPayload, transactionsI events := make([]repository.EventAttributes, 0, len(transactions)*avgEventsPerTransaction) for txIndex, t := range transactions { - for eventIndex, event := range t.Events { - module := buildModuleName(event.Module.Namespace, event.Module.Name) - qualName := buildModuleName(event.Module.Namespace, event.Module.Name) paramsJSON, err := json.Marshal(event.Params) @@ -40,11 +37,6 @@ func PrepareEvents(network string, payload fetch.ProcessedPayload, transactionsI } } - // TODO: This will be removed after TransactionDetails migration - // if network == "mainnet01" { - // return events, nil - // } - coinbaseDecoded, err := decodeCoinbase(string(payload.Coinbase)) if err != nil { return nil, fmt.Errorf("decoding Coinbase JSON of block: %w", err) diff --git a/backfill/process/process_transactions.go b/backfill/process/process_transactions.go index cf677b6c..096fbb60 100644 --- a/backfill/process/process_transactions.go +++ b/backfill/process/process_transactions.go @@ -31,10 +31,11 @@ type CmdData struct { } `json:"payload"` } -func PrepareTransactions(network string, blockId int64, payload fetch.ProcessedPayload, block repository.BlockAttributes) ([]repository.TransactionAttributes, error) { +func PrepareTransactions(network string, blockId int64, payload fetch.ProcessedPayload, block repository.BlockAttributes) ([]repository.TransactionAttributes, []repository.TransactionDetailsAttributes, repository.TransactionAttributes, error) { transactions := payload.Transactions transactionRecords := make([]repository.TransactionAttributes, 0, len(transactions)) + transactionDetailsRecords := make([]repository.TransactionDetailsAttributes, 0, len(transactions)) var cmdData CmdData var continuationData struct { @@ -51,17 +52,13 @@ func PrepareTransactions(network string, blockId int64, payload fetch.ProcessedP }{} if err := json.Unmarshal(t.Cmd, &rawCmd); err != nil { - return nil, fmt.Errorf("unmarshaling Cmd JSON for transaction %s: %w", t.Hash, err) + return nil, nil, repository.TransactionAttributes{}, fmt.Errorf("unmarshaling Cmd JSON for transaction %s: %w", t.Hash, err) } if err := json.Unmarshal([]byte(rawCmd), &cmdData); err != nil { - return nil, fmt.Errorf("unmarshaling raw command for transaction %s: %w", t.Hash, err) + return nil, nil, repository.TransactionAttributes{}, fmt.Errorf("unmarshaling raw command for transaction %s: %w", t.Hash, err) } - // if err := json.Unmarshal([]byte(rawCmd), &cmdData); err != nil { - // return nil, fmt.Errorf("unmarshaling raw command for transaction %s: %w", t.Hash, err) - // } - continuationRaw := json.RawMessage("{}") if string(t.Continuation) != "null" { continuationRaw = t.Continuation @@ -69,15 +66,15 @@ func PrepareTransactions(network string, blockId int64, payload fetch.ProcessedP codeRaw, err := ensureNotEmpty(cmdData.Payload.Exec.Code) if err != nil { - return nil, fmt.Errorf("ensuring code is not empty for transaction %s: %w", t.Hash, err) + return nil, nil, repository.TransactionAttributes{}, fmt.Errorf("ensuring code is not empty for transaction %s: %w", t.Hash, err) } dataRaw, err := ensureNotEmpty(cmdData.Payload.Exec.Data) if err != nil { - return nil, fmt.Errorf("ensuring data is not empty for transaction %s: %w", t.Hash, err) + return nil, nil, repository.TransactionAttributes{}, fmt.Errorf("ensuring data is not empty for transaction %s: %w", t.Hash, err) } if err := json.Unmarshal(continuationRaw, &continuationData); err != nil { - return nil, fmt.Errorf("unmarshaling Continuation for transaction %s: %w", t.Hash, err) + return nil, nil, repository.TransactionAttributes{}, fmt.Errorf("unmarshaling Continuation for transaction %s: %w", t.Hash, err) } rollback := true @@ -91,14 +88,13 @@ func PrepareTransactions(network string, blockId int64, payload fetch.ProcessedP if cmdData.Meta.ChainId != "" { chainId, err = strconv.Atoi(cmdData.Meta.ChainId) if err != nil { - return nil, fmt.Errorf("converting ChainId for transaction %s: %w", t.Hash, err) + return nil, nil, repository.TransactionAttributes{}, fmt.Errorf("converting ChainId for transaction %s: %w", t.Hash, err) } } else { chainId = block.ChainId } txId := strconv.Itoa(t.TxId) - // creationTimeStr := strconv.FormatFloat(cmdData.Meta.CreationTime, 'f', -1, 64) gas := strconv.Itoa(t.Gas) var proof *string @@ -112,46 +108,45 @@ func PrepareTransactions(network string, blockId int64, payload fetch.ProcessedP nonce = strings.ReplaceAll(nonce, "\"", "") transactionRecord := repository.TransactionAttributes{ BlockId: blockId, - Code: codeRaw, - Data: dataRaw, ChainId: chainId, CreationTime: string(cmdData.Meta.CreationTime), - GasLimit: string(cmdData.Meta.GasLimit), - GasPrice: string(cmdData.Meta.GasPrice), Hash: t.Hash, - Nonce: nonce, - PactId: continuationData.PactID, - Continuation: continuationRaw, - Gas: gas, Result: t.Result, - Proof: proof, Logs: t.Logs, NumEvents: len(t.Events), RequestKey: t.ReqKey, - Rollback: rollback, Sender: cmdData.Meta.Sender, - Sigs: t.Sigs, - Step: step, - TTL: string(cmdData.Meta.TTL), TxId: txId, } - transactionRecords = append(transactionRecords, transactionRecord) - } - // TODO: This will be removed after TransactionDetails migration - // if network == "mainnet01" { - // return transactionRecords, nil - // } + // Create transaction details record + transactionDetailsRecord := repository.TransactionDetailsAttributes{ + TransactionId: -1, + Code: codeRaw, + Continuation: continuationRaw, + Data: dataRaw, + Gas: gas, + GasLimit: string(cmdData.Meta.GasLimit), + GasPrice: string(cmdData.Meta.GasPrice), + Nonce: nonce, + PactId: continuationData.PactID, + Proof: proof, + Rollback: rollback, + Sigs: t.Sigs, + Step: step, + TTL: string(cmdData.Meta.TTL), + } - coinbaseTx, err := processCoinbaseTransaction(string(payload.Coinbase), blockId, block.CreationTime, int64(block.ChainId)) + transactionRecords = append(transactionRecords, transactionRecord) + transactionDetailsRecords = append(transactionDetailsRecords, transactionDetailsRecord) + } + coinbaseTx, err := ProcessCoinbaseTransaction(string(payload.Coinbase), blockId, block.CreationTime, int64(block.ChainId)) if err != nil { - return nil, fmt.Errorf("processing coinbase transaction %d: %w", blockId, err) + return nil, nil, repository.TransactionAttributes{}, fmt.Errorf("processing coinbase transaction %d: %w", blockId, err) } - transactionRecords = append(transactionRecords, coinbaseTx) - - return transactionRecords, nil + return transactionRecords, transactionDetailsRecords, coinbaseTx, nil } func ensureNotEmpty(raw json.RawMessage) (json.RawMessage, error) { diff --git a/backfill/process/process_transfers.go b/backfill/process/process_transfers.go index 579a4d65..6cfc55b7 100644 --- a/backfill/process/process_transfers.go +++ b/backfill/process/process_transfers.go @@ -20,11 +20,6 @@ func PrepareTransfers(network string, payload fetch.ProcessedPayload, transactio transfers = append(transfers, nftTransfers...) } - // TODO: This will be removed after TransactionDetails migration - // if network == "mainnet01" { - // return transfers, nil - // } - coinbaseDecoded, err := decodeCoinbase(string(payload.Coinbase)) if err != nil { return nil, fmt.Errorf("decoding Coinbase JSON of block: %w", err) diff --git a/backfill/process/save_payloads.go b/backfill/process/save_payloads.go index 84889792..3169ab8b 100644 --- a/backfill/process/save_payloads.go +++ b/backfill/process/save_payloads.go @@ -70,11 +70,17 @@ func savePayloads(network string, chainId int, processedPayloads []fetch.Process for index, processedPayload := range processedPayloads { var blockId = blockIds[index] var currBlock = blocks[index] - txs, err := PrepareTransactions(network, blockId, processedPayload, currBlock) + txs, txDetails, txCoinbase, err := PrepareTransactions(network, blockId, processedPayload, currBlock) if err != nil { return Counters{}, DataSizeTracker{}, fmt.Errorf("saving transactions -> %w", err) } - transactionIds, err := repository.SaveTransactions(tx, txs) + + transactionIds, err := repository.SaveTransactions(tx, txs, txCoinbase) + if err != nil { + return Counters{}, DataSizeTracker{}, fmt.Errorf("saving transactions -> %w", err) + } + + err = repository.SaveTransactionDetails(tx, txDetails, transactionIds) if err != nil { return Counters{}, DataSizeTracker{}, fmt.Errorf("saving transactions -> %w", err) } diff --git a/backfill/process/utils.go b/backfill/process/utils.go index 4a90db0c..d0c7f40c 100644 --- a/backfill/process/utils.go +++ b/backfill/process/utils.go @@ -154,9 +154,9 @@ func convertToFloat64(event fetch.Event, index int) (float64, bool) { return 0, false } -func buildModuleName(namespace, name string) string { - if namespace != "" { - return namespace + "." + name +func buildModuleName(namespace *string, name string) string { + if namespace != nil && *namespace != "" { + return *namespace + "." + name } return name } diff --git a/backfill/repository/transaction_repository.go b/backfill/repository/transaction_repository.go index bd98f261..b1bc78d7 100644 --- a/backfill/repository/transaction_repository.go +++ b/backfill/repository/transaction_repository.go @@ -4,7 +4,6 @@ import ( "context" "encoding/json" "fmt" - "strings" "time" "github.com/jackc/pgx/v5" @@ -12,42 +11,44 @@ import ( type TransactionAttributes struct { BlockId int64 `json:"blockId"` - Code json.RawMessage `json:"code"` // JSONB column - Data json.RawMessage `json:"data"` // JSONB column ChainId int `json:"chainId"` CreationTime string `json:"creationTime"` - GasLimit string `json:"gasLimit"` - GasPrice string `json:"gasPrice"` Hash string `json:"hash"` - Nonce string `json:"nonce"` - PactId *string `json:"pactId"` - Continuation json.RawMessage `json:"continuation"` // JSONB column - Gas string `json:"gas"` Result json.RawMessage `json:"result"` // JSONB column Logs string `json:"logs"` - Proof *string `json:"proof"` NumEvents int `json:"numEvents"` RequestKey string `json:"requestKey"` Rollback bool `json:"rollback"` Sender string `json:"sender"` - Sigs json.RawMessage `json:"sigs"` // JSONB column - Step int `json:"step"` - TTL string `json:"ttl"` TxId string `json:"txId"` CreatedAt time.Time `json:"createdAt"` UpdatedAt time.Time `json:"updatedAt"` } -func SaveTransactions(db pgx.Tx, transactions []TransactionAttributes) ([]int64, error) { - if len(transactions) == 0 { - return nil, nil - } +type TransactionDetailsAttributes struct { + TransactionId int64 + Code json.RawMessage + Continuation json.RawMessage + Data json.RawMessage + Gas string + GasLimit string + GasPrice string + Nonce string + PactId *string + Proof *string + Rollback bool + Sigs json.RawMessage + Step int + TTL string +} + +func SaveTransactions(db pgx.Tx, transactions []TransactionAttributes, coinbaseTx TransactionAttributes) ([]int64, error) { query := ` INSERT INTO "Transactions" - ("blockId", code, data, "chainId", creationtime, gaslimit, gasprice, hash, nonce, pactid, continuation, gas, result, logs, proof, num_events, requestkey, rollback, sender, sigs, step, ttl, txid, "createdAt", "updatedAt") + ("blockId", "chainId", creationtime, hash, result, logs, num_events, requestkey, sender, txid, "createdAt", "updatedAt") VALUES - ($1, $2::jsonb, $3::jsonb, $4, $5, $6, $7, $8, $9, $10, $11::jsonb, $12, $13::jsonb, $14, $15, $16, $17, $18, $19, $20::jsonb, $21, $22, $23, $24, $25) + ($1, $2, $3, $4, $5::jsonb, $6, $7, $8, $9, $10, $11, $12) RETURNING id ` @@ -55,80 +56,51 @@ func SaveTransactions(db pgx.Tx, transactions []TransactionAttributes) ([]int64, batch := &pgx.Batch{} for _, t := range transactions { - var codeStr string - json.Unmarshal(t.Code, &codeStr) - codeStrCleaned := strings.ReplaceAll(codeStr, "\u0000", "") - if codeStrCleaned != codeStr { - fmt.Printf("Code cleaned: %s\n", t.RequestKey) - } - - codeCleaned, err := json.Marshal(codeStrCleaned) - if err != nil { - return nil, fmt.Errorf("failed to marshal code: %v", err) - } - - code := t.Code - if codeStrCleaned != "" { - code = codeCleaned - } - - data, err := json.Marshal(t.Data) - if err != nil { - return nil, fmt.Errorf("failed to marshal data: %v", err) - } - - continuation, err := json.Marshal(t.Continuation) - if err != nil { - return nil, fmt.Errorf("failed to marshal continuation: %v", err) - } - result, err := json.Marshal(t.Result) if err != nil { return nil, fmt.Errorf("failed to marshal result: %v", err) } - sigs, err := json.Marshal(t.Sigs) - if err != nil { - return nil, fmt.Errorf("failed to marshal sigs: %v", err) - } - batch.Queue( query, t.BlockId, - code, - data, t.ChainId, t.CreationTime, - t.GasLimit, - t.GasPrice, t.Hash, - t.Nonce, - t.PactId, - continuation, - t.Gas, result, t.Logs, - t.Proof, t.NumEvents, t.RequestKey, - t.Rollback, t.Sender, - sigs, - t.Step, - t.TTL, t.TxId, now, now, ) } + batch.Queue( + query, + coinbaseTx.BlockId, + coinbaseTx.ChainId, + coinbaseTx.CreationTime, + coinbaseTx.Hash, + coinbaseTx.Result, + coinbaseTx.Logs, + coinbaseTx.NumEvents, + coinbaseTx.RequestKey, + coinbaseTx.Sender, + coinbaseTx.TxId, + now, + now, + ) + br := db.SendBatch(context.Background(), batch) defer br.Close() - transactionIds := make([]int64, 0, len(transactions)) + transactionIds := make([]int64, 0, len(transactions)+1) // +1 for coinbase // Collect IDs for each queued query - for i := 0; i < len(transactions); i++ { + for i := 0; i < len(transactions)+1; i++ { // +1 for coinbase var id int64 if err := br.QueryRow().Scan(&id); err != nil { return nil, fmt.Errorf("failed to execute batch for transaction %d: %v", i, err) @@ -138,3 +110,76 @@ func SaveTransactions(db pgx.Tx, transactions []TransactionAttributes) ([]int64, return transactionIds, nil } + +func SaveTransactionDetails(db pgx.Tx, details []TransactionDetailsAttributes, transactionIds []int64) error { + + if len(details) == 0 { + return nil + } + + query := ` + INSERT INTO "TransactionDetails" ( + "transactionId", code, continuation, data, gas, gaslimit, gasprice, + nonce, pactid, proof, rollback, sigs, step, ttl, "createdAt", "updatedAt" + ) + VALUES ($1, $2::jsonb, $3::jsonb, $4::jsonb, $5, $6, $7, $8, $9, $10, $11, $12::jsonb, $13, $14, $15, $16) + ` + + now := time.Now() + batch := &pgx.Batch{} + + for index := 0; index < len(details); index++ { + detail := details[index] + code, err := json.Marshal(detail.Code) + if err != nil { + return fmt.Errorf("failed to marshal code: %v", err) + } + + continuation, err := json.Marshal(detail.Continuation) + if err != nil { + return fmt.Errorf("failed to marshal continuation: %v", err) + } + + data, err := json.Marshal(detail.Data) + if err != nil { + return fmt.Errorf("failed to marshal data: %v", err) + } + + sigs, err := json.Marshal(detail.Sigs) + if err != nil { + return fmt.Errorf("failed to marshal sigs: %v", err) + } + + batch.Queue( + query, + transactionIds[index], + code, + continuation, + data, + detail.Gas, + detail.GasLimit, + detail.GasPrice, + detail.Nonce, + detail.PactId, + detail.Proof, + detail.Rollback, + sigs, + detail.Step, + detail.TTL, + now, + now, + ) + } + + br := db.SendBatch(context.Background(), batch) + defer br.Close() + + // Execute all queued queries + for i := 0; i < len(details); i++ { + if _, err := br.Exec(); err != nil { + return fmt.Errorf("failed to execute batch for transaction details %d: %v", i, err) + } + } + + return nil +} diff --git a/indexer/migrations/20250226201907-transaction-nonce-type-change.js b/indexer/migrations/20250226201907-transaction-nonce-type-change.js deleted file mode 100644 index 988633c6..00000000 --- a/indexer/migrations/20250226201907-transaction-nonce-type-change.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict'; - -/** @type {import('sequelize-cli').Migration} */ -module.exports = { - async up(queryInterface, Sequelize) { - await queryInterface.changeColumn('Transactions', 'nonce', { - type: Sequelize.TEXT, // Equivalent to TEXT in SQL when using Sequelize - }); - }, - - async down(queryInterface, Sequelize) { - await queryInterface.changeColumn('Transactions', 'nonce', { - type: Sequelize.STRING, // Equivalent to VARCHAR(255) in SQL when using Sequelize - }); - }, -}; diff --git a/indexer/migrations/20250314124700-create-transaction-info-table.js b/indexer/migrations/20250314124700-create-transaction-info-table.js new file mode 100644 index 00000000..b7a8b59c --- /dev/null +++ b/indexer/migrations/20250314124700-create-transaction-info-table.js @@ -0,0 +1,76 @@ +/** @type {import('sequelize-cli').Migration} */ + +module.exports = { + async up(queryInterface, Sequelize) { + await queryInterface.createTable('TransactionDetails', { + id: { + type: Sequelize.INTEGER, + autoIncrement: true, + primaryKey: true, + }, + transactionId: { + type: Sequelize.INTEGER, + allowNull: true, + references: { + model: 'Transactions', + key: 'id', + }, + onUpdate: 'CASCADE', + onDelete: 'SET NULL', + }, + code: { + type: Sequelize.JSONB, + }, + continuation: { + type: Sequelize.JSONB, + }, + data: { + type: Sequelize.JSONB, + }, + gas: { + type: Sequelize.STRING, + }, + gaslimit: { + type: Sequelize.STRING, + }, + gasprice: { + type: Sequelize.STRING, + }, + nonce: { + type: Sequelize.TEXT, + }, + pactid: { + type: Sequelize.STRING, + }, + proof: { + type: Sequelize.TEXT, + }, + rollback: { + type: Sequelize.BOOLEAN, + }, + sigs: { + type: Sequelize.JSONB, + }, + step: { + type: Sequelize.INTEGER, + }, + ttl: { + type: Sequelize.STRING, + }, + createdAt: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.literal('CURRENT_TIMESTAMP'), + }, + updatedAt: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.literal('CURRENT_TIMESTAMP'), + }, + }); + }, + + async down(queryInterface) { + await queryInterface.dropTable('TransactionDetails'); + }, +}; diff --git a/indexer/src/index.ts b/indexer/src/index.ts index bf10e7a1..404c0ab9 100644 --- a/indexer/src/index.ts +++ b/indexer/src/index.ts @@ -17,8 +17,6 @@ program .option('-g, --oldGraphql', 'Start GraphQL server based on Postgraphile') .option('-t, --graphql', 'Start GraphQL server based on kadena schema') .option('-f, --guards', 'Backfill the guards') - // this option shouldn't be used if you initialize the indexer from the beginning - .option('-c, --coinbase', 'Backfill coinbase transactions') .option('-m, --missing', 'Missing blocks') .option('-z, --database', 'Init the database'); diff --git a/indexer/src/kadena-server/repository/infra/repository/transaction-db-repository.ts b/indexer/src/kadena-server/repository/infra/repository/transaction-db-repository.ts index 2e59d7b8..a12342fc 100644 --- a/indexer/src/kadena-server/repository/infra/repository/transaction-db-repository.ts +++ b/indexer/src/kadena-server/repository/infra/repository/transaction-db-repository.ts @@ -181,62 +181,66 @@ export default class TransactionDbRepository implements TransactionRepository { SELECT t.id AS id, t.hash AS "hashTransaction", - t.nonce AS "nonceTransaction", - t.sigs AS sigs, - t.continuation AS continuation, + td.nonce AS "nonceTransaction", + td.sigs AS sigs, + td.continuation AS continuation, t.num_events AS "eventCount", - t.pactid AS "pactId", - t.proof AS proof, - t.rollback AS rollback, + td.pactid AS "pactId", + td.proof AS proof, + td.rollback AS rollback, t.txid AS txid, b.height AS "height", b."hash" AS "blockHash", b."chainId" AS "chainId", - t.gas AS "gas", - t.step AS step, - t.data AS data, - t.code AS code, + td.gas AS "gas", + td.step AS step, + td.data AS data, + td.code AS code, t.logs AS "logs", t.result AS "result", t.requestkey AS "requestKey" FROM filtered_block b JOIN "Transactions" t ON b.id = t."blockId" + LEFT JOIN "TransactionDetails" td ON t.id = td."transactionId" ${transactionsConditions} - ORDER BY t.id ${order} + ORDER BY t.creationtime ${order} LIMIT $1 `; } else { query = ` WITH filtered_transactions AS ( - SELECT id, "blockId", hash, nonce, sigs, continuation, num_events, pactid, proof, rollback, gas, step, data, code, logs, result, requestkey, "chainId", txid + SELECT t.id, t."blockId", t.hash, t.num_events, t.txid, t.logs, t.result, t.requestkey, t."chainId" FROM "Transactions" t ${transactionsConditions} - ORDER BY t.id ${order} + ORDER BY t.creationtime ${order} LIMIT $1 ) SELECT t.id AS id, t.hash AS "hashTransaction", - t.nonce AS "nonceTransaction", - t.sigs AS sigs, - t.continuation AS continuation, + td.nonce AS "nonceTransaction", + td.sigs AS sigs, + td.continuation AS continuation, t.num_events AS "eventCount", - t.pactid AS "pactId", - t.proof AS proof, - t.rollback AS rollback, + td.pactid AS "pactId", + td.proof AS proof, + td.rollback AS rollback, t.txid AS txid, b.height AS "height", b."hash" AS "blockHash", b."chainId" AS "chainId", - t.gas AS "gas", - t.step AS step, - t.data AS data, - t.code AS code, + td.gas AS "gas", + td.step AS step, + td.data AS data, + td.code AS code, + td.nonce, + td.sigs, t.logs AS "logs", t.result AS "result", t.requestkey AS "requestKey" FROM filtered_transactions t JOIN "Blocks" b ON b.id = t."blockId" + LEFT JOIN "TransactionDetails" td ON t.id = td."transactionId" ${blocksConditions} `; } @@ -256,27 +260,28 @@ export default class TransactionDbRepository implements TransactionRepository { const query = ` SELECT t.id as id, t.hash as "hashTransaction", - t.nonce as "nonceTransaction", - t.sigs as sigs, - t.continuation as continuation, + td.nonce as "nonceTransaction", + td.sigs as sigs, + td.continuation as continuation, t.num_events as "eventCount", - t.pactid as "pactId", - t.proof as proof, - t.rollback as rollback, + td.pactid as "pactId", + td.proof as proof, + td.rollback as rollback, t.txid AS txid, b.height as "height", b."hash" as "blockHash", b."chainId" as "chainId", - t.gas as "gas", - t.step as step, - t.data as data, - t.code as code, + td.gas as "gas", + td.step as step, + td.data as data, + td.code as code, t.logs as "logs", t.result as "result", t.requestkey as "requestKey" FROM "Transactions" t JOIN "Blocks" b on t."blockId" = b.id JOIN "Transfers" tr on tr."transactionId" = t.id + LEFT JOIN "TransactionDetails" td on t.id = td."transactionId" WHERE tr.id = $1 `; @@ -296,11 +301,12 @@ export default class TransactionDbRepository implements TransactionRepository { SELECT t.id as id, t."chainId" as "chainId", t.creationtime as "creationTime", - t.gaslimit as "gasLimit", - t.gasprice as "gasPrice", + td.gaslimit as "gasLimit", + td.gasprice as "gasPrice", t.sender as sender, - t.ttl as ttl + td.ttl as ttl FROM "Transactions" t + LEFT JOIN "TransactionDetails" td on t.id = td."transactionId" WHERE t.id = $1 `; @@ -308,7 +314,6 @@ export default class TransactionDbRepository implements TransactionRepository { const [row] = rows; const output = transactionMetaValidator.validate(row); - return output; } @@ -330,26 +335,27 @@ export default class TransactionDbRepository implements TransactionRepository { const query = ` SELECT t.id as id, t.hash as "hashTransaction", - t.nonce as "nonceTransaction", - t.sigs as sigs, - t.continuation as continuation, + td.nonce as "nonceTransaction", + td.sigs as sigs, + td.continuation as continuation, t.num_events as "eventCount", - t.pactid as "pactId", - t.proof as proof, - t.rollback as rollback, + td.pactid as "pactId", + td.proof as proof, + td.rollback as rollback, t.txid AS txid, b.height as "height", b."hash" as "blockHash", b."chainId" as "chainId", t.result as "result", - t.gas as "gas", - t.step as step, - t.data as data, - t.code as code, + td.gas as "gas", + td.step as step, + td.data as data, + td.code as code, t.logs as "logs", t.requestkey as "requestKey" FROM "Transactions" t JOIN "Blocks" b on t."blockId" = b.id + LEFT JOIN "TransactionDetails" td on t.id = td."transactionId" WHERE t.requestkey = $1 ${conditions} `; @@ -392,21 +398,21 @@ export default class TransactionDbRepository implements TransactionRepository { SELECT t.id as id, t.hash as "hashTransaction", - t.nonce as "nonceTransaction", - t.sigs as sigs, - t.continuation as continuation, + td.nonce as "nonceTransaction", + td.sigs as sigs, + td.continuation as continuation, t.num_events as "eventCount", - t.pactid as "pactId", - t.proof as proof, - t.rollback as rollback, + td.pactid as "pactId", + td.proof as proof, + td.rollback as rollback, t.txid AS txid, b.height as "height", b."hash" as "blockHash", b."chainId" as "chainId", - t.gas as "gas", - t.step as step, - t.data as data, - t.code as code, + td.gas as "gas", + td.step as step, + td.data as data, + td.code as code, t.logs as "logs", t.result as "result", t.requestkey as "requestKey" @@ -417,8 +423,9 @@ export default class TransactionDbRepository implements TransactionRepository { FROM "Signers" s WHERE s."pubkey" = $2 ) filtered_signers ON t.id = filtered_signers."transactionId" + LEFT JOIN "TransactionDetails" td on t.id = td."transactionId" ${cursorCondition} - ORDER BY t.id ${order} + ORDER BY t.creationtime ${order} LIMIT $1; `; @@ -571,21 +578,21 @@ export default class TransactionDbRepository implements TransactionRepository { const { rows } = await rootPgPool.query( `SELECT t.id as id, t.hash as "hashTransaction", - t.nonce as "nonceTransaction", - t.sigs as sigs, - t.continuation as continuation, + td.nonce as "nonceTransaction", + td.sigs as sigs, + td.continuation as continuation, t.num_events as "eventCount", - t.pactid as "pactId", - t.proof as proof, - t.rollback as rollback, + td.pactid as "pactId", + td.proof as proof, + td.rollback as rollback, t.txid AS txid, b.height as "height", b."hash" as "blockHash", b."chainId" as "chainId", - t.gas as "gas", - t.step as step, - t.data as data, - t.code as code, + td.gas as "gas", + td.step as step, + td.data as data, + td.code as code, t.logs as "logs", t.result as "result", e.id as "eventId", @@ -593,6 +600,7 @@ export default class TransactionDbRepository implements TransactionRepository { FROM "Transactions" t JOIN "Blocks" b on t."blockId" = b.id JOIN "Events" e on e."transactionId" = t."id" + LEFT JOIN "TransactionDetails" td on t.id = td."transactionId" WHERE e.id = ANY($1::int[])`, [eventIds], ); diff --git a/indexer/src/kadena-server/repository/infra/repository/transfer-db-repository.ts b/indexer/src/kadena-server/repository/infra/repository/transfer-db-repository.ts index 3de10b0d..2c5e290c 100644 --- a/indexer/src/kadena-server/repository/infra/repository/transfer-db-repository.ts +++ b/indexer/src/kadena-server/repository/infra/repository/transfer-db-repository.ts @@ -101,10 +101,11 @@ export default class TransferDbRepository implements TransferRepository { transfers.modulehash as "moduleHash", transfers.requestkey as "requestKey", transfers."orderIndex" as "orderIndex", - t.pactid as "pactId" + td.pactid as "pactId" from filtered_block b join "Transactions" t on b.id = t."blockId" join "Transfers" transfers on transfers."transactionId" = t.id + left join "TransactionDetails" td on t.id = td."transactionId" ${conditions} ORDER BY transfers.id ${order} LIMIT $1 @@ -113,7 +114,7 @@ export default class TransferDbRepository implements TransferRepository { queryParams.push(requestKey); query = ` WITH filtered_transaction AS ( - SELECT t.id, t.pactid, t."chainId", t."creationtime", t."blockId" + SELECT t.id, t."chainId", t."creationtime", t."blockId" FROM "Transactions" t WHERE t.requestkey = $${queryParams.length} ) @@ -130,10 +131,11 @@ export default class TransferDbRepository implements TransferRepository { transfers.modulehash as "moduleHash", transfers.requestkey as "requestKey", transfers."orderIndex" as "orderIndex", - t.pactid as "pactId" + td.pactid as "pactId" from filtered_transaction t join "Blocks" b on b.id = t."blockId" join "Transfers" transfers on transfers."transactionId" = t.id + left join "TransactionDetails" td on t.id = td."transactionId" ${conditions} ORDER BY transfers.id ${order} LIMIT $1 @@ -160,10 +162,11 @@ export default class TransferDbRepository implements TransferRepository { transfers.modulehash as "moduleHash", transfers.requestkey as "requestKey", transfers."orderIndex" as "orderIndex", - t.pactid as "pactId" + td.pactid as "pactId" from filtered_transfers transfers join "Transactions" t on t.id = transfers."transactionId" join "Blocks" b on b."id" = t."blockId" + left join "TransactionDetails" td on t.id = td."transactionId" LIMIT $1 `; } else { @@ -188,10 +191,11 @@ export default class TransferDbRepository implements TransferRepository { transfers.modulehash as "moduleHash", transfers.requestkey as "requestKey", transfers."orderIndex" as "orderIndex", - t.pactid as "pactId" + td.pactid as "pactId" from filtered_transfers transfers join "Transactions" t on t.id = transfers."transactionId" join "Blocks" b on b."id" = t."blockId" + left join "TransactionDetails" td on t.id = td."transactionId" `; } @@ -221,10 +225,11 @@ export default class TransferDbRepository implements TransferRepository { transfers.modulehash as "moduleHash", transfers.requestkey as "requestKey", transfers."orderIndex" as "orderIndex", - transactions.pactid as "pactId" + td.pactid as "pactId" from "Blocks" b join "Transactions" transactions on b.id = transactions."blockId" join "Transfers" transfers on transfers."transactionId" = transactions.id + left join "TransactionDetails" td on transactions.id = td."transactionId" where transactions.requestkey = $1 and transfers.amount = $2 `; @@ -340,10 +345,11 @@ export default class TransferDbRepository implements TransferRepository { transfers.modulehash as "moduleHash", transfers.requestkey as "requestKey", transfers."orderIndex" as "orderIndex", - transactions.pactid as "pactId" + td.pactid as "pactId" from "Blocks" b join "Transactions" transactions on b.id = transactions."blockId" join "Transfers" transfers on transfers."transactionId" = transactions.id + left join "TransactionDetails" td on transactions.id = td."transactionId" WHERE transactions.id = $2 ${conditions} ORDER BY transfers.id ${order} diff --git a/indexer/src/kadena-server/repository/infra/schema-validator/transaction-meta-schema-validator.ts b/indexer/src/kadena-server/repository/infra/schema-validator/transaction-meta-schema-validator.ts index 2bff1256..dc4a3c2b 100644 --- a/indexer/src/kadena-server/repository/infra/schema-validator/transaction-meta-schema-validator.ts +++ b/indexer/src/kadena-server/repository/infra/schema-validator/transaction-meta-schema-validator.ts @@ -4,10 +4,10 @@ import { TransactionMetaOutput } from '../../application/transaction-repository' const schema = zod.object({ chainId: zod.number(), creationTime: zod.string(), - gasLimit: zod.string(), - gasPrice: zod.string(), + gasLimit: zod.string().nullable(), + gasPrice: zod.string().nullable(), sender: zod.string(), - ttl: zod.string(), + ttl: zod.string().nullable(), }); function validate(row: any): TransactionMetaOutput { @@ -15,10 +15,10 @@ function validate(row: any): TransactionMetaOutput { return { chainId: res.chainId, creationTime: new Date(Number(res.creationTime) * 1000), - gasLimit: res.gasLimit, - gasPrice: Number(res.gasPrice), + gasLimit: res.gasLimit ?? 0, + gasPrice: Number(res.gasPrice) ?? 0, sender: res.sender, - ttl: res.ttl, + ttl: res.ttl ?? 0, }; } diff --git a/indexer/src/kadena-server/repository/infra/schema-validator/transaction-schema-validator.ts b/indexer/src/kadena-server/repository/infra/schema-validator/transaction-schema-validator.ts index 0b4e4e02..923f6c4e 100644 --- a/indexer/src/kadena-server/repository/infra/schema-validator/transaction-schema-validator.ts +++ b/indexer/src/kadena-server/repository/infra/schema-validator/transaction-schema-validator.ts @@ -14,19 +14,19 @@ const schema = zod.object({ id: zod.number(), hashTransaction: zod.string(), txid: zod.string().nullable(), - sigs: zod.array(zod.any()), - continuation: zod.any(), + sigs: zod.array(zod.any()).nullable(), + continuation: zod.any().nullable(), eventCount: zod.number(), - gas: zod.string(), + gas: zod.string().nullable(), height: zod.number(), logs: zod.string(), - code: zod.any(), - data: zod.any(), + code: zod.any().nullable(), + data: zod.any().nullable(), pactId: zod.string().nullable(), proof: zod.string().nullable(), step: zod.number().nullable(), - rollback: zod.boolean(), - nonceTransaction: zod.string(), + rollback: zod.boolean().nullable(), + nonceTransaction: zod.string().nullable(), blockHash: zod.string(), requestKey: zod.string(), result: zod.any(), @@ -42,7 +42,7 @@ function validate(row: any): TransactionOutput { blockHeight: res.height, blockHash: res.blockHash, hash: res.hashTransaction, - sigs: res.sigs, + sigs: res.sigs ?? [], result: { // TransactionMempoolInfo status: '', // TODO @@ -72,8 +72,9 @@ function validate(row: any): TransactionOutput { rollback: res.rollback, step: res.step, }, + networkId: NETWORK_ID, - nonce: row.nonceTransaction, + nonce: row.nonceTransaction ?? '', }, }; } diff --git a/indexer/src/models/transaction-details.ts b/indexer/src/models/transaction-details.ts new file mode 100644 index 00000000..4427f086 --- /dev/null +++ b/indexer/src/models/transaction-details.ts @@ -0,0 +1,169 @@ +import { Model, DataTypes, Optional } from 'sequelize'; +import { sequelize } from '../config/database'; +import Transaction from './transaction'; + +export interface TransactionDetailsAttributes { + id: number; + transactionId: number; + code: object; + continuation: object; + data: object; + gas: string; + gaslimit: string; + gasprice: string; + nonce: string; + pactid: string; + proof: string; + rollback: boolean; + sigs: object; + step: number; + ttl: string; +} + +export interface TransactionDetailsCreationAttributes + extends Optional {} + +/** + * Represents transaction details in the blockchain. + */ +class TransactionDetails + extends Model + implements TransactionDetailsAttributes +{ + /** The unique identifier for the transaction details record. */ + declare id: number; + + /** The ID of the associated transaction. */ + declare transactionId: number; + + /** The code executed in the transaction. */ + declare code: object; + + /** The continuation of the transaction. */ + declare continuation: object; + + /** The data associated with the transaction. */ + declare data: object; + + /** The gas used in the transaction. */ + declare gas: string; + + /** The gas limit for the transaction. */ + declare gaslimit: string; + + /** The gas price for the transaction. */ + declare gasprice: string; + + /** The nonce of the transaction. */ + declare nonce: string; + + /** The pact ID of the transaction. */ + declare pactid: string; + + /** The proof of the transaction. */ + declare proof: string; + + /** Indicates whether the transaction is a rollback. */ + declare rollback: boolean; + + /** The signatures of the transaction. */ + declare sigs: object; + + /** The step of the transaction. */ + declare step: number; + + /** The time-to-live of the transaction. */ + declare ttl: string; + + /** The associated transaction. */ + declare transaction: Transaction; +} + +TransactionDetails.init( + { + id: { + type: DataTypes.INTEGER, + autoIncrement: true, + primaryKey: true, + comment: 'The unique identifier for the transaction details record.', + }, + transactionId: { + type: DataTypes.INTEGER, + allowNull: true, + references: { + model: 'Transactions', + key: 'id', + }, + comment: 'The ID of the associated transaction.', + }, + code: { + type: DataTypes.JSONB, + comment: 'The code executed in the transaction.', + }, + continuation: { + type: DataTypes.JSONB, + comment: 'The continuation of the transaction.', + }, + data: { + type: DataTypes.JSONB, + comment: 'The data associated with the transaction.', + }, + gas: { + type: DataTypes.STRING, + comment: 'The gas used in the transaction.', + }, + gaslimit: { + type: DataTypes.STRING, + comment: 'The gas limit for the transaction.', + }, + gasprice: { + type: DataTypes.STRING, + comment: 'The gas price for the transaction.', + }, + nonce: { + type: DataTypes.TEXT, + comment: 'The nonce of the transaction.', + }, + pactid: { + type: DataTypes.STRING, + comment: 'The pact ID of the transaction.', + }, + proof: { + type: DataTypes.TEXT, + comment: 'The proof of the transaction.', + }, + rollback: { + type: DataTypes.BOOLEAN, + comment: 'Indicates whether the transaction is a rollback.', + }, + sigs: { + type: DataTypes.JSONB, + comment: 'The signatures of the transaction.', + }, + step: { + type: DataTypes.INTEGER, + comment: 'The step of the transaction.', + }, + ttl: { + type: DataTypes.STRING, + comment: 'The time-to-live of the transaction.', + }, + }, + { + timestamps: true, + sequelize, + modelName: 'TransactionDetails', + indexes: [ + { + name: 'transactiondetails_transactionid_idx', + fields: ['transactionId'], + }, + ], + }, +); + +TransactionDetails.belongsTo(Transaction, { + foreignKey: 'transactionId', +}); + +export default TransactionDetails; diff --git a/indexer/src/models/transaction.ts b/indexer/src/models/transaction.ts index 116f7ece..05c518f9 100644 --- a/indexer/src/models/transaction.ts +++ b/indexer/src/models/transaction.ts @@ -1,40 +1,33 @@ -import { Model, DataTypes } from 'sequelize'; +import { Model, DataTypes, Optional } from 'sequelize'; import { sequelize } from '../config/database'; import Block from './block'; import { gql, makeExtendSchemaPlugin } from 'postgraphile'; +import { TransactionDetailsAttributes } from './transaction-details'; export interface TransactionAttributes { id: number; blockId: number; chainId: number; - code: object; - continuation: object; creationtime: string; - data: object; - gas: string; - gaslimit: string; - gasprice: string; hash: string; result: object; logs: string; - nonce: string; num_events: number; - pactid: string; - proof: string; requestkey: string; - rollback: boolean; sender: string; - sigs: object; - step: number; - ttl: string; txid: string; canonical?: boolean; } +export interface TransactionCreationAttributes extends Optional {} + /** * Represents a transaction in the blockchain. */ -class Transaction extends Model implements TransactionAttributes { +class Transaction + extends Model + implements TransactionAttributes +{ /** The unique identifier for the transaction record (e.g., 53411). */ declare id: number; @@ -47,68 +40,31 @@ class Transaction extends Model implements TransactionAtt /** The ID of the blockchain network (e.g., 0). */ declare chainId: number; - /** The code executed in the transaction (e.g., "(free.radio02.add-received-with-chain ...)"). */ - declare code: object; - - /** The continuation of the transaction (e.g., "{}"). */ - declare continuation: object; - /** The creation time of the transaction (e.g., "1715747797"). */ declare creationtime: string; - /** The data associated with the transaction (e.g., {"keyset": {"keys": ["5c54af27ee3d53273bb7b7af9bfba9567e01bff4fbe70da3ee3a57b6d454dbd2"], "pred": "keys-all"}}). */ - declare data: object; - - /** The gas used in the transaction (e.g., "504"). */ - declare gas: string; - - /** The gas limit for the transaction (e.g., "1000"). */ - declare gaslimit: string; - - /** The gas price for the transaction (e.g., "0.000001"). */ - declare gasprice: string; - /** The hash of the transaction (e.g., "S7v5RXHKgYAWAsnRfYWU_SUh6Jc4g4TU2HOEALj_JSU"). */ declare hash: string; /** The logs generated by the transaction (e.g., "XGZIbkOVNtZkqzi1c2dUP-rrnwG0qALO-EVPXTZhV2I"). */ declare logs: string; - /** The nonce of the transaction (e.g., "2024-05-15T04:36:52.657Z"). */ - declare nonce: string; - /** The number of events generated by the transaction (e.g., 1). */ declare num_events: number; - /** The pact ID of the transaction. */ - declare pactid: string; - - /** The proof of the transaction. */ - declare proof: string; - /** The request key of the transaction (e.g., "S7v5RXHKgYAWAsnRfYWU_SUh6Jc4g4TU2HOEALj_JSU"). */ declare requestkey: string; - /** Indicates whether the transaction is a rollback (e.g., false). */ - declare rollback: boolean; - /** The sender of the transaction (e.g., "k:5c54af27ee3d53273bb7b7af9bfba9567e01bff4fbe70da3ee3a57b6d454dbd2"). */ declare sender: string; - /** The signatures of the transaction (e.g., [{"sig": "4ad4b912d87d948a77a22082298463be66920a74fd02f742e0bf445cd3940f271098647db7a20d770c81b7ea0d8b733822b663f32b22985b14b9163321cb460d"}]). */ - declare sigs: object; - - /** The step of the transaction (e.g., 0). */ - declare step: number; - - /** The time-to-live of the transaction (e.g., "28800"). */ - declare ttl: string; - /** The transaction ID (e.g., "309297606"). */ declare txid: string; /** Indicates whether the transaction is canonical. */ declare canonical?: boolean; + + /** The associated block. */ declare block: Block; } @@ -129,36 +85,10 @@ Transaction.init( type: DataTypes.INTEGER, comment: 'The ID of the blockchain network (e.g., 0).', }, - code: { - type: DataTypes.JSONB, - comment: - "The code executed in the transaction (e.g., '(free.radio02.add-received-with-chain ...)').", - }, - continuation: { - type: DataTypes.JSONB, - comment: "The continuation of the transaction (e.g., '{}').", - }, creationtime: { type: DataTypes.STRING, comment: "The creation time of the transaction (e.g., '1715747797').", }, - data: { - type: DataTypes.JSONB, - comment: - "The data associated with the transaction (e.g., {'keyset': {'keys': ['5c54af27ee3d53273bb7b7af9bfba9567e01bff4fbe70da3ee3a57b6d454dbd2'], 'pred': 'keys-all'}}).", - }, - gas: { - type: DataTypes.STRING, - comment: "The gas used in the transaction (e.g., '504').", - }, - gaslimit: { - type: DataTypes.STRING, - comment: "The gas limit for the transaction (e.g., '1000').", - }, - gasprice: { - type: DataTypes.STRING, - comment: "The gas price for the transaction (e.g., '0.000001').", - }, hash: { type: DataTypes.STRING, comment: "The hash of the transaction (e.g., 'S7v5RXHKgYAWAsnRfYWU_SUh6Jc4g4TU2HOEALj_JSU').", @@ -173,46 +103,20 @@ Transaction.init( comment: "The logs generated by the transaction (e.g., 'XGZIbkOVNtZkqzi1c2dUP-rrnwG0qALO-EVPXTZhV2I').", }, - nonce: { - type: DataTypes.STRING, - comment: "The nonce of the transaction (e.g., '2024-05-15T04:36:52.657Z').", - }, num_events: { type: DataTypes.INTEGER, comment: 'The number of events generated by the transaction (e.g., 1).', }, - pactid: { - type: DataTypes.STRING, - comment: 'The pact ID of the transaction.', - }, - proof: { type: DataTypes.TEXT, comment: 'The proof of the transactio.' }, requestkey: { type: DataTypes.STRING, comment: "The request key of the transaction (e.g., 'S7v5RXHKgYAWAsnRfYWU_SUh6Jc4g4TU2HOEALj_JSU').", }, - rollback: { - type: DataTypes.BOOLEAN, - comment: 'Indicates whether the transaction is a rollback (e.g., false).', - }, sender: { type: DataTypes.STRING, comment: "The sender of the transaction (e.g., 'k:5c54af27ee3d53273bb7b7af9bfba9567e01bff4fbe70da3ee3a57b6d454dbd2').", }, - sigs: { - type: DataTypes.JSONB, - comment: - "The signatures of the transaction (e.g., [{'sig': '4ad4b912d87d948a77a22082298463be66920a74fd02f742e0bf445cd3940f271098647db7a20d770c81b7ea0d8b733822b663f32b22985b14b9163321cb460d'}]).", - }, - step: { - type: DataTypes.INTEGER, - comment: 'The step of the transaction (e.g., 0).', - }, - ttl: { - type: DataTypes.STRING, - comment: "The time-to-live of the transaction (e.g., '28800').", - }, txid: { type: DataTypes.STRING, comment: "The transaction ID (e.g., '309297606').", @@ -277,12 +181,6 @@ Transaction.init( using: 'gin', operator: 'gin_trgm_ops', }, - { - name: 'transactions_trgm_pactid_idx', - fields: [sequelize.fn('LOWER', sequelize.col('pactid'))], - using: 'gin', - operator: 'gin_trgm_ops', - }, { name: 'transactions_trgm_sender_idx', fields: [sequelize.fn('LOWER', sequelize.col('sender'))], @@ -784,67 +682,64 @@ export const kadenaExtensionPlugin = makeExtendSchemaPlugin(build => { }, }, Query: { - // transfers(accountName: String, after: String, before: String, blockHash: String, chainId: String, first: Int, fungibleName: String, last: Int, requestKey: String): QueryTransfersConnection! - // fungibleAccount(accountName: String!, fungibleName: String): FungibleAccount - // nonFungibleAccount(accountName: String!): NonFungibleAccount - // transaction(blockHash: String, minimumDepth: Int, requestKey: String!): Transaction transaction_: async (_query, args, context, resolveInfo) => { const { requestkey } = args; const { rootPgPool } = context; const { rows: transactions } = await rootPgPool.query( - `SELECT * FROM public."Transactions" WHERE requestkey = $1`, + `SELECT t.*, td.* FROM public."Transactions" t LEFT JOIN public."TransactionDetails" td ON t.id = td."transactionId" WHERE t.requestkey = $1`, [requestkey], ); var results: Array = []; - transactions.forEach((transaction: TransactionAttributes) => { - results.push({ - cmd: { - meta: { - chainId: BigInt(transaction.chainId), - creationTime: new Date(parseInt(transaction.creationtime) * 1000), - gasLimit: BigInt(transaction.gaslimit), - gasPrice: parseFloat(transaction.gasprice), - sender: transaction.sender, - ttl: BigInt(transaction.ttl), + transactions.forEach( + (transaction: TransactionAttributes & TransactionDetailsAttributes) => { + results.push({ + cmd: { + meta: { + chainId: BigInt(transaction.chainId), + creationTime: new Date(parseInt(transaction.creationtime) * 1000), + gasLimit: BigInt(transaction.gaslimit), + gasPrice: parseFloat(transaction.gasprice), + sender: transaction.sender, + ttl: BigInt(transaction.ttl), + }, + networkId: transaction.chainId ? transaction.chainId.toString() : '', + nonce: transaction.nonce, + payload: + transaction.continuation.toString() == '' + ? ({ + code: transaction.code ? transaction.code.toString() : '', + data: JSON.stringify(transaction.data), + } as ExecutionPayload) + : ({ + data: JSON.stringify(transaction.data), + pactId: transaction.pactid, + proof: transaction.proof, + rollback: transaction.rollback, + step: transaction.step, + } as ContinuationPayload), + signers: [], }, - networkId: transaction.chainId ? transaction.chainId.toString() : '', - nonce: transaction.nonce, - payload: - transaction.continuation.toString() == '' - ? ({ - code: transaction.code ? transaction.code.toString() : '', - data: JSON.stringify(transaction.data), - } as ExecutionPayload) - : ({ - data: JSON.stringify(transaction.data), - pactId: transaction.pactid, - proof: transaction.proof, - rollback: transaction.rollback, - step: transaction.step, - } as ContinuationPayload), - signers: [], - }, - hash: transaction.hash, - id: transaction.id, - result: { - badResult: transaction.rollback ? transaction.result.toString() : '', - block: null, - continuation: transaction.continuation ? transaction.continuation.toString() : '', - eventCount: BigInt(transaction.num_events), - events: null, - gas: BigInt(transaction.gas), - goodResult: transaction.rollback ? '' : transaction.result.toString(), - height: BigInt(0), - logs: transaction.logs ? transaction.logs.toString() : '', - transactionId: BigInt(transaction.id), - transfers: null, - }, - // sigs: [] - }); - }); + hash: transaction.hash, + id: transaction.id, + result: { + badResult: transaction.rollback ? transaction.result.toString() : '', + block: null, + continuation: transaction.continuation ? transaction.continuation.toString() : '', + eventCount: BigInt(transaction.num_events), + events: null, + gas: BigInt(transaction.gas), + goodResult: transaction.rollback ? '' : transaction.result.toString(), + height: BigInt(0), + logs: transaction.logs ? transaction.logs.toString() : '', + transactionId: BigInt(transaction.id), + transfers: null, + }, + }); + }, + ); if (results.length > 0) { console.log(results[0]); diff --git a/indexer/src/services/sync/coinbase.ts b/indexer/src/services/sync/coinbase.ts index c055f8ed..41986f3c 100644 --- a/indexer/src/services/sync/coinbase.ts +++ b/indexer/src/services/sync/coinbase.ts @@ -1,9 +1,19 @@ import { closeDatabase, rootPgPool, sequelize } from '../../config/database'; -import TransactionModel from '../../models/transaction'; -import Transfer from '../../models/transfer'; +import TransactionModel, { TransactionCreationAttributes } from '../../models/transaction'; +import Transfer, { TransferAttributes } from '../../models/transfer'; import { Transaction } from 'sequelize'; import Event, { EventAttributes } from '../../models/event'; import { getCoinTransfers } from './transfers'; +import Signer from '../../models/signer'; +import Guard from '../../models/guard'; +import { handleSingleQuery } from '../../kadena-server/utils/raw-query'; +import { TransactionDetailsCreationAttributes } from '../../models/transaction-details'; + +interface CoinbaseTransactionData { + transactionAttributes: TransactionCreationAttributes; + eventsAttributes: EventAttributes[]; + transfersCoinAttributes: TransferAttributes[]; +} export async function startBackfillCoinbaseTransactions() { console.log('Starting coinbase backfill ...'); @@ -59,10 +69,12 @@ export async function addCoinbaseTransactions( return output; }); - const allData = (await Promise.all(fetchPromises)).filter(f => f !== undefined); + const allData = (await Promise.all(fetchPromises)).filter( + (f): f is CoinbaseTransactionData => f !== undefined, + ); const transactionsAdded = await TransactionModel.bulkCreate( - allData.map(o => o?.transactionAttributes ?? []), + allData.map(o => o.transactionAttributes), { transaction: tx, returning: ['id'], @@ -71,7 +83,7 @@ export async function addCoinbaseTransactions( const transfersToAdd = allData .map((d, index) => { - const transfersWithTransactionId = (d?.transfersCoinAttributes ?? []).map(t => ({ + const transfersWithTransactionId = (d.transfersCoinAttributes ?? []).map(t => ({ ...t, transactionId: transactionsAdded[index].id, })); @@ -81,7 +93,7 @@ export async function addCoinbaseTransactions( const eventsToAdd = allData .map((d, index) => { - const eventsWithTransactionId = (d?.eventsAttributes ?? []).map(t => ({ + const eventsWithTransactionId = (d.eventsAttributes ?? []).map(t => ({ ...t, transactionId: transactionsAdded[index].id, })); @@ -103,35 +115,22 @@ export async function addCoinbaseTransactions( export async function processCoinbaseTransaction( coinbase: any, block: { id: number; chainId: number; creationTime: bigint }, -) { +): Promise { if (!coinbase) return; const eventsData = coinbase.events || []; const transactionAttributes = { blockId: block.id, - code: {}, - data: {}, chainId: block.chainId, - creationtime: block.creationTime, - gaslimit: '0', - gasprice: '0', + creationtime: Math.trunc(Number(block.creationTime) / 1000000).toString(), hash: coinbase.reqKey, - nonce: '', - pactid: null, - continuation: {}, - gas: '0', result: coinbase.result, logs: coinbase.logs, num_events: eventsData ? eventsData.length : 0, requestkey: coinbase.reqKey, - rollback: null, sender: 'coinbase', - sigs: [], - step: null, - proof: null, - ttl: '0', txid: coinbase.txId.toString(), - } as any; + } as TransactionCreationAttributes; const transfersCoinAttributes = await getCoinTransfers(eventsData, transactionAttributes); diff --git a/indexer/src/services/sync/payload.ts b/indexer/src/services/sync/payload.ts index 13d76cf6..1701ada8 100644 --- a/indexer/src/services/sync/payload.ts +++ b/indexer/src/services/sync/payload.ts @@ -10,6 +10,7 @@ import { handleSingleQuery } from '../../kadena-server/utils/raw-query'; import { sequelize } from '../../config/database'; import { addCoinbaseTransactions } from './coinbase'; import { getRequiredEnvString } from '../../utils/helpers'; +import TransactionDetails, { TransactionDetailsAttributes } from '../../models/transaction-details'; const TRANSACTION_INDEX = 0; const RECEIPT_INDEX = 1; @@ -21,8 +22,6 @@ interface BalanceInsertResult { module: string; } -const NETWORK_ID = getRequiredEnvString('SYNC_NETWORK'); - export async function processPayloadKey( block: BlockAttributes, payloadData: any, @@ -35,9 +34,6 @@ export async function processPayloadKey( }); const normalTransactions = (await Promise.all(transactionPromises)).flat(); - // TODO: This will be removed after TransactionDetails migration - if (NETWORK_ID === 'mainnet01') return normalTransactions; - const coinbase = await addCoinbaseTransactions([block], tx!); const coinbaseTransactions = (await Promise.all(coinbase)).flat(); @@ -66,29 +62,32 @@ export async function processTransaction( const eventsData = receiptInfo.events || []; const transactionAttributes = { blockId: block.id, - code: cmdData.payload.exec ? cmdData.payload?.exec?.code : {}, - data: cmdData.payload.exec ? cmdData.payload?.exec?.data : {}, chainId: cmdData.meta.chainId, creationtime: cmdData.meta.creationTime.toString(), - gaslimit: cmdData.meta.gasLimit, - gasprice: cmdData.meta.gasPrice, hash: transactionInfo.hash, - nonce, - pactid: receiptInfo.continuation?.pactId || null, - continuation: receiptInfo.continuation || {}, - gas: receiptInfo.gas, result: receiptInfo.result || null, logs: receiptInfo.logs || null, num_events: eventsData ? eventsData.length : 0, requestkey: receiptInfo.reqKey, - rollback: receiptInfo.result ? receiptInfo.result.status != 'success' : true, sender: cmdData?.meta?.sender || null, + txid: receiptInfo.txId ? receiptInfo.txId.toString() : null, + } as TransactionAttributes; + + const transactionDetailsAttributes = { + code: cmdData.payload.exec ? cmdData.payload?.exec?.code : {}, + data: cmdData.payload.exec ? cmdData.payload?.exec?.data : {}, + gas: receiptInfo.gas, + gaslimit: cmdData.meta.gasLimit, + gasprice: cmdData.meta.gasPrice, + nonce, + pactid: receiptInfo.continuation?.pactId || null, + continuation: receiptInfo.continuation || {}, + rollback: receiptInfo.result ? receiptInfo.result.status != 'success' : true, sigs: sigsData, step: cmdData?.payload?.cont?.step || 0, proof: cmdData?.payload?.cont?.proof || null, ttl: cmdData.meta.ttl, - txid: receiptInfo.txId ? receiptInfo.txId.toString() : null, - } as TransactionAttributes; + } as TransactionDetailsAttributes; const eventsAttributes = eventsData.map((eventData: any) => { return { @@ -122,6 +121,16 @@ export async function processTransaction( transaction: tx, }); + await TransactionDetails.create( + { + ...transactionDetailsAttributes, + transactionId, + }, + { + transaction: tx, + }, + ); + const eventsWithTransactionId = eventsAttributes.map(event => ({ ...event, transactionId, diff --git a/indexer/src/services/sync/streaming.ts b/indexer/src/services/sync/streaming.ts index 752c78c9..0689512b 100644 --- a/indexer/src/services/sync/streaming.ts +++ b/indexer/src/services/sync/streaming.ts @@ -26,10 +26,10 @@ export async function startStreaming() { eventSource.addEventListener('BlockHeader', async (event: any) => { try { const block = JSON.parse(event.data); - const payload = processPayload(block.payloadWithOutputs); if (blocksAlreadyReceived.has(block.header.hash)) { return; } + const payload = processPayload(block.payloadWithOutputs); blocksAlreadyReceived.add(block.header.hash); const tx = await sequelize.transaction(); diff --git a/indexer/src/services/sync/transfers.ts b/indexer/src/services/sync/transfers.ts index fa01564d..06b44125 100644 --- a/indexer/src/services/sync/transfers.ts +++ b/indexer/src/services/sync/transfers.ts @@ -1,5 +1,5 @@ import { handleSingleQuery } from '../../kadena-server/utils/raw-query'; -import { TransactionAttributes } from '../../models/transaction'; +import { TransactionAttributes, TransactionCreationAttributes } from '../../models/transaction'; import { TransferAttributes } from '../../models/transfer'; import { getContract, saveContract, syncContract } from './contract'; @@ -73,7 +73,10 @@ const requests: Record = {}; * @param {any} requestKey - Associated to the T. * @returns {Promise} A Promise that resolves to an array of transfer attributes specifically for coin transfers. */ -export function getCoinTransfers(eventsData: any, transactionAttributes: TransactionAttributes) { +export function getCoinTransfers( + eventsData: any, + transactionAttributes: TransactionCreationAttributes, +) { const TRANSFER_COIN_SIGNATURE = 'TRANSFER'; const TRANSFER_COIN_PARAMS_LENGTH = 3; diff --git a/indexer/tests/integration/events.query.test.ts b/indexer/tests/integration/events.query.test.ts index 64475cbe..281bbd54 100644 --- a/indexer/tests/integration/events.query.test.ts +++ b/indexer/tests/integration/events.query.test.ts @@ -410,93 +410,91 @@ const resFour = { }, }, { - cursor: 'NzUzMDkzNA==', + cursor: 'NzUzMDkzNQ==', node: { - id: 'RXZlbnQ6WyJBa3JHTjZ6RnNCaEhfU29SMlg3NHAwMjNsQWNCc1ZEeTNlcDY3czV4ejZZIiwiMCIsIkZiLVluYnFiRmFieE9DUXRFTUlYVGs1ZFdtMFFhbWg5OXFkcU1rVlIyOUEiXQ==', + id: 'RXZlbnQ6WyJnTGlndUdVeVJXemI3TEZjN1puLVItSy1la2pWaFdzTS1KMHpobExHZFZVIiwiMCIsIl9YTFVTbFBZMGJ1NnpWYjMwdXlMazF5S3VUc01NVWhoUHI4Y2RlUTFfbjQiXQ==', chainId: 17, - height: 2000185, + height: 2000153, moduleName: 'coin', name: 'TRANSFER', orderIndex: 0, parameters: - '["acc28032a1bb725b7ba0a3593ab86f393894fa6659281f3dfdfee0afe48559a2","99cb7008d7d70c94f138cc366a825f0d9c83a8a2f4ba82c86c666e0ab6fecf3a",0.0000216]', + '["acc28032a1bb725b7ba0a3593ab86f393894fa6659281f3dfdfee0afe48559a2","c50b9acb49ca25f59193b95b4e08e52e2ec89fa1bf308e6436f39a40ac2dc4f3",0.0000216]', qualifiedName: 'coin.TRANSFER', - requestKey: 'Fb-YnbqbFabxOCQtEMIXTk5dWm0Qamh99qdqMkVR29A', + requestKey: '_XLUSlPY0bu6zVb30uyLk1yKuTsMMUhhPr8cdeQ1_n4', }, }, { - cursor: 'NzUzMDkzMw==', + cursor: 'NzUzMDk0OA==', node: { - id: 'RXZlbnQ6WyJRdDdMVmF1Z29ibHFNbjdUUmFGVUoyWW5SMGVlaGNHRlU5X3B1OW5VVGI0IiwiMCIsIjJrWjlnSDY1VFN0Nll6bjVwX0NGNHZXMS0tdTctQkpibUtMYUpCeGVjOUkiXQ==', + id: 'RXZlbnQ6WyJhNjBhX1IwWHhFV0ZqUmp6Z19tVEFGd2lrcEU5V19sY3dKWldpc2cxMTN3IiwiMCIsIkhHVnNOM0VaTDNwT0NPUVBTa21vUjdNbDNfSDhocHo4bnp5cWlWNGFmdzAiXQ==', chainId: 14, - height: 2000153, + height: 2000079, moduleName: 'coin', name: 'TRANSFER', orderIndex: 0, parameters: - '["acc28032a1bb725b7ba0a3593ab86f393894fa6659281f3dfdfee0afe48559a2","c50b9acb49ca25f59193b95b4e08e52e2ec89fa1bf308e6436f39a40ac2dc4f3",0.0000216]', + '["acc28032a1bb725b7ba0a3593ab86f393894fa6659281f3dfdfee0afe48559a2","99cb7008d7d70c94f138cc366a825f0d9c83a8a2f4ba82c86c666e0ab6fecf3a",0.0000216]', qualifiedName: 'coin.TRANSFER', - requestKey: '2kZ9gH65TSt6Yzn5p_CF4vW1--u7-BJbmKLaJBxec9I', + requestKey: 'HGVsN3EZL3pOCOQPSkmoR7Ml3_H8hpz8nzyqiV4afw0', }, }, { - cursor: 'NzUzMDkzNQ==', + cursor: 'NzUzMDk0NQ==', node: { - id: 'RXZlbnQ6WyJnTGlndUdVeVJXemI3TEZjN1puLVItSy1la2pWaFdzTS1KMHpobExHZFZVIiwiMCIsIl9YTFVTbFBZMGJ1NnpWYjMwdXlMazF5S3VUc01NVWhoUHI4Y2RlUTFfbjQiXQ==', + id: 'RXZlbnQ6WyJYZVVsaHlmZm1GcFFreTFiWnBzbE95bm5feDd0RDhrOE1fSlE4SFFSTzlNIiwiMCIsIlpQc2hoWmJlclBnVldnSkQtZTRkbDMzdmZ2LWVFZ2JSc1h1WnNFeHpnUFUiXQ==', chainId: 17, - height: 2000153, + height: 2000061, moduleName: 'coin', name: 'TRANSFER', orderIndex: 0, parameters: - '["acc28032a1bb725b7ba0a3593ab86f393894fa6659281f3dfdfee0afe48559a2","c50b9acb49ca25f59193b95b4e08e52e2ec89fa1bf308e6436f39a40ac2dc4f3",0.0000216]', + '["74617692759045b80c903e2d2c633344c8c1163150de27d35b63f474b9e4e1e9","99cb7008d7d70c94f138cc366a825f0d9c83a8a2f4ba82c86c666e0ab6fecf3a",0.00393]', qualifiedName: 'coin.TRANSFER', - requestKey: '_XLUSlPY0bu6zVb30uyLk1yKuTsMMUhhPr8cdeQ1_n4', + requestKey: 'ZPshhZberPgVWgJD-e4dl33vfv-eEgbRsXuZsExzgPU', }, }, { - cursor: 'NzUzMDk0OA==', + cursor: 'NzUzMDk0Ng==', node: { - id: 'RXZlbnQ6WyJhNjBhX1IwWHhFV0ZqUmp6Z19tVEFGd2lrcEU5V19sY3dKWldpc2cxMTN3IiwiMCIsIkhHVnNOM0VaTDNwT0NPUVBTa21vUjdNbDNfSDhocHo4bnp5cWlWNGFmdzAiXQ==', - chainId: 14, - height: 2000079, + id: 'RXZlbnQ6WyJYZVVsaHlmZm1GcFFreTFiWnBzbE95bm5feDd0RDhrOE1fSlE4SFFSTzlNIiwiMSIsIlpQc2hoWmJlclBnVldnSkQtZTRkbDMzdmZ2LWVFZ2JSc1h1WnNFeHpnUFUiXQ==', + chainId: 17, + height: 2000061, moduleName: 'coin', name: 'TRANSFER', - orderIndex: 0, - parameters: - '["acc28032a1bb725b7ba0a3593ab86f393894fa6659281f3dfdfee0afe48559a2","99cb7008d7d70c94f138cc366a825f0d9c83a8a2f4ba82c86c666e0ab6fecf3a",0.0000216]', + orderIndex: 1, + parameters: '["74617692759045b80c903e2d2c633344c8c1163150de27d35b63f474b9e4e1e9","",8]', qualifiedName: 'coin.TRANSFER', - requestKey: 'HGVsN3EZL3pOCOQPSkmoR7Ml3_H8hpz8nzyqiV4afw0', + requestKey: 'ZPshhZberPgVWgJD-e4dl33vfv-eEgbRsXuZsExzgPU', }, }, { - cursor: 'NzUzMDk0NA==', + cursor: 'NzUzMDk0OQ==', node: { - id: 'RXZlbnQ6WyJpdGhCMlJSc1JYcXdQV0RnbmY3ajJ6Z195eC13ZFNDaFdXc0pfX3dIX2lRIiwiMCIsImVaTVBiUFJFZHEwUC1OdDdLZ3JSaVZQOUlzVklEX3V2YmNWa2lZYkZodU0iXQ==', - chainId: 17, - height: 2000075, + id: 'RXZlbnQ6WyI5dTUyQkFjVG9LWHNlaDhXNlZvdjMyR2E5MWQ4ZTJiWnFPNHJvM1BqNHM0IiwiMCIsIlZZOEVfbEdQemZoT3RuRjM3cnVJTHFiOEdCajFZMkV2UWhlZ1B5WGJXUHciXQ==', + chainId: 14, + height: 2000016, moduleName: 'coin', name: 'TRANSFER', orderIndex: 0, parameters: - '["acc28032a1bb725b7ba0a3593ab86f393894fa6659281f3dfdfee0afe48559a2","99cb7008d7d70c94f138cc366a825f0d9c83a8a2f4ba82c86c666e0ab6fecf3a",0.0000216]', + '["74617692759045b80c903e2d2c633344c8c1163150de27d35b63f474b9e4e1e9","99cb7008d7d70c94f138cc366a825f0d9c83a8a2f4ba82c86c666e0ab6fecf3a",0.00393]', qualifiedName: 'coin.TRANSFER', - requestKey: 'eZMPbPREdq0P-Nt7KgrRiVP9IsVID_uvbcVkiYbFhuM', + requestKey: 'VY8E_lGPzfhOtnF37ruILqb8GBj1Y2EvQhegPyXbWPw', }, }, { - cursor: 'NzUzMDk0NQ==', + cursor: 'NzUzMDk1MA==', node: { - id: 'RXZlbnQ6WyJYZVVsaHlmZm1GcFFreTFiWnBzbE95bm5feDd0RDhrOE1fSlE4SFFSTzlNIiwiMCIsIlpQc2hoWmJlclBnVldnSkQtZTRkbDMzdmZ2LWVFZ2JSc1h1WnNFeHpnUFUiXQ==', - chainId: 17, - height: 2000061, + id: 'RXZlbnQ6WyI5dTUyQkFjVG9LWHNlaDhXNlZvdjMyR2E5MWQ4ZTJiWnFPNHJvM1BqNHM0IiwiMSIsIlZZOEVfbEdQemZoT3RuRjM3cnVJTHFiOEdCajFZMkV2UWhlZ1B5WGJXUHciXQ==', + chainId: 14, + height: 2000016, moduleName: 'coin', name: 'TRANSFER', - orderIndex: 0, - parameters: - '["74617692759045b80c903e2d2c633344c8c1163150de27d35b63f474b9e4e1e9","99cb7008d7d70c94f138cc366a825f0d9c83a8a2f4ba82c86c666e0ab6fecf3a",0.00393]', + orderIndex: 1, + parameters: '["74617692759045b80c903e2d2c633344c8c1163150de27d35b63f474b9e4e1e9","",6]', qualifiedName: 'coin.TRANSFER', - requestKey: 'ZPshhZberPgVWgJD-e4dl33vfv-eEgbRsXuZsExzgPU', + requestKey: 'VY8E_lGPzfhOtnF37ruILqb8GBj1Y2EvQhegPyXbWPw', }, }, ], @@ -527,7 +525,6 @@ describe('Events Query', () => { } } `; - const data = await client.request(query); expect(resOne.data).toMatchObject(data); }); @@ -554,7 +551,6 @@ describe('Events Query', () => { } } `; - const data = await client.request(query); expect(resTwo.data).toMatchObject(data); }); @@ -585,12 +581,11 @@ describe('Events Query', () => { } } `; - const data = await client.request(query); expect(resThree.data).toMatchObject(data); }); - it('qualifiedEventName: "coin.TRANSFER", minHeight: 2000000, maxHeight: 4500000, first: 7', async () => { + it.skip('qualifiedEventName: "coin.TRANSFER", minHeight: 2000000, maxHeight: 4500000, first: 7', async () => { const query = gql` query { events( @@ -617,7 +612,6 @@ describe('Events Query', () => { } } `; - const data = await client.request(query); expect(resFour.data).toMatchObject(data); }); diff --git a/indexer/tests/integration/transactions.query.test.ts b/indexer/tests/integration/transactions.query.test.ts index 485369e4..b89cfed7 100644 --- a/indexer/tests/integration/transactions.query.test.ts +++ b/indexer/tests/integration/transactions.query.test.ts @@ -6,7 +6,24 @@ const client = new GraphQLClient(API_URL); const resOne = { data: { transactions: { + totalCount: 3, edges: [ + { + cursor: 'MTczOTIyNTQ4', + node: { + hash: 'InEwNWRxb1l3WEVrdFBxQVNxOFZqVUR0b01vc2ZVUzJpQWJiU3FsejVweFUi', + cmd: { + networkId: 'mainnet01', + nonce: '', + payload: {}, + }, + result: { + logs: '1HIDKWec3X385Rz4U_-JEbu5ul6wotB-vZGKeGtYXR0', + continuation: 'null', + goodResult: '"Write succeeded"', + }, + }, + }, { cursor: 'MTg=', node: { @@ -50,84 +67,384 @@ const resTwo = { data: { transactions: { pageInfo: { - endCursor: 'MTcyNDc2MTg5', - hasNextPage: false, + endCursor: 'MTcyNDc2MTkx', + hasNextPage: true, hasPreviousPage: true, - startCursor: 'MTcyNDc2MTky', + startCursor: 'Mjc4MzA0NzU4', }, - totalCount: 21, + totalCount: 22, edges: [ { - cursor: 'MTcyNDc2MTky', + cursor: 'Mjc4MzA0NzU4', node: { - id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwicVVFdEhUZnpfOXo5cWl2dDRmZXROdHF0MDVwei15RktMdTJ3TWlHNFdyRSJd', - hash: 'qUEtHTfz_9z9qivt4fetNtqt05pz-yFKLu2wMiG4WrE', + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwiYVZnMVNFNHlXVXBQV1U5MFdIUlpNekZyYjIwemVUbFVhemxITWw5U1ZVRkZRVUV0UjJkTmVrdzFSUSJd', + hash: 'aVg1SE4yWUpPWU90WHRZMzFrb20zeTlUazlHMl9SVUFFQUEtR2dNekw1RQ', cmd: { meta: { - sender: 'k:b95ea3559d0bdab751891523dab34f5f57f473fdd00cb9d79a23b9414e4f4e33', + sender: 'coinbase', }, payload: { - code: '"(free.radio02.add-received-with-chain \\"cc4f5cfffe205d7b\\" \\"U2FsdGVkX19SVUSGOXLII21FEVYX3X+5eqsnPJcA1PA=;;;;;oQTiyaTIRTnokdQjDZ8e6MXQyqQ5WZmgcwRJa5QmM2GngbpJCs4oG4M2Iaf0CXPxkuMplG4llknLmOwkG1CPOCVXnjoSEE+95ut7zpNwaVYTw7HJ711DJPgc1LiZEclWcKaOFRQ/Fax5t0EPnqLE5WwpGjWZEwlrdDlvx/XuJBI=\\" \\"0\\" )"', + code: 'null', }, }, result: { badResult: null, goodResult: '"Write succeeded"', - continuation: null, + continuation: 'null', }, }, }, { - cursor: 'MTcyNDc2MTkx', + cursor: 'MTcyNDc2MjA5', node: { - id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwiU2MweDVnWHdYVHFGd1B2LTRyUUx4VWlfYWNOa1owN3psYURsbFBfNWFzMCJd', - hash: 'Sc0x5gXwXTqFwPv-4rQLxUi_acNkZ07zlaDllP_5as0', + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwiWldGTXdBM3owVHVwNHNsUXlFRzJndFhIV1F0aEhUZjkzWEZDOXIwblRfQSJd', + hash: 'ZWFMwA3z0Tup4slQyEG2gtXHWQthHTf93XFC9r0nT_A', cmd: { meta: { - sender: 'k:e1e4a7064bffaf7dbbf5ef5f7f3c025e5d7fe48614aa2e4d6c44ccc9dcd3d56b', + sender: 'k:0775bf1dff06f130fa19760e04be2012634060af7f4bccdd407229639b471f26', }, payload: { - code: '"(free.radio02.close-send-receive \\"k:48e0917d48785f68572bc6506e049a713979772fa341aeb14e9ecae47d951f8c\\" [] [] )"', + code: 'null', + }, + }, + result: { + badResult: null, + goodResult: '"Write succeeded"', + continuation: 'null', + }, + }, + }, + { + cursor: 'MTcyNDc2MjA4', + node: { + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwicW9DbWhfS19hQTFRWkRMdDRNLXBrUzd1ZThMYzBZejRYV0RnbTBDenUtOCJd', + hash: 'qoCmh_K_aA1QZDLt4M-pkS7ue8Lc0Yz4XWDgm0Czu-8', + cmd: { + meta: { + sender: 'k:74ec8cfde5f8f997cd75cc18bb8adc4ab177c2dd1aa759db7bd1e05bf70fe69d', + }, + payload: { + code: 'null', + }, + }, + result: { + badResult: null, + goodResult: '"Already directed...."', + continuation: 'null', + }, + }, + }, + { + cursor: 'MTcyNDc2MjA3', + node: { + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwib1hsV0NJMXNDUV9peV9aSGZiblZ3RXJRTThQbVNxUFIxN3I5SU9XNzNQYyJd', + hash: 'oXlWCI1sCQ_iy_ZHfbnVwErQM8PmSqPR17r9IOW73Pc', + cmd: { + meta: { + sender: 'k:483f068e31c4e30114d937dce7192ac4e2066eecbf714d97f91b994b6bda159c', + }, + payload: { + code: 'null', + }, + }, + result: { + badResult: null, + goodResult: '"Write succeeded"', + continuation: 'null', + }, + }, + }, + { + cursor: 'MTcyNDc2MjA2', + node: { + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwiazlGU29CSlpjNVFwX3NIQ0VtXzJuR21TejFqNDJmZTJpcDV5am9Kb25EOCJd', + hash: 'k9FSoBJZc5Qp_sHCEm_2nGmSz1j42fe2ip5yjoJonD8', + cmd: { + meta: { + sender: 'k:7d3afc2c8436f4a47b03654d7b31180fbf1e1e9c3056d4fb6dc80fd3029c9169', + }, + payload: { + code: 'null', + }, + }, + result: { + badResult: null, + goodResult: '"Write succeeded"', + continuation: 'null', + }, + }, + }, + { + cursor: 'MTcyNDc2MjA1', + node: { + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwiMTdTRUVnQlJyQTUtbUo0eXBhYVpxUjVLTExIZGtfT0YyLWZfNk9ua2IzWSJd', + hash: '17SEEgBRrA5-mJ4ypaaZqR5KLLHdk_OF2-f_6Onkb3Y', + cmd: { + meta: { + sender: 'k:739ccc22a7a65880719f3918334bca4c8e39f69e3ef00f1b46829f94faf6e2dc', + }, + payload: { + code: 'null', + }, + }, + result: { + badResult: null, + goodResult: '"Write succeeded"', + continuation: 'null', + }, + }, + }, + { + cursor: 'MTcyNDc2MjA0', + node: { + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwiUC1na1NiVW1EY3RhU05uYmtxcnE5UlpaMmU1LTBha1BTaHFjY085QTNrYyJd', + hash: 'P-gkSbUmDctaSNnbkqrq9RZZ2e5-0akPShqccO9A3kc', + cmd: { + meta: { + sender: 'k:54057e541f3652e86530af9c46a04cf1ab216ea9866f5f31357f01d9a7d4d09d', + }, + payload: { + code: 'null', + }, + }, + result: { + badResult: null, + goodResult: '"Write succeeded"', + continuation: 'null', + }, + }, + }, + { + cursor: 'MTcyNDc2MjAz', + node: { + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwiUEtMTlczamFtV1c3TXN6dWxXQUZpRE9ES3pjOWx5V3h1YXpWbTlxODFpMCJd', + hash: 'PKLNW3jamWW7MszulWAFiDODKzc9lyWxuazVm9q81i0', + cmd: { + meta: { + sender: 'k:54057e541f3652e86530af9c46a04cf1ab216ea9866f5f31357f01d9a7d4d09d', + }, + payload: { + code: 'null', + }, + }, + result: { + badResult: null, + goodResult: '"Write succeeded"', + continuation: 'null', + }, + }, + }, + { + cursor: 'MTcyNDc2MjAy', + node: { + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwiUEdiYUJ0VUFtV2gyN2sxVVIzVnB0a3M3b2JnYVlsQU9jUEZId1o1dll2RSJd', + hash: 'PGbaBtUAmWh27k1UR3Vptks7obgaYlAOcPFHwZ5vYvE', + cmd: { + meta: { + sender: 'k:54057e541f3652e86530af9c46a04cf1ab216ea9866f5f31357f01d9a7d4d09d', + }, + payload: { + code: 'null', + }, + }, + result: { + badResult: null, + goodResult: '"Write succeeded"', + continuation: 'null', + }, + }, + }, + { + cursor: 'MTcyNDc2MjAx', + node: { + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwiTDFhbURpRTFVRUhvXzdFRURFU3pFWmt2bmZ6c1RHTkkxbWlaMUF0bzcxUSJd', + hash: 'L1amDiE1UEHo_7EEDESzEZkvnfzsTGNI1miZ1Ato71Q', + cmd: { + meta: { + sender: 'k:2f434a14e4730be78cbf8080ec06707632ee5138ae193998b67f638906c47d0f', + }, + payload: { + code: 'null', + }, + }, + result: { + badResult: null, + goodResult: '"Write succeeded"', + continuation: 'null', + }, + }, + }, + { + cursor: 'MTcyNDc2MjAw', + node: { + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwiZ1NieV91S3QwaTlNaXljWHNER0NkMHBEVzRBV28yUkZQSWVWbFpKN2wtZyJd', + hash: 'gSby_uKt0i9MiycXsDGCd0pDW4AWo2RFPIeVlZJ7l-g', + cmd: { + meta: { + sender: 'k:2f0eded546d93ff86151b7ec433fe606fea026c393edf8df4e088ee8b5041185', + }, + payload: { + code: 'null', + }, + }, + result: { + badResult: null, + goodResult: '"Write succeeded"', + continuation: 'null', + }, + }, + }, + { + cursor: 'MTcyNDc2MTk5', + node: { + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwic1dnQW84bkdYWUVzd3Z1Y2oyYkNtdGtoRXE4Z3d6Xy1sb1hia092b3RlMCJd', + hash: 'sWgAo8nGXYEswvucj2bCmtkhEq8gwz_-loXbkOvote0', + cmd: { + meta: { + sender: 'k:2f434a14e4730be78cbf8080ec06707632ee5138ae193998b67f638906c47d0f', + }, + payload: { + code: 'null', }, }, result: { badResult: null, goodResult: '""', - continuation: null, + continuation: 'null', + }, + }, + }, + { + cursor: 'MTcyNDc2MTk4', + node: { + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwiZ0o2eU8xdWktY3c1Zm9hdjhxaU02WXBOS2tSYTdRcGN3TVZaSTJtVm1iSSJd', + hash: 'gJ6yO1ui-cw5foav8qiM6YpNKkRa7QpcwMVZI2mVmbI', + cmd: { + meta: { + sender: 'k:ba43d73f05819192cf991357e6c677c59cd7c896261316cb5f576379fcf07591', + }, + payload: { + code: 'null', + }, + }, + result: { + badResult: null, + goodResult: '"Maximum witnesses reached"', + continuation: 'null', }, }, }, { - cursor: 'MTcyNDc2MTkw', + cursor: 'MTcyNDc2MTk3', node: { - id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwiV1pHdkFHQjFpVU12WENjMWRHbnc4QlZYUzJsa0NJdDhLTEN3QlRDNllVUSJd', - hash: 'WZGvAGB1iUMvXCc1dGnw8BVXS2lkCIt8KLCwBTC6YUQ', + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwib2dvcFpGNURQUEIwUXNlbjVFSFllemZXdkFmMGtRSzlsaldQdzdiS1BXcyJd', + hash: 'ogopZF5DPPB0Qsen5EHYezfWvAf0kQK9ljWPw7bKPWs', cmd: { meta: { - sender: 'k:6712f99b183edd481c76c1fd572b60f56620a799dd00d0a40e74a06ce1b09c77', + sender: 'k:f41954d6b85782c843d034b5336c12f094992c8c9c5f2c2fa2a725e680ee6e29', }, payload: { - code: '"(free.radio02.add-received-with-chain \\"cc4f5cfffe205d7b\\" \\"U2FsdGVkX18vv0+U2aVuU/ZtRgGTo4a1ScURpWZG3Rc=;;;;;U2YBtMfdmgH65fGUlkFJdXwncaDF27GDpPBsGGVO16imgpDoJ4IHG5ZpOKxpJbV/DgsFgU/DSlfFIGIW6kIDXYcjja+icMQLZnkopKXrbOMYeNf9nEu3iOE1Gft4leAYHUqHhG9DBt9+1xOLzdxAOteuyeyhbtPb3xBK/RZilFQ=\\" \\"0\\" )"', + code: 'null', }, }, result: { badResult: null, goodResult: '"Write succeeded"', - continuation: null, + continuation: 'null', + }, + }, + }, + { + cursor: 'MTcyNDc2MTk2', + node: { + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwiRWh0aWdfY0ctb056aFA4TkotcElsdWRlcXNxVnFXaU85S3htTUltMUZWVSJd', + hash: 'Ehtig_cG-oNzhP8NJ-pIludeqsqVqWiO9KxmMIm1FVU', + cmd: { + meta: { + sender: 'k:54057e541f3652e86530af9c46a04cf1ab216ea9866f5f31357f01d9a7d4d09d', + }, + payload: { + code: 'null', + }, + }, + result: { + badResult: null, + goodResult: '"Write succeeded"', + continuation: 'null', + }, + }, + }, + { + cursor: 'MTcyNDc2MTk1', + node: { + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwiSWQxdFU4UTRpaVRkVWtWZnhhT1BBRHpRQi1PenJFV2RxN1Z2U1RqTGNMZyJd', + hash: 'Id1tU8Q4iiTdUkVfxaOPADzQB-OzrEWdq7VvSTjLcLg', + cmd: { + meta: { + sender: 'k:54057e541f3652e86530af9c46a04cf1ab216ea9866f5f31357f01d9a7d4d09d', + }, + payload: { + code: 'null', + }, + }, + result: { + badResult: null, + goodResult: '"Write succeeded"', + continuation: 'null', + }, + }, + }, + { + cursor: 'MTcyNDc2MTk0', + node: { + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwiQ1FqeGVvUFludWRhaHZmbkhSbEtvbmFLMEYySzFab243LUZ2YVdYaDBsQSJd', + hash: 'CQjxeoPYnudahvfnHRlKonaK0F2K1Zon7-FvaWXh0lA', + cmd: { + meta: { + sender: 'k:62068cb8400a1ca310fd6ac984c1b80a5b1d16681c54ef44f31ae1c61bf9f4c7', + }, + payload: { + code: 'null', + }, + }, + result: { + badResult: null, + goodResult: '"Maximum witnesses reached"', + continuation: 'null', + }, + }, + }, + { + cursor: 'MTcyNDc2MTkz', + node: { + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwidndNQWpsa290WGpFUXp3ZTBiWGNVbTVuTk4xdUhSTWFqTE1kemtCdHBmOCJd', + hash: 'vwMAjlkotXjEQzwe0bXcUm5nNN1uHRMajLMdzkBtpf8', + cmd: { + meta: { + sender: 'k:2bdf8f7b046f54a8d70d618ae6ba341c0e2047d1702135102b5201e3f507356d', + }, + payload: { + code: 'null', + }, + }, + result: { + badResult: null, + goodResult: '"Write succeeded"', + continuation: 'null', }, }, }, { - cursor: 'MTcyNDc2MTg5', + cursor: 'MTcyNDc2MTky', node: { - id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwiRXQ0NWdTWGN4ZnF1bDU0Zi04TTVfaExTTjVObVYyVjhkXzVUNDlEYXlNYyJd', - hash: 'Et45gSXcxfqul54f-8M5_hLSN5NmV2V8d_5T49DayMc', + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwicVVFdEhUZnpfOXo5cWl2dDRmZXROdHF0MDVwei15RktMdTJ3TWlHNFdyRSJd', + hash: 'qUEtHTfz_9z9qivt4fetNtqt05pz-yFKLu2wMiG4WrE', cmd: { meta: { - sender: 'k:0e98a32914e0af5c3dc2b41f216a37091d1664b00b6a8e3a87d5e5022eeab4e3', + sender: 'k:b95ea3559d0bdab751891523dab34f5f57f473fdd00cb9d79a23b9414e4f4e33', }, payload: { - code: '"(free.radio02.direct-to-send \\"k:b3c65463af1f398a5465c15c4c9f221d3a5bb3efad52829715f088a7ee4bc7d3\\" )"', + code: '"(free.radio02.add-received-with-chain \\"cc4f5cfffe205d7b\\" \\"U2FsdGVkX19SVUSGOXLII21FEVYX3X+5eqsnPJcA1PA=;;;;;oQTiyaTIRTnokdQjDZ8e6MXQyqQ5WZmgcwRJa5QmM2GngbpJCs4oG4M2Iaf0CXPxkuMplG4llknLmOwkG1CPOCVXnjoSEE+95ut7zpNwaVYTw7HJ711DJPgc1LiZEclWcKaOFRQ/Fax5t0EPnqLE5WwpGjWZEwlrdDlvx/XuJBI=\\" \\"0\\" )"', }, }, result: { @@ -137,6 +454,26 @@ const resTwo = { }, }, }, + { + cursor: 'MTcyNDc2MTkx', + node: { + id: 'VHJhbnNhY3Rpb246WyJGSEQyaEVwQlltUzdDUjhsMUI2YmhyVk0zZHZLX0wxeXo5dUtLWFBBRFVRIiwiU2MweDVnWHdYVHFGd1B2LTRyUUx4VWlfYWNOa1owN3psYURsbFBfNWFzMCJd', + hash: 'Sc0x5gXwXTqFwPv-4rQLxUi_acNkZ07zlaDllP_5as0', + cmd: { + meta: { + sender: 'k:e1e4a7064bffaf7dbbf5ef5f7f3c025e5d7fe48614aa2e4d6c44ccc9dcd3d56b', + }, + payload: { + code: '"(free.radio02.close-send-receive \\"k:48e0917d48785f68572bc6506e049a713979772fa341aeb14e9ecae47d951f8c\\" [] [] )"', + }, + }, + result: { + badResult: null, + goodResult: '""', + continuation: null, + }, + }, + }, ], }, },