
Commit 8b84fc2

Merge pull request #336 from hack-a-chain-software/bare-server
Merge 'bare-metal-server' into 'develop'
2 parents (52a9e49 + 75a6d78) · commit 8b84fc2

8 files changed: +63 −35 lines

backfill/main.go

Lines changed: 10 additions & 3 deletions
@@ -19,7 +19,14 @@ func main() {
 	go config.StartMemoryMonitoring()
 	cut := fetch.FetchCut()
 	ChainId := env.ChainId
-	minHeights := config.GetMinHeights(env.Network)
-	SyncMinHeight := minHeights[ChainId]
-	process.StartBackfill(cut.Height, cut.Hash, ChainId, SyncMinHeight, pool)
+
+	var effectiveSyncMinHeight int
+	if env.SyncMinHeight > 0 {
+		effectiveSyncMinHeight = env.SyncMinHeight
+	} else {
+		chainGenesisHeights := config.GetMinHeights(env.Network)
+		effectiveSyncMinHeight = chainGenesisHeights[ChainId]
+	}
+
+	process.StartBackfill(cut.Height, cut.Hash, ChainId, effectiveSyncMinHeight, pool)
 }
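
The change lets env.SyncMinHeight override the per-chain genesis heights, so a bare-metal deployment can pin the backfill floor explicitly. A minimal sketch of how such an override could be read, assuming a SYNC_MIN_HEIGHT environment variable and an env package layout that this diff does not show:

// Hypothetical sketch: loading a SYNC_MIN_HEIGHT override. The variable
// name and this package layout are assumptions, not part of the commit.
package env

import (
	"os"
	"strconv"
)

// SyncMinHeight stays 0 when the override is unset, so main() falls back
// to the per-chain genesis heights from config.GetMinHeights.
var SyncMinHeight = readIntOrZero("SYNC_MIN_HEIGHT")

func readIntOrZero(key string) int {
	raw, ok := os.LookupEnv(key)
	if !ok {
		return 0
	}
	value, err := strconv.Atoi(raw)
	if err != nil {
		return 0
	}
	return value
}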

backfill/process/process.go

Lines changed: 8 additions & 1 deletion
@@ -20,7 +20,14 @@ func StartBackfill(LastHeight int, Hash string, ChainId int, SyncMinHeight int,
 
 	for CurrentHeight >= SyncMinHeight {
 		startTime := time.Now()
-		nextHeight := max(CurrentHeight-env.SyncFetchIntervalInBlocks+1, SyncMinHeight)
+
+		var nextHeight int
+		if CurrentHeight == SyncMinHeight {
+			nextHeight = SyncMinHeight
+		} else {
+			nextHeight = Max(CurrentHeight-env.SyncFetchIntervalInBlocks+1, SyncMinHeight)
+		}
+
 		log.Printf("Processing height %d to %d...\n", CurrentHeight, nextHeight)
 
 		blocks, err := fetch.FetchPayloadsWithHeaders(network, ChainId, Hash, nextHeight, CurrentHeight)
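
The loop walks the chain downward in windows of env.SyncFetchIntervalInBlocks blocks, clamping the lower edge at SyncMinHeight; the new CurrentHeight == SyncMinHeight branch handles the final single-block window explicitly. A small, self-contained illustration of the window arithmetic with assumed numbers (none of them come from this commit):

// Illustration only: one backfill window with assumed values.
package main

import "fmt"

func main() {
	currentHeight := 5_000_000 // cut height being walked down from (assumed)
	interval := 100            // env.SyncFetchIntervalInBlocks (assumed)
	syncMinHeight := 4_999_950 // backfill floor (assumed)

	// Same clamp as Max(CurrentHeight-interval+1, SyncMinHeight) above.
	nextHeight := currentHeight - interval + 1
	if nextHeight < syncMinHeight {
		nextHeight = syncMinHeight
	}
	fmt.Printf("Processing height %d to %d\n", currentHeight, nextHeight) // 5000000 to 4999950
}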

backfill/process/save_payloads.go

Lines changed: 7 additions & 4 deletions
@@ -72,17 +72,17 @@ func savePayloads(network string, chainId int, processedPayloads []fetch.Process
 		var currBlock = blocks[index]
 		txs, txDetails, txCoinbase, err := PrepareTransactions(network, blockId, processedPayload, currBlock)
 		if err != nil {
-			return Counters{}, DataSizeTracker{}, fmt.Errorf("saving transactions -> %w", err)
+			return Counters{}, DataSizeTracker{}, fmt.Errorf("preparing transactions for block %d -> %w", currBlock.Height, err)
 		}
 
 		transactionIds, err := repository.SaveTransactions(tx, txs, txCoinbase)
 		if err != nil {
-			return Counters{}, DataSizeTracker{}, fmt.Errorf("saving transactions -> %w", err)
+			return Counters{}, DataSizeTracker{}, fmt.Errorf("saving transactions for block %d -> %w", currBlock.Height, err)
 		}
 
 		err = repository.SaveTransactionDetails(tx, txDetails, transactionIds)
 		if err != nil {
-			return Counters{}, DataSizeTracker{}, fmt.Errorf("saving transactions -> %w", err)
+			return Counters{}, DataSizeTracker{}, fmt.Errorf("saving transaction details for block %d -> %w", currBlock.Height, err)
 		}
 
 		txsSize := approximateSize(txs)
@@ -144,15 +144,18 @@ func savePayloads(network string, chainId int, processedPayloads []fetch.Process
 	// 	counters.Guards += len(guards)
 	// }
 
+	log.Printf("Saved payloads in %fs\n", time.Since(startTime).Seconds())
+
+	commitStartTime := time.Now()
 	if err := tx.Commit(context.Background()); err != nil {
 		return Counters{}, DataSizeTracker{}, fmt.Errorf("committing transaction: %w", err)
 	}
+	log.Printf("DB commit took %fs\n", time.Since(commitStartTime).Seconds())
 
 	dataSizeTracker.TransactionsKB /= 1024
 	dataSizeTracker.EventsKB /= 1024
 	dataSizeTracker.TransfersKB /= 1024
 	dataSizeTracker.SignersKB /= 1024
 
-	log.Printf("Saved payloads in %fs\n", time.Since(startTime).Seconds())
 	return counters, dataSizeTracker, nil
 }
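
Moving the "Saved payloads" log ahead of tx.Commit and timing the commit on its own makes it possible to tell how much of each window is spent batching rows versus waiting on the database. A rough sketch of the same timing pattern in isolation, with a hypothetical doCommit callback standing in for tx.Commit:

// Sketch of timing the save phase and the commit phase separately.
// doCommit is a hypothetical stand-in for tx.Commit; only the pattern is
// taken from the diff above.
package main

import (
	"fmt"
	"log"
	"time"
)

func saveAndCommit(save, doCommit func() error) error {
	startTime := time.Now()
	if err := save(); err != nil {
		return fmt.Errorf("saving payloads -> %w", err)
	}
	log.Printf("Saved payloads in %fs\n", time.Since(startTime).Seconds())

	commitStartTime := time.Now()
	if err := doCommit(); err != nil {
		return fmt.Errorf("committing transaction: %w", err)
	}
	log.Printf("DB commit took %fs\n", time.Since(commitStartTime).Seconds())
	return nil
}

func main() {
	_ = saveAndCommit(
		func() error { time.Sleep(10 * time.Millisecond); return nil },
		func() error { time.Sleep(5 * time.Millisecond); return nil },
	)
}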

backfill/process/utils.go

Lines changed: 1 addition & 1 deletion
@@ -161,7 +161,7 @@ func buildModuleName(namespace *string, name string) string {
 	return name
 }
 
-func max(a, b int) int {
+func Max(a, b int) int {
 	if a > b {
 		return a
 	}
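
The hunk cuts off before the function ends; the remainder of the renamed helper is presumably just the fallthrough return, inferred here rather than shown in the commit:

// Assumed full form of the renamed helper; the final return is not
// visible in the hunk above and is inferred.
func Max(a, b int) int {
	if a > b {
		return a
	}
	return b
}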

backfill/repository/transaction_repository.go

Lines changed: 10 additions & 4 deletions
@@ -1,6 +1,7 @@
 package repository
 
 import (
+	"bytes"
 	"context"
 	"encoding/json"
 	"fmt"
@@ -119,8 +120,7 @@ func SaveTransactionDetails(db pgx.Tx, details []TransactionDetailsAttributes, t
 		return nil
 	}
 
-	query := `
-		INSERT INTO "TransactionDetails" (
+	query := `INSERT INTO "TransactionDetails" (
 		"transactionId", code, continuation, data, gas, gaslimit, gasprice,
 		nonce, pactid, proof, rollback, sigs, step, ttl, "createdAt", "updatedAt"
 	)
@@ -131,6 +131,10 @@ func SaveTransactionDetails(db pgx.Tx, details []TransactionDetailsAttributes, t
 	batch := &pgx.Batch{}
 
 	for index := 0; index < len(details); index++ {
+		// Sanitize data before marshalling to prevent "unsupported Unicode escape sequence" errors.
+		// PostgreSQL's JSONB type does not support null characters (\u0000), so we remove them.
+		details[index].Data = bytes.ReplaceAll(details[index].Data, []byte(`\u0000`), []byte{})
+
 		detail := details[index]
 		code, err := json.Marshal(detail.Code)
 		if err != nil {
@@ -179,9 +183,11 @@ func SaveTransactionDetails(db pgx.Tx, details []TransactionDetailsAttributes, t
 	// Execute all queued queries
 	for i := 0; i < len(details); i++ {
 		if _, err := br.Exec(); err != nil {
-			return fmt.Errorf("failed to execute batch for transaction details %d: %v", i, err)
+			failedDetail := details[i]
+			failedData, _ := json.MarshalIndent(failedDetail, "", "  ")
+			return fmt.Errorf("failed to execute batch for transaction details %d: %v\nFailing data:\n%s", i, err, string(failedData))
 		}
 	}
 
 	return nil
-}
+}
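
The sanitization removes the textual escape sequence \u0000 from the raw JSON bytes before they reach PostgreSQL, since jsonb cannot store NUL characters and rejects the whole batch with an "unsupported Unicode escape sequence" error. A standalone sketch of the same idea, assuming the payload arrives as json.RawMessage (the diff only shows that details[index].Data is a byte slice):

// Sketch: strip the \u0000 escape that PostgreSQL jsonb rejects.
// The json.RawMessage input type is an assumption.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

func sanitizeForJSONB(raw json.RawMessage) json.RawMessage {
	// Remove the literal six-character sequence `\u0000`; PostgreSQL cannot
	// store NUL characters in jsonb values.
	return bytes.ReplaceAll(raw, []byte(`\u0000`), []byte{})
}

func main() {
	dirty := json.RawMessage(`{"memo":"bad\u0000value"}`)
	fmt.Println(string(sanitizeForJSONB(dirty))) // {"memo":"badvalue"}
}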

indexer/docker-compose.yml

Lines changed: 19 additions & 1 deletion
@@ -3,7 +3,7 @@ version: '3.8'
 services:
   indexer-db:
     image: postgres
-    container_name: kad-indexer-postgres
+    container_name: postgres-indexer
     environment:
       POSTGRES_USER: ${DB_USERNAME}
       POSTGRES_PASSWORD: ${DB_PASSWORD}
@@ -12,6 +12,24 @@ services:
       - '5432:5432'
     volumes:
       - ${PWD}/indexer/postgres:/var/lib/postgresql/data
+    command:
+      - 'postgres'
+      - '-c'
+      - 'shared_buffers=8GB'
+      - '-c'
+      - 'effective_cache_size=24GB'
+      - '-c'
+      - 'maintenance_work_mem=2GB'
+      - '-c'
+      - 'work_mem=64MB'
+      - '-c'
+      - 'max_wal_size=16GB'
+      - '-c'
+      - 'random_page_cost=1.1'
+      - '-c'
+      - 'effective_io_concurrency=200'
+      - '-c'
+      - 'synchronous_commit=off'
     healthcheck:
       test: ['CMD-SHELL', 'pg_isready -U ${DB_USERNAME}']
       interval: 10s
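
The added command flags tune the container for a large bare-metal host: shared_buffers=8GB sizes the buffer pool, effective_cache_size=24GB tells the planner how much OS cache to expect, and synchronous_commit=off trades the most recent commits on a crash for higher write throughput (it does not risk corruption). One way to confirm the flags actually took effect, sketched with the pgx driver the backfill already uses (the v5 import path, DSN and credentials are assumptions):

// Sketch: confirm the tuned settings are active on the running container.
// The pgx v5 import path and the DSN below are assumptions; adjust them
// to the project's actual module and credentials.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/jackc/pgx/v5"
)

func main() {
	ctx := context.Background()
	conn, err := pgx.Connect(ctx, "postgres://user:password@localhost:5432/indexer")
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close(ctx)

	for _, setting := range []string{"shared_buffers", "work_mem", "synchronous_commit"} {
		var value string
		// SHOW reports the current value of a run-time parameter.
		if err := conn.QueryRow(ctx, "SHOW "+setting).Scan(&value); err != nil {
			log.Fatal(err)
		}
		fmt.Printf("%s = %s\n", setting, value)
	}
}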

indexer/migrations/20250610124420-update-pool-stats-timestamp.js

Lines changed: 0 additions & 14 deletions
@@ -29,27 +29,13 @@ module.exports = {
       type: Sequelize.DATEONLY,
       allowNull: false,
     });
-
-    // Create unique index
-    await queryInterface.addIndex('PoolStats', ['pairId', 'timestamp'], {
-      unique: true,
-      name: 'pool_stats_pairid_timestamp_idx',
-    });
   },
 
   async down(queryInterface, Sequelize) {
-    // Remove unique index
-    await queryInterface.removeIndex('PoolStats', 'pool_stats_pairid_timestamp_idx');
-
     // Convert back to TIMESTAMP WITH TIME ZONE
     await queryInterface.changeColumn('PoolStats', 'timestamp', {
       type: Sequelize.DATE,
       allowNull: false,
     });
-
-    // Recreate original non-unique index
-    await queryInterface.addIndex('PoolStats', ['pairId', 'timestamp'], {
-      name: 'pool_stats_pairid_timestamp_idx',
-    });
   },
 };

indexer/src/services/payload.ts

Lines changed: 8 additions & 7 deletions
@@ -198,13 +198,14 @@ export async function processTransaction(
   );
 
   const events = await Promise.all(eventsAttributes);
-  const swapEvents = events.filter(event => event.name === 'SWAP');
-  console.log('swapEvents', JSON.stringify(swapEvents, null, 2));
-  const addLiquidityEvents = events.filter(event => event.name === 'ADD_LIQUIDITY');
-  console.log('addLiquidityEvents', JSON.stringify(addLiquidityEvents, null, 2));
-  const mintEvents = events.filter(event => event.name === 'MINT_EVENT');
-  console.log('mintEvents', JSON.stringify(mintEvents, null, 2));
-  console.log('------------------------------------- end -------------------------------');
+  // Note: Should not summit debug to the 'main' branch
+  // const swapEvents = events.filter(event => event.name === 'SWAP');
+  // console.log('swapEvents', JSON.stringify(swapEvents, null, 2));
+  // const addLiquidityEvents = events.filter(event => event.name === 'ADD_LIQUIDITY');
+  // console.log('addLiquidityEvents', JSON.stringify(addLiquidityEvents, null, 2));
+  // const mintEvents = events.filter(event => event.name === 'MINT_EVENT');
+  // console.log('mintEvents', JSON.stringify(mintEvents, null, 2));
+  // console.log('------------------------------------- end -------------------------------');
   const eventsWithTransactionId = events.map(event => ({
     ...event,
     transactionId,
