diff --git a/migrations/1747151261415_recalculate-balances.js b/migrations/1747151261415_recalculate-balances.js
new file mode 100644
index 0000000000..dc73af890d
--- /dev/null
+++ b/migrations/1747151261415_recalculate-balances.js
@@ -0,0 +1,100 @@
+/* eslint-disable camelcase */
+
+exports.shorthands = undefined;
+
+exports.up = pgm => {
+  // Remove old balances.
+  pgm.sql(`TRUNCATE TABLE ft_balances`);
+
+  // Recalculate STX balances
+  pgm.sql(`
+    WITH all_balances AS (
+      SELECT sender AS address, -SUM(amount) AS balance_change
+      FROM stx_events
+      WHERE asset_event_type_id IN (1, 3) -- Transfers and Burns affect the sender's balance
+        AND canonical = true AND microblock_canonical = true
+      GROUP BY sender
+      UNION ALL
+      SELECT recipient AS address, SUM(amount) AS balance_change
+      FROM stx_events
+      WHERE asset_event_type_id IN (1, 2) -- Transfers and Mints affect the recipient's balance
+        AND canonical = true AND microblock_canonical = true
+      GROUP BY recipient
+    ),
+    net_balances AS (
+      SELECT address, SUM(balance_change) AS balance
+      FROM all_balances
+      GROUP BY address
+    ),
+    fees AS (
+      SELECT address, SUM(total_fees) AS total_fees
+      FROM (
+        SELECT sender_address AS address, SUM(fee_rate) AS total_fees
+        FROM txs
+        WHERE canonical = true AND microblock_canonical = true AND sponsored = false
+        GROUP BY sender_address
+        UNION ALL
+        SELECT sponsor_address AS address, SUM(fee_rate) AS total_fees
+        FROM txs
+        WHERE canonical = true AND microblock_canonical = true AND sponsored = true
+        GROUP BY sponsor_address
+      ) AS subquery
+      GROUP BY address
+    ),
+    rewards AS (
+      SELECT
+        recipient AS address,
+        SUM(
+          coinbase_amount + tx_fees_anchored + tx_fees_streamed_confirmed + tx_fees_streamed_produced
+        ) AS total_rewards
+      FROM miner_rewards
+      WHERE canonical = true
+      GROUP BY recipient
+    ),
+    all_addresses AS (
+      SELECT address FROM net_balances
+      UNION
+      SELECT address FROM fees
+      UNION
+      SELECT address FROM rewards
+    )
+    INSERT INTO ft_balances (address, balance, token)
+    SELECT
+      aa.address,
+      COALESCE(nb.balance, 0) - COALESCE(f.total_fees, 0) + COALESCE(r.total_rewards, 0) AS balance,
+      'stx' AS token
+    FROM all_addresses aa
+    LEFT JOIN net_balances nb ON aa.address = nb.address
+    LEFT JOIN fees f ON aa.address = f.address
+    LEFT JOIN rewards r ON aa.address = r.address
+  `);
+
+  // Recalculate FT balances
+  pgm.sql(`
+    WITH all_balances AS (
+      SELECT sender AS address, asset_identifier, -SUM(amount) AS balance_change
+      FROM ft_events
+      WHERE asset_event_type_id IN (1, 3) -- Transfers and Burns affect the sender's balance
+        AND canonical = true
+        AND microblock_canonical = true
+      GROUP BY sender, asset_identifier
+      UNION ALL
+      SELECT recipient AS address, asset_identifier, SUM(amount) AS balance_change
+      FROM ft_events
+      WHERE asset_event_type_id IN (1, 2) -- Transfers and Mints affect the recipient's balance
+        AND canonical = true
+        AND microblock_canonical = true
+      GROUP BY recipient, asset_identifier
+    ),
+    net_balances AS (
+      SELECT address, asset_identifier, SUM(balance_change) AS balance
+      FROM all_balances
+      GROUP BY address, asset_identifier
+    )
+    INSERT INTO ft_balances (address, balance, token)
+    SELECT address, balance, asset_identifier AS token
+    FROM net_balances
+  `);
+};
+
+exports.down = pgm => {};
diff --git a/src/datastore/pg-write-store.ts b/src/datastore/pg-write-store.ts
index 796e89300a..fd0e55ee2c 100644
--- a/src/datastore/pg-write-store.ts
+++ b/src/datastore/pg-write-store.ts
@@ -203,7 +203,7 @@ export class PgWriteStore extends PgStore {
   async update(data: DataStoreBlockUpdateData): Promise<void> {
     let garbageCollectedMempoolTxs: string[] = [];
-    let batchedTxData: DataStoreTxEventData[] = [];
+    let newTxData: DataStoreTxEventData[] = [];
 
     await this.sqlWriteTransaction(async sql => {
       const chainTip = await this.getChainTip(sql);
@@ -223,7 +223,7 @@ export class PgWriteStore extends PgStore {
       // Insert microblocks, if any. Clear already inserted microblock txs from the anchor-block
       // update data to avoid duplicate inserts.
       const insertedMicroblockHashes = await this.insertMicroblocksFromBlockUpdate(sql, data);
-      batchedTxData = data.txs.filter(entry => {
+      newTxData = data.txs.filter(entry => {
         return !insertedMicroblockHashes.has(entry.tx.microblock_hash);
       });
 
@@ -264,7 +264,7 @@ export class PgWriteStore extends PgStore {
 
       // Clear accepted microblock txs from the anchor-block update data to avoid duplicate
       // inserts.
-      batchedTxData = batchedTxData.filter(entry => {
+      newTxData = newTxData.filter(entry => {
        const matchingTx = acceptedMicroblockTxs.find(tx => tx.tx_id === entry.tx.tx_id);
        return !matchingTx;
       });
@@ -285,29 +285,32 @@ export class PgWriteStore extends PgStore {
       const q = new PgWriteQueue();
       q.enqueue(() => this.updateMinerRewards(sql, data.minerRewards));
       if (isCanonical) {
-        q.enqueue(() => this.updateStxBalances(sql, batchedTxData, data.minerRewards));
-        q.enqueue(() => this.updateFtBalances(sql, batchedTxData));
+        // Use `data.txs` directly instead of `newTxData` for these STX/FT balance updates because
+        // we don't want to skip balance changes in transactions that were previously confirmed
+        // via microblocks.
+        q.enqueue(() => this.updateStxBalances(sql, data.txs, data.minerRewards));
+        q.enqueue(() => this.updateFtBalances(sql, data.txs));
       }
       if (data.poxSetSigners && data.poxSetSigners.signers) {
         const poxSet = data.poxSetSigners;
         q.enqueue(() => this.updatePoxSetsBatch(sql, data.block, poxSet));
       }
-      if (batchedTxData.length > 0) {
+      if (newTxData.length > 0) {
         q.enqueue(() =>
           this.updateTx(
             sql,
-            batchedTxData.map(b => b.tx)
+            newTxData.map(b => b.tx)
           )
         );
-        q.enqueue(() => this.updateStxEvents(sql, batchedTxData));
-        q.enqueue(() => this.updatePrincipalStxTxs(sql, batchedTxData));
-        q.enqueue(() => this.updateSmartContractEvents(sql, batchedTxData));
-        q.enqueue(() => this.updatePoxSyntheticEvents(sql, 'pox2_events', batchedTxData));
-        q.enqueue(() => this.updatePoxSyntheticEvents(sql, 'pox3_events', batchedTxData));
-        q.enqueue(() => this.updatePoxSyntheticEvents(sql, 'pox4_events', batchedTxData));
-        q.enqueue(() => this.updateStxLockEvents(sql, batchedTxData));
-        q.enqueue(() => this.updateFtEvents(sql, batchedTxData));
-        for (const entry of batchedTxData) {
+        q.enqueue(() => this.updateStxEvents(sql, newTxData));
+        q.enqueue(() => this.updatePrincipalStxTxs(sql, newTxData));
+        q.enqueue(() => this.updateSmartContractEvents(sql, newTxData));
+        q.enqueue(() => this.updatePoxSyntheticEvents(sql, 'pox2_events', newTxData));
+        q.enqueue(() => this.updatePoxSyntheticEvents(sql, 'pox3_events', newTxData));
+        q.enqueue(() => this.updatePoxSyntheticEvents(sql, 'pox4_events', newTxData));
+        q.enqueue(() => this.updateStxLockEvents(sql, newTxData));
+        q.enqueue(() => this.updateFtEvents(sql, newTxData));
+        for (const entry of newTxData) {
           q.enqueue(() => this.updateNftEvents(sql, entry.tx, entry.nftEvents));
           q.enqueue(() => this.updateSmartContracts(sql, entry.tx, entry.smartContracts));
           q.enqueue(() => this.updateNamespaces(sql, entry.tx, entry.namespaces));
diff --git a/tests/api/address.test.ts b/tests/api/address.test.ts
index 216e50595b..4e0ebfaf34 100644
--- a/tests/api/address.test.ts
+++ b/tests/api/address.test.ts
@@ -3057,4 +3057,108 @@ describe('address tests', () => {
     expect(json6.results.length).toEqual(4);
     expect(json6.results[0].tx_id).toEqual('0xffa1');
   });
+
+  test('balance calculation after microblock confirmations', async () => {
+    const addr1 = 'SP3D6PV2ACBPEKYJTCMH7HEN02KP87QSP8KTEH335';
+    const addr2 = 'SP2TBW1RSC44JZA4XQ1C2G5SZRGSMM14C5NWAKSDD';
+
+    // Send some initial balance for addr1
+    await db.update(
+      new TestBlockBuilder({
+        block_height: 1,
+        index_block_hash: '0x0001',
+        parent_index_block_hash: '',
+      })
+        .addTx({
+          tx_id: '0x1101',
+          token_transfer_recipient_address: addr1,
+          type_id: DbTxTypeId.TokenTransfer,
+          token_transfer_amount: 20_000n,
+          fee_rate: 50n,
+        })
+        .addTxStxEvent({
+          amount: 20_000n,
+          block_height: 1,
+          recipient: addr1,
+          tx_id: '0x1101',
+        })
+        .build()
+    );
+    // Send STX to addr2 in a microblock in transaction 0x1102
+    await db.updateMicroblocks(
+      new TestMicroblockStreamBuilder()
+        .addMicroblock({
+          parent_index_block_hash: '0x0001',
+          microblock_hash: '0xff01',
+          microblock_sequence: 0,
+        })
+        .addTx({
+          tx_id: '0x1102',
+          sender_address: addr1,
+          token_transfer_recipient_address: addr2,
+          type_id: DbTxTypeId.TokenTransfer,
+          token_transfer_amount: 2000n,
+          fee_rate: 100n,
+          microblock_hash: '0xff01',
+          microblock_sequence: 0,
+        })
+        .addTxStxEvent({
+          amount: 2000n,
+          block_height: 2,
+          sender: addr1,
+          recipient: addr2,
+          tx_id: '0x1102',
+        })
+        .build()
+    );
+    await db.update(
+      new TestBlockBuilder({
+        block_height: 2,
+        index_block_hash: '0x0002',
+        parent_index_block_hash: '0x0001',
+        parent_microblock_hash: '0xff01',
+        parent_microblock_sequence: 0,
+      })
+        // Same transaction 0x1102 now appears confirmed in an anchor block
+        .addTx({
+          tx_id: '0x1102',
+          sender_address: addr1,
+          token_transfer_recipient_address: addr2,
+          type_id: DbTxTypeId.TokenTransfer,
+          token_transfer_amount: 2000n,
+          fee_rate: 100n,
+          microblock_hash: '0xff01',
+          microblock_sequence: 0,
+        })
+        .addTxStxEvent({
+          amount: 2000n,
+          block_height: 2,
+          sender: addr1,
+          recipient: addr2,
+          tx_id: '0x1102',
+        })
+        .build()
+    );
+
+    // Check that v1 balance matches v2 balance for both accounts.
+    let result = await supertest(api.server).get(`/extended/v1/address/${addr1}/stx`);
+    expect(result.status).toBe(200);
+    expect(result.type).toBe('application/json');
+    let v1balance = JSON.parse(result.text).balance;
+    expect(v1balance).toBe('17900');
+    result = await supertest(api.server).get(`/extended/v2/addresses/${addr1}/balances/stx`);
+    expect(result.status).toBe(200);
+    expect(result.type).toBe('application/json');
+    expect(JSON.parse(result.text).balance).toBe(v1balance);
+
+    result = await supertest(api.server).get(`/extended/v1/address/${addr2}/stx`);
+    expect(result.status).toBe(200);
+    expect(result.type).toBe('application/json');
+    v1balance = JSON.parse(result.text).balance;
+    expect(v1balance).toBe('2000');
+    result = await supertest(api.server).get(`/extended/v2/addresses/${addr2}/balances/stx`);
+    expect(result.status).toBe(200);
+    expect(result.type).toBe('application/json');
+    expect(JSON.parse(result.text).balance).toBe(v1balance);
+  });
 });
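
Note on the expected values in the new test: `addr1` is credited 20,000 by tx `0x1101`, and tx `0x1102` (counted once, even though it appears in both the microblock `0xff01` and anchor block `0x0002`) debits a 2,000 transfer plus its 100 fee, leaving 17,900; `addr2` only receives the 2,000 transfer; the 50 fee on `0x1101` is charged to that tx's sender, which is not `addr1` in this fixture. The sketch below is a minimal restatement of that arithmetic for reference only; `StxDelta` and `expectedBalance` are hypothetical names, not part of this repository.

// Hypothetical sketch (not repository code): recompute the balances the test asserts,
// using the same transfer amounts and fees as the TestBlockBuilder fixtures above.
type StxDelta = { address: string; credit: bigint; debit: bigint; fee: bigint };

function expectedBalance(deltas: StxDelta[], address: string): bigint {
  return deltas
    .filter(d => d.address === address)
    .reduce((acc, d) => acc + d.credit - d.debit - d.fee, 0n);
}

const deltas: StxDelta[] = [
  // Block 1, tx 0x1101: addr1 is credited 20,000; the 50 fee is paid by a different sender.
  { address: 'addr1', credit: 20_000n, debit: 0n, fee: 0n },
  // Tx 0x1102, first seen in microblock 0xff01 and then confirmed in block 2, counted once:
  // addr1 sends 2,000 and pays the 100 fee; addr2 receives 2,000.
  { address: 'addr1', credit: 0n, debit: 2_000n, fee: 100n },
  { address: 'addr2', credit: 2_000n, debit: 0n, fee: 0n },
];

console.log(expectedBalance(deltas, 'addr1')); // 17900n
console.log(expectedBalance(deltas, 'addr2')); // 2000n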