23 changes: 14 additions & 9 deletions cmd/mcp/README.md
@@ -118,41 +118,46 @@ for datadir mode.
## Available Tools

### Ethereum Standard (eth_*)

`eth_blockNumber`, `eth_getBlockByNumber`, `eth_getBlockByHash`,
`eth_getBalance`, `eth_getTransactionByHash`, `eth_getTransactionReceipt`,
`eth_getBlockReceipts`, `eth_getLogs`, `eth_getCode`, `eth_getStorageAt`,
`eth_getTransactionCount`, `eth_call`, `eth_estimateGas`, `eth_gasPrice`,
`eth_chainId`, `eth_syncing`, `eth_getProof`, and more.

### Erigon-Specific (erigon_*)

`erigon_forks`, `erigon_blockNumber`, `erigon_getHeaderByNumber`,
`erigon_getHeaderByHash`, `erigon_getBlockByTimestamp`,
`erigon_getBalanceChangesInBlock`, `erigon_getLogsByHash`,
`erigon_getLogs`, `erigon_getBlockReceiptsByBlockHash`, `erigon_nodeInfo`.

### Otterscan (ots_*)

`ots_getApiLevel`, `ots_getInternalOperations`,
`ots_searchTransactionsBefore`, `ots_searchTransactionsAfter`,
`ots_getBlockDetails`, `ots_getBlockTransactions`, `ots_hasCode`,
`ots_traceTransaction`, `ots_getTransactionError`,
`ots_getTransactionBySenderAndNonce`, `ots_getContractCreator`.

### Log Analysis

`logs_tail`, `logs_head`, `logs_grep`, `logs_stats` — require `--log.dir`
or `--datadir` to locate Erigon/torrent log files.

### Metrics

`metrics_list`, `metrics_get` — only available in embedded mode (inside Erigon).
In standalone mode, these return an informational message.

## Flags

- | Flag | Default | Description |
- |------|---------|-------------|
- | `--rpc.url` | `http://127.0.0.1:8545` | Erigon JSON-RPC endpoint URL |
- | `--port` | 0 | JSON-RPC port shorthand |
- | `--datadir` | | Erigon data directory (enables direct DB mode) |
- | `--private.api.addr` | `127.0.0.1:9090` | gRPC private API (with --datadir) |
- | `--transport` | `stdio` | Transport: `stdio` or `sse` |
- | `--sse.addr` | `127.0.0.1:8553` | SSE listen address |
- | `--log.dir` | | Log directory (overrides datadir detection) |
+ | Flag                 | Default                 | Description                                     |
+ |----------------------|-------------------------|-------------------------------------------------|
+ | `--rpc.url`          | `http://127.0.0.1:8545` | Erigon JSON-RPC endpoint URL                    |
+ | `--port`             | 0                       | JSON-RPC port shorthand                         |
+ | `--datadir`          |                         | Erigon data directory (enables direct DB mode)  |
+ | `--private.api.addr` | `127.0.0.1:9090`        | gRPC private API (with --datadir)               |
+ | `--transport`        | `stdio`                 | Transport: `stdio` or `sse`                     |
+ | `--sse.addr`         | `127.0.0.1:8553`        | SSE listen address                              |
+ | `--log.dir`          |                         | Log directory (overrides datadir detection)     |
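
For a quick connectivity check before pointing the server at a node, a minimal probe of the default `--rpc.url` endpoint can help. This is an illustrative sketch, not part of this PR; it assumes a locally running Erigon node, and `eth_blockNumber` is one of the tools listed above:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Raw JSON-RPC request against the default --rpc.url value.
	req := []byte(`{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}`)
	resp, err := http.Post("http://127.0.0.1:8545", "application/json", bytes.NewReader(req))
	if err != nil {
		panic(err) // Erigon not reachable at --rpc.url
	}
	defer resp.Body.Close()
	out, _ := io.ReadAll(resp.Body)
	fmt.Println(string(out)) // e.g. {"jsonrpc":"2.0","id":1,"result":"0x..."}
}
```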
16 changes: 15 additions & 1 deletion db/recsplit/eliasfano32/elias_fano.go
@@ -108,7 +108,7 @@ func (ef *EliasFano) deriveFields() int {
jumpWords := ef.jumpSizeWords()
totalWords := wordsLowerBits + wordsUpperBits + jumpWords
//fmt.Printf("EF: %d, %d,%d,%d\n", totalWords, wordsLowerBits, wordsUpperBits, jumpWords)
- if ef.data == nil {
+ if cap(ef.data) < totalWords {
ef.data = make([]uint64, totalWords)
} else {
ef.data = ef.data[:totalWords]
@@ -120,6 +120,20 @@ func (ef *EliasFano) deriveFields() int {
return wordsUpperBits
}

+ // ResetForWrite reinitializes the EliasFano for writing a new sequence, reusing
+ // the existing data slice if it has sufficient capacity (avoiding allocation).
+ // The caller must call Build() after all AddOffset calls, same as with NewEliasFano.
+ func (ef *EliasFano) ResetForWrite(count, maxOffset uint64) {
+ ef.count = count - 1
+ ef.maxOffset = maxOffset
+ ef.u = maxOffset + 1
+ ef.i = 0
+ ef.wordsUpperBits = ef.deriveFields()
+ // Zero out the backing array so OR-style setBits starts from a clean slate.
+ // deriveFields() may have resliced ef.data without zeroing it.
+ clear(ef.data)
+ }

// Build constructs the Elias-Fano index for the given sequence
func (ef *EliasFano) Build() {
for i, c, lastSuperQ := uint64(0), uint64(0), uint64(0); i < uint64(ef.wordsUpperBits); i++ {
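For context, the intended calling pattern for `ResetForWrite` looks roughly like this. An illustrative sketch, not code from this PR; `sequences` is a hypothetical input, and only APIs visible in this diff are used:

```go
// Write several ascending sequences through one EliasFano,
// reallocating only when the reused buffer is too small.
func buildMany(sequences [][]uint64) {
	first := sequences[0]
	ef := eliasfano32.NewEliasFano(uint64(len(first)), first[len(first)-1])
	for i, seq := range sequences {
		if i > 0 {
			// Reuses ef.data when cap(ef.data) >= totalWords; otherwise reallocates.
			ef.ResetForWrite(uint64(len(seq)), seq[len(seq)-1])
		}
		for _, v := range seq {
			ef.AddOffset(v) // offsets must be non-decreasing
		}
		ef.Build()
		// Serialize or query ef here, before the next ResetForWrite clears it.
	}
}
```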
39 changes: 39 additions & 0 deletions db/recsplit/multiencseq/sequence_builder.go
@@ -33,6 +33,8 @@ type SequenceBuilder struct {
smallBuf [SIMPLE_SEQUENCE_MAX_THRESHOLD]uint32 // rebased values for simple encoding (count <= 16)
smallCount uint8
rebasedEf *eliasfano32.EliasFano // direct rebased EF for large sequences (count > 16)
+ it1 SequenceIterator
+ it2 SequenceIterator
}

// Creates a new builder. The builder is not meant to be reused. The construction
@@ -60,6 +62,18 @@ func NewBuilder(baseNum, count, maxOffset uint64) *SequenceBuilder {
return &SequenceBuilder{baseNum: baseNum}
}

+ // Reset reinitializes the builder for a new sequence, reusing the existing object
+ // and its internal EliasFano allocation where possible.
+ // Same parameter semantics as NewBuilder.
+ func (b *SequenceBuilder) Reset(baseNum, count, maxOffset uint64) {
+ b.baseNum = baseNum
+ if b.rebasedEf != nil {
+ b.rebasedEf.ResetForWrite(count, maxOffset)
+ } else {
+ b.rebasedEf = eliasfano32.NewEliasFano(count, maxOffset)
+ }
+ }

func (b *SequenceBuilder) AddOffset(offset uint64) {
if b.rebasedEf != nil {
b.rebasedEf.AddOffset(offset - b.baseNum)
@@ -94,3 +108,28 @@ func (b *SequenceBuilder) simpleEncoding(buf []byte) []byte {

return buf
}

+ // Merge merges s1 and s2 into this builder, resetting it first.
+ // s1 and s2 must be pre-sorted with s1.Max() <= s2.Min().
+ // Call AppendBytes on the builder to serialize.
+ func (b *SequenceBuilder) Merge(s1, s2 *SequenceReader, outBaseNum uint64) error {
+ b.Reset(outBaseNum, s1.Count()+s2.Count(), s2.Max())
+ b.it1.Reset(s1, 0)
+ b.it2.Reset(s2, 0)
+ for b.it1.HasNext() {
+ v, err := b.it1.Next()
+ if err != nil {
+ return err
+ }
+ b.AddOffset(v)
+ }
+ for b.it2.HasNext() {
+ v, err := b.it2.Next()
+ if err != nil {
+ return err
+ }
+ b.AddOffset(v)
+ }
+ b.Build()
+ return nil
+ }
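
Together, `Reset` and `Merge` let a single builder and two reader values serve an entire merge loop without per-iteration allocation. A rough sketch of the reuse pattern (illustrative only; `encodedPair` and its fields are hypothetical stand-ins for pre-sorted, already-serialized sequences):

```go
type encodedPair struct {
	base1, base2, outBase uint64
	bytes1, bytes2        []byte
}

func mergePairs(pairs []encodedPair, emit func([]byte)) error {
	b := &multiencseq.SequenceBuilder{}
	var s1, s2 multiencseq.SequenceReader
	var buf []byte
	for _, p := range pairs {
		s1.Reset(p.base1, p.bytes1)
		s2.Reset(p.base2, p.bytes2)
		if err := b.Merge(&s1, &s2, p.outBase); err != nil {
			return err
		}
		buf = b.AppendBytes(buf[:0]) // reuse buf's capacity across iterations
		emit(buf)                    // consume before the next iteration overwrites it
	}
	return nil
}
```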
25 changes: 0 additions & 25 deletions db/recsplit/multiencseq/sequence_reader.go
@@ -194,31 +194,6 @@ func (s *SequenceReader) ReverseIterator(v int) stream.U64 {
panic(fmt.Sprintf("unknown sequence encoding: %d", s.currentEnc))
}

- // Merge merges the other sequence into this one, returning a built SequenceBuilder
- // with outBaseNum. Both sequences must be pre-sorted with s.Max() <= other.Min().
- // Call AppendBytes on the result to serialize.
- func (s *SequenceReader) Merge(other *SequenceReader, outBaseNum uint64, it1, it2 *SequenceIterator) (*SequenceBuilder, error) {
- it1.Reset(s, 0)
- it2.Reset(other, 0)
- newSeq := NewBuilder(outBaseNum, s.Count()+other.Count(), other.Max())
- for it1.HasNext() {
- v, err := it1.Next()
- if err != nil {
- return nil, err
- }
- newSeq.AddOffset(v)
- }
- for it2.HasNext() {
- v, err := it2.Next()
- if err != nil {
- return nil, err
- }
- newSeq.AddOffset(v)
- }
- newSeq.Build()
- return newSeq, nil
- }

// SequenceIterator is a reusable iterator for SequenceReader.
// Create as a value and call Reset() to (re)initialize — avoids heap allocation
// for SimpleEncoding (the common case).
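The value-type iterator pattern that comment describes, sketched in isolation (illustrative; assumes an already-initialized reader):

```go
// sumOffsets iterates a sequence with a stack-allocated iterator;
// r must already have been Reset with an encoded sequence.
func sumOffsets(r *multiencseq.SequenceReader) (uint64, error) {
	var it multiencseq.SequenceIterator // plain value: no heap allocation
	it.Reset(r, 0)
	var sum uint64
	for it.HasNext() {
		v, err := it.Next()
		if err != nil {
			return 0, err
		}
		sum += v
	}
	return sum, nil
}
```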
16 changes: 8 additions & 8 deletions db/state/merge.go
@@ -650,7 +650,8 @@ func (iit *InvertedIndexRoTx) mergeFiles(ctx context.Context, files []*FilesItem
var keyBuf, valBuf []byte
var lastKey, lastVal []byte
preSeq, mergeSeq := &multiencseq.SequenceReader{}, &multiencseq.SequenceReader{}
- preIt, mergeIt := &multiencseq.SequenceIterator{}, &multiencseq.SequenceIterator{}
+ preIt := &multiencseq.SequenceIterator{}
+ builder := &multiencseq.SequenceBuilder{}
i := uint64(0)
for cp.Len() > 0 {
lastKey = append(lastKey[:0], cp[0].key...)
@@ -659,16 +660,16 @@
// Pre-rebase the first sequence
preSeq.Reset(cp[0].startTxNum, lastVal)
preIt.Reset(preSeq, 0)
- newSeq := multiencseq.NewBuilder(startTxNum, preSeq.Count(), preSeq.Max())
+ builder.Reset(startTxNum, preSeq.Count(), preSeq.Max())
for preIt.HasNext() {
v, err := preIt.Next()
if err != nil {
return nil, err
}
- newSeq.AddOffset(v)
+ builder.AddOffset(v)
}
- newSeq.Build()
- lastVal = newSeq.AppendBytes(nil)
+ builder.Build()
+ lastVal = builder.AppendBytes(lastVal[:0])
var mergedOnce bool

// Advance all the items that have this key (including the top)
Expand All @@ -677,11 +678,10 @@ func (iit *InvertedIndexRoTx) mergeFiles(ctx context.Context, files []*FilesItem
if mergedOnce {
mergeSeq.Reset(ci1.startTxNum, ci1.val)
preSeq.Reset(startTxNum, lastVal)
- merged, mergeErr := mergeSeq.Merge(preSeq, startTxNum, mergeIt, preIt)
- if mergeErr != nil {
+ if mergeErr := builder.Merge(mergeSeq, preSeq, startTxNum); mergeErr != nil {
return nil, fmt.Errorf("merge %s inverted index: %w", iit.ii.FilenameBase, mergeErr)
}
- lastVal = merged.AppendBytes(nil)
+ lastVal = builder.AppendBytes(lastVal[:0])
} else {
mergedOnce = true
}
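The `AppendBytes(lastVal[:0])` change above relies on a standard Go idiom: re-slicing to length zero keeps the backing array, so once the buffer has grown to its steady-state size, later iterations append into it without allocating. In isolation:

```go
package main

import "fmt"

func main() {
	var buf []byte
	for i := 0; i < 3; i++ {
		// buf[:0] keeps the backing array; after the first growth,
		// append writes in place instead of allocating a new slice.
		buf = append(buf[:0], byte(i), byte(i+1))
		fmt.Println(len(buf), cap(buf)) // cap stays stable after iteration 0
	}
}
```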
4 changes: 2 additions & 2 deletions db/state/merge_test.go
@@ -748,10 +748,10 @@ func Test_mergeEliasFano(t *testing.T) {
}

var seq1, seq2 multiencseq.SequenceReader
- var it1, it2 multiencseq.SequenceIterator
seq1.Reset(0, firstBytes)
seq2.Reset(0, secondBytes)
- mergedSeq, err := seq1.Merge(&seq2, 0, &it1, &it2)
+ var mergedSeq multiencseq.SequenceBuilder
+ err := mergedSeq.Merge(&seq1, &seq2, 0)
require.NoError(t, err)
menc := mergedSeq.AppendBytes(nil)
