Skip to content

Commit 23079fd

Browse files
perf(ui): improve large-file ingestion and scrolling stability
1 parent 75090e0 commit 23079fd

2 files changed

Lines changed: 45 additions & 6 deletions

File tree

internal/ui/model.go

Lines changed: 20 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -133,7 +133,16 @@ type Model struct {
133133
func InitialModel(filename string, lines []string, reader io.Reader) Model {
134134
var streamer *Streamer
135135
if reader != nil {
136-
streamer = NewStreamer(reader)
136+
cfg := StreamerConfig{
137+
BatchLines: 200,
138+
FlushEvery: 50 * time.Millisecond,
139+
}
140+
// File startup backfill should favor throughput over ultra-low latency.
141+
if filename != "Stdin" {
142+
cfg.BatchLines = 5000
143+
cfg.FlushEvery = 100 * time.Millisecond
144+
}
145+
streamer = NewStreamerWithConfig(reader, cfg)
137146
}
138147

139148
ti := textinput.New()
@@ -215,10 +224,7 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
215224
// Handle error?
216225
} else if msg.NewContent != "" {
217226
// Append new content
218-
newLines := strings.Split(msg.NewContent, "\n")
219-
// Handle edge case where last line was incomplete?
220-
// For simplicity, just append. Ideally we handle partial lines.
221-
227+
newLines := splitIncomingContent(msg.NewContent)
222228
m.appendIncomingLines(newLines)
223229

224230
m.fileSize = msg.NewOffset
@@ -708,6 +714,15 @@ func (m *Model) appendIncomingLines(newLines []string) {
708714
m.applyFilters(false)
709715
}
710716

717+
// splitIncomingContent breaks a freshly read chunk of the watched file into
// individual lines. Appends to a file normally end with '\n', which would
// otherwise yield a phantom empty final element from the split; stripping the
// terminator first avoids injecting a blank line into the view.
func splitIncomingContent(content string) []string {
	if content == "" {
		return nil
	}
	return strings.Split(strings.TrimSuffix(content, "\n"), "\n")
}
725+
711726
func (m *Model) applyFilters(resetView bool) {
712727
var filtered []string
713728
// Directly iterate over originalLines

internal/ui/stream.go

Lines changed: 25 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,11 @@ import (
88
tea "github.com/charmbracelet/bubbletea"
99
)
1010

11+
// Default tuning for the background line streamer: a batch is flushed to the
// UI once it reaches defaultStreamBatchLines, or — while lines are still
// arriving — once defaultStreamFlushEvery has elapsed since the previous send.
const (
	defaultStreamBatchLines = 1000
	defaultStreamFlushEvery = 50 * time.Millisecond
)
15+
1116
// LogChunkMsg can carry a chunk of raw string or a slice of lines.
1217
// For efficient TUI updates, sending a batch of lines is preferred.
1318
type LogChunkMsg struct {
@@ -21,7 +26,26 @@ type Streamer struct {
2126
err chan error
2227
}
2328

29+
// StreamerConfig controls how aggressively a Streamer batches scanned lines
// before handing them to the UI. The zero value is usable: non-positive
// fields are replaced with the package defaults by NewStreamerWithConfig.
type StreamerConfig struct {
	// BatchLines is the maximum number of lines accumulated before a flush.
	// Values <= 0 fall back to defaultStreamBatchLines.
	BatchLines int
	// FlushEvery forces a flush of a partially filled batch once this much
	// time has passed since the last send (checked as new lines arrive, so
	// an idle input produces no flushes). Values <= 0 fall back to
	// defaultStreamFlushEvery.
	FlushEvery time.Duration
}
33+
2434
func NewStreamer(r io.Reader) *Streamer {
35+
return NewStreamerWithConfig(r, StreamerConfig{
36+
BatchLines: defaultStreamBatchLines,
37+
FlushEvery: defaultStreamFlushEvery,
38+
})
39+
}
40+
41+
func NewStreamerWithConfig(r io.Reader, cfg StreamerConfig) *Streamer {
42+
if cfg.BatchLines <= 0 {
43+
cfg.BatchLines = defaultStreamBatchLines
44+
}
45+
if cfg.FlushEvery <= 0 {
46+
cfg.FlushEvery = defaultStreamFlushEvery
47+
}
48+
2549
s := &Streamer{
2650
lines: make(chan []string),
2751
err: make(chan error),
@@ -38,7 +62,7 @@ func NewStreamer(r io.Reader) *Streamer {
3862
batch = append(batch, scanner.Text())
3963

4064
// Flush if batch is big enough or time passed
41-
if len(batch) >= 100 || time.Since(lastSend) > 50*time.Millisecond {
65+
if len(batch) >= cfg.BatchLines || time.Since(lastSend) > cfg.FlushEvery {
4266
s.lines <- batch
4367
batch = nil // Reset
4468
lastSend = time.Now()

0 commit comments

Comments
 (0)