Skip to content

Commit c46bb16

Browse files
geoknee and sebastianst authored
op-batcher: introduce PREFER_LOCAL_SAFE_L2 config var (#14587)
* op-batcher: introduce PREFER_LOCAL_SAFE_L2 config var * lint * Apply suggestions from code review Co-authored-by: Sebastian Stammler <[email protected]> * lint --------- Co-authored-by: Sebastian Stammler <[email protected]>
1 parent d8b84ae commit c46bb16

File tree

6 files changed

+134
-55
lines changed

6 files changed

+134
-55
lines changed

op-batcher/batcher/config.go

+4
Original file line numberDiff line numberDiff line change
@@ -110,6 +110,9 @@ type CLIConfig struct {
110110
// ThrottleAlwaysBlockSize is the total per-block DA limit to always imposing on block building.
111111
ThrottleAlwaysBlockSize uint64
112112

113+
// PreferLocalSafeL2 triggers the batcher to load blocks from the sequencer based on the LocalSafeL2 SyncStatus field (instead of the SafeL2 field).
114+
PreferLocalSafeL2 bool
115+
113116
// TestUseMaxTxSizeForBlobs allows to set the blob size with MaxL1TxSize.
114117
// Should only be used for testing purposes.
115118
TestUseMaxTxSizeForBlobs bool
@@ -215,5 +218,6 @@ func NewConfig(ctx *cli.Context) *CLIConfig {
215218
ThrottleTxSize: ctx.Uint64(flags.ThrottleTxSizeFlag.Name),
216219
ThrottleBlockSize: ctx.Uint64(flags.ThrottleBlockSizeFlag.Name),
217220
ThrottleAlwaysBlockSize: ctx.Uint64(flags.ThrottleAlwaysBlockSizeFlag.Name),
221+
PreferLocalSafeL2: ctx.Bool(flags.PreferLocalSafeL2Flag.Name),
218222
}
219223
}

op-batcher/batcher/driver.go

+1-1
Original file line numberDiff line numberDiff line change
@@ -422,7 +422,7 @@ func (l *BatchSubmitter) syncAndPrune(syncStatus *eth.SyncStatus) *inclusiveBloc
422422
defer l.channelMgrMutex.Unlock()
423423

424424
// Decide appropriate actions
425-
syncActions, outOfSync := computeSyncActions(*syncStatus, l.prevCurrentL1, l.channelMgr.blocks, l.channelMgr.channelQueue, l.Log)
425+
syncActions, outOfSync := computeSyncActions(*syncStatus, l.prevCurrentL1, l.channelMgr.blocks, l.channelMgr.channelQueue, l.Log, l.Config.PreferLocalSafeL2)
426426

427427
if outOfSync {
428428
// If the sequencer is out of sync

op-batcher/batcher/service.go

+4
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,8 @@ type BatcherConfig struct {
4848
// For throttling DA. See CLIConfig in config.go for details on these parameters.
4949
ThrottleThreshold, ThrottleTxSize uint64
5050
ThrottleBlockSize, ThrottleAlwaysBlockSize uint64
51+
52+
PreferLocalSafeL2 bool
5153
}
5254

5355
// BatcherService represents a full batch-submitter instance and its resources,
@@ -111,6 +113,8 @@ func (bs *BatcherService) initFromCLIConfig(ctx context.Context, version string,
111113
bs.ThrottleBlockSize = cfg.ThrottleBlockSize
112114
bs.ThrottleAlwaysBlockSize = cfg.ThrottleAlwaysBlockSize
113115

116+
bs.PreferLocalSafeL2 = cfg.PreferLocalSafeL2
117+
114118
optsFromRPC, err := bs.initRPCClients(ctx, cfg)
115119
if err != nil {
116120
return err

op-batcher/batcher/sync_actions.go

+22-8
Original file line numberDiff line numberDiff line change
@@ -47,15 +47,29 @@ func (s syncActions) TerminalString() string {
4747
// state of the batcher (blocks and channels), the new sync status, and the previous current L1 block. The actions are returned
4848
// in a struct specifying the number of blocks to prune, the number of channels to prune, whether to wait for node sync, the block
4949
// range to load into the local state, and whether to clear the state entirely. Returns a boolean indicating if the sequencer is out of sync.
50-
func computeSyncActions[T channelStatuser](newSyncStatus eth.SyncStatus, prevCurrentL1 eth.L1BlockRef, blocks queue.Queue[*types.Block], channels []T, l log.Logger) (syncActions, bool) {
50+
func computeSyncActions[T channelStatuser](
51+
newSyncStatus eth.SyncStatus,
52+
prevCurrentL1 eth.L1BlockRef,
53+
blocks queue.Queue[*types.Block],
54+
channels []T,
55+
l log.Logger,
56+
preferLocalSafeL2 bool,
57+
) (syncActions, bool) {
5158

5259
m := l.With(
5360
"syncStatus.headL1", newSyncStatus.HeadL1,
5461
"syncStatus.currentL1", newSyncStatus.CurrentL1,
5562
"syncStatus.localSafeL2", newSyncStatus.LocalSafeL2,
63+
"syncStatus.safeL2", newSyncStatus.SafeL2,
5664
"syncStatus.unsafeL2", newSyncStatus.UnsafeL2,
5765
)
5866

67+
safeL2 := newSyncStatus.SafeL2
68+
if preferLocalSafeL2 {
69+
// This is preferred when running interop, but not yet enabled by default.
70+
safeL2 = newSyncStatus.LocalSafeL2
71+
}
72+
5973
// PART 1: Initial checks on the sync status
6074
if newSyncStatus.HeadL1 == (eth.L1BlockRef{}) {
6175
m.Warn("empty sync status")
@@ -69,8 +83,8 @@ func computeSyncActions[T channelStatuser](newSyncStatus eth.SyncStatus, prevCur
6983
}
7084

7185
var allUnsafeBlocks *inclusiveBlockRange
72-
if newSyncStatus.UnsafeL2.Number > newSyncStatus.LocalSafeL2.Number {
73-
allUnsafeBlocks = &inclusiveBlockRange{newSyncStatus.LocalSafeL2.Number + 1, newSyncStatus.UnsafeL2.Number}
86+
if newSyncStatus.UnsafeL2.Number > safeL2.Number {
87+
allUnsafeBlocks = &inclusiveBlockRange{safeL2.Number + 1, newSyncStatus.UnsafeL2.Number}
7488
}
7589

7690
// PART 2: checks involving only the oldest block in the state
@@ -89,12 +103,12 @@ func computeSyncActions[T channelStatuser](newSyncStatus eth.SyncStatus, prevCur
89103
// and we need to start over, loading all unsafe blocks
90104
// from the sequencer.
91105
startAfresh := syncActions{
92-
clearState: &newSyncStatus.LocalSafeL2.L1Origin,
106+
clearState: &safeL2.L1Origin,
93107
blocksToLoad: allUnsafeBlocks,
94108
}
95109

96110
oldestBlockInStateNum := oldestBlockInState.NumberU64()
97-
nextSafeBlockNum := newSyncStatus.LocalSafeL2.Number + 1
111+
nextSafeBlockNum := safeL2.Number + 1
98112

99113
if nextSafeBlockNum < oldestBlockInStateNum {
100114
m.Warn("next safe block is below oldest block in state",
@@ -120,7 +134,7 @@ func computeSyncActions[T channelStatuser](newSyncStatus eth.SyncStatus, prevCur
120134
return startAfresh, false
121135
}
122136

123-
if numBlocksToDequeue > 0 && blocks[numBlocksToDequeue-1].Hash() != newSyncStatus.LocalSafeL2.Hash {
137+
if numBlocksToDequeue > 0 && blocks[numBlocksToDequeue-1].Hash() != safeL2.Hash {
124138
m.Warn("safe chain reorg, clearing channel manager state",
125139
"syncActions", startAfresh,
126140
"existingBlock", eth.ToBlockID(blocks[numBlocksToDequeue-1]))
@@ -132,7 +146,7 @@ func computeSyncActions[T channelStatuser](newSyncStatus eth.SyncStatus, prevCur
132146
if ch.isFullySubmitted() &&
133147
!ch.isTimedOut() &&
134148
newSyncStatus.CurrentL1.Number > ch.MaxInclusionBlock() &&
135-
newSyncStatus.LocalSafeL2.Number < ch.LatestL2().Number {
149+
safeL2.Number < ch.LatestL2().Number {
136150
// Safe head did not make the expected progress
137151
// for a fully submitted channel. This indicates
138152
// that the derivation pipeline may have stalled
@@ -147,7 +161,7 @@ func computeSyncActions[T channelStatuser](newSyncStatus eth.SyncStatus, prevCur
147161
// PART 5: happy path
148162
numChannelsToPrune := 0
149163
for _, ch := range channels {
150-
if ch.LatestL2().Number > newSyncStatus.LocalSafeL2.Number {
164+
if ch.LatestL2().Number > safeL2.Number {
151165
// If the channel has blocks which are not yet safe
152166
// we do not want to prune it.
153167
break

op-batcher/batcher/sync_actions_test.go

+96-46
Original file line numberDiff line numberDiff line change
@@ -56,6 +56,7 @@ func TestBatchSubmitter_computeSyncActions(t *testing.T) {
5656
}
5757

5858
happyCaseLogs := []string{"computed sync actions"}
59+
noBlocksLogs := []string{"no blocks in state"}
5960

6061
type TestCase struct {
6162
name string
@@ -68,6 +69,7 @@ func TestBatchSubmitter_computeSyncActions(t *testing.T) {
6869
expected syncActions
6970
expectedSeqOutOfSync bool
7071
expectedLogs []string
72+
preferLocalSafeL2 bool
7173
}
7274

7375
testCases := []TestCase{
@@ -95,10 +97,10 @@ func TestBatchSubmitter_computeSyncActions(t *testing.T) {
9597
// although the sequencer has derived up the same
9698
// L1 block height, it derived fewer safe L2 blocks.
9799
newSyncStatus: eth.SyncStatus{
98-
HeadL1: eth.BlockRef{Number: 6},
99-
CurrentL1: eth.BlockRef{Number: 1},
100-
LocalSafeL2: eth.L2BlockRef{Number: 100, L1Origin: eth.BlockID{Number: 1}},
101-
UnsafeL2: eth.L2BlockRef{Number: 109},
100+
HeadL1: eth.BlockRef{Number: 6},
101+
CurrentL1: eth.BlockRef{Number: 1},
102+
SafeL2: eth.L2BlockRef{Number: 100, L1Origin: eth.BlockID{Number: 1}},
103+
UnsafeL2: eth.L2BlockRef{Number: 109},
102104
},
103105
prevCurrentL1: eth.BlockRef{Number: 1},
104106
blocks: queue.Queue[*types.Block]{block102, block103}, // note absence of block101
@@ -113,10 +115,10 @@ func TestBatchSubmitter_computeSyncActions(t *testing.T) {
113115
// This can happen if another batcher instance got some blocks
114116
// included in the safe chain:
115117
newSyncStatus: eth.SyncStatus{
116-
HeadL1: eth.BlockRef{Number: 6},
117-
CurrentL1: eth.BlockRef{Number: 2},
118-
LocalSafeL2: eth.L2BlockRef{Number: 104, L1Origin: eth.BlockID{Number: 1}},
119-
UnsafeL2: eth.L2BlockRef{Number: 109},
118+
HeadL1: eth.BlockRef{Number: 6},
119+
CurrentL1: eth.BlockRef{Number: 2},
120+
SafeL2: eth.L2BlockRef{Number: 104, L1Origin: eth.BlockID{Number: 1}},
121+
UnsafeL2: eth.L2BlockRef{Number: 109},
120122
},
121123
prevCurrentL1: eth.BlockRef{Number: 1},
122124
blocks: queue.Queue[*types.Block]{block101, block102, block103},
@@ -131,10 +133,10 @@ func TestBatchSubmitter_computeSyncActions(t *testing.T) {
131133
// This can happen if there is an L1 reorg, the safe chain is at an acceptable
132134
// height but it does not descend from the blocks in state:
133135
newSyncStatus: eth.SyncStatus{
134-
HeadL1: eth.BlockRef{Number: 5},
135-
CurrentL1: eth.BlockRef{Number: 2},
136-
LocalSafeL2: eth.L2BlockRef{Number: 103, Hash: block101.Hash(), L1Origin: eth.BlockID{Number: 1}}, // note hash mismatch
137-
UnsafeL2: eth.L2BlockRef{Number: 109},
136+
HeadL1: eth.BlockRef{Number: 5},
137+
CurrentL1: eth.BlockRef{Number: 2},
138+
SafeL2: eth.L2BlockRef{Number: 103, Hash: block101.Hash(), L1Origin: eth.BlockID{Number: 1}}, // note hash mismatch
139+
UnsafeL2: eth.L2BlockRef{Number: 109},
138140
},
139141
prevCurrentL1: eth.BlockRef{Number: 1},
140142
blocks: queue.Queue[*types.Block]{block101, block102, block103},
@@ -149,10 +151,10 @@ func TestBatchSubmitter_computeSyncActions(t *testing.T) {
149151
// This could happen if the batcher unexpectedly violates the
150152
// Holocene derivation rules:
151153
newSyncStatus: eth.SyncStatus{
152-
HeadL1: eth.BlockRef{Number: 3},
153-
CurrentL1: eth.BlockRef{Number: 2},
154-
LocalSafeL2: eth.L2BlockRef{Number: 101, Hash: block101.Hash(), L1Origin: eth.BlockID{Number: 1}},
155-
UnsafeL2: eth.L2BlockRef{Number: 109},
154+
HeadL1: eth.BlockRef{Number: 3},
155+
CurrentL1: eth.BlockRef{Number: 2},
156+
SafeL2: eth.L2BlockRef{Number: 101, Hash: block101.Hash(), L1Origin: eth.BlockID{Number: 1}},
157+
UnsafeL2: eth.L2BlockRef{Number: 109},
156158
},
157159
prevCurrentL1: eth.BlockRef{Number: 1},
158160
blocks: queue.Queue[*types.Block]{block101, block102, block103},
@@ -166,10 +168,10 @@ func TestBatchSubmitter_computeSyncActions(t *testing.T) {
166168
{name: "failed to make expected progress (unsafe=safe)",
167169
// Edge case where unsafe = safe
168170
newSyncStatus: eth.SyncStatus{
169-
HeadL1: eth.BlockRef{Number: 3},
170-
CurrentL1: eth.BlockRef{Number: 2},
171-
LocalSafeL2: eth.L2BlockRef{Number: 101, Hash: block101.Hash(), L1Origin: eth.BlockID{Number: 1}},
172-
UnsafeL2: eth.L2BlockRef{Number: 101},
171+
HeadL1: eth.BlockRef{Number: 3},
172+
CurrentL1: eth.BlockRef{Number: 2},
173+
SafeL2: eth.L2BlockRef{Number: 101, Hash: block101.Hash(), L1Origin: eth.BlockID{Number: 1}},
174+
UnsafeL2: eth.L2BlockRef{Number: 101},
173175
},
174176
prevCurrentL1: eth.BlockRef{Number: 1},
175177
blocks: queue.Queue[*types.Block]{block102, block103},
@@ -185,10 +187,10 @@ func TestBatchSubmitter_computeSyncActions(t *testing.T) {
185187
// and we didn't submit or have any txs confirmed since
186188
// the last sync.
187189
newSyncStatus: eth.SyncStatus{
188-
HeadL1: eth.BlockRef{Number: 4},
189-
CurrentL1: eth.BlockRef{Number: 1},
190-
LocalSafeL2: eth.L2BlockRef{Number: 100},
191-
UnsafeL2: eth.L2BlockRef{Number: 109},
190+
HeadL1: eth.BlockRef{Number: 4},
191+
CurrentL1: eth.BlockRef{Number: 1},
192+
SafeL2: eth.L2BlockRef{Number: 100},
193+
UnsafeL2: eth.L2BlockRef{Number: 109},
192194
},
193195
prevCurrentL1: eth.BlockRef{Number: 1},
194196
blocks: queue.Queue[*types.Block]{block101, block102, block103},
@@ -201,10 +203,10 @@ func TestBatchSubmitter_computeSyncActions(t *testing.T) {
201203
{name: "no blocks",
202204
// This happens when the batcher is starting up for the first time
203205
newSyncStatus: eth.SyncStatus{
204-
HeadL1: eth.BlockRef{Number: 5},
205-
CurrentL1: eth.BlockRef{Number: 2},
206-
LocalSafeL2: eth.L2BlockRef{Number: 103, Hash: block103.Hash()},
207-
UnsafeL2: eth.L2BlockRef{Number: 109},
206+
HeadL1: eth.BlockRef{Number: 5},
207+
CurrentL1: eth.BlockRef{Number: 2},
208+
SafeL2: eth.L2BlockRef{Number: 103, Hash: block103.Hash()},
209+
UnsafeL2: eth.L2BlockRef{Number: 109},
208210
},
209211
prevCurrentL1: eth.BlockRef{Number: 1},
210212
blocks: queue.Queue[*types.Block]{},
@@ -217,10 +219,10 @@ func TestBatchSubmitter_computeSyncActions(t *testing.T) {
217219
{name: "happy path",
218220
// This happens when the safe chain is being progressed as expected:
219221
newSyncStatus: eth.SyncStatus{
220-
HeadL1: eth.BlockRef{Number: 5},
221-
CurrentL1: eth.BlockRef{Number: 2},
222-
LocalSafeL2: eth.L2BlockRef{Number: 103, Hash: block103.Hash()},
223-
UnsafeL2: eth.L2BlockRef{Number: 109},
222+
HeadL1: eth.BlockRef{Number: 5},
223+
CurrentL1: eth.BlockRef{Number: 2},
224+
SafeL2: eth.L2BlockRef{Number: 103, Hash: block103.Hash()},
225+
UnsafeL2: eth.L2BlockRef{Number: 109},
224226
},
225227
prevCurrentL1: eth.BlockRef{Number: 1},
226228
blocks: queue.Queue[*types.Block]{block101, block102, block103},
@@ -234,10 +236,10 @@ func TestBatchSubmitter_computeSyncActions(t *testing.T) {
234236
},
235237
{name: "happy path + multiple channels",
236238
newSyncStatus: eth.SyncStatus{
237-
HeadL1: eth.BlockRef{Number: 5},
238-
CurrentL1: eth.BlockRef{Number: 2},
239-
LocalSafeL2: eth.L2BlockRef{Number: 103, Hash: block103.Hash()},
240-
UnsafeL2: eth.L2BlockRef{Number: 109},
239+
HeadL1: eth.BlockRef{Number: 5},
240+
CurrentL1: eth.BlockRef{Number: 2},
241+
SafeL2: eth.L2BlockRef{Number: 103, Hash: block103.Hash()},
242+
UnsafeL2: eth.L2BlockRef{Number: 109},
241243
},
242244
prevCurrentL1: eth.BlockRef{Number: 1},
243245
blocks: queue.Queue[*types.Block]{block101, block102, block103, block104},
@@ -251,23 +253,23 @@ func TestBatchSubmitter_computeSyncActions(t *testing.T) {
251253
},
252254
{name: "no progress + unsafe=safe",
253255
newSyncStatus: eth.SyncStatus{
254-
HeadL1: eth.BlockRef{Number: 5},
255-
CurrentL1: eth.BlockRef{Number: 2},
256-
LocalSafeL2: eth.L2BlockRef{Number: 100},
257-
UnsafeL2: eth.L2BlockRef{Number: 100},
256+
HeadL1: eth.BlockRef{Number: 5},
257+
CurrentL1: eth.BlockRef{Number: 2},
258+
SafeL2: eth.L2BlockRef{Number: 100},
259+
UnsafeL2: eth.L2BlockRef{Number: 100},
258260
},
259261
prevCurrentL1: eth.BlockRef{Number: 1},
260262
blocks: queue.Queue[*types.Block]{},
261263
channels: []channelStatuser{},
262264
expected: syncActions{},
263-
expectedLogs: []string{"no blocks in state"},
265+
expectedLogs: noBlocksLogs,
264266
},
265267
{name: "no progress + unsafe=safe + blocks in state",
266268
newSyncStatus: eth.SyncStatus{
267-
HeadL1: eth.BlockRef{Number: 5},
268-
CurrentL1: eth.BlockRef{Number: 2},
269-
LocalSafeL2: eth.L2BlockRef{Number: 101, Hash: block101.Hash()},
270-
UnsafeL2: eth.L2BlockRef{Number: 101},
269+
HeadL1: eth.BlockRef{Number: 5},
270+
CurrentL1: eth.BlockRef{Number: 2},
271+
SafeL2: eth.L2BlockRef{Number: 101, Hash: block101.Hash()},
272+
UnsafeL2: eth.L2BlockRef{Number: 101},
271273
},
272274
prevCurrentL1: eth.BlockRef{Number: 1},
273275
blocks: queue.Queue[*types.Block]{block101},
@@ -277,6 +279,54 @@ func TestBatchSubmitter_computeSyncActions(t *testing.T) {
277279
},
278280
expectedLogs: happyCaseLogs,
279281
},
282+
{name: "localSafeL2 > safeL2, preferLocalSafeL2=false",
283+
newSyncStatus: eth.SyncStatus{
284+
HeadL1: eth.BlockRef{Number: 5},
285+
CurrentL1: eth.BlockRef{Number: 2},
286+
SafeL2: eth.L2BlockRef{Number: 103, Hash: block103.Hash()},
287+
LocalSafeL2: eth.L2BlockRef{Number: 104, Hash: block104.Hash()},
288+
UnsafeL2: eth.L2BlockRef{Number: 109},
289+
},
290+
prevCurrentL1: eth.BlockRef{Number: 1},
291+
blocks: queue.Queue[*types.Block]{},
292+
channels: []channelStatuser{},
293+
expected: syncActions{
294+
blocksToLoad: &inclusiveBlockRange{104, 109},
295+
},
296+
expectedLogs: noBlocksLogs,
297+
},
298+
{name: "localSafeL2 > safeL2, preferLocalSafeL2=true",
299+
preferLocalSafeL2: true,
300+
newSyncStatus: eth.SyncStatus{
301+
HeadL1: eth.BlockRef{Number: 5},
302+
CurrentL1: eth.BlockRef{Number: 2},
303+
SafeL2: eth.L2BlockRef{Number: 103, Hash: block103.Hash()},
304+
LocalSafeL2: eth.L2BlockRef{Number: 104, Hash: block104.Hash()},
305+
UnsafeL2: eth.L2BlockRef{Number: 109},
306+
},
307+
prevCurrentL1: eth.BlockRef{Number: 1},
308+
blocks: queue.Queue[*types.Block]{},
309+
channels: []channelStatuser{},
310+
expected: syncActions{
311+
blocksToLoad: &inclusiveBlockRange{105, 109},
312+
},
313+
expectedLogs: noBlocksLogs,
314+
},
315+
{name: "LocalSafeL2=0,SafeL2>0", // This shouldn't ever happen, but has occurred due to bugs
316+
newSyncStatus: eth.SyncStatus{
317+
HeadL1: eth.BlockRef{Number: 5},
318+
CurrentL1: eth.BlockRef{Number: 2},
319+
SafeL2: eth.L2BlockRef{Number: 104, Hash: block104.Hash()},
320+
UnsafeL2: eth.L2BlockRef{Number: 109},
321+
},
322+
prevCurrentL1: eth.BlockRef{Number: 1},
323+
blocks: queue.Queue[*types.Block]{},
324+
channels: []channelStatuser{},
325+
expected: syncActions{
326+
blocksToLoad: &inclusiveBlockRange{105, 109},
327+
},
328+
expectedLogs: noBlocksLogs,
329+
},
280330
}
281331

282332
for _, tc := range testCases {
@@ -285,7 +335,7 @@ func TestBatchSubmitter_computeSyncActions(t *testing.T) {
285335
l, h := testlog.CaptureLogger(t, log.LevelDebug)
286336

287337
result, outOfSync := computeSyncActions(
288-
tc.newSyncStatus, tc.prevCurrentL1, tc.blocks, tc.channels, l,
338+
tc.newSyncStatus, tc.prevCurrentL1, tc.blocks, tc.channels, l, tc.preferLocalSafeL2,
289339
)
290340

291341
require.Equal(t, tc.expected, result, "unexpected actions")

op-batcher/flags/flags.go

+7
Original file line numberDiff line numberDiff line change
@@ -180,6 +180,12 @@ var (
180180
Value: 130_000, // should be larger than the builder's max-l2-tx-size to prevent endlessly throttling some txs
181181
EnvVars: prefixEnvVars("THROTTLE_ALWAYS_BLOCK_SIZE"),
182182
}
183+
PreferLocalSafeL2Flag = &cli.BoolFlag{
184+
Name: "prefer-local-safe-l2",
185+
Usage: "Load unsafe blocks higher than the sequencer's LocalSafeL2 instead of SafeL2",
186+
Value: false,
187+
EnvVars: prefixEnvVars("PREFER_LOCAL_SAFE_L2"),
188+
}
183189
// Legacy Flags
184190
SequencerHDPathFlag = txmgr.SequencerHDPathFlag
185191
)
@@ -212,6 +218,7 @@ var optionalFlags = []cli.Flag{
212218
ThrottleTxSizeFlag,
213219
ThrottleBlockSizeFlag,
214220
ThrottleAlwaysBlockSizeFlag,
221+
PreferLocalSafeL2Flag,
215222
}
216223

217224
func init() {

0 commit comments

Comments (0)