@@ -160,23 +160,6 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
160
160
tokenStr := token .String ()
161
161
if ext , ok := externalLexers [tokenStr ]; ok {
162
162
emit (tokenStartLine , false , nil )
163
- // for {
164
- // tok, err := ext.Lex()
165
-
166
- // if errors.Is(err, StopExtLexer{}) {
167
- // break
168
- // }
169
-
170
- // if err != nil {
171
- // emit(tokenStartLine, false, &ParseError{
172
- // File: &lexerFile,
173
- // What: err.Error(),
174
- // Line: &tokenLine, // TODO: I See this used but never updated
175
- // })
176
- // continue
177
- // }
178
- // tokenCh <- tok
179
- // }
180
163
externalScanner .tokenLine = tokenLine
181
164
extTokenCh := ext .Lex (tokenStr )
182
165
for tok := range extTokenCh {
@@ -341,25 +324,23 @@ func (ll *LuaLexer) Register(s LexScanner) []string {
341
324
}
342
325
}
343
326
327
+ //nolint:funlen
344
328
func (ll * LuaLexer ) Lex (matchedToken string ) <- chan NgxToken {
345
329
tokenCh := make (chan NgxToken )
346
330
347
331
tokenDepth := 0
348
- // var leadingWhitespace strings.Builder
349
332
350
333
go func () {
351
334
defer close (tokenCh )
352
335
var tok strings.Builder
353
336
var inQuotes bool
354
- var startedText bool
355
337
356
338
if matchedToken == "set_by_lua_block" {
357
339
arg := ""
358
340
for {
359
341
if ! ll .s .Scan () {
360
342
return
361
343
}
362
-
363
344
next := ll .s .Text ()
364
345
if isSpace (next ) {
365
346
if arg != "" {
@@ -376,55 +357,44 @@ func (ll *LuaLexer) Lex(matchedToken string) <-chan NgxToken {
376
357
}
377
358
arg += next
378
359
}
379
- }
360
+ // skip leading whitespace after the return value
361
+ for {
362
+ if ! ll .s .Scan () {
363
+ return
364
+ }
365
+ next := ll .s .Text ()
380
366
381
- for {
382
- if ! ll .s .Scan () {
383
- return
384
- }
385
- if next := ll .s .Text (); ! isSpace (next ) {
386
- break
367
+ if ! isSpace (next ) {
368
+ if next != "{" {
369
+ lineno := ll .s .Line ()
370
+ tokenCh <- NgxToken {Error : & ParseError {File : & lexerFile , What : `unexpected "}"` , Line : & lineno }}
371
+ return
372
+ }
373
+ tokenDepth ++
374
+ break
375
+ }
387
376
}
388
377
}
389
378
390
- if ! ll .s .Scan () {
391
- // TODO: err?
392
- return
393
- }
394
-
395
- if next := ll .s .Text (); next != "{" {
396
- // TODO: Return error, need { to open blockj
397
- }
398
-
399
- tokenDepth += 1
400
-
401
- // TODO: check for opening brace?
402
-
379
+ // Grab everything in Lua block as a single token and watch for curly brace '{' in strings
403
380
for {
404
381
if ! ll .s .Scan () {
405
- return // shrug emoji
382
+ return
406
383
}
407
384
385
+ next := ll .s .Text ()
408
386
if err := ll .s .Err (); err != nil {
409
387
lineno := ll .s .Line ()
410
388
tokenCh <- NgxToken {Error : & ParseError {File : & lexerFile , What : err .Error (), Line : & lineno }}
411
389
412
390
}
413
391
414
- next := ll .s .Text ()
415
-
416
- // Handle leading whitespace
417
- if ! startedText && isSpace (next ) && tokenDepth == 0 && ! inQuotes {
418
- // leadingWhitespace.WriteString(next)
419
- continue
420
- }
421
-
422
- // As soon as we hit a nonspace, we consider text to have started.
423
- startedText = true
424
-
425
392
switch {
426
393
case next == "{" && ! inQuotes :
427
394
tokenDepth ++
395
+ if tokenDepth > 1 { // not the first open brace
396
+ tok .WriteString (next )
397
+ }
428
398
429
399
case next == "}" && ! inQuotes :
430
400
tokenDepth --
@@ -434,50 +404,62 @@ func (ll *LuaLexer) Lex(matchedToken string) <-chan NgxToken {
434
404
return
435
405
}
436
406
407
+ if tokenDepth > 0 { // not the last close brace for it to be 0
408
+ tok .WriteString (next )
409
+ }
410
+
437
411
if tokenDepth == 0 {
438
- // tokenCh <- NgxToken{Value: next, Line: 0}
439
- // Before finishing the block, prepend any leading whitespace.
440
- // finalTokenValue := leadingWhitespace.String() + tok.String()
441
412
tokenCh <- NgxToken {Value : tok .String (), Line : ll .s .Line (), IsQuoted : true }
442
- // tokenCh <- NgxToken{Value: finalTokenValue, Line: ll.s.Line(), IsQuoted: true}
443
- // tok.Reset()
444
- // leadingWhitespace.Reset()
445
413
tokenCh <- NgxToken {Value : ";" , Line : ll .s .Line (), IsQuoted : false } // For an end to the Lua string, seems hacky.
446
414
// See: https://github.com/nginxinc/crossplane/blob/master/crossplane/ext/lua.py#L122C25-L122C41
447
415
return
448
416
}
417
+
449
418
case next == `"` || next == "'" :
450
- if ! inQuotes {
451
- inQuotes = true
452
- } else {
453
- inQuotes = false
454
- }
419
+ inQuotes = ! inQuotes
455
420
tok .WriteString (next )
456
421
457
- // case isSpace(next):
458
- // if tok.Len() == 0 {
459
- // continue
460
- // }
461
- // tok.WriteString(next)
462
422
default :
463
423
// Expected first token encountered to be a "{" to open a Lua block. Handle any other non-whitespace
464
424
// character to mean we are not about to tokenize Lua.
425
+
426
+ // ignore leading whitespace until the first open brace (after which tokenDepth > 0)
427
+ if isSpace (next ) && tokenDepth == 0 {
428
+ continue
429
+ }
430
+
431
+ // strictly check that the first non-space character is {
465
432
if tokenDepth == 0 {
466
433
tokenCh <- NgxToken {Value : next , Line : ll .s .Line (), IsQuoted : false }
467
434
return
468
435
}
469
436
tok .WriteString (next )
470
437
}
471
-
472
- // if tokenDepth == 0 {
473
- // return
474
- // }
475
- if tokenDepth == 0 && ! inQuotes {
476
- startedText = false
477
- // leadingWhitespace.Reset()
478
- }
479
438
}
480
439
}()
481
440
482
441
return tokenCh
483
442
}
443
+
444
+ // TODO: 1. check for opening brace?
445
+ // assume nested parentheses only with () and [], no curly braces
446
+ // ignore space until first {
447
+ // // rewrite_by_lua_block x will be accepted -- work on this case
448
+ // strictly check that the first non-space character is {
449
+
450
+ /* commit 2. do we strictly check for an equal number of open and close braces rewrite_by_lua_block { {1,2 // parser will use close from outside of lua block
451
+
452
+ http{ server {
453
+ rewrite_by_lua_block { {1,2
454
+
455
+ }}
456
+
457
+ ==>
458
+ scenario 1
459
+ error for http and server block
460
+ rewrite_by_lua_block { {1,2}}
461
+
462
+ scenario 2
463
+ error for rewrite_by_lua_block with insufficient close
464
+ http and server parse succeeds
465
+ */
0 commit comments