@@ -160,23 +160,6 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
 		tokenStr := token.String()
 		if ext, ok := externalLexers[tokenStr]; ok {
 			emit(tokenStartLine, false, nil)
-			// for {
-			// 	tok, err := ext.Lex()
-
-			// 	if errors.Is(err, StopExtLexer{}) {
-			// 		break
-			// 	}
-
-			// 	if err != nil {
-			// 		emit(tokenStartLine, false, &ParseError{
-			// 			File: &lexerFile,
-			// 			What: err.Error(),
-			// 			Line: &tokenLine, // TODO: I See this used but never updated
-			// 		})
-			// 		continue
-			// 	}
-			// 	tokenCh <- tok
-			// }
 			externalScanner.tokenLine = tokenLine
 			extTokenCh := ext.Lex(tokenStr)
 			for tok := range extTokenCh {
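
For context, the loop above consumes roughly the contract sketched below, assuming the sketch lives in the same package as NgxToken. The echoLexer type and its behavior are illustrative only, not part of this change; the diff itself only shows that entries registered in externalLexers expose Lex(matchedToken string) <-chan NgxToken.

// echoLexer is a hypothetical external lexer: it receives the directive
// name that triggered it and streams tokens back on a channel.
type echoLexer struct{}

func (echoLexer) Lex(matchedToken string) <-chan NgxToken {
	tokenCh := make(chan NgxToken)
	go func() {
		// Closing the channel is what ends `for tok := range extTokenCh` in
		// tokenize, replacing the StopExtLexer sentinel the removed code used.
		defer close(tokenCh)
		tokenCh <- NgxToken{Value: matchedToken, Line: 1, IsQuoted: false}
	}()
	return tokenCh
}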
@@ -345,21 +328,18 @@ func (ll *LuaLexer) Lex(matchedToken string) <-chan NgxToken {
 	tokenCh := make(chan NgxToken)

 	tokenDepth := 0
-	// var leadingWhitespace strings.Builder

 	go func() {
 		defer close(tokenCh)
 		var tok strings.Builder
 		var inQuotes bool
-

 		if matchedToken == "set_by_lua_block" {
 			arg := ""
 			for {
 				if !ll.s.Scan() {
 					return
 				}
-
 				next := ll.s.Text()
 				if isSpace(next) {
 					if arg != "" {
@@ -376,36 +356,29 @@ func (ll *LuaLexer) Lex(matchedToken string) <-chan NgxToken {
 					}
 					arg += next
 				}
-			}
+				// skip leading whitespace after the variable argument
+				for {
+					if !ll.s.Scan() {
+						return
+					}
+					next := ll.s.Text()

-			for {
-				if !ll.s.Scan() {
-					return
-				}
-				if next := ll.s.Text(); !isSpace(next) {
-					break
+					if !isSpace(next) {
+						if next != "{" {
+							lineno := ll.s.Line()
+							tokenCh <- NgxToken{Error: &ParseError{File: &lexerFile, What: `unexpected "}"`, Line: &lineno}}
+							return
+						}
+						tokenDepth++
+						break
+					}
 				}
 			}

-			if !ll.s.Scan() {
-				// TODO: err?
-				return
-			}
-
-			if next := ll.s.Text(); next != "{" {
-				lineno := ll.s.Line()
-				tokenCh <- NgxToken{Error: &ParseError{File: &lexerFile, What: `unexpected "}"`, Line: &lineno}}
-				return
-
-			}
-
-			tokenDepth++
-
-			// TODO: check for opening brace?
-
+			// Grab everything in the Lua block as a single token, watching for curly braces inside strings.
 			for {
 				if !ll.s.Scan() {
-					return // shrug emoji
+					return
 				}

 				next := ll.s.Text()
@@ -415,13 +388,12 @@ func (ll *LuaLexer) Lex(matchedToken string) <-chan NgxToken {

 				}

-
-
-
-
 				switch {
 				case next == "{" && !inQuotes:
 					tokenDepth++
+					if tokenDepth > 1 { // not the first open brace
+						tok.WriteString(next)
+					}

 				case next == "}" && !inQuotes:
 					tokenDepth--
@@ -431,47 +403,62 @@ func (ll *LuaLexer) Lex(matchedToken string) <-chan NgxToken {
 						return
 					}

+					if tokenDepth > 0 { // not the last close brace, which returns the depth to 0
+						tok.WriteString(next)
+					}
+
 					if tokenDepth == 0 {
-						// tokenCh <- NgxToken{Value: next, Line: 0}
-						// Before finishing the block, prepend any leading whitespace.
-						// finalTokenValue := leadingWhitespace.String() + tok.String()
 						tokenCh <- NgxToken{Value: tok.String(), Line: ll.s.Line(), IsQuoted: true}
-						// tokenCh <- NgxToken{Value: finalTokenValue, Line: ll.s.Line(), IsQuoted: true}
-						// tok.Reset()
-						// leadingWhitespace.Reset()
 						tokenCh <- NgxToken{Value: ";", Line: ll.s.Line(), IsQuoted: false} // Marks the end of the Lua string; seems hacky.
 						// See: https://github.com/nginxinc/crossplane/blob/master/crossplane/ext/lua.py#L122C25-L122C41
 						return
 					}
+
 				case next == `"` || next == "'":
-					if !inQuotes {
-						inQuotes = true
-					} else {
-						inQuotes = false
-					}
+					inQuotes = !inQuotes
 					tok.WriteString(next)

-				// case isSpace(next):
-				// 	if tok.Len() == 0 {
-				// 		continue
-				// 	}
-				// 	tok.WriteString(next)
 				default:
 					// Expect the first token encountered to be a "{" opening a Lua block. Treat any other
 					// non-whitespace character as a sign we are not about to tokenize Lua.
+
+					// ignore whitespace until the first open brace makes tokenDepth > 0
+					if isSpace(next) && tokenDepth == 0 {
+						continue
+					}
+
+					// strictly check that the first non-space character is "{"
 					if tokenDepth == 0 {
 						tokenCh <- NgxToken{Value: next, Line: ll.s.Line(), IsQuoted: false}
 						return
 					}
 					tok.WriteString(next)
 				}
-
-				// if tokenDepth == 0 {
-				// 	return
-				// }
-
 			}
 		}()

 		return tokenCh
 	}
+
+	// TODO: 1. Check for the opening brace?
+	//    Assume nesting only with () and [], no curly braces.
+	//    Ignore whitespace until the first "{".
+	//    "rewrite_by_lua_block x" will currently be accepted -- work on this case.
+	//    Strictly check that the first non-space character is "{".

+	/* TODO: 2. Do we strictly check for an equal number of open and close braces? For rewrite_by_lua_block { {1,2 the parser will consume a close brace from outside the Lua block:
+
+	http { server {
+	    rewrite_by_lua_block { {1,2
+
+	}}
+
+	==>
+	scenario 1:
+	    error for the http and server blocks;
+	    the Lua block is lexed as rewrite_by_lua_block { {1,2}}
+
+	scenario 2:
+	    error for rewrite_by_lua_block because of insufficient close braces;
+	    http and server parse succeeds
+	*/
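
To make the scanning logic in this change easier to follow, here is a self-contained, runnable model of the same brace bookkeeping. All names in it are illustrative, and it deliberately omits the real lexer's quote toggling and channel plumbing.

package main

import (
	"bufio"
	"errors"
	"fmt"
	"strings"
)

// luaBlockBody mirrors the diff's logic: skip whitespace before the block,
// require "{" as the first non-space rune, keep inner braces in the body,
// and stop when the brace depth returns to zero.
func luaBlockBody(src string) (string, error) {
	s := bufio.NewScanner(strings.NewReader(src))
	s.Split(bufio.ScanRunes) // scan rune by rune
	depth := 0
	var body strings.Builder
	for s.Scan() {
		next := s.Text()
		switch {
		case next == "{":
			depth++
			if depth > 1 { // not the first open brace: part of the body
				body.WriteString(next)
			}
		case next == "}":
			depth--
			if depth < 0 {
				return "", errors.New(`unexpected "}"`)
			}
			if depth > 0 { // not the last close brace: part of the body
				body.WriteString(next)
				continue
			}
			return body.String(), nil // depth back to zero: block complete
		default:
			if depth == 0 {
				if strings.TrimSpace(next) == "" {
					continue // ignore whitespace before the opening brace
				}
				// strictly require "{" as the first non-space character
				return "", fmt.Errorf("unexpected %q before opening brace", next)
			}
			body.WriteString(next)
		}
	}
	return "", errors.New("unterminated block")
}

func main() {
	body, err := luaBlockBody(" { local t = {1, 2} return t } ")
	fmt.Printf("%q %v\n", body, err) // " local t = {1, 2} return t " <nil>
}

Run against TODO 2's unbalanced input "{ {1,2" followed by "}}", this model consumes the trailing "}}" itself (one brace lands in the body, the other closes the block). That is exactly scenario 1 above: braces intended to close the enclosing http and server blocks are swallowed by the Lua token, and those outer blocks then fail to parse.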