Skip to content

Commit 48d8fdd

Browse files
committed
fix "rewrite_by_lua_block"
1 parent 3879d41 commit 48d8fdd

File tree

2 files changed

+59
-77
lines changed

2 files changed

+59
-77
lines changed

lex.go

+58-76
Original file line numberDiff line numberDiff line change
@@ -160,23 +160,6 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
160160
tokenStr := token.String()
161161
if ext, ok := externalLexers[tokenStr]; ok {
162162
emit(tokenStartLine, false, nil)
163-
// for {
164-
// tok, err := ext.Lex()
165-
166-
// if errors.Is(err, StopExtLexer{}) {
167-
// break
168-
// }
169-
170-
// if err != nil {
171-
// emit(tokenStartLine, false, &ParseError{
172-
// File: &lexerFile,
173-
// What: err.Error(),
174-
// Line: &tokenLine, // TODO: I See this used but never updated
175-
// })
176-
// continue
177-
// }
178-
// tokenCh <- tok
179-
// }
180163
externalScanner.tokenLine = tokenLine
181164
extTokenCh := ext.Lex(tokenStr)
182165
for tok := range extTokenCh {
@@ -341,25 +324,23 @@ func (ll *LuaLexer) Register(s LexScanner) []string {
341324
}
342325
}
343326

327+
//nolint:funlen
344328
func (ll *LuaLexer) Lex(matchedToken string) <-chan NgxToken {
345329
tokenCh := make(chan NgxToken)
346330

347331
tokenDepth := 0
348-
// var leadingWhitespace strings.Builder
349332

350333
go func() {
351334
defer close(tokenCh)
352335
var tok strings.Builder
353336
var inQuotes bool
354-
var startedText bool
355337

356338
if matchedToken == "set_by_lua_block" {
357339
arg := ""
358340
for {
359341
if !ll.s.Scan() {
360342
return
361343
}
362-
363344
next := ll.s.Text()
364345
if isSpace(next) {
365346
if arg != "" {
@@ -376,55 +357,44 @@ func (ll *LuaLexer) Lex(matchedToken string) <-chan NgxToken {
376357
}
377358
arg += next
378359
}
379-
}
360+
// skip leading whitespace after the return value
361+
for {
362+
if !ll.s.Scan() {
363+
return
364+
}
365+
next := ll.s.Text()
380366

381-
for {
382-
if !ll.s.Scan() {
383-
return
384-
}
385-
if next := ll.s.Text(); !isSpace(next) {
386-
break
367+
if !isSpace(next) {
368+
if next != "{" {
369+
lineno := ll.s.Line()
370+
tokenCh <- NgxToken{Error: &ParseError{File: &lexerFile, What: `unexpected "}"`, Line: &lineno}}
371+
return
372+
}
373+
tokenDepth++
374+
break
375+
}
387376
}
388377
}
389378

390-
if !ll.s.Scan() {
391-
// TODO: err?
392-
return
393-
}
394-
395-
if next := ll.s.Text(); next != "{" {
396-
// TODO: Return error, need { to open blockj
397-
}
398-
399-
tokenDepth += 1
400-
401-
// TODO: check for opening brace?
402-
379+
// Grab everything in Lua block as a single token and watch for curly brace '{' in strings
403380
for {
404381
if !ll.s.Scan() {
405-
return // shrug emoji
382+
return
406383
}
407384

385+
next := ll.s.Text()
408386
if err := ll.s.Err(); err != nil {
409387
lineno := ll.s.Line()
410388
tokenCh <- NgxToken{Error: &ParseError{File: &lexerFile, What: err.Error(), Line: &lineno}}
411389

412390
}
413391

414-
next := ll.s.Text()
415-
416-
// Handle leading whitespace
417-
if !startedText && isSpace(next) && tokenDepth == 0 && !inQuotes {
418-
// leadingWhitespace.WriteString(next)
419-
continue
420-
}
421-
422-
// As soon as we hit a nonspace, we consider text to have started.
423-
startedText = true
424-
425392
switch {
426393
case next == "{" && !inQuotes:
427394
tokenDepth++
395+
if tokenDepth > 1 { // not the first open brace
396+
tok.WriteString(next)
397+
}
428398

429399
case next == "}" && !inQuotes:
430400
tokenDepth--
@@ -434,50 +404,62 @@ func (ll *LuaLexer) Lex(matchedToken string) <-chan NgxToken {
434404
return
435405
}
436406

407+
if tokenDepth > 0 { // not the last close brace for it to be 0
408+
tok.WriteString(next)
409+
}
410+
437411
if tokenDepth == 0 {
438-
// tokenCh <- NgxToken{Value: next, Line: 0}
439-
// Before finishing the block, prepend any leading whitespace.
440-
// finalTokenValue := leadingWhitespace.String() + tok.String()
441412
tokenCh <- NgxToken{Value: tok.String(), Line: ll.s.Line(), IsQuoted: true}
442-
// tokenCh <- NgxToken{Value: finalTokenValue, Line: ll.s.Line(), IsQuoted: true}
443-
// tok.Reset()
444-
// leadingWhitespace.Reset()
445413
tokenCh <- NgxToken{Value: ";", Line: ll.s.Line(), IsQuoted: false} // For an end to the Lua string, seems hacky.
446414
// See: https://github.com/nginxinc/crossplane/blob/master/crossplane/ext/lua.py#L122C25-L122C41
447415
return
448416
}
417+
449418
case next == `"` || next == "'":
450-
if !inQuotes {
451-
inQuotes = true
452-
} else {
453-
inQuotes = false
454-
}
419+
inQuotes = !inQuotes
455420
tok.WriteString(next)
456421

457-
// case isSpace(next):
458-
// if tok.Len() == 0 {
459-
// continue
460-
// }
461-
// tok.WriteString(next)
462422
default:
463423
// Expected first token encountered to be a "{" to open a Lua block. Handle any other non-whitespace
464424
// character to mean we are not about to tokenize Lua.
425+
426+
// ignoring everything until first open brace where tokenDepth > 0
427+
if isSpace(next) && tokenDepth == 0 {
428+
continue
429+
}
430+
431+
// strictly check that the first non-space character is {
465432
if tokenDepth == 0 {
466433
tokenCh <- NgxToken{Value: next, Line: ll.s.Line(), IsQuoted: false}
467434
return
468435
}
469436
tok.WriteString(next)
470437
}
471-
472-
// if tokenDepth == 0 {
473-
// return
474-
// }
475-
if tokenDepth == 0 && !inQuotes {
476-
startedText = false
477-
// leadingWhitespace.Reset()
478-
}
479438
}
480439
}()
481440

482441
return tokenCh
483442
}
443+
444+
// TODO: 1. check for opening brace?
445+
// assume nesting only with () and [] parentheses, no curly braces
446+
// ignore space until first {
447+
// // rewrite_by_lua_block x will be accepted -- work on this case
448+
// strictly check that the first non-space character is {
449+
450+
/* commit 2. do we strictly check for equal number of open and close braces rewrite_by_lua_block { {1,2 // parser will use close from outside of lua block
451+
452+
http{ server {
453+
rewrite_by_lua_block { {1,2
454+
455+
}}
456+
457+
==>
458+
scenario 1
459+
error for http and server block
460+
rewrite_by_lua_block { {1,2}}
461+
462+
scenario 2
463+
error for rewrite_by_lua_block with insufficient close
464+
http and server parse succeeds
465+
*/

lex_test.go

+1-1
Original file line numberDiff line numberDiff line change
@@ -411,7 +411,7 @@ func TestLex(t *testing.T) {
411411
fixture := fixture
412412
t.Run(fixture.name, func(t *testing.T) {
413413
t.Parallel()
414-
// if fixture.name != "lua-block-simple" {
414+
// if fixture.name != "lua-block-tricky" {
415415
// t.Skip()
416416
// }
417417
path := getTestConfigPath(fixture.name, "nginx.conf")

0 commit comments

Comments
 (0)