@@ -52,7 +52,7 @@ type LexScanner interface {
 }
 
 type ExtLexer interface {
-	Register(LexScanner) []string
+	Register(scanner LexScanner) []string
 	Lex(matchedToken string) <-chan NgxToken
 }
 
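For orientation, here is a minimal sketch of what an implementation of the `ExtLexer` interface above could look like. The `EchoLexer` type and the `echo_block` directive are hypothetical, invented for illustration; `LexScanner`'s `Scan`/`Text`/`Line` methods are assumed from their use elsewhere in this file.

```go
// EchoLexer is a hypothetical ExtLexer used only to illustrate the contract.
type EchoLexer struct {
	s LexScanner
}

// Register keeps the shared scanner and reports the directive names
// this lexer wants to take over.
func (el *EchoLexer) Register(scanner LexScanner) []string {
	el.s = scanner
	return []string{"echo_block"} // hypothetical directive name
}

// Lex streams tokens for the matched directive until its block ends.
func (el *EchoLexer) Lex(matchedToken string) <-chan NgxToken {
	tokenCh := make(chan NgxToken)
	go func() {
		defer close(tokenCh)
		for el.s.Scan() {
			t := el.s.Text()
			tokenCh <- NgxToken{Value: t, Line: el.s.Line()}
			if t == "}" { // simplistic end condition, for the sketch only
				return
			}
		}
	}()
	return tokenCh
}
```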
@@ -93,7 +93,7 @@ type tokenInfo struct {
 	LineNumberExists bool
 }
 
-//nolint:gocyclo,funlen,gocognit
+//nolint:gocyclo,funlen,gocognit,maintidx
 func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
 	token := strings.Builder{}
 	tokenLine := 1
@@ -164,6 +164,7 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
			la = "\\" + la
 		}
 
+		// special handling for *_by_lua_block directives
 		if token.Len() > 0 {
 			tokenStr := token.String()
 			if ext, ok := externalLexers[tokenStr]; ok {
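The `externalLexers` lookup above is the handoff point: when the token just read names a registered directive, `tokenize` delegates to that directive's lexer and forwards its tokens. The hunk cuts off before the branch body, so the following is a hedged sketch of that forwarding, not the PR's actual code:

```go
// Hypothetical continuation of the branch above (the real body is outside
// this hunk): forward everything the external lexer produces.
if ext, ok := externalLexers[tokenStr]; ok {
	tokenCh <- NgxToken{Value: tokenStr, Line: tokenLine} // emit the directive itself
	for tok := range ext.Lex(tokenStr) {
		tokenCh <- tok
	}
	token.Reset() // assumed: clear the builder before resuming normal lexing
}
```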
@@ -187,7 +188,6 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
				if lastLexState == inQuote && la == quote {
					continue
				}
-
			}
 		}
 	}
@@ -345,7 +345,7 @@ func (ll *LuaLexer) Register(s LexScanner) []string {
 	}
 }
 
-//nolint:funlen
+//nolint:funlen,gocognit,gocyclo
 func (ll *LuaLexer) Lex(matchedToken string) <-chan NgxToken {
 	tokenCh := make(chan NgxToken)
 
@@ -356,6 +356,7 @@ func (ll *LuaLexer) Lex(matchedToken string) <-chan NgxToken {
		var tok strings.Builder
 		var inQuotes bool
 
+		// special handling for 'set_by_lua_block' directive
 		if matchedToken == "set_by_lua_block" {
 			arg := ""
 			for {
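`set_by_lua_block` needs this special case because, unlike the other `*_by_lua_block` directives, it takes a variable argument before its Lua block (e.g. `set_by_lua_block $res { return 1 + 1 }`), so the lexer collects that argument before looking for the opening brace. As a hedged illustration, the token stream for that example would look roughly like this (values and line numbers are assumptions, not taken from the PR's tests):

```go
// Rough sketch of the tokens the Lua lexer emits for:
//
//	set_by_lua_block $res { return 1 + 1 }
//
var _ = []NgxToken{
	{Value: "$res", Line: 1},                           // the collected argument
	{Value: " return 1 + 1 ", Line: 1, IsQuoted: true}, // the Lua body as one token
	{Value: ";", Line: 1},                              // synthetic terminator, added later in this diff
}
```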
@@ -407,7 +408,6 @@ func (ll *LuaLexer) Lex(matchedToken string) <-chan NgxToken {
			if err := ll.s.Err(); err != nil {
 				lineno := ll.s.Line()
 				tokenCh <- NgxToken{Error: &ParseError{File: &lexerFile, What: err.Error(), Line: &lineno}}
-
 			}
 
 			switch {
@@ -431,7 +431,7 @@ func (ll *LuaLexer) Lex(matchedToken string) <-chan NgxToken {
 
 			if tokenDepth == 0 {
 				tokenCh <- NgxToken{Value: tok.String(), Line: ll.s.Line(), IsQuoted: true}
-				tokenCh <- NgxToken{Value: ";", Line: ll.s.Line(), IsQuoted: false} // For an end to the Lua string, seems hacky.
+				tokenCh <- NgxToken{Value: ";", Line: ll.s.Line(), IsQuoted: false} // For an end to the Lua string, based on the nginx behavior.
 				// See: https://github.com/nginxinc/crossplane/blob/master/crossplane/ext/lua.py#L122C25-L122C41
 				return
 			}
@@ -442,8 +442,7 @@ func (ll *LuaLexer) Lex(matchedToken string) <-chan NgxToken {
 
 			default:
 				// Expected first token encountered to be a "{" to open a Lua block. Handle any other non-whitespace
-				// character to mean we are not about to tokenize Lua.
-
+				// character to mean we are not about to tokenize Lua.
 				// ignoring everything until first open brace where tokenDepth > 0
 				if isSpace(next) && tokenDepth == 0 {
 					continue