Skip to content

Commit cb5157e

Browse files
committed
directive with quotes around
1 parent 45e523b commit cb5157e

File tree

2 files changed

+29
-15
lines changed

2 files changed

+29
-15
lines changed

lex.go

Lines changed: 29 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -89,6 +89,10 @@ func (e *extScanner) Err() error { return e.scanner.Err() }
8989
func (e *extScanner) Text() string { return e.scanner.Text() }
9090
func (e *extScanner) Line() int { return e.tokenLine }
9191

92+
type tokenInfo struct {
93+
LineNumberExists bool
94+
}
95+
9296
//nolint:gocyclo,funlen,gocognit
9397
func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
9498
token := strings.Builder{}
@@ -103,15 +107,15 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
103107
depth := 0
104108
var la, quote string
105109

106-
// check the lua token is not the directive start
107-
processdLine := make(map[int]bool)
110+
// check if the lua token is a directive; if the token line was already processed, then this is not the directive
111+
nextTokenIsDirective := make(map[int]tokenInfo)
108112

109113
scanner := bufio.NewScanner(reader)
110114
scanner.Split(bufio.ScanRunes)
111115

112116
emit := func(line int, quoted bool, err error) {
113117
tokenCh <- NgxToken{Value: token.String(), Line: line, IsQuoted: quoted, Error: err}
114-
processdLine[line] = true
118+
nextTokenIsDirective[line] = tokenInfo{LineNumberExists: true}
115119
token.Reset()
116120
lexState = skipSpace
117121
}
@@ -162,16 +166,29 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
162166

163167
if token.Len() > 0 {
164168
tokenStr := token.String()
165-
if ext, ok := externalLexers[tokenStr]; ok && !processdLine[tokenLine] {
166-
emit(tokenStartLine, false, nil)
167-
externalScanner.tokenLine = tokenLine
168-
extTokenCh := ext.Lex(tokenStr)
169-
for tok := range extTokenCh {
170-
tokenCh <- tok
171-
}
169+
if ext, ok := externalLexers[tokenStr]; ok {
170+
if _, exists := nextTokenIsDirective[tokenLine]; !exists {
171+
// save the lex state before emitting tokenStr so we know whether we encountered a start quote
172+
lastLexState := lexState
173+
if lexState == inQuote {
174+
emit(tokenStartLine, true, nil)
175+
} else {
176+
emit(tokenStartLine, false, nil)
177+
}
172178

173-
lexState = skipSpace
174-
tokenLine = externalScanner.tokenLine
179+
externalScanner.tokenLine = tokenLine
180+
extTokenCh := ext.Lex(tokenStr)
181+
for tok := range extTokenCh {
182+
tokenCh <- tok
183+
}
184+
tokenLine = externalScanner.tokenLine
185+
186+
// if we detected a start quote and the current char after external-lexer processing is the end quote, skip it
187+
if lastLexState == inQuote && la == quote {
188+
continue
189+
}
190+
191+
}
175192
}
176193
}
177194

lex_test.go

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -411,9 +411,6 @@ func TestLex(t *testing.T) {
411411
fixture := fixture
412412
t.Run(fixture.name, func(t *testing.T) {
413413
t.Parallel()
414-
// if fixture.name != "lua-block-tricky" {
415-
// t.Skip()
416-
// }
417414
path := getTestConfigPath(fixture.name, "nginx.conf")
418415
file, err := os.Open(path)
419416
if err != nil {

0 commit comments

Comments
 (0)