@@ -89,6 +89,10 @@ func (e *extScanner) Err() error { return e.scanner.Err() }
89
89
func (e * extScanner ) Text () string { return e .scanner .Text () }
90
90
func (e * extScanner ) Line () int { return e .tokenLine }
91
91
92
+ type tokenInfo struct {
93
+ LineNumberExists bool
94
+ }
95
+
92
96
//nolint:gocyclo,funlen,gocognit
93
97
func tokenize (reader io.Reader , tokenCh chan NgxToken , options LexOptions ) {
94
98
token := strings.Builder {}
@@ -103,15 +107,15 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
103
107
depth := 0
104
108
var la , quote string
105
109
106
- // check the lua token is not the directive start
107
- processdLine := make (map [int ]bool )
110
+ // check if the lua token is directive, if token line is already processd, then this is not the directive
111
+ nextTokenIsDirective := make (map [int ]tokenInfo )
108
112
109
113
scanner := bufio .NewScanner (reader )
110
114
scanner .Split (bufio .ScanRunes )
111
115
112
116
emit := func (line int , quoted bool , err error ) {
113
117
tokenCh <- NgxToken {Value : token .String (), Line : line , IsQuoted : quoted , Error : err }
114
- processdLine [line ] = true
118
+ nextTokenIsDirective [line ] = tokenInfo { LineNumberExists : true }
115
119
token .Reset ()
116
120
lexState = skipSpace
117
121
}
@@ -162,16 +166,29 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
162
166
163
167
if token .Len () > 0 {
164
168
tokenStr := token .String ()
165
- if ext , ok := externalLexers [tokenStr ]; ok && ! processdLine [tokenLine ] {
166
- emit (tokenStartLine , false , nil )
167
- externalScanner .tokenLine = tokenLine
168
- extTokenCh := ext .Lex (tokenStr )
169
- for tok := range extTokenCh {
170
- tokenCh <- tok
171
- }
169
+ if ext , ok := externalLexers [tokenStr ]; ok {
170
+ if _ , exists := nextTokenIsDirective [tokenLine ]; ! exists {
171
+					// save the lex state before emitting tokenStr, so we know whether we encountered a start quote
172
+ lastLexState := lexState
173
+ if lexState == inQuote {
174
+ emit (tokenStartLine , true , nil )
175
+ } else {
176
+ emit (tokenStartLine , false , nil )
177
+ }
172
178
173
- lexState = skipSpace
174
- tokenLine = externalScanner .tokenLine
179
+ externalScanner .tokenLine = tokenLine
180
+ extTokenCh := ext .Lex (tokenStr )
181
+ for tok := range extTokenCh {
182
+ tokenCh <- tok
183
+ }
184
+ tokenLine = externalScanner .tokenLine
185
+
186
+					// if we detected a start quote and the current char after external-lexer processing is the end quote, skip it
187
+ if lastLexState == inQuote && la == quote {
188
+ continue
189
+ }
190
+
191
+ }
175
192
}
176
193
}
177
194
0 commit comments