Skip to content

Commit 39418cf

Browse files
authored
PostgreSQL Tokenization: Fix unexpected characters after question mark being silently ignored (#2129)
1 parent d78dbc9 commit 39418cf

File tree

1 file changed

+20
-1
lines changed

1 file changed

+20
-1
lines changed

src/tokenizer.rs

Lines changed: 20 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1717,7 +1717,7 @@ impl<'a> Tokenizer<'a> {
                     }
                 }
                 Some('#') => self.consume_and_return(chars, Token::QuestionMarkSharp),
-                _ => self.consume_and_return(chars, Token::Question),
+                _ => Ok(Some(Token::Question)),
             }
         }
         '?' => {
@@ -4147,4 +4147,23 @@ mod tests {
         panic!("Tokenizer should have failed on {sql}, but it succeeded with {tokens:?}");
     }
 }
+
+#[test]
+fn tokenize_question_mark() {
+    let dialect = PostgreSqlDialect {};
+    let sql = "SELECT x ? y";
+    let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap();
+    compare(
+        tokens,
+        vec![
+            Token::make_keyword("SELECT"),
+            Token::Whitespace(Whitespace::Space),
+            Token::make_word("x", None),
+            Token::Whitespace(Whitespace::Space),
+            Token::Question,
+            Token::Whitespace(Whitespace::Space),
+            Token::make_word("y", None),
+        ],
+    )
+}
 }

0 commit comments

Comments
 (0)