From d95cef054bfed0e1f107e1e97bc7bef29e6636c5 Mon Sep 17 00:00:00 2001
From: jnlt3
Date: Mon, 8 Dec 2025 15:01:53 +0300
Subject: [PATCH 1/3] postgres tokenizer: do not silently ignore the character
 after a question mark if it is not one of the expected characters

---
 src/tokenizer.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 2ae17cf4a..304bf4dbd 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -1717,7 +1717,7 @@ impl<'a> Tokenizer<'a> {
                         }
                     }
                     Some('#') => self.consume_and_return(chars, Token::QuestionMarkSharp),
-                    _ => self.consume_and_return(chars, Token::Question),
+                    _ => Ok(Some(Token::Question)),
                 }
             }
             '?' => {

From 1e2be127dab2676c95533dc129bdc7827edf0c4f Mon Sep 17 00:00:00 2001
From: jnlt3
Date: Sun, 14 Dec 2025 15:16:05 +0300
Subject: [PATCH 2/3] Add test case for the tokenizer consuming the character
 following a question mark

---
 tests/sqlparser_postgres.rs | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/tests/sqlparser_postgres.rs b/tests/sqlparser_postgres.rs
index 9f4564ef2..345d8777e 100644
--- a/tests/sqlparser_postgres.rs
+++ b/tests/sqlparser_postgres.rs
@@ -7914,3 +7914,20 @@ fn parse_create_operator_class() {
     )
     .is_err());
 }
+
+#[test]
+fn tokenize_question_mark() {
+    let sql = "SELECT x ? y";
+    pg().tokenizes_to(
+        sql,
+        vec![
+            Token::make_keyword("SELECT"),
+            Token::Whitespace(Whitespace::Space),
+            Token::make_word("x", None),
+            Token::Whitespace(Whitespace::Space),
+            Token::Question,
+            Token::Whitespace(Whitespace::Space),
+            Token::make_word("y", None),
+        ],
+    )
+}

From f23087e50c835eb3a773444e77e97c54747aa693 Mon Sep 17 00:00:00 2001
From: jnlt3
Date: Thu, 18 Dec 2025 09:29:25 +0300
Subject: [PATCH 3/3] Move test case from sqlparser_postgres.rs to tokenizer.rs

---
 src/tokenizer.rs            | 19 +++++++++++++++++++
 tests/sqlparser_postgres.rs | 17 -----------------
 2 files changed, 19 insertions(+), 17 deletions(-)

diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 304bf4dbd..8666563ac 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -4147,4 +4147,23 @@ mod tests {
             panic!("Tokenizer should have failed on {sql}, but it succeeded with {tokens:?}");
         }
     }
+
+    #[test]
+    fn tokenize_question_mark() {
+        let dialect = PostgreSqlDialect {};
+        let sql = "SELECT x ? y";
+        let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap();
+        compare(
+            tokens,
+            vec![
+                Token::make_keyword("SELECT"),
+                Token::Whitespace(Whitespace::Space),
+                Token::make_word("x", None),
+                Token::Whitespace(Whitespace::Space),
+                Token::Question,
+                Token::Whitespace(Whitespace::Space),
+                Token::make_word("y", None),
+            ],
+        )
+    }
 }

diff --git a/tests/sqlparser_postgres.rs b/tests/sqlparser_postgres.rs
index 345d8777e..9f4564ef2 100644
--- a/tests/sqlparser_postgres.rs
+++ b/tests/sqlparser_postgres.rs
@@ -7914,20 +7914,3 @@ fn parse_create_operator_class() {
     )
     .is_err());
 }
-
-#[test]
-fn tokenize_question_mark() {
-    let sql = "SELECT x ? y";
-    pg().tokenizes_to(
-        sql,
-        vec![
-            Token::make_keyword("SELECT"),
-            Token::Whitespace(Whitespace::Space),
-            Token::make_word("x", None),
-            Token::Whitespace(Whitespace::Space),
-            Token::Question,
-            Token::Whitespace(Whitespace::Space),
-            Token::make_word("y", None),
-        ],
-    )
-}