Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 0 additions & 29 deletions sqlglot/dialects/postgres.py
Original file line number Diff line number Diff line change
Expand Up @@ -358,8 +358,6 @@ class Tokenizer(tokens.Tokenizer):
"<@": TokenType.LT_AT,
"?&": TokenType.QMARK_AMP,
"?|": TokenType.QMARK_PIPE,
"&<": TokenType.AMP_LT,
"&>": TokenType.AMP_GT,
"#-": TokenType.HASH_DASH,
"|/": TokenType.PIPE_SLASH,
"||/": TokenType.DPIPE_SLASH,
Expand All @@ -376,7 +374,6 @@ class Tokenizer(tokens.Tokenizer):
"NAME": TokenType.NAME,
"OID": TokenType.OBJECT_IDENTIFIER,
"ONLY": TokenType.ONLY,
"OPERATOR": TokenType.OPERATOR,
"REFRESH": TokenType.COMMAND,
"REINDEX": TokenType.COMMAND,
"RESET": TokenType.COMMAND,
Expand Down Expand Up @@ -479,12 +476,9 @@ class Parser(parser.Parser):
RANGE_PARSERS = {
**parser.Parser.RANGE_PARSERS,
TokenType.DAMP: binary_range_parser(exp.ArrayOverlaps),
TokenType.AMP_LT: binary_range_parser(exp.ExtendsLeft),
TokenType.AMP_GT: binary_range_parser(exp.ExtendsRight),
TokenType.DAT: lambda self, this: self.expression(
exp.MatchAgainst, this=self._parse_bitwise(), expressions=[this]
),
TokenType.OPERATOR: lambda self, this: self._parse_operator(this),
}

STATEMENT_PARSERS = {
Expand Down Expand Up @@ -517,29 +511,6 @@ def _parse_query_parameter(self) -> t.Optional[exp.Expression]:
self._match_text_seq("S")
return self.expression(exp.Placeholder, this=this)

def _parse_operator(self, this: t.Optional[exp.Expression]) -> t.Optional[exp.Expression]:
    """Parse one or more trailing ``OPERATOR(<op>)`` applications
    (PostgreSQL-style explicit operator calls, e.g. ``1 OPERATOR(pg_catalog.+) 2``).

    Args:
        this: the already-parsed left-hand operand (may be ``None``).

    Returns:
        An ``exp.Operator`` chain folded left-to-right over the operands, or
        ``this`` unchanged when no ``(`` follows the OPERATOR keyword.
    """
    while True:
        # OPERATOR must be immediately followed by a parenthesized operator
        # name; otherwise stop and leave `this` untouched.
        if not self._match(TokenType.L_PAREN):
            break

        # Accumulate raw token text up to the closing paren, so that
        # schema-qualified operators like ``pg_catalog.+`` come through verbatim.
        op = ""
        while self._curr and not self._match(TokenType.R_PAREN):
            op += self._curr.text
            self._advance()

        # NOTE: keyword arguments are evaluated left-to-right, so `comments`
        # captures self._prev_comments BEFORE _parse_bitwise() advances the
        # token stream past the right-hand operand.
        this = self.expression(
            exp.Operator,
            comments=self._prev_comments,
            this=this,
            operator=op,
            expression=self._parse_bitwise(),
        )

        # Chain further OPERATOR(...) applications left-associatively.
        if not self._match(TokenType.OPERATOR):
            break

    return this

def _parse_date_part(self) -> exp.Expression:
part = self._parse_type()
self._match(TokenType.COMMA)
Expand Down
26 changes: 26 additions & 0 deletions sqlglot/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -979,6 +979,9 @@ class Parser(metaclass=_Parser):
TokenType.QMARK_AMP: binary_range_parser(exp.JSONBContainsAllTopKeys),
TokenType.QMARK_PIPE: binary_range_parser(exp.JSONBContainsAnyTopKeys),
TokenType.HASH_DASH: binary_range_parser(exp.JSONBDeleteAtPath),
TokenType.OPERATOR: lambda self, this: self._parse_operator(this),
TokenType.AMP_LT: binary_range_parser(exp.ExtendsLeft),
TokenType.AMP_GT: binary_range_parser(exp.ExtendsRight),
}

PIPE_SYNTAX_TRANSFORM_PARSERS = {
Expand Down Expand Up @@ -9077,3 +9080,26 @@ def _parse_initcap(self) -> exp.Initcap:
expr.set("expression", exp.Literal.string(self.dialect.INITCAP_DEFAULT_DELIMITER_CHARS))

return expr

def _parse_operator(self, this: t.Optional[exp.Expression]) -> t.Optional[exp.Expression]:
    """Parse one or more trailing ``OPERATOR(<op>)`` applications
    (PostgreSQL-style explicit operator calls, e.g. ``1 OPERATOR(pg_catalog.+) 2``).

    Args:
        this: the already-parsed left-hand operand (may be ``None``).

    Returns:
        An ``exp.Operator`` chain folded left-to-right over the operands, or
        ``this`` unchanged when no ``(`` follows the OPERATOR keyword.
    """
    while True:
        # OPERATOR must be immediately followed by a parenthesized operator
        # name; otherwise stop and leave `this` untouched.
        if not self._match(TokenType.L_PAREN):
            break

        # Accumulate raw token text up to the closing paren, so that
        # schema-qualified operators like ``pg_catalog.+`` come through verbatim.
        op = ""
        while self._curr and not self._match(TokenType.R_PAREN):
            op += self._curr.text
            self._advance()

        # NOTE: keyword arguments are evaluated left-to-right, so `comments`
        # captures self._prev_comments BEFORE _parse_bitwise() advances the
        # token stream past the right-hand operand.
        this = self.expression(
            exp.Operator,
            comments=self._prev_comments,
            this=this,
            operator=op,
            expression=self._parse_bitwise(),
        )

        # Chain further OPERATOR(...) applications left-associatively.
        if not self._match(TokenType.OPERATOR):
            break

    return this
3 changes: 3 additions & 0 deletions sqlglot/tokens.py
Original file line number Diff line number Diff line change
Expand Up @@ -714,6 +714,8 @@ class Tokenizer(metaclass=_Tokenizer):
**{f"{{{{{postfix}": TokenType.BLOCK_START for postfix in ("+", "-")},
**{f"{prefix}}}}}": TokenType.BLOCK_END for prefix in ("+", "-")},
HINT_START: TokenType.HINT,
"&<": TokenType.AMP_LT,
"&>": TokenType.AMP_GT,
"==": TokenType.EQ,
"::": TokenType.DCOLON,
"?::": TokenType.QDCOLON,
Expand Down Expand Up @@ -850,6 +852,7 @@ class Tokenizer(metaclass=_Tokenizer):
"PRAGMA": TokenType.PRAGMA,
"PRIMARY KEY": TokenType.PRIMARY_KEY,
"PROCEDURE": TokenType.PROCEDURE,
"OPERATOR": TokenType.OPERATOR,
"QUALIFY": TokenType.QUALIFY,
"RANGE": TokenType.RANGE,
"RECURSIVE": TokenType.RECURSIVE,
Expand Down
14 changes: 14 additions & 0 deletions tests/dialects/test_dialect.py
Original file line number Diff line number Diff line change
Expand Up @@ -4840,3 +4840,17 @@ def test_session_user(self):
self.assertEqual(
parse_one(func_sql, dialect=dialect).sql(dialect), no_paren_sql
)

def test_operator(self):
    """OPERATOR(<op>) invocations round-trip and build left-nested Operator nodes."""
    tree = self.validate_identity("1 OPERATOR(+) 2 OPERATOR(*) 3")

    # Left-associative chaining: (1 OPERATOR(+) 2) OPERATOR(*) 3.
    inner = tree.left
    inner.assert_is(exp.Operator)
    inner.left.assert_is(exp.Literal)
    inner.right.assert_is(exp.Literal)
    tree.right.assert_is(exp.Literal)
    self.assertEqual(tree.sql(dialect="postgres"), "1 OPERATOR(+) 2 OPERATOR(*) 3")

    for sql in (
        "SELECT operator FROM t",
        "SELECT 1 OPERATOR(+) 2",
        "SELECT 1 OPERATOR(+) /* foo */ 2",
        "SELECT 1 OPERATOR(pg_catalog.+) 2",
    ):
        self.validate_identity(sql)
14 changes: 0 additions & 14 deletions tests/dialects/test_postgres.py
Original file line number Diff line number Diff line change
Expand Up @@ -1315,20 +1315,6 @@ def test_array_offset(self):
],
)

def test_operator(self):
    """OPERATOR(<op>) invocations parse into left-nested Operator nodes and round-trip."""
    expr = self.parse_one("1 OPERATOR(+) 2 OPERATOR(*) 3")

    # Left-associative chaining: (1 OPERATOR(+) 2) OPERATOR(*) 3.
    expr.left.assert_is(exp.Operator)
    expr.left.left.assert_is(exp.Literal)
    expr.left.right.assert_is(exp.Literal)
    expr.right.assert_is(exp.Literal)
    self.assertEqual(expr.sql(dialect="postgres"), "1 OPERATOR(+) 2 OPERATOR(*) 3")

    # Bare `operator` remains usable as an identifier; qualified and
    # comment-interleaved forms must round-trip unchanged.
    self.validate_identity("SELECT operator FROM t")
    self.validate_identity("SELECT 1 OPERATOR(+) 2")
    self.validate_identity("SELECT 1 OPERATOR(+) /* foo */ 2")
    self.validate_identity("SELECT 1 OPERATOR(pg_catalog.+) 2")

def test_bool_or(self):
self.validate_identity(
"SELECT a, LOGICAL_OR(b) FROM table GROUP BY a",
Expand Down