Commit a34bcde

Fix(bigquery): properly consume dashed table parts (#4477)
1 parent a2899c2 · commit a34bcde
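BigQuery allows dashes in unquoted table references (for example a project ID like my-project), so the dialect's parser has to stitch dash-separated tokens back into a single identifier. A minimal sketch of the behavior this code path supports, using sqlglot's public API (the dashed names below are made up for illustration):

import sqlglot

# Hypothetical dashed project name; BigQuery permits this unquoted.
sql = "SELECT * FROM my-project.my_dataset.my_table"

# Parse with the BigQuery dialect and generate SQL back out; the dashed
# part should survive as a single table-name component.
print(sqlglot.parse_one(sql, read="bigquery").sql(dialect="bigquery"))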

2 files changed: +26 -22 lines

sqlglot/dialects/bigquery.py

Lines changed: 8 additions & 4 deletions
@@ -589,6 +589,8 @@ class Parser(parser.Parser):
 
         NULL_TOKENS = {TokenType.NULL, TokenType.UNKNOWN}
 
+        DASHED_TABLE_PART_FOLLOW_TOKENS = {TokenType.DOT, TokenType.L_PAREN, TokenType.R_PAREN}
+
         STATEMENT_PARSERS = {
             **parser.Parser.STATEMENT_PARSERS,
             TokenType.ELSE: lambda self: self._parse_as_command(self._prev),
@@ -615,11 +617,13 @@ def _parse_table_part(self, schema: bool = False) -> t.Optional[exp.Expression]:
             if isinstance(this, exp.Identifier):
                 table_name = this.name
                 while self._match(TokenType.DASH, advance=False) and self._next:
-                    text = ""
-                    while self._is_connected() and self._curr.token_type != TokenType.DOT:
+                    start = self._curr
+                    while self._is_connected() and not self._match_set(
+                        self.DASHED_TABLE_PART_FOLLOW_TOKENS, advance=False
+                    ):
                         self._advance()
-                        text += self._prev.text
-                    table_name += text
+
+                    table_name += self._find_sql(start, self._prev)
 
                 this = exp.Identifier(this=table_name, quoted=this.args.get("quoted"))
             elif isinstance(this, exp.Literal):
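The old loop only stopped at a dot, so a dashed table name followed by a closing parenthesis (as in the subquery added to the tests below) could swallow tokens past the end of the table name. The new DASHED_TABLE_PART_FOLLOW_TOKENS set also stops the scan at parentheses, and _find_sql rebuilds the dashed part from the original SQL text instead of concatenating token texts. A small check against the public API, mirroring the new test case (output is printed, not asserted, here):

import sqlglot

# The regression case: a dashed table inside a parenthesized subquery.
sql = "SELECT * FROM x WHERE x.y >= (SELECT MAX(a) FROM b-c) - 20"

expr = sqlglot.parse_one(sql, read="bigquery")
print(expr.sql(dialect="bigquery"))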

tests/dialects/test_bigquery.py

Lines changed: 18 additions & 18 deletions
@@ -200,24 +200,7 @@ def test_bigquery(self):
         self.validate_identity("CAST(x AS NVARCHAR)", "CAST(x AS STRING)")
         self.validate_identity("CAST(x AS TIMESTAMPTZ)", "CAST(x AS TIMESTAMP)")
         self.validate_identity("CAST(x AS RECORD)", "CAST(x AS STRUCT)")
-        self.validate_all(
-            "EDIT_DISTANCE(col1, col2, max_distance => 3)",
-            write={
-                "bigquery": "EDIT_DISTANCE(col1, col2, max_distance => 3)",
-                "clickhouse": UnsupportedError,
-                "databricks": UnsupportedError,
-                "drill": UnsupportedError,
-                "duckdb": UnsupportedError,
-                "hive": UnsupportedError,
-                "postgres": "LEVENSHTEIN_LESS_EQUAL(col1, col2, 3)",
-                "presto": UnsupportedError,
-                "snowflake": "EDITDISTANCE(col1, col2, 3)",
-                "spark": UnsupportedError,
-                "spark2": UnsupportedError,
-                "sqlite": UnsupportedError,
-            },
-        )
-
+        self.validate_identity("SELECT * FROM x WHERE x.y >= (SELECT MAX(a) FROM b-c) - 20")
         self.validate_identity(
             "MERGE INTO dataset.NewArrivals USING (SELECT * FROM UNNEST([('microwave', 10, 'warehouse #1'), ('dryer', 30, 'warehouse #1'), ('oven', 20, 'warehouse #2')])) ON FALSE WHEN NOT MATCHED THEN INSERT ROW WHEN NOT MATCHED BY SOURCE THEN DELETE"
         )
@@ -332,6 +315,23 @@ def test_bigquery(self):
             "SELECT CAST(1 AS INT64)",
         )
 
+        self.validate_all(
+            "EDIT_DISTANCE(col1, col2, max_distance => 3)",
+            write={
+                "bigquery": "EDIT_DISTANCE(col1, col2, max_distance => 3)",
+                "clickhouse": UnsupportedError,
+                "databricks": UnsupportedError,
+                "drill": UnsupportedError,
+                "duckdb": UnsupportedError,
+                "hive": UnsupportedError,
+                "postgres": "LEVENSHTEIN_LESS_EQUAL(col1, col2, 3)",
+                "presto": UnsupportedError,
+                "snowflake": "EDITDISTANCE(col1, col2, 3)",
+                "spark": UnsupportedError,
+                "spark2": UnsupportedError,
+                "sqlite": UnsupportedError,
+            },
+        )
         self.validate_all(
             "EDIT_DISTANCE(a, b)",
             write={
