Skip to content

Commit b63b60e

Browse files
authored
Feat: treat CHAR[ACTER] VARYING as VARCHAR for all dialects (#5093)
* Add support for parsing CHARACTER VARYING
* Handle CHAR VARYING
* Add TSQL CHAR VARYING unit tests
* Move CHAR VARYING / CHARACTER VARYING to the main tokenizer keywords
* Apply ruff formatting
1 parent 612a2da commit b63b60e

File tree

5 files changed

+42
-4
lines changed

5 files changed

+42
-4
lines changed

sqlglot/dialects/duckdb.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -320,7 +320,6 @@ class Tokenizer(tokens.Tokenizer):
320320
"BITSTRING": TokenType.BIT,
321321
"BPCHAR": TokenType.TEXT,
322322
"CHAR": TokenType.TEXT,
323-
"CHARACTER VARYING": TokenType.TEXT,
324323
"DATETIME": TokenType.TIMESTAMPNTZ,
325324
"DETACH": TokenType.DETACH,
326325
"EXCLUDE": TokenType.EXCEPT,

sqlglot/dialects/postgres.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -321,7 +321,6 @@ class Tokenizer(tokens.Tokenizer):
321321
"BEGIN": TokenType.COMMAND,
322322
"BEGIN TRANSACTION": TokenType.BEGIN,
323323
"BIGSERIAL": TokenType.BIGSERIAL,
324-
"CHARACTER VARYING": TokenType.VARCHAR,
325324
"CONSTRAINT TRIGGER": TokenType.COMMAND,
326325
"CSTRING": TokenType.PSEUDO_TYPE,
327326
"DECLARE": TokenType.COMMAND,

sqlglot/dialects/snowflake.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -949,8 +949,6 @@ class Tokenizer(tokens.Tokenizer):
949949
**tokens.Tokenizer.KEYWORDS,
950950
"FILE://": TokenType.URI_START,
951951
"BYTEINT": TokenType.INT,
952-
"CHAR VARYING": TokenType.VARCHAR,
953-
"CHARACTER VARYING": TokenType.VARCHAR,
954952
"EXCLUDE": TokenType.EXCEPT,
955953
"FILE FORMAT": TokenType.FILE_FORMAT,
956954
"GET": TokenType.GET,

sqlglot/tokens.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -907,6 +907,8 @@ class Tokenizer(metaclass=_Tokenizer):
907907
"JSONB": TokenType.JSONB,
908908
"CHAR": TokenType.CHAR,
909909
"CHARACTER": TokenType.CHAR,
910+
"CHAR VARYING": TokenType.VARCHAR,
911+
"CHARACTER VARYING": TokenType.VARCHAR,
910912
"NCHAR": TokenType.NCHAR,
911913
"VARCHAR": TokenType.VARCHAR,
912914
"VARCHAR2": TokenType.VARCHAR,

tests/dialects/test_dialect.py

Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -326,6 +326,46 @@ def test_cast(self):
326326
"doris": "CAST(a AS VARCHAR(3))",
327327
},
328328
)
329+
self.validate_all(
330+
"CAST(a AS CHARACTER VARYING)",
331+
write={
332+
"bigquery": "CAST(a AS STRING)",
333+
"drill": "CAST(a AS VARCHAR)",
334+
"duckdb": "CAST(a AS TEXT)",
335+
"materialize": "CAST(a AS VARCHAR)",
336+
"mysql": "CAST(a AS CHAR)",
337+
"hive": "CAST(a AS STRING)",
338+
"oracle": "CAST(a AS VARCHAR2)",
339+
"postgres": "CAST(a AS VARCHAR)",
340+
"presto": "CAST(a AS VARCHAR)",
341+
"redshift": "CAST(a AS VARCHAR)",
342+
"snowflake": "CAST(a AS VARCHAR)",
343+
"spark": "CAST(a AS STRING)",
344+
"starrocks": "CAST(a AS VARCHAR)",
345+
"tsql": "CAST(a AS VARCHAR)",
346+
"doris": "CAST(a AS VARCHAR)",
347+
},
348+
)
349+
self.validate_all(
350+
"CAST(a AS CHARACTER VARYING(3))",
351+
write={
352+
"bigquery": "CAST(a AS STRING)",
353+
"drill": "CAST(a AS VARCHAR(3))",
354+
"duckdb": "CAST(a AS TEXT(3))",
355+
"materialize": "CAST(a AS VARCHAR(3))",
356+
"mysql": "CAST(a AS CHAR(3))",
357+
"hive": "CAST(a AS VARCHAR(3))",
358+
"oracle": "CAST(a AS VARCHAR2(3))",
359+
"postgres": "CAST(a AS VARCHAR(3))",
360+
"presto": "CAST(a AS VARCHAR(3))",
361+
"redshift": "CAST(a AS VARCHAR(3))",
362+
"snowflake": "CAST(a AS VARCHAR(3))",
363+
"spark": "CAST(a AS VARCHAR(3))",
364+
"starrocks": "CAST(a AS VARCHAR(3))",
365+
"tsql": "CAST(a AS VARCHAR(3))",
366+
"doris": "CAST(a AS VARCHAR(3))",
367+
},
368+
)
329369
self.validate_all(
330370
"CAST(a AS SMALLINT)",
331371
write={

0 commit comments

Comments (0)