@@ -967,15 +967,170 @@ def test_virtual_column_table():
967967 "location" : "@ADL_Azure_Storage_Account_Container_Name/" ,
968968 "table_properties" : {
969969 "auto_refresh" : False ,
970- "file_format" : [
971- "TYPE" ,"=" ,"JSON" ,
972- "NULL_IF" ,"=('field')" ,
973- "DATE_FORMAT" ,"=" ,"AUTO" ,
974- "TRIM_SPACE" ,"=" ,"TRUE" ,
975- ],
976- "stage_file_format" : ["TYPE" ,"=" ,"JSON" , "NULL_IF" ,"=()" ],
970+ "file_format" : {'TYPE' : 'JSON' , 'NULL_IF' : "('field')" , 'DATE_FORMAT' : 'AUTO' , 'TRIM_SPACE' : 'TRUE' },
971+ "stage_file_format" : {'TYPE' : 'JSON' , 'NULL_IF' :'()' },
977972 },
978973 }
979974 ]
980975
981976 assert result_ext_table == expected_ext_table
977+
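+# A bare CREATE SCHEMA statement should parse to just the schema name.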
+def test_schema_create():
+    ddl = """
+    create schema myschema;
+    """
+    result = DDLParser(ddl).run(output_mode="snowflake")
+    expected = [{"schema_name": 'myschema'}]
+
+    assert expected == result
+
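+# IF NOT EXISTS should be surfaced as a boolean flag alongside the schema name.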
+def test_schema_create_if_not_exists():
+    ddl = """
+    create schema if not exists myschema;
+    """
+    result = DDLParser(ddl).run(output_mode="snowflake")
+    expected = [{"schema_name": 'myschema', 'if_not_exists': True}]
+
+    assert expected == result
+
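+# CREATE OR REPLACE SCHEMA parses to the same output as a plain CREATE SCHEMA.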
+def test_schema_create_or_replace():
+    # https://docs.snowflake.com/en/sql-reference/sql/create-schema
+    ddl = """
+    create or replace schema myschema;
+    """
+    result = DDLParser(ddl, normalize_names=True, debug=True).run(output_mode="snowflake")
+    expected = [{"schema_name": 'myschema'}]
+
+    assert result == expected
+
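+# External table with an empty NULLIF list: file_format options come back
+# as a dict, with the empty list preserved as the literal string '()'.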
+def test_external_table_with_nullif():
+    ddl = """create or replace external table if not exists ${database_name}.MySchemaName.MyTableName(
+        "Filename" VARCHAR(16777216) AS (METADATA$FILENAME))
+        partition by ("Filename")
+        location = @ADL_DH_DL_PTS/
+        auto_refresh = false
+        file_format = (TYPE=JSON NULLIF=())
+    ;"""
+
+    result = DDLParser(ddl, normalize_names=True, debug=True).run(output_mode="snowflake")
+    expected = [{'table_name': 'MyTableName',
+                 'schema': 'MySchemaName',
+                 'primary_key': [],
+                 'columns': [{
+                     'name': 'Filename',
+                     'type': 'VARCHAR',
+                     'size': 16777216,
+                     'references': None,
+                     'unique': False,
+                     'nullable': True,
+                     'default': None,
+                     'check': None,
+                     'generated': {'as': 'METADATA$FILENAME'},
+                 }],
+                 'alter': {},
+                 'checks': [],
+                 'index': [],
+                 'partitioned_by': [],
+                 'partition_by': {'columns': ['Filename'], 'type': None},
+                 'tablespace': None,
+                 'if_not_exists': True,
+                 'table_properties': {'project': '${database_name}',
+                                      'auto_refresh': False,
+                                      'file_format': {'TYPE': 'JSON', 'NULLIF': '()'}},
+                 'replace': True,
+                 'location': '@ADL_DH_DL_PTS/',
+                 'external': True,
+                 'primary_key_enforced': None,
+                 'clone': None,
+                 }]
+
+    assert result == expected
+
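+# Same shape with a CSV file format: each option, including the quoted
+# FIELD_DELIMITER, becomes its own key in the file_format dict.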
+def test_external_table_with_field_delimiter():
+    ddl = """create or replace external table if not exists ${database_name}.MySchemaName.MyTableName(
+        "Filename" VARCHAR(16777216) AS (METADATA$FILENAME))
+        partition by ("Filename")
+        location = @ADL_DH_DL_PTS/
+        auto_refresh = false
+        file_format = (TYPE=CSV FIELD_DELIMITER='|' TRIM_SPACE=TRUE ERROR_ON_COLUMN_COUNT_MISMATCH=FALSE REPLACE_INVALID_CHARACTERS=TRUE)
+    ;"""
+
+    result = DDLParser(ddl, normalize_names=True, debug=True).run(output_mode="snowflake")
+    expected = [{'table_name': 'MyTableName',
+                 'schema': 'MySchemaName',
+                 'primary_key': [],
+                 'columns': [{
+                     'name': 'Filename',
+                     'type': 'VARCHAR',
+                     'size': 16777216,
+                     'references': None,
+                     'unique': False,
+                     'nullable': True,
+                     'default': None,
+                     'check': None,
+                     'generated': {'as': 'METADATA$FILENAME'},
+                 }],
+                 'alter': {},
+                 'checks': [],
+                 'index': [],
+                 'partitioned_by': [],
+                 'partition_by': {'columns': ['Filename'], 'type': None},
+                 'tablespace': None,
+                 'if_not_exists': True,
+                 'table_properties': {'project': '${database_name}',
+                                      'auto_refresh': False,
+                                      'file_format': {'TYPE': 'CSV',
+                                                      'FIELD_DELIMITER': "'|'",
+                                                      'TRIM_SPACE': 'TRUE',
+                                                      'ERROR_ON_COLUMN_COUNT_MISMATCH': 'FALSE',
+                                                      'REPLACE_INVALID_CHARACTERS': 'TRUE'}},
+                 'replace': True,
+                 'location': '@ADL_DH_DL_PTS/',
+                 'external': True,
+                 'primary_key_enforced': None,
+                 'clone': None,
+                 }]
+
+    assert result == expected
+
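+# CLUSTER BY on a regular table lands in a top-level 'cluster_by' list.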
+def test_table_column_def_clusterby():
+    ddl = """CREATE TABLE ${database_name}.MySchemaName."MyTableName" (ID NUMBER(38,0) NOT NULL, "DocProv" VARCHAR(2)) cluster by ("DocProv");"""
+
+    result = DDLParser(ddl, normalize_names=True, debug=True).run(output_mode="snowflake")
+    expected = [{'table_name': 'MyTableName',
+                 'schema': 'MySchemaName',
+                 'primary_key': [],
+                 'columns': [{
+                     'name': 'ID',
+                     'size': (38, 0),
+                     'type': 'NUMBER',
+                     'references': None,
+                     'unique': False,
+                     'nullable': False,
+                     'default': None,
+                     'check': None,
+                 },
+                 {
+                     'name': 'DocProv',
+                     'size': 2,
+                     'type': 'VARCHAR',
+                     'references': None,
+                     'unique': False,
+                     'nullable': True,
+                     'default': None,
+                     'check': None,
+                 }],
+                 'alter': {},
+                 'checks': [],
+                 'index': [],
+                 'partitioned_by': [],
+                 'cluster_by': ['DocProv'],
+                 'tablespace': None,
+                 'external': False,
+                 'primary_key_enforced': None,
+                 'table_properties': {'project': '${database_name}'},
+                 'clone': None,
+                 }]
+
+    assert result == expected
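
For context on the dict-shaped `file_format` output these tests pin down, a minimal usage sketch follows. Assumptions: `simple_ddl_parser` is installed and exposes `DDLParser` as used in the tests above; the stage and table names are placeholders, and the exact set of result keys depends on the statement shape.

```python
from simple_ddl_parser import DDLParser

# Placeholder DDL mirroring the external-table tests above.
ddl = """create or replace external table if not exists MySchemaName.MyTableName(
    "Filename" VARCHAR(16777216) AS (METADATA$FILENAME))
    location = @my_stage/
    auto_refresh = false
    file_format = (TYPE=JSON NULLIF=())
;"""

result = DDLParser(ddl, normalize_names=True).run(output_mode="snowflake")

# With file_format parsed into a dict, options can be read by key
# rather than scanned out of a flat token list.
file_format = result[0]["table_properties"]["file_format"]
assert file_format["TYPE"] == "JSON"
```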