@@ -484,7 +484,8 @@ def test_table_name_with_project_id():
484484 }
485485 ],
486486 "partition_by" : {
487- "columns" : ["fiscal_half_year_reporting_week_no" , "DAY" ],
487+ "columns" : ["fiscal_half_year_reporting_week_no" ],
488+ "trunc_by" : "DAY" ,
488489 "type" : "DATETIME_TRUNC" ,
489490 },
490491 "partitioned_by" : [],
@@ -646,7 +647,8 @@ def test_multiple_options():
646647 {"option_four" : '"Four"' },
647648 ],
648649 "partition_by" : {
649- "columns" : ["fiscal_half_year_reporting_week_no" , "DAY" ],
650+ "columns" : ["fiscal_half_year_reporting_week_no" ],
651+ "trunc_by" : "DAY" ,
650652 "type" : "DATETIME_TRUNC" ,
651653 },
652654 "partitioned_by" : [],
@@ -869,3 +871,126 @@ def test_bigquery_options_string():
869871 "types" : [],
870872 }
871873 assert result == expected
874+
875+
def test_bigquery_partition_range():
    """A RANGE_BUCKET(...GENERATE_ARRAY...) clause lands in ``partition_by``.

    The generated-array expression is kept verbatim (whitespace stripped)
    under the ``range`` key, with ``type`` set to ``RANGE_BUCKET``.
    """
    ddl = """
CREATE TABLE data.test(
    field_a INT OPTIONS(description='some description')
)
PARTITION BY RANGE_BUCKET(field_a, GENERATE_ARRAY(10, 1000, 1));"""

    parse_result = DDLParser(ddl).run(output_mode="bigquery")

    # The single column declared by the DDL above.
    field_a_column = {
        "check": None,
        "default": None,
        "name": "field_a",
        "nullable": True,
        "options": [{"description": "'some description'"}],
        "references": None,
        "size": None,
        "type": "INT",
        "unique": False,
    }
    expected = [
        {
            "alter": {},
            "checks": [],
            "columns": [field_a_column],
            "index": [],
            "partition_by": {
                "columns": ["field_a"],
                "range": "GENERATE_ARRAY(10,1000,1)",
                "type": "RANGE_BUCKET",
            },
            "partitioned_by": [],
            "primary_key": [],
            "dataset": "data",
            "table_name": "test",
            "tablespace": None,
        }
    ]

    assert expected == parse_result
916+
917+
def test_array_range():
    """RANGE_BUCKET with an inline array literal yields a list under ``range``.

    NOTE(review): the DDL deliberately reads ``[1,2,3]]`` (extra closing
    bracket) — presumably exercising parser tolerance; confirm this is
    intentional before "fixing" the fixture.
    """
    ddl = """CREATE TABLE data.test(
    field_a INT OPTIONS(description='some description')
)
PARTITION BY RANGE_BUCKET(field_a, [1,2,3]]) ;"""

    parse_result = DDLParser(ddl).run(output_mode="bigquery")

    # Single declared column, mirrored in the expected output.
    field_a_column = {
        "check": None,
        "default": None,
        "name": "field_a",
        "nullable": True,
        "options": [{"description": "'some description'"}],
        "references": None,
        "size": None,
        "type": "INT",
        "unique": False,
    }
    expected = [
        {
            "alter": {},
            "checks": [],
            "columns": [field_a_column],
            "dataset": "data",
            "index": [],
            "partition_by": {
                "columns": ["field_a"],
                # Array elements come back as strings, not ints.
                "range": ["1", "2", "3"],
                "type": "RANGE_BUCKET",
            },
            "partitioned_by": [],
            "primary_key": [],
            "table_name": "test",
            "tablespace": None,
        }
    ]

    assert parse_result == expected
956+
957+
def test_date_trunc():
    """DATE_TRUNC partitioning splits into ``columns`` plus a ``trunc_by`` unit.

    The truncation granularity (``MONTH``) is reported separately from the
    partition column, and ``type`` records the DATE_TRUNC function name.
    """
    ddl = """CREATE TABLE data.test(
    field_a INT OPTIONS(description='some description')
)
PARTITION BY DATE_TRUNC(field, MONTH);"""

    parse_result = DDLParser(ddl).run(output_mode="bigquery")

    # The only column declared in the DDL.
    field_a_column = {
        "check": None,
        "default": None,
        "name": "field_a",
        "nullable": True,
        "options": [{"description": "'some description'"}],
        "references": None,
        "size": None,
        "type": "INT",
        "unique": False,
    }
    expected = [
        {
            "alter": {},
            "checks": [],
            "columns": [field_a_column],
            "dataset": "data",
            "index": [],
            "partition_by": {
                # Note: partitions on "field", which is not a declared column.
                "columns": ["field"],
                "trunc_by": "MONTH",
                "type": "DATE_TRUNC",
            },
            "partitioned_by": [],
            "primary_key": [],
            "table_name": "test",
            "tablespace": None,
        }
    ]

    assert expected == parse_result
0 commit comments