def get_databricks_non_compound_types():
    """Return fresh instances of every non-compound Databricks type under test.

    A new instance list is built on each call so parametrized tests never
    share mutable type-object state.
    """
    type_classes = (
        Integer,
        String,
        Boolean,
        Date,
        DateTime,
        Time,
        Uuid,
        Numeric,
        TINYINT,
        TIMESTAMP,
        TIMESTAMP_NTZ,
        BigInteger,
    )
    return [cls() for cls in type_classes]
def get_databricks_compound_types():
    """Return instances of the compound (nested) Databricks types under test.

    Inner types are passed as instances (``String()``) rather than classes,
    consistent with ``get_databricks_non_compound_types``; SQLAlchemy treats a
    type class and a zero-argument instance interchangeably (``to_instance``),
    so the compiled SQL is unchanged.
    """
    return [DatabricksArray(String()), DatabricksMap(String(), String())]
@pytest.mark.parametrize("internal_type", get_databricks_non_compound_types())
def test_array_parsing(internal_type):
    """An ARRAY wrapping any non-compound type compiles to ``ARRAY<inner>``."""
    inner_sql = internal_type.compile(dialect=dialect)
    expected = "ARRAY<{}>".format(inner_sql)

    actual = DatabricksArray(internal_type).compile(dialect=dialect)
    assert actual == expected
@pytest.mark.parametrize("internal_type_1", get_databricks_non_compound_types())
@pytest.mark.parametrize("internal_type_2", get_databricks_non_compound_types())
def test_map_parsing(internal_type_1, internal_type_2):
    """A MAP over any pair of non-compound types compiles to ``MAP<key,value>``."""
    key_sql = internal_type_1.compile(dialect=dialect)
    value_sql = internal_type_2.compile(dialect=dialect)
    expected = "MAP<{},{}>".format(key_sql, value_sql)

    actual = DatabricksMap(internal_type_1, internal_type_2).compile(dialect=dialect)
    assert actual == expected
221225
222226
@pytest.mark.parametrize(
    "internal_type",
    get_databricks_non_compound_types() + get_databricks_compound_types(),
)
def test_multilevel_array_type_parsing(internal_type):
    """Triple-nested arrays compile to ``ARRAY<ARRAY<ARRAY<inner>>>``."""
    # Build the three levels of nesting iteratively.
    nested = internal_type
    for _ in range(3):
        nested = DatabricksArray(nested)

    inner_sql = internal_type.compile(dialect=dialect)
    expected = "ARRAY<ARRAY<ARRAY<{}>>>".format(inner_sql)
    assert nested.compile(dialect=dialect) == expected
@pytest.mark.parametrize(
    "internal_type",
    get_databricks_non_compound_types() + get_databricks_compound_types(),
)
def test_multilevel_map_type_parsing(internal_type):
    """Triple-nested maps compile to ``MAP<STRING,MAP<STRING,MAP<STRING,inner>>>``.

    Map keys are passed as ``String()`` instances rather than the bare class,
    for consistency with the instance convention used by the type fixtures;
    SQLAlchemy compiles a class and a zero-argument instance identically.
    """
    map_type = DatabricksMap(
        String(), DatabricksMap(String(), DatabricksMap(String(), internal_type))
    )

    actual_parsed = map_type.compile(dialect=dialect)
    expected_parsed = "MAP<STRING,MAP<STRING,MAP<STRING,{}>>>".format(
        internal_type.compile(dialect=dialect)
    )
    assert actual_parsed == expected_parsed