1515
1616
1717def table (db_name , table_name , location ):
18- columns = [
19- ColumnBuilder ("id" , "string" , "col comment" ).build ()
20- ]
18+ columns = [ColumnBuilder ("id" , "string" , "col comment" ).build ()]
2119
2220 serde_info = SerDeInfoBuilder (
2321 serialization_lib = "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe"
@@ -40,11 +38,15 @@ def table(db_name, table_name, location):
4038 return test_table
4139
4240
43- if __name__ == ' __main__' :
41+ if __name__ == " __main__" :
4442 all_args = argparse .ArgumentParser (description = "Test hive metastore." )
4543 all_args .add_argument ("-p" , "--port" , help = "Metastore server port" , default = "9083" )
46- all_args .add_argument ("-d" , "--database" , help = "Test DB name" , default = "test_metastore" )
47- all_args .add_argument ("-m" , "--metastore" , help = "The host or service to connect to" , required = True )
44+ all_args .add_argument (
45+ "-d" , "--database" , help = "Test DB name" , default = "test_metastore"
46+ )
47+ all_args .add_argument (
48+ "-m" , "--metastore" , help = "The host or service to connect to" , required = True
49+ )
4850 args = vars (all_args .parse_args ())
4951
5052 database_name = args ["database" ]
@@ -61,24 +63,46 @@ def table(db_name, table_name, location):
6163
6264 # Local access
6365 try :
64- hive_client .create_table (table (database_name , local_test_table_name , f"/stackable/warehouse/location_{ database_name } _{ local_test_table_name } " ))
66+ hive_client .create_table (
67+ table (
68+ database_name ,
69+ local_test_table_name ,
70+ f"/stackable/warehouse/location_{ database_name } _{ local_test_table_name } " ,
71+ )
72+ )
6573 except AlreadyExistsException :
6674 print (f"[INFO]: Table { local_test_table_name } already existed" )
67- schema = hive_client .get_schema (db_name = database_name , table_name = local_test_table_name )
68- expected = [FieldSchema (name = 'id' , type = 'string' , comment = 'col comment' )]
75+ schema = hive_client .get_schema (
76+ db_name = database_name , table_name = local_test_table_name
77+ )
78+ expected = [FieldSchema (name = "id" , type = "string" , comment = "col comment" )]
6979 if schema != expected :
70- print ("[ERROR]: Received local schema " + str (schema ) + " - expected schema: " + expected )
80+ print (
81+ "[ERROR]: Received local schema "
82+ + str (schema )
83+ + " - expected schema: "
84+ + expected
85+ )
7186 exit (- 1 )
7287
7388 # S3 access
7489 try :
75- hive_client .create_table (table (database_name , s3_test_table_name , "s3a://hive/" ))
90+ hive_client .create_table (
91+ table (database_name , s3_test_table_name , "s3a://hive/" )
92+ )
7693 except AlreadyExistsException :
7794 print (f"[INFO]: Table { s3_test_table_name } already existed" )
78- schema = hive_client .get_schema (db_name = database_name , table_name = s3_test_table_name )
79- expected = [FieldSchema (name = 'id' , type = 'string' , comment = 'col comment' )]
95+ schema = hive_client .get_schema (
96+ db_name = database_name , table_name = s3_test_table_name
97+ )
98+ expected = [FieldSchema (name = "id" , type = "string" , comment = "col comment" )]
8099 if schema != expected :
81- print ("[ERROR]: Received s3 schema " + str (schema ) + " - expected schema: " + expected )
100+ print (
101+ "[ERROR]: Received s3 schema "
102+ + str (schema )
103+ + " - expected schema: "
104+ + expected
105+ )
82106 exit (- 1 )
83107
84108 # Removed test, because it failed against Hive 3.1.3. We do not know if the behavior of the Hive metastore changed or we made a mistake. We improved the Trino tests to do more stuff with S3 (e.g. writing tables) which passed,