@@ -1396,3 +1396,38 @@ def test_unsigned_parquet(bucket, database):
13961396 df ["c0" ] = df .c0 .astype ("uint64" )
13971397 with pytest .raises (wr .exceptions .UnsupportedType ):
13981398 wr .s3 .to_parquet (df = df , path = path , dataset = True , database = database , table = table , mode = "overwrite" )
1399+
1400+ wr .s3 .delete_objects (path = path )
1401+ wr .catalog .delete_table_if_exists (database = database , table = table )
1402+
1403+
def test_parquet_uint64(bucket):
    """Round-trip unsigned-integer columns through a partitioned Parquet dataset on S3.

    Writes a DataFrame holding the maximum value of each unsigned width
    (uint8/16/32/64) plus a small partition column, reads the dataset back,
    and asserts the extreme values survived the write/read cycle intact.

    Parameters
    ----------
    bucket : str
        Name of the S3 bucket to write under (pytest fixture).
    """
    path = f"s3://{bucket}/test_parquet_uint64/"
    # Start from a clean prefix so stale objects from earlier runs
    # cannot leak into the read-back assertions.
    wr.s3.delete_objects(path=path)
    df = pd.DataFrame(
        {
            "c0": [0, 0, (2 ** 8) - 1],
            "c1": [0, 0, (2 ** 16) - 1],
            "c2": [0, 0, (2 ** 32) - 1],
            "c3": [0, 0, (2 ** 64) - 1],
            "c4": [0, 1, 2],  # partition column
        }
    )
    df["c0"] = df.c0.astype("uint8")
    df["c1"] = df.c1.astype("uint16")
    df["c2"] = df.c2.astype("uint32")
    df["c3"] = df.c3.astype("uint64")
    paths = wr.s3.to_parquet(df=df, path=path, dataset=True, mode="overwrite", partition_cols=["c4"])["paths"]
    # Wait until every written object is listable before reading the dataset back.
    wr.s3.wait_objects_exist(paths=paths, use_threads=False)
    # Bind the read-back frame to a new name instead of rebinding `df`,
    # so each assertion clearly targets the frame that came off S3.
    df2 = wr.s3.read_parquet(path=path, dataset=True)
    assert len(df2.index) == 3
    assert len(df2.columns) == 5
    assert df2.c0.max() == (2 ** 8) - 1
    assert df2.c1.max() == (2 ** 16) - 1
    assert df2.c2.max() == (2 ** 32) - 1
    assert df2.c3.max() == (2 ** 64) - 1
    # c4 is reconstructed from the partition path, so it is cast back to
    # uint8 before summing (presumably it returns as a non-numeric dtype
    # — matches the original test's cast; confirm against read_parquet docs).
    assert df2.c4.astype("uint8").sum() == 3
    # Clean up the test prefix.
    wr.s3.delete_objects(path=path)
0 commit comments