@@ -195,7 +195,7 @@ def test_readjson_each_chunk(request, lines_json_df, engine):
     assert chunks[1].shape == (1, 2)
 
 
-def test_readjson_chunks_from_file(request, engine):
+def test_readjson_chunks_from_file(request, engine, temp_file):
     if engine == "pyarrow":
         # GH 48893
        reason = (
@@ -204,41 +204,41 @@ def test_readjson_chunks_from_file(request, engine):
     )
     request.applymarker(pytest.mark.xfail(reason=reason, raises=ValueError))
 
-    with tm.ensure_clean("test.json") as path:
-        df = DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
-        df.to_json(path, lines=True, orient="records")
-        with read_json(path, lines=True, chunksize=1, engine=engine) as reader:
-            chunked = pd.concat(reader)
-        unchunked = read_json(path, lines=True, engine=engine)
-        tm.assert_frame_equal(unchunked, chunked)
+    path = str(temp_file)
+    df = DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
+    df.to_json(path, lines=True, orient="records")
+    with read_json(path, lines=True, chunksize=1, engine=engine) as reader:
+        chunked = pd.concat(reader)
+    unchunked = read_json(path, lines=True, engine=engine)
+    tm.assert_frame_equal(unchunked, chunked)
 
 
 @pytest.mark.parametrize("chunksize", [None, 1])
-def test_readjson_chunks_closes(chunksize):
-    with tm.ensure_clean("test.json") as path:
-        df = DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
-        df.to_json(path, lines=True, orient="records")
-        reader = JsonReader(
-            path,
-            orient=None,
-            typ="frame",
-            dtype=True,
-            convert_axes=True,
-            convert_dates=True,
-            keep_default_dates=True,
-            precise_float=False,
-            date_unit=None,
-            encoding=None,
-            lines=True,
-            chunksize=chunksize,
-            compression=None,
-            nrows=None,
-        )
-        with reader:
-            reader.read()
-        assert reader.handles.handle.closed, (
-            f"didn't close stream with chunksize = {chunksize}"
-        )
+def test_readjson_chunks_closes(chunksize, temp_file):
+    path = str(temp_file)
+    df = DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
+    df.to_json(path, lines=True, orient="records")
+    reader = JsonReader(
+        path,
+        orient=None,
+        typ="frame",
+        dtype=True,
+        convert_axes=True,
+        convert_dates=True,
+        keep_default_dates=True,
+        precise_float=False,
+        date_unit=None,
+        encoding=None,
+        lines=True,
+        chunksize=chunksize,
+        compression=None,
+        nrows=None,
+    )
+    with reader:
+        reader.read()
+    assert reader.handles.handle.closed, (
+        f"didn't close stream with chunksize = {chunksize}"
+    )
 
 
 @pytest.mark.parametrize("chunksize", [0, -1, 2.2, "foo"])
@@ -278,7 +278,7 @@ def test_readjson_chunks_multiple_empty_lines(chunksize):
     tm.assert_frame_equal(orig, test, obj=f"chunksize: {chunksize}")
 
 
-def test_readjson_unicode(request, monkeypatch, engine):
+def test_readjson_unicode(request, monkeypatch, engine, temp_file):
     if engine == "pyarrow":
         # GH 48893
         reason = (
@@ -287,14 +287,14 @@ def test_readjson_unicode(request, monkeypatch, engine):
     )
     request.applymarker(pytest.mark.xfail(reason=reason, raises=ValueError))
 
-    with tm.ensure_clean("test.json") as path:
-        monkeypatch.setattr("locale.getpreferredencoding", lambda do_setlocale: "cp949")
-        with open(path, "w", encoding="utf-8") as f:
-            f.write('{"£©µÀÆÖÞßéöÿ":["АБВГДабвгд가"]}')
+    path = str(temp_file)
+    monkeypatch.setattr("locale.getpreferredencoding", lambda do_setlocale: "cp949")
+    with open(path, "w", encoding="utf-8") as f:
+        f.write('{"£©µÀÆÖÞßéöÿ":["АБВГДабвгд가"]}')
 
-        result = read_json(path, engine=engine)
-        expected = DataFrame({"£©µÀÆÖÞßéöÿ": ["АБВГДабвгд가"]})
-        tm.assert_frame_equal(result, expected)
+    result = read_json(path, engine=engine)
+    expected = DataFrame({"£©µÀÆÖÞßéöÿ": ["АБВГДабвгд가"]})
+    tm.assert_frame_equal(result, expected)
 
 
 @pytest.mark.parametrize("nrows", [1, 2])
@@ -441,25 +441,25 @@ def test_to_json_append_mode(mode_):
         df.to_json(mode=mode_, lines=False, orient="records")
 
 
-def test_to_json_append_output_consistent_columns():
+def test_to_json_append_output_consistent_columns(temp_file):
     # GH 35849
     # Testing that resulting output reads in as expected.
     # Testing same columns, new rows
     df1 = DataFrame({"col1": [1, 2], "col2": ["a", "b"]})
     df2 = DataFrame({"col1": [3, 4], "col2": ["c", "d"]})
 
     expected = DataFrame({"col1": [1, 2, 3, 4], "col2": ["a", "b", "c", "d"]})
-    with tm.ensure_clean("test.json") as path:
-        # Save dataframes to the same file
-        df1.to_json(path, lines=True, orient="records")
-        df2.to_json(path, mode="a", lines=True, orient="records")
+    path = str(temp_file)
+    # Save dataframes to the same file
+    df1.to_json(path, lines=True, orient="records")
+    df2.to_json(path, mode="a", lines=True, orient="records")
 
-        # Read path file
-        result = read_json(path, lines=True)
-        tm.assert_frame_equal(result, expected)
+    # Read path file
+    result = read_json(path, lines=True)
+    tm.assert_frame_equal(result, expected)
 
 
-def test_to_json_append_output_inconsistent_columns():
+def test_to_json_append_output_inconsistent_columns(temp_file):
     # GH 35849
     # Testing that resulting output reads in as expected.
     # Testing one new column, one old column, new rows
@@ -473,17 +473,17 @@ def test_to_json_append_output_inconsistent_columns():
             "col3": [np.nan, np.nan, "!", "#"],
         }
     )
-    with tm.ensure_clean("test.json") as path:
-        # Save dataframes to the same file
-        df1.to_json(path, mode="a", lines=True, orient="records")
-        df3.to_json(path, mode="a", lines=True, orient="records")
+    path = str(temp_file)
+    # Save dataframes to the same file
+    df1.to_json(path, mode="a", lines=True, orient="records")
+    df3.to_json(path, mode="a", lines=True, orient="records")
 
-        # Read path file
-        result = read_json(path, lines=True)
-        tm.assert_frame_equal(result, expected)
+    # Read path file
+    result = read_json(path, lines=True)
+    tm.assert_frame_equal(result, expected)
 
 
-def test_to_json_append_output_different_columns():
+def test_to_json_append_output_different_columns(temp_file):
     # GH 35849
     # Testing that resulting output reads in as expected.
     # Testing same, differing and new columns
@@ -500,19 +500,19 @@ def test_to_json_append_output_different_columns():
             "col4": [None, None, None, None, None, None, True, False],
         }
     ).astype({"col4": "float"})
-    with tm.ensure_clean("test.json") as path:
-        # Save dataframes to the same file
-        df1.to_json(path, mode="a", lines=True, orient="records")
-        df2.to_json(path, mode="a", lines=True, orient="records")
-        df3.to_json(path, mode="a", lines=True, orient="records")
-        df4.to_json(path, mode="a", lines=True, orient="records")
-
-        # Read path file
-        result = read_json(path, lines=True)
-        tm.assert_frame_equal(result, expected)
+    path = str(temp_file)
+    # Save dataframes to the same file
+    df1.to_json(path, mode="a", lines=True, orient="records")
+    df2.to_json(path, mode="a", lines=True, orient="records")
+    df3.to_json(path, mode="a", lines=True, orient="records")
+    df4.to_json(path, mode="a", lines=True, orient="records")
+
+    # Read path file
+    result = read_json(path, lines=True)
+    tm.assert_frame_equal(result, expected)
 
 
-def test_to_json_append_output_different_columns_reordered():
+def test_to_json_append_output_different_columns_reordered(temp_file):
     # GH 35849
     # Testing that resulting output reads in as expected.
     # Testing specific result column order.
@@ -530,13 +530,13 @@ def test_to_json_append_output_different_columns_reordered():
             "col1": [None, None, None, None, 3, 4, 1, 2],
         }
     ).astype({"col4": "float"})
-    with tm.ensure_clean("test.json") as path:
-        # Save dataframes to the same file
-        df4.to_json(path, mode="a", lines=True, orient="records")
-        df3.to_json(path, mode="a", lines=True, orient="records")
-        df2.to_json(path, mode="a", lines=True, orient="records")
-        df1.to_json(path, mode="a", lines=True, orient="records")
-
-        # Read path file
-        result = read_json(path, lines=True)
-        tm.assert_frame_equal(result, expected)
+    path = str(temp_file)
+    # Save dataframes to the same file
+    df4.to_json(path, mode="a", lines=True, orient="records")
+    df3.to_json(path, mode="a", lines=True, orient="records")
+    df2.to_json(path, mode="a", lines=True, orient="records")
+    df1.to_json(path, mode="a", lines=True, orient="records")
+
+    # Read path file
+    result = read_json(path, lines=True)
+    tm.assert_frame_equal(result, expected)