Commit 1c533fe

test_readlines updated

1 parent 95b1002 commit 1c533fe

File tree

1 file changed: +79 -79 lines changed

pandas/tests/io/json/test_readlines.py

Lines changed: 79 additions & 79 deletions
@@ -195,7 +195,7 @@ def test_readjson_each_chunk(request, lines_json_df, engine):
     assert chunks[1].shape == (1, 2)
 
 
-def test_readjson_chunks_from_file(request, engine):
+def test_readjson_chunks_from_file(request, engine, temp_file):
     if engine == "pyarrow":
         # GH 48893
         reason = (
@@ -204,41 +204,41 @@ def test_readjson_chunks_from_file(request, engine):
         )
         request.applymarker(pytest.mark.xfail(reason=reason, raises=ValueError))
 
-    with tm.ensure_clean("test.json") as path:
-        df = DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
-        df.to_json(path, lines=True, orient="records")
-        with read_json(path, lines=True, chunksize=1, engine=engine) as reader:
-            chunked = pd.concat(reader)
-        unchunked = read_json(path, lines=True, engine=engine)
-        tm.assert_frame_equal(unchunked, chunked)
+    path = str(temp_file)
+    df = DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
+    df.to_json(path, lines=True, orient="records")
+    with read_json(path, lines=True, chunksize=1, engine=engine) as reader:
+        chunked = pd.concat(reader)
+    unchunked = read_json(path, lines=True, engine=engine)
+    tm.assert_frame_equal(unchunked, chunked)
 
 
 @pytest.mark.parametrize("chunksize", [None, 1])
-def test_readjson_chunks_closes(chunksize):
-    with tm.ensure_clean("test.json") as path:
-        df = DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
-        df.to_json(path, lines=True, orient="records")
-        reader = JsonReader(
-            path,
-            orient=None,
-            typ="frame",
-            dtype=True,
-            convert_axes=True,
-            convert_dates=True,
-            keep_default_dates=True,
-            precise_float=False,
-            date_unit=None,
-            encoding=None,
-            lines=True,
-            chunksize=chunksize,
-            compression=None,
-            nrows=None,
-        )
-        with reader:
-            reader.read()
-        assert reader.handles.handle.closed, (
-            f"didn't close stream with chunksize = {chunksize}"
-        )
+def test_readjson_chunks_closes(chunksize, temp_file):
+    path = str(temp_file)
+    df = DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
+    df.to_json(path, lines=True, orient="records")
+    reader = JsonReader(
+        path,
+        orient=None,
+        typ="frame",
+        dtype=True,
+        convert_axes=True,
+        convert_dates=True,
+        keep_default_dates=True,
+        precise_float=False,
+        date_unit=None,
+        encoding=None,
+        lines=True,
+        chunksize=chunksize,
+        compression=None,
+        nrows=None,
+    )
+    with reader:
+        reader.read()
+    assert reader.handles.handle.closed, (
+        f"didn't close stream with chunksize = {chunksize}"
+    )
 
 
 @pytest.mark.parametrize("chunksize", [0, -1, 2.2, "foo"])
@@ -278,7 +278,7 @@ def test_readjson_chunks_multiple_empty_lines(chunksize):
     tm.assert_frame_equal(orig, test, obj=f"chunksize: {chunksize}")
 
 
-def test_readjson_unicode(request, monkeypatch, engine):
+def test_readjson_unicode(request, monkeypatch, engine, temp_file):
     if engine == "pyarrow":
         # GH 48893
         reason = (
@@ -287,14 +287,14 @@ def test_readjson_unicode(request, monkeypatch, engine):
         )
         request.applymarker(pytest.mark.xfail(reason=reason, raises=ValueError))
 
-    with tm.ensure_clean("test.json") as path:
-        monkeypatch.setattr("locale.getpreferredencoding", lambda do_setlocale: "cp949")
-        with open(path, "w", encoding="utf-8") as f:
-            f.write('{"£©µÀÆÖÞßéöÿ":["АБВГДабвгд가"]}')
+    path = str(temp_file)
+    monkeypatch.setattr("locale.getpreferredencoding", lambda do_setlocale: "cp949")
+    with open(path, "w", encoding="utf-8") as f:
+        f.write('{"£©µÀÆÖÞßéöÿ":["АБВГДабвгд가"]}')
 
-        result = read_json(path, engine=engine)
-        expected = DataFrame({"£©µÀÆÖÞßéöÿ": ["АБВГДабвгд가"]})
-        tm.assert_frame_equal(result, expected)
+    result = read_json(path, engine=engine)
+    expected = DataFrame({"£©µÀÆÖÞßéöÿ": ["АБВГДабвгд가"]})
+    tm.assert_frame_equal(result, expected)
 
 
 @pytest.mark.parametrize("nrows", [1, 2])
@@ -441,25 +441,25 @@ def test_to_json_append_mode(mode_):
         df.to_json(mode=mode_, lines=False, orient="records")
 
 
-def test_to_json_append_output_consistent_columns():
+def test_to_json_append_output_consistent_columns(temp_file):
     # GH 35849
     # Testing that resulting output reads in as expected.
     # Testing same columns, new rows
     df1 = DataFrame({"col1": [1, 2], "col2": ["a", "b"]})
     df2 = DataFrame({"col1": [3, 4], "col2": ["c", "d"]})
 
     expected = DataFrame({"col1": [1, 2, 3, 4], "col2": ["a", "b", "c", "d"]})
-    with tm.ensure_clean("test.json") as path:
-        # Save dataframes to the same file
-        df1.to_json(path, lines=True, orient="records")
-        df2.to_json(path, mode="a", lines=True, orient="records")
+    path = str(temp_file)
+    # Save dataframes to the same file
+    df1.to_json(path, lines=True, orient="records")
+    df2.to_json(path, mode="a", lines=True, orient="records")
 
-        # Read path file
-        result = read_json(path, lines=True)
-        tm.assert_frame_equal(result, expected)
+    # Read path file
+    result = read_json(path, lines=True)
+    tm.assert_frame_equal(result, expected)
 
 
-def test_to_json_append_output_inconsistent_columns():
+def test_to_json_append_output_inconsistent_columns(temp_file):
     # GH 35849
     # Testing that resulting output reads in as expected.
     # Testing one new column, one old column, new rows
@@ -473,17 +473,17 @@ def test_to_json_append_output_inconsistent_columns():
             "col3": [np.nan, np.nan, "!", "#"],
         }
     )
-    with tm.ensure_clean("test.json") as path:
-        # Save dataframes to the same file
-        df1.to_json(path, mode="a", lines=True, orient="records")
-        df3.to_json(path, mode="a", lines=True, orient="records")
+    path = str(temp_file)
+    # Save dataframes to the same file
+    df1.to_json(path, mode="a", lines=True, orient="records")
+    df3.to_json(path, mode="a", lines=True, orient="records")
 
-        # Read path file
-        result = read_json(path, lines=True)
-        tm.assert_frame_equal(result, expected)
+    # Read path file
+    result = read_json(path, lines=True)
+    tm.assert_frame_equal(result, expected)
 
 
-def test_to_json_append_output_different_columns():
+def test_to_json_append_output_different_columns(temp_file):
     # GH 35849
     # Testing that resulting output reads in as expected.
     # Testing same, differing and new columns
@@ -500,19 +500,19 @@ def test_to_json_append_output_different_columns():
             "col4": [None, None, None, None, None, None, True, False],
         }
     ).astype({"col4": "float"})
-    with tm.ensure_clean("test.json") as path:
-        # Save dataframes to the same file
-        df1.to_json(path, mode="a", lines=True, orient="records")
-        df2.to_json(path, mode="a", lines=True, orient="records")
-        df3.to_json(path, mode="a", lines=True, orient="records")
-        df4.to_json(path, mode="a", lines=True, orient="records")
-
-        # Read path file
-        result = read_json(path, lines=True)
-        tm.assert_frame_equal(result, expected)
+    path = str(temp_file)
+    # Save dataframes to the same file
+    df1.to_json(path, mode="a", lines=True, orient="records")
+    df2.to_json(path, mode="a", lines=True, orient="records")
+    df3.to_json(path, mode="a", lines=True, orient="records")
+    df4.to_json(path, mode="a", lines=True, orient="records")
+
+    # Read path file
+    result = read_json(path, lines=True)
+    tm.assert_frame_equal(result, expected)
 
 
-def test_to_json_append_output_different_columns_reordered():
+def test_to_json_append_output_different_columns_reordered(temp_file):
     # GH 35849
     # Testing that resulting output reads in as expected.
     # Testing specific result column order.
@@ -530,13 +530,13 @@ def test_to_json_append_output_different_columns_reordered():
             "col1": [None, None, None, None, 3, 4, 1, 2],
         }
     ).astype({"col4": "float"})
-    with tm.ensure_clean("test.json") as path:
-        # Save dataframes to the same file
-        df4.to_json(path, mode="a", lines=True, orient="records")
-        df3.to_json(path, mode="a", lines=True, orient="records")
-        df2.to_json(path, mode="a", lines=True, orient="records")
-        df1.to_json(path, mode="a", lines=True, orient="records")
-
-        # Read path file
-        result = read_json(path, lines=True)
-        tm.assert_frame_equal(result, expected)
+    path = str(temp_file)
+    # Save dataframes to the same file
+    df4.to_json(path, mode="a", lines=True, orient="records")
+    df3.to_json(path, mode="a", lines=True, orient="records")
+    df2.to_json(path, mode="a", lines=True, orient="records")
+    df1.to_json(path, mode="a", lines=True, orient="records")
+
+    # Read path file
+    result = read_json(path, lines=True)
+    tm.assert_frame_equal(result, expected)
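
Note on the pattern: every test in this diff now takes a temp_file fixture and builds its path with str(temp_file), replacing the tm.ensure_clean("test.json") context manager. The commit does not show the fixture itself, so the definition below is only a minimal sketch of what such a fixture could look like, assuming it returns a unique, pre-created file under pytest's built-in tmp_path directory; the actual conftest definition in pandas may differ.

import uuid

import pytest


@pytest.fixture
def temp_file(tmp_path):
    # Hypothetical sketch, not the pandas conftest definition: create a
    # uniquely named file inside pytest's per-test tmp_path directory.
    # tmp_path is removed automatically after the test, so no explicit
    # cleanup (the job tm.ensure_clean used to do) is required.
    file_path = tmp_path / str(uuid.uuid4())
    file_path.touch()
    return file_path

Because tmp_path is unique per test and cleaned up by pytest, the with-block is no longer needed, which is why the test bodies above are dedented by one level.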
