Commit a848d2b

Use temp_file directly instead of converting to str

1 parent 17d3c68

6 files changed, 19 insertions(+), 29 deletions(-)
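Why the str() round-trips can go: the builtin open() accepts os.PathLike objects (PEP 519), and the pandas readers and writers touched in this commit (read_csv, to_csv, to_hdf, read_hdf) take path objects as well, so the temp_file fixture value can be passed through unchanged. A minimal sketch of the pattern, assuming temp_file resolves to a pathlib.Path (the concrete path below is only illustrative):

from pathlib import Path

import pandas as pd

# Hypothetical stand-in for the temp_file fixture; assumed to be a pathlib.Path.
temp_file = Path("/tmp/example.csv")

# open() accepts os.PathLike directly (PEP 519), so no str() conversion is needed.
with open(temp_file, "w", encoding="utf-8") as f:
    f.write("a,b\n1,2\n")

# pandas I/O functions accept path objects as well.
df = pd.read_csv(temp_file)
df.to_csv(temp_file, index=False)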

pandas/tests/io/parser/common/test_chunksize.py

3 additions, 4 deletions

@@ -302,20 +302,19 @@ def test_read_csv_memory_growth_chunksize(temp_file, all_parsers):
     # as we iteratively process all chunks.
     parser = all_parsers
 
-    path = str(temp_file)
-    with open(path, "w", encoding="utf-8") as f:
+    with open(temp_file, "w", encoding="utf-8") as f:
         for i in range(1000):
             f.write(str(i) + "\n")
 
     if parser.engine == "pyarrow":
         msg = "The 'chunksize' option is not supported with the 'pyarrow' engine"
         with pytest.raises(ValueError, match=msg):
-            with parser.read_csv(path, chunksize=20) as result:
+            with parser.read_csv(temp_file, chunksize=20) as result:
                 for _ in result:
                     pass
         return
 
-    with parser.read_csv(path, chunksize=20) as result:
+    with parser.read_csv(temp_file, chunksize=20) as result:
         for _ in result:
             pass

pandas/tests/io/parser/common/test_iterator.py

2 additions, 3 deletions

@@ -146,11 +146,10 @@ def test_iteration_open_handle(temp_file, all_parsers):
     parser = all_parsers
     kwargs = {"header": None}
 
-    path = str(temp_file)
-    with open(path, "w", encoding="utf-8") as f:
+    with open(temp_file, "w", encoding="utf-8") as f:
         f.write("AAA\nBBB\nCCC\nDDD\nEEE\nFFF\nGGG")
 
-    with open(path, encoding="utf-8") as f:
+    with open(temp_file, encoding="utf-8") as f:
         for line in f:
             if "CCC" in line:
                 break

pandas/tests/io/parser/test_index_col.py

4 additions, 6 deletions

@@ -209,9 +209,8 @@ def test_no_multi_index_level_names_empty(temp_file, all_parsers):
         index=midx,
         columns=["x", "y", "z"],
     )
-    path = str(temp_file)
-    expected.to_csv(path)
-    result = parser.read_csv(path, index_col=[0, 1, 2])
+    expected.to_csv(temp_file)
+    result = parser.read_csv(temp_file, index_col=[0, 1, 2])
     tm.assert_frame_equal(result, expected)
 
 
@@ -252,11 +251,10 @@ def test_index_col_large_csv(temp_file, all_parsers, monkeypatch):
         }
     )
 
-    path = str(temp_file)
-    df.to_csv(path, index=False)
+    df.to_csv(temp_file, index=False)
     with monkeypatch.context() as m:
         m.setattr("pandas.core.algorithms._MINIMUM_COMP_ARR_LEN", ARR_LEN)
-        result = parser.read_csv(path, index_col=[0])
+        result = parser.read_csv(temp_file, index_col=[0])
 
     tm.assert_frame_equal(result, df.set_index("a"))

pandas/tests/io/parser/test_python_parser_only.py

2 additions, 3 deletions

@@ -173,11 +173,10 @@ def test_decompression_regex_sep(
     module = pytest.importorskip(compression)
     klass = getattr(module, klass)
 
-    path = str(temp_file)
-    with klass(path, mode="wb") as tmp:
+    with klass(temp_file, mode="wb") as tmp:
         tmp.write(data)
 
-    result = parser.read_csv(path, sep="::", compression=compression)
+    result = parser.read_csv(temp_file, sep="::", compression=compression)
     tm.assert_frame_equal(result, expected)
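The decompression test relies on the same path-like support in the standard library: the compression file classes looked up via klass (for example bz2.BZ2File) accept path objects for their filename argument, so the fixture value needs no str() round-trip. A small sketch, again assuming temp_file is a pathlib.Path (the concrete path is hypothetical):

import bz2
from pathlib import Path

# Hypothetical stand-in for the temp_file fixture; assumed to be a pathlib.Path.
temp_file = Path("/tmp/example.bz2")

# bz2.BZ2File accepts str, bytes, or path-like objects as its filename,
# so the Path can be passed directly when writing the compressed data.
with bz2.BZ2File(temp_file, mode="wb") as fh:
    fh.write(b"a::b\n1::2\n")

# Reading back through bz2.open works the same way with a path object.
with bz2.open(temp_file, mode="rb") as fh:
    assert fh.read() == b"a::b\n1::2\n"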

pandas/tests/io/pytables/test_round_trip.py

4 additions, 6 deletions

@@ -28,11 +28,9 @@
 
 
 def test_conv_read_write(temp_file):
-    path = str(temp_file)
-
     def roundtrip(key, obj, **kwargs):
-        obj.to_hdf(path, key=key, **kwargs)
-        return read_hdf(path, key)
+        obj.to_hdf(temp_file, key=key, **kwargs)
+        return read_hdf(temp_file, key)
 
     o = Series(
         np.arange(10, dtype=np.float64), index=date_range("2020-01-01", periods=10)

@@ -51,8 +49,8 @@ def roundtrip(key, obj, **kwargs):
 
     # table
     df = DataFrame({"A": range(5), "B": range(5)})
-    df.to_hdf(path, key="table", append=True)
-    result = read_hdf(path, "table", where=["index>2"])
+    df.to_hdf(temp_file, key="table", append=True)
+    result = read_hdf(temp_file, "table", where=["index>2"])
     tm.assert_frame_equal(df[df.index > 2], result)

pandas/tests/io/test_sql.py

4 additions, 7 deletions

@@ -750,8 +750,7 @@ def postgresql_psycopg2_conn_types(postgresql_psycopg2_engine_types):
 @pytest.fixture
 def sqlite_str(temp_file):
     pytest.importorskip("sqlalchemy")
-    name = str(temp_file)
-    return f"sqlite:///{name}"
+    return f"sqlite:///{temp_file}"
 
 
 @pytest.fixture

@@ -822,8 +821,7 @@ def sqlite_adbc_conn(temp_file):
     pytest.importorskip("adbc_driver_sqlite")
     from adbc_driver_sqlite import dbapi
 
-    name = str(temp_file)
-    uri = f"file:{name}"
+    uri = f"file:{temp_file}"
     with dbapi.connect(uri) as conn:
         yield conn
         for view in get_all_views(conn):

@@ -2585,11 +2583,10 @@ def test_sql_open_close(temp_file, test_frame3):
     # Test if the IO in the database still work if the connection closed
     # between the writing and reading (as in many real situations).
 
-    name = str(temp_file)
-    with contextlib.closing(sqlite3.connect(name)) as conn:
+    with contextlib.closing(sqlite3.connect(temp_file)) as conn:
         assert sql.to_sql(test_frame3, "test_frame3_legacy", conn, index=False) == 4
 
-    with contextlib.closing(sqlite3.connect(name)) as conn:
+    with contextlib.closing(sqlite3.connect(temp_file)) as conn:
         result = sql.read_sql_query("SELECT * FROM test_frame3_legacy;", conn)
 
     tm.assert_frame_equal(test_frame3, result)
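For the SQL fixtures, two properties make the direct use equivalent to the old code: formatting a pathlib.Path in an f-string produces the same text as str(path), so the sqlite:/// and file: URIs are unchanged, and sqlite3.connect() accepts path-like objects directly. A small sketch under those assumptions (the path below is hypothetical):

import contextlib
import sqlite3
from pathlib import Path

# Hypothetical stand-in for the temp_file fixture; assumed to be a pathlib.Path.
temp_file = Path("/tmp/example.db")

# f-string interpolation of a Path yields its filesystem string, so the URI
# built from the Path matches the old str()-based version exactly.
assert f"sqlite:///{temp_file}" == f"sqlite:///{str(temp_file)}"

# sqlite3.connect() accepts path-like objects, no str() conversion needed.
with contextlib.closing(sqlite3.connect(temp_file)) as conn:
    conn.execute("CREATE TABLE IF NOT EXISTS t (x INTEGER)")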
