Skip to content

Commit 3bbcf7a

Browse files
committed
reformat
1 parent cfe54bd commit 3bbcf7a

File tree

9 files changed

+280
-321
lines changed

9 files changed

+280
-321
lines changed

pandas/conftest.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2116,3 +2116,9 @@ def temp_file(tmp_path):
21162116
file_path = tmp_path / str(uuid.uuid4())
21172117
file_path.touch()
21182118
return file_path
2119+
2120+
2121+
@pytest.fixture(scope="session")
2122+
def monkeysession():
2123+
with pytest.MonkeyPatch.context() as mp:
2124+
yield mp

pandas/tests/io/conftest.py

Lines changed: 46 additions & 95 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,7 @@
1-
import shlex
2-
import subprocess
3-
import time
41
import uuid
52

63
import pytest
74

8-
from pandas.compat import (
9-
is_ci_environment,
10-
is_platform_arm,
11-
is_platform_mac,
12-
is_platform_windows,
13-
)
145
import pandas.util._test_decorators as td
156

167
import pandas.io.common as icom
@@ -50,93 +41,62 @@ def xml_file(datapath):
5041
return datapath("io", "data", "xml", "books.xml")
5142

5243

53-
@pytest.fixture
54-
def s3_base(worker_id, monkeypatch):
55-
"""
56-
Fixture for mocking S3 interaction.
44+
@pytest.fixture(scope="session")
45+
def aws_credentials(monkeysession):
46+
"""Mocked AWS Credentials for moto."""
47+
monkeysession.setenv("AWS_ACCESS_KEY_ID", "testing")
48+
monkeysession.setenv("AWS_SECRET_ACCESS_KEY", "testing")
49+
monkeysession.setenv("AWS_SECURITY_TOKEN", "testing")
50+
monkeysession.setenv("AWS_SESSION_TOKEN", "testing")
51+
monkeysession.setenv("AWS_DEFAULT_REGION", "us-east-1")
5752

58-
Sets up moto server in separate process locally
59-
Return url for motoserver/moto CI service
60-
"""
61-
pytest.importorskip("s3fs")
62-
pytest.importorskip("boto3")
63-
64-
# temporary workaround as moto fails for botocore >= 1.11 otherwise,
65-
# see https://github.com/spulec/moto/issues/1924 & 1952
66-
monkeypatch.setenv("AWS_ACCESS_KEY_ID", "foobar_key")
67-
monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "foobar_secret")
68-
if is_ci_environment():
69-
if is_platform_arm() or is_platform_mac() or is_platform_windows():
70-
# NOT RUN on Windows/macOS, only Ubuntu
71-
# - subprocess in CI can cause timeouts
72-
# - GitHub Actions do not support
73-
# container services for the above OSs
74-
pytest.skip(
75-
"S3 tests do not have a corresponding service on "
76-
"Windows or macOS platforms"
77-
)
78-
else:
79-
# set in .github/workflows/unit-tests.yml
80-
yield "http://localhost:5000"
81-
else:
82-
requests = pytest.importorskip("requests")
83-
pytest.importorskip("moto")
84-
pytest.importorskip("flask") # server mode needs flask too
85-
86-
# Launching moto in server mode, i.e., as a separate process
87-
# with an S3 endpoint on localhost
88-
89-
worker_id = "5" if worker_id == "master" else worker_id.lstrip("gw")
90-
endpoint_port = f"555{worker_id}"
91-
endpoint_uri = f"http://127.0.0.1:{endpoint_port}/"
92-
93-
# pipe to null to avoid logging in terminal
94-
with subprocess.Popen(
95-
shlex.split(f"moto_server s3 -p {endpoint_port}"),
96-
stdout=subprocess.DEVNULL,
97-
stderr=subprocess.DEVNULL,
98-
) as proc:
99-
timeout = 5
100-
while timeout > 0:
101-
try:
102-
# OK to go once server is accepting connections
103-
r = requests.get(endpoint_uri)
104-
if r.ok:
105-
break
106-
except Exception:
107-
pass
108-
timeout -= 0.1
109-
time.sleep(0.1)
110-
yield endpoint_uri
111-
112-
proc.terminate()
53+
54+
@pytest.fixture(scope="session")
55+
def moto_server(aws_credentials):
56+
moto_server = pytest.importorskip("moto.server")
57+
server = moto_server.ThreadedMotoServer(port=0)
58+
server.start()
59+
host, port = server.get_host_and_port()
60+
yield f"http://{host}:{port}"
61+
server.stop()
11362

11463

11564
@pytest.fixture
116-
def s3so(s3_base):
117-
return {"client_kwargs": {"endpoint_url": s3_base}}
65+
def moto_s3_resource(moto_server):
66+
boto3 = pytest.importorskip("boto3")
67+
s3 = boto3.resource("s3", endpoint_url=moto_server)
68+
return s3
11869

11970

12071
@pytest.fixture
121-
def s3_resource(s3_base):
122-
import boto3
123-
124-
s3 = boto3.resource("s3", endpoint_url=s3_base)
125-
return s3
72+
def s3_bucket_public(moto_s3_resource):
73+
"""
74+
Create a public S3 bucket using moto.
75+
"""
76+
bucket_name = f"pandas-test-{uuid.uuid4()}"
77+
bucket = moto_s3_resource.Bucket(bucket_name)
78+
bucket.create(ACL="public-read")
79+
yield bucket
80+
bucket.objects.delete()
81+
bucket.delete()
12682

12783

12884
@pytest.fixture
129-
def s3_public_bucket(s3_resource):
130-
bucket = s3_resource.Bucket(f"pandas-test-{uuid.uuid4()}")
131-
bucket.create()
85+
def s3_bucket_private(moto_s3_resource):
86+
"""
87+
Create a private S3 bucket using moto.
88+
"""
89+
bucket_name = f"cant_get_it-{uuid.uuid4()}"
90+
bucket = moto_s3_resource.Bucket(bucket_name)
91+
bucket.create(ACL="private")
13292
yield bucket
13393
bucket.objects.delete()
13494
bucket.delete()
13595

13696

13797
@pytest.fixture
138-
def s3_public_bucket_with_data(
139-
s3_public_bucket, tips_file, jsonl_file, feather_file, xml_file
98+
def s3_bucket_public_with_data(
99+
s3_bucket_public, tips_file, jsonl_file, feather_file, xml_file
140100
):
141101
"""
142102
The following datasets
@@ -158,22 +118,13 @@ def s3_public_bucket_with_data(
158118
]
159119
for s3_key, file_name in test_s3_files:
160120
with open(file_name, "rb") as f:
161-
s3_public_bucket.put_object(Key=s3_key, Body=f)
162-
return s3_public_bucket
163-
164-
165-
@pytest.fixture
166-
def s3_private_bucket(s3_resource):
167-
bucket = s3_resource.Bucket(f"cant_get_it-{uuid.uuid4()}")
168-
bucket.create(ACL="private")
169-
yield bucket
170-
bucket.objects.delete()
171-
bucket.delete()
121+
s3_bucket_public.put_object(Key=s3_key, Body=f)
122+
return s3_bucket_public
172123

173124

174125
@pytest.fixture
175-
def s3_private_bucket_with_data(
176-
s3_private_bucket, tips_file, jsonl_file, feather_file, xml_file
126+
def s3_bucket_private_with_data(
127+
s3_bucket_private, tips_file, jsonl_file, feather_file, xml_file
177128
):
178129
"""
179130
The following datasets
@@ -195,8 +146,8 @@ def s3_private_bucket_with_data(
195146
]
196147
for s3_key, file_name in test_s3_files:
197148
with open(file_name, "rb") as f:
198-
s3_private_bucket.put_object(Key=s3_key, Body=f)
199-
return s3_private_bucket
149+
s3_bucket_private.put_object(Key=s3_key, Body=f)
150+
return s3_bucket_private
200151

201152

202153
_compression_formats_params = [

pandas/tests/io/excel/test_style.py

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -318,16 +318,21 @@ def custom_converter(css):
318318

319319
@pytest.mark.single_cpu
320320
@td.skip_if_not_us_locale
321-
def test_styler_to_s3(s3_public_bucket, s3so):
321+
def test_styler_to_s3(s3_bucket_public, s3so):
322322
# GH#46381
323-
324-
mock_bucket_name, target_file = s3_public_bucket.name, "test.xlsx"
323+
mock_bucket_name = s3_bucket_public.name
324+
target_file = f"{uuid.uuid4()}.xlsx"
325325
df = DataFrame({"x": [1, 2, 3], "y": [2, 4, 6]})
326+
s3so = {
327+
"client_kwargs": {
328+
"endpoint_url": s3_bucket_public.meta.client.meta.endpoint_url
329+
}
330+
}
326331
styler = df.style.set_sticky(axis="index")
327332
styler.to_excel(f"s3://{mock_bucket_name}/{target_file}", storage_options=s3so)
328333
timeout = 5
329334
while True:
330-
if target_file in (obj.key for obj in s3_public_bucket.objects.all()):
335+
if target_file in (obj.key for obj in s3_bucket_public.objects.all()):
331336
break
332337
time.sleep(0.1)
333338
timeout -= 0.1

pandas/tests/io/json/test_compression.py

Lines changed: 10 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
BytesIO,
33
StringIO,
44
)
5+
import uuid
56

67
import pytest
78

@@ -42,17 +43,23 @@ def test_read_zipped_json(datapath):
4243
@td.skip_if_not_us_locale
4344
@pytest.mark.single_cpu
4445
@pytest.mark.network
45-
def test_with_s3_url(compression, s3_public_bucket, s3so):
46+
def test_with_s3_url(compression, s3_bucket_public):
4647
# Bucket created in tests/io/conftest.py
4748
df = pd.read_json(StringIO('{"a": [1, 2, 3], "b": [4, 5, 6]}'))
4849

50+
key = f"{uuid.uuid4()}.json"
4951
with tm.ensure_clean() as path:
5052
df.to_json(path, compression=compression)
5153
with open(path, "rb") as f:
52-
s3_public_bucket.put_object(Key="test-1", Body=f)
54+
s3_bucket_public.put_object(Key=key, Body=f)
5355

56+
s3so = {
57+
"client_kwargs": {
58+
"endpoint_url": s3_bucket_public.meta.client.meta.endpoint_url
59+
}
60+
}
5461
roundtripped_df = pd.read_json(
55-
f"s3://{s3_public_bucket.name}/test-1",
62+
f"s3://{s3_bucket_public.name}/{key}",
5663
compression=compression,
5764
storage_options=s3so,
5865
)

pandas/tests/io/json/test_pandas.py

Lines changed: 18 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
import os
99
import sys
1010
import time
11+
import uuid
1112

1213
import numpy as np
1314
import pytest
@@ -1411,11 +1412,17 @@ def test_read_inline_jsonl(self):
14111412
@pytest.mark.single_cpu
14121413
@pytest.mark.network
14131414
@td.skip_if_not_us_locale
1414-
def test_read_s3_jsonl(self, s3_public_bucket_with_data, s3so):
1415+
def test_read_s3_jsonl(self, s3_bucket_public_with_data):
14151416
# GH17200
14161417

1418+
s3so = {
1419+
"client_kwargs": {
1420+
"endpoint_url": s3_bucket_public_with_data.meta.client.meta.endpoint_url
1421+
}
1422+
}
1423+
14171424
result = read_json(
1418-
f"s3n://{s3_public_bucket_with_data.name}/items.jsonl",
1425+
f"s3n://{s3_bucket_public_with_data.name}/items.jsonl",
14191426
lines=True,
14201427
storage_options=s3so,
14211428
)
@@ -2011,14 +2018,20 @@ def test_json_multiindex(self):
20112018

20122019
@pytest.mark.single_cpu
20132020
@pytest.mark.network
2014-
def test_to_s3(self, s3_public_bucket, s3so):
2021+
def test_to_s3(self, s3_bucket_public):
20152022
# GH 28375
2016-
mock_bucket_name, target_file = s3_public_bucket.name, "test.json"
2023+
mock_bucket_name = s3_bucket_public.name
2024+
target_file = f"{uuid.uuid4()}.json"
20172025
df = DataFrame({"x": [1, 2, 3], "y": [2, 4, 6]})
2026+
s3so = {
2027+
"client_kwargs": {
2028+
"endpoint_url": s3_bucket_public.meta.client.meta.endpoint_url
2029+
}
2030+
}
20182031
df.to_json(f"s3://{mock_bucket_name}/{target_file}", storage_options=s3so)
20192032
timeout = 5
20202033
while True:
2021-
if target_file in (obj.key for obj in s3_public_bucket.objects.all()):
2034+
if target_file in (obj.key for obj in s3_bucket_public.objects.all()):
20222035
break
20232036
time.sleep(0.1)
20242037
timeout -= 0.1

0 commit comments

Comments
 (0)