Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion asyncprawcore/requestor.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,4 +121,4 @@ async def request(self, *args: Any, timeout: float | None = None, **kwargs: Any)
except ResponseException as exc:
raise exc
except Exception as exc: # noqa: BLE001
raise RequestException(exc, args, kwargs) from None
raise RequestException(exc, args, kwargs) from exc
55 changes: 37 additions & 18 deletions asyncprawcore/sessions.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,14 +153,13 @@ def _log_request(
log.debug("Params: %s", pformat(params))

@staticmethod
def _preprocess_dict(data: dict[str, object] | None) -> dict[str, object]:
def _preprocess_dict(data: dict[str, object]) -> dict[str, object]:
new_data = {}
if data:
for key, value in data.items():
if isinstance(value, bool):
new_data[key] = str(value).lower()
elif value is not None:
new_data[key] = str(value) if not isinstance(value, str) else value
for key, value in data.items():
if isinstance(value, bool):
new_data[key] = str(value).lower()
elif value is not None:
new_data[key] = str(value) if not isinstance(value, str) else value
return new_data

@property
Expand Down Expand Up @@ -197,7 +196,6 @@ async def _do_retry(
self,
*,
data: list[tuple[str, object]] | None,
files: dict[str, BinaryIO | TextIO] | None,
json: dict[str, object] | None,
method: str,
params: dict[str, object],
Expand All @@ -209,7 +207,6 @@ async def _do_retry(
log.warning("Retrying due to %s: %s %s", status, method, url)
return await self._request_with_retries(
data=data,
files=files,
json=json,
method=method,
params=params,
Expand All @@ -223,7 +220,6 @@ async def _do_retry(
async def _make_request(
self,
data: list[tuple[str, object]] | None,
files: dict[str, BinaryIO | TextIO] | None,
json: dict[str, object] | None,
method: str,
params: dict[str, object],
Expand All @@ -237,7 +233,6 @@ async def _make_request(
url,
allow_redirects=False,
data=data,
files=files,
json=json,
params=params,
timeout=timeout,
Expand All @@ -253,7 +248,36 @@ async def _make_request(
)
yield response

def _preprocess_params(self, params: dict[str, object] | None) -> dict[str, object]:
def _preprocess_data(
self,
data: dict[str, object],
files: dict[str, BinaryIO | TextIO] | None,
) -> dict[str, object]:
"""Preprocess data and files before request.

This is to convert requests that are formatted for the ``requests`` package to
be compatible with the ``aiohttp`` package. The motivation for this is so that
``praw`` and ``asyncpraw`` can remain as similar as possible, thus making
contributions to ``asyncpraw`` simpler.

This method does the following:

- Removes keys that have a value of ``None`` from ``data``.
- Moves ``files`` into ``data``.

:param data: Dictionary, bytes, or file-like object to send in the body of the
request.
:param files: Dictionary, mapping ``filename`` to file-like object to add to
``data``.

"""
if isinstance(data, dict):
data = self._preprocess_dict(data)
if files is not None:
data.update(files)
return data

def _preprocess_params(self, params: dict[str, object]) -> dict[str, object]:
"""Preprocess params before request.

This is to convert requests that are formatted for the ``requests`` package to
Expand All @@ -275,7 +299,6 @@ async def _request_with_retries( # noqa: PLR0912
self,
*,
data: list[tuple[str, object]] | None,
files: dict[str, BinaryIO | TextIO] | None,
json: dict[str, object] | None,
method: str,
params: dict[str, object],
Expand All @@ -292,7 +315,6 @@ async def _request_with_retries( # noqa: PLR0912
try:
async with self._make_request(
data=data,
files=files,
json=json,
method=method,
params=params,
Expand All @@ -310,7 +332,6 @@ async def _request_with_retries( # noqa: PLR0912
if retry_status is not None and retry_strategy_state.should_retry_on_failure():
return await self._do_retry(
data=data,
files=files,
json=json,
method=method,
params=params,
Expand Down Expand Up @@ -340,7 +361,6 @@ async def _request_with_retries( # noqa: PLR0912
):
return await self._do_retry(
data=data,
files=files,
json=json,
method=method,
params=params,
Expand Down Expand Up @@ -393,7 +413,7 @@ async def request(
params = self._preprocess_params(deepcopy(params) or {})
params["raw_json"] = "1"
if isinstance(data, dict):
data = self._preprocess_dict(deepcopy(data))
data = self._preprocess_data(deepcopy(data), files)
data["api_type"] = "json"
data_list = sorted(data.items())
else:
Expand All @@ -404,7 +424,6 @@ async def request(
url = urljoin(self._requestor.oauth_url, path)
return await self._request_with_retries(
data=data_list,
files=files,
json=json,
method=method,
params=params,
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ test = [
"pytest>=8.3.4",
"pytest-asyncio==1.2.*",
"pytest-vcr==1.*",
"vcrpy==7.0.0"
"vcrpy==4.3.1"
]
type = [
"aiohttp<4",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
"close"
],
"User-Agent": [
"asyncprawcore:test (by u/Lil_SpazJoekp) asyncprawcore/2.3.1.dev0"
"asyncprawcore:test (by u/Lil_SpazJoekp) asyncprawcore/3.0.3.dev0"
]
},
"method": "POST",
Expand All @@ -42,7 +42,7 @@
"bytes"
],
"Cache-Control": [
"private, max-age=3600"
"private, s-maxage=0, max-age=0, must-revalidate, no-store"
],
"Connection": [
"close"
Expand All @@ -54,7 +54,10 @@
"application/json; charset=UTF-8"
],
"Date": [
"Mon, 27 Nov 2023 01:54:04 GMT"
"Sat, 04 Oct 2025 21:26:58 GMT"
],
"Expires": [
"-1"
],
"NEL": [
"{\"report_to\": \"w3-reporting-nel\", \"max_age\": 14400, \"include_subdomains\": false, \"success_fraction\": 1.0, \"failure_fraction\": 1.0}"
Expand All @@ -66,7 +69,7 @@
"snooserv"
],
"Set-Cookie": [
"edgebucket=47Z4cbkEIRoMLAEduI; Domain=reddit.com; Max-Age=63071999; Path=/; secure"
"edgebucket=LcttArzS78Dk2wvBkG; Domain=reddit.com; Max-Age=63071999; Path=/; secure"
],
"Strict-Transport-Security": [
"max-age=31536000; includeSubdomains"
Expand Down Expand Up @@ -118,10 +121,10 @@
"bearer <ACCESS_TOKEN>"
],
"Cookie": [
"edgebucket=47Z4cbkEIRoMLAEduI"
"edgebucket=LcttArzS78Dk2wvBkG"
],
"User-Agent": [
"asyncprawcore:test (by u/Lil_SpazJoekp) asyncprawcore/2.3.1.dev0"
"asyncprawcore:test (by u/Lil_SpazJoekp) asyncprawcore/3.0.3.dev0"
]
},
"method": "POST",
Expand All @@ -148,7 +151,7 @@
"application/json; charset=UTF-8"
],
"Date": [
"Mon, 27 Nov 2023 01:54:04 GMT"
"Sat, 04 Oct 2025 21:26:58 GMT"
],
"Expires": [
"-1"
Expand All @@ -163,9 +166,9 @@
"snooserv"
],
"Set-Cookie": [
"loid=00000000003ebyblla.2.1552433321352.Z0FBQUFBQmxZX2E4NVJaUjQzYm82ZUE0aEpxSnJyMlZEc3NNc21hdVl6NGN1MU9ZWnRaLW1CNnBsVmQtdHdiTFdBdnVlX3M1V3VfemlSdU9VMTlkMkdURzA4bmIzSXh4MFU1Z1pFQXk3ajhrcEU3dzEySVVGN2gwZkdGX29UbUJlNkx0bGN5bmJPMGY; Domain=reddit.com; Max-Age=63071999; Path=/; expires=Wed, 26-Nov-2025 01:54:04 GMT; secure",
"redesign_optout=true; Domain=reddit.com; Max-Age=94607999; Path=/; expires=Thu, 26-Nov-2026 01:54:04 GMT; secure",
"session_tracker=rkcihjnenmqghcdkif.0.1701050044379.Z0FBQUFBQmxZX2E4SG45MHJFN3FZenBZaWZmNV9IRVNCUHg2VWdRdGdDdzdvbUpheHdvanU5d1lFODUyUUh5R3JlTUUxd3p1MWdNcmZHU3dwNVMxVkphY2ZpZFFXLTFwSGtwQktPY1hNMERTaTJSeFVUM1lSTzY4UVhUWktPMUR5UG5mSmF5NVRydHo; Domain=reddit.com; Max-Age=7199; Path=/; expires=Mon, 27-Nov-2023 03:54:04 GMT; secure"
"loid=00000000003ebyblla.2.1552433321352.Z0FBQUFBQm80WkVpVFlkajgzWlBlb3VRQ29Zc09wUTJtSi1vQTZiWTNENFhpSVhCeW9SeXlJd2wwQmU3dW91d19qa21ReGhnLUJOaVhsNGlXb0RoWnNraFRCMVAyR3NsMktSWG4zbWJPMTlaeHhLenBHXzhLQlA3MGo5NHVYWUhkMXlmUWxKZUQtdGs; Domain=reddit.com; Max-Age=63071999; Path=/; expires=Mon, 04-Oct-2027 21:26:58 GMT; secure",
"redesign_optout=true; Domain=reddit.com; Max-Age=94607999; Path=/; expires=Tue, 03-Oct-2028 21:26:58 GMT; secure",
"session_tracker=dmmnbkoichealejbmc.0.1759613218619.Z0FBQUFBQm80WkVpTWszMTFMczlvQWpMdEd6LUxqYTNFQUZOSHBSaDVEdU9WRFhuLUJZbjNCMEpCN0xNU1NteWV6NXFVNkhXajFRNlpPenVXeXlzdF9DdjQ2Z1ZJMWdHUjZDMHdDNGdXV25ZZDFyejlkRzlOWU5Ud05iZ01FOU5YRjYwMUhtWmtLNXA; Domain=reddit.com; Max-Age=7199; Path=/; expires=Sat, 04-Oct-2025 23:26:58 GMT; secure"
],
"Strict-Transport-Security": [
"max-age=31536000; includeSubdomains"
Expand All @@ -186,13 +189,13 @@
"1; mode=block"
],
"x-ratelimit-remaining": [
"992"
"997.0"
],
"x-ratelimit-reset": [
"356"
"181"
],
"x-ratelimit-used": [
"4"
"3"
],
"x-ua-compatible": [
"IE=edge"
Expand All @@ -206,6 +209,6 @@
}
}
],
"recorded_at": "2023-11-27T01:54:04",
"recorded_at": "2025-10-04T21:26:58",
"version": 1
}
3 changes: 2 additions & 1 deletion tests/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,10 @@ def filter_access_token(response): # pragma: no cover
if "api/v1/access_token" not in request_uri or response["status"]["code"] != 200:
return response
body = response["body"]["string"].decode()
json_body = json.loads(body)
for token_key in ["access", "refresh"]:
try:
token = json.loads(body)[f"{token_key}_token"]
token = json_body[f"{token_key}_token"]
except (KeyError, TypeError, ValueError):
continue
response["body"]["string"] = response["body"]["string"].replace(
Expand Down
20 changes: 15 additions & 5 deletions uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.