
Commit 6a99847

docs: Use and enforce imperative mood (#437)
- Currently, our docstrings use a mix of imperative and indicative moods, more or less at random.
- PEP 257 recommends the imperative mood for the first line of a docstring, which is also enforced by the Ruff rule [D401](https://docs.astral.sh/ruff/rules/non-imperative-mood/#non-imperative-mood-d401), so I believe the imperative form is more suitable for documentation purposes.
- In most cases, I have simply changed the verb form; a few cases required more rewording.
- I updated the pyproject configuration to enforce it.
1 parent 54aa579 commit 6a99847
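
For context, this is the kind of change D401 enforces; a minimal before/after sketch (the function is illustrative, not from this commit):

```python
# Before: indicative mood, flagged by D401.
def fetch_items() -> list:
    """Fetches items from the store."""


# After: imperative mood, passes D401.
def fetch_items() -> list:
    """Fetch items from the store."""
```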

File tree

- pyproject.toml
- src/apify/_actor.py
- src/apify/_utils.py
- src/apify/scrapy/_async_thread.py
- src/apify/storages/_request_list.py

5 files changed: +30 −15 lines

pyproject.toml

Lines changed: 3 additions & 3 deletions
```diff
@@ -91,6 +91,9 @@ ignore = [
     "D100", # Missing docstring in public module
     "D104", # Missing docstring in public package
     "D107", # Missing docstring in `__init__`
+    "D203", # One blank line required before class docstring
+    "D213", # Multi-line docstring summary should start at the second line
+    "D413", # Missing blank line after last section
     "EM", # flake8-errmsg
     "G004", # Logging statement uses f-string
     "ISC001", # This rule may cause conflicts when used with the formatter
@@ -162,9 +165,6 @@ runtime-evaluated-base-classes = [
 [tool.ruff.lint.flake8-builtins]
 builtins-ignorelist = ["id"]
 
-[tool.ruff.lint.pydocstyle]
-convention = "google"
-
 [tool.ruff.lint.isort]
 known-local-folder = ["apify"]
 known-first-party = ["apify_client", "apify_shared", "crawlee"]
```
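
Removing `convention = "google"` is what actually turns D401 on: the google preset masks a number of pydocstyle rules, D401 among them, so dropping it enables the full rule set, and the three layout rules the project still does not want (D203, D213, D413) now have to be listed in `ignore` explicitly. A sketch of a docstring that satisfies the resulting configuration (the class and method are illustrative, not from this diff):

```python
class ExampleClient:  # illustrative name, not part of the SDK
    """Manage example resources."""  # no blank line above: D211 applies since D203 is ignored

    def push_item(self, item: dict) -> None:
        """Push a single item to the store.

        The summary sits on the first line (D212); D213, which demands the
        opposite layout, is ignored, and ignoring D413 permits omitting the
        trailing blank line after the last section.

        Args:
            item: The item to store.
        """
```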

src/apify/_actor.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -1138,7 +1138,7 @@ async def create_proxy_configuration(
         return proxy_configuration
 
     def _get_default_exit_process(self) -> bool:
-        """Returns False for IPython, Pytest, and Scrapy environments, True otherwise."""
+        """Return False for IPython, Pytest, and Scrapy environments, True otherwise."""
         if is_running_in_ipython():
             self.log.debug('Running in IPython, setting default `exit_process` to False.')
             return False
```
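
The `is_running_in_ipython()` helper used here lives in `src/apify/_utils.py` (the next file). A common way to implement such a check, sketched here and not necessarily the SDK's exact code:

```python
def is_running_in_ipython() -> bool:
    """Detect an active IPython session (sketch; the SDK's implementation may differ)."""
    try:
        from IPython import get_ipython  # available only when IPython is installed
    except ImportError:
        return False
    return get_ipython() is not None  # returns None outside an interactive IPython shell
```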

src/apify/_utils.py

Lines changed: 16 additions & 6 deletions
```diff
@@ -31,10 +31,16 @@ def is_running_in_ipython() -> bool:
 
 
 def docs_group(group_name: GroupName) -> Callable:  # noqa: ARG001
-    """Decorator to mark symbols for rendering and grouping in documentation.
+    """Mark a symbol for rendering and grouping in documentation.
 
-    This decorator is used purely for documentation purposes and does not alter the behavior
+    This decorator is used solely for documentation purposes and does not modify the behavior
     of the decorated callable.
+
+    Args:
+        group_name: The documentation group to which the symbol belongs.
+
+    Returns:
+        The original callable without modification.
     """
 
     def wrapper(func: Callable) -> Callable:
@@ -44,12 +50,16 @@ def wrapper(func: Callable) -> Callable:
 
 
 def docs_name(symbol_name: str) -> Callable:  # noqa: ARG001
-    """Decorator for renaming symbols in documentation.
+    """Rename a symbol for documentation rendering.
 
-    This changes the rendered name of the symbol only in the rendered web documentation.
+    This decorator modifies only the displayed name of the symbol in the generated documentation
+    and does not affect its runtime behavior.
 
-    This decorator is used purely for documentation purposes and does not alter the behavior
-    of the decorated callable.
+    Args:
+        symbol_name: The name to be used in the documentation.
+
+    Returns:
+        The original callable without modification.
     """
 
     def wrapper(func: Callable) -> Callable:
```
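
As the new `Returns:` sections spell out, both decorators hand the callable back untouched; only the documentation tooling reads them. A minimal sketch of the pattern, with `str` standing in for the SDK's `GroupName` type:

```python
from collections.abc import Callable


def docs_group(group_name: str) -> Callable:  # `str` stands in for the SDK's `GroupName`
    """Mark a symbol for rendering and grouping in documentation."""

    def wrapper(func: Callable) -> Callable:
        return func  # no behavioral change; the argument only matters to the docs tooling

    return wrapper


@docs_group('Classes')  # illustrative group name
def example() -> None:
    """Do nothing; exist only to show the decorator applied."""
```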

src/apify/scrapy/_async_thread.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -113,7 +113,7 @@ async def _shutdown_tasks(self) -> None:
         await asyncio.gather(*tasks, return_exceptions=True)
 
     def _force_exit_event_loop(self) -> None:
-        """Forcefully shut down the event loop and its thread."""
+        """Shut down the event loop and its thread forcefully."""
         try:
             logger.info('Forced shutdown of the event loop and its thread...')
             self._eventloop.call_soon_threadsafe(self._eventloop.stop)
```
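
For reference, stopping a loop that runs in another thread has to go through `call_soon_threadsafe`, which is exactly the pattern `_force_exit_event_loop` uses. A standalone sketch:

```python
import asyncio
import threading

loop = asyncio.new_event_loop()
thread = threading.Thread(target=loop.run_forever, daemon=True)
thread.start()

# Calling loop.stop() directly from this thread would not be thread-safe;
# call_soon_threadsafe schedules the stop on the loop's own thread.
loop.call_soon_threadsafe(loop.stop)
thread.join(timeout=5)
```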

src/apify/storages/_request_list.py

Lines changed: 9 additions & 4 deletions
```diff
@@ -51,7 +51,7 @@ async def open(
         request_list_sources_input: list[dict[str, Any]] | None = None,
         http_client: HttpClient | None = None,
     ) -> RequestList:
-        """Creates RequestList from Actor input requestListSources.
+        """Initialize a new instance from request list source input.
 
         Args:
             name: Name of the returned RequestList.
@@ -108,9 +108,10 @@ def _create_requests_from_input(simple_url_inputs: list[_SimpleUrlInput]) -> list[Request]:
 
     @staticmethod
     async def _fetch_requests_from_url(
-        remote_url_requests_inputs: list[_RequestsFromUrlInput], http_client: HttpClient
+        remote_url_requests_inputs: list[_RequestsFromUrlInput],
+        http_client: HttpClient,
     ) -> list[Request]:
-        """Crete list of requests from url.
+        """Create list of requests from url.
 
         Send GET requests to urls defined in each requests_from_url of remote_url_requests_inputs. Run extracting
         callback on each response body and use URL_NO_COMMAS_REGEX regex to find all links. Create list of Requests from
@@ -119,7 +120,11 @@ async def _fetch_requests_from_url(
         created_requests: list[Request] = []
 
         def create_requests_from_response(request_input: _RequestsFromUrlInput, task: Task) -> None:
-            """Callback to scrape response body with regexp and create Requests from matches."""
+            """Extract links from response body and use them to create `Request` objects.
+
+            Use the regular expression to find all matching links in the response body, then create `Request`
+            objects from these links and the provided input attributes.
+            """
             matches = re.finditer(URL_NO_COMMAS_REGEX, task.result().read().decode('utf-8'))
             created_requests.extend(
                 [
```
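
The renamed callback boils down to regex-driven link extraction. A self-contained sketch with a stand-in pattern (the real `URL_NO_COMMAS_REGEX` is defined elsewhere in the codebase and is more permissive):

```python
import re

# Stand-in for URL_NO_COMMAS_REGEX; illustrative only.
URL_PATTERN = re.compile(r'https?://[^\s,]+')


def extract_links(body: bytes) -> list[str]:
    """Find all links in a response body, mirroring create_requests_from_response."""
    return [m.group(0) for m in re.finditer(URL_PATTERN, body.decode('utf-8'))]


print(extract_links(b'See https://example.com and https://apify.com, too.'))
# -> ['https://example.com', 'https://apify.com']
```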
