
Commit bf416c3

Finish updating ruff rules
- update ruff config
- update files for updated config
- ignores doc rules in CI for now (see #2)
1 parent e55bfed


14 files changed (+124, -111 lines)

.github/workflows/lint.yml

Lines changed: 3 additions & 3 deletions
@@ -10,12 +10,12 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - uses: chartboost/ruff-action@v1
+      - uses: astral-sh/ruff-action@v1
         with:
           args: 'format --check'
-      - uses: chartboost/ruff-action@v1
+      - uses: astral-sh/ruff-action@v1
         with:
-          args: 'check'
+          args: 'check --ignore "D" "DOC"'

   check:
     runs-on: ubuntu-latest

async_utils/_cpython_stuff.py

Lines changed: 0 additions & 13 deletions
@@ -19,9 +19,6 @@


 class _HashedSeq(list[Any]):
-    """This class guarantees that hash() will be called no more than once
-    per element. This is important because the lru_cache() will hash
-    the key multiple times on a cache miss."""

     __slots__ = ("hashvalue",)

@@ -45,16 +42,6 @@ def make_key(
     type: type[type] = type,  # noqa: A002
     len: Callable[[Sized], int] = len,  # noqa: A002
 ) -> Hashable:
-    """Make a cache key from optionally typed positional and keyword arguments
-    The key is constructed in a way that is flat as possible rather than
-    as a nested structure that would take more memory.
-    If there is only a single argument and its data type is known to cache
-    its hash value, then that argument is returned without a wrapper. This
-    saves space and improves lookup speed."""
-    # All of code below relies on kwds preserving the order input by the user.
-    # Formerly, we sorted() the kwds before looping. The new way is *much*
-    # faster; however, it means that f(x=1, y=2) will now be treated as a
-    # distinct call from f(y=2, x=1) which will be cached separately.
     key: tuple[Any, ...] = args
     if kwds:
         key += kwd_mark

async_utils/bg_loop.py

Lines changed: 9 additions & 7 deletions
@@ -36,19 +36,18 @@ def stop(self) -> None:
         self._loop.call_soon_threadsafe(self._loop.stop)

     def schedule(self, coro: _FutureLike[_T]) -> Future[_T]:
-        """Schedule a coroutine to run on the wrapped event loop"""
+        """Schedule a coroutine to run on the wrapped event loop."""
         return asyncio.run_coroutine_threadsafe(coro, self._loop)

     async def run(self, coro: _FutureLike[_T]) -> _T:
-        """Schedule a coroutine to run on the background loop,
-        awaiting it finishing."""
-
+        """Schedule and await a coroutine to run on the background loop."""
         future = asyncio.run_coroutine_threadsafe(coro, self._loop)
         return await asyncio.wrap_future(future)


 def run_forever(
-    loop: asyncio.AbstractEventLoop, use_eager_task_factory: bool, /
+    loop: asyncio.AbstractEventLoop, /, *,
+    use_eager_task_factory: bool = True,
 ) -> None:
     asyncio.set_event_loop(loop)
     if use_eager_task_factory:
@@ -87,11 +86,14 @@ def run_forever(
 def threaded_loop(
     *, use_eager_task_factory: bool = True
 ) -> Generator[LoopWrapper, None, None]:
-    """Starts an event loop on a background thread,
+    """Create and use a managed event loop in a backround thread.
+
+    Starts an event loop on a background thread,
     and yields an object with scheduling methods for interacting with
     the loop.

-    loop is scheduled for shutdown, and thread is joined at contextmanager exit"""
+    loop is scheduled for shutdown, and thread is joined at contextmanager exit
+    """
     loop = asyncio.new_event_loop()
     thread = None
     try:
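
To illustrate the API touched above, a minimal usage sketch; threaded_loop is treated as a context manager per its docstring, and work() is a hypothetical payload coroutine:

import asyncio

from async_utils.bg_loop import threaded_loop

async def work(x: int) -> int:
    # stand-in payload; any coroutine can be scheduled
    await asyncio.sleep(0.1)
    return x * 2

with threaded_loop() as wrapper:
    # schedule() hands back a concurrent.futures.Future,
    # usable from fully synchronous code
    fut = wrapper.schedule(work(21))
    print(fut.result())
# at exit, the loop is scheduled for shutdown and the thread is joined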

async_utils/bg_tasks.py

Lines changed: 5 additions & 7 deletions
@@ -27,19 +27,19 @@


 class BGTasks:
-    """An intentionally dumber task group"""
+    """An intentionally dumber task group."""

     def __init__(self, exit_timeout: float | None) -> None:
         self._tasks: set[asyncio.Task[Any]] = set()
-        self._exit_timeout: float | None = exit_timeout
+        self._etime: float | None = exit_timeout

     def create_task(
         self,
         coro: _CoroutineLike[_T],
         *,
         name: str | None = None,
         context: Context | None = None,
-    ) -> Any:
+    ) -> asyncio.Task[_T]:
         t = asyncio.create_task(coro)
         self._tasks.add(t)
         t.add_done_callback(self._tasks.discard)
@@ -48,11 +48,9 @@ def create_task(
     async def __aenter__(self: Self) -> Self:
         return self

-    async def __aexit__(self, *_dont_care: Any):
+    async def __aexit__(self, *_dont_care: object):
         while tsks := self._tasks.copy():
-            _done, _pending = await asyncio.wait(
-                tsks, timeout=self._exit_timeout
-            )
+            _done, _pending = await asyncio.wait(tsks, timeout=self._etime)
             for task in _pending:
                 task.cancel()
             await asyncio.sleep(0)
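
For context, a sketch of the BGTasks lifecycle as shown in these hunks; the payload coroutine is invented for the example:

import asyncio

from async_utils.bg_tasks import BGTasks

async def payload(delay: float) -> None:
    await asyncio.sleep(delay)

async def main() -> None:
    async with BGTasks(exit_timeout=5.0) as bg:
        for i in range(3):
            # create_task is now typed to return asyncio.Task[_T]
            bg.create_task(payload(i / 10))
    # __aexit__ waits up to exit_timeout, then cancels stragglers

asyncio.run(main())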

async_utils/corofunc_cache.py

Lines changed: 12 additions & 8 deletions
@@ -39,14 +39,16 @@ def corocache(
     """Decorator to cache coroutine functions.

     This is less powerful than the version in task_cache.py but may work better
-    for some cases where typing of libraries this interacts with is too restrictive.
+    for some cases where typing of libraries this interacts with is too
+    restrictive.

     Note: This uses the args and kwargs of the original coroutine function as a
     cache key. This includes instances (self) when wrapping methods.
-    Consider not wrapping instance methods, but what those methods call when feasible
-    in cases where this may matter.
+    Consider not wrapping instance methods, but what those methods call when
+    feasible in cases where this may matter.

-    The ordering of args and kwargs matters."""
+    The ordering of args and kwargs matters.
+    """

     def wrapper(coro: CoroLike[P, R]) -> CoroFunc[P, R]:
         internal_cache: dict[Hashable, asyncio.Future[R]] = {}
@@ -88,16 +90,18 @@ def lrucorocache(
     """Decorator to cache coroutine functions.

     This is less powerful than the version in task_cache.py but may work better
-    for some cases where typing of libraries this interacts with is too restrictive.
+    for some cases where typing of libraries this interacts with is too
+    restrictive.

     Note: This uses the args and kwargs of the original coroutine function as a
     cache key. This includes instances (self) when wrapping methods.
-    Consider not wrapping instance methods, but what those methods call when feasible
-    in cases where this may matter.
+    Consider not wrapping instance methods, but what those methods call when
+    feasible in cases where this may matter.

     The ordering of args and kwargs matters.

-    cached results are evicted by LRU and ttl."""
+    Cached results are evicted by LRU and ttl.
+    """

     def wrapper(coro: CoroLike[P, R]) -> CoroFunc[P, R]:
         internal_cache: LRU[Hashable, asyncio.Future[R]] = LRU(maxsize)
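
A hedged sketch of corocache in use; the decorator's own parameters (e.g. a ttl) are not visible in these hunks, so the bare @corocache() call form is an assumption:

import asyncio

from async_utils.corofunc_cache import corocache

@corocache()  # assumed call form; parameters not shown in this diff
async def fetch(key: str) -> str:
    await asyncio.sleep(1)  # stand-in for real I/O
    return key.upper()

async def main() -> None:
    # identical arguments share one cached result; the ordering
    # of args and kwargs matters for the cache key
    a, b = await asyncio.gather(fetch("spam"), fetch("spam"))
    assert a == b == "SPAM"

asyncio.run(main())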

async_utils/gen_transform.py

Lines changed: 5 additions & 5 deletions
@@ -26,8 +26,8 @@


 class _PeekableQueue[T](asyncio.Queue[T]):
-    """This is for internal use only, tested on both 3.12 and 3.13
-    This will be tested for 3.14 prior to 3.14's release."""
+    # This is for internal use only, tested on both 3.12 and 3.13
+    # This will be tested for 3.14 prior to 3.14's release.

     _get_loop: Callable[[], asyncio.AbstractEventLoop]  # pyright: ignore[reportUninitializedInstanceVariable]
     _getters: deque[asyncio.Future[None]]  # pyright: ignore[reportUninitializedInstanceVariable]
@@ -71,8 +71,7 @@ def sync_to_async_gen(
     *args: P.args,
     **kwargs: P.kwargs,
 ) -> AsyncGenerator[YieldType]:
-    """Asynchronously iterate over a synchronous generator run in
-    background thread.
+    """Asynchronously iterate over a synchronous generator.

     The generator function and it's arguments must be threadsafe and will be
     iterated lazily. Generators which perform cpu intensive work while holding
@@ -84,7 +83,8 @@ def sync_to_async_gen(

     If your generator is actually a synchronous coroutine, that's super cool,
     but rewrite is as a native coroutine or use it directly then, you don't need
-    what this function does."""
+    what this function does.
+    """
     # Provides backpressure, ensuring the underlying sync generator in a thread
     # is lazy If the user doesn't want laziness, then using this method makes
     # little sense, they could trivially exhaust the generator in a thread with
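
A minimal sketch of sync_to_async_gen as documented above, assuming the generator function is passed as the first argument (only *args/**kwargs are visible in this hunk):

import asyncio
from collections.abc import Generator

from async_utils.gen_transform import sync_to_async_gen

def counter(n: int) -> Generator[int, None, None]:
    # imagine thread-safe, blocking work before each yield
    for i in range(n):
        yield i

async def main() -> None:
    # items are produced lazily in a background thread
    async for item in sync_to_async_gen(counter, 3):
        print(item)

asyncio.run(main())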

async_utils/lockout.py

Lines changed: 4 additions & 2 deletions
@@ -23,7 +23,7 @@


 class Lockout:
-    """Lock out an async resource for an amount of time
+    """Lock out an async resource for an amount of time.

     Resources may be locked out multiple times.

@@ -79,7 +79,9 @@ async def __aexit__(self, *_dont_care: object) -> None:


 class FIFOLockout:
-    """A FIFO preserving version of Lockout. This has slightly more
+    """A FIFO preserving version of Lockout.
+
+    This has slightly more
     overhead than the base Lockout class, which is not guaranteed to
     preserve FIFO, though happens to in the case of not being locked.


async_utils/priority_sem.py

Lines changed: 15 additions & 12 deletions
@@ -19,7 +19,7 @@
 import contextvars
 import heapq
 import threading
-from collections.abc import Callable
+from collections.abc import Callable, Generator
 from contextlib import contextmanager
 from typing import Any, NamedTuple

@@ -55,7 +55,8 @@ def __lt__(self, other: Any) -> bool:


 @contextmanager
-def priority_context(priority: int):
+def priority_context(priority: int) -> Generator[None, None, None]:
+    """Set the priority for all PrioritySemaphore use in this context."""
     token = _priority.set(priority)
     try:
         yield None
@@ -67,7 +68,8 @@


 class PrioritySemaphore:
-    """
+    """A Semaphore with priority-based aquisition ordering.
+
     Provides a semaphore with similar semantics as asyncio.Semaphore,
     but using an underlying priority. priority is shared within a context
     manager's logical scope, but the context can be nested safely.
@@ -76,11 +78,11 @@ class PrioritySemaphore:

     context manager use:

-    sem = PrioritySemaphore(1)
+    >>> sem = PrioritySemaphore(1)
+    >>> with priority_ctx(10):
+            async with sem:
+                ...

-    with priority_ctx(10):
-        async with sem:
-            ...
     """

     _loop: asyncio.AbstractEventLoop | None = None
@@ -93,12 +95,14 @@ def _get_loop(self) -> asyncio.AbstractEventLoop:
         if self._loop is None:
             self._loop = loop
         if loop is not self._loop:
-            raise RuntimeError(f"{self!r} is bound to a different event loop")
+            msg = f"{self!r} is bound to a different event loop"
+            raise RuntimeError(msg)
         return loop

     def __init__(self, value: int = 1):
         if value < 0:
-            raise ValueError("Semaphore initial value must be >= 0")
+            msg = "Semaphore initial value must be >= 0"
+            raise ValueError(msg)
         self._waiters: list[PriorityWaiter] | None = None
         self._value: int = value

@@ -120,9 +124,8 @@ def locked(self) -> bool:
     async def __aenter__(self):
         prio = _priority.get()
         await self.acquire(prio)
-        return

-    async def __aexit__(self, *dont_care: Any):
+    async def __aexit__(self, *dont_care: object):
         self.release()

     async def acquire(self, priority: int = _default) -> bool:
@@ -174,6 +177,6 @@ def _maybe_wake(self) -> None:
                 heapq.heappush(self._waiters, next_waiter)
                 break

-    def release(self):
+    def release(self) -> None:
         self._value += 1
         self._maybe_wake()
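
A runnable version of the docstring's example; the docstring refers to priority_ctx, which this sketch assumes is the priority_context function shown in this same diff:

import asyncio

from async_utils.priority_sem import PrioritySemaphore, priority_context

sem = PrioritySemaphore(1)

async def job(prio: int) -> None:
    with priority_context(prio):
        async with sem:
            # acquisition order among waiters follows their priority
            await asyncio.sleep(0.1)

async def main() -> None:
    await asyncio.gather(*(job(p) for p in (10, 1, 5)))

asyncio.run(main())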

async_utils/ratelimiter.py

Lines changed: 5 additions & 2 deletions
@@ -23,9 +23,12 @@


 class RateLimiter:
-    """This is an asyncio specific ratelimit implementation which does not
+    """Asyncio-specific internal application ratelimiter.
+
+    This is an asyncio specific ratelimit implementation which does not
     account for various networking effects / responses and
-    should only be used for internal limiting."""
+    should only be used for internal limiting.
+    """

     def __init__(self, rate_limit: int, period: float, granularity: float):
         self.rate_limit: int = rate_limit
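
Only the constructor is visible in this hunk, so this sketch stops at construction; how the limiter is then applied (context manager, decorator, or method) is not shown here:

from async_utils.ratelimiter import RateLimiter

# 10 operations per 1-second period; granularity is presumably the
# internal check interval (an assumption, not shown in this diff)
limiter = RateLimiter(rate_limit=10, period=1.0, granularity=0.1)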

async_utils/scheduler.py

Lines changed: 10 additions & 11 deletions
@@ -73,7 +73,7 @@ async def __aenter__(self):

         return self

-    async def __aexit__(self, *_dont_care: Any):
+    async def __aexit__(self, *_dont_care: object):
         self.__closed = True

     def __aiter__(self):
@@ -108,23 +108,22 @@ async def create_task(
         await self.__tqueue.put(t)
         return t.cancel_token

-    async def cancel_task(self, cancel_token: CancelationToken, /) -> bool:
-        """Returns if the task with that CancelationToken. Cancelling an
-        already cancelled task is allowed and has no additional effect."""
+    async def cancel_task(self, cancel_token: CancelationToken, /) -> None:
+        """Cancel a task.
+
+        Canceling an already canceled task is not an error
+        """
         async with self.__l:
             try:
                 task = self.__tasks[cancel_token]
                 task.canceled = True
             except KeyError:
                 pass
-            else:
-                return True
-        return False

-    def close(self):
-        """Closes the scheduler without waiting"""
+    def close(self) -> None:
+        """Closes the scheduler without waiting."""
         self.__closed = True

-    async def join(self):
-        """Waits for the scheduler's internal queue to be empty"""
+    async def join(self) -> None:
+        """Waits for the scheduler's internal queue to be empty."""
         await self.__tqueue.join()
