Skip to content

Commit 2de09d7

Browse files
committed
Remove redundant explicit `self: ClassName` type annotations from method signatures
1 parent 2865b3e commit 2de09d7

File tree

3 files changed

+10
-10
lines changed

3 files changed

+10
-10
lines changed

src/apify/scrapy/middlewares/apify_proxy.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ class ApifyHttpProxyMiddleware:
2727
proxy_settings = {'useApifyProxy': true, 'apifyProxyGroups': []}
2828
"""
2929

30-
def __init__(self: ApifyHttpProxyMiddleware, proxy_settings: dict) -> None:
30+
def __init__(self, proxy_settings: dict) -> None:
3131
"""Create a new instance.
3232
3333
Args:
@@ -66,7 +66,7 @@ def from_crawler(cls: type[ApifyHttpProxyMiddleware], crawler: Crawler) -> Apify
6666

6767
return cls(proxy_settings)
6868

69-
async def process_request(self: ApifyHttpProxyMiddleware, request: Request, spider: Spider) -> None:
69+
async def process_request(self, request: Request, spider: Spider) -> None:
7070
"""Process a Scrapy request by assigning a new proxy.
7171
7272
Args:
@@ -89,7 +89,7 @@ async def process_request(self: ApifyHttpProxyMiddleware, request: Request, spid
8989
Actor.log.debug(f'ApifyHttpProxyMiddleware.process_request: updated request.meta={request.meta}')
9090

9191
def process_exception(
92-
self: ApifyHttpProxyMiddleware,
92+
self,
9393
request: Request,
9494
exception: Exception,
9595
spider: Spider,
@@ -116,7 +116,7 @@ def process_exception(
116116
'reason="{exception}", skipping...'
117117
)
118118

119-
async def _get_new_proxy_url(self: ApifyHttpProxyMiddleware) -> ParseResult:
119+
async def _get_new_proxy_url(self) -> ParseResult:
120120
"""Get a new proxy URL.
121121
122122
Raises:

src/apify/scrapy/pipelines/actor_dataset_push.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ class ActorDatasetPushPipeline:
1919
"""
2020

2121
async def process_item(
22-
self: ActorDatasetPushPipeline,
22+
self,
2323
item: Item,
2424
spider: Spider,
2525
) -> Item:

src/apify/scrapy/scheduler.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ class ApifyScheduler(BaseScheduler):
2929
This scheduler requires the asyncio Twisted reactor to be installed.
3030
"""
3131

32-
def __init__(self: ApifyScheduler) -> None:
32+
def __init__(self) -> None:
3333
"""Create a new instance."""
3434
if not is_asyncio_reactor_installed():
3535
raise ValueError(
@@ -40,7 +40,7 @@ def __init__(self: ApifyScheduler) -> None:
4040
self._rq: RequestQueue | None = None
4141
self.spider: Spider | None = None
4242

43-
def open(self: ApifyScheduler, spider: Spider) -> None: # this has to be named "open"
43+
def open(self, spider: Spider) -> None: # this has to be named "open"
4444
"""Open the scheduler.
4545
4646
Args:
@@ -58,7 +58,7 @@ async def open_queue() -> RequestQueue:
5858
traceback.print_exc()
5959
raise
6060

61-
def has_pending_requests(self: ApifyScheduler) -> bool:
61+
def has_pending_requests(self) -> bool:
6262
"""Check if the scheduler has any pending requests.
6363
6464
Returns:
@@ -75,7 +75,7 @@ def has_pending_requests(self: ApifyScheduler) -> bool:
7575

7676
return not is_finished
7777

78-
def enqueue_request(self: ApifyScheduler, request: Request) -> bool:
78+
def enqueue_request(self, request: Request) -> bool:
7979
"""Add a request to the scheduler.
8080
8181
This could be called from either from a spider or a downloader middleware (e.g. redirect, retry, ...).
@@ -111,7 +111,7 @@ def enqueue_request(self: ApifyScheduler, request: Request) -> bool:
111111
Actor.log.debug(f'[{call_id}]: rq.add_request.result={result}...')
112112
return bool(result.was_already_present)
113113

114-
def next_request(self: ApifyScheduler) -> Request | None:
114+
def next_request(self) -> Request | None:
115115
"""Fetch the next request from the scheduler.
116116
117117
Returns:

0 commit comments

Comments
 (0)