Skip to content

Commit 48a7a89

Browse files
authored
[spiders] Remove 'spider' argument to ExecutionEngine.crawl (#286)
1 parent 937c537 commit 48a7a89

File tree

2 files changed: +8 additions, −4 deletions

2 files changed: +8 additions, −4 deletions

src/scrapy_redis/spiders.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import json
22
from collections.abc import Iterable
3-
from scrapy import signals, FormRequest
3+
from scrapy import signals, FormRequest, version_info as scrapy_version
44
from scrapy.exceptions import DontCloseSpider
55
from scrapy.spiders import Spider, CrawlSpider
66
from scrapy_redis.utils import TextColor
@@ -190,7 +190,11 @@ def schedule_next_requests(self):
190190
"""Schedules a request if available"""
191191
# TODO: While there is capacity, schedule a batch of redis requests.
192192
for req in self.next_requests():
193-
self.crawler.engine.crawl(req, spider=self)
193+
# see https://github.com/scrapy/scrapy/issues/5994
194+
if scrapy_version >= (2, 6):
195+
self.crawler.engine.crawl(req)
196+
else:
197+
self.crawler.engine.crawl(req, spider=self)
194198

195199
def spider_idle(self):
196200
"""

tests/test_spiders.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -165,9 +165,9 @@ def test_consume_urls_from_redis(start_urls_as_zset, start_urls_as_set, spider_c
165165

166166
if start_urls_as_zset or start_urls_as_set:
167167
crawler.engine.crawl.assert_has_calls([
168-
mock.call(req, spider=spider) for req in reqs if req not in start_requests
168+
mock.call(req) for req in reqs if req not in start_requests
169169
], any_order=True)
170170
else:
171171
crawler.engine.crawl.assert_has_calls([
172-
mock.call(req, spider=spider) for req in reqs[batch_size:]
172+
mock.call(req) for req in reqs[batch_size:]
173173
])

0 commit comments

Comments (0)