Commit 6e905ac

TST run tests with Scrapy 1.0 again
1 parent: cde6b3f

File tree:
  .travis.yml
  tests/test_integration.py
  tests/utils.py
  tox.ini

4 files changed: 11 additions, 10 deletions

.travis.yml

Lines changed: 1 addition & 1 deletion
@@ -20,7 +20,7 @@ matrix:
     - python: 3.6
       env: TOXENV=py36
     - python: 2.7
-      env: TOXENV=py27-scrapy11
+      env: TOXENV=py27-scrapy10
 
 before_install:
   - docker pull scrapinghub/splash

tests/test_integration.py

Lines changed: 3 additions & 3 deletions
@@ -78,7 +78,7 @@ def test_basic(settings):
     assert len(items) == 1
     resp = items[0]['response']
     assert resp.url == url
-    assert resp.css('body::text').get().strip() == "hello world!"
+    assert resp.css('body::text').extract_first().strip() == "hello world!"
 
 
 @requires_splash
@@ -99,7 +99,7 @@ def parse(self, response):
     assert crawler.stats.get_value('dupefilter/filtered') == 1
     resp = items[0]['response']
     assert resp.url == url
-    assert resp.css('body::text').get().strip() == "hello world!"
+    assert resp.css('body::text').extract_first().strip() == "hello world!"
     assert resp.status == resp.splash_response_status == 200
     assert resp.headers == resp.splash_response_headers
     assert resp.splash_response_headers['Content-Type'] == b"text/html; charset=utf-8"
@@ -128,7 +128,7 @@ def start_requests(self):
     resp = items[0]['response']
     assert resp.url == url + "/#foo"
     assert resp.status == resp.splash_response_status == 200
-    assert resp.css('body::text').get().strip() == "hello world!"
+    assert resp.css('body::text').extract_first().strip() == "hello world!"
     assert resp.data['jsvalue'] == 3
     assert resp.headers['X-MyHeader'] == b'my value'
     assert resp.headers['Content-Type'] == b'text/html'
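
The revert from .get() to .extract_first() keeps these assertions running on Scrapy 1.0: extract_first() is the older SelectorList method, while .get() is a later alias for the same call and is not available there. A minimal sketch (not part of this repo) of the two equivalent spellings:

    # Standalone illustration, not repository code.
    from scrapy.selector import Selector

    sel = Selector(text="<html><body>hello world!</body></html>")

    # Older spelling, available on Scrapy 1.0:
    text = sel.css('body::text').extract_first()
    assert text.strip() == "hello world!"

    # Newer alias for the same call, later Scrapy/parsel releases only:
    # text = sel.css('body::text').get()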

tests/utils.py

Lines changed: 4 additions & 3 deletions
@@ -4,8 +4,9 @@
 from pytest_twisted import inlineCallbacks
 from twisted.internet.defer import returnValue
 from twisted.web.resource import Resource
-from scrapy.crawler import CrawlerRunner
-from scrapy.utils.python import to_bytes
+from scrapy.crawler import Crawler
+
+from scrapy_splash.utils import to_bytes
 from tests.mockserver import MockServer
 
 
@@ -52,7 +53,7 @@ class Spider(spider_cls):
         Spider.__name__ = spider_cls.__name__
         Spider.__module__ = spider_cls.__module__
         spider_cls = Spider
-    return CrawlerRunner(settings).create_crawler(spider_cls)
+    return Crawler(spider_cls, settings)
 
 
 class CollectorPipeline:
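
Both utils changes are compatibility moves for the Scrapy 1.0 environment: the helper now builds a Crawler directly instead of going through CrawlerRunner(settings).create_crawler(spider_cls), presumably because that public helper is a later addition, and to_bytes is imported from scrapy_splash.utils rather than scrapy.utils.python, where it appears to be a 1.1-era addition. A minimal sketch (not part of this repo; the spider name and settings are illustrative) of constructing a Crawler directly:

    # Standalone illustration, not repository code.
    import scrapy
    from scrapy.crawler import Crawler
    from scrapy.settings import Settings


    class ExampleSpider(scrapy.Spider):
        # Hypothetical spider used only for this sketch.
        name = 'example'


    settings = Settings({'LOG_LEVEL': 'WARNING'})  # illustrative settings
    crawler = Crawler(ExampleSpider, settings)     # works on Scrapy 1.0 and later
    # The crawler can then be scheduled with crawler.crawl(...) on the Twisted reactor.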

tox.ini

Lines changed: 3 additions & 3 deletions
@@ -4,7 +4,7 @@
 # and then run "tox" from this directory.
 
 [tox]
-envlist = py27,py34,py35,py36,py27-scrapy11
+envlist = py27,py34,py35,py36,py27-scrapy10
 
 [testenv]
 passenv = SPLASH_URL
@@ -30,8 +30,8 @@ deps = {[testenv:py34]deps}
 basepython = python3.6
 deps = {[testenv:py34]deps}
 
-[testenv:py27-scrapy11]
+[testenv:py27-scrapy10]
 deps =
     -rrequirements-test.txt
-    scrapy == 1.1.4
+    scrapy < 1.1
     service_identity