Skip to content

Commit c9d34c4

Browse files
committed
Merge branch 'master' into pedro-errback
2 parents 75d2b3e + a6c7892 commit c9d34c4

File tree

9 files changed

+15
-58
lines changed

9 files changed

+15
-58
lines changed

.bumpversion.cfg

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
[bumpversion]
2-
current_version = 0.10.0
2+
current_version = 0.11.0
33
commit = True
44
tag = True
55
tag_name = {new_version}

.travis.yml

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -6,19 +6,17 @@ matrix:
66
- python: 2.7
77
env: TOXENV=py27
88
- python: 2.7
9-
env: TOXENV=py27-scrapy1.0
9+
env: TOXENV=py27-scrapy1.5
1010
- python: 2.7
11-
env: TOXENV=py27-scrapy1.1
12-
- python: 2.7
13-
env: TOXENV=py27-scrapy1.2
11+
env: TOXENV=py27-scrapy1.6
1412
- python: 3.5
1513
env: TOXENV=py35
1614
- python: 3.6
1715
env: TOXENV=py36
1816
- python: 3.6
19-
env: TOXENV=py36-scrapy1.1
17+
env: TOXENV=py36-scrapy1.5
2018
- python: 3.6
21-
env: TOXENV=py36-scrapy1.2
19+
env: TOXENV=py36-scrapy1.6
2220
script: tox
2321

2422
deploy:

requirements-dev.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
bumpversion==0.5.3
22
fabric
3-
requests==2.9.1
3+
requests==2.22.0
44
mock==1.3.0
55
pytest==2.9.1
66
pytest-cov==2.2.1

requirements.txt

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
Scrapy>=1.0.0
22
service-identity>=1.0.0
3-
demjson
4-
six
3+
demjson==2.2.4
4+
six==1.12.0

scrapyrt/VERSION

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
0.10.0
1+
0.11.0

scrapyrt/core.py

Lines changed: 1 addition & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,8 @@
44
import datetime
55
import os
66
import six
7-
import types
87

9-
from scrapy import signals, log as scrapy_log
8+
from scrapy import signals
109
from scrapy.crawler import CrawlerRunner, Crawler
1110
from scrapy.exceptions import DontCloseSpider
1211
from scrapy.http import Request
@@ -85,45 +84,6 @@ def cleanup_logging(result):
8584

8685
return dfd.addBoth(cleanup_logging)
8786

88-
def _setup_crawler_logging(self, crawler):
89-
log_observer = scrapy_log.start_from_crawler(crawler)
90-
if log_observer:
91-
monkey_patch_and_connect_log_observer(crawler, log_observer)
92-
if self.log_observer:
93-
monkey_patch_and_connect_log_observer(crawler, self.log_observer)
94-
95-
def _stop_logging(self):
96-
if self.log_observer:
97-
try:
98-
self.log_observer.stop()
99-
except ValueError:
100-
# exception on kill
101-
# exceptions.ValueError: list.remove(x): x not in list
102-
# looks like it's safe to ignore it
103-
pass
104-
105-
106-
def monkey_patch_and_connect_log_observer(crawler, log_observer):
107-
"""Ugly hack to close log file.
108-
109-
Monkey patch log_observer.stop method to close file each time
110-
log observer is closed.
111-
I prefer this to be fixed in Scrapy itself, but as
112-
Scrapy is going to switch to standard python logging soon
113-
https://github.com/scrapy/scrapy/pull/1060
114-
this change wouldn't be accepted in preference of merging
115-
new logging sooner.
116-
117-
"""
118-
def stop_and_close_log_file(self):
119-
self.__stop()
120-
self.write.__self__.close()
121-
122-
log_observer.__stop = log_observer.stop
123-
log_observer.stop = types.MethodType(
124-
stop_and_close_log_file, log_observer)
125-
crawler.signals.connect(log_observer.stop, signals.engine_stopped)
126-
12787

12888
class CrawlManager(object):
12989
"""

scrapyrt/resources.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -92,7 +92,7 @@ def render_object(self, obj, request):
9292
request.setHeader('Access-Control-Allow-Methods',
9393
', '.join(getattr(self, 'allowedMethods', [])))
9494
request.setHeader('Access-Control-Allow-Headers', 'X-Requested-With')
95-
request.setHeader('Content-Length', len(r))
95+
request.setHeader('Content-Length', str(len(r)))
9696
return r.encode("utf8")
9797

9898

tests/test_resource_serviceresource.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -172,7 +172,7 @@ def test_render_object(self):
172172
set_header_mock.assert_any_call('Access-Control-Allow-Origin', '*')
173173
set_header_mock.assert_any_call('Access-Control-Allow-Headers',
174174
'X-Requested-With')
175-
set_header_mock.assert_any_call('Content-Length', len(result))
175+
set_header_mock.assert_any_call('Content-Length', str(len(result)))
176176
# request.setHeader('Access-Control-Allow-Methods',
177177
# ', '.join(getattr(self, 'allowedMethods', [])))
178178
headers = dict(self.headers)

tox.ini

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,9 @@
11
[tox]
2-
envlist = py27, py27-scrapy{1.0,1.1,1.2}, py35, py36, {py35,py36}-scrapy{1.1,1.2}
2+
envlist = py27, py27-scrapy{1.5,1.6}, py35, py36, {py35,py36}-scrapy{1.5,1.6}
33

44
[testenv]
55
deps =
6-
scrapy1.0: Scrapy>=1.0,<1.1
7-
scrapy1.1: Scrapy>=1.1,<1.2
8-
scrapy1.2: Scrapy>=1.2,<1.3
6+
scrapy1.5: Scrapy>=1.5,<1.6
7+
scrapy1.6: Scrapy>=1.6,<1.7
98
-r{toxinidir}/requirements-dev.txt
109
commands = py.test {posargs}

0 commit comments

Comments
 (0)