
Commit 3e5c76d

Merge pull request #49 from pawelmhm/python3
Python 3 support
2 parents 6441ee8 + c1056a6 commit 3e5c76d

27 files changed: +358 −105 lines

.travis.yml

Lines changed: 10 additions & 4 deletions
@@ -1,7 +1,13 @@
 language: python
-python: 2.7
-env:
-  - TOXENV=py27
 install:
   - pip install -U tox
-script: tox
+matrix:
+  include:
+    - python: 2.7
+      env: TOXENV=py27
+    - python: 3.5
+      env: TOXENV=py35
+    - python: 2.7
+      env: TOXENV=scrapy10
+
+script: tox
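
The single Python 2.7 job becomes a three-job build matrix: each entry exports TOXENV, so the unchanged script: tox step runs a different tox environment per job (py27 and py35 for the two interpreter lines, plus scrapy10, which presumably pins an older Scrapy release on 2.7 — its definition lives in tox.ini, which is not among the hunks shown). Locally, running TOXENV=py35 tox, or equivalently tox -e py35, reproduces a single job.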

README.rst

Lines changed: 9 additions & 0 deletions
@@ -5,6 +5,15 @@ Scrapyrt (Scrapy realtime)
 .. image:: https://travis-ci.org/scrapinghub/scrapyrt.svg?branch=master
     :target: https://travis-ci.org/scrapinghub/scrapyrt
 
+.. image:: https://img.shields.io/pypi/pyversions/scrapyrt.svg
+    :target: https://pypi.python.org/pypi/scrapyrt
+
+.. image:: https://img.shields.io/pypi/v/scrapyrt.svg
+    :target: https://pypi.python.org/pypi/scrapyrt
+
+.. image:: https://img.shields.io/pypi/l/scrapyrt.svg
+    :target: https://pypi.python.org/pypi/scrapyrt
+
 HTTP server which provides API for scheduling Scrapy spiders and
 making requests with spiders.
 

requirements-dev.txt

Lines changed: 0 additions & 1 deletion
@@ -1,4 +1,3 @@
--r requirements.txt
 
 fabric
 requests==2.9.1

requirements.txt

Lines changed: 1 addition & 0 deletions
@@ -1,3 +1,4 @@
 Scrapy>=1.0.0
 service-identity>=1.0.0
 demjson
+six
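
The new six dependency is the compatibility shim the rest of the port leans on: six.moves re-exports stdlib modules that were renamed in Python 3, and six.string_types stands in for the removed basestring, as the hunks below show.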

scrapyrt/cmdline.py

Lines changed: 3 additions & 1 deletion
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
-from ConfigParser import SafeConfigParser, NoOptionError, NoSectionError
+from six.moves.configparser import (
+    SafeConfigParser, NoOptionError, NoSectionError
+)
 import argparse
 import os
 import sys
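
Python 3 renamed the ConfigParser module to configparser; six.moves resolves the import to whichever name the running interpreter provides, so the parenthesized import above works on both versions. A minimal sketch of the pattern — the file name and section here are illustrative, not taken from this diff:

    # six.moves.configparser -> ConfigParser on Python 2, configparser on Python 3.
    # (SafeConfigParser lives on in Python 3 as a deprecated alias of ConfigParser.)
    from six.moves.configparser import SafeConfigParser, NoOptionError, NoSectionError

    parser = SafeConfigParser()
    parser.read(['scrapy.cfg'])   # files that cannot be opened are silently skipped
    try:
        settings_module = parser.get('settings', 'default')
    except (NoSectionError, NoOptionError):
        settings_module = None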

scrapyrt/conf/__init__.py

Lines changed: 2 additions & 1 deletion
@@ -1,4 +1,5 @@
 # -*- coding: utf-8 -*-
+import six
 from copy import deepcopy
 from importlib import import_module
 
@@ -11,7 +12,7 @@ def __init__(self):
         self.setmodule(default_settings)
 
     def setmodule(self, module):
-        if isinstance(module, basestring):
+        if isinstance(module, six.string_types):
             module = import_module(module)
         for setting in dir(module):
             self.set(setting, getattr(module, setting))
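
basestring is gone in Python 3, where str and bytes no longer share a common text base class, so the portable spelling is six.string_types. Concretely — six behavior, not part of this diff:

    import six

    # six.string_types == (basestring,) on Python 2 and (str,) on Python 3.
    isinstance(u"text", six.string_types)   # True on both versions
    isinstance(b"data", six.string_types)   # True on py2 (bytes is str), False on py3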

scrapyrt/conf/spider_settings.py

Lines changed: 3 additions & 3 deletions
@@ -11,10 +11,10 @@ def get_scrapyrt_settings(log_file=None):
         "LOG_FILE": log_file,
         "LOG_STDOUT": False,
         "EXTENSIONS": {
-            'scrapy.contrib.logstats.LogStats': None,
+            'scrapy.extensions.logstats.LogStats': None,
             'scrapy.webservice.WebService': None,
-            'scrapy.telnet.TelnetConsole': None,
-            'scrapy.contrib.throttle.AutoThrottle': None
+            'scrapy.extensions.telnet.TelnetConsole': None,
+            'scrapy.extensions.throttle.AutoThrottle': None
         }
     }
     return spider_settings
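
These are path renames, not behavior changes: Scrapy 1.0 moved the bundled extensions from scrapy.contrib.* and scrapy.telnet to scrapy.extensions.*, and only the new paths exist in the Python 3 capable releases; mapping an extension to None disables it. A sketch of how a dict like this is typically merged into a Settings object — the priority choice is an assumption, not necessarily ScrapyRT's actual wiring:

    from scrapy.settings import Settings

    from scrapyrt.conf.spider_settings import get_scrapyrt_settings

    settings = Settings()
    # Per-crawl overrides; 'cmdline' priority outranks project-level values.
    settings.setdict(get_scrapyrt_settings(log_file='spider.log'),
                     priority='cmdline')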

scrapyrt/core.py

Lines changed: 5 additions & 4 deletions
@@ -60,7 +60,7 @@ def __init__(self, settings, scrapyrt_manager):
 
     def crawl(self, spidercls, *args, **kwargs):
         if isinstance(spidercls, six.string_types):
-            spidercls = self.spiders.load(spidercls)
+            spidercls = self.spider_loader.load(spidercls)
         # creating our own crawler that will allow us to disable start requests easily
         crawler = ScrapyrtCrawler(
             spidercls, self.settings, self.scrapyrt_manager.start_requests)
@@ -159,7 +159,7 @@ def crawl(self, *args, **kwargs):
             dfd = self.crawler_process.crawl(self.spider_name, *args, **kwargs)
         except KeyError as e:
             # Spider not found.
-            raise Error('404', message=e.message)
+            raise Error('404', message=str(e))
         dfd.addCallback(self.return_items)
         return dfd
 
@@ -245,7 +245,7 @@ def collect_dropped(self, item, response, exception, spider):
         if spider is self.crawler.spider:
             self.items_dropped.append({
                 "item": item,
-                "exception": exception.message,
+                "exception": str(exception),
                 "response": response
             })
 
@@ -267,7 +267,8 @@ def create_spider_request(self, kwargs):
         try:
             req = Request(url, **kwargs)
         except (TypeError, ValueError) as e:
-            message = "Error while creating Scrapy Request, {}".format(e.message)
+            msg = "Error while creating Scrapy Request, {}"
+            message = msg.format(str(e))
             raise Error('400', message=message)
 
         req.dont_filter = True
scrapyrt/log.py

Lines changed: 5 additions & 5 deletions
@@ -1,17 +1,17 @@
 # -*- coding: utf-8 -*-
-from logging.config import dictConfig
 import logging
 import os
 import sys
+from logging.config import dictConfig
 
-from scrapy.utils.python import unicode_to_str
+from scrapy.settings import Settings
+from scrapy.utils.log import DEFAULT_LOGGING, TopLevelFormatter
 from twisted.python import log
 from twisted.python.log import startLoggingWithObserver
 from twisted.python.logfile import DailyLogFile
-from scrapy.settings import Settings
-from scrapy.utils.log import DEFAULT_LOGGING, TopLevelFormatter
 
 from .conf import settings as scrapyrt_settings
+from .utils import to_bytes
 
 DEBUG = logging.DEBUG
 INFO = logging.INFO
@@ -64,7 +64,7 @@ def _unicode_to_str(self, eventDict):
         message = eventDict.get('message')
         if message:
             eventDict['message'] = tuple(
-                unicode_to_str(x, self.encoding) for x in message)
+                to_bytes(x, self.encoding) for x in message)
         return eventDict
 
     def emit(self, eventDict):
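
scrapy.utils.python.unicode_to_str was deprecated in favor of to_bytes around Scrapy 1.1, so the observer now imports a project-local to_bytes from the new scrapyrt.utils module (added elsewhere in this commit; its hunk is not shown above). A minimal sketch of what such a helper conventionally looks like, modeled on Scrapy's own to_bytes — an assumption, since the real scrapyrt/utils.py is not in this section:

    import six

    def to_bytes(text, encoding='utf-8', errors='strict'):
        # Already bytes: pass through unchanged.
        if isinstance(text, bytes):
            return text
        # Reject non-string input instead of silently encoding its repr.
        if not isinstance(text, six.string_types):
            raise TypeError('to_bytes must receive str or unicode, got %s'
                            % type(text).__name__)
        return text.encode(encoding, errors)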

scrapyrt/protocols.py

Lines changed: 0 additions & 14 deletions
This file was deleted.
