Commit a868966

Merge pull request #122 from scrapinghub/remove-demjson
Remove demjson
2 parents: 93f47c3 + e5d83bf

File tree: 10 files changed (+14, -29 lines)


requirements.txt

Lines changed: 0 additions & 5 deletions
@@ -1,6 +1 @@
 Scrapy>=1.0.0
-service-identity>=1.0.0
-demjson>=2.2.4
-six>=1.12.0
-jmespath==0.10.0
-pyasn1>=0.4.8

scrapyrt/conf/__init__.py

Lines changed: 1 addition & 2 deletions
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-import six
 from copy import deepcopy
 from importlib import import_module
 
@@ -12,7 +11,7 @@ def __init__(self):
         self.setmodule(default_settings)
 
     def setmodule(self, module):
-        if isinstance(module, six.string_types):
+        if isinstance(module, str):
             module = import_module(module)
         for setting in dir(module):
             self.set(setting, getattr(module, setting))
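
Note: on Python 3, six.string_types is simply (str,), so isinstance(module, str) is an exact replacement once Python 2 support is dropped; the same swap appears in scrapyrt/core.py below. A minimal sketch of the idiom, using a hypothetical load_settings helper:

from importlib import import_module

def load_settings(module):
    # Accept either a module object or a dotted-path string.
    # six.string_types was (str,) on Python 3, so plain str is equivalent.
    if isinstance(module, str):
        module = import_module(module)
    # Collect uppercase attributes, Scrapy-settings style.
    return {name: getattr(module, name) for name in dir(module) if name.isupper()}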

scrapyrt/core.py

Lines changed: 1 addition & 2 deletions
@@ -3,7 +3,6 @@
 from copy import deepcopy
 import datetime
 import os
-import six
 
 from scrapy import signals
 from scrapy.crawler import CrawlerRunner, Crawler
@@ -58,7 +57,7 @@ def __init__(self, settings, scrapyrt_manager):
         self.scrapyrt_manager = scrapyrt_manager
 
     def crawl(self, spidercls, *args, **kwargs):
-        if isinstance(spidercls, six.string_types):
+        if isinstance(spidercls, str):
            spidercls = self.spider_loader.load(spidercls)
 
        for kw in kwargs:

scrapyrt/resources.py

Lines changed: 4 additions & 6 deletions
@@ -2,7 +2,6 @@
 import json
 from urllib.parse import unquote
 
-import demjson
 from scrapy.utils.misc import load_object
 from scrapy.utils.serialize import ScrapyJSONEncoder
 from twisted.internet.defer import Deferred
@@ -158,12 +157,11 @@ def render_POST(self, request, **kwargs):
         """
         request_body = request.content.getvalue()
         try:
-            # TODO replace demjson with json.loads
-            api_params = demjson.decode(request_body)
-        except demjson.JSONDecodeError as e:
+            api_params = json.loads(request_body)
+        except Exception as e:
             message = "Invalid JSON in POST body. {}"
-            message = message.format(e.pretty_description())
-            # TODO should be integer not string
+            message = message.format(e)
+            # TODO should be integer not string?
             raise Error('400', message=message)
 
         log.msg("{}".format(api_params))
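
Note: json.loads raises json.JSONDecodeError (a ValueError subclass) on malformed input, so the broad except Exception above could be narrowed. A sketch of the same handler with a tighter catch, using a hypothetical parse_api_params helper in place of the real render_POST:

import json

def parse_api_params(request_body):
    # json.loads accepts bytes directly on Python 3.6+, so the raw
    # POST body can be passed through unchanged.
    try:
        return json.loads(request_body)
    except json.JSONDecodeError as e:
        # str(e) includes the error position, e.g.
        # "Expecting value: line 1 column 1 (char 0)"
        raise ValueError("Invalid JSON in POST body. {}".format(e))

Unlike demjson's e.pretty_description(), the stdlib exception stringifies with line and column detail built in, which is what the .format(e) change above relies on.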

scrapyrt/utils.py

Lines changed: 1 addition & 4 deletions
@@ -1,8 +1,5 @@
 import inspect
 
-import sys
-
-import six
 from scrapy import Request
 
 
@@ -33,7 +30,7 @@ def to_bytes(text, encoding=None, errors='strict'):
     is already a bytes object, return it as-is."""
     if isinstance(text, bytes):
         return text
-    if not isinstance(text, six.string_types):
+    if not isinstance(text, str):
         raise TypeError('to_bytes must receive a unicode, str or bytes '
                         'object, got %s' % type(text).__name__)
     if encoding is None:
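
For context, this helper mirrors Scrapy's own to_bytes. A complete sketch of the function after the change, assuming UTF-8 as the default encoding:

def to_bytes(text, encoding=None, errors='strict'):
    """Return the binary representation of text. If text
    is already a bytes object, return it as-is."""
    if isinstance(text, bytes):
        return text
    if not isinstance(text, str):
        raise TypeError('to_bytes must receive a unicode, str or bytes '
                        'object, got %s' % type(text).__name__)
    if encoding is None:
        encoding = 'utf-8'
    return text.encode(encoding, errors)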

setup.py

Lines changed: 1 addition & 4 deletions
@@ -37,10 +37,7 @@
         'License :: OSI Approved :: BSD License',
     ],
     install_requires=[
-        'Twisted>=14.0.0',
-        'Scrapy>=1.0.0',
-        'demjson',
-        'six>=1.5.2'
+        'Scrapy>=1.0.0'
     ],
     package_data={
         'scrapyrt': [

tests/servers.py

Lines changed: 1 addition & 1 deletion
@@ -7,9 +7,9 @@
 import tempfile
 import time
 from subprocess import Popen, PIPE
+from urllib.parse import urljoin
 
 import port_for
-from six.moves.urllib.parse import urljoin
 
 from . import TESTS_PATH
 from .utils import get_testenv, generate_project
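
On Python 3, six.moves.urllib.parse is just an alias for the stdlib urllib.parse, so this import swap is behavior-preserving. For example (the localhost URL is illustrative):

from urllib.parse import urljoin

# Resolve an endpoint against a test server's root URL.
print(urljoin('http://localhost:9080', '/crawl.json'))
# -> http://localhost:9080/crawl.json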

tests/test_crawl_manager.py

Lines changed: 1 addition & 1 deletion
@@ -334,7 +334,7 @@ def setUp(self):
 
     def test_return_items(self):
         result = self.crawl_manager.return_items(None)
-        self.assertDictContainsSubset(self.expected_result, result)
+        self.assertEqual(dict(result, **self.expected_result), result)
         self.assertEqual(list(sorted(self.stats.keys())), list(result['stats'].keys()))
         # debug = True by default
         self.assertIn('errors', result)
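
assertDictContainsSubset has been deprecated since Python 3.2, and assertEqual(dict(result, **expected), result) is a common replacement: overlaying the expected pairs onto result leaves it unchanged only if they were already present. A quick illustration with made-up values:

expected = {'status': 'ok'}
result = {'status': 'ok', 'stats': {}, 'errors': []}

# dict(result, **expected) copies result, then overwrites with expected;
# it equals result iff every expected pair is already in result.
assert dict(result, **expected) == result
assert dict(result, **{'status': 'error'}) != result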

tests/test_log_observer.py

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-from six import StringIO
+from io import StringIO
 
 from mock import patch
 from twisted.python.log import startLoggingWithObserver, removeObserver
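
six.StringIO resolves to io.StringIO on Python 3, so the in-memory text buffer the test logs into is unchanged. For instance:

from io import StringIO

buf = StringIO()
buf.write('some log line\n')
assert buf.getvalue() == 'some log line\n'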

tests/test_resource_realtimeapi.py

Lines changed: 3 additions & 3 deletions
@@ -9,7 +9,7 @@
 from scrapyrt.resources import RealtimeApi, ServiceResource, CrawlResource
 
 
-class TestResource(ServiceResource):
+class SampleResource(ServiceResource):
     isLeaf = True
     allowedMethods = ['GET', 'POST']
 
@@ -35,10 +35,10 @@ def test_realtimeapi_with_default_settings(self):
     @patch('scrapyrt.resources.settings', deepcopy(settings))
     def test_realtimeapi_with_custom_settings(self):
         from scrapyrt.resources import settings
-        settings.RESOURCES[b'test.json'] = self._get_class_path('TestResource')
+        settings.RESOURCES[b'test.json'] = self._get_class_path('SampleResource')
         expected_entities = {
             b'crawl.json': CrawlResource,
-            b'test.json': TestResource
+            b'test.json': SampleResource
         }
         service_root = RealtimeApi()
         self._check_entities(service_root, expected_entities)
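
Renaming TestResource to SampleResource also keeps test collectors from picking the class up: pytest, for example, tries to collect any class whose name matches its default Test* pattern and warns when such a class has a constructor. A minimal reproduction, assuming pytest's default python_classes = Test* setting and a hypothetical test_example.py:

# test_example.py
class TestResource:
    # pytest warns: "cannot collect test class 'TestResource'
    # because it has a __init__ constructor"
    def __init__(self, name):
        self.name = name

class SampleResource:
    # Not matched by the Test* pattern, so collection ignores it.
    def __init__(self, name):
        self.name = name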
