
Commit a273b76

Handle internal server errors and 404
1 parent 7137456 commit a273b76

File tree: 3 files changed (+31, -6 lines)


scrapinghub/client/exceptions.py

Lines changed: 8 additions & 0 deletions
@@ -52,6 +52,10 @@ class DuplicateJobError(ScrapinghubAPIError):
     pass
 
 
+class ServerError(ScrapinghubAPIError):
+    pass
+
+
 def wrap_http_errors(method):
     @wraps(method)
     def wrapped(*args, **kwargs):
@@ -69,6 +73,8 @@ def wrapped(*args, **kwargs):
                 raise ValueTooLarge(http_error=exc)
             elif 400 <= status_code < 500:
                 raise ScrapinghubAPIError(http_error=exc)
+            elif 500 <= status_code < 600:
+                raise ServerError(http_error=exc)
             raise
         except APIError as exc:
             msg = exc.args[0]
@@ -80,6 +86,8 @@ def wrapped(*args, **kwargs):
                 raise BadRequest(msg)
             elif exc._type == APIError.ERR_AUTH_ERROR:
                 raise Unauthorized(http_error=exc)
+            elif exc._type == APIError.ERR_SERVER_ERROR:
+                raise ServerError(http_error=exc)
             raise ScrapinghubAPIError(msg)
     return wrapped
 
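With this change, a 5xx response surfaces as the dedicated ServerError subclass instead of a generic ScrapinghubAPIError, so callers can treat transient server failures differently from client-side mistakes. A minimal sketch of how calling code might use the distinction (the API key, retry count, and backoff below are illustrative assumptions, not library behavior):

import time

from scrapinghub import ScrapinghubClient
from scrapinghub.client.exceptions import ServerError

client = ScrapinghubClient('APIKEY')  # placeholder key

# Retry transient 5xx failures a few times; 4xx errors still arrive
# as ScrapinghubAPIError (or one of its other subclasses) and are
# usually not worth retrying.
for attempt in range(3):
    try:
        projects = client.projects.list()
        break
    except ServerError:
        time.sleep(2 ** attempt)  # simple exponential backoff
else:
    raise RuntimeError('API kept returning 5xx responses')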

scrapinghub/legacy.py

Lines changed: 11 additions & 5 deletions
@@ -138,6 +138,11 @@ def _request(self, url, data, headers, format, raw, files=None):
         return self._decode_response(response, format, raw)
 
     def _decode_response(self, response, format, raw):
+        if response.status_code == 404:
+            raise APIError("Not found", _type=APIError.ERR_NOT_FOUND)
+        elif 500 <= response.status_code < 600:
+            raise APIError("Internal server error",
+                           _type=APIError.ERR_SERVER_ERROR)
         if raw:
             return response.raw
         elif format == 'json':
@@ -402,11 +407,12 @@ def _add_params(self, params):
 
 class APIError(Exception):
 
-    ERR_DEFAULT = 0
-    ERR_NOT_FOUND = 1
-    ERR_VALUE_ERROR = 2
-    ERR_BAD_REQUEST = 3
-    ERR_AUTH_ERROR = 4
+    ERR_DEFAULT = "err_default"
+    ERR_NOT_FOUND = "err_not_found"
+    ERR_VALUE_ERROR = "err_value_error"
+    ERR_BAD_REQUEST = "err_bad_request"
+    ERR_AUTH_ERROR = "err_auth_error"
+    ERR_SERVER_ERROR = "err_server_error"
 
     def __init__(self, message, _type=None):
         super(APIError, self).__init__(message)
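Two things change here: _decode_response now rejects 404 and 5xx responses before attempting to decode a body, and the APIError type constants become self-describing strings (the old ERR_DEFAULT = 0 was also falsy, which string values avoid). A standalone sketch of the new mapping; the FakeResponse class is made up for illustration, and the __init__ fallback to ERR_DEFAULT is an assumption about the surrounding code:

class APIError(Exception):

    ERR_DEFAULT = "err_default"
    ERR_NOT_FOUND = "err_not_found"
    ERR_SERVER_ERROR = "err_server_error"

    def __init__(self, message, _type=None):
        super(APIError, self).__init__(message)
        self._type = _type or self.ERR_DEFAULT  # assumed fallback


def check_response(response):
    # Mirrors the new checks at the top of _decode_response: 404 and
    # 5xx are rejected before any body decoding is attempted.
    if response.status_code == 404:
        raise APIError("Not found", _type=APIError.ERR_NOT_FOUND)
    elif 500 <= response.status_code < 600:
        raise APIError("Internal server error",
                       _type=APIError.ERR_SERVER_ERROR)


class FakeResponse(object):
    status_code = 503  # a canned 5xx response


try:
    check_response(FakeResponse())
except APIError as exc:
    assert exc._type == APIError.ERR_SERVER_ERROR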

tests/client/test_projects.py

Lines changed: 12 additions & 1 deletion
@@ -2,12 +2,14 @@
 from collections import defaultdict, Iterator
 
 import pytest
+import responses
 from six.moves import range
+from requests.compat import urljoin
 
 from scrapinghub import ScrapinghubClient
 from scrapinghub.client.activity import Activity
 from scrapinghub.client.collections import Collections
-from scrapinghub.client.exceptions import DuplicateJobError
+from scrapinghub.client.exceptions import DuplicateJobError, ServerError
 from scrapinghub.client.frontiers import Frontiers
 from scrapinghub.client.jobs import Jobs, Job
 from scrapinghub.client.projects import Project, Settings
@@ -45,6 +47,15 @@ def test_projects_list(client):
     assert int(TEST_PROJECT_ID) in projects
 
 
+@responses.activate
+def test_projects_list_server_error(client):
+    url = urljoin(TEST_DASH_ENDPOINT, 'scrapyd/listprojects.json')
+    responses.add(responses.GET, url, body='some error body', status=500)
+    with pytest.raises(ServerError):
+        client.projects.list()
+    assert len(responses.calls) == 1
+
+
 def test_projects_summary(client, project):
     # add at least one running or pending job to ensure summary is returned
     project.jobs.schedule(TEST_SPIDER_NAME, meta={'state': 'running'})
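The new test leans on the responses library, which patches requests' transport so no real HTTP request is sent. A self-contained sketch of the same pattern outside the test suite (the URL is a placeholder, not the real Dash endpoint):

import requests
import responses


@responses.activate
def demo():
    # Register a canned 500 reply; the requests.get() below is
    # intercepted and never touches the network.
    url = 'https://example.com/scrapyd/listprojects.json'
    responses.add(responses.GET, url, body='some error body', status=500)
    resp = requests.get(url)
    assert resp.status_code == 500
    assert len(responses.calls) == 1  # exactly one intercepted call


demo()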
