
Commit 18481b6

Fix error types, rename InvalidUsage -> BadRequest

1 parent: 5b5c370

8 files changed: +17 -18 lines

scrapinghub/__init__.py
Lines changed: 2 additions & 2 deletions

@@ -1,6 +1,6 @@
 __all__ = ["APIError", "Connection", "HubstorageClient",
            "ScrapinghubClient", "ScrapinghubAPIError",
-           "DuplicateJobError", "InvalidUsage", "NotFound",
+           "DuplicateJobError", "BadRequest", "NotFound",
            "Unauthorized", "ValueTooLarge"]

 import pkgutil
@@ -15,7 +15,7 @@
 from .client.exceptions import (
     ScrapinghubAPIError,
     DuplicateJobError,
-    InvalidUsage,
+    BadRequest,
     NotFound,
     Unauthorized,
     ValueTooLarge,
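
Since the exception is re-exported at the package root, downstream code only needs to swap the name it imports and catches. A minimal migration sketch; the API key, project id, and spider name are placeholders, and the attribute chain assumes the client API on this branch:

    from scrapinghub import ScrapinghubClient, BadRequest

    client = ScrapinghubClient('APIKEY')   # placeholder key
    project = client.get_project(12345)    # placeholder project id
    try:
        project.jobs.schedule('myspider')  # placeholder spider name
    except BadRequest as exc:              # was: except InvalidUsage
        print('rejected by the API:', exc)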

scrapinghub/client/exceptions.py
Lines changed: 4 additions & 4 deletions

@@ -32,7 +32,7 @@ def __init__(self, message=None, http_error=None):
         super(ScrapinghubAPIError, self).__init__(message)


-class InvalidUsage(ScrapinghubAPIError):
+class BadRequest(ScrapinghubAPIError):
     pass


@@ -60,7 +60,7 @@ def wrapped(*args, **kwargs):
         except HTTPError as exc:
             status_code = exc.response.status_code
             if status_code == 400:
-                raise InvalidUsage(http_error=exc)
+                raise BadRequest(http_error=exc)
             elif status_code == 401:
                 raise Unauthorized(http_error=exc)
             elif status_code == 404:
@@ -76,8 +76,8 @@ def wrapped(*args, **kwargs):
                 raise NotFound(msg)
             elif exc._type == APIError.ERR_VALUE_ERROR:
                 raise ValueError(msg)
-            elif exc._type == APIError.ERR_INVALID_USAGE:
-                raise InvalidUsage(msg)
+            elif exc._type == APIError.ERR_BAD_REQUEST:
+                raise BadRequest(msg)
             elif exc._type == APIError.ERR_AUTH_ERROR:
                 raise Unauthorized(http_error=exc)
         raise ScrapinghubAPIError(msg)
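
Condensed, the wrappers above translate transport-level failures into the client exception hierarchy: HTTP 400 and the legacy ERR_BAD_REQUEST type both surface as BadRequest. A sketch of that mapping as plain data; the dicts are illustrative, not part of the library:

    from scrapinghub import APIError
    from scrapinghub.client.exceptions import BadRequest, NotFound, Unauthorized

    HTTP_STATUS_TO_EXCEPTION = {   # from the HTTPError branch above
        400: BadRequest,           # was InvalidUsage
        401: Unauthorized,
        404: NotFound,
    }
    APIERROR_TYPE_TO_EXCEPTION = {  # from the APIError branch above
        APIError.ERR_VALUE_ERROR: ValueError,
        APIError.ERR_BAD_REQUEST: BadRequest,  # was ERR_INVALID_USAGE
        APIError.ERR_AUTH_ERROR: Unauthorized,
    }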

scrapinghub/client/jobs.py
Lines changed: 2 additions & 2 deletions

@@ -11,7 +11,7 @@
 from .logs import Logs
 from .requests import Requests
 from .samples import Samples
-from .exceptions import NotFound, InvalidUsage, DuplicateJobError
+from .exceptions import NotFound, BadRequest, DuplicateJobError
 from .utils import _MappingProxy, get_tags_for_update, parse_job_key


@@ -149,7 +149,7 @@ def schedule(self, spidername=None, **params):
         try:
             response = self._client._connection._post(
                 'schedule', 'json', params)
-        except InvalidUsage as exc:
+        except BadRequest as exc:
             if 'already scheduled' in str(exc):
                 raise DuplicateJobError(exc)
             raise
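
Callers of schedule() therefore see "already scheduled" collisions as the more specific DuplicateJobError, while any other 400 response still propagates as BadRequest. A hedged usage sketch; jobs stands for a Jobs instance obtained elsewhere and the spider name is a placeholder:

    from scrapinghub.client.exceptions import BadRequest, DuplicateJobError

    try:
        job = jobs.schedule('myspider')
    except DuplicateJobError:
        print('spider is already scheduled')
    except BadRequest as exc:
        print('other bad request:', exc)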

scrapinghub/client/projects.py
Lines changed: 1 addition & 2 deletions

@@ -8,7 +8,6 @@

 from .activity import Activity
 from .collections import Collections
-from .exceptions import InvalidUsage
 from .frontiers import _HSFrontier, Frontiers
 from .jobs import Jobs
 from .spiders import Spiders
@@ -172,5 +171,5 @@ class Settings(_MappingProxy):
     def set(self, key, value):
         # FIXME drop the method when post-by-key is implemented on server side
         if not isinstance(key, six.string_types):
-            raise InvalidUsage("key should be a string")
+            raise TypeError("key should be a string")
         self.update({key: value})

scrapinghub/client/utils.py
Lines changed: 2 additions & 2 deletions

@@ -12,7 +12,7 @@
 from ..hubstorage.resourcetype import ItemsResourceType
 from ..hubstorage.collectionsrt import Collections

-from .exceptions import wrap_value_too_large, InvalidUsage
+from .exceptions import wrap_value_too_large


 class LogLevel(object):
@@ -135,7 +135,7 @@ def set(self, key, value):

     def update(self, values):
         if not isinstance(values, dict):
-            raise InvalidUsage("values should be a dict")
+            raise TypeError("values should be a dict")
         data = next(self._origin.apiget())
         data.update(values)
         self._origin.apipost(jl={k: v for k, v in six.iteritems(data)
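
Here and in Settings.set above, purely local type validation now raises the builtin TypeError instead of an API exception class, since no request has been made yet when the check fails. A quick sketch of the new behavior; settings stands in for a project settings proxy:

    try:
        settings.set(123, 'value')     # non-string key
    except TypeError as exc:
        print(exc)                     # "key should be a string"

    try:
        settings.update('not-a-dict')  # non-dict values
    except TypeError as exc:
        print(exc)                     # "values should be a dict"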

scrapinghub/legacy.py
Lines changed: 2 additions & 2 deletions

@@ -152,7 +152,7 @@ def _decode_response(self, response, format, raw):
                                _type=APIError.ERR_AUTH_ERROR)
            elif data['status'] in ('error', 'badrequest'):
                raise APIError(data['message'],
-                              _type=APIError.ERR_INVALID_USAGE)
+                              _type=APIError.ERR_BAD_REQUEST)
            else:
                raise APIError("Unknown response status: {0[status]}".format(data))
        except KeyError:
@@ -405,7 +405,7 @@ class APIError(Exception):
     ERR_DEFAULT = 0
     ERR_NOT_FOUND = 1
     ERR_VALUE_ERROR = 2
-    ERR_INVALID_USAGE = 3
+    ERR_BAD_REQUEST = 3
     ERR_AUTH_ERROR = 4

     def __init__(self, message, _type=None):
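
The constant keeps its numeric value (3), so only the name changes: legacy 'badrequest' responses are tagged with ERR_BAD_REQUEST and then translated to BadRequest by the client layer shown in exceptions.py above. A tiny sketch with a fabricated error message:

    from scrapinghub import APIError

    exc = APIError('invalid spider argument', _type=APIError.ERR_BAD_REQUEST)
    assert exc._type == APIError.ERR_BAD_REQUEST  # still 3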

tests/client/test_collections.py
Lines changed: 2 additions & 2 deletions

@@ -3,7 +3,7 @@
 import pytest
 from six.moves import range

-from scrapinghub.client.exceptions import InvalidUsage
+from scrapinghub.client.exceptions import BadRequest
 from scrapinghub.client.exceptions import NotFound
 from scrapinghub.client.exceptions import ValueTooLarge

@@ -115,7 +115,7 @@ def test_errors_bad_key(collection):
     {'_key': []},
 ])
 def test_errors(collection, testarg):
-    with pytest.raises(InvalidUsage):
+    with pytest.raises(BadRequest):
         collection.set(testarg)


tests/client/test_spiders.py
Lines changed: 2 additions & 2 deletions

@@ -6,7 +6,7 @@
 from six.moves import range

 from scrapinghub.client.exceptions import DuplicateJobError
-from scrapinghub.client.exceptions import InvalidUsage
+from scrapinghub.client.exceptions import BadRequest
 from scrapinghub.client.exceptions import NotFound
 from scrapinghub.client.jobs import Jobs, Job
 from scrapinghub.client.spiders import Spider
@@ -42,7 +42,7 @@ def test_spider_base(project, spider):

 def test_spider_list_update_tags(project, spider):
     # FIXME empty update should fail
-    with pytest.raises(InvalidUsage):
+    with pytest.raises(BadRequest):
         spider.update_tags()

     spider.update_tags(add=['new1', 'new2'])
