Commit dc76cb7

chekunkov authored and vshlapakov committed
Make exception wrappers private
Make wrap_http_errors private and remove wrap_value_too_large because it's not needed any more.
1 parent 54484ee commit dc76cb7
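
With the wrappers private, the public error-handling surface is just the exception classes in scrapinghub.client.exceptions; callers never import the decorators themselves. A minimal caller-side sketch, assuming the top-level ScrapinghubClient entry point and its projects.list() call (the API key is a placeholder):

from scrapinghub import ScrapinghubClient
from scrapinghub.client.exceptions import ScrapinghubAPIError, ServerError

client = ScrapinghubClient('APIKEY')  # placeholder API key

try:
    project_ids = client.projects.list()
except ServerError:
    # something unexpected happened on the server side
    print('server error, retry later')
except ScrapinghubAPIError as exc:
    # base class for errors raised via the private _wrap_http_errors helper
    print('API error: %s' % exc)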

File tree

scrapinghub/client/__init__.py
scrapinghub/client/exceptions.py
scrapinghub/client/proxy.py
scrapinghub/client/spiders.py

4 files changed: +13 -24 lines changed


scrapinghub/client/__init__.py

Lines changed: 3 additions & 4 deletions
@@ -1,9 +1,8 @@
 from scrapinghub import Connection as _Connection
 from scrapinghub import HubstorageClient as _HubstorageClient
 
+from .exceptions import _wrap_http_errors
 from .projects import Projects
-from .exceptions import wrap_http_errors
-
 from .utils import parse_auth
 from .utils import parse_project_id, parse_job_key
 
@@ -13,14 +12,14 @@
 
 class Connection(_Connection):
 
-    @wrap_http_errors
+    @_wrap_http_errors
     def _request(self, *args, **kwargs):
         return super(Connection, self)._request(*args, **kwargs)
 
 
 class HubstorageClient(_HubstorageClient):
 
-    @wrap_http_errors
+    @_wrap_http_errors
     def request(self, *args, **kwargs):
         return super(HubstorageClient, self).request(*args, **kwargs)
 
scrapinghub/client/exceptions.py

Lines changed: 1 addition & 13 deletions
@@ -5,7 +5,6 @@
 from requests import HTTPError
 
 from ..legacy import APIError
-from ..hubstorage import ValueTooLarge as _ValueTooLarge
 
 
 def _get_http_error_msg(exc):
@@ -57,7 +56,7 @@ class ServerError(ScrapinghubAPIError):
     """Indicates some server error: something unexpected has happened."""
 
 
-def wrap_http_errors(method):
+def _wrap_http_errors(method):
     """Internal helper to handle exceptions gracefully."""
     @wraps(method)
     def wrapped(*args, **kwargs):
@@ -92,14 +91,3 @@ def wrapped(*args, **kwargs):
                 raise ServerError(http_error=exc)
             raise ScrapinghubAPIError(msg)
     return wrapped
-
-
-def wrap_value_too_large(method):
-    """Internal wrapper for ValueTooLarge exception."""
-    @wraps(method)
-    def wrapped(*args, **kwargs):
-        try:
-            return method(*args, **kwargs)
-        except _ValueTooLarge as exc:
-            raise ValueTooLarge(str(exc))
-    return wrapped
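
The hunks above show only the head and tail of the renamed decorator; the status-code handling in between is elided by the diff. Below is a minimal sketch of the pattern, with the elided branches reduced to an assumed 4xx/5xx split (the real helper presumably maps individual status codes to more specific exception classes); _get_http_error_msg, ServerError and ScrapinghubAPIError are defined elsewhere in this same module, as the surrounding context lines show:

from functools import wraps

from requests import HTTPError


def _wrap_http_errors(method):
    """Internal helper to handle exceptions gracefully."""
    @wraps(method)
    def wrapped(*args, **kwargs):
        try:
            return method(*args, **kwargs)
        except HTTPError as exc:
            status_code = exc.response.status_code
            msg = _get_http_error_msg(exc)
            # Assumed simplification of the elided branches: 5xx responses
            # become ServerError, everything else falls through to the
            # generic ScrapinghubAPIError.
            if status_code >= 500:
                raise ServerError(http_error=exc)
            raise ScrapinghubAPIError(msg)
    return wrapped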

scrapinghub/client/proxy.py

Lines changed: 6 additions & 3 deletions
@@ -3,7 +3,8 @@
 import six
 import json
 
-from .exceptions import wrap_value_too_large
+from ..hubstorage import ValueTooLarge as _ValueTooLarge
+from .exceptions import ValueTooLarge
 
 
 class _Proxy(object):
@@ -53,9 +54,11 @@ class _ItemsResourceProxy(_Proxy):
     def get(self, _key, **params):
         return self._origin.get(_key, **params)
 
-    @wrap_value_too_large
     def write(self, item):
-        return self._origin.write(item)
+        try:
+            return self._origin.write(item)
+        except _ValueTooLarge as exc:
+            raise ValueTooLarge(str(exc))
 
     def iter(self, _key=None, **params):
         params = self._modify_iter_params(params)
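
The removed wrap_value_too_large decorator had this single call site in the commit, so the conversion from the hubstorage-level _ValueTooLarge to the client-level ValueTooLarge is now inlined in write(). From the caller's point of view nothing changes: an oversized item still surfaces as scrapinghub.client.exceptions.ValueTooLarge. A usage sketch under the assumption that job.items.write() is backed by _ItemsResourceProxy.write (API key, job key and item size are placeholders):

from scrapinghub import ScrapinghubClient
from scrapinghub.client.exceptions import ValueTooLarge

client = ScrapinghubClient('APIKEY')   # placeholder API key
job = client.get_job('123/1/2')        # placeholder job key

huge_item = {'body': 'x' * 2 * 1024 * 1024}  # assumed to exceed the item size limit

try:
    job.items.write(huge_item)
except ValueTooLarge as exc:
    # raised by _ItemsResourceProxy.write when hubstorage rejects the payload
    print('item too large: %s' % exc)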

scrapinghub/client/spiders.py

Lines changed: 3 additions & 4 deletions
@@ -2,9 +2,8 @@
 
 from requests.compat import urljoin
 
+from .exceptions import NotFound, _wrap_http_errors
 from .jobs import Jobs
-from .exceptions import NotFound
-from .exceptions import wrap_http_errors
 from .utils import get_tags_for_update
 
 
@@ -104,7 +103,7 @@ def __init__(self, client, project_id, spider_id, spider):
         self.jobs = Jobs(client, project_id, self)
         self._client = client
 
-    @wrap_http_errors
+    @_wrap_http_errors
     def update_tags(self, add=None, remove=None):
         """Update tags for the spider.
 
@@ -118,7 +117,7 @@ def update_tags(self, add=None, remove=None):
         response = self._client._connection._session.patch(url, json=params)
         response.raise_for_status()
 
-    @wrap_http_errors
+    @_wrap_http_errors
     def list_tags(self):
         """List spider tags.
 
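
With update_tags() and list_tags() now decorated by the private helper, HTTP failures from these calls also surface as the public exception classes. A short sketch, assuming a spider fetched through project.spiders.get() (API key, project id, spider name and tags are placeholders):

from scrapinghub import ScrapinghubClient
from scrapinghub.client.exceptions import NotFound, ScrapinghubAPIError

client = ScrapinghubClient('APIKEY')                         # placeholder API key
spider = client.get_project(12345).spiders.get('myspider')   # placeholder ids

try:
    spider.update_tags(add=['nightly'], remove=['deprecated'])
    print(spider.list_tags())
except NotFound:
    print('project or spider does not exist')
except ScrapinghubAPIError as exc:
    print('tag update failed: %s' % exc)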