Skip to content

Commit f558af4

Browse files
committed
Add support for automated rate-limit handling
Refactor RestSession class to provide support for automatic retries on receiving rate-limit responses from Cisco Spark. Deprecate package-level `timeout` setting and recommend the use of the more descriptive `single_request_timeout`. Add test cases for new timers.
1 parent 8ff3540 commit f558af4

File tree

4 files changed

+372
-101
lines changed

4 files changed

+372
-101
lines changed

ciscosparkapi/__init__.py

Lines changed: 26 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,11 @@
1616
import os
1717

1818
from ciscosparkapi.exceptions import ciscosparkapiException, SparkApiError
19-
from ciscosparkapi.restsession import RestSession
19+
from ciscosparkapi.restsession import (
20+
DEFAULT_SINGLE_REQUEST_TIMEOUT,
21+
DEFAULT_RATE_LIMIT_TIMEOUT,
22+
RestSession,
23+
)
2024
from ciscosparkapi.api.people import Person, PeopleAPI
2125
from ciscosparkapi.api.rooms import Room, RoomsAPI
2226
from ciscosparkapi.api.memberships import Membership, MembershipsAPI
@@ -45,8 +49,8 @@
4549
del get_versions
4650

4751

52+
# Package Constants
4853
DEFAULT_BASE_URL = 'https://api.ciscospark.com/v1/'
49-
DEFAULT_TIMEOUT = 60
5054
ACCESS_TOKEN_ENVIRONMENT_VARIABLE = 'SPARK_ACCESS_TOKEN'
5155

5256

@@ -91,7 +95,9 @@ class CiscoSparkAPI(object):
9195
"""
9296

9397
def __init__(self, access_token=None, base_url=DEFAULT_BASE_URL,
94-
timeout=DEFAULT_TIMEOUT):
98+
timeout=None,
99+
single_request_timeout=DEFAULT_SINGLE_REQUEST_TIMEOUT,
100+
rate_limit_timeout=DEFAULT_RATE_LIMIT_TIMEOUT):
95101
"""Create a new CiscoSparkAPI object.
96102
97103
An access token must be used when interacting with the Cisco Spark API.
@@ -126,25 +132,27 @@ def __init__(self, access_token=None, base_url=DEFAULT_BASE_URL,
126132
variable.
127133
128134
"""
129-
# Process args
130135
assert access_token is None or isinstance(access_token, basestring)
131-
assert isinstance(base_url, basestring)
132-
assert isinstance(timeout, int)
133-
spark_access_token = os.environ.get(ACCESS_TOKEN_ENVIRONMENT_VARIABLE)
134-
access_token = access_token if access_token else spark_access_token
136+
env_access_token = os.environ.get(ACCESS_TOKEN_ENVIRONMENT_VARIABLE)
137+
access_token = access_token if access_token else env_access_token
135138
if not access_token:
136139
error_message = "You must provide an Spark access token to " \
137140
"interact with the Cisco Spark APIs, either via " \
138141
"a SPARK_ACCESS_TOKEN environment variable " \
139142
"or via the access_token argument."
140143
raise ciscosparkapiException(error_message)
141-
session_args = {u'timeout': timeout}
142144

143145
# Create the API session
144146
# All of the API calls associated with a CiscoSparkAPI object will
145147
# leverage a single RESTful 'session' connecting to the Cisco Spark
146148
# cloud.
147-
self._session = RestSession(access_token, base_url, **session_args)
149+
self._session = RestSession(
150+
access_token,
151+
base_url,
152+
timeout=timeout,
153+
single_request_timeout=single_request_timeout,
154+
rate_limit_timeout=rate_limit_timeout,
155+
)
148156

149157
# Spark API wrappers
150158
self.people = PeopleAPI(self._session)
@@ -170,3 +178,11 @@ def base_url(self):
170178
@property
171179
def timeout(self):
172180
return self._session.timeout
181+
182+
@property
183+
def single_request_timeout(self):
184+
return self._session.single_request_timeout
185+
186+
@property
187+
def rate_limit_timeout(self):
188+
return self._session.rate_limit_timeout

ciscosparkapi/exceptions.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@
3333
409: "The request could not be processed because it conflicts with some "
3434
"established rule of the system. For example, a person may not be "
3535
"added to a room more than once.",
36+
429: "Rate limit exceeded.",
3637
500: "Something went wrong on the server.",
3738
503: "Server is overloaded with requests. Try again later."
3839
}

0 commit comments

Comments (0)