Skip to content
This repository was archived by the owner on Apr 30, 2022. It is now read-only.

Commit bacbc58

Browse files
authored
Add test support for python 3.6. (#90)
* Add test support for Python 3.6. * Raise an error when fetching tables with paginate exceeds 1,000,000 rows. * Pandas fix for newer pandas versions. * Bump package version to 3.2.0. * The newest version of requests breaks the package. * Casing. * Minor correction. * Don't need this step.
1 parent 1d06fe5 commit bacbc58

File tree

12 files changed

+74
-73
lines changed

12 files changed

+74
-73
lines changed

.travis.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,9 +5,9 @@ python:
55
- "3.3"
66
- "3.4"
77
- "3.5"
8+
- "3.6"
89
install:
910
- pip install flake8
10-
- pip install -r requirements.txt
1111
script:
1212
- flake8
1313
- python -W always setup.py -q test

quandl/get_table.py

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
11
from quandl.model.datatable import Datatable
2+
from quandl.errors.quandl_error import LimitExceededError
23
from .api_config import ApiConfig
34
from .message import Message
45
import warnings
56
import copy
6-
import os
77

88

99
def get_table(datatable_code, **options):
@@ -24,17 +24,16 @@ def get_table(datatable_code, **options):
2424
data.extend(next_data)
2525

2626
if page_count >= ApiConfig.page_limit:
27-
if os.isatty(0):
28-
warnings.warn(Message.WARN_DATA_LIMIT_EXCEEDED, UserWarning)
29-
break
27+
raise LimitExceededError(Message.WARN_DATA_LIMIT_EXCEEDED)
3028

3129
next_cursor_id = next_data.meta['next_cursor_id']
30+
3231
if next_cursor_id is None:
3332
break
3433
elif paginate is not True and next_cursor_id is not None:
35-
if os.isatty(0):
36-
warnings.warn(Message.WARN_PAGE_LIMIT_EXCEEDED, UserWarning)
34+
warnings.warn(Message.WARN_PAGE_LIMIT_EXCEEDED, UserWarning)
3735
break
36+
3837
page_count = page_count + 1
3938
options['qopts.cursor_id'] = next_cursor_id
4039
return data.to_pandas()

quandl/model/data_mixin.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ class DataMixin(object):
66
# DataFrame will respect order of input list of list
77
def to_pandas(self, keep_column_indexes=[]):
88
data = self.to_list()
9+
910
# ensure pandas gets a list of lists
1011
if data and isinstance(data, list) and not isinstance(data[0], list):
1112
data = [data]
@@ -22,14 +23,15 @@ def to_pandas(self, keep_column_indexes=[]):
2223

2324
# unfortunately to_records() cannot handle unicode in 2.7
2425
df.index.name = str(df.index.name)
26+
2527
# keep_column_indexes are 0 based, 0 is the first column
2628
if len(keep_column_indexes) > 0:
2729
self._validate_col_index(df, keep_column_indexes)
2830
# need to decrement all our indexes by 1 because
2931
# Date is considered a column by our API, but in pandas,
3032
# it is the index, so column 0 is the first column after Date index
3133
keep_column_indexes = list([x - 1 for x in keep_column_indexes])
32-
df = df[keep_column_indexes]
34+
df = df.iloc[:, keep_column_indexes]
3335
return df
3436

3537
def to_numpy(self):

quandl/model/merged_dataset.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -162,7 +162,7 @@ def _initialize_raw_data(self):
162162
def _build_dataset_object(self, dataset_code, **options):
163163
options_copy = options.copy()
164164
# data_codes are tuples
165-
# e.g., ('GOOG/NASDAQ_AAPL', {'column_index": [1,2]})
165+
# e.g., ('WIKI/AAPL', {'column_index": [1,2]})
166166
# or strings
167167
# e.g., 'NSE/OIL'
168168
code = self._get_request_dataset_code(dataset_code)

quandl/version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
VERSION = '3.1.0'
1+
VERSION = '3.2.0'

setup.cfg

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,6 @@
11
[bdist_wheel]
22
universal = 1
3+
4+
[flake8]
5+
max-line-length = 100
6+
exclude = .git,__init__.py,tmp,__pycache__,.eggs,Quandl.egg-info,build,dist,.tox

setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
install_requires = [
1919
'pandas >= 0.14',
2020
'numpy >= 1.8',
21-
'requests >= 2.7.0',
21+
'requests >= 2.7.0, < 2.18', # Version 2.18 appears to break pulling data.
2222
'inflection >= 0.3.1',
2323
'python-dateutil',
2424
'six',

test/helpers/merged_datasets_helper.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -30,41 +30,41 @@ def setupDatasetsTest(unit_test, httpretty):
3030
unit_test.nse_oil = {'dataset': DatasetFactory.build(
3131
database_code='NSE', dataset_code='OIL')}
3232

33-
unit_test.goog_aapl = {'dataset': DatasetFactory.build(
34-
database_code='GOOG', dataset_code='NASDAQ_AAPL')}
33+
unit_test.wiki_aapl = {'dataset': DatasetFactory.build(
34+
database_code='WIKI', dataset_code='AAPL')}
3535

36-
unit_test.goog_msft = {'dataset': DatasetFactory.build(
37-
database_code='GOOG', dataset_code='NASDAQ_MSFT',
36+
unit_test.wiki_msft = {'dataset': DatasetFactory.build(
37+
database_code='WIKI', dataset_code='MSFT',
3838
newest_available_date='2015-07-30', oldest_available_date='2013-01-01')}
3939

4040
unit_test.single_col = {'dataset': DatasetFactory.build(
4141
database_code='SINGLE', dataset_code='COLUMN',
4242
newest_available_date='2015-07-30', oldest_available_date='2013-01-01')}
4343

4444
unit_test.oil_obj = Dataset('NSE/OIL', unit_test.nse_oil['dataset'])
45-
unit_test.aapl_obj = Dataset('GOOG/AAPL', unit_test.goog_aapl['dataset'])
46-
unit_test.goog_obj = Dataset('GOOG/MSFT', unit_test.goog_msft['dataset'])
45+
unit_test.aapl_obj = Dataset('WIKI/AAPL', unit_test.wiki_aapl['dataset'])
46+
unit_test.wiki_obj = Dataset('WIKI/MSFT', unit_test.wiki_msft['dataset'])
4747
unit_test.single_col_obj = Dataset('SINGLE/COLUMN', unit_test.single_col['dataset'])
4848

4949
httpretty.register_uri(httpretty.GET,
5050
re.compile(
5151
'https://www.quandl.com/api/v3/datasets/.*/metadata'),
5252
responses=[httpretty.Response(body=json.dumps(dataset))
5353
for dataset in
54-
[unit_test.nse_oil, unit_test.goog_aapl,
55-
unit_test.goog_msft]])
54+
[unit_test.nse_oil, unit_test.wiki_aapl,
55+
unit_test.wiki_msft]])
5656
# mock our query param column_index request
5757
httpretty.register_uri(httpretty.GET,
5858
"https://www.quandl.com/api/v3/datasets/SINGLE/COLUMN/data",
5959
body=json.dumps(unit_test.single_dataset_data))
6060
httpretty.register_uri(httpretty.GET,
61-
"https://www.quandl.com/api/v3/datasets/GOOG/NASDAQ_AAPL/data",
61+
"https://www.quandl.com/api/v3/datasets/WIKI/AAPL/data",
6262
body=json.dumps(unit_test.dataset_data))
6363
httpretty.register_uri(httpretty.GET,
6464
re.compile(
6565
'https://www.quandl.com/api/v3/datasets/NSE/OIL/data'),
6666
body=json.dumps(unit_test.dataset_data))
6767
httpretty.register_uri(httpretty.GET,
6868
re.compile(
69-
'https://www.quandl.com/api/v3/datasets/GOOG/NASDAQ_MSFT/data'),
69+
'https://www.quandl.com/api/v3/datasets/WIKI/MSFT/data'),
7070
body=json.dumps(unit_test.dataset_data))

test/test_connection.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,6 +67,6 @@ def test_build_request(self, mock):
6767
'accept': ('application/json, '
6868
'application/vnd.quandl+json;version=2015-04-09'),
6969
'request-source': 'python',
70-
'request-source-version': '3.1.0'},
70+
'request-source-version': '3.2.0'},
7171
params={'per_page': 10, 'page': 2})
7272
self.assertEqual(mock.call_args, expected)

test/test_get.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -121,15 +121,15 @@ def test_multiple_datasets_args_formed(self, mock):
121121
# requested_column_indexes is a dynamically added attribute
122122
self.oil_obj.requested_column_indexes = []
123123
mock.return_value = self.oil_obj
124-
get(['GOOG/NASDAQ_AAPL.1', 'GOOG/NASDAQ_MSFT.2', 'NSE/OIL'])
125-
expected = [call(('GOOG/NASDAQ_AAPL', {'column_index': [1]})),
126-
call(('GOOG/NASDAQ_MSFT', {'column_index': [2]})),
124+
get(['WIKI/AAPL.1', 'WIKI/MSFT.2', 'NSE/OIL'])
125+
expected = [call(('WIKI/AAPL', {'column_index': [1]})),
126+
call(('WIKI/MSFT', {'column_index': [2]})),
127127
call('NSE/OIL')]
128128
self.assertEqual(mock.call_args_list, expected)
129129

130130
@patch.object(MergedDataset, 'data')
131131
def test_query_params_are_formed_with_old_arg_names(self, mock_method):
132-
get(['GOOG/NASDAQ_AAPL.1', 'GOOG/NASDAQ_MSFT.2', 'NSE/OIL'],
132+
get(['WIKI/AAPL.1', 'WIKI/MSFT.2', 'NSE/OIL'],
133133
authtoken='authtoken', trim_start='2001-01-01',
134134
trim_end='2010-01-01', collapse='annual',
135135
transformation='rdiff', rows=4, sort_order='desc')
@@ -143,7 +143,7 @@ def test_query_params_are_formed_with_old_arg_names(self, mock_method):
143143

144144
@patch.object(MergedDataset, 'data')
145145
def test_query_params_are_formed_with_new_arg_names(self, mock_method):
146-
get(['GOOG/NASDAQ_AAPL.1', 'GOOG/NASDAQ_MSFT.2', 'NSE/OIL'],
146+
get(['WIKI/AAPL.1', 'WIKI/MSFT.2', 'NSE/OIL'],
147147
api_key='authtoken', start_date='2001-01-01',
148148
end_date='2010-01-01', collapse='annual',
149149
transform='rdiff', rows=4, order='desc')

0 commit comments

Comments (0)