Commit ae0578e

Merge pull request #3 from pydata/master: getting up to date

2 parents 9a760b7 + e452bf8, commit ae0578e

16 files changed: +112 -61 lines

.travis.yml

Lines changed: 2 additions & 2 deletions
@@ -10,8 +10,6 @@ env:
 matrix:
   fast_finish: true
   include:
-    - python: 2.7
-      env: PANDAS=0.24 NUMPY=1.16
     - python: 3.5
       env: PANDAS=0.21 NUMPY=1.13
     - python: 3.5
@@ -24,6 +22,8 @@ matrix:
       env: PANDAS=0.25 NUMPY=1.17
     - python: 3.7
       env: TEST_TYPE="quandl" PANDAS=0.25 NUMPY=1.17
+    - python: 3.8
+      env: PANDAS=0.25 NUMPY=1.18
     # In allow failures
     - python: 3.7
       env: TEST_TYPE="alpha_vantage" PANDAS=0.25 NUMPY=1.17

README.rst

Lines changed: 2 additions & 1 deletion
@@ -24,7 +24,8 @@ Up to date remote data access for pandas, works for multiple versions of pandas.

 .. warning::

-   v0.8.0 is the last version which officially supports Python 2.7. Future versions of ``pandas_datareader`` will end support for Python 2.x.
+   v0.8.0 is the last version which officially supports Python 2.7. Future versions of
+   ``pandas_datareader`` will end support for Python 2.x.

 .. warning::

docs/source/remote_data.rst

Lines changed: 3 additions & 3 deletions
@@ -621,10 +621,10 @@ You can specify dataset ID 'tran_sf_railac' to get corresponding data via ``DataReader``


 .. _remote_data.tsp:

-TSP Fund Data
-=============
+Thrift Savings Plan (TSP) Fund Data
+===================================

-Download mutual fund index prices for the TSP.
+Download mutual fund index prices for the Thrift Savings Plan (TSP).

 .. ipython:: python

docs/source/whatsnew.rst

Lines changed: 1 addition & 0 deletions
@@ -18,6 +18,7 @@ What's New

 These are new features and improvements of note in each release.

+.. include:: whatsnew/v0.9.0.txt
 .. include:: whatsnew/v0.8.0.txt
 .. include:: whatsnew/v0.7.0.txt
 .. include:: whatsnew/v0.6.0.txt

docs/source/whatsnew/v0.9.0.txt

Lines changed: 36 additions & 0 deletions
@@ -0,0 +1,36 @@
+.. _whatsnew_090:
+
+v0.9.0 (TBD)
+---------------------------
+
+Highlights include:
+
+
+.. contents:: What's new in v0.9.0
+    :local:
+    :backlinks: none
+
+
+.. _whatsnew_090.enhancements:
+
+Enhancements
+~~~~~~~~~~~~
+
+
+.. _whatsnew_090.api_breaking:
+
+Backwards incompatible API changes
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+.. _whatsnew_090.bug_fixes:
+
+Bug Fixes
+~~~~~~~~~
+
+- Fix Yahoo! actions bug due to change in split format. (:issue: `755`)
+- Fix FutureWarning from pandas import. (:issue: `762`)
+
+Contributors
+~~~~~~~~~~~~
+- David Stephens
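
For context on the first bug-fix entry, the affected call path is the Yahoo! actions reader, which returns dividend and split events. A minimal sketch of such a call, assuming the usual "yahoo-actions" source name; the ticker and start date are only illustrative:

    import pandas_datareader.data as web

    # Dividend and split events; the v0.9.0 fix concerns a change in the
    # split format returned by Yahoo! (see issue 755 above).
    actions = web.DataReader("AAPL", "yahoo-actions", start="2019-01-01")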

pandas_datareader/av/__init__.py

Lines changed: 4 additions & 1 deletion
@@ -78,7 +78,10 @@ def _read_lines(self, out):
                     ".".format(self.symbols)
                 )
             else:
-                raise RemoteDataError()
+                raise RemoteDataError(
+                    " Their was an issue from the data vendor "
+                    "side, here is their response: {}".format(out)
+                )
         df = df[sorted(df.columns)]
         df.columns = [id[3:] for id in df.columns]
         return df
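
The richer RemoteDataError message is visible to DataReader callers. A minimal sketch of how it might surface; the ticker, the "av-daily" source, and the environment variable are illustrative assumptions, not part of this change:

    import os

    import pandas_datareader.data as web
    from pandas_datareader._utils import RemoteDataError

    try:
        # "av-daily" routes through the Alpha Vantage base reader patched above
        df = web.DataReader(
            "IBM", "av-daily", api_key=os.getenv("ALPHAVANTAGE_API_KEY")
        )
    except RemoteDataError as exc:
        # The exception now carries the vendor's raw response instead of an
        # empty message, so rate-limit replies are visible to the caller.
        print(exc)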

pandas_datareader/compat/__init__.py

Lines changed: 11 additions & 28 deletions
@@ -1,12 +1,12 @@
 from distutils.version import LooseVersion
-import sys
+from functools import reduce
+from io import StringIO
+from urllib.error import HTTPError

 import pandas as pd
 from pandas.api.types import is_list_like, is_number
 import pandas.io.common as com
-from pandas.util.testing import assert_frame_equal
-
-PY3 = sys.version_info >= (3, 0)
+from pandas.testing import assert_frame_equal

 PANDAS_VERSION = LooseVersion(pd.__version__)

@@ -30,6 +30,7 @@
     "lmap",
     "lrange",
     "concat",
+    "reduce",
 ]


@@ -45,34 +46,16 @@ def get_filepath_or_buffer(filepath_or_buffer, encoding=None, compression=None):
     )


-if PY3:
-    from urllib.error import HTTPError
-    from functools import reduce
-
-    string_types = (str,)
-    binary_type = bytes
-    from io import StringIO
-
-    def str_to_bytes(s, encoding=None):
-        return s.encode(encoding or "ascii")
-
-    def bytes_to_str(b, encoding=None):
-        return b.decode(encoding or "utf-8")
-
+string_types = (str,)
+binary_type = bytes

-else:
-    from urllib2 import HTTPError
-    from cStringIO import StringIO

-    reduce = reduce
-    binary_type = str
-    string_types = (basestring,)  # noqa: F821
+def str_to_bytes(s, encoding=None):
+    return s.encode(encoding or "ascii")

-    def bytes_to_str(b, encoding=None):
-        return b

-    def str_to_bytes(s, encoding=None):
-        return s
+def bytes_to_str(b, encoding=None):
+    return b.decode(encoding or "utf-8")


 def lmap(*args, **kwargs):
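
With the Python 2 branch removed, the helpers become plain module-level definitions. A minimal sketch of the surviving surface, assuming nothing else in the module changes:

    from pandas_datareader.compat import (
        binary_type,
        bytes_to_str,
        str_to_bytes,
        string_types,
    )

    str_to_bytes("price")              # b'price'  (ASCII unless an encoding is given)
    bytes_to_str(b"price")             # 'price'   (UTF-8 unless an encoding is given)
    isinstance("GAZP", string_types)   # True; string_types is now just (str,)
    isinstance(b"GAZP", binary_type)   # True; binary_type is now bytes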

pandas_datareader/econdb.py

Lines changed: 1 addition & 1 deletion
@@ -23,7 +23,7 @@ def url(self):

     def read(self):
         """ read one data from specified URL """
-        results = requests.get(self.url).json()["results"]
+        results = self.session.get(self.url).json()["results"]
         df = pd.DataFrame({"dates": []}).set_index("dates")

         if self._show == "labels":
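
Because the request now goes through self.session, a caller-supplied requests.Session is actually used for Econdb queries; previously requests.get opened a fresh connection and the session argument was ignored for the data call. A minimal usage sketch; the session setup is an assumption and the series ID follows the project's Econdb docs example:

    import requests
    import pandas_datareader.data as web

    session = requests.Session()      # reused for every HTTP call the reader makes
    df = web.DataReader(
        "ticker=RGDPUS",              # series ID as in the Econdb docs example
        "econdb",
        start="2015-01-01",
        end="2019-01-01",
        session=session,
    )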

pandas_datareader/moex.py

Lines changed: 33 additions & 18 deletions
@@ -48,7 +48,7 @@ def __init__(self, *args, **kwargs):
         elif not is_list_like(self.symbols):
             self.symbols = [self.symbols]

-        self.__engines, self.__markets = {}, {}  # dicts for engines and markets
+        self.__markets_n_engines = {}  # dicts for tuples of engines and markets

     __url_metadata = "https://iss.moex.com/iss/securities/{symbol}.csv"
     __url_data = (
@@ -60,17 +60,17 @@ def __init__(self, *args, **kwargs):
     def url(self):
         """Return a list of API URLs per symbol"""

-        if not self.__engines or not self.__markets:
+        if not self.__markets_n_engines:
             raise Exception(
                 "Accessing url property before invocation "
                 "of read() or _get_metadata() methods"
             )

         return [
-            self.__url_data.format(
-                engine=self.__engines[s], market=self.__markets[s], symbol=s
-            )
+            self.__url_data.format(engine=engine, market=market, symbol=s)
             for s in self.symbols
+            if s in self.__markets_n_engines
+            for market, engine in self.__markets_n_engines[s]
         ]

     def _get_params(self, start):
@@ -81,13 +81,13 @@ def _get_params(self, start):
             "iss.dp": "point",
             "iss.df": "%Y-%m-%d",
             "iss.tf": "%H:%M:%S",
-            "iss.dft": "%Y-%m-%d %H:%M:%S",
+            "iss.dtf": "%Y-%m-%d %H:%M:%S",
             "iss.json": "extended",
             "callback": "JSON_CALLBACK",
             "from": start,
             "till": self.end_dt.strftime("%Y-%m-%d"),
             "limit": 100,
-            "start": 1,
+            "start": 0,
             "sort_order": "TRADEDATE",
             "sort_order_desc": "asc",
         }
@@ -96,7 +96,7 @@ def _get_params(self, start):
     def _get_metadata(self):
         """Get markets and engines for the given symbols"""

-        markets, engines = {}, {}
+        markets_n_engines = {}

         for symbol in self.symbols:
             response = self._get_response(self.__url_metadata.format(symbol=symbol))
@@ -118,9 +118,14 @@ def _get_metadata(self):
                     continue
                 if get_data and s != "":
                     fields = s.split(";")
-                    markets[symbol], engines[symbol] = fields[5], fields[7]
-                    break
-            if symbol not in markets or symbol not in engines:
+
+                    if symbol not in markets_n_engines:
+                        markets_n_engines[symbol] = list()
+
+                    markets_n_engines[symbol].append(
+                        (fields[5], fields[7])
+                    )  # market and engine
+            if symbol not in markets_n_engines:
                 raise IOError(
                     "{} request returned no metadata: {}\n"
                     "Typo in the security symbol `{}`?".format(
@@ -129,17 +134,20 @@ def _get_metadata(self):
                         symbol,
                     )
                 )
-        return markets, engines
+            if symbol in markets_n_engines:
+                markets_n_engines[symbol] = list(set(markets_n_engines[symbol]))
+        return markets_n_engines

     def read(self):
         """Read data"""

         try:
-            self.__markets, self.__engines = self._get_metadata()
+            self.__markets_n_engines = self._get_metadata()
+
             urls = self.url  # generate urls per symbols
             dfs = []  # an array of pandas dataframes per symbol to concatenate

-            for i in range(len(self.symbols)):
+            for i in range(len(urls)):
                 out_list = []
                 date_column = None

@@ -155,7 +163,7 @@ def read(self):
                         start_str = self.start.strftime("%Y-%m-%d")
                         start = self.start

-                    if start >= self.end or start >= dt.date.today():
+                    if start > self.end or start > dt.date.today():
                         break

                     params = self._get_params(start_str)
@@ -172,12 +180,19 @@ def read(self):
                     out_list += strings_out[1:]  # remove a CSV head line
                     if len(strings_out) < 100:  # all data recevied - break
                         break
-                str_io = StringIO("\r\n".join(out_list))
-                dfs.append(self._read_lines(str_io))  # add a new DataFrame
+
+                if len(out_list) > 0:
+                    str_io = StringIO("\r\n".join(out_list))
+                    dfs.append(self._read_lines(str_io))  # add a new DataFrame
         finally:
             self.close()

-        if len(dfs) > 1:
+        if len(dfs) == 0:
+            raise IOError(
+                "{} returned no data; "
+                "check URL or correct a date".format(self.__class__.__name__)
+            )
+        elif len(dfs) > 1:
             return concat(dfs, axis=0, join="outer", sort=True)
         else:
             return dfs[0]
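
With these changes the MOEX reader keeps every (market, engine) pair found in a security's metadata and builds one candles URL per pair, instead of keeping only the last pair found and breaking out of the loop. A minimal sketch in line with the new test below; the symbols and dates are only illustrative:

    import pandas_datareader.data as web

    # Each symbol contributes one request per (market, engine) pair discovered
    # in its metadata; the per-board frames are concatenated by read().
    df = web.DataReader(
        ["GAZP", "SIBN"], "moex", start="2019-12-26", end="2019-12-26"
    )
    print(df.shape)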

pandas_datareader/tests/test_moex.py

Lines changed: 9 additions & 0 deletions
@@ -15,3 +15,12 @@ def test_moex_datareader(self):
             assert "SECID" in df.columns
         except HTTPError as e:
             pytest.skip(e)
+
+    def test_moex_stock_datareader(self):
+        try:
+            df = web.DataReader(
+                ["GAZP", "SIBN"], "moex", start="2019-12-26", end="2019-12-26"
+            )
+            assert df.size == 720
+        except HTTPError as e:
+            pytest.skip(e)
