
Commit 0da72f9

[*] Fixed handling of a changed MOEX behavior for shares data.
[+] Added a test for the fixed functionality.
Parent: 63a51b4

2 files changed: 38 additions & 18 deletions

pandas_datareader/moex.py

Lines changed: 29 additions & 18 deletions
@@ -48,7 +48,7 @@ def __init__(self, *args, **kwargs):
         elif not is_list_like(self.symbols):
             self.symbols = [self.symbols]
 
-        self.__engines, self.__markets = {}, {}  # dicts for engines and markets
+        self.__markets_n_engines = {}  # dicts for tuples of engines and markets
 
     __url_metadata = "https://iss.moex.com/iss/securities/{symbol}.csv"
     __url_data = (
@@ -60,17 +60,16 @@ def __init__(self, *args, **kwargs):
     def url(self):
         """Return a list of API URLs per symbol"""
 
-        if not self.__engines or not self.__markets:
+        if not self.__markets_n_engines:
            raise Exception(
                 "Accessing url property before invocation "
                 "of read() or _get_metadata() methods"
             )
 
         return [
             self.__url_data.format(
-                engine=self.__engines[s], market=self.__markets[s], symbol=s
-            )
-            for s in self.symbols
+                engine=engine, market=market, symbol=s
+            ) for s in self.symbols if s in self.__markets_n_engines for market, engine in self.__markets_n_engines[s]
         ]
 
     def _get_params(self, start):
@@ -81,13 +80,13 @@ def _get_params(self, start):
             "iss.dp": "point",
             "iss.df": "%Y-%m-%d",
             "iss.tf": "%H:%M:%S",
-            "iss.dft": "%Y-%m-%d %H:%M:%S",
+            "iss.dtf": "%Y-%m-%d %H:%M:%S",
             "iss.json": "extended",
             "callback": "JSON_CALLBACK",
             "from": start,
             "till": self.end_dt.strftime("%Y-%m-%d"),
             "limit": 100,
-            "start": 1,
+            "start": 0,
             "sort_order": "TRADEDATE",
             "sort_order_desc": "asc",
         }
@@ -96,7 +95,7 @@ def _get_params(self, start):
     def _get_metadata(self):
         """Get markets and engines for the given symbols"""
 
-        markets, engines = {}, {}
+        markets_n_engines = {}
 
         for symbol in self.symbols:
             response = self._get_response(self.__url_metadata.format(symbol=symbol))
@@ -118,9 +117,14 @@ def _get_metadata(self):
                     continue
                 if get_data and s != "":
                     fields = s.split(";")
-                    markets[symbol], engines[symbol] = fields[5], fields[7]
-                    break
-            if symbol not in markets or symbol not in engines:
+
+                    if symbol not in markets_n_engines:
+                        markets_n_engines[symbol] = list()
+
+                    markets_n_engines[symbol].append(
+                        (fields[5], fields[7])
+                    )  # market and engine
+            if symbol not in markets_n_engines:
                 raise IOError(
                     "{} request returned no metadata: {}\n"
                     "Typo in the security symbol `{}`?".format(
@@ -129,17 +133,20 @@ def _get_metadata(self):
                         symbol,
                     )
                 )
-        return markets, engines
+            if symbol in markets_n_engines:
+                markets_n_engines[symbol] = list(set(markets_n_engines[symbol]))
+        return markets_n_engines
 
     def read(self):
         """Read data"""
 
         try:
-            self.__markets, self.__engines = self._get_metadata()
+            self.__markets_n_engines = self._get_metadata()
+
             urls = self.url  # generate urls per symbols
             dfs = []  # an array of pandas dataframes per symbol to concatenate
 
-            for i in range(len(self.symbols)):
+            for i in range(len(urls)):
                 out_list = []
                 date_column = None
 
@@ -155,7 +162,7 @@ def read(self):
                 start_str = self.start.strftime("%Y-%m-%d")
                 start = self.start
 
-                if start >= self.end or start >= dt.date.today():
+                if start > self.end or start > dt.date.today():
                     break
 
                 params = self._get_params(start_str)
@@ -172,12 +179,16 @@ def read(self):
                     out_list += strings_out[1:]  # remove a CSV head line
                     if len(strings_out) < 100:  # all data recevied - break
                         break
-                str_io = StringIO("\r\n".join(out_list))
-                dfs.append(self._read_lines(str_io))  # add a new DataFrame
+
+                if len(out_list) > 0:
+                    str_io = StringIO("\r\n".join(out_list))
+                    dfs.append(self._read_lines(str_io))  # add a new DataFrame
         finally:
             self.close()
 
-        if len(dfs) > 1:
+        if len(dfs) == 0:
+            raise IOError("{} returned no data; check URL for invalid or correct a date interval".format(self.__class__.__name__))
+        elif len(dfs) > 1:
             return concat(dfs, axis=0, join="outer", sort=True)
         else:
             return dfs[0]
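
Taken together, the moex.py changes replace the two per-symbol dicts (__engines and __markets) with a single __markets_n_engines dict that maps each symbol to a list of (market, engine) tuples, so a symbol that MOEX now reports on several markets expands into one history URL per tuple instead of failing. The sketch below is a minimal, self-contained illustration of that expansion; URL_TEMPLATE mimics the assumed shape of MoexReader.__url_data, and the (market, engine) values for GAZP and SIBN are illustrative assumptions rather than captured ISS metadata.

# Standalone sketch of the new URL expansion (not part of the commit).
URL_TEMPLATE = (
    "https://iss.moex.com/iss/history/engines/{engine}/"
    "markets/{market}/securities/{symbol}.csv"
)  # assumed shape of MoexReader.__url_data

# What _get_metadata() would build: symbol -> list of (market, engine) tuples
# parsed from fields[5] and fields[7] of the securities metadata CSV.
markets_n_engines = {
    "GAZP": [("shares", "stock")],
    "SIBN": [("shares", "stock")],
}
symbols = ["GAZP", "SIBN", "BOGUS"]  # symbols without metadata are skipped

urls = [
    URL_TEMPLATE.format(engine=engine, market=market, symbol=s)
    for s in symbols
    if s in markets_n_engines
    for market, engine in markets_n_engines[s]
]

for url in urls:
    print(url)  # one URL per (symbol, market, engine) combination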

pandas_datareader/tests/test_moex.py

Lines changed: 9 additions & 0 deletions
@@ -15,3 +15,12 @@ def test_moex_datareader(self):
             assert "SECID" in df.columns
         except HTTPError as e:
             pytest.skip(e)
+
+    def test_moex_stock_datareader(self):
+        try:
+            df = web.DataReader(
+                ["GAZP", "SIBN"], "moex", start="2019-12-26", end="2019-12-26"
+            )
+            assert df.size == 720
+        except HTTPError as e:
+            pytest.skip(e)
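
The new test fetches share history for two symbols on a single trading day and checks the total element count. For local verification outside pytest, the same call can be reproduced interactively; the snippet below is a usage sketch that assumes network access to iss.moex.com and a pandas-datareader build containing this fix.

# Usage sketch mirroring test_moex_stock_datareader (not part of the commit).
import pandas_datareader.data as web

df = web.DataReader(
    ["GAZP", "SIBN"], "moex", start="2019-12-26", end="2019-12-26"
)
print(df.shape)                      # rows x columns actually returned by ISS
print(sorted(df["SECID"].unique()))  # expected to include GAZP and SIBN

The test pins df.size to 720 for that specific date; an interactive run mainly confirms that both symbols come back and that the frame is non-empty.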
