Commit 57766a5

Merge pull request #742 from ikonnikov/master
[BUG] Fixed handling of changed MOEX behavior for shares data.
2 parents 18f3d8a + 3b71d7f commit 57766a5

File tree

2 files changed (+42, -18 lines)
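For orientation before the diff, here is a minimal usage sketch of the MOEX reader this commit touches. The symbols and dates are copied from the new test added below in pandas_datareader/tests/test_moex.py; network access to iss.moex.com is assumed, and the printed shape is only illustrative.

import pandas_datareader.data as web

# Fetch daily share history for two MOEX tickers over a single trading day.
df = web.DataReader(
    ["GAZP", "SIBN"], "moex", start="2019-12-26", end="2019-12-26"
)
print(df.shape)  # rows per symbol and board depend on what ISS returns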


pandas_datareader/moex.py

Lines changed: 33 additions & 18 deletions
@@ -48,7 +48,7 @@ def __init__(self, *args, **kwargs):
         elif not is_list_like(self.symbols):
             self.symbols = [self.symbols]

-        self.__engines, self.__markets = {}, {}  # dicts for engines and markets
+        self.__markets_n_engines = {}  # dicts for tuples of engines and markets

     __url_metadata = "https://iss.moex.com/iss/securities/{symbol}.csv"
     __url_data = (
@@ -60,17 +60,17 @@ def __init__(self, *args, **kwargs):
     def url(self):
         """Return a list of API URLs per symbol"""

-        if not self.__engines or not self.__markets:
+        if not self.__markets_n_engines:
             raise Exception(
                 "Accessing url property before invocation "
                 "of read() or _get_metadata() methods"
             )

         return [
             self.__url_data.format(
-                engine=self.__engines[s], market=self.__markets[s], symbol=s
-            )
-            for s in self.symbols
+                engine=engine, market=market, symbol=s
+            ) for s in self.symbols if s in self.__markets_n_engines
+            for market, engine in self.__markets_n_engines[s]
         ]

     def _get_params(self, start):
@@ -81,13 +81,13 @@ def _get_params(self, start):
             "iss.dp": "point",
             "iss.df": "%Y-%m-%d",
             "iss.tf": "%H:%M:%S",
-            "iss.dft": "%Y-%m-%d %H:%M:%S",
+            "iss.dtf": "%Y-%m-%d %H:%M:%S",
             "iss.json": "extended",
             "callback": "JSON_CALLBACK",
             "from": start,
             "till": self.end_dt.strftime("%Y-%m-%d"),
             "limit": 100,
-            "start": 1,
+            "start": 0,
             "sort_order": "TRADEDATE",
             "sort_order_desc": "asc",
         }
@@ -96,7 +96,7 @@ def _get_params(self, start):
     def _get_metadata(self):
         """Get markets and engines for the given symbols"""

-        markets, engines = {}, {}
+        markets_n_engines = {}

         for symbol in self.symbols:
             response = self._get_response(self.__url_metadata.format(symbol=symbol))
@@ -118,9 +118,14 @@ def _get_metadata(self):
                     continue
                 if get_data and s != "":
                     fields = s.split(";")
-                    markets[symbol], engines[symbol] = fields[5], fields[7]
-                    break
-            if symbol not in markets or symbol not in engines:
+
+                    if symbol not in markets_n_engines:
+                        markets_n_engines[symbol] = list()
+
+                    markets_n_engines[symbol].append(
+                        (fields[5], fields[7])
+                    )  # market and engine
+            if symbol not in markets_n_engines:
                 raise IOError(
                     "{} request returned no metadata: {}\n"
                     "Typo in the security symbol `{}`?".format(
@@ -129,17 +134,20 @@ def _get_metadata(self):
                         symbol,
                     )
                 )
-        return markets, engines
+            if symbol in markets_n_engines:
+                markets_n_engines[symbol] = list(set(markets_n_engines[symbol]))
+        return markets_n_engines

     def read(self):
         """Read data"""

         try:
-            self.__markets, self.__engines = self._get_metadata()
+            self.__markets_n_engines = self._get_metadata()
+
             urls = self.url  # generate urls per symbols
             dfs = []  # an array of pandas dataframes per symbol to concatenate

-            for i in range(len(self.symbols)):
+            for i in range(len(urls)):
                 out_list = []
                 date_column = None

@@ -155,7 +163,7 @@ def read(self):
                     start_str = self.start.strftime("%Y-%m-%d")
                     start = self.start

-                if start >= self.end or start >= dt.date.today():
+                if start > self.end or start > dt.date.today():
                     break

                 params = self._get_params(start_str)
@@ -172,12 +180,19 @@ def read(self):
                     out_list += strings_out[1:]  # remove a CSV head line
                     if len(strings_out) < 100:  # all data recevied - break
                         break
-                str_io = StringIO("\r\n".join(out_list))
-                dfs.append(self._read_lines(str_io))  # add a new DataFrame
+
+                if len(out_list) > 0:
+                    str_io = StringIO("\r\n".join(out_list))
+                    dfs.append(self._read_lines(str_io))  # add a new DataFrame
         finally:
             self.close()

-        if len(dfs) > 1:
+        if len(dfs) == 0:
+            raise IOError(
+                "{} returned no data; "
+                "check URL or correct a date".format(self.__class__.__name__)
+            )
+        elif len(dfs) > 1:
             return concat(dfs, axis=0, join="outer", sort=True)
         else:
             return dfs[0]
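In short, the two parallel dicts (symbol to market, symbol to engine) become a single __markets_n_engines dict that maps each symbol to a de-duplicated list of (market, engine) tuples, and the url property now emits one URL per pair rather than one per symbol. Below is a small standalone sketch of that shape, using a hypothetical URL template and sample metadata values rather than the module's real constants.

# Hypothetical stand-ins for MoexReader.__url_data and real ISS metadata.
URL_TEMPLATE = "https://iss.example/{engine}/{market}/{symbol}.csv"
markets_n_engines = {
    "GAZP": [("shares", "stock")],  # sample (market, engine) pair
    "SIBN": [("shares", "stock")],
}

symbols = ["GAZP", "SIBN", "UNKNOWN"]

# Mirrors the nested comprehension in the url property: symbols without
# metadata are skipped, and each (market, engine) pair yields its own URL.
urls = [
    URL_TEMPLATE.format(engine=engine, market=market, symbol=s)
    for s in symbols if s in markets_n_engines
    for market, engine in markets_n_engines[s]
]
print(urls)  # two URLs; "UNKNOWN" produces none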

pandas_datareader/tests/test_moex.py

Lines changed: 9 additions & 0 deletions
@@ -15,3 +15,12 @@ def test_moex_datareader(self):
             assert "SECID" in df.columns
         except HTTPError as e:
             pytest.skip(e)
+
+    def test_moex_stock_datareader(self):
+        try:
+            df = web.DataReader(
+                ["GAZP", "SIBN"], "moex", start="2019-12-26", end="2019-12-26"
+            )
+            assert df.size == 720
+        except HTTPError as e:
+            pytest.skip(e)
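To exercise the MOEX tests locally (they call the live ISS API, so network access is required and HTTPError leads to a skip), one option is to invoke pytest programmatically; the path below assumes you run it from the repository root.

import pytest

# Equivalent to running `pytest pandas_datareader/tests/test_moex.py` from a shell.
raise SystemExit(pytest.main(["pandas_datareader/tests/test_moex.py"]))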
