
Commit 7496711

MAINT: Switch to PyPI and bump min versions
Remove conda and use the system Python. Bump minimum supported versions.
1 parent 38d275a commit 7496711
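
In practice, each CI job now installs pinned versions straight from PyPI with the system Python instead of provisioning a Miniconda environment. A minimal sketch of what one job boils down to after this change, using the pins from the Python 3.7 matrix entry below (the real values come from the PANDAS/NUMPY variables in the Travis matrix, not from this commit message):

# roughly one CI job's install step after this commit (illustrative pins from the 3.7 entry)
pip install pip --upgrade
pip install numpy==1.17 pandas==0.25     # replaces conda create / conda install
pip install codecov coveralls beautifulsoup4 flake8
python setup.py install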

File tree

.travis.yml
ci/pypi-install.sh
pandas_datareader/econdb.py
requirements-dev.txt
setup.cfg

5 files changed: +51 -66 lines


.travis.yml

Lines changed: 18 additions & 64 deletions
@@ -1,94 +1,48 @@
-sudo: false
-
+dist: bionic
 language: python
 
 env:
   global:
     # Doctr deploy key for pydata/pandas-datareader
     - secure: "iGbOAbBSV5y0TKDh2CifRSk6OpLA9GbEEL/hscHFLSDDUCWcdfvYXda3SWJFWyoQ5QUxSigXWd+ukr4u92d7lmB7m3TWj6BAMNuRpatTgnejLNwLvNeYdvLAxPvx39Cq85frd1Rx1beBLn3h/4wm4Ah+dR5W9NH8+x3OuZMH3Eo="
 
-
 matrix:
   fast_finish: true
   include:
-    - dist: bionic
-      env:
-        - PYTHON=2.7 PANDAS=0.19.2
-    - dist: bionic
-      env:
-        - PYTHON=2.7 PANDAS=0.22
-    - dist: bionic
-      env:
-        - PYTHON=3.5 PANDAS=0.19.2
-    - dist: bionic
-      env:
-        - PYTHON=3.5 PANDAS=0.20.3
-    - dist: bionic
-      env:
-        - PYTHON=3.6 PANDAS=0.23.0
-    - dist: bionic
-      env:
-        - PYTHON=3.7 PANDAS=0.25 DOCBUILD=true
+    - python: 2.7
+      env: PANDAS=0.24 NUMPY=1.16
+    - python: 3.5
+      env: PANDAS=0.21 NUMPY=1.13
+    - python: 3.5
+      env: PANDAS=0.22 NUMPY=1.13
+    - python: 3.6
+      env: PANDAS=0.23 NUMPY=1.14
+    - python: 3.7
+      env: PANDAS=0.25 NUMPY=1.17 DOCBUILD=true
     # In allow failures
-    - dist: bionic
-      env:
-        - PYTHON=3.6 PANDAS="MASTER"
+    - env: PYTHON=3.7 PANDAS="MASTER"
   allow_failures:
-    - dist: bionic
-      env:
-        - PYTHON=3.6 PANDAS="MASTER"
+    - env: PYTHON=3.7 PANDAS="MASTER"
 
 install:
-  - pip install -qq flake8
-  # You may want to periodically update this, although the conda update
-  # conda line below will keep everything up-to-date. We do this
-  # conditionally because it saves us some downloading if the version is
-  # the same.
-  - if [[ "$PYTHON" == "2.7" ]]; then
-      wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh;
-    else
-      wget http://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
-    fi
-  - bash miniconda.sh -b -p $HOME/miniconda
-  - export PATH="$HOME/miniconda/bin:$PATH"
-  - hash -r
-  - conda config --set always_yes yes --set changeps1 no
-  - conda config --add channels pandas
-  - conda update -q conda
-  # Useful for debugging any issues with conda
-  - conda info -a
-  - conda create -q -n test-environment python=$PYTHON coverage setuptools html5lib lxml pytest pytest-cov wrapt
-  - source activate test-environment
-  - if [[ "$PANDAS" == "MASTER" ]]; then
-      conda install numpy pytz python-dateutil;
-      PRE_WHEELS="https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com";
-      pip install --pre --upgrade --timeout=60 -f $PRE_WHEELS pandas;
-    else
-      conda install pandas=$PANDAS;
-    fi
-  - if [[ $DOCBUILD ]]; then
-      conda install sphinx ipython matplotlib;
-      pip install sphinx_rtd_theme doctr;
-    fi
-  - pip install pip --upgrade
-  - pip install codecov coveralls beautifulsoup4
-  - conda list
+  - source ci/pypi-install.sh;
+  - pip install codecov coveralls beautifulsoup4 flake8
+  - pip list
   - python setup.py install
 
 script:
-  - export ENIGMA_API_KEY=$ENIGMA_API_KEY
   - pytest -s -r xX --cov-config .coveragerc --cov=pandas_datareader --cov-report xml:/tmp/cov-datareader.xml --junitxml=/tmp/datareader.xml
   - flake8 --version
   - flake8 pandas_datareader
 
 after_script:
   - |
-    if [[ ${DOCBUILD} ]]; then
+    if [[ "$DOCBUILD" ]]; then
       cd docs
       make html && make html
       cd ..
       doctr deploy devel --build-tags
-      if [[ -z ${TRAVIS_TAG} ]]; then
+      if [[ -z "$TRAVIS_TAG" ]]; then
        echo "Not a tagged build."
       else
        doctr deploy stable --build-tags

ci/pypi-install.sh

Lines changed: 16 additions & 0 deletions
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+
+set +x
+echo "PyPI install"
+
+pip install pip --upgrade
+pip install numpy=="$NUMPY" pytz python-dateutil coverage setuptools html5lib lxml pytest pytest-cov wrapt
+if [[ "$PANDAS" == "MASTER" ]]; then
+  PRE_WHEELS="https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com"
+  pip install --pre --upgrade --timeout=60 -f "$PRE_WHEELS" pandas
+else
+  pip install pandas=="$PANDAS"
+fi
+if [[ "$DOCBUILD" ]]; then
+  pip install sphinx ipython matplotlib sphinx_rtd_theme doctr
+fi
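
Because the script only reads PANDAS, NUMPY and DOCBUILD from the environment, a CI job's dependency install can be approximated locally by setting those variables before running it. A sketch (the version numbers are illustrative; any pair from the Travis matrix above works):

# reproduce one CI job's dependency install locally (illustrative versions)
PANDAS=0.25 NUMPY=1.17 DOCBUILD=true bash ci/pypi-install.sh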

pandas_datareader/econdb.py

Lines changed: 4 additions & 2 deletions
@@ -1,6 +1,8 @@
+from pandas_datareader.compat import string_types
+
 import requests
 import pandas as pd
-import pandas.compat as compat
+
 
 from pandas_datareader.base import _BaseReader
 
@@ -15,7 +17,7 @@ class EcondbReader(_BaseReader):
     @property
     def url(self):
         """API URL"""
-        if not isinstance(self.symbols, compat.string_types):
+        if not isinstance(self.symbols, string_types):
             raise ValueError('data name must be string')
 
         return ('{0}?{1}&format=json&page_size=500&expand=meta'

requirements-dev.txt

Lines changed: 2 additions & 0 deletions
@@ -2,3 +2,5 @@ ipython
 requests_cache
 sphinx
 sphinx_rtd_theme
+black
+isort
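
black and isort are added here only as development tools, not runtime dependencies. A hedged sketch of how a contributor might use them after installing the dev requirements (the exact flags depend on the installed versions: isort 5+ recurses into packages by default, while isort 4.x needs --recursive):

pip install -r requirements-dev.txt
black pandas_datareader                 # reformat the package in place
isort --recursive pandas_datareader     # sort imports using the [isort] settings added to setup.cfg below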

setup.cfg

Lines changed: 11 additions & 0 deletions
@@ -7,3 +7,14 @@ style = pep440
 versionfile_source = pandas_datareader/_version.py
 versionfile_build = pandas_datareader/_version.py
 tag_prefix = v
+
+[isort]
+known_compat=pandas_datareader.compat.*
+sections=FUTURE,COMPAT,STDLIB,THIRDPARTY,PRE_CORE,FIRSTPARTY,LOCALFOLDER
+known_first_party=pandas_datareader
+known_third_party=numpy,pandas,pytest,requests
+multi_line_output=0
+force_grid_wrap=0
+combine_as_imports=True
+force_sort_within_sections=True
+line_width=99
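
The known_compat/sections pair defines a custom COMPAT import group, so imports from pandas_datareader.compat sort into their own section ahead of stdlib and third-party imports, which matches the new import placement in econdb.py above. isort reads the [isort] block from setup.cfg automatically when run from the repository root, so a single file can be checked against this configuration with the standard check flags, for example:

isort --check-only --diff pandas_datareader/econdb.py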

0 commit comments
