Skip to content

Commit 746269c

Browse files
committed
Fix tests and workflows
1 parent cd6e9c0 commit 746269c

File tree

9 files changed

+4888
-4650
lines changed

9 files changed

+4888
-4650
lines changed

.github/workflows/publish.yml

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
# Publish the package to PyPI with Poetry whenever a GitHub release
# is published.
name: Build and publish python package

on:
  release:
    types: [ published ]

jobs:
  publish-service-client-package:
    runs-on: ubuntu-latest
    permissions:
      # The publish action pushes version bumps back to the repo.
      contents: write
    steps:
      - name: Publish PyPi package
        uses: code-specialist/pypi-poetry-publish@v1
        with:
          ACCESS_TOKEN: ${{ secrets.REPOSITORY_ACCESS_TOKEN }}
          PUBLISH_REGISTRY_PASSWORD: ${{ secrets.PYPI_TOKEN }}
          BRANCH: "main"
          POETRY_VERSION: "1.7.1"
          POETRY_CORE_VERSION: "1.8.1"

.github/workflows/test.yml

Lines changed: 114 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,114 @@
1+
# Lint and run the unit-test suite on every push and pull request,
# across a matrix of operating systems and Python versions.
name: test

on:
  push:
    branches:
      - '*' # matches every branch that doesn't contain a '/'
      - '*/*' # matches every branch containing a single '/'
      - '**' # matches every branch
  pull_request:
    branches:
      - '*' # matches every branch that doesn't contain a '/'
      - '*/*' # matches every branch containing a single '/'
      - '**' # matches every branch

jobs:
  linting:
    runs-on: ubuntu-latest
    steps:
      #----------------------------------------------
      # check-out repo and set-up python
      #----------------------------------------------
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
      #----------------------------------------------
      # load pip cache if cache exists
      #----------------------------------------------
      - uses: actions/cache@v3
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip
          restore-keys: ${{ runner.os }}-pip
      #----------------------------------------------
      # install and run linters
      #----------------------------------------------
      - run: python -m pip install black flake8 isort
      # FIX: lint this repository's package (pyindicators), not the
      # stale "investing_algorithm_framework" path this file was
      # copied from — that directory does not exist here, so flake8
      # was not linting anything.
      - run: |
          flake8 ./pyindicators
  test:
    needs: linting
    strategy:
      fail-fast: true
      matrix:
        os: [ "ubuntu-latest", "macos-latest", "windows-latest" ]
        python-version: [ "3.8", "3.9", "3.10", "3.11" ]
    defaults:
      run:
        shell: bash
    runs-on: ${{ matrix.os }}
    steps:
      #----------------------------------------------
      # check-out repo and set-up python
      #----------------------------------------------
      - name: Check out repository
        uses: actions/checkout@v4
      - name: Set up python ${{ matrix.python-version }}
        id: setup-python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      #----------------------------------------------
      # ----- install distutils if needed -----
      #----------------------------------------------
      - name: Install distutils on Ubuntu
        if: matrix.os == 'ubuntu-latest'
        run: |
          sudo add-apt-repository ppa:deadsnakes/ppa
          sudo apt-get update
          sudo apt install python${{ matrix.python-version }}-distutils
      #----------------------------------------------
      # ----- install & configure poetry -----
      #----------------------------------------------
      - name: Install Poetry
        uses: snok/install-poetry@v1
        with:
          version: 1.7.1
          virtualenvs-create: true
          virtualenvs-in-project: true
      #----------------------------------------------
      # load cached venv if cache exists
      #----------------------------------------------
      - name: Load cached venv
        id: cached-poetry-dependencies
        uses: actions/cache@v3
        with:
          path: .venv
          key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }}
      #----------------------------------------------
      # install dependencies if cache does not exist
      #----------------------------------------------
      - name: Install dependencies
        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
        run: |
          poetry install --no-interaction --no-root
      #----------------------------------------------
      # install your root project, if required
      #----------------------------------------------
      - name: Install library
        run: |
          poetry install --no-interaction
      #----------------------------------------------
      # add matrix specifics and run test suite
      #----------------------------------------------
      # NOTE(review): $VENV is the venv-activation shortcut from the
      # snok/install-poetry examples — confirm it is set in this
      # environment, otherwise use `source .venv/bin/activate`.
      - name: Run tests
        run: |
          source $VENV
          coverage run -m unittest discover -s tests
#      #----------------------------------------------
#      # upload coverage stats
#      #----------------------------------------------
#      - name: Upload coverage
#        uses: codecov/codecov-action@v3
#        with:
#          file: ./coverage.xml
#          fail_ci_if_error: true

pyindicators/indicators/exponential_moving_average.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
from typing import Union
22
from pandas import DataFrame as PdDataFrame
33
from polars import DataFrame as PlDataFrame
4+
import polars as pl
45
from pyindicators.exceptions import PyIndicatorException
56

67
def ema(

tests/indicators/test_ema.py

Lines changed: 70 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -1,44 +1,76 @@
1-
from datetime import timedelta
2-
from unittest import TestCase
3-
41
import pandas as pd
import polars as pl
import pandas.testing as pdt
from polars.testing import assert_frame_equal

from tests.resources import TestBaseline
from pyindicators import ema


class Test(TestBaseline):
    """Baseline regression test for the 200-period EMA.

    Runs ``pyindicators.ema`` over the shared OHLCV source CSV — once
    through pandas and once through polars — and compares the result
    against a pre-computed reference CSV.
    """

    correct_output_csv_filename = \
        "EMA_200_BTC-EUR_BINANCE_15m_2023-12-01:00:00_2023-12-25:00:00.csv"

    def generate_pandas_df(self, polars_source_df):
        # NOTE(review): despite the parameter name (inherited from the
        # TestBaseline hook signature), a pandas frame is passed here.
        return ema(
            data=polars_source_df,
            period=200,
            result_column="EMA_200",
            source_column="Close"
        )

    def generate_polars_df(self, pandas_source_df):
        # NOTE(review): despite the parameter name (inherited from the
        # TestBaseline hook signature), a polars frame is passed here.
        return ema(
            data=pandas_source_df,
            period=200,
            result_column="EMA_200",
            source_column="Close"
        )

    def test_comparison_pandas(self):

        # Reference output and raw source, both as pandas dataframes.
        correct_output_pd = pd.read_csv(self.get_correct_output_csv_path())
        source = pd.read_csv(self.get_source_csv_path())

        # Compute the EMA and restrict to the reference columns.
        output = self.generate_pandas_df(source)
        output = output[correct_output_pd.columns]

        # Normalize 'Datetime' on both sides to tz-naive datetimes so
        # the comparison does not depend on timezone metadata.
        for frame in (output, correct_output_pd):
            frame["Datetime"] = \
                pd.to_datetime(frame["Datetime"]).dt.tz_localize(None)

        pdt.assert_frame_equal(correct_output_pd, output)

    def test_comparison_polars(self):

        # Reference output and raw source, both as polars dataframes.
        correct_output_pl = pl.read_csv(self.get_correct_output_csv_path())
        source = pl.read_csv(self.get_source_csv_path())

        # Compute the EMA.
        output = self.generate_polars_df(source)

        # Parse the 'Datetime' strings into datetimes on both sides.
        parse_datetime = \
            pl.col("Datetime").str.strptime(pl.Datetime).alias("Datetime")
        output = output.with_columns(parse_datetime)
        correct_output_pl = correct_output_pl.with_columns(parse_datetime)

        # Align column order, drop timezone info, then compare.
        output = output[correct_output_pl.columns]
        output = self.make_polars_column_datetime_naive(output, "Datetime")
        correct_output_pl = self.make_polars_column_datetime_naive(
            correct_output_pl, "Datetime"
        )

        assert_frame_equal(correct_output_pl, output)

tests/resources/__init__.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
# Public test-resource API: expose the shared baseline TestCase.
from .test_baseline import TestBaseline

__all__ = [
    "TestBaseline",
]

tests/resources/test_baseline.py

Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
import os
2+
3+
import polars as pl
4+
from abc import abstractmethod
5+
from unittest import TestCase
6+
7+
8+
class TestBaseline(TestCase):
    """Shared baseline for indicator regression tests.

    Subclasses implement the ``generate_*_df`` hooks to run an indicator
    over the shared OHLCV source frame; their test methods compare the
    result against a pre-computed "correct" CSV located under
    ``tests/test_data/correct_test_data``.
    """

    # Name of the reference CSV inside tests/test_data/correct_test_data;
    # must be overridden by each subclass.
    correct_output_csv_filename = None
    # Shared OHLCV input inside tests/test_data/source_test_data.
    source_csv_filename = \
        "OHLCV_BTC-EUR_BINANCE_15m_2023-12-01:00:00_2023-12-25:00:00.csv"
    # Optional name of the indicator's result column (set by subclasses).
    result_column = None

    @abstractmethod
    def generate_polars_df(self, pandas_source_df):
        """Run the indicator under test on a polars source frame."""
        pass

    @abstractmethod
    def generate_pandas_df(self, polars_source_df):
        """Run the indicator under test on a pandas source frame."""
        pass

    def make_polars_column_datetime_naive(
        self, df: pl.DataFrame, column: str
    ) -> pl.DataFrame:
        """Return *df* with *column* as a timezone-naive datetime.

        The column is cast to datetime, normalized to UTC, and cast to
        a plain datetime again so the timezone information is dropped.
        """
        return df.with_columns(
            pl.col(column)
            .cast(pl.Datetime)
            .dt.convert_time_zone("UTC")
            .cast(pl.Datetime)
            .alias(column)
        )

    def _test_data_path(self, subdirectory, filename):
        """Absolute path of *filename* under ``tests/test_data/<subdirectory>``.

        This file lives in ``tests/resources``, so the tests directory is
        two levels up from the resolved module path.
        """
        tests_dir = os.path.dirname(
            os.path.dirname(os.path.realpath(__file__))
        )
        return os.path.abspath(
            os.path.join(tests_dir, "test_data", subdirectory, filename)
        )

    def get_correct_output_csv_path(self):
        """Path of the pre-computed reference CSV for this test case."""
        return self._test_data_path(
            "correct_test_data", self.correct_output_csv_filename
        )

    def get_source_csv_path(self):
        """Path of the shared OHLCV source CSV."""
        return self._test_data_path(
            "source_test_data", self.source_csv_filename
        )

0 commit comments

Comments
 (0)