Skip to content

Commit b8d9864

Browse files
deven367 and elephaint authored
[chore]: fix requirements to build whl (#1436)
Co-authored-by: elephaint <osprangers@gmail.com>
1 parent 5f2354a commit b8d9864

File tree

7 files changed

+298
-917
lines changed

7 files changed

+298
-917
lines changed

.circleci/config.yml

Lines changed: 73 additions & 32 deletions
Original file line number · Diff line number · Diff line change
@@ -7,39 +7,60 @@ jobs:
77
steps:
88
- checkout
99
- run:
10-
name: Install dependencies
10+
name: Install Git
11+
command: |
12+
apt-get update && apt-get install -y git
13+
- run:
14+
name: Install uv
1115
command: |
1216
pip install uv
13-
uv venv --python 3.10
17+
- restore_cache:
18+
keys:
19+
- uv-cache-pytest-{{ checksum "uv.lock" }}
20+
- uv-cache-pytest-
1421
- run:
1522
name: Run pytest
1623
no_output_timeout: 20m
1724
command: |
18-
source .venv/bin/activate
19-
uv pip install -e '.[dev,aws,spark]'
25+
uv sync --group dev --group aws --group spark
2026
export GIT_PYTHON_REFRESH=quiet
21-
pytest -k "not test_autonlinear_longer_horizon"
27+
uv run pytest -k "not test_autonlinear_longer_horizon" --no-cov
28+
- save_cache:
29+
key: uv-cache-pytest-{{ checksum "uv.lock" }}
30+
paths:
31+
- ~/.cache/uv
32+
- .venv
2233
test-model-performance:
2334
resource_class: xlarge
2435
docker:
2536
- image: python:3.10-slim
2637
steps:
2738
- checkout
2839
- run:
29-
name: Install dependencies
40+
name: Install Git
41+
command: |
42+
apt-get update && apt-get install -y git
43+
- run:
44+
name: Install uv
3045
command: |
3146
pip install uv
32-
uv venv --python 3.10
47+
- restore_cache:
48+
keys:
49+
- uv-cache-perf-{{ checksum "uv.lock" }}
50+
- uv-cache-perf-
3351
- run:
3452
name: Run model performance tests
3553
command: |
36-
source .venv/bin/activate
37-
uv pip install -e '.[dev,aws,spark]'
38-
cd ./action_files/test_models/
39-
uv pip install -r requirements.txt
40-
python -m src.models
41-
python -m src.evaluation
42-
cd ../../
54+
uv sync --group dev --group aws --group spark
55+
uv pip install -r ./action_files/test_models/requirements.txt
56+
export PYTHONPATH="${PYTHONPATH}:./action_files/test_models"
57+
uv run python -m src.models
58+
uv run python -m src.evaluation
59+
- save_cache:
60+
key: uv-cache-perf-{{ checksum "uv.lock" }}
61+
paths:
62+
- ~/.cache/uv
63+
- .venv
4364
- store_artifacts:
4465
path: ./action_files/test_models/data/evaluation.csv
4566
destination: evaluation.csv
@@ -50,20 +71,30 @@ jobs:
5071
steps:
5172
- checkout
5273
- run:
53-
name: Install dependencies
74+
name: Install Git
75+
command: |
76+
apt-get update && apt-get install -y git
77+
- run:
78+
name: Install uv
5479
command: |
5580
pip install uv
56-
uv venv --python 3.10
81+
- restore_cache:
82+
keys:
83+
- uv-cache-perf-{{ checksum "uv.lock" }}
84+
- uv-cache-perf-
5785
- run:
5886
name: Run model performance tests
5987
command: |
60-
source .venv/bin/activate
61-
uv pip install -e '.[dev,aws,spark]'
62-
cd ./action_files/test_models/
63-
uv pip install -r requirements.txt
64-
python -m src.models2
65-
python -m src.evaluation2
66-
cd ../../
88+
uv sync --group dev --group aws --group spark
89+
uv pip install -r ./action_files/test_models/requirements.txt
90+
export PYTHONPATH="${PYTHONPATH}:./action_files/test_models"
91+
uv run python -m src.models2
92+
uv run python -m src.evaluation2
93+
- save_cache:
94+
key: uv-cache-perf-{{ checksum "uv.lock" }}
95+
paths:
96+
- ~/.cache/uv
97+
- .venv
6798
- store_artifacts:
6899
path: ./action_files/test_models/data/evaluation.csv
69100
destination: evaluation.csv
@@ -74,20 +105,30 @@ jobs:
74105
steps:
75106
- checkout
76107
- run:
77-
name: Install dependencies
108+
name: Install Git
109+
command: |
110+
apt-get update && apt-get install -y git
111+
- run:
112+
name: Install uv
78113
command: |
79114
pip install uv
80-
uv venv --python 3.10
115+
- restore_cache:
116+
keys:
117+
- uv-cache-perf-{{ checksum "uv.lock" }}
118+
- uv-cache-perf-
81119
- run:
82120
name: Run model performance tests
83121
command: |
84-
source .venv/bin/activate
85-
uv pip install -e '.[dev,aws,spark]'
86-
cd ./action_files/test_models/
87-
uv pip install -r requirements.txt
88-
python -m src.multivariate_models
89-
python -m src.multivariate_evaluation
90-
cd ../../
122+
uv sync --group dev --group aws --group spark
123+
uv pip install -r ./action_files/test_models/requirements.txt
124+
export PYTHONPATH="${PYTHONPATH}:./action_files/test_models"
125+
uv run python -m src.multivariate_models
126+
uv run python -m src.multivariate_evaluation
127+
- save_cache:
128+
key: uv-cache-perf-{{ checksum "uv.lock" }}
129+
paths:
130+
- ~/.cache/uv
131+
- .venv
91132
- store_artifacts:
92133
path: ./action_files/test_models/data/multi_evaluation.csv
93134
destination: multi_evaluation.csv

.github/workflows/build-docs.yaml

Lines changed: 8 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -31,16 +31,22 @@ jobs:
3131
with:
3232
python-version: '3.10'
3333

34+
- name: Install uv
35+
uses: astral-sh/setup-uv@v5
36+
with:
37+
enable-cache: true
38+
cache-dependency-glob: "uv.lock"
39+
3440
- name: Install dependencies
35-
run: pip install uv pytest && uv pip install --system '.[dev,aws,spark,docs]'
41+
run: uv sync --group dev --group aws --group spark
3642

3743
# setup quarto for rendering example/tutorial nbs
3844
- uses: quarto-dev/quarto-actions/setup@v2
3945
with:
4046
version: 1.4.515
4147

4248
- name: Build Docs
43-
run: make all_docs
49+
run: uv run make all_docs
4450

4551
- name: Deploy (Push to main or Pull Request from same repo)
4652
if: (github.event_name == 'push' && github.ref == 'refs/heads/main') || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository) || (github.event_name == 'workflow_dispatch' && github.event.inputs.environment == 'staging')

.github/workflows/lint.yaml

Lines changed: 8 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -18,8 +18,14 @@ jobs:
1818
with:
1919
python-version: "3.10"
2020

21+
- name: Install uv
22+
uses: astral-sh/setup-uv@v5
23+
with:
24+
enable-cache: true
25+
cache-dependency-glob: "uv.lock"
26+
2127
- name: Install dependencies
22-
run: pip install black "fastcore<1.8.0" nbdev==2.3.25 pre-commit
28+
run: uv sync --group dev
2329

2430
- name: Run pre-commit
25-
run: pre-commit run --files neuralforecast/*
31+
run: uv run pre-commit run --files neuralforecast/*

.github/workflows/pytest.yml

Lines changed: 9 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -31,8 +31,14 @@ jobs:
3131
with:
3232
python-version: ${{ matrix.python-version }}
3333

34+
- name: Install uv
35+
uses: astral-sh/setup-uv@v5
36+
with:
37+
enable-cache: true
38+
cache-dependency-glob: "uv.lock"
39+
3440
- name: Install dependencies
35-
run: pip install uv pytest && uv pip install --system ".[dev]" --torch-backend cpu
41+
run: uv sync --group dev --group aws --group spark
3642

3743
- name: Override torch and numpy version on MacOS due to compatibility issues with MPS in github runners on newer MacOS versions
3844
if: matrix.os == 'macos-13' || matrix.os == 'macos-14-xlarge'
@@ -43,8 +49,8 @@ jobs:
4349
env:
4450
PYTORCH_ENABLE_MPS_FALLBACK: 1
4551
PYTORCH_MPS_HIGH_WATERMARK_RATIO: 0.0
46-
run: pytest --ignore tests/test_common --ignore tests/test_models --ignore tests/test_core.py --no-cov
52+
run: uv run pytest --ignore tests/test_common --ignore tests/test_models --ignore tests/test_core.py --no-cov
4753

4854
- name: Run pytest (ubuntu and windows)
4955
if: (matrix.os == 'ubuntu-latest' || matrix.os == 'windows-latest')
50-
run: pytest
56+
run: uv run pytest

CONTRIBUTING.md

Lines changed: 6 additions & 6 deletions
Original file line number · Diff line number · Diff line change
@@ -105,18 +105,18 @@ source .venv/bin/activate
105105
.\.venv\Scripts\activate
106106
```
107107

108-
Now, install the library. Make sure to specify the desired [PyTorch backend](https://docs.astral.sh/uv/reference/cli/#uv-pip-install--torch-backend):
108+
Now, install the library. Make sure to specify the desired [PyTorch backend](https://docs.astral.sh/uv/reference/cli/#uv-sync--torch-backend):
109109

110110
```bash
111-
uv pip install -e ".[dev]" --torch-backend auto # uv will decide the optimal backend automatically
112-
uv pip install -e ".[dev]" --torch-backend cpu # for cpu backend
113-
uv pip install -e ".[dev]" --torch-backend cu118 # for CUDA 11.8 PyTorch backend
111+
uv sync --group dev --torch-backend auto # uv will decide the optimal backend automatically
112+
uv sync --group dev --torch-backend cpu # for cpu backend
113+
uv sync --group dev --torch-backend cu118 # for CUDA 11.8 PyTorch backend
114114
```
115115

116116
You can install other optional dependencies:
117117

118118
```sh
119-
uv pip install -e ".[dev,aws,spark]"
119+
uv sync --group dev --group aws --group spark
120120
```
121121

122122
#### Install pre-commit hooks
@@ -185,7 +185,7 @@ For additional instructions, see the [Mintlify installation guide](https://mintl
185185
**Install documentation dependencies:**
186186

187187
```sh
188-
uv pip install -e ".[dev,aws,spark,docs]"
188+
uv sync --group dev --group aws --group spark
189189
```
190190

191191
**Generate documentation:**

pyproject.toml

Lines changed: 18 additions & 5 deletions
Original file line number · Diff line number · Diff line change
@@ -35,7 +35,7 @@ dependencies = [
3535
"utilsforecast>=0.2.3",
3636
]
3737

38-
[project.optional-dependencies]
38+
[dependency-groups]
3939
spark = ["fugue", "pyspark>=3.5"]
4040
aws = ["fsspec[s3]"]
4141
dev = [
@@ -56,10 +56,8 @@ dev = [
5656
"xlstm",
5757
"pytest",
5858
"pytest-cov",
59-
"pip-licenses"
60-
]
61-
docs = [
62-
"mkdocstrings-parser@git+https://github.com/Nixtla/mkdocstrings-parser.git"
59+
"pip-licenses",
60+
"mkdocstrings-parser @ git+https://github.com/Nixtla/mkdocstrings-parser"
6361
]
6462

6563
[project.urls]
@@ -92,3 +90,18 @@ addopts = [
9290
"--cov-report=html",
9391
"--cov-fail-under=80",
9492
]
93+
94+
[tool.coverage.run]
95+
source = ["neuralforecast"]
96+
omit = [
97+
"*/tmp/ray/*",
98+
"*/_ray_pkg_*/*",
99+
"*/runtime_resources/*",
100+
]
101+
102+
[tool.coverage.report]
103+
omit = [
104+
"*/tmp/ray/*",
105+
"*/_ray_pkg_*/*",
106+
"*/runtime_resources/*",
107+
]

0 commit comments

Comments (0)