@@ -35,7 +35,7 @@ clean-test: ## remove test and coverage artifacts
3535test : ## run tests (and coverage if configured in setup.cfg) with the default Python
3636 @echo -----------------------------------------------------------------
3737 @echo RUNNING TESTS...
38- poetry run pytest -v --cov=sparkdantic
38+ uv run --group test --extra pyspark pytest -v --cov=sparkdantic
3939 @echo ✅ Tests have passed! Nice work!
4040 @echo -----------------------------------------------------------------
4141
@@ -45,50 +45,47 @@ coverage: ## check code coverage quickly with the default Python
4545 coverage report > COVERAGE.txt
4646
4747test-ci :
48- poetry run pytest --cov=sparkdantic --cov-report=json
48+ uv run --group test --extra pyspark pytest --cov=sparkdantic --cov-report=json
4949
5050
5151dist : clean ## builds source and wheel package
52- poetry build
52+ uv build
5353 ls -l dist
5454
5555
5656install : clean ## install the package to the active Python's site-packages via pip
57- poetry install
57+ uv sync
5858
5959
6060install-e : clean ## install via pip in editable mode; see https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs
61- pip install -e .
61+ uv pip install -e .
6262
6363test-cov : test ## run tests locally and output coverage file
64- poetry run coverage report > COVERAGE.txt
64+ uv run --group test --extra pyspark coverage report > COVERAGE.txt
6565
6666commit-cov :
6767 git add COVERAGE.txt --force
6868
6969install-docs :
70- poetry install --only docs
70+	uv sync --only-group docs
7171
7272install-tests :
73- poetry install --only test
73+ uv sync --group test --extra pyspark
7474
7575install-all-pyspark3 :
76- poetry install --with dev, test, docs
77- poetry run pip install pyspark==3.5.5
76+ uv sync --group dev --group test --group docs --extra pyspark
77+	uv pip install pyspark==3.5.5
7878
7979install-all-pyspark4 :
80- poetry install --with dev, test, docs
81- poetry run pip install pyspark==4.1.1
80+ uv sync --group dev --group test --group docs --extra pyspark
81+	uv pip install pyspark==4.1.1
8282
8383install-dev-local : ## install all the stuff you need to develop locally
84- pip install --upgrade pip
85- pip install wheel
86- pip install -e .
87- poetry install --with dev,test,docs -E pyspark
84+ uv sync --group dev --group test --group docs --extra pyspark
8885 pre-commit install
8986
9087publish : dist ## publish the package to PyPI
91- poetry publish
88+ uv publish
9289
9390run-infra :
9491 docker-compose -f docker/dev/docker-compose.yaml up --remove-orphans -d
@@ -97,4 +94,4 @@ stop-infra:
9794 docker-compose -f docker/dev/docker-compose.yaml down
9895
9996docs : ## generate MkDocs HTML documentation, including API docs
100- poetry run mkdocs build
97+ uv run --group docs mkdocs build
0 commit comments