Commit 9095018

Updated dependencies, makefile, and developer docs
1 parent 438edca commit 9095018

3 files changed: 5 additions & 14 deletions

CONTRIBUTING.md

Lines changed: 0 additions & 5 deletions
@@ -29,7 +29,6 @@ make lint # Check code quality
 
 # Run tests
 make test # Run tests
-make coverage # Run tests with coverage report
 
 # Build package
 make build # Build with modern build system
@@ -89,10 +88,6 @@ The name used to flag the spark instance should be the test module or test class
 # Run all tests
 make test
 
-# Run tests with coverage report (generates htmlcov/index.html)
-make coverage
-```
-
 If using an environment with multiple Python versions, make sure to use virtual env or similar to pick up correct python versions.
 
 If necessary, set `PYSPARK_PYTHON` and `PYSPARK_DRIVER_PYTHON` to point to correct versions of Python.
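
With `make coverage` removed from CONTRIBUTING.md, the documented workflow is just `make test`; after the pyproject.toml change further below, that single target also produces the HTML coverage report. A minimal sketch of the updated workflow (htmlcov/index.html is pytest-cov's default HTML output location, as noted in the removed doc line above):

# run the suite; coverage HTML output is now produced by the same command
make test
# view the generated report
open htmlcov/index.html   # macOS; use xdg-open on Linux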

makefile

Lines changed: 2 additions & 5 deletions
@@ -1,6 +1,6 @@
-.PHONY: dev test coverage lint fmt clean build docs
+.PHONY: dev test lint fmt clean build docs
 
-all: clean dev lint fmt test coverage
+all: clean dev lint fmt test
 
 clean:
 	rm -fr .venv clean htmlcov .mypy_cache .pytest_cache .ruff_cache .coverage coverage.xml
@@ -22,9 +22,6 @@ fmt:
 test:
 	hatch run test
 
-coverage:
-	hatch run coverage
-
 build:
 	hatch build
 
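Since the `coverage` target is gone from the makefile, anything that previously ran `make coverage` should switch to `make test` or invoke the hatch script directly. A rough equivalent, assuming the default hatch environment defined in pyproject.toml below:

# what the remaining `test` target delegates to
hatch run test
# former usage, no longer available after this commit:
#   make coverage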

pyproject.toml

Lines changed: 3 additions & 4 deletions
@@ -32,7 +32,7 @@ dependencies = [
     "numpy>=1.22.0",
     "pandas>=1.3.4",
     "pyarrow>=7.0.0",
-    "pyspark>=3.3.0",
+    "pyspark[sql]>=3.3.0",
     "python-dateutil>=2.8.2",
     "six>=1.16.0",
     "pyparsing>=3.0.4",
@@ -110,7 +110,7 @@ dependencies = [
     "ruff~=0.3.4",
     "types-PyYAML~=6.0.12",
     "types-requests~=2.31.0",
-    "pyspark~=3.5.0"
+    "pyspark[sql]~=3.5.0"
 ]
 
 python="3.10"
@@ -119,8 +119,7 @@ python="3.10"
 path = ".venv"
 
 [tool.hatch.envs.default.scripts]
-test = "pytest tests/ -n 10 --cov --cov-report=xml:coverage-unit.xml --timeout 600 --durations 20"
-coverage = "pytest tests/ -n 10 --cov --cov-report=html --timeout 600 --durations 20"
+test = "pytest tests/ -n 10 --cov --cov-report=html --timeout 600 --durations 20"
 fmt = ["ruff check . --fix",
     "mypy .",
     "pylint --output-format=colorized -j 0 dbldatagen tests"]
