Skip to content

Commit 0d06679

Browse files
Merge pull request #862 from shanehead/feature/pyspark4.1
Support PySpark 4.1.x
2 parents d6bd703 + 6af2dcd commit 0d06679

File tree

7 files changed

+1405
-651
lines changed

7 files changed

+1405
-651
lines changed

Makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -78,7 +78,7 @@ install-all-pyspark3:
7878

7979
install-all-pyspark4:
8080
poetry install --with dev,test,docs
81-
poetry run pip install pyspark==4.0.0
81+
poetry run pip install pyspark==4.1.1
8282

8383
install-dev-local: ## install all the stuff you need to develop locally
8484
pip install --upgrade pip

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ pip install "sparkdantic[pyspark]"
3636

3737
### Supported PySpark versions
3838

39-
PySpark version `3.3.0` or higher, up to but not including `4.1.0`
39+
PySpark version `3.3.0` or higher, up to but not including `4.2.0`
4040

4141
## Usage
4242

poetry.lock

Lines changed: 1394 additions & 641 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

pyproject.toml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,12 +4,12 @@ version = "2.7.1"
44
description = "A pydantic -> spark schema library"
55
authors = ["Mitchell Lisle <m.lisle90@gmail.com>"]
66
readme = "README.md"
7-
packages = [{include = "sparkdantic", from = "src"}]
7+
packages = [{ include = "sparkdantic", from = "src" }]
88

99
[tool.poetry.dependencies]
1010
python = ">3.9.1,<4.0.0"
1111
pydantic = "^2.1.1"
12-
pyspark = { version = ">=3.3.0,<=4.0.2", optional = true }
12+
pyspark = { version = ">=3.3.0,<4.2.0", optional = true }
1313
jmespath = "^1.0.1"
1414
pyparsing = "^3.1.1"
1515
packaging = ">=24.2,<27.0"
@@ -35,7 +35,7 @@ mkdocs = "^1.4.2"
3535
mkdocs-material = "^9.1.4"
3636
mkdocs-gen-files = ">=0.4,<0.7"
3737
mkdocs-literate-nav = "^0.6.0"
38-
mkdocstrings = {extras = ["python"], version = ">=0.20,<0.31"}
38+
mkdocstrings = { extras = ["python"], version = ">=0.20,<0.31" }
3939
mkautodoc = "^0.2.0"
4040
pymdown-extensions = "^10.0.1"
4141

src/sparkdantic/utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44

55
MIN_PYSPARK_VERSION = '3.3.0'
66
"""Inclusive minimum version of PySpark required"""
7-
MAX_PYSPARK_VERSION = '4.1.0'
7+
MAX_PYSPARK_VERSION = '4.2.0'
88
"""Exclusive maximum version of PySpark supported"""
99

1010
try:

tests/test_computed_field.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
from pydantic import BaseModel, computed_field
44
from pyspark.sql.types import IntegerType, LongType, StringType, StructField, StructType
55

6-
from sparkdantic import create_spark_schema, SparkField
6+
from sparkdantic import SparkField, create_spark_schema
77

88

99
class ComputedOnlyModel(BaseModel):
@@ -97,7 +97,7 @@ def d(self) -> int:
9797

9898
def test_computed_field_with_spark_type():
9999
class ComputedWithSparkType(BaseModel):
100-
@computed_field(json_schema_extra={"spark_type": LongType})
100+
@computed_field(json_schema_extra={'spark_type': LongType})
101101
@property
102102
def d(self) -> int:
103103
return 4
@@ -129,7 +129,7 @@ def d(self) -> Annotated[int, SparkField(spark_type=LongType)]:
129129

130130
def test_computed_field_with_spark_type_over_annotated_return():
131131
class ComputedWithSparkType(BaseModel):
132-
@computed_field(json_schema_extra={"spark_type": LongType})
132+
@computed_field(json_schema_extra={'spark_type': LongType})
133133
@property
134134
def d(self) -> Annotated[int, SparkField(spark_type=StringType)]:
135135
return 4

tests/test_optional_pyspark_dependency.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,8 @@ def test_no_pyspark_raises_import_error(no_pyspark):
5050
('3.3.0', False),
5151
('3.5.0', False),
5252
('4.0.0', False),
53-
('4.1.0', True),
53+
('4.1.0', False),
54+
('4.2.0', True),
5455
],
5556
)
5657
def test_require_pyspark_version_in_range(

0 commit comments

Comments (0)