Skip to content

Commit a3b5844

Browse files
authored
Unpins Dask (#207)
1 parent c431559 commit a3b5844

File tree

3 files changed

+20
-25
lines changed

3 files changed

+20
-25
lines changed

pyproject.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -54,8 +54,8 @@ dev = [
5454
"polars[numpy]",
5555
"pytest",
5656
"pytest-cov",
57-
"fugue[dask,spark]>=0.8.1",
58-
"dask<=2024.12.1",
57+
"fugue[dask,spark]>=0.9.4",
58+
"dask",
5959
"pip-licenses",
6060
"mkdocstrings-parser@git+https://github.com/Nixtla/mkdocstrings-parser.git",
6161
]

tests/test_evaluation.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,9 @@
1313
import pytest
1414
from datasetsforecast.evaluation import accuracy as ds_evaluate
1515
from pyspark.sql import SparkSession
16+
from dask.distributed import Client
17+
from fugue_dask import DaskExecutionEngine
18+
import dask
1619

1720
import utilsforecast.processing as ufp
1821
from utilsforecast.data import generate_series
@@ -280,6 +283,12 @@ def test_distributed_evaluate(setup_series):
280283
level = [80, 95]
281284
spark = SparkSession.builder.getOrCreate()
282285
spark.sparkContext.setLogLevel("FATAL")
286+
287+
# Use processes=False to avoid deadlocks in CI
288+
client = Client(processes=False)
289+
engine = DaskExecutionEngine(client)
290+
291+
dask.config.set({"dataframe.shuffle.method": "tasks", "scheduler": "synchronous"})
283292
dask_df = dd.from_pandas(setup_series, npartitions=2)
284293
spark_df = spark.createDataFrame(setup_series).repartition(2)
285294
for distributed_df, use_train in product([dask_df, spark_df], [True, False]):

uv.lock

Lines changed: 9 additions & 23 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments (0)