2 files changed: 6 additions and 10 deletions.
@@ -536,19 +536,16 @@ async def map_worker_partitions(
             else:
                 args.append(ref)

-        @wraps(func)
-        def fn(
-            *args: _P.args, address: str = addr, **kwargs: _P.kwargs
-        ) -> List[_MapRetT]:
-            # Turn result into a list for bag construction
+        def fn(_address: str, *args: _P.args, **kwargs: _P.kwargs) -> List[_MapRetT]:
             worker = distributed.get_worker()

-            if worker.address != address:
+            if worker.address != _address:
                 raise ValueError(
-                    f"Invalid worker address: {worker.address}, expecting {address}. "
+                    f"Invalid worker address: {worker.address}, expecting {_address}. "
                     "This is likely caused by one of the workers died and Dask "
                     "re-scheduled a different one. Resilience is not yet supported."
                 )
+            # Turn result into a list for bag construction
             return [func(*args, **kwargs)]

         # XGBoost requires all workers running training tasks to be unique. Meaning, we
@@ -568,9 +565,8 @@ def fn(
         # relax the constraint and prevent Dask from choosing an invalid worker, the
         # task will simply hangs. We prefer a quick error here.
         #
-
         fut = client.submit(
-            fn,
+            update_wrapper(partial(fn, addr), fn),
             *args,
             pure=False,
             workers=[addr],
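For context, here is a minimal, self-contained sketch of the pattern the first file now uses: the target worker address is bound as an explicit first positional argument with functools.partial (metadata copied via update_wrapper) instead of being captured as a keyword default inside a @wraps-decorated closure, and the task fails fast if Dask scheduled it on a different worker. The helper name run_on, the integer payload, and the LocalCluster setup below are illustrative assumptions, not part of the XGBoost code base.

# Hypothetical sketch (not the XGBoost implementation): bind the target worker
# address up front with functools.partial, copy the wrapped function's metadata
# with update_wrapper, and fail fast inside the task if Dask scheduled it on a
# different worker.
from functools import partial, update_wrapper
from typing import List

import distributed
from distributed import Client, LocalCluster


def run_on(_address: str, payload: int) -> List[int]:
    # Runs on a Dask worker; check that we landed on the worker we asked for.
    worker = distributed.get_worker()
    if worker.address != _address:
        raise ValueError(
            f"Invalid worker address: {worker.address}, expecting {_address}."
        )
    # Wrap the result in a list, mirroring the bag-construction convention above.
    return [payload * 2]


if __name__ == "__main__":
    with LocalCluster(n_workers=2) as cluster, Client(cluster) as client:
        addr = sorted(client.scheduler_info()["workers"])[0]
        # partial binds the address as the first positional argument; update_wrapper
        # keeps run_on's __name__/__doc__ so the task stays readable in the dashboard.
        bound = update_wrapper(partial(run_on, addr), run_on)
        fut = client.submit(bound, 21, pure=False, workers=[addr])
        print(fut.result())  # [42]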
The second file only adds a trailing comma to the from_metrics signature:

@@ -16,7 +16,7 @@ class XGBoostTrainingSummary:

     @staticmethod
     def from_metrics(
-        metrics: Dict[str, Dict[str, List[float]]]
+        metrics: Dict[str, Dict[str, List[float]]],
     ) -> "XGBoostTrainingSummary":
         """
         Create an XGBoostTrainingSummary instance from a nested dictionary of metrics.
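A hypothetical usage sketch for the from_metrics signature shown above, assuming only the nested layout implied by the annotation (dataset name -> metric name -> per-iteration values); the import path and the concrete keys ("training", "validation", "logloss") are illustrative guesses, not taken from this diff.

# Hypothetical example; the module path and metric keys below are assumptions.
from xgboost.spark.summary import XGBoostTrainingSummary

metrics = {
    "training": {"logloss": [0.62, 0.48, 0.41]},
    "validation": {"logloss": [0.65, 0.53, 0.47]},
}
summary = XGBoostTrainingSummary.from_metrics(metrics)
print(summary)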