
Commit 13d1c06

Merge pull request #1002 from automl/development
Release 0.11.1
2 parents 7efc5e2 + 8e7b34b commit 13d1c06

5 files changed: +48 / -19 lines changed


autosklearn/__version__.py

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
 """Version information."""
 
 # The following line *must* be the last in the module, exactly as formatted:
-__version__ = "0.11.0"
+__version__ = "0.11.1"

autosklearn/evaluation/__init__.py

Lines changed: 37 additions & 16 deletions
@@ -1,12 +1,14 @@
 # -*- encoding: utf-8 -*-
 import functools
+import json
 import math
 import multiprocessing
 from queue import Empty
 import time
 import traceback
-from typing import Tuple
+from typing import Dict, List, Optional, Tuple, Union
 
+from ConfigSpace import Configuration
 import numpy as np
 import pynisher
 from smac.runhistory.runhistory import RunInfo, RunValue
@@ -81,6 +83,14 @@ def get_cost_of_crash(metric):
     return worst_possible_result
 
 
+def _encode_exit_status(exit_status):
+    try:
+        json.dumps(exit_status)
+        return exit_status
+    except (TypeError, OverflowError):
+        return str(exit_status)
+
+
 # TODO potentially log all inputs to this class to pickle them in order to do
 # easier debugging of potential crashes
 class ExecuteTaFuncWithQueue(AbstractTAFunc):
@@ -224,11 +234,15 @@ def run_wrapper(
 
         return super().run_wrapper(run_info=run_info)
 
-    def run(self, config, instance=None,
-            cutoff=None,
-            seed=12345,
-            budget=0.0,
-            instance_specific=None):
+    def run(
+        self,
+        config: Configuration,
+        instance: Optional[str] = None,
+        cutoff: Optional[float] = None,
+        seed: int = 12345,
+        budget: float = 0.0,
+        instance_specific: Optional[str] = None,
+    ) -> Tuple[StatusType, float, float, Dict[str, Union[int, float, str, Dict, List, Tuple]]]:
 
         queue = multiprocessing.Queue()
 
@@ -272,11 +286,19 @@ def run(self, config, instance=None,
             obj_kwargs['resampling_strategy'] = self.resampling_strategy
             obj_kwargs['resampling_strategy_args'] = self.resampling_strategy_args
 
-        obj = pynisher.enforce_limits(**arguments)(self.ta)
-        obj(**obj_kwargs)
-
-        if obj.exit_status in (pynisher.TimeoutException,
-                               pynisher.MemorylimitException):
+        try:
+            obj = pynisher.enforce_limits(**arguments)(self.ta)
+            obj(**obj_kwargs)
+        except Exception as e:
+            exception_traceback = traceback.format_exc()
+            error_message = repr(e)
+            additional_info = {
+                'traceback': exception_traceback,
+                'error': error_message
+            }
+            return StatusType.CRASHED, self.cost_for_crash, 0.0, additional_info
+
+        if obj.exit_status in (pynisher.TimeoutException, pynisher.MemorylimitException):
             # Even if the pynisher thinks that a timeout or memout occured,
             # it can be that the target algorithm wrote something into the queue
             # - then we treat it as a succesful run
@@ -309,8 +331,7 @@ def run(self, config, instance=None,
                 elif obj.exit_status is pynisher.MemorylimitException:
                     status = StatusType.MEMOUT
                     additional_run_info = {
-                        'error': 'Memout (used more than %d MB).' %
-                                 self.memory_limit
+                        'error': 'Memout (used more than %d MB).' % self.memory_limit
                     }
                 else:
                     raise ValueError(obj.exit_status)
@@ -322,7 +343,7 @@ def run(self, config, instance=None,
            cost = self.worst_possible_result
            additional_run_info = {'error': 'Your configuration of '
                                            'auto-sklearn does not work!',
-                                   'exit_status': obj.exit_status,
+                                   'exit_status': _encode_exit_status(obj.exit_status),
                                    'subprocess_stdout': obj.stdout,
                                    'subprocess_stderr': obj.stderr,
                                    }
@@ -343,14 +364,14 @@ def run(self, config, instance=None,
                         'because the pynisher exit ' \
                         'status %s is unknown.' % \
                         str(obj.exit_status)
-                    additional_run_info['exit_status'] = obj.exit_status
+                    additional_run_info['exit_status'] = _encode_exit_status(obj.exit_status)
                     additional_run_info['subprocess_stdout'] = obj.stdout
                     additional_run_info['subprocess_stderr'] = obj.stderr
            except Empty:
                info = None
                additional_run_info = {
                    'error': 'Result queue is empty',
-                    'exit_status': obj.exit_status,
+                    'exit_status': _encode_exit_status(obj.exit_status),
                    'subprocess_stdout': obj.stdout,
                    'subprocess_stderr': obj.stderr,
                    'exitcode': obj.exitcode
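
Note on the autosklearn/evaluation/__init__.py changes above: pynisher can report an exit status that is an exception class rather than a plain value, and such objects are not JSON serializable when the run's additional info is stored. The new `_encode_exit_status` helper keeps JSON-serializable statuses unchanged and falls back to their string form otherwise; the new try/except around the pynisher call additionally turns unexpected exceptions into a CRASHED result carrying the traceback instead of letting them propagate (FIX #1001). A minimal, self-contained sketch of the helper's behaviour, using a builtin exception class as a stand-in for a pynisher exit status:

import json

def _encode_exit_status(exit_status):
    # Keep the value if json.dumps accepts it, otherwise store its string form.
    try:
        json.dumps(exit_status)
        return exit_status
    except (TypeError, OverflowError):
        return str(exit_status)

print(_encode_exit_status(0))            # 0 -> already serializable, returned unchanged
print(_encode_exit_status(MemoryError))  # a class object -> "<class 'MemoryError'>"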

autosklearn/pipeline/components/data_preprocessing/data_preprocessing.py

Lines changed: 1 addition & 1 deletion
@@ -102,7 +102,7 @@ def fit(self, X, y=None):
             transformers=sklearn_transf_spec,
             sparse_threshold=float(self.sparse_),
         )
-        self.column_transformer.fit(X)
+        self.column_transformer.fit(X, y)
         return self
 
     def transform(self, X):
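
Context for the `fit(X, y)` change (FIX #989): `sklearn.compose.ColumnTransformer` forwards `y` to each wrapped transformer's fit, so dropping it silently broke third-party category encoders that need the target during fitting (target-style encoders, for example). A minimal sketch with a hypothetical toy encoder, not taken from auto-sklearn, showing why the forwarded `y` matters:

import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.compose import ColumnTransformer

class ToyTargetEncoder(BaseEstimator, TransformerMixin):
    """Stand-in for a 3rd-party category encoder that requires y at fit time."""

    def fit(self, X, y=None):
        if y is None:
            raise ValueError("this encoder needs y during fit")
        # A real encoder would compute per-category target statistics here.
        self.global_mean_ = float(np.mean(y))
        return self

    def transform(self, X):
        # Toy behaviour: encode every row with the stored global target mean.
        return np.full((len(X), 1), self.global_mean_)

X = np.array([["a"], ["b"], ["a"]], dtype=object)
y = np.array([1, 0, 1])

ct = ColumnTransformer([("enc", ToyTargetEncoder(), [0])])
# ct.fit(X)   # old behaviour: y never reached the encoder -> ValueError
ct.fit(X, y)  # fixed behaviour: y is forwarded to the encoder's fit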

doc/releases.rst

Lines changed: 8 additions & 0 deletions
@@ -11,6 +11,14 @@
 Releases
 ========
 
+Version 0.11.1
+==============
+
+* FIX #989: Fixes a bug where `y` was not passed to all data preprocessors which made 3rd party
+  category encoders fail.
+* FIX #1001: Fixes a bug which could make Auto-sklearn fail at random.
+* MAINT #1000: Introduce a minimal version for ``dask.distributed``.
+
 Version 0.11.0
 ==============
 
requirements.txt

Lines changed: 1 addition & 1 deletion
@@ -7,7 +7,7 @@ joblib
 scikit-learn>=0.22.0,<0.23
 
 dask
-distributed
+distributed>=2.2.0
 lockfile
 pyyaml
 pandas>=1.0
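
The `distributed>=2.2.0` pin corresponds to MAINT #1000 in the release notes. A quick, optional way to check whether an existing environment already satisfies the new lower bound (assumes the `distributed` and `packaging` packages are installed):

import distributed
from packaging.version import Version

assert Version(distributed.__version__) >= Version("2.2.0"), (
    "auto-sklearn 0.11.1 expects distributed>=2.2.0, found %s" % distributed.__version__
)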
