Commit be16ffe

fix: move logging setup to hyperopter

restores hyperopt logging functionality

1 parent a0c4b52

File tree: 2 files changed (+25 −25 lines)

freqtrade/optimize/hyperopt/hyperopt.py

Lines changed: 2 additions & 25 deletions
@@ -9,7 +9,6 @@
 import random
 from datetime import datetime
 from math import ceil
-from multiprocessing import Manager
 from pathlib import Path
 from typing import Any

@@ -21,7 +20,6 @@
 from freqtrade.enums import HyperoptState
 from freqtrade.exceptions import OperationalException
 from freqtrade.misc import file_dump_json, plural
-from freqtrade.optimize.hyperopt.hyperopt_logger import logging_mp_handle, logging_mp_setup
 from freqtrade.optimize.hyperopt.hyperopt_optimizer import INITIAL_POINTS, HyperOptimizer
 from freqtrade.optimize.hyperopt.hyperopt_output import HyperoptOutput
 from freqtrade.optimize.hyperopt_tools import (
@@ -35,9 +33,6 @@
 logger = logging.getLogger(__name__)


-log_queue: Any
-
-
 class Hyperopt:
     """
     Hyperopt class, this class contains all the logic to run a hyperopt simulation
@@ -149,15 +144,7 @@ def print_results(self, results: dict[str, Any]) -> None:
     def run_optimizer_parallel(self, parallel: Parallel, asked: list[list]) -> list[dict[str, Any]]:
         """Start optimizer in a parallel way"""

-        def optimizer_wrapper(*args, **kwargs):
-            # global log queue. This must happen in the file that initializes Parallel
-            logging_mp_setup(
-                log_queue, logging.INFO if self.config["verbosity"] < 1 else logging.DEBUG
-            )
-
-            return self.hyperopter.generate_optimizer_wrapped(*args, **kwargs)
-
-        return parallel(optimizer_wrapper(v) for v in asked)
+        return parallel(self.hyperopter.generate_optimizer_wrapped(v) for v in asked)

     def _set_random_state(self, random_state: int | None) -> int:
         return random_state or random.randint(1, 2**16 - 1)  # noqa: S311
@@ -236,15 +223,6 @@ def evaluate_result(self, val: dict[str, Any], current: int, is_random: bool):

         self._save_result(val)

-    def _setup_logging_mp_workaround(self) -> None:
-        """
-        Workaround for logging in child processes.
-        local_queue must be a global in the file that initializes Parallel.
-        """
-        global log_queue
-        m = Manager()
-        log_queue = m.Queue()
-
     def start(self) -> None:
         self.random_state = self._set_random_state(self.config.get("hyperopt_random_state"))
         logger.info(f"Using optimizer random state: {self.random_state}")
@@ -257,7 +235,6 @@ def start(self) -> None:
         logger.info(f"Number of parallel jobs set as: {config_jobs}")

         self.opt = self.hyperopter.get_optimizer(self.random_state)
-        self._setup_logging_mp_workaround()
         try:
             with Parallel(n_jobs=config_jobs) as parallel:
                 jobs = parallel._effective_n_jobs()
@@ -307,7 +284,7 @@ def start(self) -> None:

                self.evaluate_result(val, current, is_random[j])
                pbar.update(task, advance=1)
-               logging_mp_handle(log_queue)
+               self.hyperopter.handle_mp_logging()
                gc.collect()

                if (
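For context on the shape of this change: run_optimizer_parallel now hands pre-wrapped jobs straight to joblib's Parallel, and any per-worker setup happens inside the job itself rather than in a closure created by the caller. A minimal sketch of that pattern (run_job and _worker_setup are hypothetical names for illustration, not freqtrade APIs):

    # Per-worker setup runs inside the delayed job, so the caller no longer
    # needs a wrapper closure around each call.
    from joblib import Parallel, delayed

    _worker_ready = False  # module-level flag; each worker process gets its own copy


    def _worker_setup() -> None:
        """One-time per-process initialization (e.g. installing log handlers)."""
        global _worker_ready
        if not _worker_ready:
            _worker_ready = True


    def run_job(x: int) -> int:
        _worker_setup()  # setup happens in the worker, on first use
        return x * x


    if __name__ == "__main__":
        # Mirrors `parallel(self.hyperopter.generate_optimizer_wrapped(v) for v in asked)`
        results = Parallel(n_jobs=2)(delayed(run_job)(v) for v in range(8))
        print(results)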

freqtrade/optimize/hyperopt/hyperopt_optimizer.py

Lines changed: 23 additions & 0 deletions
@@ -7,6 +7,7 @@
 import sys
 import warnings
 from datetime import UTC, datetime
+from multiprocessing import Manager
 from pathlib import Path
 from typing import Any

@@ -29,6 +30,7 @@

 # Import IHyperOptLoss to allow unpickling classes from these modules
 from freqtrade.optimize.hyperopt.hyperopt_auto import HyperOptAuto
+from freqtrade.optimize.hyperopt.hyperopt_logger import logging_mp_handle, logging_mp_setup
 from freqtrade.optimize.hyperopt_loss.hyperopt_loss_interface import IHyperOptLoss
 from freqtrade.optimize.hyperopt_tools import HyperoptStateContainer, HyperoptTools
 from freqtrade.optimize.optimize_reports import generate_strategy_stats
@@ -58,6 +60,8 @@
     "QMCSampler": optuna.samplers.QMCSampler,
 }

+log_queue: Any
+

 class HyperOptimizer:
     """
@@ -113,6 +117,24 @@ def __init__(self, config: Config, data_pickle_file: Path) -> None:
         if HyperoptTools.has_space(self.config, "sell"):
             # Make sure use_exit_signal is enabled
             self.config["use_exit_signal"] = True
+        self._setup_logging_mp_workaround()
+
+    def _setup_logging_mp_workaround(self) -> None:
+        """
+        Workaround for logging in child processes.
+        local_queue must be a global and passed to the child process via inheritance.
+        """
+        global log_queue
+        m = Manager()
+        log_queue = m.Queue()
+        logger.info(f"manager queue {type(log_queue)}")
+
+    def handle_mp_logging(self) -> None:
+        """
+        Handle logging from child processes.
+        Must be called in the parent process to handle log messages from the child process.
+        """
+        logging_mp_handle(log_queue)

     def prepare_hyperopt(self) -> None:
         # Initialize spaces ...
@@ -264,6 +286,7 @@ def assign_params(self, params_dict: dict[str, Any], category: str) -> None:
     @delayed
     @wrap_non_picklable_objects
     def generate_optimizer_wrapped(self, params_dict: dict[str, Any]) -> dict[str, Any]:
+        logging_mp_setup(log_queue, logging.INFO if self.config["verbosity"] < 1 else logging.DEBUG)
         return self.generate_optimizer(params_dict)

     def generate_optimizer(self, params_dict: dict[str, Any]) -> dict[str, Any]:
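The Manager-queue workaround relocated here follows the standard queue-based multiprocessing logging pattern: the parent creates a Manager().Queue() that workers reach via inheritance, each child routes its log records into the queue through a QueueHandler, and the parent periodically drains the queue into its own handlers. A stdlib-only approximation of what logging_mp_setup/logging_mp_handle appear to do (the real implementations live in freqtrade.optimize.hyperopt.hyperopt_logger and may differ in detail):

    # Sketch of the queue-based multiprocessing logging pattern; an
    # approximation, not the freqtrade implementation.
    import logging
    import logging.handlers
    from queue import Empty


    def mp_setup(log_queue, level: int) -> None:
        """Child side: ship log records to the parent instead of emitting them."""
        root = logging.getLogger()
        root.handlers[:] = [logging.handlers.QueueHandler(log_queue)]
        root.setLevel(level)


    def mp_handle(log_queue) -> None:
        """Parent side: drain queued records into the normal handlers."""
        while True:
            try:
                record = log_queue.get_nowait()
            except Empty:
                return
            logging.getLogger(record.name).handle(record)

Per the docstring above, the queue must be a module-level global in the file whose objects reach the worker, which is why the commit moves both log_queue and the setup/drain helpers into hyperopt_optimizer.py alongside generate_optimizer_wrapped.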

0 commit comments