99import random
1010from datetime import datetime
1111from math import ceil
12- from multiprocessing import Manager
1312from pathlib import Path
1413from typing import Any
1514
2120from freqtrade .enums import HyperoptState
2221from freqtrade .exceptions import OperationalException
2322from freqtrade .misc import file_dump_json , plural
24- from freqtrade .optimize .hyperopt .hyperopt_logger import logging_mp_handle , logging_mp_setup
2523from freqtrade .optimize .hyperopt .hyperopt_optimizer import INITIAL_POINTS , HyperOptimizer
2624from freqtrade .optimize .hyperopt .hyperopt_output import HyperoptOutput
2725from freqtrade .optimize .hyperopt_tools import (
# Module-level logger, named after this module per the stdlib logging convention.
logger = logging.getLogger(__name__)
3634
3735
38- log_queue : Any
39-
40-
4136class Hyperopt :
4237 """
4338 Hyperopt class, this class contains all the logic to run a hyperopt simulation
@@ -149,15 +144,7 @@ def print_results(self, results: dict[str, Any]) -> None:
149144 def run_optimizer_parallel (self , parallel : Parallel , asked : list [list ]) -> list [dict [str , Any ]]:
150145 """Start optimizer in a parallel way"""
151146
152- def optimizer_wrapper (* args , ** kwargs ):
153- # global log queue. This must happen in the file that initializes Parallel
154- logging_mp_setup (
155- log_queue , logging .INFO if self .config ["verbosity" ] < 1 else logging .DEBUG
156- )
157-
158- return self .hyperopter .generate_optimizer_wrapped (* args , ** kwargs )
159-
160- return parallel (optimizer_wrapper (v ) for v in asked )
147+ return parallel (self .hyperopter .generate_optimizer_wrapped (v ) for v in asked )
161148
162149 def _set_random_state (self , random_state : int | None ) -> int :
163150 return random_state or random .randint (1 , 2 ** 16 - 1 ) # noqa: S311
@@ -236,15 +223,6 @@ def evaluate_result(self, val: dict[str, Any], current: int, is_random: bool):
236223
237224 self ._save_result (val )
238225
239- def _setup_logging_mp_workaround (self ) -> None :
240- """
241- Workaround for logging in child processes.
242- local_queue must be a global in the file that initializes Parallel.
243- """
244- global log_queue
245- m = Manager ()
246- log_queue = m .Queue ()
247-
248226 def start (self ) -> None :
249227 self .random_state = self ._set_random_state (self .config .get ("hyperopt_random_state" ))
250228 logger .info (f"Using optimizer random state: { self .random_state } " )
@@ -257,7 +235,6 @@ def start(self) -> None:
257235 logger .info (f"Number of parallel jobs set as: { config_jobs } " )
258236
259237 self .opt = self .hyperopter .get_optimizer (self .random_state )
260- self ._setup_logging_mp_workaround ()
261238 try :
262239 with Parallel (n_jobs = config_jobs ) as parallel :
263240 jobs = parallel ._effective_n_jobs ()
@@ -307,7 +284,7 @@ def start(self) -> None:
307284
308285 self .evaluate_result (val , current , is_random [j ])
309286 pbar .update (task , advance = 1 )
310- logging_mp_handle ( log_queue )
287+ self . hyperopter . handle_mp_logging ( )
311288 gc .collect ()
312289
313290 if (
0 commit comments