SacessOptimizer: Use cloudpickle for passing data to workers
Cloudpickle is able to handle more complex objects than pickle.

See ICB-DCM#1465
Closes ICB-DCM#1465

If the new processes are forked, we could skip the pickling, but at this point I don't think that's necessary.
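
For illustration, a minimal standalone sketch (not from the pypesto code base) of why cloudpickle matters here: the standard pickle module serializes functions by reference and therefore fails on lambdas and locally defined callables, which may appear in the objects passed to the workers, while cloudpickle serializes them by value.

```python
import pickle

import cloudpickle

# A lambda defined at runtime -- standard pickle cannot serialize it.
square = lambda x: x**2

try:
    pickle.dumps(square)
except (pickle.PicklingError, AttributeError) as err:
    # The exact exception type varies with how the function was defined.
    print(f"pickle failed: {err}")

# cloudpickle serializes the function by value instead of by reference ...
payload = cloudpickle.dumps(square)
# ... and the resulting byte stream is a regular pickle stream, so it can
# be restored with plain pickle.loads.
restored = pickle.loads(payload)
print(restored(4))  # 16
```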
dweindl committed Sep 16, 2024
1 parent b88356f commit a63f8ce
Showing 1 changed file with 14 additions and 9 deletions.

pypesto/optimize/ess/sacess.py
@@ -16,6 +16,7 @@
 from uuid import uuid1
 from warnings import warn
 
+import cloudpickle
 import numpy as np
 
 import pypesto
@@ -242,9 +243,13 @@ def minimize(
                 name=f"{self.__class__.__name__}-worker-{i:02d}",
                 target=_run_worker,
                 args=(
-                    worker,
-                    problem,
-                    startpoint_method,
+                    cloudpickle.dumps(
+                        (
+                            worker,
+                            problem,
+                            startpoint_method,
+                        ),
+                    ),
                     logging_thread.queue,
                 ),
             )
@@ -782,16 +787,16 @@ def get_temp_result_filename(worker_idx: int, tmpdir: str | Path) -> str:
     return str(Path(tmpdir, f"sacess-{worker_idx:02d}_tmp.h5").absolute())
 
 
-def _run_worker(
-    worker: SacessWorker,
-    problem: Problem,
-    startpoint_method: StartpointMethod,
-    log_process_queue: multiprocessing.Queue,
-):
+def _run_worker(pickled_args: bytes, log_process_queue: multiprocessing.Queue):
     """Run the given SACESS worker.
 
     Helper function as entrypoint for sacess worker processes.
     """
+    unpickled_args: tuple[
+        SacessWorker, Problem, StartpointMethod
+    ] = cloudpickle.loads(pickled_args)
+    (worker, problem, startpoint_method) = unpickled_args
+
     # different random seeds per process
     np.random.seed((os.getpid() * int(time.time() * 1000)) % 2**32)
 
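
The overall pattern, sketched standalone below with illustrative names (not pypesto's): the parent pre-serializes the worker arguments with cloudpickle so that multiprocessing itself only ever has to pickle a plain bytes object, and the worker entry point unpickles on the other side.

```python
import multiprocessing

import cloudpickle


def worker_entry(pickled_args: bytes):
    # Mirrors _run_worker: the child unpickles its own arguments.
    func, value = cloudpickle.loads(pickled_args)
    print(func(value))


if __name__ == "__main__":
    # Pre-serializing with cloudpickle means multiprocessing only has to
    # pickle `bytes`, which always works -- even under the "spawn" start
    # method, and even when the payload contains lambdas or closures.
    payload = cloudpickle.dumps((lambda x: x + 1, 41))
    p = multiprocessing.Process(target=worker_entry, args=(payload,))
    p.start()
    p.join()  # the child prints 42
```

Under the "fork" start method, by contrast, the child inherits the parent's memory and Process arguments are never pickled at all, which is why the commit message notes that the pickling could be skipped for forked processes.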
