Skip to content

Commit

Permalink
Revert "Before the first training step, the model has no optimizer: fix ds3"
Browse files Browse the repository at this point in the history

This reverts commit bf6e7ed.
  • Loading branch information
qgallouedec committed Feb 6, 2025
1 parent bf6e7ed commit 7134a1e
Showing 1 changed file with 0 additions and 4 deletions.
4 changes: 0 additions & 4 deletions trl/models/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,8 +137,6 @@ def setup_chat_format(

def remove_hooks(model: "DeepSpeedEngine") -> None:
"""Removes the optimizer hooks from a DeepSpeed ZeRO-3 model."""
if not hasattr(model, "optimizer"): # before the first training step, the model has no optimizer
return
if model.optimizer is not None and hasattr(model.optimizer, "parameter_offload"):
optimizer_offload = model.optimizer.parameter_offload
elif model.optimizer is not None:
Expand Down Expand Up @@ -166,8 +164,6 @@ def iter_params(module, recurse=False):

def add_hooks(model: "DeepSpeedEngine") -> None:
"""Adds the optimizer hooks from a DeepSpeed ZeRO-3 model."""
if not hasattr(model, "optimizer"): # before the first training step, the model has no optimizer
return
if model.optimizer is not None and hasattr(model.optimizer, "parameter_offload"):
optimizer_offload = model.optimizer.parameter_offload
elif model.optimizer is not None:
Expand Down

0 comments on commit 7134a1e

Please sign in to comment.