Skip to content

Commit

Permalink
Merge branch 'main' into export-D53202231
Browse files Browse the repository at this point in the history
  • Loading branch information
HuanyuZhang authored Dec 19, 2024
2 parents 777d889 + 32c75c2 commit 222c732
Show file tree
Hide file tree
Showing 6 changed files with 3,670 additions and 815 deletions.
3 changes: 2 additions & 1 deletion opacus/layers/param_rename.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,8 @@ def _register_renamed_parameters(self):
self.parameters() proceeds recursively from the top, going into submodules after processing
items at the current level, and will not return duplicates.
"""
for old_name, param in super().named_parameters():

for old_name, param in list(super().named_parameters()):
if old_name in self.old_to_new:
new_name = self.old_to_new[old_name]
self.register_parameter(new_name, param)
Expand Down
2 changes: 1 addition & 1 deletion opacus/tests/accountants_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ def test_get_noise_multiplier_prv_steps(self) -> None:
),
delta=st.sampled_from([1e-4, 1e-5, 1e-6]),
)
@settings(deadline=10000)
@settings(deadline=40000)
def test_get_noise_multiplier_overshoot(self, epsilon, epochs, sample_rate, delta):
noise_multiplier = get_noise_multiplier(
target_epsilon=epsilon,
Expand Down
4 changes: 2 additions & 2 deletions opacus/tests/batch_memory_manager_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def _init_training(self, batch_size=10, **data_loader_kwargs):
batch_size=st.sampled_from([8, 16, 64]),
max_physical_batch_size=st.sampled_from([4, 8]),
)
@settings(suppress_health_check=list(HealthCheck), deadline=10000)
@settings(suppress_health_check=list(HealthCheck), deadline=40000)
def test_basic(
self,
num_workers: int,
Expand Down Expand Up @@ -119,7 +119,7 @@ def test_basic(
num_workers=st.integers(0, 4),
pin_memory=st.booleans(),
)
@settings(suppress_health_check=list(HealthCheck), deadline=10000)
@settings(suppress_health_check=list(HealthCheck), deadline=40000)
def test_empty_batch(
self,
num_workers: int,
Expand Down
4 changes: 2 additions & 2 deletions opacus/tests/per_sample_gradients_utils_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ def per_sample_grads_utils_test(
groups=st.integers(1, 12),
grad_sample_mode=st.sampled_from(get_grad_sample_modes(use_ew=True)),
)
@settings(deadline=10000)
@settings(deadline=40000)
def test_conv1d(
self,
N: int,
Expand Down Expand Up @@ -120,7 +120,7 @@ def test_conv1d(
batch_first=st.booleans(),
grad_sample_mode=st.sampled_from(get_grad_sample_modes(use_ew=True)),
)
@settings(deadline=10000)
@settings(deadline=40000)
def test_linear(
self,
N: int,
Expand Down
4 changes: 3 additions & 1 deletion opacus/utils/module_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,8 @@ def clone_module(module: nn.Module) -> nn.Module:
"""
Handy utility to clone an nn.Module. PyTorch doesn't always support copy.deepcopy(), so it is
just easier to serialize the model to a BytesIO and read it from there.
When ``weights_only=False``, ``torch.load()`` implicitly uses the "pickle" module, which is known to be insecure.
Only load models you trust.
Args:
module: The module to clone
Expand All @@ -99,7 +101,7 @@ def clone_module(module: nn.Module) -> nn.Module:
with io.BytesIO() as bytesio:
torch.save(module, bytesio)
bytesio.seek(0)
module_copy = torch.load(bytesio)
module_copy = torch.load(bytesio, weights_only=False)
next_param = next(
module.parameters(), None
) # Eg, InstanceNorm with affine=False has no params
Expand Down
Loading

0 comments on commit 222c732

Please sign in to comment.