
Commit 0c71332

changing torch.GradScaler to torch.amp.GradScaler
0ssamaak0 authored Jan 26, 2025
1 parent 85c31ba commit 0c71332
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions recipes_source/recipes/amp_recipe.py
@@ -150,7 +150,7 @@ def make_model(in_size, out_size, num_layers):
 # The same ``GradScaler`` instance should be used for the entire convergence run.
 # If you perform multiple convergence runs in the same script, each run should use
 # a dedicated fresh ``GradScaler`` instance. ``GradScaler`` instances are lightweight.
-scaler = torch.GradScaler("cuda")
+scaler = torch.amp.GradScaler("cuda")
 
 for epoch in range(0): # 0 epochs, this section is for illustration only
     for input, target in zip(data, targets):
@@ -182,7 +182,7 @@ def make_model(in_size, out_size, num_layers):
 
 net = make_model(in_size, out_size, num_layers)
 opt = torch.optim.SGD(net.parameters(), lr=0.001)
-scaler = torch.GradScaler("cuda" ,enabled=use_amp)
+scaler = torch.amp.GradScaler("cuda" ,enabled=use_amp)
 
 start_timer()
 for epoch in range(epochs):
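For context, the sketch below shows how the namespaced ``torch.amp.GradScaler`` constructor adopted by this commit fits into a complete mixed-precision training step alongside ``torch.autocast``. It is a minimal illustration only: the model, optimizer, loss, and tensors are placeholders, not the recipe's own ``make_model``, ``data``, and ``targets``.

import torch

device = "cuda"

# Placeholder model, optimizer, and data for illustration.
net = torch.nn.Linear(512, 512).to(device)
opt = torch.optim.SGD(net.parameters(), lr=0.001)
loss_fn = torch.nn.MSELoss()
input = torch.randn(64, 512, device=device)
target = torch.randn(64, 512, device=device)

# The namespaced constructor this commit switches to.
scaler = torch.amp.GradScaler(device)

# Run the forward pass under autocast so eligible ops execute in float16.
with torch.autocast(device_type=device, dtype=torch.float16):
    output = net(input)
    loss = loss_fn(output, target)

scaler.scale(loss).backward()    # scale the loss so float16 gradients don't underflow
scaler.step(opt)                 # unscales gradients; skips the step if infs/NaNs are found
scaler.update()                  # adjusts the scale factor for the next iteration
opt.zero_grad(set_to_none=True)  # reset gradients between iterations

As the second hunk shows, ``GradScaler`` also accepts ``enabled=use_amp``, so the same training loop can run with or without mixed precision from a single flag.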
