Skip to content

Commit

Permalink
Merge branch 'master' of github.com:nimarb/pytorch_influence_functions
Browse files Browse the repository at this point in the history
  • Loading branch information
nimarb committed Jul 17, 2020
2 parents 4d8547e + fc88319 commit 66c9a9e
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 3 deletions.
2 changes: 1 addition & 1 deletion pytorch_influence_functions/calc_influence_function.py
Original file line number Diff line number Diff line change
Expand Up @@ -466,7 +466,7 @@ def calc_img_wise(config, model, train_loader, test_loader):

start_time = time.time()
influence, harmful, helpful, _ = calc_influence_single(
model, train_loader, test_loader, test_id_num=i, gpu=0,
model, train_loader, test_loader, test_id_num=i, gpu=config['gpu'],
recursion_depth=config['recursion_depth'], r=config['r_averaging'])
end_time = time.time()

Expand Down
6 changes: 4 additions & 2 deletions pytorch_influence_functions/influence_function.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,8 @@ def s_test(z_test, t_test, model, z_loader, gpu=-1, damp=0.01, scale=25.0,
x, t = x.cuda(), t.cuda()
y = model(x)
loss = calc_loss(y, t)
hv = hvp(loss, list(model.parameters()), h_estimate)
params = [ p for p in model.parameters() if p.requires_grad ]
hv = hvp(loss, params, h_estimate)
# Recursively calculate h_estimate
h_estimate = [
_v + (1 - damp) * _h_e - _hv / scale
Expand Down Expand Up @@ -93,7 +94,8 @@ def grad_z(z, t, model, gpu=-1):
y = model(z)
loss = calc_loss(y, t)
# Compute sum of gradients from model parameters to loss
return list(grad(loss, list(model.parameters()), create_graph=True))
params = [ p for p in model.parameters() if p.requires_grad ]
return list(grad(loss, params, create_graph=True))


def hvp(y, w, v):
Expand Down

0 comments on commit 66c9a9e

Please sign in to comment.