labels can be directly passed in, if training encoder
lucidrains committed Apr 25, 2023
1 parent ee6155e · commit 72ee8da
Showing 2 changed files with 3 additions and 2 deletions.
@@ -344,13 +344,14 @@ def forward(
         *,
         mask = None,
         return_loss = False,
+        labels = None,
         memory_replay_backprop = False, # whether to have the class do the backwards pass memory efficiently
         mrbp_loss_weight = 1. # if using memory replay backprop with gradient accumulation, scale loss by this factor ex. (1. / <num grad accum steps>)
     ):
         seq_len = self.seq_len

         labels = None
-        if return_loss or memory_replay_backprop:
+        if (return_loss or memory_replay_backprop) and not exists(labels):
             x, labels = x[:, :-1], x[:, 1:]

         # segment input
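The practical effect: when return_loss or memory_replay_backprop is set, the wrapper now auto-derives labels by shifting the input only if none were passed in, so an explicitly supplied labels tensor takes precedence. A minimal usage sketch of the new call follows; the class names match the project README, but the constructor arguments and shapes here are illustrative assumptions, not a verified snippet:

import torch
from recurrent_memory_transformer_pytorch import (
    RecurrentMemoryTransformer,
    RecurrentMemoryTransformerWrapper
)

# hyperparameters modeled on the README example (assumptions)
model = RecurrentMemoryTransformer(
    num_tokens = 20000,
    dim = 512,
    depth = 6,
    seq_len = 1024,
    num_memory_tokens = 128,
    causal = False        # encoder-style training, where shifted-input labels do not apply
)

model = RecurrentMemoryTransformerWrapper(model)

seq    = torch.randint(0, 20000, (1, 4096))
labels = torch.randint(0, 20000, (1, 4096))   # targets supplied directly, not derived from seq

# with explicit labels, the wrapper no longer builds them by shifting x
loss = model(seq, labels = labels, return_loss = True)
loss.backward()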
setup.py: 2 changes (1 addition, 1 deletion)
@@ -3,7 +3,7 @@
 setup(
   name = 'recurrent-memory-transformer-pytorch',
   packages = find_packages(exclude=[]),
-  version = '0.1.0',
+  version = '0.1.1',
   license='MIT',
   description = 'Recurrent Memory Transformer - Pytorch',
   author = 'Phil Wang',
