Skip to content

Commit

Permalink
Clean up sequential sequence module
Browse files Browse the repository at this point in the history
  • Loading branch information
lucidrains committed Nov 4, 2020
1 parent 681aacb commit 8e470aa
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 7 deletions.
7 changes: 1 addition & 6 deletions performer_pytorch/reversible.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,22 +123,17 @@ def backward(ctx, dy):
y, dy = block.backward_pass(y, dy, **kwargs)
return dy, None, None


class SequentialSequence(nn.Module):
def __init__(self, layers, args_route = {}, layer_dropout = 0.):
def __init__(self, layers, args_route = {}):
super().__init__()
assert all(len(route) == len(layers) for route in args_route.values()), 'each argument route map must have the same depth as the number of sequential layers'
self.layers = layers
self.args_route = args_route
self.layer_dropout = layer_dropout

def forward(self, x, **kwargs):
args = route_args(self.args_route, kwargs, len(self.layers))
layers_and_args = list(zip(self.layers, args))

if self.training and self.layer_dropout > 0:
layers_and_args = layer_drop(layers_and_args, self.layer_dropout)

for (f, g), (f_args, g_args) in layers_and_args:
x = x + f(x, **f_args)
x = x + g(x, **g_args)
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
setup(
name = 'performer-pytorch',
packages = find_packages(exclude=['examples']),
version = '0.7.0',
version = '0.7.1',
license='MIT',
description = 'Performer - Pytorch',
author = 'Phil Wang',
Expand Down

0 comments on commit 8e470aa

Please sign in to comment.