This repository has been archived by the owner on Nov 3, 2022. It is now read-only.

Add Parametric Soft Exponential Unit (PSEU) activation layer #451

Closed
SriRangaTarun wants to merge 61 commits
Commits (61), showing changes from all commits
6ba333a Create pseu.py (SriRangaTarun, Feb 10, 2019)
82ae700 Add PSEU activation layer (SriRangaTarun, Feb 10, 2019)
3012cde Create test_pseu.py (SriRangaTarun, Feb 10, 2019)
d6cf865 Add PSEU test (SriRangaTarun, Feb 10, 2019)
c9ac0e4 Update PSEU layer (SriRangaTarun, Feb 10, 2019)
abd58ae Update test (SriRangaTarun, Feb 10, 2019)
1891e17 Add import (SriRangaTarun, Feb 10, 2019)
8022191 Merge pull request #1 from keras-team/master (SriRangaTarun, Feb 10, 2019)
542db44 Fix silly errors in test (SriRangaTarun, Feb 10, 2019)
6888fd5 Fix pep8 violation (SriRangaTarun, Feb 10, 2019)
8497f72 Fix import error (SriRangaTarun, Feb 10, 2019)
caa51f9 Merge pull request #2 from keras-team/master (SriRangaTarun, Feb 10, 2019)
94c5ac1 Fix pep8 (SriRangaTarun, Feb 10, 2019)
1015e10 import numpy (SriRangaTarun, Feb 10, 2019)
dc3588a utf-8 (SriRangaTarun, Feb 10, 2019)
f214d53 Fix test (SriRangaTarun, Feb 10, 2019)
60d1d1e Update test_pseu.py (SriRangaTarun, Feb 10, 2019)
e1cefb0 Update test_pseu.py (SriRangaTarun, Feb 10, 2019)
326bfa3 Fix silly error (SriRangaTarun, Feb 10, 2019)
d28a544 Update test_pseu.py (SriRangaTarun, Feb 10, 2019)
df9e964 Update pseu.py (SriRangaTarun, Feb 10, 2019)
51ef0c8 Update pseu.py (SriRangaTarun, Feb 10, 2019)
a3bccfb Remove unnecessary initializer (SriRangaTarun, Feb 11, 2019)
a880bd0 Add missing import (SriRangaTarun, Feb 11, 2019)
3fd819e Fix import order (SriRangaTarun, Feb 11, 2019)
55676cd Fix import order (SriRangaTarun, Feb 11, 2019)
be42270 Fix small problems (SriRangaTarun, Feb 11, 2019)
df24e8f Sort imports in alphabetical order (SriRangaTarun, Feb 11, 2019)
d57d963 Remove unnecessary condition (SriRangaTarun, Feb 12, 2019)
e6456e1 Remove unnecessary comment in docstring (SriRangaTarun, Feb 12, 2019)
aa03980 Add option to specify initializer for alpha weights (SriRangaTarun, Feb 12, 2019)
839be17 Add initializer parameter to test (SriRangaTarun, Feb 12, 2019)
9813264 Remove unnecessary line (SriRangaTarun, Feb 12, 2019)
8a34e8f Remove useless spaces (SriRangaTarun, Feb 12, 2019)
dd84064 Remove unnecessary initializer parameter from test (SriRangaTarun, Feb 12, 2019)
f2c5d1a Remove unnecessary initializer parameter from test (SriRangaTarun, Feb 12, 2019)
f2b0f97 Fix silly mistake (SriRangaTarun, Feb 12, 2019)
6bdf843 Fix initializer parameter in test (SriRangaTarun, Feb 12, 2019)
6d7b8aa Add necessary sign parameter and add overrides (SriRangaTarun, Feb 12, 2019)
0aa0b40 Fix test (SriRangaTarun, Feb 12, 2019)
7d52579 Fix pep8 problems (SriRangaTarun, Feb 12, 2019)
df63af4 Add default initializer (SriRangaTarun, Feb 12, 2019)
3736b42 Fix silly errors (SriRangaTarun, Feb 12, 2019)
f7758b9 REVERT TO NO-INITIALIZER VERSION (SriRangaTarun, Feb 12, 2019)
ed77ff7 Remove initializer param from test (SriRangaTarun, Feb 12, 2019)
7749c67 Add custom initializer to work correctly (SriRangaTarun, Feb 12, 2019)
19d978b Fix pep8 problems (SriRangaTarun, Feb 12, 2019)
75c83d2 Remove unnecessary lines and trainability (SriRangaTarun, Feb 12, 2019)
8791343 Remove trainable param from test (SriRangaTarun, Feb 12, 2019)
605ddc1 Add PSEU to CODEOWNERS (SriRangaTarun, Feb 12, 2019)
e722372 Avoid calling alpha_init inside build function (SriRangaTarun, Feb 12, 2019)
351ef64 Clean up code and remove unnecessary lines (SriRangaTarun, Feb 12, 2019)
69feddb Fix silly mistake (SriRangaTarun, Feb 12, 2019)
ef4a20f Simplify code (SriRangaTarun, Feb 12, 2019)
4d8f0fb Simplify code (SriRangaTarun, Feb 12, 2019)
dd22dd8 Add important note in docsting (SriRangaTarun, Feb 13, 2019)
49ec6c8 Set trainable=False (SriRangaTarun, Feb 13, 2019)
3e53036 Adjust alpha docs (SriRangaTarun, Feb 13, 2019)
5ce52c3 Update alpha docstring (SriRangaTarun, Feb 13, 2019)
b919451 Improve docs (SriRangaTarun, Feb 13, 2019)
f4e1c65 Fix small typo in docs (SriRangaTarun, Feb 13, 2019)
1 change: 1 addition & 0 deletions CODEOWNERS
@@ -26,6 +26,7 @@ keras_contrib/callbacks/snapshot.py @titu1994


# layers
keras_contrib/layers/advanced_activations/pseu.py @SriRangaTarun
keras_contrib/layers/advanced_activations/sinerelu.py @wilderrodrigues
keras_contrib/layers/advanced_activations/swish.py @gabrieldemarmiesse
keras_contrib/layers/convolutional/subpixelupscaling.py @titu1994
1 change: 1 addition & 0 deletions keras_contrib/layers/__init__.py
@@ -1,6 +1,7 @@
from __future__ import absolute_import

from .advanced_activations.pelu import PELU
from .advanced_activations.pseu import PSEU
from .advanced_activations.srelu import SReLU
from .advanced_activations.swish import Swish
from .advanced_activations.sinerelu import SineReLU
75 changes: 75 additions & 0 deletions keras_contrib/layers/advanced_activations/pseu.py
@@ -0,0 +1,75 @@
# -*- coding: utf-8 -*-
from keras import backend as K
from keras.layers import Layer


class PSEU(Layer):
    """Parametric Soft Exponential Unit
    See: https://arxiv.org/pdf/1602.01321.pdf by Godfrey and Gashler
    Reference: https://github.com/keras-team/keras/issues/3842 (@hobson)

    Soft Exponential f(α, x):
        α == 0: x
        α > 0: (exp(αx) - 1) / α + α
        α < 0: -ln(1 - α(x + α)) / α

    # Input shape
        Arbitrary. Use the keyword argument `input_shape`
        (tuple of integers, does not include the samples axis)
        when using this layer as the first layer in a model.

    # Output shape
        Same shape as the input.

    # Arguments
        alpha: Value of the alpha weights (float)

        NOTE: This function can become unstable for
        negative values of α. In particular, it returns NaNs
        when α < 0 and x <= 1/α - α (where x is the input),
        because the argument of the logarithm becomes
        non-positive. If the function starts returning NaNs
        for α < 0, try decreasing the magnitude of α, or
        normalize the data into a fixed range before passing
        it to PSEU. Adjust α based on your specific dataset
        and use case.

    # Example
        model = Sequential()
        model.add(Dense(10, input_shape=(5,)))
        model.add(PSEU(alpha=0.2))
    """
    def __init__(self,
                 alpha=0.1,
                 **kwargs):
        super(PSEU, self).__init__(**kwargs)
        self.alpha = alpha

    def alpha_initializer(self, input_shape):
        # Fill the alpha weights with the fixed scalar value of alpha.
        return self.alpha * K.ones(input_shape)

    def build(self, input_shape):
        new_input_shape = input_shape[1:]
        self.alphas = self.add_weight(shape=new_input_shape,
                                      name='{}_alphas'.format(self.name),
                                      initializer=self.alpha_initializer,
                                      trainable=False)
        self.built = True

    def call(self, x):
        if self.alpha < 0:
            return -K.log(1 - self.alphas * (x + self.alphas)) / self.alphas
        elif self.alpha > 0:
            return self.alphas + (K.exp(self.alphas * x) - 1.) / self.alphas
        else:
            return x

    def compute_output_shape(self, input_shape):
        return input_shape

    def get_config(self):
        config = {'alpha': self.alpha}
        base_config = super(PSEU, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
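For context (illustration only, not part of the diff): a minimal NumPy sketch of the soft exponential function implemented above, showing the NaN region described in the docstring. The alpha values and sample inputs here are arbitrary choices for demonstration.

import numpy as np


def soft_exponential(alpha, x):
    # Piecewise soft exponential of Godfrey and Gashler (illustration only).
    if alpha == 0:
        return x
    if alpha > 0:
        return (np.exp(alpha * x) - 1.) / alpha + alpha
    # alpha < 0: the log argument 1 - alpha * (x + alpha) must stay positive,
    # so the result is NaN once x <= 1/alpha - alpha.
    return -np.log(1. - alpha * (x + alpha)) / alpha


x = np.array([-3., -1., 0., 1., 3.])
print(soft_exponential(0.2, x))    # finite, slightly super-linear outputs
print(soft_exponential(-0.2, x))   # finite: all inputs lie above 1/(-0.2) - (-0.2) = -4.8
print(soft_exponential(-0.2, np.array([-10.])))  # NaN (log of a negative number)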
15 changes: 15 additions & 0 deletions tests/keras_contrib/layers/advanced_activations/test_pseu.py
@@ -0,0 +1,15 @@
# -*- coding: utf-8 -*-
import pytest
from keras_contrib.utils.test_utils import layer_test
from keras_contrib.layers import PSEU


@pytest.mark.parametrize('alpha', [-0.1, 0., 0.1])
def test_pseu(alpha):
    layer_test(PSEU,
               kwargs={'alpha': alpha},
               input_shape=(2, 3, 4))


if __name__ == '__main__':
    pytest.main([__file__])
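A minimal end-to-end usage sketch based on the docstring example (the layer sizes, optimizer, and random input below are illustrative choices, not part of this PR):

import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras_contrib.layers import PSEU

# Build the small model from the docstring example and run a forward pass.
model = Sequential()
model.add(Dense(10, input_shape=(5,)))
model.add(PSEU(alpha=0.2))
model.compile(optimizer='sgd', loss='mse')

x = np.random.rand(4, 5)   # 4 samples, 5 features
y = model.predict(x)
print(y.shape)             # (4, 10): PSEU preserves the shape of its input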