This repository has been archived by the owner on Nov 3, 2022. It is now read-only.

Add Parametric Soft Exponential Unit (PSEU) activation layer #459

Open · wants to merge 25 commits into master (changes from 22 of 25 commits shown)
1 change: 1 addition & 0 deletions CODEOWNERS
@@ -26,6 +26,7 @@ keras_contrib/callbacks/snapshot.py @titu1994


# layers
keras_contrib/layers/advanced_activations/pseu.py @SriRangaTarun
keras_contrib/layers/advanced_activations/sinerelu.py @wilderrodrigues
keras_contrib/layers/advanced_activations/swish.py @gabrieldemarmiesse
keras_contrib/layers/convolutional/subpixelupscaling.py @titu1994
1 change: 1 addition & 0 deletions keras_contrib/layers/__init__.py
@@ -1,6 +1,7 @@
from __future__ import absolute_import

from .advanced_activations.pelu import PELU
from .advanced_activations.pseu import PSEU
from .advanced_activations.srelu import SReLU
from .advanced_activations.swish import Swish
from .advanced_activations.sinerelu import SineReLU
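With this export in place, the layer can be imported directly from keras_contrib.layers. A minimal usage sketch, mirroring the docstring example in the new file below; the optimizer and loss choices here are illustrative and not part of this PR:

from keras.models import Sequential
from keras.layers import Dense
from keras_contrib.layers import PSEU

model = Sequential()
model.add(Dense(10, input_shape=(5,)))
model.add(PSEU(alpha=0.2))  # fixed, non-trainable alpha applied elementwise
model.compile(optimizer='adam', loss='mse')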
81 changes: 81 additions & 0 deletions keras_contrib/layers/advanced_activations/pseu.py
@@ -0,0 +1,81 @@
# -*- coding: utf-8 -*-
from keras import backend as K
from keras.layers import Layer
from keras_contrib.utils.test_utils import to_tuple
from keras_contrib.utils.test_utils import is_tf_keras


class PSEU(Layer):
"""Parametric Soft Exponential Unit
See: https://arxiv.org/pdf/1602.01321.pdf by Godfrey and Gashler
Reference: https://github.com/keras-team/keras/issues/3842 (@hobson)
Soft Exponential f(α, x):
α == 0: x
α > 0: (exp(αx)-1) / α + α
α < 0: -ln(1-α(x + α)) / α
# Input shape
Arbitrary. Use the keyword argument `input_shape`
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
# Output shape
Same shape as the input.
# Arguments
alpha: Value of the alpha weights (float)
NOTE: This function can become unstable for
negative values of α. In particular, the
function returns NaNs when α < 0 and x <= 1/α - α
(where x is the input).
If the function starts returning NaNs for α < 0,
try decreasing the magnitude of α.
Alternatively, you can normalize the data into fixed
ranges before passing it to PSEU.
Adjust α based on your specific dataset
and use-case.
# Example
model = Sequential()
model.add(Dense(10, input_shape=(5,)))
model.add(PSEU(alpha=0.2))
"""
def __init__(self,
alpha=0.1,
**kwargs):

super(PSEU, self).__init__(**kwargs)
self.alpha = alpha
self.trainable = False

if is_tf_keras:
def alpha_initializer(self, input_shape, dtype='float32', partition_info=None):
Review comments on this line:

Contributor: partition_info is a useless arg

Contributor: No need to separate is_tf_keras case

Contributor Author: @RaphaelMeudec The initializer does not work in tf.keras without the partition_info argument.

Contributor: Maybe you can use **kwargs to have only one declaration.

Contributor Author (@SriRangaTarun, Mar 4, 2019): @gabrieldemarmiesse Added **kwargs

return self.alpha * K.ones(input_shape,
dtype=dtype)

else:
def alpha_initializer(self, input_shape, dtype='float32'):
return self.alpha * K.ones(input_shape,
dtype=dtype)

def build(self, input_shape):
input_shape = to_tuple(input_shape)
new_input_shape = input_shape[1:]
self.alphas = self.add_weight(shape=new_input_shape,
name='{}_alphas'.format(self.name),
initializer=self.alpha_initializer,
trainable=self.trainable)
self.built = True

def call(self, x):
if self.alpha < 0:
return - K.log(1 - self.alphas * (x + self.alphas)) / self.alphas
elif self.alpha > 0:
return self.alphas + (K.exp(self.alphas * x) - 1.) / self.alphas
else:
return x

def compute_output_shape(self, input_shape):
return input_shape

def get_config(self):
config = {'alpha': self.alpha,
'trainable': self.trainable}
base_config = super(PSEU, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
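Following up on the review thread above: one way the suggested **kwargs change could look is a single alpha_initializer that absorbs whatever extra arguments tf.keras may pass (such as partition_info), so the is_tf_keras branch is no longer needed. This is a sketch of the reviewers' suggestion, not the code in this diff:

def alpha_initializer(self, input_shape, dtype='float32', **kwargs):
    # Extra arguments (e.g. partition_info from tf.keras) are accepted and ignored.
    return self.alpha * K.ones(input_shape, dtype=dtype)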
15 changes: 15 additions & 0 deletions tests/keras_contrib/layers/advanced_activations/test_pseu.py
@@ -0,0 +1,15 @@
# -*- coding: utf-8 -*-
import pytest
from keras_contrib.utils.test_utils import layer_test
from keras_contrib.layers import PSEU


@pytest.mark.parametrize('alpha', [-0.1, 0., 0.1])
def test_pseu(alpha):
layer_test(PSEU,
kwargs={'alpha': alpha},
input_shape=(2, 3, 4))


if __name__ == '__main__':
pytest.main([__file__])
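For a quick sanity check outside the layer_test harness, the piecewise soft exponential from the docstring can be written directly in NumPy and evaluated at a few points; this is a standalone sketch (NumPy only), not part of the PR:

import numpy as np

def soft_exponential(alpha, x):
    # Reference form of f(alpha, x) from Godfrey and Gashler.
    if alpha == 0:
        return x
    if alpha > 0:
        return (np.exp(alpha * x) - 1.0) / alpha + alpha
    # alpha < 0: the log argument must stay positive, i.e. x > 1/alpha - alpha,
    # otherwise NaNs/infs appear (the instability noted in the docstring).
    return -np.log(1.0 - alpha * (x + alpha)) / alpha

x = np.linspace(-2.0, 2.0, 5)
for a in (-0.1, 0.0, 0.1):
    print(a, soft_exponential(a, x))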