Commit

Merge pull request #843 from partobs-mdp/master
Added SELU nonlinearity
f0k committed Jul 12, 2017
2 parents 34af5fb + 1aa7756 commit 6327a74
Showing 4 changed files with 109 additions and 1 deletion.
6 changes: 6 additions & 0 deletions docs/conf.py
@@ -334,6 +334,12 @@ def setup(app):
theano.sandbox.cuda.cuda_enabled = True
theano.sandbox.cuda.dnn.dnn_available = lambda: True

theano.gpuarray = Mock()
sys.modules['theano.gpuarray'] = theano.gpuarray
sys.modules['theano.gpuarray.dnn'] = theano.gpuarray.dnn
theano.gpuarray.pygpu_activated = True
theano.gpuarray.dnn.dnn_present = lambda: True

sys.modules['pylearn2'] = Mock()
sys.modules['pylearn2.sandbox'] = Mock()
sys.modules['pylearn2.sandbox.cuda_convnet'] = Mock()
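For context (not part of this commit): the conf.py hunk above registers Mock objects under the relevant names in sys.modules so that Sphinx autodoc can import lasagne on machines without a working GPU stack. A minimal standalone sketch of the same pattern, assuming unittest.mock.Mock (conf.py's own Mock class plays the same role) and a hypothetical module name gpu_only_package:

import sys
from unittest.mock import Mock

# Register a fake top-level module and a fake submodule so that a later
# "import gpu_only_package.backend" resolves from sys.modules instead of
# trying (and failing) to import the real package.
fake = Mock()
sys.modules['gpu_only_package'] = fake
sys.modules['gpu_only_package.backend'] = fake.backend
# Mirror the dnn_available/dnn_present overrides above: make capability checks pass.
fake.backend.is_available = lambda: True

import gpu_only_package.backend
assert gpu_only_package.backend.is_available()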
5 changes: 5 additions & 0 deletions docs/modules/nonlinearities.rst
@@ -14,6 +14,8 @@
leaky_rectify
very_leaky_rectify
elu
SELU
selu
softplus
linear
identity
@@ -33,6 +35,9 @@ Detailed description
.. autofunction:: leaky_rectify
.. autofunction:: very_leaky_rectify
.. autofunction:: elu
.. autoclass:: SELU
:members:
.. autofunction:: selu
.. autofunction:: softplus
.. autofunction:: linear
.. autofunction:: identity
70 changes: 70 additions & 0 deletions lasagne/nonlinearities.py
@@ -269,6 +269,76 @@ def elu(x):
return theano.tensor.switch(x > 0, x, theano.tensor.expm1(x))


# selu
class SELU(object):
"""
Scaled Exponential Linear Unit
:math:`\\varphi(x)=\\lambda \\left[(x>0) ? x : \\alpha(e^x-1)\\right]`
The Scaled Exponential Linear Unit (SELU) was introduced in [1]_
as an activation function that allows the construction of
self-normalizing neural networks.
Parameters
----------
scale : float32
The scale parameter :math:`\\lambda` for scaling all output.
scale_neg : float32
The scale parameter :math:`\\alpha`
for scaling output for nonpositive argument values.
Methods
-------
__call__(x)
Apply the SELU function to the activation `x`.
Examples
--------
In contrast to other activation functions in this module, this is
a class that needs to be instantiated to obtain a callable:
>>> from lasagne.layers import InputLayer, DenseLayer
>>> l_in = InputLayer((None, 100))
>>> from lasagne.nonlinearities import SELU
>>> selu = SELU(2, 3)
>>> l1 = DenseLayer(l_in, num_units=200, nonlinearity=selu)
See Also
--------
selu: Instance with :math:`\\alpha\\approx1.6733,\\lambda\\approx1.0507`
as used in [1]_.
References
----------
.. [1] Günter Klambauer et al. (2017):
Self-Normalizing Neural Networks,
https://arxiv.org/abs/1706.02515
"""
def __init__(self, scale=1, scale_neg=1):
self.scale = scale
self.scale_neg = scale_neg

def __call__(self, x):
return self.scale * theano.tensor.switch(
x > 0.0,
x,
self.scale_neg * (theano.tensor.expm1(x)))


selu = SELU(scale=1.0507009873554804934193349852946,
scale_neg=1.6732632423543772848170429916717)
selu.__doc__ = """selu(x)
Instance of :class:`SELU` with :math:`\\alpha\\approx 1.6733,
\\lambda\\approx 1.0507`
This has a stable and attracting fixed point of :math:`\\mu=0`,
:math:`\\sigma=1` under the assumptions of the
original paper on self-normalizing neural networks.
"""


# softplus
def softplus(x):
"""Softplus activation function :math:`\\varphi(x) = \\log(1 + e^x)`
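A quick numerical aside (not part of this commit): the mu=0, sigma=1 fixed point claimed in selu.__doc__ above can be checked with a short NumPy sketch; selu_ref below is a hypothetical helper that mirrors SELU.__call__ with the paper constants used for the selu instance:

import numpy as np

SCALE = 1.0507009873554804934193349852946
SCALE_NEG = 1.6732632423543772848170429916717

def selu_ref(x):
    # NumPy counterpart of SELU.__call__: scale * where(x > 0, x, scale_neg * expm1(x))
    return SCALE * np.where(x > 0, x, SCALE_NEG * np.expm1(x))

rng = np.random.RandomState(0)
x = rng.randn(1000000)  # standard-normal activations
y = selu_ref(x)
# Mean and std of the output should stay close to 0 and 1, respectively.
print(y.mean(), y.std())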
29 changes: 28 additions & 1 deletion lasagne/tests/test_nonlinearities.py
@@ -19,6 +19,20 @@ def leaky_rectify_0(self, x):
def elu(self, x, alpha=1):
return np.where(x > 0, x, alpha * (np.expm1(x)))

def selu(self, x, alpha=1, lmbda=1):
return lmbda * np.where(x > 0, x, alpha * np.expm1(x))

def selu_paper(self, x):
return self.selu(x,
alpha=1.6732632423543772848170429916717,
lmbda=1.0507009873554804934193349852946)

def selu_rect(self, x):
return self.selu(x, alpha=0, lmbda=1)

def selu_custom(self, x):
return self.selu(x, alpha=0.12, lmbda=1.21)

def softplus(self, x):
return np.log1p(np.exp(x))

@@ -39,7 +53,10 @@ def softmax(self, x):

@pytest.mark.parametrize('nonlinearity',
['linear', 'rectify',
'leaky_rectify', 'elu', 'sigmoid',
'leaky_rectify', 'elu',
'selu', 'selu_paper',
'selu_rect', 'selu_custom',
'sigmoid',
'tanh', 'scaled_tanh',
'softmax', 'leaky_rectify_0',
'scaled_tanh_p', 'softplus'])
@@ -55,6 +72,16 @@ def test_nonlinearity(self, nonlinearity):
elif nonlinearity == 'scaled_tanh_p':
from lasagne.nonlinearities import ScaledTanH
theano_nonlinearity = ScaledTanH(scale_in=0.5, scale_out=2.27)
elif nonlinearity.startswith('selu'):
from lasagne.nonlinearities import SELU, selu
if nonlinearity == 'selu':
theano_nonlinearity = SELU()
elif nonlinearity == 'selu_paper':
theano_nonlinearity = selu
elif nonlinearity == 'selu_rect':
theano_nonlinearity = SELU(scale=1, scale_neg=0)
elif nonlinearity == 'selu_custom':
theano_nonlinearity = SELU(scale=1.21, scale_neg=0.12)
else:
theano_nonlinearity = getattr(lasagne.nonlinearities,
nonlinearity)
