Commit b2176482 authored by Tommi Koivisto, committed by François Chollet

Add an advanced activation layer for ReLU (#10322)

The max_value argument cannot be used with the built-in relu activation in a layer,
except through a custom layer or a Lambda. Hence, similarly to LeakyReLU and
Softmax, this PR adds an advanced activation layer for ReLU, which also makes a
capped ReLU available.
Parent 1365ed5d
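A minimal usage sketch (not part of the commit; the model and layer sizes are illustrative only): with this layer, a capped ReLU such as ReLU6 can be expressed directly in a model instead of via a Lambda.

from keras.models import Sequential
from keras.layers import Dense, ReLU

# Illustrative model: Dense sizes are arbitrary.
model = Sequential()
model.add(Dense(64, input_shape=(10,)))
model.add(ReLU(max_value=6.))  # capped ReLU: negative inputs -> 0, positives clipped at 6
model.add(Dense(1))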
@@ -257,3 +257,35 @@ class Softmax(Layer):
    def compute_output_shape(self, input_shape):
        return input_shape


class ReLU(Layer):
    """Rectified Linear Unit activation function.

    # Input shape
        Arbitrary. Use the keyword argument `input_shape`
        (tuple of integers, does not include the samples axis)
        when using this layer as the first layer in a model.

    # Output shape
        Same shape as the input.

    # Arguments
        max_value: Float, the maximum output value.
    """

    def __init__(self, max_value=None, **kwargs):
        super(ReLU, self).__init__(**kwargs)
        self.supports_masking = True
        self.max_value = max_value

    def call(self, inputs):
        return activations.relu(inputs, max_value=self.max_value)

    def get_config(self):
        config = {'max_value': self.max_value}
        base_config = super(ReLU, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))

    def compute_output_shape(self, input_shape):
        return input_shape
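For reference, a sketch of what the layer computes, written with plain NumPy (an assumption for illustration, not code from this commit): relu with max_value clamps negatives to 0 and caps positives at max_value.

import numpy as np

def capped_relu(x, max_value=None):
    # Plain-array equivalent of activations.relu(x, max_value=max_value).
    y = np.maximum(x, 0.)
    if max_value is not None:
        y = np.minimum(y, max_value)
    return y

print(capped_relu(np.array([-2., 3., 8.]), max_value=6.))  # [0. 3. 6.]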
@@ -43,5 +43,12 @@ def test_softmax():
               input_shape=(2, 3, 4))


@keras_test
def test_relu():
    for max_value in [None, 1., 6.]:
        layer_test(layers.ReLU, kwargs={'max_value': max_value},
                   input_shape=(2, 3, 4))


if __name__ == '__main__':
    pytest.main([__file__])