Commit f8763efb authored by Francois Chollet

Use tf.nn.relu6 when appropriate in K.relu.

Parent 52e33501
@@ -3161,6 +3161,9 @@ def relu(x, alpha=0., max_value=None):
    # Returns
        A tensor.
    """
    if alpha == 0 and max_value == 6:
        return tf.nn.relu6(x)
    if alpha != 0.:
        x = tf.nn.leaky_relu(x, alpha)
    else:
......
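For context, here is a minimal sketch of how the full dispatch in the TensorFlow backend's `relu` might look after this change. The lines below the `else:` branch are collapsed in the diff, so the clipping logic shown here is an assumption based on the documented behaviour of `K.relu`, not the verbatim backend code:

import tensorflow as tf

def relu(x, alpha=0., max_value=None):
    # Fast path: with no leaky slope and a cap of exactly 6,
    # tf.nn.relu6 computes min(max(x, 0), 6) in one fused op.
    if alpha == 0 and max_value == 6:
        return tf.nn.relu6(x)
    # Generic path (sketch): leaky or plain relu, then optional clipping.
    if alpha != 0.:
        x = tf.nn.leaky_relu(x, alpha)
    else:
        x = tf.nn.relu(x)
    if max_value is not None:
        x = tf.clip_by_value(x, 0., max_value)
    return x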
@@ -163,6 +163,19 @@ def test_relu():
    result = f([test_values])[0]
    assert_allclose(result, test_values, rtol=1e-05)

    # Test max_value
    test_values = [0.5, 1.5]
    f = K.function([x], [activations.relu(x, max_value=1.)])
    result = f([test_values])[0]
    assert np.max(result) <= 1.

    # Test max_value == 6.
    test_values = [0.5, 6.]
    f = K.function([x], [activations.relu(x, max_value=6.)])
    result = f([test_values])[0]
    assert np.max(result) <= 6.


def test_elu():
    x = K.placeholder(ndim=2)
......
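For a quick sanity check outside the Keras test suite, the relu6 fast path and the generic relu-then-clip path can be compared directly. This is a standalone sketch assuming TensorFlow with eager execution; it is not part of this commit:

import numpy as np
import tensorflow as tf

# Values chosen to exercise negatives, the interior, and the 6.0 cap.
x = tf.constant([-2.0, 0.5, 3.0, 6.0, 7.5])

# Fast path taken by the backend when alpha == 0 and max_value == 6.
fast = tf.nn.relu6(x)

# Generic path: plain relu followed by clipping at max_value.
generic = tf.clip_by_value(tf.nn.relu(x), 0.0, 6.0)

# Both paths should agree elementwise.
np.testing.assert_allclose(fast.numpy(), generic.numpy(), rtol=1e-5)
print(fast.numpy())  # [0.  0.5 3.  6.  6. ]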