From f8763efb371f355e65473e7c59a8715cb06bdd0c Mon Sep 17 00:00:00 2001
From: Francois Chollet
Date: Tue, 25 Sep 2018 17:40:54 -0700
Subject: [PATCH] Use tf.nn.relu6 when appropriate in K.relu.

---
 keras/backend/tensorflow_backend.py |  3 +++
 tests/keras/activations_test.py     | 13 +++++++++++++
 2 files changed, 16 insertions(+)

diff --git a/keras/backend/tensorflow_backend.py b/keras/backend/tensorflow_backend.py
index bfd083436..a1b2a11cc 100644
--- a/keras/backend/tensorflow_backend.py
+++ b/keras/backend/tensorflow_backend.py
@@ -3161,6 +3161,9 @@ def relu(x, alpha=0., max_value=None):
     # Returns
         A tensor.
     """
+    if alpha == 0 and max_value == 6:
+        return tf.nn.relu6(x)
+
     if alpha != 0.:
         x = tf.nn.leaky_relu(x, alpha)
     else:
diff --git a/tests/keras/activations_test.py b/tests/keras/activations_test.py
index 5be59def8..dce88a33d 100644
--- a/tests/keras/activations_test.py
+++ b/tests/keras/activations_test.py
@@ -163,6 +163,19 @@ def test_relu():
     result = f([test_values])[0]
     assert_allclose(result, test_values, rtol=1e-05)

+    # Test max_value
+    test_values = [0.5, 1.5]
+    f = K.function([x], [activations.relu(x, max_value=1.)])
+    result = f([test_values])[0]
+    assert np.max(result) <= 1.
+
+    # Test max_value == 6.
+    test_values = [0.5, 6.]
+    f = K.function([x], [activations.relu(x, max_value=6.)])
+    result = f([test_values])[0]
+    assert np.max(result) <= 6.
+
+
 def test_elu():
     x = K.placeholder(ndim=2)
--
GitLab
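
A brief usage sketch, not part of the patch: with the Keras 2.x backend API used in the test file above, calling activations.relu with max_value=6 is expected to take the new tf.nn.relu6 fast path while producing the same clipped output. The placeholder shape and input values below are illustrative only.

import numpy as np
from keras import backend as K
from keras import activations

# Illustrative only: negatives are zeroed and values above 6 are clipped,
# here by the tf.nn.relu6 branch added in this patch.
x = K.placeholder(ndim=2)
f = K.function([x], [activations.relu(x, max_value=6.)])
result = f([np.array([[-1., 3., 7.]])])[0]
print(result)  # expected roughly: [[0. 3. 6.]]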