Commit 00151d81 authored by Katherine Wu, committed by TensorFlower Gardener

Run all tests in both graph and eager mode.

PiperOrigin-RevId: 225245412
Parent: 06f9c2e7
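The pattern applied throughout this change: decorate each test class with
`@test_util.run_all_in_graph_and_eager_modes` so every test method runs twice
(once building a graph, once eagerly), drop the per-test
`with self.cached_session():` wrappers that the decorator makes redundant, and
mark methods that still depend on graph-only behavior with
`@test_util.run_deprecated_v1`. A minimal sketch of the resulting shape (the
`ExampleTest` class and its methods are illustrative only, not part of this
commit):

    import numpy as np

    from tensorflow.python import keras
    from tensorflow.python.framework import test_util
    from tensorflow.python.platform import test


    @test_util.run_all_in_graph_and_eager_modes
    class ExampleTest(test.TestCase):
      # Hypothetical test class mirroring the pattern applied below.

      def test_relu(self):
        # Runs in both graph and eager mode; no cached_session() needed.
        x = keras.backend.placeholder(ndim=2)
        f = keras.backend.function([x], [keras.activations.relu(x)])
        test_values = np.random.random((2, 5))
        # relu is the identity on the non-negative test values.
        self.assertAllClose(f([test_values])[0], test_values, rtol=1e-05)

      @test_util.run_deprecated_v1
      def test_graph_only_behavior(self):
        # Opt-out: exercised only in (deprecated) graph mode.
        pass


    if __name__ == '__main__':
      test.main()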
--- a/tensorflow/python/keras/activations_test.py
+++ b/tensorflow/python/keras/activations_test.py
@@ -31,6 +31,7 @@ def _ref_softmax(values):
   return e / np.sum(e)


+@test_util.run_all_in_graph_and_eager_modes
 class KerasActivationsTest(test.TestCase):

   def test_serialization(self):
@@ -46,12 +47,11 @@ class KerasActivationsTest(test.TestCase):
     assert fn == ref_fn

   def test_softmax(self):
-    with self.cached_session():
-      x = keras.backend.placeholder(ndim=2)
-      f = keras.backend.function([x], [keras.activations.softmax(x)])
-      test_values = np.random.random((2, 5))
-      result = f([test_values])[0]
+    x = keras.backend.placeholder(ndim=2)
+    f = keras.backend.function([x], [keras.activations.softmax(x)])
+    test_values = np.random.random((2, 5))
+    result = f([test_values])[0]
     expected = _ref_softmax(test_values[0])
     self.assertAllClose(result[0], expected, rtol=1e-05)
@@ -60,40 +60,36 @@ class KerasActivationsTest(test.TestCase):
       keras.activations.softmax(x)

   def test_temporal_softmax(self):
-    with self.cached_session():
-      x = keras.backend.placeholder(shape=(2, 2, 3))
-      f = keras.backend.function([x], [keras.activations.softmax(x)])
-      test_values = np.random.random((2, 2, 3)) * 10
-      result = f([test_values])[0]
+    x = keras.backend.placeholder(shape=(2, 2, 3))
+    f = keras.backend.function([x], [keras.activations.softmax(x)])
+    test_values = np.random.random((2, 2, 3)) * 10
+    result = f([test_values])[0]
     expected = _ref_softmax(test_values[0, 0])
     self.assertAllClose(result[0, 0], expected, rtol=1e-05)

+  @test_util.run_deprecated_v1
   def test_selu(self):
     x = keras.backend.placeholder(ndim=2)
     f = keras.backend.function([x], [keras.activations.selu(x)])
     alpha = 1.6732632423543772848170429916717
     scale = 1.0507009873554804934193349852946
-    with self.cached_session():
-      positive_values = np.array([[1, 2]], dtype=keras.backend.floatx())
-      result = f([positive_values])[0]
-      self.assertAllClose(result, positive_values * scale, rtol=1e-05)
+    positive_values = np.array([[1, 2]], dtype=keras.backend.floatx())
+    result = f([positive_values])[0]
+    self.assertAllClose(result, positive_values * scale, rtol=1e-05)

-      negative_values = np.array([[-1, -2]], dtype=keras.backend.floatx())
-      result = f([negative_values])[0]
-      true_result = (np.exp(negative_values) - 1) * scale * alpha
-      self.assertAllClose(result, true_result)
+    negative_values = np.array([[-1, -2]], dtype=keras.backend.floatx())
+    result = f([negative_values])[0]
+    true_result = (np.exp(negative_values) - 1) * scale * alpha
+    self.assertAllClose(result, true_result)

   def test_softplus(self):
     def softplus(x):
       return np.log(np.ones_like(x) + np.exp(x))
-    with self.cached_session():
-      x = keras.backend.placeholder(ndim=2)
-      f = keras.backend.function([x], [keras.activations.softplus(x)])
-      test_values = np.random.random((2, 5))
-      result = f([test_values])[0]
+    x = keras.backend.placeholder(ndim=2)
+    f = keras.backend.function([x], [keras.activations.softplus(x)])
+    test_values = np.random.random((2, 5))
+    result = f([test_values])[0]
     expected = softplus(test_values)
     self.assertAllClose(result, expected, rtol=1e-05)
@@ -101,11 +97,10 @@ class KerasActivationsTest(test.TestCase):
     def softsign(x):
       return np.divide(x, np.ones_like(x) + np.absolute(x))
-    with self.cached_session():
-      x = keras.backend.placeholder(ndim=2)
-      f = keras.backend.function([x], [keras.activations.softsign(x)])
-      test_values = np.random.random((2, 5))
-      result = f([test_values])[0]
+    x = keras.backend.placeholder(ndim=2)
+    f = keras.backend.function([x], [keras.activations.softsign(x)])
+    test_values = np.random.random((2, 5))
+    result = f([test_values])[0]
     expected = softsign(test_values)
     self.assertAllClose(result, expected, rtol=1e-05)
@@ -118,68 +113,60 @@ class KerasActivationsTest(test.TestCase):
       return z / (1 + z)
     sigmoid = np.vectorize(ref_sigmoid)
-    with self.cached_session():
-      x = keras.backend.placeholder(ndim=2)
-      f = keras.backend.function([x], [keras.activations.sigmoid(x)])
-      test_values = np.random.random((2, 5))
-      result = f([test_values])[0]
+    x = keras.backend.placeholder(ndim=2)
+    f = keras.backend.function([x], [keras.activations.sigmoid(x)])
+    test_values = np.random.random((2, 5))
+    result = f([test_values])[0]
     expected = sigmoid(test_values)
     self.assertAllClose(result, expected, rtol=1e-05)

+  @test_util.run_deprecated_v1
   def test_hard_sigmoid(self):
     def ref_hard_sigmoid(x):
       x = (x * 0.2) + 0.5
       z = 0.0 if x <= 0 else (1.0 if x >= 1 else x)
       return z
     hard_sigmoid = np.vectorize(ref_hard_sigmoid)
-    with self.cached_session():
-      x = keras.backend.placeholder(ndim=2)
-      f = keras.backend.function([x], [keras.activations.hard_sigmoid(x)])
-      test_values = np.random.random((2, 5))
-      result = f([test_values])[0]
+    x = keras.backend.placeholder(ndim=2)
+    f = keras.backend.function([x], [keras.activations.hard_sigmoid(x)])
+    test_values = np.random.random((2, 5))
+    result = f([test_values])[0]
     expected = hard_sigmoid(test_values)
     self.assertAllClose(result, expected, rtol=1e-05)

   def test_relu(self):
-    with self.cached_session():
-      x = keras.backend.placeholder(ndim=2)
-      f = keras.backend.function([x], [keras.activations.relu(x)])
-      test_values = np.random.random((2, 5))
-      result = f([test_values])[0]
+    x = keras.backend.placeholder(ndim=2)
+    f = keras.backend.function([x], [keras.activations.relu(x)])
+    test_values = np.random.random((2, 5))
+    result = f([test_values])[0]
     # No negative values in test values...
     self.assertAllClose(result, test_values, rtol=1e-05)

+  @test_util.run_deprecated_v1
   def test_elu(self):
-    with self.cached_session():
-      x = keras.backend.placeholder(ndim=2)
-      f = keras.backend.function([x], [keras.activations.elu(x, 0.5)])
-      test_values = np.random.random((2, 5))
-      result = f([test_values])[0]
-      self.assertAllClose(result, test_values, rtol=1e-05)
-      negative_values = np.array([[-1, -2]], dtype=keras.backend.floatx())
-      result = f([negative_values])[0]
-      true_result = (np.exp(negative_values) - 1) / 2
+    x = keras.backend.placeholder(ndim=2)
+    f = keras.backend.function([x], [keras.activations.elu(x, 0.5)])
+    test_values = np.random.random((2, 5))
+    result = f([test_values])[0]
+    self.assertAllClose(result, test_values, rtol=1e-05)
+    negative_values = np.array([[-1, -2]], dtype=keras.backend.floatx())
+    result = f([negative_values])[0]
+    true_result = (np.exp(negative_values) - 1) / 2
     self.assertAllClose(result, true_result)

   def test_tanh(self):
-    with self.cached_session():
-      test_values = np.random.random((2, 5))
-      x = keras.backend.placeholder(ndim=2)
-      exp = keras.activations.tanh(x)
-      f = keras.backend.function([x], [exp])
-      result = f([test_values])[0]
+    test_values = np.random.random((2, 5))
+    x = keras.backend.placeholder(ndim=2)
+    exp = keras.activations.tanh(x)
+    f = keras.backend.function([x], [exp])
+    result = f([test_values])[0]
     expected = np.tanh(test_values)
     self.assertAllClose(result, expected, rtol=1e-05)

   def test_exponential(self):
-    with self.cached_session():
-      test_values = np.random.random((2, 5))
-      x = keras.backend.placeholder(ndim=2)
-      exp = keras.activations.exponential(x)
-      f = keras.backend.function([x], [exp])
-      result = f([test_values])[0]
+    test_values = np.random.random((2, 5))
+    x = keras.backend.placeholder(ndim=2)
+    exp = keras.activations.exponential(x)
+    f = keras.backend.function([x], [exp])
+    result = f([test_values])[0]
     expected = np.exp(test_values)
     self.assertAllClose(result, expected, rtol=1e-05)
--- a/tensorflow/python/keras/constraints_test.py
+++ b/tensorflow/python/keras/constraints_test.py
@@ -21,6 +21,7 @@ from __future__ import print_function
 import numpy as np

 from tensorflow.python import keras
+from tensorflow.python.framework import test_util
 from tensorflow.python.platform import test
@@ -35,6 +36,7 @@ def get_example_array():
   return example_array


+@test_util.run_all_in_graph_and_eager_modes
 class KerasConstraintsTest(test.TestCase):

   def test_serialization(self):
@@ -49,54 +51,47 @@ class KerasConstraintsTest(test.TestCase):
     assert fn.__class__ == ref_fn.__class__

   def test_max_norm(self):
-    with self.cached_session():
-      array = get_example_array()
-      for m in get_test_values():
-        norm_instance = keras.constraints.max_norm(m)
-        normed = norm_instance(keras.backend.variable(array))
-        assert np.all(keras.backend.eval(normed) < m)
-      # a more explicit example
-      norm_instance = keras.constraints.max_norm(2.0)
-      x = np.array([[0, 0, 0], [1.0, 0, 0], [3, 0, 0], [3, 3, 3]]).T
-      x_normed_target = np.array([[0, 0, 0], [1.0, 0, 0],
-                                  [2.0, 0, 0],
-                                  [2. / np.sqrt(3),
-                                   2. / np.sqrt(3),
-                                   2. / np.sqrt(3)]]).T
-      x_normed_actual = keras.backend.eval(
-          norm_instance(keras.backend.variable(x)))
-      self.assertAllClose(x_normed_actual, x_normed_target, rtol=1e-05)
+    array = get_example_array()
+    for m in get_test_values():
+      norm_instance = keras.constraints.max_norm(m)
+      normed = norm_instance(keras.backend.variable(array))
+      assert np.all(keras.backend.eval(normed) < m)
+    # a more explicit example
+    norm_instance = keras.constraints.max_norm(2.0)
+    x = np.array([[0, 0, 0], [1.0, 0, 0], [3, 0, 0], [3, 3, 3]]).T
+    x_normed_target = np.array(
+        [[0, 0, 0], [1.0, 0, 0], [2.0, 0, 0],
+         [2. / np.sqrt(3), 2. / np.sqrt(3), 2. / np.sqrt(3)]]).T
+    x_normed_actual = keras.backend.eval(
+        norm_instance(keras.backend.variable(x)))
+    self.assertAllClose(x_normed_actual, x_normed_target, rtol=1e-05)

   def test_non_neg(self):
-    with self.cached_session():
-      non_neg_instance = keras.constraints.non_neg()
-      normed = non_neg_instance(keras.backend.variable(get_example_array()))
-      assert np.all(np.min(keras.backend.eval(normed), axis=1) == 0.)
+    non_neg_instance = keras.constraints.non_neg()
+    normed = non_neg_instance(keras.backend.variable(get_example_array()))
+    assert np.all(np.min(keras.backend.eval(normed), axis=1) == 0.)

   def test_unit_norm(self):
-    with self.cached_session():
-      unit_norm_instance = keras.constraints.unit_norm()
-      normalized = unit_norm_instance(
-          keras.backend.variable(get_example_array()))
-      norm_of_normalized = np.sqrt(
-          np.sum(keras.backend.eval(normalized) ** 2, axis=0))
-      # In the unit norm constraint, it should be equal to 1.
-      difference = norm_of_normalized - 1.
-      largest_difference = np.max(np.abs(difference))
-      assert np.abs(largest_difference) < 10e-5
+    unit_norm_instance = keras.constraints.unit_norm()
+    normalized = unit_norm_instance(keras.backend.variable(get_example_array()))
+    norm_of_normalized = np.sqrt(
+        np.sum(keras.backend.eval(normalized)**2, axis=0))
+    # In the unit norm constraint, it should be equal to 1.
+    difference = norm_of_normalized - 1.
+    largest_difference = np.max(np.abs(difference))
+    assert np.abs(largest_difference) < 10e-5

   def test_min_max_norm(self):
-    with self.cached_session():
-      array = get_example_array()
-      for m in get_test_values():
-        norm_instance = keras.constraints.min_max_norm(min_value=m,
-                                                       max_value=m * 2)
-        normed = norm_instance(keras.backend.variable(array))
-        value = keras.backend.eval(normed)
-        l2 = np.sqrt(np.sum(np.square(value), axis=0))
-        assert not l2[l2 < m]
-        assert not l2[l2 > m * 2 + 1e-5]
+    array = get_example_array()
+    for m in get_test_values():
+      norm_instance = keras.constraints.min_max_norm(
+          min_value=m, max_value=m * 2)
+      normed = norm_instance(keras.backend.variable(array))
+      value = keras.backend.eval(normed)
+      l2 = np.sqrt(np.sum(np.square(value), axis=0))
+      assert not l2[l2 < m]
+      assert not l2[l2 > m * 2 + 1e-5]


 if __name__ == '__main__':