Unverified commit 4c563e0b, authored by kangguangli, committed by GitHub

replace cross_entropy except in python/paddle/fluid/tests/unittests/*.py && unittests/*/*.py (#48922)
Parent: fcdf633f
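Every hunk below applies the same mechanical substitution: the legacy fluid.layers.cross_entropy, which took already-softmaxed probabilities and returned per-sample losses, is replaced by paddle.nn.functional.cross_entropy with reduction='none' and use_softmax=False, so the tests keep their explicit paddle.mean reduction and the numerics stay unchanged. Below is a minimal sketch of the pattern, assuming a Paddle 2.x dygraph environment; the tensor names (logits, probs, labels) are illustrative and not taken from the diff.

import paddle
import paddle.nn.functional as F

paddle.seed(0)
logits = paddle.rand([4, 10])              # 4 samples, 10 classes
probs = F.softmax(logits)                  # the legacy op expected probabilities
labels = paddle.randint(0, 10, shape=[4])  # integer class labels (int64)

# Old (removed): loss = fluid.layers.cross_entropy(input=probs, label=labels)
# New equivalent: keep per-sample losses and skip the internal softmax.
loss = F.cross_entropy(probs, labels, reduction='none', use_softmax=False)
avg_loss = paddle.mean(loss)               # the tests still reduce explicitly, as before

The use_softmax=False flag matters in cases such as the SeResNeXt test below, where a softmax is already applied explicitly before the loss; leaving the default would apply softmax a second time and change the result.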
@@ -22,7 +22,6 @@ import paddle.static as static
 import paddle.utils as utils
 from paddle.distributed import fleet
 from paddle.distributed.fleet import auto
-from paddle.fluid import layers
 paddle.enable_static()
 _global_parallel_strategy = None
@@ -93,7 +92,9 @@ def mlp_pretrain_forward(train_program, start_program):
     predict = mlp(input)
-    cost = layers.cross_entropy(input=predict, label=label)
+    cost = paddle.nn.functional.cross_entropy(
+        input=predict, label=label, reduction='none', use_softmax=False
+    )
     avg_cost = paddle.mean(x=cost)
     return avg_cost, train_program, start_program
...
@@ -34,7 +34,9 @@ class TestDistMnist2x2(TestDistRunnerBase):
         # Train program
         predict = cnn_model(images)
-        cost = fluid.layers.cross_entropy(input=predict, label=label)
+        cost = paddle.nn.functional.cross_entropy(
+            input=predict, label=label, reduction='none', use_softmax=False
+        )
         avg_cost = paddle.mean(x=cost)
         # Evaluator
...
@@ -350,7 +350,9 @@ class TestSeResNeXt(TestParallelDyGraphRunnerBase):
         out = model(img)
         softmax_out = paddle.nn.functional.softmax(out, use_cudnn=False)
-        loss = fluid.layers.cross_entropy(input=softmax_out, label=label)
+        loss = paddle.nn.functional.cross_entropy(
+            input=softmax_out, label=label, reduction='none', use_softmax=False
+        )
         avg_loss = paddle.mean(x=loss)
         return avg_loss
...
@@ -100,7 +100,9 @@ class TestDistMnist2x2(TestDistRunnerBase):
         # Train program
         predict = cnn_model(images)
         with fluid.device_guard("gpu:1"):
-            cost = fluid.layers.cross_entropy(input=predict, label=label)
+            cost = paddle.nn.functional.cross_entropy(
+                input=predict, label=label, reduction='none', use_softmax=False
+            )
             avg_cost = paddle.mean(x=cost)
         # Evaluator
...
@@ -100,7 +100,9 @@ class TestDistMnist2x2(TestDistRunnerBase):
         # Train program
         predict = cnn_model(images)
         with fluid.device_guard("gpu:1"):
-            cost = fluid.layers.cross_entropy(input=predict, label=label)
+            cost = paddle.nn.functional.cross_entropy(
+                input=predict, label=label, reduction='none', use_softmax=False
+            )
             avg_cost = paddle.mean(x=cost)
         # Evaluator
...
@@ -92,7 +92,9 @@ class TestDistMnist2x2(TestDistRunnerBase):
         # Train program
         predict = cnn_model(images)
         with fluid.device_guard("gpu:0"):
-            cost = fluid.layers.cross_entropy(input=predict, label=label)
+            cost = paddle.nn.functional.cross_entropy(
+                input=predict, label=label, reduction='none', use_softmax=False
+            )
             avg_cost = paddle.mean(x=cost)
         # Evaluator
...
@@ -41,7 +41,9 @@ class FleetTest(unittest.TestCase):
             feed_list=[image, label], place=fluid.CPUPlace()
         )
         predict = fluid.layers.fc(input=image, size=10, act='softmax')
-        loss = fluid.layers.cross_entropy(input=predict, label=label)
+        loss = paddle.nn.functional.cross_entropy(
+            input=predict, label=label, reduction='none', use_softmax=False
+        )
         avg_loss = paddle.mean(loss)
         optimizer = fluid.optimizer.AdamOptimizer(learning_rate=0.001)
...
@@ -1308,7 +1308,9 @@ class TestResnet(unittest.TestCase):
            ):
                out = resnet(img)
-               loss = fluid.layers.cross_entropy(input=out, label=label)
+               loss = paddle.nn.functional.cross_entropy(
+                   input=out, label=label, reduction='none', use_softmax=False
+               )
                avg_loss = paddle.mean(x=loss)
                dy_out = avg_loss.numpy()
...
@@ -1298,7 +1298,9 @@ class TestResnet(unittest.TestCase):
            ):
                out = resnet(img)
-               loss = fluid.layers.cross_entropy(input=out, label=label)
+               loss = paddle.nn.functional.cross_entropy(
+                   input=out, label=label, reduction='none', use_softmax=False
+               )
                avg_loss = paddle.mean(x=loss)
                dy_out = avg_loss.numpy()
...
@@ -53,7 +53,12 @@ class QuantDequantTensorRTSubgraphPassConvTest(QuantDequantTest):
            elif self.conv_groups == 4:
                cout = paddle.reshape(conv_out, shape=[1, 1, 10816])
            result = fluid.layers.relu(cout)
-           loss = fluid.layers.cross_entropy(input=result, label=label_shape)
+           loss = paddle.nn.functional.cross_entropy(
+               input=result,
+               label=label_shape,
+               reduction='none',
+               use_softmax=False,
+           )
            avg_loss = paddle.mean(loss)
            return avg_loss, result
@@ -156,7 +161,12 @@ class DynamicShapeQuantDequantTensorRTSubgraphPassConvTest(QuantDequantTest):
            )
            cout = paddle.reshape(conv_out, shape=[1, 1, 10816])
            result = fluid.layers.relu(cout)
-           loss = fluid.layers.cross_entropy(input=result, label=label_shape)
+           loss = paddle.nn.functional.cross_entropy(
+               input=result,
+               label=label_shape,
+               reduction='none',
+               use_softmax=False,
+           )
            avg_loss = paddle.mean(loss)
            return avg_loss, result
@@ -257,7 +267,12 @@ class QuantDequantTensorRTSubgraphPassConvTransposeTest(QuantDequantTest):
            elif self.conv_groups == 4:
                cout = paddle.reshape(conv_out, shape=[1, 1, 10816])
            result = fluid.layers.relu(cout)
-           loss = fluid.layers.cross_entropy(input=result, label=label_shape)
+           loss = paddle.nn.functional.cross_entropy(
+               input=result,
+               label=label_shape,
+               reduction='none',
+               use_softmax=False,
+           )
            avg_loss = paddle.mean(loss)
            return avg_loss, result
...
@@ -38,7 +38,12 @@ class FCQuantDequantFusePassTRTDims3Cols1Test(QuantDequantTest):
                act="relu",
            )
            result = fluid.layers.relu(fc_out)
-           loss = fluid.layers.cross_entropy(input=result, label=self.label)
+           loss = paddle.nn.functional.cross_entropy(
+               input=result,
+               label=self.label,
+               reduction='none',
+               use_softmax=False,
+           )
            avg_loss = paddle.mean(loss)
            return avg_loss, result
@@ -105,7 +110,12 @@ class FCQuantDequantFusePassTRTDims3Cols2Test(QuantDequantTest):
            )
            c_out = paddle.reshape(fc_out, shape=[0, 784])
            result = fluid.layers.relu(c_out)
-           loss = fluid.layers.cross_entropy(input=result, label=self.label)
+           loss = paddle.nn.functional.cross_entropy(
+               input=result,
+               label=self.label,
+               reduction='none',
+               use_softmax=False,
+           )
            avg_loss = paddle.mean(loss)
            return avg_loss, result
@@ -174,7 +184,12 @@ class FCQuantDequantFusePassTRTDims3Cols3Test(QuantDequantTest):
            )
            c_out = paddle.reshape(fc_out, shape=[1, 1, 2744])
            result = fluid.layers.relu(c_out)
-           loss = fluid.layers.cross_entropy(input=result, label=label_shape)
+           loss = paddle.nn.functional.cross_entropy(
+               input=result,
+               label=label_shape,
+               reduction='none',
+               use_softmax=False,
+           )
            avg_loss = paddle.mean(loss)
            return avg_loss, result
...
@@ -47,7 +47,12 @@ class TensorRTMatMulQuantDequantDims3Test(QuantDequantTest):
                act=None,
            )
            result = fluid.layers.relu(fc_out)
-           loss = fluid.layers.cross_entropy(input=result, label=self.label)
+           loss = paddle.nn.functional.cross_entropy(
+               input=result,
+               label=self.label,
+               reduction='none',
+               use_softmax=False,
+           )
            avg_loss = paddle.mean(loss)
            return avg_loss, result
@@ -144,7 +149,12 @@ class TensorRTMatMulQuantDequantDims4Test(QuantDequantTest):
                act=None,
            )
            result = fluid.layers.relu(fc_out)
-           loss = fluid.layers.cross_entropy(input=result, label=self.label)
+           loss = paddle.nn.functional.cross_entropy(
+               input=result,
+               label=self.label,
+               reduction='none',
+               use_softmax=False,
+           )
            avg_loss = paddle.mean(loss)
            return avg_loss, result
@@ -240,7 +250,12 @@ class TensorRTMatMulQuantDequantDims3DynamicTest(QuantDequantTest):
                act=None,
            )
            result = fluid.layers.relu(fc_out)
-           loss = fluid.layers.cross_entropy(input=result, label=self.label)
+           loss = paddle.nn.functional.cross_entropy(
+               input=result,
+               label=self.label,
+               reduction='none',
+               use_softmax=False,
+           )
            avg_loss = paddle.mean(loss)
            return avg_loss, result
...