From 61fe139f343e4a4920376a853be8511667417aad Mon Sep 17 00:00:00 2001
From: minqiyang
Date: Wed, 3 Apr 2019 16:19:13 +0800
Subject: [PATCH] Polish code

---
 .../fluid/operators/softmax_with_cross_entropy_op.cu |  2 +-
 python/paddle/fluid/dygraph/nn.py                    | 12 ++++++------
 python/paddle/fluid/tests/unittests/test_layers.py   |  5 ++---
 3 files changed, 9 insertions(+), 10 deletions(-)

diff --git a/paddle/fluid/operators/softmax_with_cross_entropy_op.cu b/paddle/fluid/operators/softmax_with_cross_entropy_op.cu
index d00349e94..89aaac4cb 100644
--- a/paddle/fluid/operators/softmax_with_cross_entropy_op.cu
+++ b/paddle/fluid/operators/softmax_with_cross_entropy_op.cu
@@ -404,7 +404,7 @@ class SoftmaxWithCrossEntropyCUDAKernel : public framework::OpKernel {
       int batch_size = logits->dims()[0];
       int feature_size = logits->dims()[1];
       auto* logits_data = logits->data();
-      auto* labels_data = labels->data();
+      auto* labels_data = labels->data();
       SoftmaxWithCrossEntropyFusedKernel(
           logits_data, labels_data, softmax_data, loss_data, batch_size,
           feature_size, context.cuda_device_context().stream());
diff --git a/python/paddle/fluid/dygraph/nn.py b/python/paddle/fluid/dygraph/nn.py
index e1996e4fc..178e6cd48 100644
--- a/python/paddle/fluid/dygraph/nn.py
+++ b/python/paddle/fluid/dygraph/nn.py
@@ -563,7 +563,7 @@ class LayerNorm(layers.Layer):
 
             >>> x = fluid.layers.layer_norm(input=data, begin_norm_axis=1)
         """
-        super(LayerNorm, self).__init__(name_scope, dtype)
+        super(LayerNorm, self).__init__(name_scope)
         self._scale = scale
         self._shift = shift
         self._begin_norm_axis = begin_norm_axis
@@ -840,7 +840,7 @@ class NCE(layers.Layer):
                  custom_dist=None,
                  seed=0,
                  is_sparse=False):
-        super(NCE, self).__init__(name_scope, dtype)
+        super(NCE, self).__init__(name_scope)
         self._param_attr = param_attr
         self._bias_attr = bias_attr
         self._num_total_classes = num_total_classes
@@ -1013,7 +1013,7 @@ class PRelu(layers.Layer):
 
     def __init__(self, name_scope, mode, param_attr=None):
-        super(PRelu, self).__init__(name_scope, dtype)
+        super(PRelu, self).__init__(name_scope)
         self._mode = mode
         self._param_attr = param_attr
         if self._mode not in ['all', 'channel', 'element']:
@@ -1090,7 +1090,7 @@ class BilinearTensorProduct(layers.Layer):
                  act=None,
                  param_attr=None,
                  bias_attr=None):
-        super(BilinearTensorProduct, self).__init__(name_scope, dtype)
+        super(BilinearTensorProduct, self).__init__(name_scope)
         self._param_attr = param_attr
         self._bias_attr = bias_attr
         self._act = act
@@ -1260,7 +1260,7 @@ class Conv2DTranspose(layers.Layer):
                  bias_attr=None,
                  use_cudnn=True,
                  act=None):
-        super(Conv2DTranspose, self).__init__(name_scope, dtype)
+        super(Conv2DTranspose, self).__init__(name_scope)
         assert param_attr is not False, "param_attr should not be False in conv2d_transpose."
         self._param_attr = param_attr
         self._bias_attr = bias_attr
@@ -1388,7 +1388,7 @@ class SequenceConv(layers.Layer):
                  bias_attr=None,
                  param_attr=None,
                  act=None):
-        super(SequenceConv, self).__init__(name_scope, dtype)
+        super(SequenceConv, self).__init__(name_scope)
         self._num_filters = num_filters
         self._filter_size = filter_size
         self._filter_stride = filter_stride
diff --git a/python/paddle/fluid/tests/unittests/test_layers.py b/python/paddle/fluid/tests/unittests/test_layers.py
index 954e822e6..25fe2171a 100644
--- a/python/paddle/fluid/tests/unittests/test_layers.py
+++ b/python/paddle/fluid/tests/unittests/test_layers.py
@@ -672,9 +672,8 @@ class TestBook(LayerTest):
     def make_sampled_softmax_with_cross_entropy(self):
         with program_guard(fluid.default_main_program(),
                            fluid.default_startup_program()):
-            logits = self._get_data(name='Logits', shape=[256], dtype='float64')
-            print(logits.dtype)
-            label = self._get_data(name='Label', shape=[1], dtype='int64')
+            logits = self._get_data(name='Logits', shape=[256], dtype='float32')
+            label = self._get_data(name='Label', shape=[1], dtype='int32')
             num_samples = 25
             output = layers.sampled_softmax_with_cross_entropy(logits, label,
                                                                num_samples)
--
GitLab
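Note on the nn.py hunks: every `__init__` shown (e.g. PRelu's `def __init__(self, name_scope, mode, param_attr=None)`) defines no `dtype` parameter, yet the pre-patch code passed `dtype` to `super().__init__`. That call either references an undefined name (a `NameError` at construction time) or forwards an argument the base `Layer` no longer accepts; the patch drops the stray argument in all six layers. Below is a minimal, self-contained sketch of the failure mode and the fix. It is not PaddlePaddle source: the `Layer` base class here is a hypothetical stand-in for `fluid.dygraph.layers.Layer`, assuming (per the patched calls) that it takes only a name scope.

    class Layer(object):
        """Hypothetical stand-in for fluid.dygraph.layers.Layer.
        Assumption from the patched calls: it accepts only name_scope."""

        def __init__(self, name_scope):
            self._name_scope = name_scope


    class PRelu(Layer):
        # Signature copied from the diff: note that `dtype` is NOT a parameter.
        def __init__(self, name_scope, mode, param_attr=None):
            # Pre-patch code was effectively:
            #     super(PRelu, self).__init__(name_scope, dtype)
            # which raises NameError (no `dtype` exists in this scope).
            # The patched call passes only the name scope:
            super(PRelu, self).__init__(name_scope)
            self._mode = mode
            self._param_attr = param_attr


    if __name__ == '__main__':
        layer = PRelu('prelu', mode='all')  # constructs cleanly after the fix
        print(layer._name_scope)            # -> 'prelu'

The same reasoning applies to LayerNorm, NCE, BilinearTensorProduct, Conv2DTranspose, and SequenceConv: any layer whose constructor bodies never receive a `dtype` argument cannot forward one upward, so the bug would surface the first time each layer was instantiated in dygraph mode.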