From aa560dbb97d110086b9bdbd361fed8db95d93a89 Mon Sep 17 00:00:00 2001
From: Luo Tao
Date: Fri, 11 Nov 2016 12:10:10 +0800
Subject: [PATCH] fix bug in sum_cost

---
 python/paddle/trainer_config_helpers/layers.py     | 18 ++++++++++++------
 .../tests/configs/test_cost_layers.py              |  2 +-
 2 files changed, 13 insertions(+), 7 deletions(-)

diff --git a/python/paddle/trainer_config_helpers/layers.py b/python/paddle/trainer_config_helpers/layers.py
index 10737c90c..92e09b51e 100644
--- a/python/paddle/trainer_config_helpers/layers.py
+++ b/python/paddle/trainer_config_helpers/layers.py
@@ -4048,7 +4048,8 @@ def cross_entropy(input, label, name=None, coeff=1.0, layer_attr=None):
 
     .. code-block:: python
 
-       cost = cross_entropy(input, label)
+       cost = cross_entropy(input=input_layer,
+                            label=label_layer)
 
     :param input: The first input layer.
     :type input: LayerOutput.
@@ -4084,7 +4085,8 @@ def cross_entropy_with_selfnorm(input, label, name=None, coeff=1.0,
 
     .. code-block:: python
 
-       cost = cross_entropy_with_selfnorm(input, label)
+       cost = cross_entropy_with_selfnorm(input=input_layer,
+                                          label=label_layer)
 
     :param input: The first input layer.
     :type input: LayerOutput.
@@ -4122,7 +4124,7 @@ def sum_cost(input, name=None, layer_attr=None):
 
     .. code-block:: python
 
-       cost = sum_cost(input)
+       cost = sum_cost(input=input_layer)
 
     :param input: The first input layer.
     :type input: LayerOutput.
@@ -4133,6 +4135,7 @@ def sum_cost(input, name=None, layer_attr=None):
     :return: LayerOutput object.
     :rtype: LayerOutput.
     """
+    assert isinstance(input, LayerOutput)
     Layer(name=name,
           type=LayerType.SUM_COST,
           inputs=[input.name],
@@ -4141,7 +4144,8 @@ def sum_cost(input, name=None, layer_attr=None):
 
     return LayerOutput(name,
                        LayerType.SUM_COST,
-                       parents=[input])
+                       parents=[input],
+                       size=1)
 
 
 @wrap_name_default()
@@ -4152,7 +4156,8 @@ def huber_cost(input, label, name=None, coeff=1.0, layer_attr=None):
 
     .. code-block:: python
 
-       cost = huber_cost(input, label)
+       cost = huber_cost(input=input_layer,
+                         label=label_layer)
 
     :param input: The first input layer.
     :type input: LayerOutput.
@@ -4188,7 +4193,8 @@ def multi_binary_label_cross_entropy(input, label, name=None, coeff=1.0,
 
     .. code-block:: python
 
-       cost = multi_binary_label_cross_entropy(input, label)
+       cost = multi_binary_label_cross_entropy(input=input_layer,
+                                               label=label_layer)
 
     :param input: The first input layer.
     :type input: LayerOutput
diff --git a/python/paddle/trainer_config_helpers/tests/configs/test_cost_layers.py b/python/paddle/trainer_config_helpers/tests/configs/test_cost_layers.py
index f1b3365f8..cfaf2da00 100644
--- a/python/paddle/trainer_config_helpers/tests/configs/test_cost_layers.py
+++ b/python/paddle/trainer_config_helpers/tests/configs/test_cost_layers.py
@@ -25,4 +25,4 @@ outputs(ctc_layer(input=seq_in, label=labels),
         huber_cost(input=data_layer(name='huber_probs', size=1),
                    label=data_layer(name='huber_label', size=1)),
         multi_binary_label_cross_entropy(input=probs, label=xe_label),
-       sum_cost(hidden))
+       sum_cost(input=hidden))
-- 
GitLab
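
The patch tightens `sum_cost`: it now asserts that its input is a `LayerOutput` and the returned cost layer carries `size=1`, and the docstrings and test config switch to keyword arguments. A minimal sketch of the fixed usage, assuming the PaddlePaddle v1 `trainer_config_helpers` API (the `data_layer`/`fc_layer` names and sizes here are illustrative, not from the patch):

```python
# Minimal network config exercising the fixed sum_cost API.
from paddle.trainer_config_helpers import *

# Illustrative input pipeline; only sum_cost's call style comes from the patch.
data = data_layer(name='input', size=300)
hidden = fc_layer(input=data, size=10)

# sum_cost must receive a LayerOutput (enforced by the new assert) and is
# called with the `input` keyword; the returned cost layer has size 1.
cost = sum_cost(input=hidden)
outputs(cost)
```

Passing anything other than a `LayerOutput` (for example a raw layer name string) now fails the assertion immediately instead of raising an obscure error later, and the explicit `size=1` lets downstream layers query the cost layer's dimensionality.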