From 2838491235c1eb85262f41ae45f2dee5c5fe2d72 Mon Sep 17 00:00:00 2001 From: dangqingqing Date: Tue, 25 Apr 2017 11:04:32 +0800 Subject: [PATCH] add smooth_l1 interface to v2 doc. --- doc/api/v2/config/layer.rst | 5 +++++ python/paddle/trainer_config_helpers/layers.py | 8 ++++---- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/doc/api/v2/config/layer.rst b/doc/api/v2/config/layer.rst index 2a02baf17b..4e3589ebc4 100644 --- a/doc/api/v2/config/layer.rst +++ b/doc/api/v2/config/layer.rst @@ -419,6 +419,11 @@ hsigmoid .. autoclass:: paddle.v2.layer.hsigmoid :noindex: +smooth_l1 +--------- +.. autoclass:: paddle.v2.layer.smooth_l1 + :noindex: + Check Layer ============ diff --git a/python/paddle/trainer_config_helpers/layers.py b/python/paddle/trainer_config_helpers/layers.py index 56fca13d37..1796e48f09 100755 --- a/python/paddle/trainer_config_helpers/layers.py +++ b/python/paddle/trainer_config_helpers/layers.py @@ -116,7 +116,7 @@ __all__ = [ 'spp_layer', 'pad_layer', 'eos_layer', - 'smooth_l1_cost', + 'smooth_l1', 'layer_support', ] @@ -5283,7 +5283,7 @@ def multi_binary_label_cross_entropy(input, @wrap_name_default() @layer_support() -def smooth_l1_cost(input, label, name=None, layer_attr=None): +def smooth_l1(input, label, name=None, layer_attr=None): """ This is a L1 loss but more smooth. It requires that the size of input and label are equal. The formula is as follows, @@ -5307,8 +5307,8 @@ def smooth_l1_cost(input, label, name=None, layer_attr=None): .. code-block:: python - cost = smooth_l1(input=input_layer, - label=label_layer) + cost = smooth_l1(input=input_layer, + label=label_layer) :param input: The input layer. :type input: LayerOutput -- GitLab