diff --git a/python/paddle/trainer_config_helpers/layers.py b/python/paddle/trainer_config_helpers/layers.py
index a0432b3966b31da14232996d9c8c7857bddb11d9..56fca13d37242a091f09b35ca2e139071c644cad 100755
--- a/python/paddle/trainer_config_helpers/layers.py
+++ b/python/paddle/trainer_config_helpers/layers.py
@@ -5286,10 +5286,7 @@ def multi_binary_label_cross_entropy(input,
 def smooth_l1_cost(input, label, name=None, layer_attr=None):
     """
     This is a L1 loss but more smooth. It requires that the
-    size of input and label are equal.
-
-    More details can be found by referring to `Fast R-CNN
-    <https://arxiv.org/pdf/1504.08083v2.pdf>`_
+    size of input and label are equal. The formula is as follows,

     .. math::

@@ -5305,6 +5302,9 @@ def smooth_l1_cost(input, label, name=None, layer_attr=None):
        |x|-0.5& \text{otherwise}
     \end{cases}

+    More details can be found by referring to `Fast R-CNN
+    <https://arxiv.org/pdf/1504.08083v2.pdf>`_
+
     .. code-block:: python

         cost = smooth_l1_cost(input=input_layer,