diff --git a/python/paddle/fluid/layers/control_flow.py b/python/paddle/fluid/layers/control_flow.py
index db5b07558a11e5da2a349db25459bbbe58cc08d3..5394ac3278f8087de18de6c89da1fedb020f4851 100644
--- a/python/paddle/fluid/layers/control_flow.py
+++ b/python/paddle/fluid/layers/control_flow.py
@@ -55,8 +55,6 @@ __all__ = [
 
 def split_lod_tensor(input, mask, level=0):
     """
-    **split_lod_tensor**
-
     This function takes in an input that contains the complete lod information,
     and takes in a mask which is used to mask certain parts of the input.
     The output is the true branch and the false branch with the mask applied to
diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index 627718f87e7df786f4ffd45430a84acbfbfd33a7..d3899cd442ff35e589a3d86ac5a5d92838014df2 100644
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -1638,23 +1638,23 @@ def batch_norm(input,
 
     Args:
         input(variable): The input variable which is a LoDTensor.
-        act(string, default None): Activation type, linear|relu|prelu|...
-        is_test(bool, default False): Used for training or training.
-        momentum(float, default 0.9):
-        epsilon(float, default 1e-05):
+        act(string, Default None): Activation type, linear|relu|prelu|...
+        is_test(bool, Default False): Used for testing or training.
+        momentum(float, Default 0.9): The value used for the moving_mean and moving_var computation.
+        epsilon(float, Default 1e-05): A small value added to the variance to avoid dividing by zero.
         param_attr(ParamAttr): The parameter attribute for Parameter `scale`.
         bias_attr(ParamAttr): The parameter attribute for Parameter `bias`.
         data_layout(string, default NCHW): NCHW|NHWC
-        in_place(bool, default False): Make the input and output of batch norm reuse memory.
+        in_place(bool, Default False): Make the input and output of batch norm reuse memory.
         use_mkldnn(bool, Default false): ${use_mkldnn_comment}
         name(string, Default None): A name for this layer(optional). If set None, the layer
             will be named automatically.
         moving_mean_name(string, Default None): The name of moving_mean which store the global Mean.
         moving_variance_name(string, Default None): The name of the moving_variance which store the global Variance.
-        do_model_average_for_mean_and_var(bool, Default False):
+        do_model_average_for_mean_and_var(bool, Default False): Do model average for mean and variance or not.
 
     Returns:
-        The sequence's last step variable which is a Tensor.
+        Variable: A tensor variable which is the result after applying batch normalization on the input.
 
     Examples:
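For readers skimming the docstring change, here is a minimal usage sketch of the documented batch_norm API. It assumes the old paddle.fluid layers interface that these files belong to; the input name, shape, and the preceding fc layer are illustrative only and are not part of the patch.

    import paddle.fluid as fluid

    # Illustrative graph: a feature vector passed through a fully connected
    # layer and then batch-normalized. Only the batch_norm call mirrors the
    # parameters documented above (act, is_test); names and sizes are made up.
    x = fluid.layers.data(name='x', shape=[784], dtype='float32')
    hidden = fluid.layers.fc(input=x, size=200)
    # As the updated Returns section states, the result is a Variable holding
    # the batch-normalized output of the preceding layer.
    out = fluid.layers.batch_norm(input=hidden, act='relu', is_test=False)

The call exercises only the arguments touched by this diff; the remaining parameters (param_attr, moving_mean_name, and so on) keep their documented defaults.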