diff --git a/python/paddle/trainer_config_helpers/layers.py b/python/paddle/trainer_config_helpers/layers.py index 9dbfb93217262b0599fd4af75449838041da7a23..ac7ca5822de83082e612be95f5de07fc0c49491e 100644 --- a/python/paddle/trainer_config_helpers/layers.py +++ b/python/paddle/trainer_config_helpers/layers.py @@ -789,10 +789,9 @@ class MixedLayerType(LayerOutput): :type size: int :param act: Activation type. :type act: BaseActivation - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param layer_attr: Extra Layer Attribute. :type layer_attr: ExtraLayerAttribute or None @@ -889,10 +888,9 @@ def mixed_layer(size=0, then this function will just return layer's name. :param act: Activation Type. LinearActivation is the default. :type act: BaseActivation - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param layer_attr: The extra layer config. Default is None. :type layer_attr: ExtraLayerAttribute @@ -1034,10 +1032,9 @@ def fc_layer(input, :type act: BaseActivation :param param_attr: The Parameter Attribute|list. :type param_attr: ParameterAttribute - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param layer_attr: Extra Layer config. :type layer_attr: ExtraLayerAttribute | None @@ -1390,10 +1387,9 @@ def pooling_layer(input, :type pooling_type: BasePoolingType | None :param stride: The step size between successive pooling regions. :type stride: Int - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param layer_attr: The Extra Attributes for layer, such as dropout. :type layer_attr: ExtraLayerAttribute | None @@ -1491,10 +1487,9 @@ def lstmemory(input, :type gate_act: BaseActivation :param state_act: state activation type, TanhActivation by default. :type state_act: BaseActivation - :param bias_attr: The Bias Attribute. 
If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param param_attr: Parameter Attribute. :type param_attr: ParameterAttribute | None | False @@ -1617,10 +1612,9 @@ def grumemory(input, This activation affects the :math:`z_t` and :math:`r_t`. It is the :math:`\\sigma` in the above formula. :type gate_act: BaseActivation - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param param_attr: Parameter Attribute. :type param_attr: ParameterAttribute | None | False @@ -1817,10 +1811,9 @@ def expand_layer(input, :type expand_as: LayerOutput :param name: The name of this layer. It is optional. :type name: basestring - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param expand_level: whether input layer is timestep(default) or sequence. :type expand_level: ExpandLevel @@ -1939,10 +1932,9 @@ def seq_reshape_layer(input, :type act: BaseActivation :param layer_attr: extra layer attributes. :type layer_attr: ExtraLayerAttribute. - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :return: LayerOutput object. :rtype: LayerOutput @@ -2326,10 +2318,9 @@ def hsigmoid(input, :type num_classes: int | None :param name: The name of this layer. It is optional. :type name: basestring - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param param_attr: Parameter Attribute. None means default parameter. 
:type param_attr: ParameterAttribute | None @@ -2469,10 +2460,9 @@ def img_conv_layer(input, :type dilation: int | tuple | list :param dilation_y: The y dimension of the dilation. :type dilation_y: int - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param num_channels: number of input channels. If None will be set automatically from previous output. @@ -3219,10 +3209,9 @@ def addto_layer(input, act=None, name=None, bias_attr=None, layer_attr=None): :type input: LayerOutput | list | tuple :param act: Activation Type. LinearActivation is the default. :type act: BaseActivation - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param layer_attr: Extra Layer attribute. :type layer_attr: ExtraLayerAttribute @@ -3375,10 +3364,9 @@ def seq_concat_layer(a, b, act=None, name=None, layer_attr=None, :type act: BaseActivation :param layer_attr: Extra Layer Attribute. :type layer_attr: ExtraLayerAttribute - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :return: LayerOutput object. :rtype: LayerOutput @@ -3558,10 +3546,9 @@ def lstm_step_layer(input, :type gate_act: BaseActivation :param state_act: State Activation Type. TanhActivation is the default. :type state_act: BaseActivation - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param layer_attr: layer's extra attribute. :type layer_attr: ExtraLayerAttribute @@ -3617,10 +3604,9 @@ def gru_step_layer(input, :param name: The name of this layer. It is optional. :param gate_act: Activation type of this layer's two gates. Default is Sigmoid. :type gate_act: BaseActivation - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. 
If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param param_attr: the parameter_attribute for transforming the output_mem from previous step. @@ -3680,10 +3666,9 @@ def gru_step_naive_layer(input, :type act: BaseActivation :param gate_act: Activation type of this layer's two gates. Default is Sigmoid. :type gate_act: BaseActivation - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param param_attr: :param layer_attr: @@ -3813,10 +3798,9 @@ def recurrent_layer(input, :type input: LayerOutput :param act: Activation type. TanhActivation is the default. :type act: BaseActivation - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param param_attr: parameter attribute. :type param_attr: ParameterAttribute @@ -4806,10 +4790,9 @@ def tensor_layer(a, :type act: BaseActivation :param param_attr: The Parameter Attribute. :type param_attr: ParameterAttribute - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param layer_attr: Extra Layer config. :type layer_attr: ExtraLayerAttribute | None @@ -4871,10 +4854,9 @@ def selective_fc_layer(input, :type act: BaseActivation :param param_attr: The Parameter Attribute. :type param_attr: ParameterAttribute - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param layer_attr: Extra Layer config. :type layer_attr: ExtraLayerAttribute | None @@ -5546,10 +5528,9 @@ def nce_layer(input, A uniform distribution will be used if not provided. If not None, its length must be equal to num_classes. :type neg_distribution: list | tuple | collections.Sequence | None - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. 
If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param layer_attr: Extra Layer Attribute. :type layer_attr: ExtraLayerAttribute @@ -5773,20 +5754,21 @@ def cross_entropy(input, :param input: The first input layer. :type input: LayerOutput. :param label: The input label. - :type input: LayerOutput. + :type input: LayerOutput :param name: The name of this layer. It is optional. - :type name: None | basestring. - :param coeff: The cost is multiplied with coeff. - The coefficient affects the gradient in the backward. - :type coeff: float. + :type name: basestring + :param coeff: The weight of the gradient in the back propagation. + 1.0 is the default. + :type coeff: float :param weight: The cost of each sample is multiplied with each weight. The weight should be a layer with size=1. Note that gradient will not be calculated for weight. :type weight: LayerOutout - :param layer_attr: Extra Layer Attribute. + :param layer_attr: The extra layer attribute. See ExtraLayerAttribute for + details. :type layer_attr: ExtraLayerAttribute :return: LayerOutput object. - :rtype: LayerOutput. + :rtype: LayerOutput """ ipts, parents = __cost_input__(input, label, weight) @@ -5819,19 +5801,21 @@ def cross_entropy_with_selfnorm(input, label=label_layer) :param input: The first input layer. - :type input: LayerOutput. + :type input: LayerOutput :param label: The input label. - :type input: LayerOutput. + :type input: LayerOutput :param name: The name of this layer. It is optional. - :type name: None | basestring. - :param coeff: The coefficient affects the gradient in the backward. - :type coeff: float. + :type name: basestring + :param coeff: The weight of the gradient in the back propagation. + 1.0 is the default. + :type coeff: float :param softmax_selfnorm_alpha: The scale factor affects the cost. - :type softmax_selfnorm_alpha: float. - :param layer_attr: Extra Layer Attribute. + :type softmax_selfnorm_alpha: float + :param layer_attr: The extra layer attribute. See ExtraLayerAttribute for + details. :type layer_attr: ExtraLayerAttribute :return: LayerOutput object. - :rtype: LayerOutput. + :rtype: LayerOutput """ Layer( name=name, @@ -5852,7 +5836,7 @@ def cross_entropy_with_selfnorm(input, @layer_support() def sum_cost(input, name=None, layer_attr=None): """ - A loss layer which calculate the sum of the input as loss + A loss layer which calculates the sum of the input as loss. The example usage is: @@ -5861,10 +5845,11 @@ def sum_cost(input, name=None, layer_attr=None): cost = sum_cost(input=input_layer) :param input: The input of this layer. - :type input: LayerOutput. + :type input: LayerOutput :param name: The name of this layer. It is optional. - :type name: None | basestring. - :param layer_attr: Extra Layer Attribute. + :type name: basestring + :param layer_attr: The extra layer attribute. See ExtraLayerAttribute for + details. :type layer_attr: ExtraLayerAttribute :return: LayerOutput object. :rtype: LayerOutput. @@ -5904,16 +5889,18 @@ def huber_regression_cost(input, cost = huber_regression_cost(input=input_layer, label=label_layer) :param input: The first input layer. - :type input: LayerOutput. + :type input: LayerOutput :param label: The input label. - :type input: LayerOutput. 
+ :type input: LayerOutput :param name: The name of this layer. It is optional. - :type name: None | basestring. + :type name: basestring :param delta: The difference between the observed and predicted values. - :type delta: float. - :param coeff: The coefficient affects the gradient in the backward. - :type coeff: float. - :param layer_attr: Extra Layer Attribute. + :type delta: float + :param coeff: The weight of the gradient in the back propagation. + 1.0 is the default. + :type coeff: float + :param layer_attr: The extra layer attribute. See ExtraLayerAttribute for + details. :type layer_attr: ExtraLayerAttribute :return: LayerOutput object. :rtype: LayerOutput. @@ -5954,17 +5941,19 @@ def huber_classification_cost(input, cost = huber_classification_cost(input=input_layer, label=label_layer) :param input: The first input layer. - :type input: LayerOutput. + :type input: LayerOutput :param label: The input label. - :type input: LayerOutput. + :type input: LayerOutput :param name: The name of this layer. It is optional. - :type name: None | basestring. - :param coeff: The coefficient affects the gradient in the backward. - :type coeff: float. - :param layer_attr: Extra Layer Attribute. + :type name: basestring + :param coeff: The weight of the gradient in the back propagation. + 1.0 is the default. + :type coeff: float + :param layer_attr: The extra layer attribute. See ExtraLayerAttribute for + details. :type layer_attr: ExtraLayerAttribute :return: LayerOutput object. - :rtype: LayerOutput. + :rtype: LayerOutput """ assert isinstance(input, LayerOutput) if input.size is not None: @@ -6001,10 +5990,12 @@ def multi_binary_label_cross_entropy(input, :param label: The input label. :type input: LayerOutput :param name: The name of this layer. It is optional. - :type name: None | basestring - :param coeff: The coefficient affects the gradient in the backward. + :type name: basestring + :param coeff: The weight of the gradient in the back propagation. + 1.0 is the default. :type coeff: float - :param layer_attr: Extra Layer Attribute. + :param layer_attr: The extra layer attribute. See ExtraLayerAttribute for + details. :type layer_attr: ExtraLayerAttribute :return: LayerOutput object. :rtype: LayerOutput @@ -6107,7 +6098,7 @@ def cross_entropy_over_beam(input, name=None): :param input: Input beams for this layer. :type input: BeamInput - :param name: The name of this layer. + :param name: The name of this layer. It is optional. :type name: basestring :return: LayerOutput object. :rtype: LayerOutput @@ -6142,7 +6133,7 @@ def cross_entropy_over_beam(input, name=None): def smooth_l1_cost(input, label, name=None, coeff=1.0, layer_attr=None): """ This is a L1 loss but more smooth. It requires that the - size of input and label are equal. The formula is as follows, + sizes of input and label are equal. The formula is as follows, .. math:: @@ -6154,8 +6145,9 @@ def smooth_l1_cost(input, label, name=None, coeff=1.0, layer_attr=None): smooth_{L1}(x) = \\begin{cases} 0.5x^2& \\text{if} \\ |x| < 1 \\\\ |x|-0.5& \\text{otherwise} \end{cases} - More details can be found by referring to `Fast R-CNN - `_ + Reference: + Fast R-CNN + https://arxiv.org/pdf/1504.08083v2.pdf The example usage is: @@ -6169,10 +6161,12 @@ def smooth_l1_cost(input, label, name=None, coeff=1.0, layer_attr=None): :param label: The input label. :type input: LayerOutput :param name: The name of this layer. It is optional. - :type name: None | basestring - :param coeff: The coefficient affects the gradient in the backward. 
+ :type name: basestring + :param coeff: The weight of the gradient in the back propagation. + 1.0 is the default. :type coeff: float - :param layer_attr: Extra Layer Attribute. + :param layer_attr: The extra layer attribute. See ExtraLayerAttribute for + details. :type layer_attr: ExtraLayerAttribute :return: LayerOutput object. :rtype: LayerOutput @@ -6194,12 +6188,12 @@ def smooth_l1_cost(input, label, name=None, coeff=1.0, layer_attr=None): @wrap_name_default() def multiplex_layer(input, name=None, layer_attr=None): """ - This layer multiplex multiple layers according to the index, - which is provided by the first input layer. - inputs[0]: the index of the layer to output of size batchSize. + This layer multiplexes multiple layers according to the indexes, + which are provided by the first input layer. + inputs[0]: the indexes of the layers to form the output of size batchSize. inputs[1:N]; the candidate output data. - For each index i from 0 to batchSize -1, the output is the i-th row of the - (index[i] + 1)-th layer. + For each index i from 0 to batchSize - 1, the i-th row of the output is + the same as the i-th row of the (index[i] + 1)-th layer. For each i-th row of output: .. math:: @@ -6218,7 +6212,8 @@ def multiplex_layer(input, name=None, layer_attr=None): :type input: list of LayerOutput :param name: The name of this layer. It is optional. :type name: basestring - :param layer_attr: extra layer attributes. + :param layer_attr: The extra layer attribute. See ExtraLayerAttribute for + details. :type layer_attr: ExtraLayerAttribute. :return: LayerOutput object. :rtype: LayerOutput @@ -6322,14 +6317,14 @@ def row_conv_layer(input, :type context_len: int :param act: Activation Type. LinearActivation is the default. :type act: BaseActivation - :param param_attr: The Parameter Attribute. If None, the parameter will be - initialized smartly. It's better to set it by yourself. + :param param_attr: The parameter attribute. See ParameterAttribute for + details. :type param_attr: ParameterAttribute - :param layer_attr: Extra Layer config. + :param layer_attr: The extra layer attribute. See ExtraLayerAttribute for + details. :type layer_attr: ExtraLayerAttribute | None :return: LayerOutput object. :rtype: LayerOutput - """ assert isinstance(input, LayerOutput) assert context_len > 0, "the context_len must be greatet than 0." @@ -6354,7 +6349,7 @@ def prelu_layer(input, param_attr=None, layer_attr=None): """ - The Parameter Relu activation that actives outputs with a learnable weight. + The Parametric Relu activation that activates outputs with a learnable weight. Reference: Delving Deep into Rectifiers: Surpassing Human-Level Performance on @@ -6374,16 +6369,17 @@ def prelu_layer(input, :type name: basestring :param input: The input of this layer. :type input: LayerOutput - :param partial_sum: this parameter makes a group of inputs share a same weight. + :param partial_sum: this parameter makes a group of inputs share the same weight. - partial_sum = 1, indicates the element-wise activation: each element has a weight. - - partial_sum = number of elements in one channel, indicates the channel-wise activation, elements in a channel share a same weight. - - partial_sum = number of outputs, indicates all elements share a same weight. + - partial_sum = number of elements in one channel, indicates the channel-wise activation, elements in a channel share the same weight. + - partial_sum = number of outputs, indicates all elements share the same weight.
:type partial_sum: int :param param_attr: The parameter attribute. See ParameterAttribute for details. - :type param_attr: ParameterAttribute | None - :param layer_attr: Extra layer configurations. Default is None. + :type param_attr: ParameterAttribute + :param layer_attr: The extra layer attribute. See ExtraLayerAttribute for + details. :type layer_attr: ExtraLayerAttribute | None :return: LayerOutput object. :rtype: LayerOutput @@ -6439,34 +6435,34 @@ def gated_unit_layer(input, :param input: The input of this layer. :type input: LayerOutput - :param size: output size of the gated unit. + :param size: The dimension of this layer's output. :type size: int - :param act: Activation type of the projected input. LinearActivation is the default. + :param act: Activation type of the projection. LinearActivation is the default. :type act: BaseActivation :param name: The name of this layer. It is optional. :type name: basestring - :param gate_attr: Attributes to tune the gate output, for example, error - clipping threshold, dropout and so on. See ExtraLayerAttribute for - more details. + :param gate_attr: The extra layer attribute of the gate. See ExtraLayerAttribute for + details. :type gate_attr: ExtraLayerAttribute | None - :param gate_param_attr: Attributes to tune the learnable projected matrix - parameter of the gate. - :type gate_param_attr: ParameterAttribute | None - :param gate_bias_attr: Attributes to tune the learnable bias of the gate. - :type gate_bias_attr: ParameterAttribute | None - :param inproj_attr: Attributes to the tune the projected input, for - example, error clipping threshold, dropout and so on. See - ExtraLayerAttribute for more details. + :param gate_param_attr: The parameter attribute of the gate. See ParameterAttribute + for details. + :type gate_param_attr: ParameterAttribute + :param gate_bias_attr: The bias attribute of the gate. If the parameter is set to False or + an object whose type is not ParameterAttribute, no bias is defined. + If the parameter is set to True, the bias is initialized to zero. + :type gate_bias_attr: ParameterAttribute | bool | None | Any + :param inproj_attr: Extra layer attributes of the projection. See ExtraLayerAttribute for + details. :type inproj_attr: ExtraLayerAttribute | None - :param inproj_param_attr: Attributes to tune the learnable parameter of - the projection of input. - :type inproj_param_attr: ParameterAttribute | None - :param inproj_bias_attr: Attributes to tune the learnable bias of - projection of the input. - :type inproj_bias_attr: ParameterAttribute | None - :param layer_attr: Attributes to tune the final output of the gated unit, - for example, error clipping threshold, dropout and so on. See - ExtraLayerAttribute for more details. + :param inproj_param_attr: The parameter attribute of the projection. See ParameterAttribute + for details. + :type inproj_param_attr: ParameterAttribute + :param inproj_bias_attr: The bias attribute of the projection. If the parameter is set to False + or an object whose type is not ParameterAttribute, no bias is defined. + If the parameter is set to True, the bias is initialized to zero. + :type inproj_bias_attr: ParameterAttribute | bool | None | Any + :param layer_attr: Extra layer attribute of the product. See ExtraLayerAttribute for + details. :type layer_attr: ExtraLayerAttribute | None :return: LayerOutput object. :rtype: LayerOutput @@ -6662,9 +6658,9 @@ def clip_layer(input, min, max, name=None): :param input: The input of this layer. :type input: LayerOutput. 
:param min: The lower threshold for clipping. - :type min: double + :type min: float :param max: The upper threshold for clipping. - :type max: double + :type max: float :return: LayerOutput object. :rtype: LayerOutput """ @@ -6712,7 +6708,6 @@ def seq_slice_layer(input, starts, ends, name=None): :type ends: LayerOutput | None :return: LayerOutput object. :rtype: LayerOutput - """ assert isinstance(input, LayerOutput), ( @@ -6833,20 +6828,21 @@ def img_conv3d_layer(input, :param padding: The numbers of padding along three axises. If the parameter is set to one integer, they will be same. :type padding: int | tuple | list - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :param num_channels: The number of input channels. If the parameter is not set or set to None, its actual value will be automatically set to the channels number of the input . :type num_channels: int - :param param_attr: The parameter attribute of the convolution. + :param param_attr: The parameter attribute of the convolution. See ParameterAttribute for + details. :type param_attr: ParameterAttribute :param shared_biases: Whether biases will be shared between filters or not. :type shared_biases: bool - :param layer_attr: Extra layer attributes. + :param layer_attr: The extra layer attributes. See ExtraLayerAttribute for + details. :type layer_attr: ExtraLayerAttribute :param trans: True if it is a convTransLayer, False if it is a convLayer :type trans: bool @@ -6953,12 +6949,12 @@ def scale_shift_layer(input, name=None, param_attr=None, bias_attr=None): :type name: basestring :param input: The input of this layer. :type input: LayerOutput - :param param_attr: The parameter attribute of scaling. + :param param_attr: The parameter attribute of scaling. See ParameterAttribute for + details. :type param_attr: ParameterAttribute - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :return: LayerOutput object. :rtype: LayerOutput @@ -7016,10 +7012,9 @@ def sub_seq_layer(input, offsets, sizes, act=None, bias_attr=None, name=None): :type sizes: LayerOutput :param act: Activation type, LinearActivation is the default. :type act: BaseActivation. - :param bias_attr: The Bias Attribute. If the parameter is set to - False or something not type of ParameterAttribute, - no bias is defined. If the parameter is set to - True, the bias is initialized to zero. + :param bias_attr: The bias attribute. If the parameter is set to False or an object + whose type is not ParameterAttribute, no bias is defined. If the + parameter is set to True, the bias is initialized to zero. :type bias_attr: ParameterAttribute | None | bool | Any :return: LayerOutput object. :rtype: LayerOutput
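For reference, a minimal sketch of the bias_attr convention these docstrings describe, assuming the v1 paddle.trainer_config_helpers API; the layer sizes and attribute values below are illustrative only and are not part of the patch.

.. code-block:: python

    from paddle.trainer_config_helpers import *

    data = data_layer(name='input', size=128)

    # bias_attr=False (or, per the docstring, any value whose type is not
    # ParameterAttribute): no bias is defined for the layer.
    fc_no_bias = fc_layer(input=data, size=64, act=ReluActivation(), bias_attr=False)

    # bias_attr=True: a bias parameter is created and initialized to zero.
    fc_zero_bias = fc_layer(input=data, size=64, act=ReluActivation(), bias_attr=True)

    # bias_attr=ParameterAttribute(...): the bias uses the given attribute
    # (the name and values here are hypothetical, for illustration only).
    fc_custom_bias = fc_layer(
        input=data,
        size=64,
        act=ReluActivation(),
        bias_attr=ParamAttr(name='fc.b', initial_std=0.0, learning_rate=2.0))

The same three-way convention applies to every bias_attr parameter touched by this patch.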