提交 3e177b9d 编写于 作者: W walloollaw 提交者: qingqing01

ISSUE-10996: support name setting for layers topk/softmax/concat/dropout (#10999)

上级 60783a75
...@@ -855,7 +855,7 @@ def cos_sim(X, Y):
    return out

def dropout(x, dropout_prob, is_test=False, seed=None, name=None):
    """
    Computes dropout.
...@@ -873,6 +873,8 @@ def dropout(x, dropout_prob, is_test=False, seed=None):
            parameter is set to None, a random seed is used.
            NOTE: If an integer seed is given, always the same output
            units will be dropped. DO NOT use a fixed seed in training.
        name(str|None): A name for this layer(optional). If set None, the layer
            will be named automatically.

    Returns:
        Variable: A tensor variable.
...@@ -1117,7 +1119,7 @@ def sequence_softmax(input, param_attr=None, bias_attr=None, use_cudnn=True):
    return softmax_out

def softmax(input, param_attr=None, bias_attr=None, use_cudnn=True, name=None):
    helper = LayerHelper('softmax', **locals())
    dtype = helper.input_dtype()
    softmax_out = helper.create_tmp_variable(dtype)
...@@ -2610,7 +2612,7 @@ def matmul(x, y, transpose_x=False, transpose_y=False, name=None):
    return out

def topk(input, k, name=None):
    """
    This operator is used to find values and indices of the k largest entries
    for the last dimension.
...@@ -2626,6 +2628,8 @@ def topk(input, k):
        input(Variable): The input variable which can be a vector or Tensor with
            higher rank.
        k(int): An integer value to specify the top k largest elements.
        name(str|None): A name for this layer(optional). If set None, the layer
            will be named automatically.

    Returns:
        values(Variable): The k largest elements along each last dimensional
......
...@@ -112,7 +112,7 @@ def cast(x, dtype):
    return out

def concat(input, axis=0, name=None):
    """
    **Concat**
...@@ -122,6 +122,8 @@ def concat(input, axis=0):
    Args:
        input(list): List of tensors to be concatenated
        axis(int): Integer axis along which the tensors will be concatenated
        name(str|None): A name for this layer(optional). If set None, the layer
            will be named automatically.

    Returns:
        Variable: Output variable of the concatenation
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册