diff --git a/mindspore/ops/operations/array_ops.py b/mindspore/ops/operations/array_ops.py
index 94e64c23d04f7835ba21d6301e0888556c9456b3..694c4f1d74b011a7e4df5b7cea8d45871d024104 100644
--- a/mindspore/ops/operations/array_ops.py
+++ b/mindspore/ops/operations/array_ops.py
@@ -2241,7 +2241,8 @@ class ScatterMax(PrimitiveWithInfer):
     """
     Update the value of the input tensor through the max operation.

-    Using given values to update tensor value through the max operation, along with the input indices,.
+    Using given values to update tensor value through the max operation, along with the input indices.
+    This operation outputs the `input_x` after the update is done, which makes it convenient to use the updated value.

     Args:
         use_locking (bool): Whether protect the assignment by a lock. Default: True.
@@ -2253,7 +2254,7 @@
           the data type is same as `input_x`, the shape is `indices_shape + x_shape[1:]`.

     Outputs:
-        Tensor, has the same shape and data type as `input_x`.
+        Parameter, the updated `input_x`.

     Examples:
         >>> input_x = Parameter(Tensor(np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]), mindspore.float32), name="input_x")
@@ -2286,6 +2287,7 @@ class ScatterAdd(PrimitiveWithInfer):
     Update the value of the input tensor through the add operation.

     Using given values to update tensor value through the add operation, along with the input indices.
+    This operation outputs the `input_x` after the update is done, which makes it convenient to use the updated value.

     Args:
         use_locking (bool): Whether protect the assignment by a lock. Default: False.
@@ -2297,7 +2299,7 @@
           the data type is same as `input_x`, the shape is `indices_shape + x_shape[1:]`.

     Outputs:
-        Tensor, has the same shape and data type as `input_x`.
+        Parameter, the updated `input_x`.

     Examples:
         >>> input_x = Parameter(Tensor(np.array([[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]), mindspore.float32), name="x")
diff --git a/mindspore/ops/operations/nn_ops.py b/mindspore/ops/operations/nn_ops.py
index 0701db8c3bef4a84bf4adf4c62bfb096b54e11f2..dd3339fe8a74d4416b4a202e40b045d136545170 100644
--- a/mindspore/ops/operations/nn_ops.py
+++ b/mindspore/ops/operations/nn_ops.py
@@ -3407,12 +3407,12 @@ class BasicLSTMCell(PrimitiveWithInfer):
     Outputs:
         - **ct** (Tensor) - Forward :math:`c_t` cache at moment `t`. Tensor of shape (`batch_size`, `hidden_size`).
         - **ht** (Tensor) - Cell output. Tensor of shape (`batch_size`, `hidden_size`).
-        - **it** (Tensor) - Forward :math:`i_t` cache at moment `t`. Tensor of shape (`batch_size`, `4 x hidden_size`).
-        - **jt** (Tensor) - Forward :math:`j_t` cache at moment `t`. Tensor of shape (`batch_size`, `4 x hidden_size`).
-        - **ft** (Tensor) - Forward :math:`f_t` cache at moment `t`. Tensor of shape (`batch_size`, `4 x hidden_size`).
-        - **ot** (Tensor) - Forward :math:`o_t` cache at moment `t`. Tensor of shape (`batch_size`, `4 x hidden_size`).
+        - **it** (Tensor) - Forward :math:`i_t` cache at moment `t`. Tensor of shape (`batch_size`, `hidden_size`).
+        - **jt** (Tensor) - Forward :math:`j_t` cache at moment `t`. Tensor of shape (`batch_size`, `hidden_size`).
+        - **ft** (Tensor) - Forward :math:`f_t` cache at moment `t`. Tensor of shape (`batch_size`, `hidden_size`).
+        - **ot** (Tensor) - Forward :math:`o_t` cache at moment `t`. Tensor of shape (`batch_size`, `hidden_size`).
         - **tanhct** (Tensor) - Forward :math:`tanh c_t` cache at moment `t`.
-          Tensor of shape (`batch_size`, `4 x hidden_size`).
+          Tensor of shape (`batch_size`, `hidden_size`).

     Examples:
         'block': P.BasicLSTMCell(keep_prob=1.0, forget_bias=1.0, state_is_tuple=True, activation='tanh'),
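
For context (not part of the patch): a minimal usage sketch of what the new ScatterAdd wording describes, i.e. the op returning the updated `input_x` itself. Shapes and values follow the docstring example shown in the hunk; the imports and the direct (eager/PyNative-style) call are assumptions added here for a self-contained snippet.

import numpy as np
import mindspore
from mindspore import Parameter, Tensor
from mindspore.ops import operations as P

# Inputs taken from the docstring example: updates has shape indices_shape + x_shape[1:] = (1, 2, 3).
input_x = Parameter(Tensor(np.array([[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]), mindspore.float32), name="x")
indices = Tensor(np.array([[0, 1]]), mindspore.int32)
updates = Tensor(np.ones([1, 2, 3]), mindspore.float32)

scatter_add = P.ScatterAdd()
# Per the updated docstring, the return value is the updated `input_x` (a Parameter),
# so the result of the in-place update can be used directly in later computation.
output = scatter_add(input_x, indices, updates)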