Unverified commit 86ca31ab authored by Cindy Cai, committed by GitHub

English API Docs Optimization Part 1 (#24536)

* test=develop, test=document_fix

* test=develop, test=document_fix
Co-authored-by: swtkiwi <1208425345@qq.com>
Parent 2d0f849e
......@@ -1196,6 +1196,8 @@ def append_backward(loss,
callbacks=None,
checkpoints=None):
"""
:api_attr: Static Graph
This function appends the backward part to main_program.
A complete neural network training is made up of forward and backward
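A minimal sketch of how this fits together on this branch's fluid 1.x API (network shape and names are illustrative):

    import paddle.fluid as fluid

    x = fluid.data(name='x', shape=[None, 13], dtype='float32')
    y = fluid.data(name='y', shape=[None, 1], dtype='float32')
    pred = fluid.layers.fc(input=x, size=1)
    loss = fluid.layers.mean(fluid.layers.square_error_cost(input=pred, label=y))

    # Appends gradient ops for everything `loss` depends on and
    # returns a list of (parameter, gradient) Variable pairs.
    param_grads = fluid.backward.append_backward(loss)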
......@@ -1724,6 +1726,8 @@ def calc_gradient(targets, inputs, target_gradients=None, no_grad_set=None):
def gradients(targets, inputs, target_gradients=None, no_grad_set=None):
"""
:api_attr: Static Graph
Backpropagate the gradients of targets to inputs.
Args:
......
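``gradients`` is the finer-grained counterpart: rather than appending a full backward pass for a loss, it backpropagates only from the chosen targets to the chosen inputs. A hedged sketch:

    import paddle.fluid as fluid

    x = fluid.data(name='x', shape=[None, 2], dtype='float32')
    x.stop_gradient = False        # fluid.data defaults to stop_gradient=True
    y = fluid.layers.relu(x)
    # One gradient Variable is returned per entry in `inputs`.
    grads = fluid.gradients(targets=[y], inputs=[x])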
......@@ -158,6 +158,10 @@ class GradientClipBase(object):
class GradientClipByValue(GradientClipBase):
"""
:alias_main: paddle.nn.GradientClipByValue
:alias: paddle.nn.GradientClipByValue,paddle.nn.clip.GradientClipByValue
:old_api: paddle.fluid.clip.GradientClipByValue
Limit the value of multi-dimensional Tensor :math:`X` to the range [min, max].
- Any values less than min are set to ``min``.
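A short usage sketch for the clip classes; it assumes the ``grad_clip`` optimizer argument available on this branch (values are illustrative):

    import paddle.fluid as fluid

    clip = fluid.clip.GradientClipByValue(max=1.0, min=-1.0)
    # GradientClipByNorm(clip_norm=...) and
    # GradientClipByGlobalNorm(clip_norm=...) are passed the same way.
    sgd = fluid.optimizer.SGD(learning_rate=0.1, grad_clip=clip)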
......@@ -296,6 +300,10 @@ class GradientClipByValue(GradientClipBase):
class GradientClipByNorm(GradientClipBase):
"""
:alias_main: paddle.nn.GradientClipByNorm
:alias: paddle.nn.GradientClipByNorm,paddle.nn.clip.GradientClipByNorm
:old_api: paddle.fluid.clip.GradientClipByNorm
Limit the l2 norm of multi-dimensional Tensor :math:`X` to ``clip_norm`` .
- If the l2 norm of :math:`X` is greater than ``clip_norm`` , :math:`X` will be compressed by a ratio.
......@@ -447,6 +455,10 @@ class GradientClipByNorm(GradientClipBase):
class GradientClipByGlobalNorm(GradientClipBase):
"""
:alias_main: paddle.nn.GradientClipByGlobalNorm
:alias: paddle.nn.GradientClipByGlobalNorm,paddle.nn.clip.GradientClipByGlobalNorm
:old_api: paddle.fluid.clip.GradientClipByGlobalNorm
Given a list of Tensor :math:`t\_list` , calculate the global norm for the elements of all tensors in
:math:`t\_list` , and limit it to ``clip_norm`` .
......@@ -691,6 +703,8 @@ class GradientClipByGlobalNorm(GradientClipBase):
@framework.dygraph_not_support
def set_gradient_clip(clip, param_list=None, program=None):
"""
:api_attr: Static Graph
Warning:
This API must be used after building network, and before ``minimize`` ,
......
......@@ -86,6 +86,8 @@ def _has_optimizer_in_control_flow(program):
class CompiledProgram(object):
"""
:api_attr: Static Graph
The CompiledProgram is used to transform a program or graph for
various optimizations according to the configuration of build_strategy,
for example, operator fusion in the computation graph, memory
......
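A minimal sketch of the wrapping step, with ``loss`` standing in for a loss Variable built in the default main program beforehand:

    import paddle.fluid as fluid

    exe = fluid.Executor(fluid.CPUPlace())
    exe.run(fluid.default_startup_program())

    compiled_prog = fluid.CompiledProgram(
        fluid.default_main_program()).with_data_parallel(loss_name=loss.name)
    # `compiled_prog` is then passed to Executor.run exactly like a Program.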
......@@ -24,6 +24,11 @@ __all__ = ['data']
def data(name, shape, dtype='float32', lod_level=0):
"""
:api_attr: Static Graph
:alias_main: paddle.nn.data
:alias: paddle.nn.data,paddle.nn.input.data
:old_api: paddle.fluid.data
**Data Layer**
This function creates a variable on the global block. The global variable
......
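For instance, two batch-size-agnostic input slots (``None`` marks the dynamic dimension):

    import paddle.fluid as fluid

    image = fluid.data(name='image', shape=[None, 3, 32, 32], dtype='float32')
    label = fluid.data(name='label', shape=[None, 1], dtype='int64')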
......@@ -20,6 +20,8 @@ __all__ = ['DataFeedDesc']
class DataFeedDesc(object):
"""
:api_attr: Static Graph
Datafeed descriptor, describing the input training data format. This class is
currently only used for AsyncExecutor (See comments for class AsyncExecutor
for a brief introduction)
......
......@@ -211,6 +211,8 @@ class BatchedTensorProvider(object):
class DataFeeder(object):
"""
:api_attr: Static Graph
DataFeeder converts the data returned by a reader into a data
structure that can be fed into Executor. The reader is usually a
Python generator that returns a list of mini-batch data entries.
......
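A hedged sketch of the conversion, assuming the fluid 1.x API (data values are made up):

    import paddle.fluid as fluid

    x = fluid.data(name='x', shape=[None, 4], dtype='float32')
    y = fluid.data(name='y', shape=[None, 1], dtype='int64')
    feeder = fluid.DataFeeder(feed_list=[x, y], place=fluid.CPUPlace())
    # Each sample supplies one value per variable in feed_list;
    # feed() returns a dict that Executor.run can consume directly.
    feed_dict = feeder.feed([([0.1] * 4, [1]), ([0.2] * 4, [0])])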
......@@ -728,6 +728,8 @@ class InMemoryDataset(DatasetBase):
def release_memory(self):
"""
:api_attr: Static Graph
Release InMemoryDataset memory data when the data will not be used again.
Examples:
......
......@@ -111,6 +111,10 @@ def enabled():
def enable_dygraph(place=None):
"""
:alias_main: paddle.enable_dygraph
:alias: paddle.enable_dygraph,paddle.enable_imperative.enable_dygraph
:old_api: paddle.fluid.dygraph.base.enable_dygraph
This function enables dynamic graph mode.
Parameters:
......@@ -141,6 +145,10 @@ def enable_dygraph(place=None):
def disable_dygraph():
"""
:alias_main: paddle.disable_dygraph
:alias: paddle.disable_dygraph,paddle.disable_imperative.disable_dygraph
:old_api: paddle.fluid.dygraph.base.disable_dygraph
This function disables dynamic graph mode.
Returns:
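Paired with ``enable_dygraph`` above, a toggling sketch (assuming both functions are re-exported at the ``fluid`` level, as the aliases suggest):

    import paddle.fluid as fluid

    fluid.enable_dygraph()            # switch to imperative execution
    print(fluid.in_dygraph_mode())    # True
    fluid.disable_dygraph()           # back to static-graph mode
    print(fluid.in_dygraph_mode())    # False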
......@@ -178,6 +186,8 @@ def _switch_tracer_mode_guard_(is_train=True):
def no_grad(func=None):
"""
:api_attr: imperative
Create a context which disables dygraph gradient calculation.
In this mode, the result of every computation will have `stop_gradient=True`.
......@@ -236,6 +246,8 @@ def no_grad(func=None):
@signature_safe_contextmanager
def guard(place=None):
"""
:api_attr: imperative
This context manager creates a dygraph context for dygraph to run in, using the Python ``with`` statement.
Parameters:
......@@ -520,6 +532,8 @@ def grad(outputs,
@framework.dygraph_only
def to_variable(value, name=None, zero_copy=None):
"""
:api_attr: imperative
This API creates a ``Variable`` or ``ComplexVariable`` object from a
numpy\.ndarray, Variable or ComplexVariable object.
......
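``guard`` and ``to_variable`` are typically used together; a minimal sketch:

    import numpy as np
    import paddle.fluid as fluid

    with fluid.dygraph.guard():                  # dygraph context
        x = fluid.dygraph.to_variable(np.ones([2, 2], dtype='float32'))
        y = x * 2 + 1                            # executed eagerly
        print(y.numpy())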
......@@ -32,6 +32,8 @@ __all__ = [
@dygraph_only
def save_dygraph(state_dict, model_path):
'''
:api_attr: imperative
Save a Layer's state_dict to disk. This will generate a file with suffix ".pdparams".
The state_dict is obtained from the Layer's state_dict function.
......@@ -95,6 +97,8 @@ def save_dygraph(state_dict, model_path):
@dygraph_only
def load_dygraph(model_path, keep_name_table=False):
'''
:api_attr: imperative
Load parameter state_dict from disk.
Args:
......
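A round-trip sketch for the two functions; the prefix ``"hypothetical_model"`` is illustrative:

    import paddle.fluid as fluid

    with fluid.dygraph.guard():
        layer = fluid.dygraph.Linear(8, 2)
        # Writes hypothetical_model.pdparams next to the given prefix.
        fluid.save_dygraph(layer.state_dict(), "hypothetical_model")
        param_state, opt_state = fluid.load_dygraph("hypothetical_model")
        layer.set_dict(param_state)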
......@@ -203,6 +203,8 @@ def _trace(layer,
class TracedLayer(object):
"""
:api_attr: imperative
TracedLayer is used to convert a forward dygraph model to a static
graph model. This is mainly used to save the dygraph model for online
inference using C++. Besides, users can also do inference in Python
......
......@@ -58,7 +58,12 @@ class HookRemoveHelper(object):
class Layer(core.Layer):
"""Dynamic graph Layer based on OOD, includes the parameters of the layer, the structure of the forward graph and so on.
"""
:alias_main: paddle.nn.Layer
:alias: paddle.nn.Layer
:old_api: paddle.fluid.dygraph.layers.Layer
Dynamic graph Layer based on OOD, includes the parameters of the layer, the structure of the forward graph and so on.
Parameters:
name_scope (str, optional): prefix name used by the layer to name parameters.
......
......@@ -69,6 +69,8 @@ class LearningRateDecay(object):
class PiecewiseDecay(LearningRateDecay):
"""
:api_attr: imperative
Piecewise decay scheduler.
The algorithm can be described as the code below.
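A usage sketch, assuming the dygraph optimizer API of this branch (boundaries and values are illustrative):

    import paddle.fluid as fluid

    with fluid.dygraph.guard():
        emb = fluid.dygraph.Embedding(size=[10, 10])
        # lr is 1.0 until step 10000, 0.5 until 20000, then 0.1.
        scheduler = fluid.dygraph.PiecewiseDecay(
            boundaries=[10000, 20000], values=[1.0, 0.5, 0.1], begin=0)
        sgd = fluid.optimizer.SGD(learning_rate=scheduler,
                                  parameter_list=emb.parameters())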
......@@ -128,6 +130,8 @@ class PiecewiseDecay(LearningRateDecay):
class NaturalExpDecay(LearningRateDecay):
"""
:api_attr: imperative
Applies natural exponential decay to the initial learning rate.
The algorithm can be described as follows.
......@@ -207,6 +211,8 @@ class NaturalExpDecay(LearningRateDecay):
class ExponentialDecay(LearningRateDecay):
"""
:api_attr: imperative
Applies exponential decay to the learning rate.
The algorithm can be described as follows.
......@@ -287,6 +293,8 @@ class ExponentialDecay(LearningRateDecay):
class InverseTimeDecay(LearningRateDecay):
"""
:api_attr: imperative
Applies inverse time decay to the initial learning rate.
The algorithm can be described as follows.
......@@ -363,6 +371,8 @@ class InverseTimeDecay(LearningRateDecay):
class PolynomialDecay(LearningRateDecay):
"""
:api_attr: imperative
Applies polynomial decay to the initial learning rate.
The algorithm can be described as follows.
......@@ -455,6 +465,8 @@ class PolynomialDecay(LearningRateDecay):
class CosineDecay(LearningRateDecay):
"""
:api_attr: imperative
Applies cosine decay to the learning rate.
The algorithm can be described as follows.
......@@ -511,6 +523,8 @@ class CosineDecay(LearningRateDecay):
class NoamDecay(LearningRateDecay):
"""
:api_attr: imperative
Applies Noam decay to the initial learning rate.
The algorithm can be described as follows.
......
......@@ -696,6 +696,10 @@ class Conv3DTranspose(layers.Layer):
class Pool2D(layers.Layer):
"""
:alias_main: paddle.nn.Pool2D
:alias: paddle.nn.Pool2D,paddle.nn.layer.Pool2D,paddle.nn.layer.common.Pool2D
:old_api: paddle.fluid.dygraph.Pool2D
This interface is used to construct a callable object of the ``Pool2D`` class.
For more details, refer to code examples.
The pooling2d operation calculates the output based on the input, pool_type and pool_size, pool_stride,
......@@ -867,6 +871,10 @@ class Pool2D(layers.Layer):
class Linear(layers.Layer):
"""
:alias_main: paddle.nn.Linear
:alias: paddle.nn.Linear,paddle.nn.layer.Linear,paddle.nn.layer.common.Linear
:old_api: paddle.fluid.dygraph.Linear
Fully-connected linear transformation layer:
.. math::
......@@ -1100,6 +1108,10 @@ class InstanceNorm(layers.Layer):
class BatchNorm(layers.Layer):
"""
:alias_main: paddle.nn.BatchNorm
:alias: paddle.nn.BatchNorm,paddle.nn.layer.BatchNorm,paddle.nn.layer.norm.BatchNorm
:old_api: paddle.fluid.dygraph.BatchNorm
This interface is used to construct a callable object of the ``BatchNorm`` class.
For more details, refer to code examples.
It implements the function of the Batch Normalization Layer and can be used
......@@ -1443,6 +1455,10 @@ class Dropout(layers.Layer):
class Embedding(layers.Layer):
"""
:alias_main: paddle.nn.Embedding
:alias: paddle.nn.Embedding,paddle.nn.layer.Embedding,paddle.nn.layer.common.Embedding
:old_api: paddle.fluid.dygraph.Embedding
**Embedding Layer**
This interface is used to construct a callable object of the ``Embedding`` class.
......@@ -1599,6 +1615,10 @@ class Embedding(layers.Layer):
class LayerNorm(layers.Layer):
"""
:alias_main: paddle.nn.LayerNorm
:alias: paddle.nn.LayerNorm,paddle.nn.layer.LayerNorm,paddle.nn.layer.norm.LayerNorm
:old_api: paddle.fluid.dygraph.LayerNorm
This interface is used to construct a callable object of the ``LayerNorm`` class.
For more details, refer to code examples.
It implements the function of the Layer Normalization Layer and can be applied to mini-batch input data.
......@@ -2289,6 +2309,10 @@ class PRelu(layers.Layer):
class BilinearTensorProduct(layers.Layer):
"""
:alias_main: paddle.nn.BilinearTensorProduct
:alias: paddle.nn.BilinearTensorProduct,paddle.nn.layer.BilinearTensorProduct,paddle.nn.layer.common.BilinearTensorProduct
:old_api: paddle.fluid.dygraph.BilinearTensorProduct
**Add Bilinear Tensor Product Layer**
This layer performs a bilinear tensor product on two inputs.
......@@ -2809,6 +2833,10 @@ class RowConv(layers.Layer):
class GroupNorm(layers.Layer):
"""
:alias_main: paddle.nn.GroupNorm
:alias: paddle.nn.GroupNorm,paddle.nn.layer.GroupNorm,paddle.nn.layer.norm.GroupNorm
:old_api: paddle.fluid.dygraph.GroupNorm
This interface is used to construct a callable object of the ``GroupNorm`` class.
For more details, refer to code examples.
It implements the function of the Group Normalization Layer.
......@@ -2909,6 +2937,10 @@ class GroupNorm(layers.Layer):
class SpectralNorm(layers.Layer):
"""
:alias_main: paddle.nn.SpectralNorm
:alias: paddle.nn.SpectralNorm,paddle.nn.layer.SpectralNorm,paddle.nn.layer.norm.SpectralNorm
:old_api: paddle.fluid.dygraph.SpectralNorm
This interface is used to construct a callable object of the ``SpectralNorm`` class.
For more details, refer to code examples. It implements the function of the Spectral Normalization Layer.
This layer calculates the spectral normalization value of weight parameters of
......
......@@ -28,6 +28,9 @@ ParallelStrategy = core.ParallelStrategy
def prepare_context(strategy=None):
'''
:api_attr: imperative
'''
if strategy is None:
strategy = ParallelStrategy()
strategy.nranks = Env().nranks
......
......@@ -23,6 +23,8 @@ from paddle.fluid import framework
class Tracer(core.Tracer):
"""
:api_attr: imperative
Tracer is used to execute and record the operators executed, to construct the
computation graph in a dygraph model. Tracer has two modes, :code:`train_mode`
and :code:`eval_mode`. In :code:`train_mode`, Tracer would add backward network
......
......@@ -40,6 +40,8 @@ InferAnalysisConfig = core.AnalysisConfig
def global_scope():
"""
:api_attr: Static Graph
Get the global/default scope instance. Many APIs use
:code:`global_scope` as their default scope, e.g., :code:`Executor.run`
......@@ -68,6 +70,8 @@ def _switch_scope(scope):
@signature_safe_contextmanager
def scope_guard(scope):
"""
:api_attr: Static Graph
This function switches the scope through the Python `with` statement.
Scope records the mapping between variable names and variables ( :ref:`api_guide_Variable` ),
similar to brackets in programming languages.
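A minimal sketch of the switch, mirroring the pattern this docstring describes:

    import numpy as np
    import paddle.fluid as fluid

    new_scope = fluid.Scope()
    with fluid.scope_guard(new_scope):
        # Inside the block, global_scope() resolves to new_scope,
        # so this variable is created there.
        fluid.global_scope().var("data").get_tensor().set(
            np.ones((2, 2), dtype='float32'), fluid.CPUPlace())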
......@@ -456,6 +460,8 @@ handler = FetchHandlerExample(var_dict=var_dict)
class Executor(object):
"""
:api_attr: Static Graph
An Executor in Python; it supports single/multiple-GPU
and single/multiple-CPU execution.
......
......@@ -179,6 +179,10 @@ def require_version(min_version, max_version=None):
def in_dygraph_mode():
"""
:alias_main: paddle.in_dygraph_mode
:alias: paddle.in_dygraph_mode
:old_api: paddle.fluid.framework.in_dygraph_mode
This function checks whether the program runs in dynamic graph mode or not.
You can enter dynamic graph mode with :ref:`api_fluid_dygraph_guard` api,
or enable and disable dynamic graph mode with :ref:`api_fluid_dygraph_enable`
......@@ -436,6 +440,8 @@ _name_scope = NameScope()
@signature_safe_contextmanager
def name_scope(prefix=None):
"""
:api_attr: Static Graph
Generate a hierarchical name prefix for the operators.
Note:
......@@ -5277,6 +5283,8 @@ def switch_startup_program(program):
@signature_safe_contextmanager
def program_guard(main_program, startup_program=None):
"""
:api_attr: Static Graph
Change the global main program and startup program with the `"with"` statement.
Layer functions in the Python `"with"` block will append operators and
variables to the new main programs.
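For example, building layers into freshly created programs instead of the global defaults:

    import paddle.fluid as fluid

    main_prog = fluid.Program()
    startup_prog = fluid.Program()
    with fluid.program_guard(main_prog, startup_prog):
        # Ops and variables created here land in main_prog/startup_prog.
        x = fluid.data(name='x', shape=[None, 784], dtype='float32')
        hidden = fluid.layers.fc(input=x, size=10, act='relu')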
......@@ -5376,6 +5384,8 @@ def _dygraph_place_guard(place):
def load_op_library(lib_filename):
"""
:api_attr: Static Graph
Load a dynamic library, including custom operators and kernels.
When the library is loaded, ops and kernels registered in it
will be available in the PaddlePaddle main process.
......
......@@ -23,6 +23,9 @@ __all__ = ['one_hot', 'embedding']
def one_hot(input, depth, allow_out_of_range=False):
"""
:alias_main: paddle.nn.functional.one_hot
:alias: paddle.nn.functional.one_hot,paddle.nn.functional.common.one_hot
:old_api: paddle.fluid.one_hot
The operator converts each id in the input to a one-hot vector of
length ``depth``. The value in the vector dimension corresponding to the id
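A short sketch (``depth=4`` is illustrative):

    import paddle.fluid as fluid

    label = fluid.data(name='label', shape=[None, 1], dtype='int64')
    # Each id in [0, 4) becomes a length-4 row with a single 1.
    one_hot_label = fluid.one_hot(input=label, depth=4)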
......@@ -132,6 +135,7 @@ def embedding(input,
param_attr=None,
dtype='float32'):
"""
:api_attr: Static Graph
The operator is used to look up the embedding vectors of the ids provided by :attr:`input` .
It automatically constructs a 2D embedding matrix based on the
......
......@@ -124,6 +124,8 @@ def is_belong_to_optimizer(var):
@dygraph_not_support
def get_program_parameter(program):
"""
:api_attr: Static Graph
Get all the parameters from Program.
Args:
......@@ -147,6 +149,8 @@ def get_program_parameter(program):
@dygraph_not_support
def get_program_persistable_vars(program):
"""
:api_attr: Static Graph
Get all the persistable vars from Program.
Args:
......@@ -223,6 +227,8 @@ def save_vars(executor,
predicate=None,
filename=None):
"""
:api_attr: Static Graph
This API saves specific variables in the `Program` to files.
There are two ways to specify the variables to be saved: set variables in
......@@ -365,6 +371,8 @@ def save_vars(executor,
@dygraph_not_support
def save_params(executor, dirname, main_program=None, filename=None):
"""
:api_attr: Static Graph
This operator saves all parameters from the :code:`main_program` to
the folder :code:`dirname` or file :code:`filename`. You can refer to
:ref:`api_guide_model_save_reader_en` for more details.
......@@ -588,6 +596,8 @@ def _save_distributed_persistables(executor, dirname, main_program):
@dygraph_not_support
def save_persistables(executor, dirname, main_program=None, filename=None):
"""
:api_attr: Static Graph
This operator saves all persistable variables from :code:`main_program` to
the folder :code:`dirname` or file :code:`filename`. You can refer to
:ref:`api_guide_model_save_reader_en` for more details. And then
......@@ -661,6 +671,8 @@ def load_vars(executor,
predicate=None,
filename=None):
"""
:api_attr: Static Graph
This API loads variables from files via the executor.
There are two ways to specify the variables to be loaded: the first way, set
......@@ -829,6 +841,8 @@ def load_vars(executor,
@dygraph_not_support
def load_params(executor, dirname, main_program=None, filename=None):
"""
:api_attr: Static Graph
This API filters out all parameters from the given ``main_program``
and then tries to load these parameters from the directory ``dirname`` or
the file ``filename``.
......@@ -887,6 +901,8 @@ def load_params(executor, dirname, main_program=None, filename=None):
@dygraph_not_support
def load_persistables(executor, dirname, main_program=None, filename=None):
"""
:api_attr: Static Graph
This API filters out all variables with ``persistable==True`` from the
given ``main_program`` and then tries to load these variables from the
directory ``dirname`` or the file ``filename``.
......@@ -1084,6 +1100,8 @@ def save_inference_model(dirname,
export_for_deployment=True,
program_only=False):
"""
:api_attr: Static Graph
Prune the given `main_program` to build a new program especially for inference,
and then save it and all related parameters to the given `dirname` .
If you just want to save parameters of your trained model, please use the
......@@ -1288,6 +1306,8 @@ def load_inference_model(dirname,
params_filename=None,
pserver_endpoints=None):
"""
:api_attr: Static Graph
Load the inference model from a given directory. With this API, you can get the model
structure (Inference Program) and model parameters. If you just want to load
parameters of the pre-trained model, please use the :ref:`api_fluid_io_load_params` API.
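A save/load round trip; ``image`` and ``prediction`` stand in for variables of a previously built and trained network:

    import paddle.fluid as fluid

    exe = fluid.Executor(fluid.CPUPlace())
    fluid.io.save_inference_model(dirname="./infer_model",
                                  feeded_var_names=['image'],
                                  target_vars=[prediction],
                                  executor=exe)
    [infer_prog, feed_names, fetch_targets] = fluid.io.load_inference_model(
        dirname="./infer_model", executor=exe)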
......@@ -1577,6 +1597,11 @@ def _load_persistable_nodes(executor, dirname, graph):
@dygraph_not_support
def save(program, model_path):
"""
:api_attr: Static Graph
:alias_main: paddle.save
:alias: paddle.save,paddle.tensor.save,paddle.tensor.io.save
:old_api: paddle.fluid.save
This function saves the parameters, optimizer information and network description to model_path.
The parameters contain all the trainable Variables and are saved to a file with suffix ".pdparams".
......@@ -1636,6 +1661,11 @@ def save(program, model_path):
@dygraph_not_support
def load(program, model_path, executor=None, var_list=None):
"""
:api_attr: Static Graph
:alias_main: paddle.load
:alias: paddle.load,paddle.tensor.load,paddle.tensor.io.load
:old_api: paddle.fluid.io.load
This function gets the parameters and optimizer information from the program, and then loads the corresponding values from file.
An exception will be thrown if the shape or dtype of a parameter does not match.
......@@ -1803,6 +1833,8 @@ def load(program, model_path, executor=None, var_list=None):
@dygraph_not_support
def load_program_state(model_path, var_list=None):
"""
:api_attr: Static Graph
Load the program state from a local file.
Args:
......@@ -1934,6 +1966,8 @@ def load_program_state(model_path, var_list=None):
@dygraph_not_support
def set_program_state(program, state_dict):
"""
:api_attr: Static Graph
Set program parameters from state_dict.
An exception will be thrown if the shape or dtype of a parameter does not match.
......
......@@ -222,6 +222,8 @@ def Print(input,
print_tensor_lod=True,
print_phase='both'):
'''
:api_attr: Static Graph
**Print operator**
This creates a print op that will print when a tensor is accessed.
......@@ -446,6 +448,8 @@ class StaticRNNMemoryLink(object):
class StaticRNN(object):
"""
:api_attr: Static Graph
StaticRNN class.
The StaticRNN can process a batch of sequence data. The first dimension of inputs
......@@ -923,6 +927,8 @@ class WhileGuard(BlockGuard):
class While(object):
"""
:api_attr: Static Graph
While loop control flow: repeats the While body until ``cond`` is False.
Note:
......@@ -1061,6 +1067,11 @@ def assign_skip_lod_tensor_array(inputs, outputs):
def while_loop(cond, body, loop_vars, is_test=False, name=None):
"""
:api_attr: Static Graph
:alias_main: paddle.nn.while_loop
:alias: paddle.nn.while_loop,paddle.nn.control_flow.while_loop
:old_api: paddle.fluid.layers.while_loop
while_loop is one of the control-flow APIs: it repeatedly runs `body` until `cond` returns False.
Notice:
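A minimal counting loop, close to the canonical form of this API:

    import paddle.fluid as fluid

    def cond(i, ten):
        return fluid.layers.less_than(i, ten)

    def body(i, ten):
        return [i + 1, ten]    # must return loop_vars in the same order

    i = fluid.layers.fill_constant(shape=[1], dtype='int64', value=0)
    ten = fluid.layers.fill_constant(shape=[1], dtype='int64', value=10)
    i, ten = fluid.layers.while_loop(cond, body, loop_vars=[i, ten])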
......@@ -1529,6 +1540,10 @@ def create_array(dtype):
@templatedoc()
def less_than(x, y, force_cpu=None, cond=None):
"""
:alias_main: paddle.less_than
:alias: paddle.less_than,paddle.tensor.less_than,paddle.tensor.logic.less_than
:old_api: paddle.fluid.layers.less_than
${comment}
Args:
......@@ -1594,6 +1609,10 @@ def less_than(x, y, force_cpu=None, cond=None):
@templatedoc()
def less_equal(x, y, cond=None):
"""
:alias_main: paddle.less_equal
:alias: paddle.less_equal,paddle.tensor.less_equal,paddle.tensor.logic.less_equal
:old_api: paddle.fluid.layers.less_equal
This OP returns the truth value of :math:`x <= y` elementwise, which is equivalent to the overloaded operator `<=`.
Args:
......@@ -1642,6 +1661,10 @@ def less_equal(x, y, cond=None):
@templatedoc()
def greater_than(x, y, cond=None):
"""
:alias_main: paddle.greater_than
:alias: paddle.greater_than,paddle.tensor.greater_than,paddle.tensor.logic.greater_than
:old_api: paddle.fluid.layers.greater_than
This OP returns the truth value of :math:`x > y` elementwise, which is equivalent to the overloaded operator `>`.
Args:
......@@ -1689,6 +1712,10 @@ def greater_than(x, y, cond=None):
@templatedoc()
def greater_equal(x, y, cond=None):
"""
:alias_main: paddle.greater_equal
:alias: paddle.greater_equal,paddle.tensor.greater_equal,paddle.tensor.logic.greater_equal
:old_api: paddle.fluid.layers.greater_equal
This OP returns the truth value of :math:`x >= y` elementwise, which is equivalent to the overloaded operator `>=`.
Args:
......@@ -1782,6 +1809,10 @@ def equal(x, y, cond=None):
def not_equal(x, y, cond=None):
"""
:alias_main: paddle.not_equal
:alias: paddle.not_equal,paddle.tensor.not_equal,paddle.tensor.logic.not_equal
:old_api: paddle.fluid.layers.not_equal
This OP returns the truth value of :math:`x != y` elementwise, which is equivalent to the overloaded operator `!=`.
Args:
......@@ -2225,6 +2256,11 @@ def copy_var_to_parent_block(var, layer_helper):
def cond(pred, true_fn=None, false_fn=None, name=None):
"""
:api_attr: Static Graph
:alias_main: paddle.nn.cond
:alias: paddle.nn.cond,paddle.nn.control_flow.cond
:old_api: paddle.fluid.layers.cond
This API returns ``true_fn()`` if the predicate ``pred`` is true, else
``false_fn()``. Users can also set ``true_fn`` or ``false_fn`` to
``None`` if nothing should be done; this API will then treat the callable as simply returning
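A branch-on-a-runtime-predicate sketch:

    import paddle.fluid as fluid

    x = fluid.layers.fill_constant(shape=[1], dtype='float32', value=0.1)
    y = fluid.layers.fill_constant(shape=[1], dtype='float32', value=0.23)
    pred = fluid.layers.less_than(x, y)
    # Evaluates to x + y here, since 0.1 < 0.23 at runtime.
    out = fluid.layers.cond(pred,
                            lambda: fluid.layers.elementwise_add(x, y),
                            lambda: fluid.layers.elementwise_sub(x, y))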
......@@ -2410,6 +2446,11 @@ def _error_message(what, arg_name, op_name, right_value, error_value):
def case(pred_fn_pairs, default=None, name=None):
'''
:api_attr: Static Graph
:alias_main: paddle.nn.case
:alias: paddle.nn.case,paddle.nn.control_flow.case
:old_api: paddle.fluid.layers.case
This operator works like an if-elif-elif-else chain.
Args:
......@@ -2520,6 +2561,7 @@ def case(pred_fn_pairs, default=None, name=None):
class Switch(object):
"""
:api_attr: Static Graph
This class is used to implement the Switch branch control function.
A Switch branch contains several case branches and one default branch.
......@@ -2677,6 +2719,8 @@ class IfElseBlockGuard(object):
class IfElse(object):
"""
:api_attr: Static Graph
This class is used to implement the IfElse branch control function. IfElse contains two blocks, true_block and false_block. IfElse puts data satisfying the True or False condition into the corresponding block to run.
Cond is a 2-D Tensor with shape [N, 1] and data type bool, representing the execution conditions of the corresponding part of the input data.
......@@ -2853,6 +2897,8 @@ class IfElse(object):
class DynamicRNN(object):
"""
:api_attr: Static Graph
**Note: the input of this class should be LoDTensor which holds the
information of variable-length sequences. If the input is fixed-length Tensor,
please use StaticRNN (fluid.layers.** :ref:`api_fluid_layers_StaticRNN` **) for
......@@ -3518,6 +3564,8 @@ class DynamicRNN(object):
def switch_case(branch_index, branch_fns, default=None, name=None):
'''
:api_attr: Static Graph
This operator is like a C++ switch/case statement.
Args:
......@@ -3701,6 +3749,10 @@ def reorder_lod_tensor_by_rank(x, rank_table):
def is_empty(x, cond=None):
"""
:alias_main: paddle.is_empty
:alias: paddle.is_empty,paddle.tensor.is_empty,paddle.tensor.logic.is_empty
:old_api: paddle.fluid.layers.is_empty
Test whether a Variable is empty.
Args:
......
......@@ -57,6 +57,11 @@ def center_loss(input,
param_attr,
update_center=True):
"""
:api_attr: Static Graph
:alias_main: paddle.nn.functional.center_loss
:alias: paddle.nn.functional.center_loss,paddle.nn.functional.loss.center_loss
:old_api: paddle.fluid.layers.center_loss
**Center loss Cost layer**
This OP accepts input (deep features, the output of the last hidden layer)
......@@ -147,6 +152,10 @@ def center_loss(input,
def bpr_loss(input, label, name=None):
"""
:alias_main: paddle.nn.functional.bpr_loss
:alias: paddle.nn.functional.bpr_loss,paddle.nn.functional.loss.bpr_loss
:old_api: paddle.fluid.layers.bpr_loss
**Bayesian Personalized Ranking Loss Operator**
This operator computes a pairwise ranking loss. The label is the desired item.
......@@ -195,6 +204,10 @@ def bpr_loss(input, label, name=None):
def cross_entropy(input, label, soft_label=False, ignore_index=kIgnoreIndex):
"""
:alias_main: paddle.nn.functional.cross_entropy
:alias: paddle.nn.functional.cross_entropy,paddle.nn.functional.loss.cross_entropy
:old_api: paddle.fluid.layers.cross_entropy
This operator computes the cross entropy between input and label. It
supports both hard-label and soft-label cross entropy computation.
......@@ -288,6 +301,10 @@ def cross_entropy2(input, label, ignore_index=kIgnoreIndex):
def square_error_cost(input, label):
"""
:alias_main: paddle.nn.functional.square_error_cost
:alias: paddle.nn.functional.square_error_cost,paddle.nn.functional.loss.square_error_cost
:old_api: paddle.fluid.layers.square_error_cost
This op accepts input predictions and target label and returns the
squared error cost.
......@@ -663,6 +680,8 @@ def nce(input,
seed=0,
is_sparse=False):
"""
:api_attr: Static Graph
${comment}
Args:
......@@ -874,6 +893,8 @@ def hsigmoid(input,
is_custom=False,
is_sparse=False):
"""
:api_attr: Static Graph
The hierarchical sigmoid organizes the classes into a complete binary tree to reduce the computational complexity
and speed up model training, especially the training of language models.
Each leaf node of the complete binary tree represents a class (word) and each non-leaf node acts as a binary classifier.
......@@ -1167,6 +1188,10 @@ def softmax_with_cross_entropy(logits,
return_softmax=False,
axis=-1):
"""
:alias_main: paddle.nn.functional.softmax_with_cross_entropy
:alias: paddle.nn.functional.softmax_with_cross_entropy,paddle.nn.functional.loss.softmax_with_cross_entropy
:old_api: paddle.fluid.layers.softmax_with_cross_entropy
This operator implements the cross entropy loss function with softmax. This function
combines the calculation of the softmax operation and the cross entropy loss function
to provide a more numerically stable gradient.
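A minimal sketch of the fused call (shapes are illustrative):

    import paddle.fluid as fluid

    logits = fluid.data(name='logits', shape=[None, 10], dtype='float32')
    label = fluid.data(name='label', shape=[None, 1], dtype='int64')
    # Fused softmax + cross entropy; numerically stabler than applying
    # fluid.layers.softmax and fluid.layers.cross_entropy separately.
    loss = fluid.layers.softmax_with_cross_entropy(logits=logits, label=label)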
......@@ -1290,6 +1315,10 @@ def softmax_with_cross_entropy(logits,
def rank_loss(label, left, right, name=None):
"""
:alias_main: paddle.nn.functional.rank_loss
:alias: paddle.nn.functional.rank_loss,paddle.nn.functional.loss.rank_loss
:old_api: paddle.fluid.layers.rank_loss
This operator implements the sort loss layer in the RankNet model. RankNet is a pairwise ranking model
with a training sample consisting of a pair of documents (A and B). The label (P)
indicates whether A is ranked higher than B or not. Please refer to more details:
......@@ -1407,6 +1436,10 @@ def sigmoid_cross_entropy_with_logits(x,
name=None,
normalize=False):
"""
:alias_main: paddle.nn.functional.sigmoid_cross_entropy_with_logits
:alias: paddle.nn.functional.sigmoid_cross_entropy_with_logits,paddle.nn.functional.loss.sigmoid_cross_entropy_with_logits
:old_api: paddle.fluid.layers.sigmoid_cross_entropy_with_logits
${comment}
Args:
......@@ -1464,6 +1497,10 @@ def teacher_student_sigmoid_loss(input,
soft_max_up_bound=15.0,
soft_max_lower_bound=-15.0):
"""
:alias_main: paddle.nn.functional.teacher_student_sigmoid_loss
:alias: paddle.nn.functional.teacher_student_sigmoid_loss,paddle.nn.functional.loss.teacher_student_sigmoid_loss
:old_api: paddle.fluid.layers.teacher_student_sigmoid_loss
**Teacher Student Log Loss Layer**
This layer accepts input predictions and target label and returns the
......@@ -1583,6 +1620,10 @@ def huber_loss(input, label, delta):
@templatedoc()
def kldiv_loss(x, target, reduction='mean', name=None):
"""
:alias_main: paddle.nn.functional.kldiv_loss
:alias: paddle.nn.functional.kldiv_loss,paddle.nn.functional.loss.kldiv_loss
:old_api: paddle.fluid.layers.kldiv_loss
${comment}
Args:
......@@ -1643,6 +1684,10 @@ from .control_flow import equal
def npair_loss(anchor, positive, labels, l2_reg=0.002):
'''
:alias_main: paddle.nn.functional.npair_loss
:alias: paddle.nn.functional.npair_loss,paddle.nn.functional.loss.npair_loss
:old_api: paddle.fluid.layers.npair_loss
**Npair Loss Layer**
Read `Improved Deep Metric Learning with Multi class N pair Loss Objective\
......@@ -1709,6 +1754,10 @@ def npair_loss(anchor, positive, labels, l2_reg=0.002):
def mse_loss(input, label):
"""
:alias_main: paddle.nn.functional.mse_loss
:alias: paddle.nn.functional.mse_loss,paddle.nn.functional.loss.mse_loss
:old_api: paddle.fluid.layers.mse_loss
This op accepts input predictions and target label and returns the mean square error.
The loss can be described as:
......
This diff is collapsed.