Unverified commit 2d3c29ab, authored by Jiabin Yang, committed by GitHub

test=document_fix, test=release/1.6, refine en doc (#20317) (#20404)

* test=develop, fix docker with paddle nccl problem

* test=develop, refine en_doc for Variable and Program

* test=document_fix, fix English doc for Variable and Program

* test=document_fix, refine astype code block style

* test=document_fix, add example code for Variable properties
Parent 37791fbf
paddle.fluid.Program ('paddle.fluid.framework.Program', ('document', '9d8de4f0398ff2b01d4b572ee412b6aa'))
paddle.fluid.Program.__init__ (ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None), ('document', '6adf97f83acf6453d4a6a4b1070f3754'))
paddle.fluid.Program.block (ArgSpec(args=['self', 'index'], varargs=None, keywords=None, defaults=None), ('document', 'f2b2f468fef697ede6ea5bf7a9ba489c'))
paddle.fluid.Program.clone (ArgSpec(args=['self', 'for_test'], varargs=None, keywords=None, defaults=(False,)), ('document', 'c60aaaa8e2ca192dadffbb6ef0de8f60'))
paddle.fluid.Program.current_block (ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None), ('document', '0efa58429d84407256665cc1ad66a240'))
paddle.fluid.Program.global_block (ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None), ('document', '0a25153e749268585354f8ae7eb2713b'))
paddle.fluid.Program.list_vars (ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None), ('document', '81440aaee8e4cb6c3488e8f9bbf1143e'))
paddle.fluid.Program.parse_from_string (ArgSpec(args=['binary_str'], varargs=None, keywords=None, defaults=None), ('document', 'fc4a5660ff4280278402688f0014ce7f'))
paddle.fluid.Program.to_string (ArgSpec(args=['self', 'throw_on_error', 'with_details'], varargs=None, keywords=None, defaults=(False,)), ('document', '7dde33f16b63aa50d474870a9cebb539'))
paddle.fluid.default_startup_program (ArgSpec(args=[], varargs=None, keywords=None, defaults=None), ('document', 'f53890b2fb8c0642b6047e4fee2d6d58'))
paddle.fluid.default_main_program (ArgSpec(args=[], varargs=None, keywords=None, defaults=None), ('document', '853718df675e59aea7104f3d61bbf11d'))
paddle.fluid.program_guard (ArgSpec(args=['main_program', 'startup_program'], varargs=None, keywords=None, defaults=(None,)), ('document', '78fb5c7f70ef76bcf4a1862c3f6b8191'))
paddle.fluid.name_scope (ArgSpec(args=['prefix'], varargs=None, keywords=None, defaults=(None,)), ('document', '917d313881ff990de5fb18d98a9c7b42'))
...@@ -16,16 +16,16 @@ paddle.fluid.cpu_places (ArgSpec(args=['device_count'], varargs=None, keywords=N
paddle.fluid.cuda_pinned_places (ArgSpec(args=['device_count'], varargs=None, keywords=None, defaults=(None,)), ('document', '567ac29567716fd8e7432b533337d529'))
paddle.fluid.in_dygraph_mode (ArgSpec(args=[], varargs=None, keywords=None, defaults=None), ('document', 'df1f4d1ed7e1eefe04f6361efda6b75a'))
paddle.fluid.is_compiled_with_cuda (ArgSpec(args=[], varargs=None, keywords=None, defaults=None), ('document', '60c7f107a5050aeb58bb74eb175672b5'))
paddle.fluid.Variable ('paddle.fluid.framework.Variable', ('document', '8e51f2adcad7ae33aa35a178cceebdf6'))
paddle.fluid.Variable.__init__ (ArgSpec(args=['self', 'block', 'type', 'name', 'shape', 'dtype', 'lod_level', 'capacity', 'persistable', 'error_clip', 'stop_gradient', 'is_data', 'need_check_feed'], varargs=None, keywords='kwargs', defaults=(VarType.LOD_TENSOR, None, None, None, None, None, None, None, False, False, False)), ('document', '6adf97f83acf6453d4a6a4b1070f3754'))
paddle.fluid.Variable.astype (ArgSpec(args=['self', 'dtype'], varargs=None, keywords=None, defaults=None), ('document', 'ffc7049afe250829b56986c40cf3cca3'))
paddle.fluid.Variable.backward (ArgSpec(args=['self', 'backward_strategy'], varargs=None, keywords=None, defaults=(None,)), ('document', 'a2fabca0102442aae4d6537028574d07'))
paddle.fluid.Variable.clear_gradient (ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None), ('document', 'c6e511dcccf51a95eeb83447322b4345'))
paddle.fluid.Variable.detach (ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None), ('document', '833afb90a8d9353610e3d671b2de9f4f'))
paddle.fluid.Variable.gradient (ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None), ('document', 'b8fe1354051f3cc953c280c8cabdd3fc'))
paddle.fluid.Variable.numpy (ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None), ('document', '760ab11e4019258b3004fe9002819170'))
paddle.fluid.Variable.set_value (ArgSpec(args=['self', 'value'], varargs=None, keywords=None, defaults=None), ('document', '69deb77a9dedc61f7b731a6a7709fa5b'))
paddle.fluid.Variable.to_string (ArgSpec(args=['self', 'throw_on_error', 'with_details'], varargs=None, keywords=None, defaults=(False,)), ('document', '65cd237e2d30c12e412c9cafbbd00791'))
paddle.fluid.load_op_library (ArgSpec(args=['lib_filename'], varargs=None, keywords=None, defaults=None), ('document', 'c009b2ea5fb6520f2d2f53aafec788e0'))
paddle.fluid.Executor ('paddle.fluid.executor.Executor', ('document', '34e8c1769313fbeff7817212dda6259e'))
paddle.fluid.Executor.__init__ (ArgSpec(args=['self', 'place'], varargs=None, keywords=None, defaults=None), ('document', '6adf97f83acf6453d4a6a4b1070f3754'))
......
...@@ -418,18 +418,20 @@ def _debug_string_(proto, throw_on_error=True):
class Variable(object):
"""
**Notes**:
**The constructor of Variable should not be invoked directly.**
**In Static Graph Mode: Please use** `Block.create_var` **to create a Static variable which has no data until it is fed.**
**In Dygraph Mode: Please use** :ref:`api_fluid_dygraph_to_variable` **to create a dygraph variable with real data**
In Fluid, every input and output of an OP is a variable. In most
cases, variables are used for holding different kinds of data or training
labels. A variable belongs to a :ref:`api_guide_Block_en` . Every variable has its own name and
two variables in different :ref:`api_guide_Block_en` could have the same name.
There are many kinds of variables. Each kind of them has its own attributes
and usages. Please refer to the `framework.proto <https://github.com/PaddlePaddle/Paddle/blob/develop/paddle/fluid/framework/framework.proto>`_ for details.
Most of a Variable's member variables can be set to None. It means
it is not available or will be specified later.
...@@ -445,7 +447,7 @@ class Variable(object):
new_variable = cur_block.create_var(name="X",
                                    shape=[-1, 23, 48],
                                    dtype='float32')
In `Dygraph <../../user_guides/howto/dygraph/DyGraph.html>`_ Mode:
.. code-block:: python
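# A minimal sketch (the example body is collapsed in this diff view);
# it assumes the release/1.6 fluid.dygraph API.
import paddle.fluid as fluid
import numpy as np

with fluid.dygraph.guard():
    new_variable = fluid.dygraph.to_variable(np.arange(6).reshape(2, 3).astype('float32'))
    print(new_variable.shape)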
...@@ -577,15 +579,14 @@ class Variable(object):
@dygraph_only
def detach(self):
"""
**Notes**:
**This API is ONLY available in Dygraph mode**
Returns a new Variable, detached from the current graph.
Returns:
( :ref:`api_guide_Variable_en` | dtype is same as current Variable): The detached Variable.
Examples:
.. code-block:: python
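# A minimal sketch (the example body is collapsed in this diff view);
# it assumes the release/1.6 fluid.dygraph API.
import paddle.fluid as fluid
import numpy as np

with fluid.dygraph.guard():
    x = fluid.dygraph.to_variable(np.ones([2, 2], dtype='float32'))
    y = x.detach()          # y no longer participates in gradient computation
    print(y.numpy())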
...@@ -617,15 +618,16 @@ class Variable(object):
@dygraph_only
def numpy(self):
"""
**Notes**:
**This API is ONLY available in Dygraph mode**
Returns a numpy array that shows the value of the current :ref:`api_guide_Variable_en`
Returns:
ndarray: The numpy value of current Variable.
Returns type:
ndarray: dtype is same as current Variable
Examples:
.. code-block:: python
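# A minimal sketch (the example body is collapsed in this diff view);
# it assumes the release/1.6 fluid.dygraph API.
import paddle.fluid as fluid
import numpy as np

with fluid.dygraph.guard():
    x = fluid.dygraph.to_variable(np.array([[1.0, 2.0], [3.0, 4.0]], dtype='float32'))
    print(x.numpy())        # ndarray with the same dtype as x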
...@@ -653,14 +655,14 @@ class Variable(object):
@dygraph_only
def set_value(self, value):
"""
**Notes**:
**This API is ONLY available in Dygraph mode**
Set a new value for this Variable.
Args:
value (Variable|np.ndarray): the new value.
Examples:
.. code-block:: python
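# A minimal sketch (the example body is collapsed in this diff view);
# it assumes the release/1.6 fluid.dygraph API.
import paddle.fluid as fluid
import numpy as np

with fluid.dygraph.guard():
    t = fluid.dygraph.to_variable(np.ones([2, 2], dtype='float32'))
    t.set_value(np.zeros([2, 2], dtype='float32'))
    print(t.numpy())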
...@@ -692,14 +694,16 @@ class Variable(object):
@dygraph_only
def backward(self, backward_strategy=None):
"""
**Notes**:
**This API is ONLY available in Dygraph mode**
Run backward of current Graph which starts from current Variable
Args:
backward_strategy( :ref:`api_fluid_dygraph_BackwardStrategy` ): The Backward Strategy to run backward
Returns:
NoneType: None
Examples:
.. code-block:: python
...@@ -712,6 +716,8 @@ class Variable(object):
inputs2 = []
for _ in range(10):
    tmp = fluid.dygraph.base.to_variable(x)
    # If we don't set tmp's stop_gradient to False, every path to the loss
    # will have no gradient, since nothing on it requires a gradient.
    tmp.stop_gradient = False
    inputs2.append(tmp)
ret2 = fluid.layers.sums(inputs2)
...@@ -735,13 +741,13 @@ class Variable(object):
@dygraph_only
def gradient(self):
"""
**Notes**:
**This API is ONLY available in Dygraph mode**
Get the Gradient of Current Variable
Returns:
ndarray: Numpy value of the gradient of current Variable
Examples:
.. code-block:: python
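# A minimal sketch (the example body is collapsed in this diff view);
# it assumes the release/1.6 fluid.dygraph API.
import paddle.fluid as fluid
import numpy as np

with fluid.dygraph.guard():
    x = fluid.dygraph.to_variable(np.ones([2, 2], dtype='float32'))
    x.stop_gradient = False
    loss = fluid.layers.reduce_sum(x)
    loss.backward()
    print(x.gradient())     # expect an array of ones with the same shape as x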
...@@ -778,9 +784,12 @@ class Variable(object):
@dygraph_only
def clear_gradient(self):
"""
**Notes**:
**1. This API is ONLY available in Dygraph mode**
**2. Use it only when the Variable has a gradient; normally we use this for Parameters, since other temporary Variables will be deleted by Python's GC**
Clear (set to ``0`` ) the Gradient of Current Variable
Returns: None
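A minimal usage sketch (the original Examples block is collapsed in this diff view; the snippet assumes the release/1.6 fluid.dygraph API):

.. code-block:: python

    import paddle.fluid as fluid
    import numpy as np

    with fluid.dygraph.guard():
        x = fluid.dygraph.to_variable(np.ones([2, 2], dtype='float32'))
        x.stop_gradient = False
        loss = fluid.layers.reduce_sum(x)
        loss.backward()
        print(x.gradient())      # gradient w.r.t. x
        x.clear_gradient()       # reset the stored gradient to 0
        print(x.gradient())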
...@@ -816,19 +825,15 @@ class Variable(object):
"""
Get debug string.
Args:
throw_on_error (bool): Whether to raise an exception when self is not initialized.
with_details (bool): more details about variables and parameters (e.g. trainable, optimize_attr, ...) will be printed when with_details is True. Default value is False;
Returns:
str: The debug string.
Examples:
.. code-block:: python
...@@ -840,7 +845,7 @@ class Variable(object):
shape=[-1, 23, 48],
dtype='float32')
print(new_variable.to_string(True))
print("=============with detail===============")
print(new_variable.to_string(True, True))
"""
if in_dygraph_mode():
...@@ -870,6 +875,35 @@ class Variable(object):
@property
def stop_gradient(self):
"""
Indicating whether backward gradient computation stops at the current Variable
**Notes: This Property has a default value of** ``True`` **in** `Dygraph <../../user_guides/howto/dygraph/DyGraph.html>`_ **mode, while a Parameter's default value is** ``False`` **. However, in Static Graph Mode all Variables' default stop_gradient value is** ``False``
Examples:
.. code-block:: python
import paddle.fluid as fluid
import numpy as np

with fluid.dygraph.guard():
    value0 = np.arange(26).reshape(2, 13).astype("float32")
    value1 = np.arange(6).reshape(2, 3).astype("float32")
    value2 = np.arange(10).reshape(2, 5).astype("float32")
    fc = fluid.FC("fc1", size=5, dtype="float32")
    fc2 = fluid.FC("fc2", size=3, dtype="float32")
    a = fluid.dygraph.to_variable(value0)
    b = fluid.dygraph.to_variable(value1)
    c = fluid.dygraph.to_variable(value2)
    out1 = fc(a)
    out2 = fc2(b)
    out1.stop_gradient = True
    out = fluid.layers.concat(input=[out1, out2, c], axis=1)
    out.backward()
    assert (fc._w.gradient() == 0).all()
    assert (out1.gradient() == 0).all()
"""
if in_dygraph_mode():
    return self._ivar.stop_gradient
else:
...@@ -884,6 +918,27 @@ class Variable(object):
@property
def persistable(self):
"""
Indicating whether the current Variable should be long-term alive
**Notes: This Property will be deprecated and this API is just to help users understand the concept**
**1. All Variables' persistable is** ``False`` **except Parameters.**
**2. In** `Dygraph <../../user_guides/howto/dygraph/DyGraph.html>`_ **mode, this property should not be changed**
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_variable = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
print("persistable of current Var is: {}".format(new_variable.persistable))
"""
if in_dygraph_mode():
    return self._ivar.persistable
else:
...@@ -900,6 +955,22 @@ class Variable(object):
@property
def name(self):
"""
Indicating name of current Variable
**Notes: If two or more Variables share the same name in the same** :ref:`api_guide_Block_en` **, it means these Variables share content in non-** `Dygraph <../../user_guides/howto/dygraph/DyGraph.html>`_ **mode. This is how we achieve Parameter sharing**
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_variable = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
print("name of current Var is: {}".format(new_variable.name))
"""
if in_dygraph_mode():
    return self._ivar.name
else:
...@@ -914,6 +985,23 @@ class Variable(object):
@property
def shape(self):
"""
Indicating shape of current Variable
**Notes: This is a read-only property**
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_variable = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
print("shape of current Var is: {}".format(new_variable.shape))
"""
# convert to tuple, make it the same as the numpy API.
if in_dygraph_mode():
    return self._ivar.shape
...@@ -922,6 +1010,22 @@ class Variable(object):
@property
def dtype(self):
"""
Indicating data type of current Variable
**Notes: This is a read-only property**
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_variable = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
print("Dtype of current Var is: {}".format(new_variable.dtype))
"""
if in_dygraph_mode():
    return self._ivar.dtype
else:
...@@ -930,6 +1034,27 @@ class Variable(object):
@property
@dygraph_not_support
def lod_level(self):
"""
Indicating ``LoD`` info of current Variable, please refer to :ref:`api_fluid_LoDTensor_en` to check the meaning
of ``LoD``
**Notes**:
**1. This is a read-only property**
**2. This property is not supported in** `Dygraph <../../user_guides/howto/dygraph/DyGraph.html>`_ **mode; its value should be** ``0(int)``
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_variable = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
print("LoD Level of current Var is: {}".format(new_variable.lod_level))
"""
# TODO(minqiyang): Support lod_level in dygraph mode
if in_dygraph_mode():
    raise Exception("Dygraph mode does not support lod")
...@@ -937,6 +1062,22 @@ class Variable(object):
@property
def type(self):
"""
Indicating Type of current Variable
**Notes: This is a read-only property**
Examples:
.. code-block:: python
import paddle.fluid as fluid
cur_program = fluid.Program()
cur_block = cur_program.current_block()
new_variable = cur_block.create_var(name="X",
shape=[-1, 23, 48],
dtype='float32')
print("Type of current Var is: {}".format(new_variable.type))
"""
if in_dygraph_mode():
    return self._ivar.type
else:
...@@ -3166,10 +3307,12 @@ class Program(object):
control flow op like conditional_block, while :ref:`api_fluid_layers_While` is included,
it will contain nested block.
Please reference the
`framework.proto <https://github.com/PaddlePaddle/Paddle/blob/develop/paddle/fluid/framework/framework.proto>`_
for details.
A set of Program usually contains startup program and main program.
A startup program is set to contain some initial work, e.g. initialize the ``Parameter``, and the main
program will contain the network structure and vars for train.
A set of Program can be used for test or train, in train program,
...@@ -3177,15 +3320,13 @@ class Program(object):
program Paddle will prune some content which is irrelevant to test, e.g.
backward ops and vars.
**Notes**:
**we have** :ref:`api_fluid_default_startup_program` **and** :ref:`api_fluid_default_main_program`
**by default; a pair of them will share the parameters. The** :ref:`api_fluid_default_startup_program` **only runs once to initialize parameters,**
:ref:`api_fluid_default_main_program` **runs in every mini batch and adjusts the weights.**
Returns:
Program: An empty Program.
Examples:
.. code-block:: python
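# A minimal sketch (the example body is collapsed in this diff view);
# it assumes the release/1.6 static-graph API.
import paddle.fluid as fluid

main_program = fluid.Program()
startup_program = fluid.Program()
with fluid.program_guard(main_program=main_program, startup_program=startup_program):
    x = fluid.layers.data(name="x", shape=[784], dtype='float32')
    hidden = fluid.layers.fc(input=x, size=10, act="relu")

print("main program is: {}".format(main_program))
print("start up program is: {}".format(startup_program))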
...@@ -3370,20 +3511,17 @@ class Program(object):
"""
To debug string.
Args:
throw_on_error (bool): Whether to raise a ValueError when any required field is not set.
with_details (bool): True if more details about variables and parameters, e.g., :code:`trainable`, :code:`optimize_attr`, need to print.
Returns:
str: The debug string that describes the current Program.
Raises:
ValueError: If any of the required fields is not set and throw_on_error is True.
Examples:
.. code-block:: python
...@@ -3392,8 +3530,9 @@ class Program(object):
prog = fluid.default_main_program()
prog_string = prog.to_string(throw_on_error=True, with_details=False)
print("program string without detail: {}".format(prog_string))
prog_string_with_detail = prog.to_string(throw_on_error=True, with_details=True)
print("program string with detail: {}".format(prog_string_with_detail))
""" """
assert isinstance(throw_on_error, bool) and isinstance(with_details, assert isinstance(throw_on_error, bool) and isinstance(with_details,
bool) bool)
...@@ -3425,15 +3564,17 @@ class Program(object): ...@@ -3425,15 +3564,17 @@ class Program(object):
def clone(self, for_test=False):
"""
**Notes**:
**1.** :code:`Program.clone()` **method DOES NOT clone** :ref:`api_fluid_io_DataLoader` .
**2. Recommend you to use** :code:`clone` **before using** :code:`Optimizer.minimize`.
**3. This API has no effect in Dygraph Mode**
Create a new Program with forward content of original one when ``for_test=True``.
Create a new Program as the same as original one when ``for_test=False``.
Some operators, e.g., :ref:`api_fluid_layers_batch_norm` , behave differently between
training and testing. They have an attribute, :code:`is_test`, to
control this behaviour. This method will change the :code:`is_test`
attribute of them to :code:`True` when :code:`for_test=True`.
...@@ -3442,9 +3583,9 @@ class Program(object):
* Set for_test to True when we want to clone the program for testing.
  We will prune the backward and optimize part of the program when you
  use :code:`clone` after :code:`Optimizer.minimize`, but we still
  recommend you to use :code:`clone` before using :code:`Optimizer.minimize`.
For Example:
.. code-block:: python
test_program = fluid.default_main_program().clone(for_test=True)
...@@ -3452,22 +3593,21 @@ class Program(object):
optimizer = fluid.optimizer.Momentum(learning_rate=0.01, momentum=0.9)
optimizer.minimize()
Args:
for_test (bool): True if the :code:`is_test` attribute of operators should be changed to :code:`True`.
Returns:
Program: A new Program with forward content of original one when ``for_test=True``. A new Program as the same as original one when ``for_test=False``
Examples:
**Notes: The Program's order may be different after** :code:`clone` **and
this will not affect your training or testing progress. In the following
example we give you a simple method** :code:`print_prog(program)` **to
print Program Descs in order to make sure you have the same print result
after** :code:`clone`:
.. code-block:: python
import paddle.fluid as fluid
...@@ -3777,19 +3917,20 @@ class Program(object):
@staticmethod
def parse_from_string(binary_str):
"""
**Notes**:
**1. All information about parameters will be lost after serialization**
**2. This API has no effect in Dygraph mode**
Deserialize a Program from a `protobuf <https://en.wikipedia.org/wiki/Protocol_Buffers>`_ binary string.
This method is always used to save and load models.
Args:
binary_str (str): the binary protobuf string.
Returns:
Program: A deserialized Program.
Examples:
.. code-block:: python
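# A hedged sketch (the example body is collapsed in this diff view); it assumes
# Program.desc.serialize_to_string() is available to produce the protobuf bytes.
import paddle.fluid as fluid

prog = fluid.default_main_program()
binary_str = prog.desc.serialize_to_string()
restored = fluid.Program.parse_from_string(binary_str)
print(restored)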
...@@ -3839,14 +3980,14 @@ class Program(object):
@property
def random_seed(self):
"""
The default random seed for random operators in Program. ``0`` means get
the random seed from a random device.
**Notes: It must be set before the operators have been added.**
Returns:
int64: Random seed in current Program
Examples:
.. code-block:: python
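# A minimal sketch (the example body is collapsed in this diff view).
import paddle.fluid as fluid

prog = fluid.default_main_program()
print(prog.random_seed)   # 0 by default: seed is taken from a random device
prog.random_seed = 30     # must be set before operators are added
print(prog.random_seed)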
...@@ -3869,13 +4010,13 @@ class Program(object):
@property
def num_blocks(self):
"""
The number of :ref:`api_guide_Block_en` in this Program.
**Notes: This API has no effect in Dygraph mode**
Returns:
int(Platform-dependent size): num of :ref:`api_guide_Block_en` in current Program
Examples:
.. code-block:: python
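# A minimal sketch (the example body is collapsed in this diff view).
import paddle.fluid as fluid

prog = fluid.default_main_program()
print(prog.num_blocks)    # a freshly created Program has a single global block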
...@@ -3901,13 +4042,14 @@ class Program(object):
def global_block(self):
"""
**Notes**:
**This API has no effect in Dygraph mode**
Get the first :ref:`api_guide_Block_en` of this Program.
Returns:
:ref:`api_guide_Block_en`: The first :ref:`api_guide_Block_en` of this Program.
Examples:
.. code-block:: python
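# A minimal sketch (the example body is collapsed in this diff view).
import paddle.fluid as fluid

prog = fluid.default_main_program()
gb = prog.global_block()
print(gb)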
...@@ -3923,16 +4065,16 @@ class Program(object):
def block(self, index):
"""
**Notes**:
**This API has no effect in Dygraph mode**
Get the :code:`index` :ref:`api_guide_Block_en` of this Program
Args:
index (int): The index of :ref:`api_guide_Block_en` to get
Returns:
:ref:`api_guide_Block_en`: The :code:`index` block
Examples:
.. code-block:: python
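# A minimal sketch (the example body is collapsed in this diff view).
import paddle.fluid as fluid

prog = fluid.default_main_program()
block_0 = prog.block(0)   # index 0 is the global block
print(block_0)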
...@@ -3947,14 +4089,14 @@ class Program(object):
def current_block(self):
"""
**Notes**:
**This API has no effect in Dygraph mode**
Get the current :ref:`api_guide_Block_en` . The :code:`current` :ref:`api_guide_Block_en`
is the :ref:`api_guide_Block_en` to append operators.
Returns:
:ref:`api_guide_Block_en`: The :code:`current` :ref:`api_guide_Block_en`
Examples:
.. code-block:: python
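# A minimal sketch (the example body is collapsed in this diff view).
import paddle.fluid as fluid

prog = fluid.default_main_program()
cur_blk = prog.current_block()   # the block that new operators are appended to
print(cur_blk)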
...@@ -3973,6 +4115,7 @@ class Program(object):
to new block.
Args:
parent_idx(int): The parent block index.
Returns:
...@@ -4081,11 +4224,10 @@ class Program(object):
@dygraph_not_support
def list_vars(self):
"""
Get all :ref:`api_guide_Variable_en` from this Program. An iterable object is returned.
Returns:
iterable :ref:`api_guide_Variable_en`: The Generator will yield every variable in this program.
Examples:
.. code-block:: python
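# A minimal sketch (the example body is collapsed in this diff view);
# it assumes the release/1.6 static-graph API.
import paddle.fluid as fluid

prog = fluid.default_main_program()
img = fluid.layers.data(name='img', shape=[1, 28, 28], dtype='float32')
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
for var in prog.list_vars():
    print(var.name)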
...@@ -4202,17 +4344,17 @@ def default_startup_program():
"""
Get default/global startup program.
The layer function in :ref:`api_fluid_layers` will create parameters, :ref:`api_paddle_data_reader_reader` ,
`NCCL <https://developer.nvidia.com/nccl>`_ handles as global variables. The :code:`startup_program` will
initialize them by the OPs in startup :ref:`api_fluid_Program` . The :ref:`api_fluid_layers` function will
append these initialization operators into startup program.
This method will return the :code:`default` or the :code:`current` startup
program. Users can use :ref:`api_fluid_program_guard` to switch :ref:`api_fluid_Program` .
Returns: current default startup :ref:`api_fluid_Program`
Returns type: :ref:`api_fluid_Program`
Examples:
.. code-block:: python
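# A minimal sketch (the example body is collapsed in this diff view);
# it assumes the release/1.6 static-graph API.
import paddle.fluid as fluid

main_program = fluid.Program()
startup_program = fluid.Program()
with fluid.program_guard(main_program=main_program, startup_program=startup_program):
    x = fluid.layers.data(name="x", shape=[784], dtype='float32')
    out = fluid.layers.fc(input=x, size=10, act="relu")
    # inside the guard, the default startup program is startup_program
    print(fluid.default_startup_program())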
......
...@@ -93,14 +93,48 @@ def monkey_patch_variable():
def astype(self, dtype):
"""
**Notes**:
**The variable must be a** :ref:`api_fluid_Tensor`
Cast a variable to a specified data type.
Args:
self(Variable): The source variable
dtype: The target data type
Returns:
Variable: Variable with new dtype
Examples:
In Static Graph Mode:
.. code-block:: python
import paddle.fluid as fluid

startup_prog = fluid.Program()
main_prog = fluid.Program()
with fluid.program_guard(startup_prog, main_prog):
    original_variable = fluid.data(name="new_variable", shape=[2, 2], dtype='float32')
    new_variable = original_variable.astype('int64')
    print("new var's dtype is: {}".format(new_variable.dtype))
In Dygraph Mode:
.. code-block:: python
import paddle.fluid as fluid
import numpy as np

x = np.ones([2, 2], np.float32)
with fluid.dygraph.guard():
    original_variable = fluid.dygraph.to_variable(x)
    print("original var's dtype is: {}, numpy dtype is {}".format(original_variable.dtype, original_variable.numpy().dtype))
    new_variable = original_variable.astype('int64')
    print("new var's dtype is: {}, numpy dtype is {}".format(new_variable.dtype, new_variable.numpy().dtype))
"""
block = current_block(self)
out = create_new_tmp_var(block, dtype)
......