Unverified commit 584c9cfc, authored by Yu Yang, committed by GitHub

Add comments for unique_name, Variable, Operator (#6342)

Parent b0f83eda
@@ -3,6 +3,7 @@ import collections
import numpy as np
from . import core
import proto.framework_pb2 as framework_pb2
import google.protobuf.message
import contextlib
__all__ = [
@@ -13,11 +14,28 @@ __all__ = [
def unique_name(prefix):
    """
    Generate a unique name with the given prefix.

    Args:
        prefix(str): The prefix of the returned string.

    Returns(str): A unique string with the given prefix.
    """
    uid = core.unique_integer(prefix)  # unique during whole process.
    return "_".join([prefix, str(uid)])
def convert_np_dtype_to_dtype_(np_dtype):
    """
    Convert a numpy data type into the corresponding Paddle data type.

    Args:
        np_dtype(np.dtype): the data type in numpy.

    Returns(core.DataType): the data type in Paddle.
    """
    dtype = np.dtype(np_dtype)
    if dtype == np.float32:
        return core.DataType.FP32
@@ -38,17 +56,33 @@ def convert_np_dtype_to_dtype_(np_dtype):
def dtype_is_floating(dtype):
    """
    Check whether the data type is floating point or not.

    Args:
        dtype(np.dtype|core.DataType): data type.
            Could be numpy format or Paddle format.

    Returns(bool): True if the data type is a floating-point type.
    """
    if not isinstance(dtype, core.DataType):
        dtype = convert_np_dtype_to_dtype_(dtype)
    return dtype in [core.DataType.FP16, core.DataType.FP32, core.DataType.FP64]
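A short sketch; both numpy dtypes and Paddle data types are accepted:

>>> dtype_is_floating(np.float32)           # True
>>> dtype_is_floating(core.DataType.INT32)  # False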
def _debug_string_(proto, throw_on_error=True):
    """
    Get the debug string of a protobuf message. The message may be
    uninitialized.

    Args:
        proto(google.protobuf.message.Message): The protobuf message.
        throw_on_error(bool): True if an error should be raised when the
            protobuf message is not initialized.

    Returns(str): The debug string of the protobuf message.
    """
    error_fields = list()
    if not proto.IsInitialized(error_fields) and throw_on_error:
        raise ValueError("{0} are not initialized\nThe message is {1}".format(
@@ -57,6 +91,38 @@ def _debug_string_(proto, throw_on_error=True):
class Variable(object):
    """
    Python variable. Every input and output of an operator is a variable.
    Every variable belongs to a block. The variable has a name, and two
    variables in different blocks can have the same name.

    There are many kinds of variables. Please refer to framework.proto for
    details.

    Note: The constructor of Variable should not be invoked directly. Please
    use `Block.create_var` to create a variable.

    >>> cur_program = Program()
    >>> cur_block = cur_program.current_block()
    >>> new_variable = cur_block.create_var(
    >>>     name="X", shape=[-1, 23, 48], dtype='float32')

    Args:
        block(Block): The associated block. It will be passed by
            `Block.create_var` automatically.
        type(core.VarDesc.VarType): Variable type. Please refer to
            framework.proto for details.
        shape(tuple|list|None): The shape of the variable. -1 means the batch
            size. Some kinds of variables do not contain a shape; just set it
            to None.
        dtype(np.dtype|core.DataType|str): The data type of the variable.
        lod_level(int): The level of the LoD tensor. 0 means the variable is
            not time-series data.
        persistable(bool): True if the variable should be saved as a
            checkpoint. Defaults to False.
        stop_gradient(bool): True if the variable stops computing gradients
            in the backward pass. Defaults to False.
    """
    def __init__(self,
                 block,
                 type=core.VarDesc.VarType.LOD_TENSOR,
@@ -140,6 +206,16 @@ class Variable(object):
        return self.to_string(True)
    def to_string(self, throw_on_error):
        """
        Get debug string.

        Args:
            throw_on_error(bool): True if an exception should be raised when
                self is not initialized.

        Returns(str): The debug string.
        """
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.VarDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)
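A sketch of inspecting a variable, reusing `cur_block` from the Variable docstring above; passing throw_on_error=False keeps it from raising on a partially built description:

>>> x = cur_block.create_var(name="X", shape=[-1, 23, 48], dtype='float32')
>>> print(x.to_string(False))   # human-readable VarDesc protobuf text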
@@ -185,7 +261,9 @@ class Variable(object):
def get_all_op_protos():
    """
    Get all registered op protos from the PaddlePaddle C++ end.

    Returns(list): A list of registered OpProto.
    """
    protostrs = core.get_all_op_protos()
    ret_values = []
@@ -196,6 +274,10 @@ def get_all_op_protos():
class OpProtoHolder(object):
    """
    A global singleton that holds all OpProtos from the C++ end, as a map
    from operator type to OpProto.
    """
    @classmethod
    def instance(cls):
        if not hasattr(cls, '_instance'):
@@ -212,12 +294,26 @@ class OpProtoHolder(object):
            self.op_proto_map[proto.type] = proto
    def get_op_proto(self, type):
        """
        Get an OpProto by the type string.

        Args:
            type(str): The type that the operator was registered with on the
                C++ side.

        Returns(framework_pb2.OpProto): The OpProto.
        """
        if type not in self.op_proto_map:
            raise ValueError("Operator \"%s\" has not been registered." % type)
        return self.op_proto_map[type]
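A usage sketch; "sum" stands in for any operator type registered on the C++ side:

>>> proto = OpProtoHolder.instance().get_op_proto("sum")
>>> proto.type   # 'sum'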
class Operator(object):
    """
    Python Operator class. An operator represents a built-in instruction in
    a Block. Users can use these built-in instructions to describe their
    neural networks.
    """
    def __init__(self,
                 block,
                 desc,
@@ -225,6 +321,30 @@ class Operator(object):
                 inputs=None,
                 outputs=None,
                 attrs=None):
"""
Constructor.
Notes: The constructor of operator should not be invoked directly. Use
Block.append_op or Block.prepend_op instead.
>>> cur_program = Program()
>>> cur_block = cur_program.current_block()
>>> # var1 += var2 + var3
>>> cur_block.append_op(type="sum",
>>> inputs={"X": [var1, var2, var3]},
>>> outputs={"Out": [var1]})
Args:
block(Block): The block has the current operator
desc(core.OpDesc): The protobuf description
type(str): The type of operator.
inputs(dict): The input dictionary. Key is the input parameter name.
Value is a list of variables.
outputs(dict): The output dictionary. Has same format with inputs
attrs(dict): The attributes dictionary. Key is attribute name. Value
is the attribute value. The attribute type should be as same as
the type registered in C++
"""
        self.block = block
        self.desc = desc
        if len(self.desc.type()) != 0:
@@ -311,6 +431,15 @@ class Operator(object):
        self.desc.infer_shape(self.block.desc)
    def to_string(self, throw_on_error):
        """
        Get debug string.

        Args:
            throw_on_error(bool): True if an exception should be raised when
                self is not initialized.

        Returns(str): The debug string.
        """
        protostr = self.desc.serialize_to_string()
        proto = framework_pb2.OpDesc.FromString(str(protostr))
        return _debug_string_(proto, throw_on_error)
@@ -325,21 +454,55 @@ class Operator(object):
        return self.desc.type()
    def input(self, name):
        """
        Get the input argument names by the input parameter name.

        Args:
            name(str): The input parameter name.

        Returns(list): The argument names associated with the given
            parameter name.
        """
        return self.desc.input(name)

    @property
    def input_names(self):
        """
        Get all input parameter names.

        Returns(list): A list of input parameter names.
        """
        return self.desc.input_names()
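A sketch of input introspection, reusing the sum operator from the constructor example above; the exact argument names depend on how the variables were named:

>>> op = cur_block.append_op(type="sum",
>>>                          inputs={"X": [var1, var2, var3]},
>>>                          outputs={"Out": [var1]})
>>> op.input_names   # ['X']
>>> op.input("X")    # e.g. ['var1', 'var2', 'var3']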
    def output(self, name):
        """
        Get the output argument names by the output parameter name.

        Args:
            name(str): The output parameter name.

        Returns(list): The argument names associated with the given
            parameter name.
        """
        return self.desc.output(name)

    @property
    def output_names(self):
        """
        Get all output parameter names.

        Returns(list): A list of output parameter names.
        """
        return self.desc.output_names()
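Output introspection mirrors the input side (continuing the same sketch):

>>> op.output_names   # ['Out']
>>> op.output("Out")  # e.g. ['var1']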
    @property
    def idx(self):
        """
        Return the array index of the current operator.

        Returns(int): The index in the block.ops array.

        Raises:
            ValueError: when the operator is not found.
        """
        for i, op in enumerate(self.block.ops):
            if op == self:
                return i
@@ -347,19 +510,57 @@ class Operator(object):
            "Can't find op itself in it's block. It could be a bug of Paddle.")
    def has_attr(self, name):
        """
        Whether the operator has an attribute with the given name.

        Args:
            name(str): the attribute name.

        Returns(bool): True if the operator has this attribute.
        """
        return self.desc.has_attr(name)

    def attr_type(self, name):
        """
        Get the type of an attribute by the attribute name.

        Args:
            name(str): the attribute name.

        Returns(core.AttrType): the attribute type.
        """
        return self.desc.attr_type(name)

    @property
    def attr_names(self):
        """
        Get all attribute names.

        Returns(list): The list of attribute names.
        """
        return self.desc.attr_names()

    def attr(self, name):
        """
        Get an attribute by name.

        Args:
            name(str): the attribute name.

        Returns(bool|int|str|float|list): The attribute value. The return
            value can be any valid attribute type.
        """
        return self.desc.attr(name)
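A sketch of attribute introspection; the "scale" operator and its float attribute of the same name are assumed here for illustration:

>>> sop = cur_block.append_op(type="scale",
>>>                           inputs={"X": [var1]},
>>>                           outputs={"Out": [var1]},
>>>                           attrs={"scale": 2.0})
>>> sop.has_attr("scale")       # True
>>> sop.attr("scale")           # 2.0
>>> "scale" in sop.attr_names   # True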
    def block_attr(self, name):
        """
        Get a block attribute by name.

        Args:
            name(str): the attribute name.

        Returns(int): the block index.
        """
        return self.desc.block_attr(name)
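Block attributes appear on control-flow operators that own a sub-block; `ctrl_op` and the attribute name "sub_block" are assumptions for illustration:

>>> block_id = ctrl_op.block_attr("sub_block")   # hypothetical control-flow op
>>> sub_block = cur_program.block(block_id)      # resolve the index to a Block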
@@ -479,7 +680,7 @@ class Block(object):
        """
        Copy the information of parameters from the other block.

        Args:
            other(Block): the other block.

        Returns:
            None
@@ -623,7 +824,7 @@ class Program(object):
    def copy_param_info_from(self, other):
        """
        Copy the information of parameters from the other program.

        Args:
            other(Program): the other program.
@@ -675,7 +876,7 @@ def default_startup_program():
    """
    Get the default startup program. In the startup program, Paddle will
    initialize parameters, initialize the nccl handle, etc.

    Returns:
        Program: startup program
    """
@@ -685,7 +886,7 @@ def default_startup_program():
def default_main_program():
    """
    Get the default main program. The main program is used for training or
    testing.

    Returns:
        Program: main program
    """
@@ -695,7 +896,7 @@ def default_main_program():
def switch_main_program(program):
    """
    Switch the main program to a new program.

    Args:
        program(Program): The new main program.
@@ -710,7 +911,7 @@ def switch_main_program(program):
def switch_startup_program(program):
    """
    Switch the startup program to a new program.

    Args:
        program(Program): The new startup program.
@@ -727,15 +928,15 @@ def switch_startup_program(program):
def program_guard(main_program, startup_program=None):
    """
    Switch programs with a `with` statement.

    Examples:
        >>> with program_guard(Program()):
        >>>     data = fluid.layers.data(...)
        >>>     hidden = fluid.layers.fc(...)

    Args:
        main_program(Program): The new main program inside the `with`
            statement.
        startup_program(Program): The new startup program inside the `with`
            statement. None means not changing the startup program.

    Returns:
...