From 584c9cfc82bedf7fd1e293b9e295d2fd5a196409 Mon Sep 17 00:00:00 2001
From: Yu Yang
Date: Thu, 7 Dec 2017 13:54:11 +0800
Subject: [PATCH] Add comments of unique_name, Variable, Operator (#6342)

---
 python/paddle/v2/fluid/framework.py | 231 ++++++++++++++++++++++++++--
 1 file changed, 216 insertions(+), 15 deletions(-)

diff --git a/python/paddle/v2/fluid/framework.py b/python/paddle/v2/fluid/framework.py
index e6e3190b991..bf0cd275b62 100644
--- a/python/paddle/v2/fluid/framework.py
+++ b/python/paddle/v2/fluid/framework.py
@@ -3,6 +3,7 @@ import collections
 import numpy as np
 from . import core
 import proto.framework_pb2 as framework_pb2
+import google.protobuf.message
 import contextlib
 
 __all__ = [
@@ -13,11 +14,28 @@ __all__ = [
 
 
 def unique_name(prefix):
+    """
+    Generate a unique name with the given prefix.
+
+    Args:
+        prefix(str): The prefix of the returned string.
+
+    Returns(str): A unique string starting with the prefix.
+
+    """
    uid = core.unique_integer(prefix)  # unique during whole process.
    return "_".join([prefix, str(uid)])
 
 
 def convert_np_dtype_to_dtype_(np_dtype):
+    """
+    Convert a numpy data type into the corresponding Paddle data type.
+    Args:
+        np_dtype(np.dtype): the data type in numpy.
+
+    Returns(core.DataType): the data type in Paddle.
+
+    """
     dtype = np.dtype(np_dtype)
     if dtype == np.float32:
         return core.DataType.FP32
@@ -38,17 +56,33 @@
 def dtype_is_floating(dtype):
+    """
+    Check whether the data type is a floating-point type.
+    Args:
+        dtype(np.dtype|core.DataType): the data type.
+            It can be in numpy format or Paddle format.
+
+    Returns(bool): True if the data type is a floating-point type.
+
+    """
     if not isinstance(dtype, core.DataType):
         dtype = convert_np_dtype_to_dtype_(dtype)
-    if (dtype == core.DataType.FP16 or dtype == core.DataType.FP32 or
-            dtype == core.DataType.FP64):
-        return True
-    else:
-        return False
+    return dtype in [core.DataType.FP16, core.DataType.FP32, core.DataType.FP64]
 
 
 def _debug_string_(proto, throw_on_error=True):
+    """
+    Get the debug string of a protobuf message. The message may not be
+    initialized.
+    Args:
+        proto(google.protobuf.message.Message): The protobuf message.
+        throw_on_error(bool): True to raise an error when the protobuf message
+            is not initialized.
+
+    Returns(str): The debug string of the protobuf message.
+
+    """
     error_fields = list()
     if not proto.IsInitialized(error_fields) and throw_on_error:
         raise ValueError("{0} are not initialized\nThe message is {1}".format(
@@ -57,6 +91,38 @@
 class Variable(object):
+    """
+    A Python variable. Every input and output of an operator is a variable.
+    Every variable belongs to a block. A variable has a name, and two
+    variables in different blocks can have the same name.
+
+    There are many kinds of variables. Please refer to framework.proto for
+    details.
+
+    Notes: The constructor of Variable should not be invoked directly. Please
+    use `Block.create_var` to create a variable.
+
+    >>> cur_program = Program()
+    >>> cur_block = cur_program.current_block()
+    >>> new_variable = cur_block.create_var(
+    >>>     name="X", shape=[-1, 23, 48], dtype='float32')
+
+    Args:
+        block(Block): The associated block. It will be passed by
+            `Block.create_var` automatically.
+        type(core.VarDesc.VarType): Variable type. Please refer to
+            framework.proto for details.
+        shape(tuple|list|None): The shape of the variable. -1 means the batch
+            size. Some kinds of variables have no shape; just set it to None.
+        dtype(np.dtype|core.DataType|str): The data type of the variable.
+        lod_level(int): The level of the LoD tensor. 0 means it is not time
+            series data.
+        persistable(bool): True if the variable should be saved as a
+            checkpoint. Defaults to False.
+        stop_gradient(bool): True if the variable stops computing gradients
+            during backward propagation. Defaults to False.
+    """
+
     def __init__(self,
                  block,
                  type=core.VarDesc.VarType.LOD_TENSOR,
@@ -140,6 +206,16 @@ class Variable(object):
         return self.to_string(True)
 
     def to_string(self, throw_on_error):
+        """
+        Get the debug string.
+
+        Args:
+            throw_on_error(bool): True to raise an exception when self is not
+                initialized.
+
+        Returns(str): The debug string.
+
+        """
         protostr = self.desc.serialize_to_string()
         proto = framework_pb2.VarDesc.FromString(str(protostr))
         return _debug_string_(proto, throw_on_error)
@@ -185,7 +261,9 @@
 def get_all_op_protos():
     """
     Get all registered op proto from PaddlePaddle C++ end.
-    :return: A list of registered OpProto.
+
+    Returns(list): A list of registered OpProto.
+
     """
     protostrs = core.get_all_op_protos()
     ret_values = []
@@ -196,6 +274,10 @@
 class OpProtoHolder(object):
+    """
+    A global variable that holds all OpProtos from C++ as a map.
+    """
+
     @classmethod
     def instance(cls):
         if not hasattr(cls, '_instance'):
@@ -212,12 +294,26 @@
         self.op_proto_map[proto.type] = proto
 
     def get_op_proto(self, type):
+        """
+        Get an OpProto by a type string.
+        Args:
+            type(str): The type under which the operator is registered in C++.
+
+        Returns(framework_pb2.OpProto): The OpProto.
+
+        """
         if type not in self.op_proto_map:
             raise ValueError("Operator \"%s\" has not been registered." % type)
         return self.op_proto_map[type]
 
 
 class Operator(object):
+    """
+    Python Operator class. An operator represents a built-in instruction in a
+    Block. Users use these built-in instructions to describe their neural
+    network.
+    """
+
     def __init__(self,
                  block,
                  desc,
                  type=None,
                  inputs=None,
                  outputs=None,
                  attrs=None):
+        """
+        Constructor.
+
+        Notes: The constructor of Operator should not be invoked directly. Use
+        Block.append_op or Block.prepend_op instead.
+
+        >>> cur_program = Program()
+        >>> cur_block = cur_program.current_block()
+        >>> # var1 += var2 + var3
+        >>> cur_block.append_op(type="sum",
+        >>>                     inputs={"X": [var1, var2, var3]},
+        >>>                     outputs={"Out": [var1]})
+
+        Args:
+            block(Block): The block that contains the current operator.
+            desc(core.OpDesc): The protobuf description of the operator.
+            type(str): The type of the operator.
+            inputs(dict): The input dictionary. Each key is an input parameter
+                name; each value is a list of variables.
+            outputs(dict): The output dictionary. Has the same format as inputs.
+            attrs(dict): The attribute dictionary. Each key is an attribute
+                name; each value is the attribute value. The attribute type
+                must be the same as the type registered in C++.
+        """
         self.block = block
         self.desc = desc
         if len(self.desc.type()) != 0:
@@ -311,6 +431,15 @@
         self.desc.infer_shape(self.block.desc)
 
     def to_string(self, throw_on_error):
+        """
+        Get the debug string.
+        Args:
+            throw_on_error(bool): True to raise an exception when self is
+                not initialized.
+
+        Returns(str): The debug string.
+
+        """
         protostr = self.desc.serialize_to_string()
         proto = framework_pb2.OpDesc.FromString(str(protostr))
         return _debug_string_(proto, throw_on_error)
@@ -325,21 +454,55 @@
         return self.desc.type()
 
     def input(self, name):
+        """
+        Get input arguments by the input parameter name.
+        Args:
+            name(str): The input parameter name.
+
+        Returns(list): the list of argument names associated with the given
+            parameter name.
+
+        """
         return self.desc.input(name)
 
     @property
     def input_names(self):
+        """
+        Get all input parameter names.
+        Returns(list): a list of input parameter names.
+
+        """
         return self.desc.input_names()
 
     def output(self, name):
+        """
+        Get output arguments by the output parameter name.
+        Args:
+            name(str): The output parameter name.
+
+        Returns(list): the list of argument names associated with the given
+            parameter name.
+
+        """
         return self.desc.output(name)
 
     @property
     def output_names(self):
+        """
+        Get all output parameter names.
+        Returns(list): a list of output parameter names.
+
+        """
         return self.desc.output_names()
 
     @property
     def idx(self):
+        """
+        Return the array index of the current operator.
+        Returns(int): The index in the block.ops array.
+        Raises:
+            ValueError: when the operator is not found.
+        """
         for i, op in enumerate(self.block.ops):
             if op == self:
                 return i
@@ -347,19 +510,57 @@
         raise ValueError(
             "Can't find op itself in it's block. It could be a bug of Paddle.")
 
     def has_attr(self, name):
+        """
+        Check whether the operator has the attribute with the given name.
+        Args:
+            name(str): the attribute name.
+
+        Returns(bool): True if the operator has this attribute.
+
+        """
         return self.desc.has_attr(name)
 
     def attr_type(self, name):
+        """
+        Get the type of the attribute with the given name.
+        Args:
+            name(str): the attribute name.
+
+        Returns(core.AttrType): the attribute type.
+
+        """
         return self.desc.attr_type(name)
 
     @property
     def attr_names(self):
+        """
+        Get all attribute names.
+        Returns(list): The list of attribute names.
+
+        """
         return self.desc.attr_names()
 
     def attr(self, name):
+        """
+        Get the attribute with the given name.
+        Args:
+            name(str): the attribute name.
+
+        Returns(bool|int|str|float|list): The attribute value. The return
+            value can be any valid attribute type.
+
+        """
         return self.desc.attr(name)
 
     def block_attr(self, name):
+        """
+        Get the block attribute with the given name.
+        Args:
+            name(str): the attribute name.
+
+        Returns(int): the block index.
+
+        """
         return self.desc.block_attr(name)
@@ -479,7 +680,7 @@
     """
     Copy the information of parameters from other block
     Args:
-        other(Block): other block 
+        other(Block): other block
 
     Returns:
         None
@@ -623,7 +824,7 @@
     def copy_param_info_from(self, other):
         """
-        Copy the information of parameters from other program. 
+        Copy the information of parameters from other program.
 
         Args:
             other(Program): Other program
@@ -675,7 +876,7 @@
 def default_startup_program():
     """
     Get default startup program. In startup program, Paddle will initialize
     parameters, initialize nccl handle, etc.
-    
+
     Returns:
         Program: startup program
     """
@@ -685,7 +886,7 @@
 def default_main_program():
     """
     Get default main program. The main program is used for training or testing.
-    
+
     Returns:
         Program: main program
     """
@@ -695,7 +896,7 @@
 def switch_main_program(program):
     """
     Switch the main program to a new program.
-    
+
     Args:
         program(Program): The new main program
 
@@ -710,7 +911,7 @@
 def switch_startup_program(program):
     """
-    Switch the startup program to a new program 
+    Switch the startup program to a new program
 
     Args:
         program(Program): The new startup program
@@ -727,15 +928,15 @@
 def program_guard(main_program, startup_program=None):
     """
     Switch program with `with` statement
-    
+
     Examples:
         >>> with program_guard(Program()):
        >>>     data = fluid.layers.data(...)
        >>>     hidden = fluid.layers.fc(...)
-    
+
     Args:
         main_program(Program): New main program inside `with` statement
-        startup_program(Program): New startup program inside `with` statement. 
+        startup_program(Program): New startup program inside `with` statement.
             None means do not change startup program.
 
     Returns:
--
GitLab
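
A minimal usage sketch, not part of the patch, that strings together the APIs
documented above. It assumes paddle.v2.fluid.framework is importable and that
a "sum" operator is registered (as the Operator doctest implies); the names
x, y, and out are illustrative only, and the assumption that Block.append_op
returns the created Operator is noted in a comment.

    from paddle.v2.fluid.framework import Program, unique_name

    prog = Program()
    block = prog.current_block()

    # Block.create_var is the documented way to build variables;
    # -1 marks the batch dimension, as the Variable docstring notes.
    x = block.create_var(name=unique_name("x"), shape=[-1, 23, 48],
                         dtype='float32')
    y = block.create_var(name=unique_name("y"), shape=[-1, 23, 48],
                         dtype='float32')
    out = block.create_var(name=unique_name("out"), shape=[-1, 23, 48],
                           dtype='float32')

    # inputs/outputs map parameter names to lists of variables, as the
    # Operator constructor docstring describes: out = x + y.
    # Assumes append_op returns the created Operator.
    op = block.append_op(type="sum",
                         inputs={"X": [x, y]},
                         outputs={"Out": [out]})

    print(op.type)              # "sum"
    print(op.input("X"))        # argument names bound to parameter "X"
    print(out.to_string(True))  # debug string; raises if the underlying
                                # protobuf message is not initialized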