From 2ea6f478419566e14f0b0dea6b881fc8536884b0 Mon Sep 17 00:00:00 2001
From: qiaolongfei
Date: Mon, 4 Sep 2017 11:18:59 -0700
Subject: [PATCH] update the design of variable

---
 doc/design/var_desc.md | 28 ++++++++++++++++++++++------
 1 file changed, 22 insertions(+), 6 deletions(-)

diff --git a/doc/design/var_desc.md b/doc/design/var_desc.md
index 3dc25c23c03..db6ea2b2e48 100644
--- a/doc/design/var_desc.md
+++ b/doc/design/var_desc.md
@@ -47,7 +47,7 @@ message LoDTensorDesc {
 
 ## Definition of Variable in Python
 
-In Python API, layer will take Variable as Input, and return Variable as Output.
+In the Python API, a layer takes Variables as input and returns Variables as output. There should be a class `Variable` in Python to help create and manage Variables.
 
 ```python
 image = Variable(dims=[-1, 640, 480])
@@ -55,27 +55,43 @@ image = Variable(dims=[-1, 640, 480])
 fc1 = layer.fc(input=image, output_size=10)
 fc2 = layer.fc(input=fc1, output_size=20)
 ```
-
-There should be a class `Variable` in python to help create and manage Variable.
+### What should class `Variable` have?
+1. `name`. A name of string type used to identify the Variable.
+1. `initializer`. Our Tensor holds no value until runtime, so some Operator must fill it when the program runs. We therefore need an initialize method that helps add the init operator.
+1. `operator`. A Variable should record which operator produces it. The reason is:
+   - we use `pd.eval(targets=[var1, var2])` to run the related ops and get the values of `var1` and `var2`; `var.op` is used to trace the dependencies of the current variable.
 
 ```python
 import VarDesc
 import LoDTensorDesc
 import framework
 
+def AddInitialOperator(variable, initializer):
+    # add an initialize Operator to the graph to init this Variable
+
 class Variable(object):
-    def __init__(self, name, dims, type):
+    def __init__(self, name, dims, type, initializer):
+        self._graph = get_default_graph()
         self._name = name
         self.op = None
+
         tensor_desc = LoDTensorDesc(data_type=type, dims=dims)
         _var_desc = VarDesc(name=name, lod_tensor=tensor_desc)
         self._var = framework.CreateVar(_var_desc)
+        self._graph.add_var(self)
+
+        # add the init op according to the initializer
+        if initializer is not None:
+            AddInitialOperator(self, initializer)
 
     def dims(self):
         return self._var.dims()
 
     def data_type(self):
         return self._var.data_type()
+
+    def to_proto(self):
+        pass
 ```
 
 Then we can use this Variable to create a fc layer in Python.
@@ -90,8 +106,8 @@ def flatten_size(X, num_flatten_dims):
   return prod
 
 def layer.fc(X, output_size, num_flatten_dims):
-  W = Variable(type=FP32, dims=[flatten_size(X, num_flatten_dims), output_size])
-  b = Variable(type=FP32, dims=[output_size])
+  W = Variable(pd.random_uniform(), type=FP32, dims=[flatten_size(X, num_flatten_dims), output_size])
+  b = Variable(pd.random_uniform(), type=FP32, dims=[output_size])
   out = Variable(type=FP32)
   y = operator.fc(X, W, b, output=out) # fc will put fc op input into out
   pd.InferShape(y)
-- 
GitLab
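
The patch leaves the body of `AddInitialOperator` as a comment. Below is a minimal sketch of one way it could be filled in. It assumes, hypothetically, that an initializer object such as the one returned by `pd.random_uniform()` can build an init operator for a given output Variable via an `apply` method, and that the graph exposes an `add_op` method; neither is specified by the design above.

```python
# Hypothetical completion of AddInitialOperator. `initializer.apply(...)` and
# `Graph.add_op(...)` are assumed interfaces, not part of the design text.
def AddInitialOperator(variable, initializer):
    # Build an operator (e.g. a uniform random fill) whose output is `variable`.
    init_op = initializer.apply(output=variable)
    # Register it on the variable's graph so it runs before any op reads `variable`.
    variable._graph.add_op(init_op)
    return init_op
```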
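
To make the `var.op` bullet more concrete, here is a minimal sketch of how `pd.eval(targets=[...])` could follow the `op` links backwards to collect the operators it needs to run. The `op.inputs` attribute and the traversal itself are illustrative assumptions, not an API defined in this design.

```python
# Hypothetical sketch: trace the operators needed to evaluate a set of target
# Variables by walking var.op backwards. `op.inputs` (the Variables an
# operator reads) is an assumed attribute.
def collect_ops(targets):
    ordered = []     # operators in a valid execution order
    visited = set()  # ids of operators already scheduled

    def visit(var):
        op = var.op
        if op is None or id(op) in visited:
            return  # source Variable (data/parameter) or operator already handled
        visited.add(id(op))
        for dep in op.inputs:   # each input is itself a Variable
            visit(dep)
        ordered.append(op)      # post-order: dependencies come first

    for target in targets:
        visit(target)
    return ordered
```

`pd.eval` could then run the returned operators in order and fetch the tensors held by the target Variables.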