From 7c7cdf082f7dba6ef16c074e1fb1218489fb7823 Mon Sep 17 00:00:00 2001
From: Chen Weihang
Date: Mon, 23 Nov 2020 19:47:39 +0800
Subject: [PATCH] remove fluid & variable in program (#28966)

---
 python/paddle/fluid/framework.py | 30 ++++++++++++++++++------------
 1 file changed, 18 insertions(+), 12 deletions(-)

diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index a0d03111aae..49c5f9f5b8e 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -1297,9 +1297,12 @@ class Variable(object):
         Examples:
             .. code-block:: python
 
-                import paddle.fluid as fluid
+                import paddle
+                import paddle.static as static
 
-                cur_program = fluid.Program()
+                paddle.enable_static()
+
+                cur_program = static.Program()
                 cur_block = cur_program.current_block()
                 new_variable = cur_block.create_var(name="X",
                                                     shape=[-1, 23, 48],
@@ -1307,10 +1310,10 @@ class Variable(object):
             print(new_variable._to_readable_code())
         """
         if self.type == core.VarDesc.VarType.SELECTED_ROWS or self.type == core.VarDesc.VarType.LOD_TENSOR:
-            var_str = "{name} : fluid.{type}.shape{shape}.astype({dtype})".\
+            var_str = "{name} : paddle.{type}.shape{shape}.astype({dtype})".\
                 format(i="{", e="}", name=self.name, type=self.type, shape=self.shape, dtype=self.dtype)
         else:
-            var_str = "{name} : fluid.{type})".\
+            var_str = "{name} : paddle.{type}".\
                 format(i="{", e="}", name=self.name, type=self.type)
 
         if type(self) == Parameter:
@@ -4270,9 +4273,12 @@ class Program(object):
         Examples:
             .. code-block:: python
 
-                import paddle.fluid as fluid
+                import paddle
+                import paddle.static as static
 
-                cur_program = fluid.Program()
+                paddle.enable_static()
+
+                cur_program = static.Program()
                 cur_block = cur_program.current_block()
                 new_var = cur_block.create_var(name="X",
                                                shape=[-1, 23, 48],
@@ -4470,7 +4476,7 @@ class Program(object):
 
         # Due to parameter sharing usage for train and test, so we need to use startup program of train
         # instead of using test startup program, while nothing is in test's startup program
-        # In Paddle Fluid we will share weights by using the same Variable name. In train and test program
+        # In Paddle we will share weights by using the same Tensor name. In train and test program
         # all parameters will have the same name and this can make train and test program sharing parameters,
         # that's why we need to use startup program of train. And for startup program of test, it has nothing,
         # since it is a new program.
@@ -4823,7 +4829,7 @@ class Program(object):
             ## 0
             ## the default random seed is 0
 
-            # Here we need to set random seed before we use fluid.layers.dropout
+            # Here we need to set random seed before we use paddle.nn.functional.dropout
             prog.random_seed = 1
             z_var = F.dropout(x_var, 0.7)
 
@@ -5098,8 +5104,8 @@ class Program(object):
             for var in prog.list_vars():
                 print(var)
 
-            # var img : fluid.VarType.LOD_TENSOR.shape(-1, 1, 28, 28).astype(VarType.FP32)
-            # var label : fluid.VarType.LOD_TENSOR.shape(-1, 1).astype(VarType.INT64)
+            # var img : paddle.VarType.LOD_TENSOR.shape(-1, 1, 28, 28).astype(VarType.FP32)
+            # var label : paddle.VarType.LOD_TENSOR.shape(-1, 1).astype(VarType.INT64)
         """
         for each_block in self.blocks:
             for each_var in list(each_block.vars.values()):
@@ -5132,8 +5138,8 @@ class Program(object):
             # Here will print all parameters in current program, in this example,
             # the result is like:
            #
-            # persist trainable param fc_0.w_0 : fluid.VarType.LOD_TENSOR.shape(13, 10).astype(VarType.FP32)
-            # persist trainable param fc_0.b_0 : fluid.VarType.LOD_TENSOR.shape(10,).astype(VarType.FP32)
+            # persist trainable param fc_0.w_0 : paddle.VarType.LOD_TENSOR.shape(13, 10).astype(VarType.FP32)
+            # persist trainable param fc_0.b_0 : paddle.VarType.LOD_TENSOR.shape(10,).astype(VarType.FP32)
             #
             # Here print(param) will print out all the properties of a parameter,
             # including name, type and persistable, you can access to specific
-- 
GitLab
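
A minimal, self-contained sketch of the new-style usage these docstrings migrate to, assuming Paddle 2.0+ where paddle.static replaces the paddle.fluid entry points (all names below come from the patched docstrings themselves):

    import paddle
    import paddle.static as static

    # The docstring examples above run in static-graph mode.
    paddle.enable_static()

    cur_program = static.Program()
    cur_block = cur_program.current_block()
    new_var = cur_block.create_var(name="X",
                                   shape=[-1, 23, 48],
                                   dtype='float32')

    # With this patch, _to_readable_code() emits the "paddle." prefix, e.g.
    # "X : paddle.VarType.LOD_TENSOR.shape(-1, 23, 48).astype(VarType.FP32)"
    print(new_var._to_readable_code())

    # list_vars() walks every variable in every block of the program.
    for var in cur_program.list_vars():
        print(var)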
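
The random_seed comment updated above (set the seed before paddle.nn.functional.dropout is built) can be exercised with the sketch below; static.data, the shape, and the variable names are illustrative assumptions, not taken from the patch:

    import paddle
    import paddle.static as static
    import paddle.nn.functional as F

    paddle.enable_static()

    prog = static.default_main_program()
    # The seed must be assigned before the op that consumes randomness
    # (here dropout) is added to the program.
    prog.random_seed = 1

    x_var = static.data(name="X", shape=[3, 3], dtype="float32")
    z_var = F.dropout(x_var, 0.7)  # draws from the generator seeded above
    print(prog.random_seed)  # 1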