未验证 提交 e9b18c74 编写于 作者: H Huihuang Zheng 提交者: GitHub

Rename Decorator "dygraph_to_static_graph" to "dygraph_to_static_func" (#23150)

This PR does exactly what the title says. The reason is that we plan to develop 4 decorators:

"dygraph_to_static_code"
"dygraph_to_static_program"
"dygraph_to_static_func"
"dygraph_to_static_output"

The 4 decorators will each emphasize a different part of translating dygraph to static graph. The decorator name "dygraph_to_static_graph" is too broad for the function it implements.
上级 53e6f8e1
......@@ -42,7 +42,7 @@ from paddle.fluid.dygraph.dygraph_to_static.utils import dygraph_class_to_static
__all__ = ['DygraphToStaticAst', 'convert_to_static']
DECORATOR_NAMES = ['dygraph_to_static_output', 'dygraph_to_static_graph']
DECORATOR_NAMES = ['dygraph_to_static_output', 'dygraph_to_static_func']
class DygraphToStaticAst(gast.NodeTransformer):
......
......@@ -15,7 +15,7 @@
from __future__ import print_function
__all__ = [
'TracedLayer', 'dygraph_to_static_code', 'dygraph_to_static_graph',
'TracedLayer', 'dygraph_to_static_code', 'dygraph_to_static_func',
'dygraph_to_static_output'
]
......@@ -70,11 +70,11 @@ def _dygraph_to_static_code_(dygraph_func):
dygraph_to_static_code = wrap_decorator(_dygraph_to_static_code_)
def _dygraph_to_static_graph_(dygraph_func):
def _dygraph_to_static_func_(dygraph_func):
def __impl__(*args, **kwargs):
if in_dygraph_mode():
warnings.warn(
"The decorator 'dygraph_to_static_graph' doesn't work in dygraph mode."
"The decorator 'dygraph_to_static_func' doesn't work in dygraph mode."
" Please use it in static mode.")
return dygraph_func(*args, **kwargs)
program_translator = ProgramTranslator()
......@@ -84,7 +84,7 @@ def _dygraph_to_static_graph_(dygraph_func):
return __impl__
dygraph_to_static_graph = wrap_decorator(_dygraph_to_static_graph_)
dygraph_to_static_func = wrap_decorator(_dygraph_to_static_func_)
def _dygraph_to_static_output_(dygraph_func):
......
......@@ -15,7 +15,7 @@
from __future__ import print_function
import paddle.fluid as fluid
from paddle.fluid.dygraph.jit import dygraph_to_static_graph
from paddle.fluid.dygraph.jit import dygraph_to_static_func
def add_fn(x):
......@@ -141,7 +141,7 @@ class NetWithControlFlowIf(fluid.dygraph.Layer):
self.alpha = 10.
self.constant_vars = {}
@dygraph_to_static_graph
@dygraph_to_static_func
def forward(self, input):
hidden_dim = input.shape[-1]
if hidden_dim != self.hidden_dim:
......
......@@ -21,8 +21,9 @@ import gast
import paddle.fluid as fluid
import paddle.fluid.dygraph as dygraph
from paddle.fluid.dygraph import to_variable
from paddle.fluid.dygraph.jit import dygraph_to_static_graph
from paddle.fluid.dygraph.jit import dygraph_to_static_func
from paddle.fluid.dygraph.dygraph_to_static.utils import is_dygraph_api
SEED = 2020
......@@ -62,7 +63,7 @@ class TestDygraphBasicApi_ToVariable(unittest.TestCase):
main_program = fluid.Program()
main_program.random_seed = SEED
with fluid.program_guard(main_program):
static_out = dygraph_to_static_graph(self.dygraph_func)(self.input)
static_out = dygraph_to_static_func(self.dygraph_func)(self.input)
exe = fluid.Executor(self.place)
static_res = exe.run(main_program, fetch_list=static_out)
......@@ -205,7 +206,7 @@ class TestDygraphBasicApi(unittest.TestCase):
main_program.random_seed = SEED
with fluid.program_guard(main_program, startup_program):
data = fluid.layers.assign(self.input)
static_out = dygraph_to_static_graph(self.dygraph_func)(data)
static_out = dygraph_to_static_func(self.dygraph_func)(data)
exe = fluid.Executor(fluid.CPUPlace())
exe.run(startup_program)
......@@ -240,8 +241,8 @@ class TestDygraphBasicApi_BilinearTensorProduct(TestDygraphBasicApi):
main_program = fluid.Program()
main_program.random_seed = SEED
with fluid.program_guard(main_program, startup_program):
static_out = dygraph_to_static_graph(self.dygraph_func)(self.input1,
self.input2)
static_out = dygraph_to_static_func(self.dygraph_func)(self.input1,
self.input2)
exe = fluid.Executor(fluid.CPUPlace())
exe.run(startup_program)
......@@ -367,7 +368,7 @@ class TestDygraphBasicApi_CosineDecay(unittest.TestCase):
main_program = fluid.Program()
main_program.random_seed = SEED
with fluid.program_guard(main_program, startup_program):
static_out = dygraph_to_static_graph(self.dygraph_func)()
static_out = dygraph_to_static_func(self.dygraph_func)()
exe = fluid.Executor(fluid.CPUPlace())
exe.run(startup_program)
......
......@@ -17,7 +17,7 @@ from __future__ import print_function
import unittest
import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph.jit import dygraph_to_static_graph
from paddle.fluid.dygraph.jit import dygraph_to_static_func
SEED = 2020
np.random.seed(SEED)
......@@ -142,7 +142,7 @@ class TestContinueInFor(unittest.TestCase):
def run_static_mode(self):
main_program = fluid.Program()
with fluid.program_guard(main_program):
res = dygraph_to_static_graph(self.dygraph_func)(self.input)
res = dygraph_to_static_func(self.dygraph_func)(self.input)
exe = fluid.Executor(self.place)
static_res = exe.run(main_program, fetch_list=[res])
......
......@@ -19,7 +19,7 @@ import numpy as np
import unittest
import paddle.fluid as fluid
from paddle.fluid.dygraph.jit import dygraph_to_static_graph
from paddle.fluid.dygraph.jit import dygraph_to_static_func
PLACE = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace(
)
......@@ -47,7 +47,7 @@ class SubNetWithDict(fluid.dygraph.Layer):
bias_attr=False,
param_attr=init_weight(0.2))
@dygraph_to_static_graph
@dygraph_to_static_func
def forward(self, input, cache=None):
input = fluid.dygraph.to_variable(input)
......@@ -76,7 +76,7 @@ class MainNetWithDict(fluid.dygraph.Layer):
self.output_size = output_size
self.sub_net = SubNetWithDict(hidden_size, output_size)
@dygraph_to_static_graph
@dygraph_to_static_func
def forward(self, input, max_len=4):
input = fluid.dygraph.to_variable(input)
cache = {
......
......@@ -18,7 +18,7 @@ import numpy as np
import paddle.fluid as fluid
import unittest
from paddle.fluid.dygraph.jit import dygraph_to_static_graph
from paddle.fluid.dygraph.jit import dygraph_to_static_func
from ifelse_simple_func import *
......@@ -45,7 +45,7 @@ class TestDygraphIfElse(unittest.TestCase):
with fluid.program_guard(main_program):
x_v = fluid.layers.assign(self.x)
# Transform into static graph
out = dygraph_to_static_graph(self.dyfunc)(x_v)
out = dygraph_to_static_func(self.dyfunc)(x_v)
exe = fluid.Executor(place)
ret = exe.run(main_program, fetch_list=out)
return ret
......@@ -166,7 +166,7 @@ class TestAst2FuncWithExternalFunc(TestDygraphIfElse):
class NetWithExternalFunc(fluid.dygraph.Layer):
@dygraph_to_static_graph
@dygraph_to_static_func
def forward(self, x, label=None):
if fluid.layers.mean(x).numpy()[0] > 5:
x_v = x - 1
......
......@@ -17,7 +17,7 @@ from __future__ import print_function
import unittest
import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph.jit import dygraph_to_static_graph
from paddle.fluid.dygraph.jit import dygraph_to_static_func
SEED = 2020
np.random.seed(SEED)
......@@ -110,7 +110,7 @@ class TestListWithoutControlFlow(unittest.TestCase):
def run_static_mode(self):
main_program = fluid.Program()
with fluid.program_guard(main_program):
tensor_list = dygraph_to_static_graph(self.dygraph_func)(self.input)
tensor_list = dygraph_to_static_func(self.dygraph_func)(self.input)
exe = fluid.Executor(self.place)
static_res = exe.run(main_program, fetch_list=tensor_list[0])
......@@ -132,8 +132,7 @@ class TestListInIf(TestListWithoutControlFlow):
def run_static_mode(self):
main_program = fluid.Program()
with fluid.program_guard(main_program):
tensor_array = dygraph_to_static_graph(self.dygraph_func)(
self.input)
tensor_array = dygraph_to_static_func(self.dygraph_func)(self.input)
static_out = fluid.layers.array_read(
tensor_array,
i=fluid.layers.fill_constant(
......@@ -163,7 +162,7 @@ class TestListInWhileLoop(TestListWithoutControlFlow):
def run_static_mode(self):
main_program = fluid.Program()
with fluid.program_guard(main_program):
tensor_array = dygraph_to_static_graph(self.dygraph_func)(
tensor_array = dygraph_to_static_func(self.dygraph_func)(
self.input, self.iter_num)
static_outs = []
for i in range(self.iter_num):
......@@ -191,8 +190,8 @@ class TestListInWhileLoopWithStack(TestListInWhileLoop):
def run_static_mode(self):
main_program = fluid.Program()
with fluid.program_guard(main_program):
out_var = dygraph_to_static_graph(self.dygraph_func)(self.input,
self.iter_num)
out_var = dygraph_to_static_func(self.dygraph_func)(self.input,
self.iter_num)
exe = fluid.Executor(self.place)
numpy_res = exe.run(main_program, fetch_list=out_var)
return numpy_res[0]
......
......@@ -20,7 +20,7 @@ import numpy as np
import paddle.fluid as fluid
import unittest
from paddle.fluid.dygraph.jit import dygraph_to_static_graph
from paddle.fluid.dygraph.jit import dygraph_to_static_func
from paddle.fluid.dygraph.dygraph_to_static.loop_transformer import NameVisitor
SEED = 2020
......@@ -90,7 +90,7 @@ class TestTransformWhileLoop(unittest.TestCase):
main_program = fluid.Program()
with fluid.program_guard(main_program):
x_var = fluid.layers.assign(self.x)
static_func = dygraph_to_static_graph(self.dyfunc)
static_func = dygraph_to_static_func(self.dyfunc)
out = static_func(x_var)
exe = fluid.Executor(self.place)
......@@ -133,7 +133,7 @@ class TestTransformForLoop(unittest.TestCase):
def _run_static(self):
main_program = fluid.Program()
with fluid.program_guard(main_program):
static_func = dygraph_to_static_graph(self.dyfunc)
static_func = dygraph_to_static_func(self.dyfunc)
out = static_func(self.len)
exe = fluid.Executor(self.place)
ret = exe.run(main_program, fetch_list=out)
......
......@@ -20,7 +20,7 @@ import paddle.fluid as fluid
from paddle.fluid.optimizer import AdamOptimizer
from paddle.fluid.dygraph.nn import Conv2D, Pool2D, Linear
from paddle.fluid.dygraph.jit import dygraph_to_static_graph
from paddle.fluid.dygraph.jit import dygraph_to_static_func
import unittest
......@@ -66,7 +66,7 @@ class SimpleImgConvPool(fluid.dygraph.Layer):
global_pooling=global_pooling,
use_cudnn=use_cudnn)
@dygraph_to_static_graph
@dygraph_to_static_func
def forward(self, inputs):
x = self._conv2d(inputs)
x = self._pool2d(x)
......@@ -94,7 +94,7 @@ class MNIST(fluid.dygraph.Layer):
loc=0.0, scale=scale)),
act="softmax")
@dygraph_to_static_graph
@dygraph_to_static_func
def forward(self, inputs, label=None):
x = self.inference(inputs)
if label is not None:
......@@ -105,7 +105,7 @@ class MNIST(fluid.dygraph.Layer):
else:
return x
@dygraph_to_static_graph
@dygraph_to_static_func
def inference(self, inputs):
x = self._simple_img_conv_pool_1(inputs)
x = self._simple_img_conv_pool_2(x)
......@@ -128,7 +128,7 @@ class TestMNIST(unittest.TestCase):
class TestMNISTWithStaticMode(TestMNIST):
"""
Tests model when using `dygraph_to_static_graph` to convert dygraph into static
Tests model when using `dygraph_to_static_func` to convert dygraph into static
model. It allows user to add customized code to train static model, such as `with`
and `Executor` statement.
"""
......
......@@ -30,7 +30,7 @@ from __future__ import print_function
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.nn import Conv2D, Pool2D, BatchNorm, Linear
from paddle.fluid.dygraph.jit import dygraph_to_static_graph
from paddle.fluid.dygraph.jit import dygraph_to_static_func
import unittest
import time
......@@ -93,7 +93,7 @@ class ConvBNLayer(fluid.dygraph.Layer):
self._batch_norm = BatchNorm(num_filters, act=act)
@dygraph_to_static_graph
@dygraph_to_static_func
def forward(self, inputs):
y = self._conv(inputs)
y = self._batch_norm(y)
......@@ -133,7 +133,7 @@ class BottleneckBlock(fluid.dygraph.Layer):
self._num_channels_out = num_filters * 4
@dygraph_to_static_graph
@dygraph_to_static_func
def forward(self, inputs):
y = self.conv0(inputs)
conv1 = self.conv1(y)
......@@ -203,7 +203,7 @@ class ResNet(fluid.dygraph.Layer):
param_attr=fluid.param_attr.ParamAttr(
initializer=fluid.initializer.Uniform(-stdv, stdv)))
@dygraph_to_static_graph
@dygraph_to_static_func
def forward(self, inputs, label):
y = self.conv(inputs)
y = self.pool2d_max(y)
......
......@@ -17,7 +17,7 @@ from __future__ import print_function
import unittest
import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph.jit import dygraph_to_static_graph
from paddle.fluid.dygraph.jit import dygraph_to_static_func
SEED = 2020
np.random.seed(SEED)
......@@ -97,7 +97,7 @@ class TestSliceWithoutControlFlow(unittest.TestCase):
def run_static_mode(self):
main_program = fluid.Program()
with fluid.program_guard(main_program):
tensor_list = dygraph_to_static_graph(self.dygraph_func)(self.input)
tensor_list = dygraph_to_static_func(self.dygraph_func)(self.input)
exe = fluid.Executor(self.place)
static_res = exe.run(main_program, fetch_list=tensor_list[0])
......@@ -119,8 +119,7 @@ class TestSliceInIf(TestSliceWithoutControlFlow):
def run_static_mode(self):
main_program = fluid.Program()
with fluid.program_guard(main_program):
tensor_array = dygraph_to_static_graph(self.dygraph_func)(
self.input)
tensor_array = dygraph_to_static_func(self.dygraph_func)(self.input)
static_out = fluid.layers.array_read(
tensor_array,
i=fluid.layers.fill_constant(
......@@ -150,7 +149,7 @@ class TestSliceInWhileLoop(TestSliceWithoutControlFlow):
def run_static_mode(self):
main_program = fluid.Program()
with fluid.program_guard(main_program):
tensor_array = dygraph_to_static_graph(self.dygraph_func)(
tensor_array = dygraph_to_static_func(self.dygraph_func)(
self.input, self.iter_num)
static_outs = []
for i in range(self.iter_num):
......
......@@ -18,7 +18,7 @@ import numpy
import unittest
import paddle.fluid as fluid
from paddle.fluid.dygraph.jit import dygraph_to_static_graph
from paddle.fluid.dygraph.jit import dygraph_to_static_func
def dyfunc_tensor_shape_1(x):
......@@ -179,7 +179,7 @@ class TestTensorShapeBasic(unittest.TestCase):
def get_static_output(self):
main_program = fluid.Program()
with fluid.program_guard(main_program):
static_out = dygraph_to_static_graph(self.dygraph_func)(self.input)
static_out = dygraph_to_static_func(self.dygraph_func)(self.input)
exe = fluid.Executor(self.place)
static_res = exe.run(main_program, fetch_list=static_out)
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册