Unverified  Commit fa0f6938 authored by H hjyp, committed by GitHub

[Dy2St] Remove the declarative API (#49145)

* Remove the declarative API

* Fix CI errors and clean up comments
Parent bfd0faef
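For downstream code the change is a pure rename of the decorator: `paddle.jit.api.declarative` becomes `paddle.jit.api.to_static` (already re-exported as `paddle.jit.to_static`). A minimal migration sketch, assuming a toy `SimpleNet` layer that is illustrative and not part of this diff:

```python
import numpy as np
import paddle
from paddle.static import InputSpec


class SimpleNet(paddle.nn.Layer):
    def __init__(self):
        super().__init__()
        self.linear = paddle.nn.Linear(10, 3)

    # Before this commit:
    #   from paddle.jit.api import declarative
    #   @declarative(input_spec=[InputSpec(shape=[None, 10], dtype='float32')])
    # After this commit, only the `to_static` name remains:
    @paddle.jit.to_static(input_spec=[InputSpec(shape=[None, 10], dtype='float32')])
    def forward(self, x):
        return self.linear(x)


net = SimpleNet()
out = net(paddle.to_tensor(np.ones([4, 10], dtype='float32')))
print(out.shape)  # [4, 3]
```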
......@@ -116,7 +116,7 @@ def check_type(input, input_name, expected_type, op_name, extra_message=''):
return
# NOTE: `in_declarative_mode` is used to determine whether this op is called under
# @declarative in transformation from dygraph to static layer. We add VarBase in
# @to_static in transformation from dygraph to static layer. We add VarBase in
# expected_type to skip checking because varBase may be created and used in unusual way.
from .dygraph.base import in_declarative_mode
......
......@@ -44,13 +44,13 @@ __all__ = [
'to_variable',
]
# Flag that indicates whether running code under `@declarative`
# Flag that indicates whether running code under `@to_static`
_in_declarative_mode_ = False
def in_declarative_mode():
"""
Return a bool value that indicates whether running code under `@declarative`
Return a bool value that indicates whether running code under `@to_static`
"""
return _in_declarative_mode_
......
......@@ -17,7 +17,7 @@ from transformer_dygraph_model import MultiHeadAttention, PrePostProcessLayer
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import Layer
from paddle.jit.api import declarative
from paddle.jit.api import to_static
from paddle.nn import Linear
......@@ -373,7 +373,7 @@ class PretrainModelLayer(Layer):
bias_attr="next_sent_fc.b_0",
)
@declarative
@to_static
def forward(
self,
src_ids,
......
......@@ -21,7 +21,7 @@ import paddle.fluid as fluid
from paddle.fluid import ParamAttr, layers
from paddle.fluid.dygraph import Layer
from paddle.fluid.dygraph.base import to_variable
from paddle.jit.api import declarative
from paddle.jit.api import to_static
from paddle.nn import Embedding
INF = 1.0 * 1e5
......@@ -207,7 +207,7 @@ class BaseModel(fluid.dygraph.Layer):
topk_coordinates = paddle.stack([batch_pos, indices], axis=2)
return paddle.gather_nd(x, topk_coordinates)
@declarative
@to_static
def forward(self, inputs):
src, tar, label, src_sequence_length, tar_sequence_length = inputs
if src.shape[0] < self.batch_size:
......@@ -312,7 +312,7 @@ class BaseModel(fluid.dygraph.Layer):
return loss
@declarative
@to_static
def beam_search(self, inputs):
src, tar, label, src_sequence_length, tar_sequence_length = inputs
if src.shape[0] < self.batch_size:
......@@ -724,7 +724,7 @@ class AttentionModel(fluid.dygraph.Layer):
return array
@declarative
@to_static
def forward(self, inputs):
src, tar, label, src_sequence_length, tar_sequence_length = inputs
if src.shape[0] < self.batch_size:
......
......@@ -18,7 +18,7 @@ import paddle
import paddle.fluid as fluid
import paddle.fluid.param_attr as attr
from paddle.fluid.dygraph import Layer
from paddle.jit.api import declarative
from paddle.jit.api import to_static
from paddle.static import Variable
......@@ -495,7 +495,7 @@ class BOW(Layer):
self.bow_layer_po = FCLayer(self.bow_dim, None, "fc").ops()
self.softmax_layer = FCLayer(2, "softmax", "cos_sim").ops()
@declarative
@to_static
def forward(self, left, right):
"""
Forward network
......
......@@ -19,7 +19,7 @@ import numpy
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit.api import to_static
@paddle.jit.to_static
......@@ -28,7 +28,7 @@ def dyfunc_assert_variable(x):
assert x_v
@declarative
@to_static
def dyfunc_assert_non_variable(x=True):
assert x
......
......@@ -18,7 +18,7 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.jit.api import declarative
from paddle.jit.api import to_static
from paddle.jit.dy2static.program_translator import ProgramTranslator
from paddle.jit.dy2static.utils import Dygraph2StaticException
......@@ -36,7 +36,7 @@ class TestDy2staticException(unittest.TestCase):
if self.dyfunc:
with self.assertRaisesRegex(Dygraph2StaticException, self.error):
ProgramTranslator().enable(True)
self.assertTrue(declarative(self.dyfunc)(self.x))
self.assertTrue(to_static(self.dyfunc)(self.x))
paddle.fluid.dygraph.base._in_declarative_mode_ = False
ProgramTranslator().enable(False)
......@@ -223,7 +223,7 @@ class TestContinueInFor(unittest.TestCase):
def run_static_mode(self):
with fluid.dygraph.guard():
res = declarative(self.dygraph_func)(self.input)
res = to_static(self.dygraph_func)(self.input)
return res.numpy()
def test_transformed_static_result(self):
......
......@@ -21,7 +21,7 @@ from test_fetch_feed import Linear, Pool2D
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit.api import to_static
from paddle.jit.dy2static import convert_to_static
......@@ -138,7 +138,7 @@ class TestConvertWithCache(unittest.TestCase):
self.assertTrue(id(static_func), id(cached_func))
@declarative
@to_static
def sum_even_until_limit(max_len, limit):
ret_sum = fluid.dygraph.to_variable(np.zeros((1)).astype('int32'))
for i in range(max_len):
......@@ -166,7 +166,7 @@ class TestToOutputWithCache(unittest.TestCase):
ret = sum_even_until_limit(80, 10)
self.assertEqual(ret.numpy(), 30)
ret = declarative(sum_under_while)(100)
ret = to_static(sum_under_while)(100)
self.assertEqual(ret.numpy(), 5050)
......
......@@ -17,40 +17,40 @@ import unittest
import numpy as np
import paddle.fluid as fluid
from paddle.jit.api import declarative
from paddle.jit.api import to_static
SEED = 2020
np.random.seed(SEED)
@declarative
@to_static
def test_bool_cast(x):
x = fluid.dygraph.to_variable(x)
x = bool(x)
return x
@declarative
@to_static
def test_int_cast(x):
x = fluid.dygraph.to_variable(x)
x = int(x)
return x
@declarative
@to_static
def test_float_cast(x):
x = fluid.dygraph.to_variable(x)
x = float(x)
return x
@declarative
@to_static
def test_not_var_cast(x):
x = int(x)
return x
@declarative
@to_static
def test_mix_cast(x):
x = fluid.dygraph.to_variable(x)
x = int(x)
......
......@@ -39,7 +39,7 @@ os.environ["CUDA_VISIBLE_DEVICES"] = "1"
import paddle
from paddle.fluid.dygraph import to_variable
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit.api import to_static
from paddle.nn import BatchNorm
# Note: Set True to eliminate randomness.
......@@ -82,7 +82,7 @@ class Cycle_Gan(fluid.dygraph.Layer):
input_channel
)
@declarative
@to_static
def forward(self, input_A, input_B):
"""
Generator of GAN model.
......@@ -133,7 +133,7 @@ class Cycle_Gan(fluid.dygraph.Layer):
g_loss,
)
@declarative
@to_static
def discriminatorA(self, input_A, input_B):
"""
Discriminator A of GAN model.
......@@ -143,7 +143,7 @@ class Cycle_Gan(fluid.dygraph.Layer):
return rec_B, fake_pool_rec_B
@declarative
@to_static
def discriminatorB(self, input_A, input_B):
"""
Discriminator B of GAN model.
......
......@@ -23,7 +23,7 @@ import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import Layer, to_variable
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit.api import to_static
from paddle.jit.dy2static.program_translator import (
ConcreteProgram,
StaticFunction,
......@@ -38,13 +38,12 @@ class SimpleNet(Layer):
super().__init__()
self.linear = paddle.nn.Linear(10, 3)
@declarative(input_spec=[InputSpec(shape=[None, 10], dtype='float32')])
@to_static(input_spec=[InputSpec(shape=[None, 10], dtype='float32')])
def forward(self, x, a=1, b=2):
y = self.inner_function(x)
return y
# `declarative` is not essential, add it to test for robustness.
@declarative
@to_static
def inner_function(self, x):
y = self.linear(x)
return y
......@@ -53,14 +52,14 @@ class SimpleNet(Layer):
z = x + y
return z
@declarative(input_spec=[[InputSpec([None, 10]), InputSpec([None, 10])]])
@to_static(input_spec=[[InputSpec([None, 10]), InputSpec([None, 10])]])
def func_with_list(self, l, int_val=1):
x, y = l
z = x + y
z = z + int_val
return z
@declarative(
@to_static(
input_spec=[{'x': InputSpec([None, 10]), 'y': InputSpec([None, 10])}]
)
def func_with_dict(self, d):
......@@ -70,7 +69,7 @@ class SimpleNet(Layer):
return z
@declarative(
@to_static(
input_spec=[
[
InputSpec([None]),
......@@ -135,8 +134,8 @@ class TestInputSpec(unittest.TestCase):
# 3. we can decorate any method
x_2 = to_variable(np.ones([4, 20]).astype('float32'))
# uses `declarative(func)` instead of `@declarative`
net.add_func = declarative(net.add_func)
# uses `to_static(func)` instead of `@to_static`
net.add_func = to_static(net.add_func)
out = net.add_func(x_2, np.ones([20]).astype('float32'))
self.assertTrue(len(net.add_func.program_cache) == 1)
......@@ -164,7 +163,7 @@ class TestInputSpec(unittest.TestCase):
# 2. requires len(input_spec) <= len(args)
with self.assertRaises(ValueError):
net.add_func = declarative(
net.add_func = to_static(
net.add_func,
input_spec=[
InputSpec([-1, 10]),
......@@ -182,7 +181,7 @@ class TestInputSpec(unittest.TestCase):
net = SimpleNet()
# We can get concrete_program by specifying InputSpec information. No need to fake input.
net.add_func = declarative(
net.add_func = to_static(
net.add_func,
input_spec=[InputSpec([-1, 10]), InputSpec([-1, 10], name='y')],
)
......@@ -191,14 +190,14 @@ class TestInputSpec(unittest.TestCase):
self.assertTrue(cp1.inputs[-1].name == 'y')
# generate another program
net.add_func = declarative(
net.add_func = to_static(
net.add_func,
input_spec=[InputSpec([10]), InputSpec([10], name='label')],
)
cp2 = net.add_func.concrete_program
self.assertTrue(cp2.inputs[-1].shape == (10,))
self.assertTrue(cp2.inputs[-1].name == 'label')
# Note(Aurelius84): New instance will be returned if we use `declarative(foo)` every time.
# Note(Aurelius84): New instance will be returned if we use `to_static(foo)` every time.
# So number of cache program is 1.
self.assertTrue(len(net.add_func.program_cache) == 1)
self.assertTrue(cp1 != cp2)
......@@ -219,7 +218,7 @@ class TestDifferentInputSpecCacheProgram(unittest.TestCase):
y_data = np.ones([10]).astype('float32') * 2
z_data = np.ones([10]).astype('float32') * 2.2
foo = declarative(foo_func)
foo = to_static(foo_func)
# [16, 10] + [10] (varbase)
out_1 = foo(to_variable(x_data), to_variable(y_data))
......@@ -260,7 +259,7 @@ class TestDifferentInputSpecCacheProgram(unittest.TestCase):
def test_get_concrete_program(self):
foo = declarative(foo_func)
foo = to_static(foo_func)
# 1. specific InputSpec for `x`/`y`
concrete_program_1 = foo.get_concrete_program(
......@@ -349,7 +348,7 @@ class TestInputDefaultName(unittest.TestCase):
class TestDeclarativeAPI(unittest.TestCase):
def test_error(self):
func = declarative(dyfunc_to_variable)
func = to_static(dyfunc_to_variable)
paddle.enable_static()
......@@ -373,20 +372,20 @@ class TestDecorateModelDirectly(unittest.TestCase):
def test_fake_input(self):
net = SimpleNet()
net = declarative(net)
net = to_static(net)
y = net(self.x)
self.assertTrue(len(net.forward.program_cache) == 1)
def test_input_spec(self):
net = SimpleNet()
net = declarative(net, input_spec=[InputSpec([None, 8, 10])])
net = to_static(net, input_spec=[InputSpec([None, 8, 10])])
self.assertTrue(len(net.forward.inputs) == 1)
self.assertTrue(len(net.forward.program_cache) == 1)
input_shape = net.forward.inputs[0].shape
self.assertListEqual(list(input_shape), [-1, 8, 10])
# redecorate
net = declarative(net, input_spec=[InputSpec([None, 16, 10])])
net = to_static(net, input_spec=[InputSpec([None, 16, 10])])
input_shape = net.forward.inputs[0].shape
self.assertListEqual(list(input_shape), [-1, 16, 10])
......
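The hunks above switch the tests to calling `to_static(func, input_spec=...)` directly instead of using the decorator form. A small sketch of that call style, assuming a toy `add` function with shapes chosen only for illustration:

```python
import numpy as np
import paddle
from paddle.static import InputSpec


def add(x, y):
    return x + y


# Wrap an ordinary function after the fact, pinning the expected input shapes.
static_add = paddle.jit.to_static(
    add,
    input_spec=[InputSpec([None, 10], 'float32'), InputSpec([None, 10], 'float32')],
)

x = paddle.to_tensor(np.ones([4, 10], dtype='float32'))
y = paddle.to_tensor(np.ones([4, 10], dtype='float32'))
print(static_add(x, y).shape)  # [4, 10]

# The traced static graph is exposed as a concrete program, as the tests check.
print([v.name for v in static_add.concrete_program.inputs])
```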
......@@ -19,7 +19,7 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit.api import to_static
SEED = 2020
......@@ -29,7 +29,7 @@ class Pool2D(fluid.dygraph.Layer):
super().__init__()
self.pool2d = paddle.nn.AvgPool2D(kernel_size=2, stride=1)
@declarative
@to_static
def forward(self, x):
# Add func `get_result` for testing arg_name_to_idx in ast transformation.
def get_result(x):
......@@ -54,7 +54,7 @@ class Linear(fluid.dygraph.Layer):
)
self.act = paddle.nn.ReLU()
@declarative
@to_static
def forward(self, x):
pre = self.fc(x)
pre = self.act(pre)
......
......@@ -44,7 +44,6 @@ from ifelse_simple_func import (
import paddle
import paddle.fluid.core as core
import paddle.nn.functional as F
from paddle.jit.api import declarative
from paddle.jit.dy2static.program_translator import ProgramTranslator
from paddle.jit.dy2static.utils import Dygraph2StaticException
......@@ -66,7 +65,7 @@ class TestDy2staticException(unittest.TestCase):
if self.dyfunc:
with self.assertRaisesRegex(Dygraph2StaticException, self.error):
ProgramTranslator().enable(True)
self.assertTrue(declarative(self.dyfunc)(self.x))
self.assertTrue(paddle.jit.to_static(self.dyfunc)(self.x))
paddle.fluid.dygraph.base._in_declarative_mode_ = False
ProgramTranslator().enable(False)
......@@ -89,7 +88,7 @@ class TestDygraphIfElse(unittest.TestCase):
with fluid.dygraph.guard(place):
x_v = fluid.dygraph.to_variable(self.x)
if to_static:
ret = declarative(self.dyfunc)(x_v)
ret = paddle.jit.to_static(self.dyfunc)(x_v)
else:
ret = self.dyfunc(x_v)
return ret.numpy()
......@@ -293,7 +292,7 @@ class TestAst2FuncWithExternalFunc(TestDygraphIfElse):
class NetWithExternalFunc(fluid.dygraph.Layer):
@declarative
@paddle.jit.to_static
def forward(self, x, label=None):
if paddle.mean(x) < 0:
x_v = x - 1
......
......@@ -28,7 +28,7 @@ from paddle import _legacy_C_ops
from paddle.fluid.dygraph import to_variable
from paddle.fluid.framework import _non_static_mode
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit.api import to_static
from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
SEED = 2020
......@@ -440,7 +440,7 @@ class LexNet(fluid.dygraph.Layer):
# share weight
self.crf_decoding.weight = self.linear_chain_crf.weight
@declarative(input_spec=input_specs)
@to_static(input_spec=input_specs)
def forward(self, word, target, length=None):
"""
Configure the network
......
......@@ -19,7 +19,6 @@ import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.nn.functional as F
from paddle.jit.api import declarative
def call_lambda_as_func(x):
......@@ -108,7 +107,7 @@ class TestLambda(unittest.TestCase):
with fluid.dygraph.guard(self.place):
x_v = fluid.dygraph.to_variable(self.x)
if to_static:
ret = declarative(func)(x_v)
ret = paddle.jit.to_static(func)(x_v)
else:
ret = func(x_v)
return ret.numpy()
......
......@@ -18,7 +18,6 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.jit.api import declarative
from paddle.jit.dy2static import Call
SEED = 2020
......@@ -58,7 +57,7 @@ class TestLen(unittest.TestCase):
def _run(self, to_static):
with fluid.dygraph.guard(self.place):
if to_static:
out = declarative(self.func)(self.x_data)
out = paddle.jit.to_static(self.func)(self.x_data)
else:
out = self.func(self.x_data)
......
......@@ -20,7 +20,6 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.layers.utils import map_structure
from paddle.jit.api import declarative
SEED = 2020
np.random.seed(SEED)
......@@ -247,7 +246,7 @@ class TestListWithoutControlFlow(unittest.TestCase):
with fluid.dygraph.guard():
if to_static:
res = declarative(self.dygraph_func)(self.input)
res = paddle.jit.to_static(self.dygraph_func)(self.input)
else:
res = self.dygraph_func(self.input)
return self.varbase_to_numpy(res)
......@@ -290,8 +289,10 @@ class TestListInWhileLoop(TestListWithoutControlFlow):
with fluid.dygraph.guard():
if to_static:
print(declarative(self.dygraph_func).code)
res = declarative(self.dygraph_func)(self.input, self.iter_num)
print(paddle.jit.to_static(self.dygraph_func).code)
res = paddle.jit.to_static(self.dygraph_func)(
self.input, self.iter_num
)
else:
res = self.dygraph_func(self.input, self.iter_num)
return self.varbase_to_numpy(res)
......
......@@ -20,7 +20,6 @@ import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.nn.functional as F
from paddle.jit.api import declarative
from paddle.jit.dy2static.loop_transformer import NameVisitor
from paddle.utils import gast
......@@ -324,7 +323,7 @@ class TestTransformWhileLoop(unittest.TestCase):
# Set the input of dyfunc to VarBase
tensor_x = fluid.dygraph.to_variable(self.x, zero_copy=False)
if to_static:
ret = declarative(self.dyfunc)(tensor_x)
ret = paddle.jit.to_static(self.dyfunc)(tensor_x)
else:
ret = self.dyfunc(tensor_x)
if hasattr(ret, "numpy"):
......@@ -401,7 +400,7 @@ class TestTransformForLoop(unittest.TestCase):
def _run(self, to_static):
with fluid.dygraph.guard(self.place):
if to_static:
ret = declarative(self.dyfunc)(self.len)
ret = paddle.jit.to_static(self.dyfunc)(self.len)
else:
ret = self.dyfunc(self.len)
return ret.numpy()
......
......@@ -25,7 +25,7 @@ import paddle.fluid as fluid
from paddle.fluid.initializer import MSRA
from paddle.fluid.param_attr import ParamAttr
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit.api import to_static
from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
from paddle.nn import BatchNorm, Linear
......@@ -266,7 +266,7 @@ class MobileNetV1(fluid.dygraph.Layer):
bias_attr=ParamAttr(name="fc7_offset"),
)
@declarative
@to_static
def forward(self, inputs):
y = self.conv1(inputs)
for dws in self.dwsl:
......@@ -432,7 +432,7 @@ class MobileNetV2(fluid.dygraph.Layer):
bias_attr=ParamAttr(name="fc10_offset"),
)
@declarative
@to_static
def forward(self, inputs):
y = self._conv1(inputs, if_act=True)
for inv in self._invl:
......
......@@ -15,7 +15,7 @@
import sys
import unittest
from paddle.jit.api import declarative
from paddle.jit.api import to_static
from paddle.jit.dy2static import DygraphToStaticAst
from paddle.jit.dy2static.origin_info import (
ORIGI_INFO,
......@@ -43,13 +43,13 @@ def nested_func(x):
return result
@declarative
@to_static
def decorated_func(x):
return x
@declarative
@declarative
@to_static
@to_static
def decorated_func2(x):
return x
......
......@@ -21,7 +21,7 @@ import paddle
import paddle.fluid as fluid
from paddle.fluid.layers.utils import flatten
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit.api import to_static
SEED = 2020
......@@ -78,7 +78,7 @@ class TestWithNestedInput(unittest.TestCase):
self.fake_input()
if to_static:
out = declarative(nested_input)(self.x, self.y)
out = paddle.jit.to_static(nested_input)(self.x, self.y)
else:
out = nested_input(self.x, self.y)
......@@ -102,7 +102,7 @@ class TestWithNestedOutput(unittest.TestCase):
self.y = fake_data([10, 16])
if to_static:
out = declarative(nested_output)(self.x, self.y)
out = paddle.jit.to_static(nested_output)(self.x, self.y)
else:
out = nested_output(self.x, self.y)
......@@ -185,7 +185,7 @@ class GPT2LMHeadModel(fluid.dygraph.Layer):
np.random.rand(2, 3).astype('float32')
)
@declarative
@to_static
def forward(self, x):
x = paddle.reshape(x, shape=[-1, 6])
x1, x2, x3 = paddle.split(x=x, axis=1, num_or_sections=3)
......
......@@ -28,7 +28,7 @@ import paddle
import paddle.fluid as fluid
import paddle.jit.dy2static as _jst
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit.api import to_static
from paddle.jit.dy2static.utils import func_to_source_code
from paddle.utils import gast
......@@ -47,7 +47,7 @@ def simple_func(x, weight_numpy):
return z
@declarative
@to_static
def decorated_simple_func(x, weight_numpy):
x = fluid.dygraph.to_variable(x)
w = fluid.dygraph.to_variable(weight_numpy)
......@@ -206,7 +206,7 @@ class StaticCode2:
class NetWithError(fluid.dygraph.layers.Layer):
@declarative
@to_static
def forward(self, x):
linear = paddle.nn.Linear(32, 64)
y = linear(x)
......
......@@ -23,7 +23,7 @@ import paddle.fluid as fluid
from paddle.fluid.dygraph.base import to_variable
from paddle.fluid.optimizer import SGDOptimizer
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit.api import to_static
PRINT_STEP = 20
SEED = 2020
......@@ -186,7 +186,7 @@ class PtbModel(fluid.Layer):
def build_once(self, input, label, init_hidden, init_cell):
pass
@declarative
@to_static
def forward(self, input, label, init_hidden, init_cell):
init_h = paddle.reshape(
......
......@@ -24,7 +24,7 @@ import paddle.fluid as fluid
import paddle.nn.functional as F
from paddle.fluid.dygraph import Layer, to_variable
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit.api import to_static
SEED = 2020
program_translator = ProgramTranslator()
......@@ -41,7 +41,7 @@ class Policy(Layer):
self.saved_log_probs = []
self.rewards = []
@declarative
@to_static
def forward(self, x):
x = paddle.reshape(x, shape=[1, 4])
x = self.affine1(x)
......
......@@ -21,7 +21,7 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit.api import to_static
from paddle.jit.dy2static.partial_program import partial_program_from
from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
......@@ -40,7 +40,7 @@ class SimpleFcLayer(fluid.dygraph.Layer):
super().__init__()
self._linear = paddle.nn.Linear(fc_size, fc_size)
@declarative
@to_static
def forward(self, x):
y = self._linear(x)
z = self._linear(y)
......
......@@ -26,7 +26,7 @@ import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.base import to_variable
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit.api import to_static
from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
from paddle.nn import BatchNorm, Linear
......@@ -321,7 +321,7 @@ class SeResNeXt(fluid.dygraph.Layer):
),
)
@declarative
@to_static
def forward(self, inputs, label):
if self.layers == 50 or self.layers == 101:
y = self.conv0(inputs)
......
......@@ -21,7 +21,7 @@ import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import to_variable
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit.api import to_static
from paddle.nn import Embedding, Linear
SEED = 2020
......@@ -89,7 +89,7 @@ class CNN(fluid.dygraph.Layer):
self._fc1_act = paddle.nn.Softmax()
self._fc_prediction = Linear(self.fc_hid_dim, self.class_dim)
@declarative
@to_static
def forward(self, inputs, label=None):
emb = self.embedding(inputs)
o_np_mask = (paddle.reshape(inputs, [-1, 1]) != self.dict_dim).astype(
......@@ -133,7 +133,7 @@ class BOW(fluid.dygraph.Layer):
self._fc2 = Linear(self.hid_dim, self.fc_hid_dim)
self._fc_prediction = Linear(self.fc_hid_dim, self.class_dim)
@declarative
@to_static
def forward(self, inputs, label=None):
emb = self.embedding(inputs)
o_np_mask = (paddle.reshape(inputs, [-1, 1]) != self.dict_dim).astype(
......@@ -182,7 +182,7 @@ class GRU(fluid.dygraph.Layer):
self._fc_prediction = Linear(self.fc_hid_dim, self.class_dim)
self._gru = DynamicGRU(size=self.hid_dim, h_0=h_0)
@declarative
@to_static
def forward(self, inputs, label=None):
emb = self.embedding(inputs)
o_np_mask = (paddle.reshape(inputs, [-1, 1]) != self.dict_dim).astype(
......@@ -235,7 +235,7 @@ class BiGRU(fluid.dygraph.Layer):
size=self.hid_dim, h_0=h_0, is_reverse=True
)
@declarative
@to_static
def forward(self, inputs, label=None):
emb = self.embedding(inputs)
o_np_mask = (paddle.reshape(inputs, [-1, 1]) != self.dict_dim).astype(
......
......@@ -18,7 +18,6 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.jit.api import declarative
def dyfunc_tensor_shape_1(x):
......@@ -252,7 +251,9 @@ class TestTensorShapeBasic(unittest.TestCase):
def _run(self, to_static):
with fluid.dygraph.guard():
if to_static:
res = declarative(self.dygraph_func)(self.input).numpy()
res = paddle.jit.to_static(self.dygraph_func)(
self.input
).numpy()
else:
res = self.dygraph_func(self.input).numpy()
return res
......
......@@ -25,7 +25,7 @@ import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import to_variable
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit.api import to_static
from paddle.nn import BatchNorm, Linear
random.seed(0)
......@@ -200,7 +200,7 @@ class TSM_ResNet(fluid.dygraph.Layer):
),
)
@declarative
@to_static
def forward(self, inputs):
y = paddle.reshape(inputs, [-1] + self.reshape_list)
y = self.conv(y)
......
......@@ -16,8 +16,8 @@ import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.jit.api import declarative
SEED = 2020
np.random.seed(SEED)
......@@ -56,7 +56,7 @@ class TestTransformWhileLoop(unittest.TestCase):
# Set the input of dyfunc to VarBase
tensor_x = fluid.dygraph.to_variable(self.x, zero_copy=False)
if to_static:
ret = declarative(self.dyfunc)(tensor_x)
ret = paddle.jit.to_static(self.dyfunc)(tensor_x)
else:
ret = self.dyfunc(tensor_x)
if hasattr(ret, "numpy"):
......
......@@ -21,7 +21,7 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit.api import to_static
from paddle.nn import Embedding
......@@ -250,7 +250,7 @@ class SkipGram(fluid.dygraph.Layer):
),
)
@declarative
@to_static
def forward(self, center_words, target_words, label):
center_words_emb = self.embedding(center_words)
target_words_emb = self.embedding_out(target_words)
......
......@@ -22,7 +22,7 @@ import paddle.fluid as fluid
from paddle import _legacy_C_ops
from paddle.fluid.param_attr import ParamAttr
from paddle.fluid.regularizer import L2Decay
from paddle.jit.api import declarative
from paddle.jit.api import to_static
class AttrDict(dict):
......@@ -277,7 +277,7 @@ class YOLOv3(fluid.dygraph.Layer):
self.route_blocks_2.append(route)
self.upsample = Upsample()
@declarative
@to_static
def forward(
self,
inputs,
......
......@@ -25,7 +25,7 @@ import paddle
import paddle.fluid as fluid
from paddle.fluid import unique_name
from paddle.fluid.layers.utils import flatten
from paddle.jit.api import declarative
from paddle.jit.api import to_static
from paddle.jit.translated_layer import INFER_PARAMS_INFO_SUFFIX
from paddle.nn import Linear
from paddle.static import InputSpec
......@@ -57,7 +57,7 @@ class LinearNet(fluid.dygraph.Layer):
super().__init__()
self._linear = Linear(in_size, out_size)
@declarative
@to_static
def forward(self, x):
return self._linear(x)
......@@ -67,7 +67,7 @@ class LinearNetWithInputSpec(fluid.dygraph.Layer):
super().__init__()
self._linear = Linear(in_size, out_size)
@declarative(input_spec=[InputSpec(shape=[None, 784], dtype='float32')])
@to_static(input_spec=[InputSpec(shape=[None, 784], dtype='float32')])
def forward(self, x):
return self._linear(x)
......@@ -86,7 +86,7 @@ class LinerNetWithLabel(paddle.nn.Layer):
super().__init__()
self._linear = Linear(in_size, out_size)
@declarative(
@to_static(
input_spec=[
InputSpec(shape=[None, 784], dtype='float32', name="image"),
InputSpec(shape=[None, 1], dtype='int64', name="label"),
......@@ -106,7 +106,7 @@ class LinerNetWithPruneInput(paddle.nn.Layer):
super().__init__()
self._linear = Linear(in_size, out_size)
@declarative(
@to_static(
input_spec=[
InputSpec(shape=[None, 784], dtype='float32', name="image"),
InputSpec(shape=[None, 1], dtype='int64', name="label"),
......@@ -126,7 +126,7 @@ class LinerNetWithUselessInput(paddle.nn.Layer):
super().__init__()
self._linear = Linear(in_size, out_size)
@declarative(
@to_static(
input_spec=[
InputSpec(shape=[None, 784], dtype='float32', name="image"),
InputSpec(shape=[None, 1], dtype='int64', name="label"),
......@@ -142,7 +142,7 @@ class LinearNetReturnLoss(fluid.dygraph.Layer):
super().__init__()
self._linear = Linear(in_size, out_size)
@declarative
@to_static
def forward(self, x):
y = self._linear(x)
z = self._linear(y)
......@@ -156,7 +156,7 @@ class LinearNetMultiInput(fluid.dygraph.Layer):
self._linear1 = Linear(in_size, out_size)
self._linear2 = Linear(in_size, out_size)
@declarative(
@to_static(
input_spec=[
InputSpec([None, 8], dtype='float32'),
InputSpec([None, 8], dtype='float32'),
......@@ -175,7 +175,7 @@ class LinearNetMultiInput1(fluid.dygraph.Layer):
self._linear1 = Linear(in_size, out_size)
self._linear2 = Linear(in_size, out_size)
@declarative(
@to_static(
input_spec=(
InputSpec([None, 8], dtype='float32'),
InputSpec([None, 8], dtype='float32'),
......@@ -195,7 +195,7 @@ class MultiLoadingLinearNet(fluid.dygraph.Layer):
self._load_linear1 = paddle.jit.load(model_path)
self._load_linear2 = paddle.jit.load(model_path)
@declarative
@to_static
def forward(self, x):
tmp1 = self._linear(x)
tmp2 = self._load_linear1(tmp1)
......@@ -210,7 +210,7 @@ class LinearNetReturnHidden(fluid.dygraph.Layer):
self._linear_1 = Linear(in_size, out_size)
self._linear_2 = Linear(in_size, out_size)
@declarative
@to_static
def forward(self, x):
y = self._linear_1(x)
z = self._linear_2(y)
......@@ -224,7 +224,7 @@ class LinearNetWithNestOut(fluid.dygraph.Layer):
self._linear_1 = Linear(in_size, out_size)
self._linear_2 = Linear(in_size, out_size)
@declarative
@to_static
def forward(self, x):
y = self._linear_1(x)
z = self._linear_2(y)
......@@ -456,7 +456,7 @@ class TestSaveLoadWithNestOut(unittest.TestCase):
net = LinearNetWithNestOut(8, 8)
dy_outs = flatten(net(x))
net = declarative(net, input_spec=[InputSpec([None, 8], name='x')])
net = to_static(net, input_spec=[InputSpec([None, 8], name='x')])
model_path = os.path.join(self.temp_dir.name, "net_with_nest_out/model")
paddle.jit.save(net, model_path)
......@@ -549,7 +549,7 @@ class TestSaveLoadWithInputSpec(unittest.TestCase):
def test_with_input_spec(self):
net = LinearNetReturnLoss(8, 8)
# set x.shape = [None, 8]
net.forward = declarative(
net.forward = to_static(
net.forward, input_spec=[InputSpec([None, 8], name='x')]
)
......
......@@ -15,7 +15,7 @@
from .api import save
from .api import load
from .api import declarative as to_static
from .api import to_static
from .api import not_to_static
from .dy2static.logging_utils import set_code_level, set_verbosity
......
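With the `declarative as to_static` alias gone, `paddle.jit` re-exports `to_static` directly from `paddle.jit.api`, so both import paths should keep resolving to the same decorator. A small sanity sketch:

```python
# Both import paths resolve to the same object after this change.
from paddle.jit import to_static            # re-exported in paddle/jit/__init__.py
from paddle.jit.api import to_static as ts  # the renamed definition itself

assert to_static is ts
```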
......@@ -178,7 +178,7 @@ def copy_decorator_attrs(original_func, decorated_obj):
original_func(callable): the original decorated function.
decorated_obj(StaticFunction): the target decorated StaticFunction object.
"""
decorator_name = "declarative"
decorator_name = "to_static"
decorated_obj.__name__ = original_func.__name__
decorated_obj._decorator_name = decorator_name
......@@ -190,12 +190,12 @@ def copy_decorator_attrs(original_func, decorated_obj):
return decorated_obj
def declarative(
def to_static(
function=None, input_spec=None, build_strategy=None, property=False
):
"""
Converts imperative dygraph APIs into declarative function APIs. Decorator
@declarative handles the Program and Executor of static mode and returns
@to_static handles the Program and Executor of static mode and returns
the result as dygraph Tensor(s). Users could use the returned dygraph
Tensor(s) to do imperative training, inference, or other operations. If the
decorated function calls other imperative function, the called one will be
......@@ -264,7 +264,7 @@ def declarative(
)
)
# for usage: `declarative(foo, ...)`
# for usage: `to_static(foo, ...)`
if function is not None:
if isinstance(function, Layer):
if isinstance(function.forward, StaticFunction):
......@@ -279,7 +279,7 @@ def declarative(
else:
return decorated(function)
# for usage: `@declarative`
# for usage: `@to_static`
return decorated
......@@ -1008,7 +1008,7 @@ def save(layer, path, input_spec=None, **configs):
inner_input_spec = pack_sequence_as(
input_spec, inner_input_spec
)
static_forward = declarative(
static_forward = to_static(
inner_layer.forward, input_spec=inner_input_spec
)
concrete_program = (
......@@ -1017,7 +1017,7 @@ def save(layer, path, input_spec=None, **configs):
)
)
# the input_spec has been used in declarative, which is equal to
# @declarative with input_spec and jit.save without input_spec,
# @to_static with input_spec and jit.save without input_spec,
# avoid needless warning
inner_input_spec = None
else:
......@@ -1041,7 +1041,7 @@ def save(layer, path, input_spec=None, **configs):
inner_input_spec = pack_sequence_as(
input_spec, inner_input_spec
)
static_function = declarative(
static_function = to_static(
attr_func, input_spec=inner_input_spec
)
concrete_program = static_function.concrete_program
......
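As the docstring hunk above notes, a `to_static`-decorated function executes a static Program but still returns dygraph Tensor(s), and an `input_spec` given on the decorator removes the need to pass one to `paddle.jit.save` (the case the "avoid needless warning" comment refers to). A hedged sketch of that flow, with an illustrative `LinearNet` and a temporary save path:

```python
import tempfile

import numpy as np
import paddle
from paddle.static import InputSpec


class LinearNet(paddle.nn.Layer):
    def __init__(self):
        super().__init__()
        self._linear = paddle.nn.Linear(8, 8)

    @paddle.jit.to_static(input_spec=[InputSpec([None, 8], dtype='float32', name='x')])
    def forward(self, x):
        return self._linear(x)


net = LinearNet()
# Outputs are ordinary dygraph Tensors even though a static Program ran underneath.
out = net(paddle.to_tensor(np.random.rand(4, 8).astype('float32')))

# input_spec was given on the decorator, so jit.save needs no extra spec here.
with tempfile.TemporaryDirectory() as tmp:
    paddle.jit.save(net, tmp + "/linear_net")
```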
......@@ -241,12 +241,12 @@ def convert_call(func):
if func.__name__ == '<lambda>':
return func
try:
# Note(Aurelius84): Because `@declarative` returns a class instance instead of
# Note(Aurelius84): Because `@to_static` returns a class instance instead of
# a function. This will modify the value referring to itself in `__globals__`.
# For example:
#
# @declarative
# @to_static
# def foo(x):
# return x
#
......
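A small illustration of the note above, assuming nothing about the exact wrapper class beyond it not being a plain Python function:

```python
import inspect

import paddle


@paddle.jit.to_static
def foo(x):
    return x


# The module-level name `foo` now refers to a wrapper object rather than the
# original Python function, which is why convert_call has to special-case it.
print(inspect.isfunction(foo))  # False
print(type(foo).__name__)       # an internal StaticFunction-like wrapper
```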
......@@ -132,7 +132,7 @@ def _change_is_test_status(program, is_test):
class PartialProgramLayer:
"""
PartialProgramLayer wraps all the ops from layers decorated by `@declarative`
PartialProgramLayer wraps all the ops from layers decorated by `@to_static`
and execute them as a static subgraph.
.. note::
......@@ -143,8 +143,8 @@ class PartialProgramLayer:
Args:
main_program(Program): The main program that contains ops need to be executed.
inputs(list[Variable]): The input list of the decorated function by `@declarative`.
outputs(list[Variable]): The output list of the decorated function by `@declarative`.
inputs(list[Variable]): The input list of the decorated function by `@to_static`.
outputs(list[Variable]): The output list of the decorated function by `@to_static`.
parameters(list[VarBase]|None): All trainable parameters included in the program. Default None.
Returns:
......@@ -534,7 +534,7 @@ class PartialProgramLayer:
def _prune_unused_params(self, program):
"""
Prune the parameters not used anywhere in the program.
The `@declarative` may only decorate a sub function which
The `@to_static` may only decorate a sub function which
contains some unused parameters created in `__init__`.
So prune these parameters to avoid unnecessary operations in
`run_program_op`.
......