Unverified commit 2bb28f31, authored by Ryan, committed by GitHub

[Dy2St] Remove ProgramTranslator (#49628)

* add enable_to_static and drop some methods of ProgramTranslator

* fix code style

* fix can't import enable_to_static and update unittest

* change unittest and roll back code of PT

* fix import error from utils

* roll back PT

* fix rollback

* add some unittests

* add unittests and fix code style bug in api.py

* finish all unittests

* remove ProgramTranslator

* fix code style

* restore test_program_translator

* api.py remove get_func

* TestDygraphToStaticCode

* fix check_type and import err

* roll back PT without get_code

* roll back PT with get_code

* convert_to_static

* fix import __all__
Parent 8f0adcb5
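In short, the migration this commit performs looks like this (a minimal sketch; the decorated function is illustrative, not taken from the PR):

```python
import paddle

@paddle.jit.to_static
def func(x):
    # toy branch so dy2st actually has control flow to translate
    if paddle.mean(x) > 0:
        return x - 1
    return x + 1

# Before this PR (API removed here):
#     paddle.jit.ProgramTranslator().enable(False)
# After this PR:
paddle.jit.enable_to_static(False)

x = paddle.ones([1, 2])
print(func(x))  # runs eagerly in dygraph mode; values are [[0. 0.]]
```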
......@@ -564,7 +564,7 @@ def _fake_interface_only_(func):
raise AssertionError(
"'%s' only can be called by `paddle.Tensor` in dynamic graph mode. Suggestions:\n"
" 1. If you are in static graph mode, you can switch to dynamic graph mode by turning off `paddle.enable_static()` or calling `paddle.disable_static()`.\n"
" 2. If you are using `@paddle.jit.to_static`, you can turn off ProgramTranslator by calling `paddle.jit.ProgramTranslator().enable(False)`. "
" 2. If you are using `@paddle.jit.to_static`, you can call `paddle.jit.enable_to_static(False)`. "
"If you have to translate dynamic graph to static graph, please use other API to replace '%s'."
% (func.__name__, func.__name__)
)
......
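The hunk above only rewrites the suggestion inside the error message; the trigger is unchanged. A small sketch of the scenario it guards against (illustrative, assuming a build that includes this PR):

```python
import paddle

paddle.enable_static()
x = paddle.static.data(name="x", shape=[2, 3], dtype="float32")
try:
    # Variable.numpy() is a fake-interface-only method in static
    # graph mode, so this raises the AssertionError shown above.
    x.numpy()
except AssertionError as e:
    print(e)
paddle.disable_static()
```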
......@@ -18,7 +18,6 @@ import numpy
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static
......@@ -35,7 +34,7 @@ def dyfunc_assert_non_variable(x=True):
class TestAssertVariable(unittest.TestCase):
def _run(self, func, x, with_exception, to_static):
ProgramTranslator().enable(to_static)
paddle.jit.enable_to_static(to_static)
if with_exception:
with self.assertRaises(BaseException):
with fluid.dygraph.guard():
......
......@@ -24,10 +24,8 @@ from predictor_utils import PredictorTools
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
program_translator = ProgramTranslator()
place = (
fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace()
)
......@@ -127,11 +125,11 @@ class TestBert(unittest.TestCase):
return loss, ppl
def train_dygraph(self, bert_config, data_reader):
program_translator.enable(False)
paddle.jit.enable_to_static(False)
return self.train(bert_config, data_reader, False)
def train_static(self, bert_config, data_reader):
program_translator.enable(True)
paddle.jit.enable_to_static(True)
return self.train(bert_config, data_reader, True)
def predict_static(self, data):
......@@ -157,7 +155,7 @@ class TestBert(unittest.TestCase):
return pred_res
def predict_dygraph(self, bert_config, data):
program_translator.enable(False)
paddle.jit.enable_to_static(False)
with fluid.dygraph.guard(place):
bert = PretrainModelLayer(
config=bert_config, weight_sharing=False, use_fp16=False
......
......@@ -24,12 +24,11 @@ import paddle
import paddle.fluid as fluid
from paddle.fluid import ParamAttr
from paddle.fluid.dygraph import to_variable
from paddle.jit import ProgramTranslator, to_static
from paddle.jit import to_static
from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
SEED = 2000
DATATYPE = 'float32'
program_translator = ProgramTranslator()
# Note: Set True to eliminate randomness.
# 1. For one operation, cuDNN has several algorithms,
......@@ -662,7 +661,7 @@ class TestTrain(unittest.TestCase):
self.temp_dir.cleanup()
def train_bmn(self, args, place, to_static):
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
loss_data = []
with fluid.dygraph.guard(place):
......@@ -822,7 +821,7 @@ class TestTrain(unittest.TestCase):
break
def predict_dygraph(self, data):
program_translator.enable(False)
paddle.jit.enable_to_static(False)
with fluid.dygraph.guard(self.place):
bmn = BMN(self.args)
# load dygraph trained parameters
......
......@@ -19,7 +19,6 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.jit.api import to_static
from paddle.jit.dy2static.program_translator import ProgramTranslator
from paddle.jit.dy2static.utils import Dygraph2StaticException
SEED = 2020
......@@ -35,10 +34,10 @@ class TestDy2staticException(unittest.TestCase):
def test_error(self):
if self.dyfunc:
with self.assertRaisesRegex(Dygraph2StaticException, self.error):
ProgramTranslator().enable(True)
paddle.jit.enable_to_static(True)
self.assertTrue(to_static(self.dyfunc)(self.x))
paddle.fluid.dygraph.base._in_declarative_mode_ = False
ProgramTranslator().enable(False)
paddle.jit.enable_to_static(False)
def test_continue_in_for(x):
......
......@@ -18,9 +18,6 @@ import numpy as np
from test_resnet import ResNetHelper
import paddle
from paddle.jit import ProgramTranslator
program_translator = ProgramTranslator()
class TestResnetWithPass(unittest.TestCase):
......@@ -35,7 +32,7 @@ class TestResnetWithPass(unittest.TestCase):
paddle.fluid.set_flags({"FLAGS_max_inplace_grad_add": 8})
def train(self, to_static):
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
return self.resnet_helper.train(to_static, self.build_strategy)
def verify_predict(self):
......
......@@ -20,7 +20,6 @@ from test_fetch_feed import Linear, Pool2D
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static
from paddle.jit.dy2static import convert_to_static
......@@ -91,8 +90,7 @@ class TestCacheProgramWithOptimizer(unittest.TestCase):
return self.train(to_static=False)
def train(self, to_static=False):
prog_trans = ProgramTranslator()
prog_trans.enable(to_static)
paddle.jit.enable_to_static(to_static)
with fluid.dygraph.guard(fluid.CPUPlace()):
dygraph_net = self.dygraph_class()
......
......@@ -21,11 +21,8 @@ from test_program_translator import get_source_code
import paddle
import paddle.fluid as fluid
import paddle.jit.dy2static as _jst
from paddle.jit import ProgramTranslator
from paddle.jit.dy2static.convert_call_func import CONVERSION_OPTIONS
program_translator = ProgramTranslator()
SEED = 2020
np.random.seed(SEED)
......@@ -93,13 +90,13 @@ class TestRecursiveCall1(unittest.TestCase):
self.dyfunc = nested_func
def get_dygraph_output(self):
program_translator.enable(False)
paddle.jit.enable_to_static(False)
with fluid.dygraph.guard():
res = self.dyfunc(self.input).numpy()
return res
def get_static_output(self):
program_translator.enable(True)
paddle.jit.enable_to_static(True)
with fluid.dygraph.guard():
res = self.dyfunc(self.input).numpy()
return res
......@@ -193,11 +190,11 @@ class TestRecursiveCall2(unittest.TestCase):
return res.numpy()
def get_dygraph_output(self):
program_translator.enable(False)
paddle.jit.enable_to_static(False)
return self._run()
def get_static_output(self):
program_translator.enable(True)
paddle.jit.enable_to_static(True)
return self._run()
def test_transformed_static_result(self):
......
......@@ -38,7 +38,6 @@ os.environ["CUDA_VISIBLE_DEVICES"] = "1"
import paddle
from paddle.fluid.dygraph import to_variable
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static
from paddle.nn import BatchNorm
......@@ -61,8 +60,6 @@ lambda_identity = 0.5
IMAGE_SIZE = 64
SEED = 2020
program_translator = ProgramTranslator()
class Cycle_Gan(fluid.dygraph.Layer):
def __init__(self, input_channel, istrain=True):
......@@ -560,7 +557,7 @@ def train(args, to_static):
else fluid.CPUPlace()
)
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
with fluid.dygraph.guard(place):
max_images_num = args.max_images_num
......
......@@ -22,7 +22,6 @@ from test_basic_api_transformation import dyfunc_to_variable
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import Layer, to_variable
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static
from paddle.jit.dy2static.program_translator import (
ConcreteProgram,
......@@ -30,8 +29,6 @@ from paddle.jit.dy2static.program_translator import (
)
from paddle.static import InputSpec
program_trans = ProgramTranslator()
class SimpleNet(Layer):
def __init__(self):
......@@ -210,7 +207,7 @@ def foo_func(a, b, c=1, d=2):
class TestDifferentInputSpecCacheProgram(unittest.TestCase):
def setUp(self):
program_trans.enable(True)
paddle.jit.enable_to_static(True)
def test_with_different_input(self):
with fluid.dygraph.guard(fluid.CPUPlace()):
......@@ -357,7 +354,7 @@ class TestDeclarativeAPI(unittest.TestCase):
with self.assertRaises(RuntimeError):
func(np.ones(5).astype("int32"))
program_trans.enable(False)
paddle.jit.enable_to_static(False)
with self.assertRaises(AssertionError):
# AssertionError: We Only support to_variable in imperative mode,
# please use fluid.dygraph.guard() as context to run it in imperative Mode
......@@ -367,7 +364,7 @@ class TestDeclarativeAPI(unittest.TestCase):
class TestDecorateModelDirectly(unittest.TestCase):
def setUp(self):
paddle.disable_static()
program_trans.enable(True)
paddle.jit.enable_to_static(True)
self.x = to_variable(np.ones([4, 10]).astype('float32'))
def test_fake_input(self):
......
......@@ -19,7 +19,6 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.jit import to_static
from paddle.jit.dy2static.program_translator import ProgramTranslator
PLACE = (
fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace()
......@@ -135,8 +134,7 @@ class TestNetWithDict(unittest.TestCase):
return self.train(to_static=False)
def train(self, to_static=False):
prog_trans = ProgramTranslator()
prog_trans.enable(to_static)
paddle.jit.enable_to_static(to_static)
with fluid.dygraph.guard(PLACE):
net = MainNetWithDict(batch_size=self.batch_size)
ret = net(self.x)
......@@ -191,8 +189,7 @@ class TestDictPop(unittest.TestCase):
return self._run(to_static=False)
def _run(self, to_static):
prog_trans = ProgramTranslator()
prog_trans.enable(to_static)
paddle.jit.enable_to_static(to_static)
result = self.dygraph_func(self.input)
......@@ -237,8 +234,7 @@ class TestDictPop3(TestNetWithDict):
self.x = np.array([2, 2]).astype('float32')
def train(self, to_static=False):
prog_trans = ProgramTranslator()
prog_trans.enable(to_static)
paddle.jit.enable_to_static(to_static)
with fluid.dygraph.guard(PLACE):
net = NetWithDictPop()
ret = net(z=0, x=self.x, y=True)
......
......@@ -174,7 +174,6 @@ class TestErrorBase(unittest.TestCase):
self.filepath = inspect.getfile(unwrap(self.func_call))
self.set_exception_type()
self.set_message()
self.prog_trans = paddle.jit.ProgramTranslator()
def set_input(self):
self.input = np.ones([3, 2])
......@@ -364,30 +363,6 @@ class TestErrorStaticLayerCallInRuntime2(TestErrorStaticLayerCallInRuntime):
]
# Situation 2: Call ProgramTranslator().get_output(...) to use Dynamic-to-Static
class TestErrorGetOutputInCompiletime(TestErrorStaticLayerCallInCompiletime):
def set_func_call(self):
self.func_call = lambda: self.prog_trans.get_output(
unwrap(self.func), self.input
)
class TestErrorGetOutputInCompiletime_2(
TestErrorStaticLayerCallInCompiletime_2
):
def set_func_call(self):
self.func_call = lambda: self.prog_trans.get_output(
unwrap(self.func), self.input
)
class TestErrorGetOutputInRuntime(TestErrorStaticLayerCallInRuntime):
def set_func_call(self):
self.func_call = lambda: self.prog_trans.get_output(
unwrap(self.func), self.input
)
class TestJitSaveInCompiletime(TestErrorBase):
def setUp(self):
self.reset_flags_to_default()
......
......@@ -18,7 +18,6 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static
SEED = 2020
......@@ -68,8 +67,7 @@ class TestPool2D(unittest.TestCase):
self.data = np.random.random((1, 2, 4, 4)).astype('float32')
def train(self, to_static=False):
program_translator = ProgramTranslator()
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
with fluid.dygraph.guard():
dy_layer = self.dygraph_class()
......
......@@ -20,11 +20,8 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
from paddle.static import InputSpec
program_translator = ProgramTranslator()
# 0. for in range var.numpy()[0]
@paddle.jit.to_static
......@@ -363,7 +360,7 @@ class TestTransformBase(unittest.TestCase):
)
def _run(self, to_static):
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
with fluid.dygraph.guard():
return self.dygraph_func(self.input)
......@@ -390,7 +387,7 @@ class TestTransform(TestTransformBase):
class TestTransformForOriginalList(TestTransform):
def _run(self, to_static):
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
with fluid.dygraph.guard():
return self.dygraph_func()
......
......@@ -78,14 +78,5 @@ class TestFullNameDecorator(unittest.TestCase):
DoubleDecorated().double_decorated_func2(x)
class TestImportProgramTranslator(unittest.TestCase):
def test_diff_pkg_same_cls(self):
dygraph_prog_trans = paddle.jit.ProgramTranslator()
dy_to_stat_prog_trans = paddle.jit.ProgramTranslator()
full_pkg_prog_trans = paddle.jit.ProgramTranslator()
self.assertEqual(dygraph_prog_trans, dy_to_stat_prog_trans)
self.assertEqual(dygraph_prog_trans, full_pkg_prog_trans)
if __name__ == '__main__':
unittest.main()
......@@ -74,10 +74,9 @@ class TestGrad(unittest.TestCase):
self.x.stop_gradient = False
def _run(self, func, to_static):
prog_trans = paddle.jit.ProgramTranslator()
prog_trans.enable(to_static)
paddle.jit.enable_to_static(to_static)
ret = func(self.x).numpy()
prog_trans.enable(True)
paddle.jit.enable_to_static(True)
return ret
def test_forward(self):
......
......@@ -136,8 +136,7 @@ class TestGridGenerator(unittest.TestCase):
self.x = paddle.uniform(shape=[1, 20, 2], dtype='float32')
def _run(self, to_static):
prog_trans = paddle.jit.ProgramTranslator()
prog_trans.enable(to_static)
paddle.jit.enable_to_static(to_static)
net = GridGenerator(40, 20)
ret = net(self.x, [32, 100])
......
......@@ -44,7 +44,6 @@ from ifelse_simple_func import (
import paddle
import paddle.fluid.core as core
import paddle.nn.functional as F
from paddle.jit.dy2static.program_translator import ProgramTranslator
from paddle.jit.dy2static.utils import Dygraph2StaticException
np.random.seed(1)
......@@ -64,10 +63,10 @@ class TestDy2staticException(unittest.TestCase):
def test_error(self):
if self.dyfunc:
with self.assertRaisesRegex(Dygraph2StaticException, self.error):
ProgramTranslator().enable(True)
paddle.jit.enable_to_static(True)
self.assertTrue(paddle.jit.to_static(self.dyfunc)(self.x))
paddle.fluid.dygraph.base._in_declarative_mode_ = False
ProgramTranslator().enable(False)
paddle.jit.enable_to_static(False)
class TestDygraphIfElse(unittest.TestCase):
......@@ -254,8 +253,7 @@ class TestDygraphIfElseNet(unittest.TestCase):
return self._run(to_static=False)
def _run(self, to_static=False):
prog_trans = ProgramTranslator()
prog_trans.enable(to_static)
paddle.jit.enable_to_static(to_static)
with fluid.dygraph.guard(place):
net = self.Net()
......@@ -364,8 +362,7 @@ class TestDiffModeNet(unittest.TestCase):
self.Net = DiffModeNet1
def _run(self, mode, to_static):
prog_trans = ProgramTranslator()
prog_trans.enable(to_static)
paddle.jit.enable_to_static(to_static)
net = self.Net(mode)
ret = net(self.x, self.y)
......@@ -423,10 +420,10 @@ class TestDy2StIfElseRetInt1(unittest.TestCase):
self.out = self.get_dy2stat_out()
def get_dy2stat_out(self):
ProgramTranslator().enable(True)
paddle.jit.enable_to_static(True)
static_func = paddle.jit.to_static(self.dyfunc)
out = static_func(self.x)
ProgramTranslator().enable(False)
paddle.jit.enable_to_static(False)
return out
def test_ast_to_func(self):
......@@ -457,7 +454,7 @@ class TestDy2StIfElseRetInt4(TestDy2StIfElseRetInt1):
self.dyfunc = dyfunc_ifelse_ret_int4
def test_ast_to_func(self):
ProgramTranslator().enable(True)
paddle.jit.enable_to_static(True)
with self.assertRaises(Dygraph2StaticException):
static_func = paddle.jit.to_static(self.dyfunc)
out = static_func(self.x)
......@@ -467,7 +464,7 @@ class TestDy2StIfElseRetInt4(TestDy2StIfElseRetInt1):
# an exception is thrown during Dy2St, making the `_in_declarative_mode_`
# a wrong value. So we need to set `_in_declarative_mode_` to False manually.
paddle.fluid.dygraph.base._in_declarative_mode_ = False
ProgramTranslator().enable(False)
paddle.jit.enable_to_static(False)
class IfElseNet(paddle.nn.Layer):
......
......@@ -75,8 +75,7 @@ class SequentialLayer(nn.Layer):
def train(model, to_static):
prog_trans = paddle.jit.ProgramTranslator.get_instance()
prog_trans.enable(to_static)
paddle.jit.enable_to_static(to_static)
x = paddle.ones(shape=[2, 3], dtype='int32')
out = model(x)
......
......@@ -27,13 +27,11 @@ import paddle.fluid as fluid
from paddle import _legacy_C_ops
from paddle.fluid.dygraph import to_variable
from paddle.fluid.framework import _non_static_mode
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static
from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
SEED = 2020
program_translator = ProgramTranslator()
# Add InputSpec to make unittest run faster.
input_specs = [
paddle.static.InputSpec([None, None], 'int64'),
......@@ -542,7 +540,7 @@ class TestLACModel(unittest.TestCase):
self.dy_param_path = os.path.join(self.temp_dir.name, 'lac_dy_param')
def train(self, args, to_static):
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
place = (
fluid.CUDAPlace(0)
if fluid.is_compiled_with_cuda()
......@@ -656,7 +654,7 @@ class TestLACModel(unittest.TestCase):
def predict_dygraph(self, batch):
words, targets, length = batch
program_translator.enable(False)
paddle.jit.enable_to_static(False)
with fluid.dygraph.guard(self.place):
model = LexNet(self.args)
# load dygraph trained parameters
......
......@@ -21,12 +21,9 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
from paddle.jit.dy2static.logical_transformer import cmpop_node_to_str
from paddle.utils import gast
program_translator = ProgramTranslator()
SEED = 2020
np.random.seed(22)
......@@ -186,7 +183,7 @@ class TestLogicalBase(unittest.TestCase):
)
def _run(self, to_static):
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
with fluid.dygraph.guard(self.place):
result = self.dygraph_func(self.input)
return result.numpy()
......
......@@ -52,7 +52,7 @@ class TestLstm(unittest.TestCase):
self.temp_dir.cleanup()
def run_lstm(self, to_static):
paddle.jit.ProgramTranslator().enable(to_static)
paddle.jit.enable_to_static(to_static)
paddle.disable_static()
paddle.static.default_main_program().random_seed = 1001
......@@ -70,7 +70,7 @@ class TestLstm(unittest.TestCase):
np.testing.assert_allclose(dygraph_out, static_out, rtol=1e-05)
def test_save_in_eval(self, with_training=True):
paddle.jit.ProgramTranslator().enable(True)
paddle.jit.enable_to_static(True)
net = Net(12, 2)
x = paddle.randn((2, 10, 12))
if with_training:
......@@ -141,7 +141,7 @@ class TestSaveInEvalMode(unittest.TestCase):
self.temp_dir.cleanup()
def test_save_in_eval(self):
paddle.jit.ProgramTranslator().enable(True)
paddle.jit.enable_to_static(True)
net = LinearNet()
x = paddle.randn((2, 10))
x.stop_gradient = False
......
......@@ -24,7 +24,6 @@ import paddle
import paddle.fluid as fluid
from paddle.fluid.initializer import MSRA
from paddle.fluid.param_attr import ParamAttr
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static
from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
from paddle.nn import BatchNorm, Linear
......@@ -36,7 +35,6 @@ if fluid.is_compiled_with_cuda():
fluid.set_flags({'FLAGS_cudnn_deterministic': True})
SEED = 2020
program_translator = ProgramTranslator()
class ConvBNLayer(fluid.dygraph.Layer):
......@@ -494,7 +492,7 @@ class Args:
def train_mobilenet(args, to_static):
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
with fluid.dygraph.guard(args.place):
np.random.seed(SEED)
......@@ -605,7 +603,7 @@ def predict_static(args, data):
def predict_dygraph(args, data):
program_translator.enable(False)
paddle.jit.enable_to_static(False)
with fluid.dygraph.guard(args.place):
if args.model == "MobileNetV1":
model = MobileNetV1(class_dim=args.class_dim, scale=1.0)
......
......@@ -17,7 +17,7 @@ import unittest
import numpy as np
import paddle
from paddle.jit import ProgramTranslator, to_static
from paddle.jit import to_static
class NetWithParameterList(paddle.nn.Layer):
......@@ -53,12 +53,11 @@ class TestParameterList(unittest.TestCase):
def setUp(self):
self.seed = 2021
self.iter_num = 5
self.prog_trans = ProgramTranslator()
def train(self, is_iter, to_static):
paddle.seed(self.seed)
np.random.seed(self.seed)
self.prog_trans.enable(to_static)
paddle.jit.enable_to_static(to_static)
if is_iter:
net = NetWithParameterList(10, 3)
else:
......@@ -110,7 +109,6 @@ class TestRawParameterList(unittest.TestCase):
def setUp(self):
self.seed = 2021
self.iter_num = 5
self.prog_trans = ProgramTranslator()
def init_net(self):
self.net = NetWithRawParamList(10, 3)
......@@ -118,7 +116,7 @@ class TestRawParameterList(unittest.TestCase):
def train(self, to_static):
paddle.seed(self.seed)
np.random.seed(self.seed)
self.prog_trans.enable(to_static)
paddle.jit.enable_to_static(to_static)
self.init_net()
sgd = paddle.optimizer.SGD(0.1, parameters=self.net.parameters())
......
......@@ -20,7 +20,6 @@ from test_fetch_feed import Linear
import paddle
import paddle.fluid as fluid
from paddle.fluid.layers.utils import flatten
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static
SEED = 2020
......@@ -130,7 +129,6 @@ class TestWithNestedOutput(unittest.TestCase):
class TestWithTrainAndEval(unittest.TestCase):
def test_switch_eval_and_train(self):
program_translator = ProgramTranslator()
with fluid.dygraph.guard():
linear_net = Linear()
......
......@@ -18,9 +18,7 @@ import numpy
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator, to_static
program_translator = ProgramTranslator()
from paddle.jit import to_static
# 1. print Tensor
......@@ -99,7 +97,7 @@ class TestPrintBase(unittest.TestCase):
raise NotImplementedError("Print test should implement set_test_func")
def _run(self, to_static):
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
with fluid.dygraph.guard():
self.dygraph_func(self.input)
......
......@@ -19,7 +19,6 @@ import unittest
import astor
import numpy as np
from ifelse_simple_func import (
dyfunc_with_if_else,
dyfunc_with_if_else_early_return1,
dyfunc_with_if_else_early_return2,
)
......@@ -27,7 +26,6 @@ from ifelse_simple_func import (
import paddle
import paddle.fluid as fluid
import paddle.jit.dy2static as _jst
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static
from paddle.jit.dy2static.utils import func_to_source_code
from paddle.utils import gast
......@@ -213,121 +211,25 @@ class NetWithError(fluid.dygraph.layers.Layer):
return y
class TestDygraphToStaticCode(unittest.TestCase):
def setUp(self):
# set to print all string diff when assertEqual fails
self.maxDiff = None
def test_decorator(self):
program_translator = ProgramTranslator()
code = program_translator.get_code(dyfunc_with_if_else)
print(code)
answer = get_source_code(StaticCode1.dyfunc_with_if_else)
self.assertEqual(
answer.replace('\n', '').replace(' ', ''),
code.replace('\n', '').replace(' ', ''),
)
def test_program_translator(self):
answer = get_source_code(StaticCode2.dyfunc_with_if_else)
program_translator = ProgramTranslator()
code = program_translator.get_code(dyfunc_with_if_else)
print(code)
self.assertEqual(
answer.replace('\n', '').replace(' ', ''),
code.replace('\n', '').replace(' ', ''),
)
class TestEnableDeclarative(unittest.TestCase):
def setUp(self):
self.x = np.random.randn(30, 10, 32).astype('float32')
self.weight = np.random.randn(32, 64).astype('float32')
self.program_translator = ProgramTranslator()
def test_raise_error(self):
with fluid.dygraph.guard():
self.program_translator.enable(True)
paddle.jit.enable_to_static(True)
net = NetWithError()
with self.assertRaises(ValueError):
net(fluid.dygraph.to_variable(self.x))
def test_enable_disable_get_output(self):
self.program_translator.enable(True)
with fluid.dygraph.guard():
static_output = self.program_translator.get_output(
simple_func, self.x, self.weight
)
self.program_translator.enable(False)
with fluid.dygraph.guard():
dygraph_output = self.program_translator.get_output(
simple_func, self.x, self.weight
)
np.testing.assert_allclose(
static_output.numpy(),
dygraph_output.numpy(),
rtol=1e-05,
atol=1e-4,
)
def test_enable_disable_get_func(self):
self.program_translator.enable(True)
with fluid.dygraph.guard():
static_func = self.program_translator.get_func(simple_func)
self.assertTrue(callable(static_func))
static_output = static_func(self.x, self.weight)
self.assertTrue(isinstance(static_output, fluid.Variable))
self.program_translator.enable(False)
with fluid.dygraph.guard():
dygraph_func = self.program_translator.get_func(simple_func)
self.assertTrue(callable(dygraph_func))
dygraph_output = dygraph_func(self.x, self.weight)
self.assertTrue(
isinstance(
dygraph_output,
(fluid.core.VarBase, fluid.core.eager.Tensor),
)
)
def test_enable_disable_get_program(self):
self.program_translator.enable(True)
static_output = self.program_translator.get_program(
simple_func, self.x, self.weight
)
self.assertTrue(isinstance(static_output, tuple))
self.assertEqual(len(static_output), 4)
self.assertTrue(isinstance(static_output[0], fluid.Program))
self.assertTrue(isinstance(static_output[1], fluid.Program))
# Check all inputs and outputs are Variable
for var in static_output[2]:
self.assertTrue(isinstance(var, fluid.Variable))
for var in static_output[3]:
self.assertTrue(isinstance(var, fluid.Variable))
self.program_translator.enable(False)
with fluid.dygraph.guard():
dygraph_output = self.program_translator.get_program(
simple_func, self.x, self.weight
)
self.assertTrue(
isinstance(
dygraph_output,
(fluid.core.VarBase, fluid.core.eager.Tensor),
)
)
def test_enable_disable_declarative(self):
self.program_translator.enable(True)
paddle.jit.enable_to_static(True)
with fluid.dygraph.guard():
static_output = decorated_simple_func(self.x, self.weight)
self.program_translator.enable(False)
paddle.jit.enable_to_static(False)
with fluid.dygraph.guard():
dygraph_output = decorated_simple_func(self.x, self.weight)
np.testing.assert_allclose(
......@@ -346,28 +248,6 @@ class Net(fluid.dygraph.layers.Layer):
return x + 1
class TestErrorWithInitFromStaticMode(unittest.TestCase):
def setUp(self):
self.program_translator = ProgramTranslator()
self.x = np.random.randn(10, 32).astype('float32')
def test_raise_error(self):
# disable imperative
paddle.enable_static()
net = Net()
self.program_translator.enable(True)
with self.assertRaisesRegex(
RuntimeError, "only available in dynamic mode"
):
self.program_translator.get_output(net.forward, self.x)
with self.assertRaisesRegex(
RuntimeError, "only available in dynamic mode"
):
self.program_translator.get_program(net.forward, self.x)
class SwitchModeNet(paddle.nn.Layer):
def __init__(self):
super().__init__()
......
......@@ -22,14 +22,11 @@ import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.base import to_variable
from paddle.fluid.optimizer import SGDOptimizer
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static
PRINT_STEP = 20
SEED = 2020
program_translator = ProgramTranslator()
class SimpleLSTMRNN(fluid.Layer):
def __init__(
......@@ -319,12 +316,12 @@ def train(place):
def train_dygraph(place):
program_translator.enable(False)
paddle.jit.enable_to_static(False)
return train(place)
def train_static(place):
program_translator.enable(True)
paddle.jit.enable_to_static(True)
return train(place)
......
......@@ -23,8 +23,6 @@ import paddle
PRINT_STEP = 20
SEED = 2020
program_translator = paddle.jit.ProgramTranslator()
class SimpleLSTMRNN(paddle.nn.Layer):
def __init__(
......@@ -319,12 +317,12 @@ def train(place):
def train_dygraph(place):
program_translator.enable(False)
paddle.jit.enable_to_static(False)
return train(place)
def train_static(place):
program_translator.enable(True)
paddle.jit.enable_to_static(True)
return train(place)
......
......@@ -23,11 +23,9 @@ import paddle
import paddle.fluid as fluid
import paddle.nn.functional as F
from paddle.fluid.dygraph import Layer, to_variable
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static
SEED = 2020
program_translator = ProgramTranslator()
class Policy(Layer):
......@@ -61,7 +59,7 @@ class Args:
def train(args, place, to_static):
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
env = gym.make('CartPole-v0')
env.seed(SEED)
......
......@@ -23,7 +23,6 @@ from predictor_utils import PredictorTools
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
from paddle.nn import BatchNorm
......@@ -39,7 +38,6 @@ place = (
fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace()
)
program_translator = ProgramTranslator()
if fluid.is_compiled_with_cuda():
fluid.set_flags({'FLAGS_cudnn_deterministic': True})
......@@ -323,7 +321,7 @@ class ResNetHelper:
return total_loss.numpy()
def predict_dygraph(self, data):
program_translator.enable(False)
paddle.jit.enable_to_static(False)
with fluid.dygraph.guard(place):
resnet = ResNet()
......@@ -382,7 +380,7 @@ class TestResnet(unittest.TestCase):
self.resnet_helper = ResNetHelper()
def train(self, to_static):
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
return self.resnet_helper.train(to_static)
def verify_predict(self):
......
......@@ -20,7 +20,6 @@ from test_resnet import SEED, ResNet, optimizer_setting
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
# NOTE: Reduce batch_size from 8 to 2 to avoid unittest timeout.
batch_size = 2
......@@ -29,7 +28,6 @@ place = (
fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace()
)
program_translator = ProgramTranslator()
if fluid.is_compiled_with_cuda():
fluid.set_flags({'FLAGS_cudnn_deterministic': True})
......@@ -115,7 +113,7 @@ def train(to_static, build_strategy=None):
class TestResnet(unittest.TestCase):
def train(self, to_static):
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
return train(to_static)
def test_resnet(self):
......
......@@ -20,13 +20,11 @@ from test_resnet import SEED, ResNet, optimizer_setting
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
# NOTE: Reduce batch_size from 8 to 2 to avoid unittest timeout.
batch_size = 2
epoch_num = 1
program_translator = ProgramTranslator()
if fluid.is_compiled_with_cuda():
fluid.set_flags({'FLAGS_cudnn_deterministic': True})
......@@ -114,7 +112,7 @@ def train(to_static, build_strategy=None):
class TestResnet(unittest.TestCase):
def train(self, to_static):
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
build_strategy = paddle.static.BuildStrategy()
# Why set `build_strategy.enable_inplace = False` here?
# Because we find that this PASS strategy of PE makes dy2st training loss unstable.
......
......@@ -35,7 +35,6 @@ place = (
paddle.CUDAPlace(0) if paddle.is_compiled_with_cuda() else paddle.CPUPlace()
)
program_translator = paddle.jit.ProgramTranslator()
if paddle.is_compiled_with_cuda():
paddle.fluid.set_flags({'FLAGS_cudnn_deterministic': True})
......@@ -319,7 +318,7 @@ class TestResnet(unittest.TestCase):
return total_loss.numpy()
def predict_dygraph(self, data):
program_translator.enable(False)
paddle.jit.enable_to_static(False)
paddle.disable_static(place)
resnet = ResNet()
......@@ -380,7 +379,7 @@ class TestResnet(unittest.TestCase):
return out
def train(self, to_static):
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
return self.do_train(to_static)
def verify_predict(self):
......
......@@ -20,7 +20,7 @@ from ifelse_simple_func import dyfunc_with_if_else
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.jit import ProgramTranslator, to_static
from paddle.jit import to_static
from paddle.jit.dy2static.utils import Dygraph2StaticException
SEED = 2020
......@@ -272,13 +272,12 @@ class TestReturnBase(unittest.TestCase):
else fluid.CPUPlace()
)
self.init_dygraph_func()
self.program_translator = ProgramTranslator()
def init_dygraph_func(self):
self.dygraph_func = test_return_base
def _run(self, to_static=False):
self.program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
with fluid.dygraph.guard():
res = self.dygraph_func(self.input)
if isinstance(res, (tuple, list)):
......
......@@ -20,7 +20,6 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static
from paddle.jit.dy2static.partial_program import partial_program_from
from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
......@@ -32,7 +31,6 @@ np.random.seed(SEED)
place = (
fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace()
)
program_translator = ProgramTranslator()
class SimpleFcLayer(fluid.dygraph.Layer):
......@@ -148,8 +146,7 @@ class TestDyToStaticSaveInferenceModel(unittest.TestCase):
class TestPartialProgramRaiseError(unittest.TestCase):
def test_param_type(self):
program_translator = ProgramTranslator()
program_translator.enable(True)
paddle.jit.enable_to_static(True)
x_data = np.random.random((20, 20)).astype('float32')
with fluid.dygraph.guard(fluid.CPUPlace()):
......
......@@ -22,7 +22,6 @@ from test_fetch_feed import Linear
import paddle
import paddle.fluid as fluid
from paddle.fluid.optimizer import AdamOptimizer
from paddle.jit import ProgramTranslator
np.random.seed(2020)
......@@ -42,13 +41,12 @@ class TestDyToStaticSaveLoad(unittest.TestCase):
self.temp_dir.cleanup()
def test_save_load_same_result(self):
program_translator = ProgramTranslator()
x_data = np.random.randn(30, 10, 32).astype('float32')
batch_num = 3
with fluid.dygraph.guard(place):
program_translator.enable(True)
paddle.jit.enable_to_static(True)
x = fluid.dygraph.to_variable(x_data)
net = Linear(32, 64)
adam = AdamOptimizer(
......@@ -81,7 +79,7 @@ class TestDyToStaticSaveLoad(unittest.TestCase):
x = fluid.dygraph.to_variable(x_data)
# predict output
program_translator.enable(False)
paddle.jit.enable_to_static(False)
dygraph_out, dygraph_loss = dygraph_net(x)
np.testing.assert_allclose(
......
......@@ -25,7 +25,6 @@ from predictor_utils import PredictorTools
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.base import to_variable
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static
from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
from paddle.nn import BatchNorm, Linear
......@@ -374,8 +373,7 @@ class TestSeResnet(unittest.TestCase):
self.temp_dir.cleanup()
def train(self, train_reader, to_static):
program_translator = ProgramTranslator()
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
np.random.seed(SEED)
......@@ -473,8 +471,7 @@ class TestSeResnet(unittest.TestCase):
)
def predict_dygraph(self, data):
program_translator = ProgramTranslator()
program_translator.enable(False)
paddle.jit.enable_to_static(False)
with fluid.dygraph.guard(place):
se_resnext = SeResNeXt()
......
......@@ -20,12 +20,10 @@ from test_lac import DynamicGRU
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import to_variable
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static
from paddle.nn import Embedding, Linear
SEED = 2020
program_translator = ProgramTranslator()
# Note: Set True to eliminate randomness.
# 1. For one operation, cuDNN has several algorithms,
......@@ -304,7 +302,7 @@ class Args:
def train(args, to_static):
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
place = (
fluid.CUDAPlace(0)
if fluid.is_compiled_with_cuda()
......
......@@ -23,13 +23,11 @@ from seq2seq_utils import Seq2SeqModelHyperParams, get_data_iter
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
from paddle.nn import ClipGradByGlobalNorm
place = (
fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace()
)
program_translator = ProgramTranslator()
STEP_NUM = 10
PRINT_STEP = 2
......@@ -197,14 +195,14 @@ class TestSeq2seq(unittest.TestCase):
self.temp_dir.cleanup()
def run_dygraph(self, mode="train", attn_model=False):
program_translator.enable(False)
paddle.jit.enable_to_static(False)
if mode == "train":
return train(self.args, attn_model)
else:
return infer(self.args, attn_model)
def run_static(self, mode="train", attn_model=False):
program_translator.enable(True)
paddle.jit.enable_to_static(True)
if mode == "train":
return train(self.args, attn_model)
else:
......
......@@ -21,7 +21,6 @@ from simnet_dygraph_model import BOW, HingeLoss
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
SEED = 102
random.seed(SEED)
......@@ -104,8 +103,7 @@ def train(conf_dict, to_static):
"""
train process
"""
program_translator = ProgramTranslator()
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
# Get device
if fluid.is_compiled_with_cuda():
......
......@@ -102,8 +102,7 @@ def train(conf_dict, to_static):
"""
train process
"""
program_translator = paddle.jit.ProgramTranslator()
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
# Get device
if paddle.is_compiled_with_cuda():
......
......@@ -23,7 +23,6 @@ from paddle.static import InputSpec
SEED = 2020
np.random.seed(SEED)
prog_trans = paddle.jit.ProgramTranslator()
@paddle.jit.to_static
......@@ -130,7 +129,7 @@ class TestSliceWithoutControlFlow(unittest.TestCase):
return self._run(to_static=False)
def _run(self, to_static):
prog_trans.enable(to_static)
paddle.jit.enable_to_static(to_static)
res = self.dygraph_func(self.input)
return res.numpy()
......@@ -177,7 +176,7 @@ class TestSetValueWithLayerAndSave(unittest.TestCase):
self.temp_dir.cleanup()
def test_set_value_with_save(self):
prog_trans.enable(True)
paddle.jit.enable_to_static(True)
model = LayerWithSetValue(input_dim=10, hidden=1)
x = paddle.full(shape=[5, 10], fill_value=5.0, dtype="float32")
paddle.jit.save(
......
......@@ -28,8 +28,7 @@ def tensor_clone(x):
class TestTensorClone(unittest.TestCase):
def _run(self, to_static):
prog_trans = paddle.jit.ProgramTranslator()
prog_trans.enable(to_static)
paddle.jit.enable_to_static(to_static)
x = paddle.ones([1, 2, 3])
return tensor_clone(x).numpy()
......@@ -48,8 +47,7 @@ def tensor_numpy(x):
class TestTensorDygraphOnlyMethodError(unittest.TestCase):
def _run(self, to_static):
prog_trans = paddle.jit.ProgramTranslator()
prog_trans.enable(to_static)
paddle.jit.enable_to_static(to_static)
x = paddle.zeros([2, 2])
y = tensor_numpy(x)
return y.numpy()
......@@ -69,8 +67,7 @@ def tensor_item(x):
class TestTensorItem(unittest.TestCase):
def _run(self, to_static):
prog_trans = paddle.jit.ProgramTranslator()
prog_trans.enable(to_static)
paddle.jit.enable_to_static(to_static)
x = paddle.ones([1])
if to_static:
return tensor_item(x).numpy()
......@@ -92,8 +89,7 @@ def tensor_size(x):
class TestTensorSize(unittest.TestCase):
def _run(self, to_static):
prog_trans = paddle.jit.ProgramTranslator()
prog_trans.enable(to_static)
paddle.jit.enable_to_static(to_static)
x = paddle.ones([1, 2, 3])
if not to_static:
return tensor_size(x)
......
......@@ -24,7 +24,6 @@ from tsm_config_utils import merge_configs, parse_config, print_configs
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import to_variable
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static
from paddle.nn import BatchNorm, Linear
......@@ -290,8 +289,7 @@ def create_optimizer(cfg, params):
def train(args, fake_data_reader, to_static):
program_translator = ProgramTranslator()
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
config = parse_config(args.config)
train_config = merge_configs(config, 'train', vars(args))
......
......@@ -20,7 +20,6 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static
from paddle.nn import Embedding
......@@ -278,8 +277,7 @@ total_steps = len(dataset) * epoch_num // batch_size
def train(to_static):
program_translator = ProgramTranslator()
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
random.seed(0)
np.random.seed(0)
......
......@@ -22,7 +22,6 @@ from yolov3 import YOLOv3, cfg
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import to_variable
from paddle.jit import ProgramTranslator
paddle.enable_static()
random.seed(0)
......@@ -78,8 +77,7 @@ fake_data_reader = FakeDataReader()
def train(to_static):
program_translator = ProgramTranslator()
program_translator.enable(to_static)
paddle.jit.enable_to_static(to_static)
random.seed(0)
np.random.seed(0)
......
......@@ -20,7 +20,6 @@ import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import to_variable
from paddle.fluid.framework import EagerParamBase, ParamBase, in_dygraph_mode
from paddle.jit import ProgramTranslator
class L1(fluid.Layer):
......@@ -339,11 +338,10 @@ class BufferNetWithModification(paddle.nn.Layer):
class TestModifiedBuffer(unittest.TestCase):
def funcsetUp(self):
paddle.disable_static()
self.prog_trans = ProgramTranslator()
self.shape = [10, 16]
def _run(self, to_static=False):
self.prog_trans.enable(to_static)
paddle.jit.enable_to_static(to_static)
x = paddle.ones([1], 'int32')
net = BufferNetWithModification(self.shape)
......
......@@ -49,7 +49,6 @@ class TestDirectory(unittest.TestCase):
'paddle.DataParallel',
'paddle.jit',
'paddle.jit.to_static',
'paddle.jit.ProgramTranslator',
'paddle.jit.TranslatedLayer',
'paddle.jit.save',
'paddle.jit.load',
......@@ -143,7 +142,6 @@ class TestDirectory(unittest.TestCase):
'paddle.imperative.jit',
'paddle.imperative.TracedLayer',
'paddle.imperative.declarative',
'paddle.imperative.ProgramTranslator',
'paddle.imperative.TranslatedLayer',
'paddle.imperative.jit.save',
'paddle.imperative.jit.load',
......
......@@ -20,7 +20,6 @@ import numpy as np
import paddle
from paddle.fluid.framework import _dygraph_place_guard
from paddle.jit.dy2static.program_translator import ProgramTranslator
from paddle.jit.layer import Layer
from paddle.static import InputSpec
......@@ -61,11 +60,10 @@ class TestMultiLoad(unittest.TestCase):
x = paddle.full([2, 4], 2)
model = Net()
program_translator = ProgramTranslator()
program_translator.enable(False)
paddle.jit.enable_to_static(False)
forward_out1 = model.forward(x)
infer_out1 = model.infer(x)
program_translator.enable(True)
paddle.jit.enable_to_static(True)
model_path = os.path.join(self.temp_dir.name, 'multi_program')
paddle.jit.save(model, model_path, combine_params=True)
......
......@@ -18,10 +18,9 @@ from .api import load
from .api import to_static
from .api import not_to_static
from .api import ignore_module
from .dy2static.logging_utils import set_code_level, set_verbosity
from .dy2static.program_translator import enable_to_static
from . import dy2static
from .dy2static.program_translator import ProgramTranslator
from .dy2static.logging_utils import set_code_level, set_verbosity
from .translated_layer import TranslatedLayer
__all__ = [ # noqa
......@@ -29,9 +28,9 @@ __all__ = [ # noqa
'load',
'to_static',
'ignore_module',
'ProgramTranslator',
'TranslatedLayer',
'set_code_level',
'set_verbosity',
'not_to_static',
'enable_to_static',
]
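After this hunk, the public surface of `paddle.jit` exposes the flat function instead of the singleton; a small sketch, assuming a build that includes this PR:

```python
import paddle
from paddle.jit import enable_to_static, to_static, set_verbosity

# ProgramTranslator is gone from __all__; the flat function is the
# documented switch now, and both access paths name the same callable.
assert paddle.jit.enable_to_static is enable_to_static
enable_to_static(True)  # same effect as the old ProgramTranslator().enable(True)
```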
......@@ -75,8 +75,6 @@ from paddle.fluid.framework import (
from paddle.fluid.framework import dygraph_only, _non_static_mode
from paddle.fluid.wrapped_decorator import wrap_decorator
__all__ = []
def create_program_from_desc(program_desc):
program = Program()
......@@ -160,7 +158,7 @@ def _dygraph_to_static_func_(dygraph_func):
if _non_static_mode() or not program_translator.enable_to_static:
logging_utils.warn(
"The decorator 'dygraph_to_static_func' doesn't work in "
"dygraph mode or set ProgramTranslator.enable to False. "
"dygraph mode or set 'paddle.jit.enable_to_static' to False. "
"We will just return dygraph output."
)
return dygraph_func(*args, **kwargs)
......@@ -911,7 +909,7 @@ def save(layer, path, input_spec=None, **configs):
prog_translator = ProgramTranslator()
if not prog_translator.enable_to_static:
raise RuntimeError(
"The paddle.jit.save doesn't work when setting ProgramTranslator.enable to False."
"The paddle.jit.save doesn't work when setting 'paddle.jit.enable_to_static' to False."
)
if not (
......
......@@ -12,10 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from .utils import (
saw,
UndefinedVar,
)
from .utils import saw, UndefinedVar, ast_to_source_code
from .convert_operators import convert_logical_and as And # noqa: F401
from .convert_operators import convert_var_dtype as AsDtype # noqa: F401
from .convert_operators import convert_assert as Assert # noqa: F401
......
......@@ -416,9 +416,9 @@ class StaticFunction:
# will show up **only once**. StaticFunction.__call__ will run many times, it is appropriate to
# display this warning message only once.
logging_utils.warn(
"The decorator '@paddle.jit.to_static' does NOT work when setting ProgramTranslator.enable to False. "
"The decorator '@paddle.jit.to_static' does NOT work when setting 'paddle.jit.enable_to_static' to False. "
"We will just return dygraph output. If you would like to get static graph output, please call API "
"ProgramTranslator.enable(True)"
"paddle.jit.enable_to_static(True)"
)
return self._call_dygraph_function(*args, **kwargs)
......@@ -1222,8 +1222,7 @@ class ProgramTranslator:
return x_v
prog_trans = paddle.jit.ProgramTranslator()
prog_trans.enable(False)
paddle.jit.enable_to_static(False)
x = paddle.ones([1, 2])
# ProgramTranslator is disabled so the func is run in dygraph
......@@ -1513,3 +1512,47 @@ class ProgramTranslator:
"""
return self._program_cache
def enable_to_static(enable_to_static_bool):
"""
Enable or disable the conversion from imperative to static graph by
ProgramTranslator globally.
Args:
enable_to_static_bool (bool): True or False to enable or disable converting to static.
Returns:
None.
Examples:
.. code-block:: python
import paddle
@paddle.jit.to_static
def func(x):
if paddle.mean(x) > 0:
x_v = x - 1
else:
x_v = x + 1
return x_v
paddle.jit.enable_to_static(False)
x = paddle.ones([1, 2])
# ProgramTranslator is disabled so the func is run in dygraph
print(func(x)) # [[0. 0.]]
"""
check_type(
enable_to_static_bool,
"enable_to_static_bool",
bool,
"paddle.jit.enable_to_static",
)
_program_trans = ProgramTranslator()
_program_trans.enable(enable_to_static_bool)
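The new function validates its argument with `check_type`, so a non-bool input fails fast; a small usage sketch, assuming `check_type` raises `TypeError` on mismatch as it does elsewhere in Paddle:

```python
import paddle

try:
    paddle.jit.enable_to_static("yes")  # wrong type on purpose
except TypeError as e:
    print("rejected non-bool:", e)

paddle.jit.enable_to_static(True)  # bools are accepted; restores the default
```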
......@@ -25,7 +25,6 @@ import paddle.vision.models as models
from paddle import Model, fluid, to_tensor
from paddle.hapi.model import prepare_distributed_context
from paddle.io import Dataset, DistributedBatchSampler
from paddle.jit.dy2static.program_translator import ProgramTranslator
from paddle.metric import Accuracy
from paddle.nn import Conv2D, Linear, ReLU, Sequential
from paddle.nn.layer.loss import CrossEntropyLoss
......@@ -826,8 +825,8 @@ class TestModelFunction(unittest.TestCase):
for dynamic in [True, False]:
paddle.disable_static() if dynamic else None
prog_translator = ProgramTranslator()
prog_translator.enable(False) if not dynamic else None
paddle.jit.enable_to_static(False) if not dynamic else None
net = LeNet()
inputs = [InputSpec([None, 1, 28, 28], 'float32', 'x')]
model = Model(net, inputs)
......