diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index 65b823a05ad303b1d59e84e42c7d061c22c4e3c3..e41ac22ae98bdd7fc7f5574e680e9de9b38c7e30 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -564,7 +564,7 @@ def _fake_interface_only_(func):
         raise AssertionError(
             "'%s' only can be called by `paddle.Tensor` in dynamic graph mode. Suggestions:\n"
             "  1. If you are in static graph mode, you can switch to dynamic graph mode by turning off `paddle.enable_static()` or calling `paddle.disable_static()`.\n"
-            "  2. If you are using `@paddle.jit.to_static`, you can turn off ProgramTranslator by calling `paddle.jit.ProgramTranslator().enable(False)`. "
+            "  2. If you are using `@paddle.jit.to_static`, you can disable the dynamic-to-static conversion by calling `paddle.jit.enable_to_static(False)`. "
             "If you have to translate dynamic graph to static graph, please use other API to replace '%s'."
             % (func.__name__, func.__name__)
         )
diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_assert.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_assert.py
index 0bfd19e732678406e19dcf29f093cfbc0369db83..55e1060104bdb205bc57f7f7351ceb3a5a943bae 100644
--- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_assert.py
+++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_assert.py
@@ -18,7 +18,6 @@ import numpy
 
 import paddle
 import paddle.fluid as fluid
-from paddle.jit import ProgramTranslator
 from paddle.jit.api import to_static
 
 
@@ -35,7 +34,7 @@ class TestAssertVariable(unittest.TestCase):
     def _run(self, func, x, with_exception, to_static):
-        ProgramTranslator().enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         if with_exception:
             with self.assertRaises(BaseException):
                 with fluid.dygraph.guard():
diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_bert.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_bert.py
index 863ed57b86a6feb5d2b23897a9d75fed67ee28a6..88054e689fc0e1b9a43eb18d7be7e597087d27c9 100644
--- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_bert.py
+++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_bert.py
@@ -24,10 +24,8 @@ from predictor_utils import PredictorTools
 
 import paddle
 import paddle.fluid as fluid
-from paddle.jit import ProgramTranslator
 from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
 
-program_translator = ProgramTranslator()
 place = (
     fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace()
 )
@@ -127,11 +125,11 @@ class TestBert(unittest.TestCase):
         return loss, ppl
 
     def train_dygraph(self, bert_config, data_reader):
-        program_translator.enable(False)
+        paddle.jit.enable_to_static(False)
         return self.train(bert_config, data_reader, False)
 
     def train_static(self, bert_config, data_reader):
-        program_translator.enable(True)
+        paddle.jit.enable_to_static(True)
         return self.train(bert_config, data_reader, True)
 
     def predict_static(self, data):
@@ -157,7 +155,7 @@ class TestBert(unittest.TestCase):
         return pred_res
 
     def predict_dygraph(self, bert_config, data):
-        program_translator.enable(False)
+        paddle.jit.enable_to_static(False)
         with fluid.dygraph.guard(place):
             bert = PretrainModelLayer(
                 config=bert_config, weight_sharing=False, use_fp16=False
diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_bmn.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_bmn.py
index 
ae1c1327ccd49ae5d33f115e2a08e7ff9a86c159..a6a9d7281208dca73a081983c3bced0bc452227b 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_bmn.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_bmn.py @@ -24,12 +24,11 @@ import paddle import paddle.fluid as fluid from paddle.fluid import ParamAttr from paddle.fluid.dygraph import to_variable -from paddle.jit import ProgramTranslator, to_static +from paddle.jit import to_static from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX SEED = 2000 DATATYPE = 'float32' -program_translator = ProgramTranslator() # Note: Set True to eliminate randomness. # 1. For one operation, cuDNN has several algorithms, @@ -662,7 +661,7 @@ class TestTrain(unittest.TestCase): self.temp_dir.cleanup() def train_bmn(self, args, place, to_static): - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) loss_data = [] with fluid.dygraph.guard(place): @@ -822,7 +821,7 @@ class TestTrain(unittest.TestCase): break def predict_dygraph(self, data): - program_translator.enable(False) + paddle.jit.enable_to_static(False) with fluid.dygraph.guard(self.place): bmn = BMN(self.args) # load dygraph trained parameters diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_break_continue.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_break_continue.py index 209f6acbcd168e2dda1f6423f1048177a4c76a07..499f7285f29aad5eb25435b4ad9401b05f392bbf 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_break_continue.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_break_continue.py @@ -19,7 +19,6 @@ import numpy as np import paddle import paddle.fluid as fluid from paddle.jit.api import to_static -from paddle.jit.dy2static.program_translator import ProgramTranslator from paddle.jit.dy2static.utils import Dygraph2StaticException SEED = 2020 @@ -35,10 +34,10 @@ class TestDy2staticException(unittest.TestCase): def test_error(self): if self.dyfunc: with self.assertRaisesRegex(Dygraph2StaticException, self.error): - ProgramTranslator().enable(True) + paddle.jit.enable_to_static(True) self.assertTrue(to_static(self.dyfunc)(self.x)) paddle.fluid.dygraph.base._in_declarative_mode_ = False - ProgramTranslator().enable(False) + paddle.jit.enable_to_static(False) def test_continue_in_for(x): diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_build_strategy.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_build_strategy.py index 96ae39c0cc0d19cdc48e32c192f9f51e93255a2c..13fb22421d36584a559186f7391433be983018be 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_build_strategy.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_build_strategy.py @@ -18,9 +18,6 @@ import numpy as np from test_resnet import ResNetHelper import paddle -from paddle.jit import ProgramTranslator - -program_translator = ProgramTranslator() class TestResnetWithPass(unittest.TestCase): @@ -35,7 +32,7 @@ class TestResnetWithPass(unittest.TestCase): paddle.fluid.set_flags({"FLAGS_max_inplace_grad_add": 8}) def train(self, to_static): - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) return self.resnet_helper.train(to_static, self.build_strategy) def verify_predict(self): diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_cache_program.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_cache_program.py index 
d4b7c603a1f8af8ef51f6bbcdccc3242c660e65c..68698794f1a1c729653f32090512a131099c4d1a 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_cache_program.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_cache_program.py @@ -20,7 +20,6 @@ from test_fetch_feed import Linear, Pool2D import paddle import paddle.fluid as fluid -from paddle.jit import ProgramTranslator from paddle.jit.api import to_static from paddle.jit.dy2static import convert_to_static @@ -91,8 +90,7 @@ class TestCacheProgramWithOptimizer(unittest.TestCase): return self.train(to_static=False) def train(self, to_static=False): - prog_trans = ProgramTranslator() - prog_trans.enable(to_static) + paddle.jit.enable_to_static(to_static) with fluid.dygraph.guard(fluid.CPUPlace()): dygraph_net = self.dygraph_class() diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_convert_call.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_convert_call.py index ad08aa1e3ccae5f837978b75feafd19257c45a14..c14631c35b6b41bc57f2062afa011d9282c2fc18 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_convert_call.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_convert_call.py @@ -21,11 +21,8 @@ from test_program_translator import get_source_code import paddle import paddle.fluid as fluid import paddle.jit.dy2static as _jst -from paddle.jit import ProgramTranslator from paddle.jit.dy2static.convert_call_func import CONVERSION_OPTIONS -program_translator = ProgramTranslator() - SEED = 2020 np.random.seed(SEED) @@ -93,13 +90,13 @@ class TestRecursiveCall1(unittest.TestCase): self.dyfunc = nested_func def get_dygraph_output(self): - program_translator.enable(False) + paddle.jit.enable_to_static(False) with fluid.dygraph.guard(): res = self.dyfunc(self.input).numpy() return res def get_static_output(self): - program_translator.enable(True) + paddle.jit.enable_to_static(True) with fluid.dygraph.guard(): res = self.dyfunc(self.input).numpy() return res @@ -193,11 +190,11 @@ class TestRecursiveCall2(unittest.TestCase): return res.numpy() def get_dygraph_output(self): - program_translator.enable(False) + paddle.jit.enable_to_static(False) return self._run() def get_static_output(self): - program_translator.enable(True) + paddle.jit.enable_to_static(True) return self._run() def test_transformed_static_result(self): diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_cycle_gan.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_cycle_gan.py index 988a994a72d9260872f564779f8a57551e2696c5..0701750e3011a7ac5fbf96fbe251a70ac67bca36 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_cycle_gan.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_cycle_gan.py @@ -38,7 +38,6 @@ os.environ["CUDA_VISIBLE_DEVICES"] = "1" import paddle from paddle.fluid.dygraph import to_variable -from paddle.jit import ProgramTranslator from paddle.jit.api import to_static from paddle.nn import BatchNorm @@ -61,8 +60,6 @@ lambda_identity = 0.5 IMAGE_SIZE = 64 SEED = 2020 -program_translator = ProgramTranslator() - class Cycle_Gan(fluid.dygraph.Layer): def __init__(self, input_channel, istrain=True): @@ -560,7 +557,7 @@ def train(args, to_static): else fluid.CPUPlace() ) - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) with fluid.dygraph.guard(place): max_images_num = args.max_images_num diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_declarative.py 
b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_declarative.py index cf9f094388d56c30676d0976e70b02a4fde1047a..2f9287a315c0f7dbef696da771bc462e5c921866 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_declarative.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_declarative.py @@ -22,7 +22,6 @@ from test_basic_api_transformation import dyfunc_to_variable import paddle import paddle.fluid as fluid from paddle.fluid.dygraph import Layer, to_variable -from paddle.jit import ProgramTranslator from paddle.jit.api import to_static from paddle.jit.dy2static.program_translator import ( ConcreteProgram, @@ -30,8 +29,6 @@ from paddle.jit.dy2static.program_translator import ( ) from paddle.static import InputSpec -program_trans = ProgramTranslator() - class SimpleNet(Layer): def __init__(self): @@ -210,7 +207,7 @@ def foo_func(a, b, c=1, d=2): class TestDifferentInputSpecCacheProgram(unittest.TestCase): def setUp(self): - program_trans.enable(True) + paddle.jit.enable_to_static(True) def test_with_different_input(self): with fluid.dygraph.guard(fluid.CPUPlace()): @@ -357,7 +354,7 @@ class TestDeclarativeAPI(unittest.TestCase): with self.assertRaises(RuntimeError): func(np.ones(5).astype("int32")) - program_trans.enable(False) + paddle.jit.enable_to_static(False) with self.assertRaises(AssertionError): # AssertionError: We Only support to_variable in imperative mode, # please use fluid.dygraph.guard() as context to run it in imperative Mode @@ -367,7 +364,7 @@ class TestDeclarativeAPI(unittest.TestCase): class TestDecorateModelDirectly(unittest.TestCase): def setUp(self): paddle.disable_static() - program_trans.enable(True) + paddle.jit.enable_to_static(True) self.x = to_variable(np.ones([4, 10]).astype('float32')) def test_fake_input(self): diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_dict.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_dict.py index 2c882fc332e706bd1f407d8650a0d25e89fd0486..2a9b8157dea90c06e5ce2a0428a0abc645cc10bd 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_dict.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_dict.py @@ -19,7 +19,6 @@ import numpy as np import paddle import paddle.fluid as fluid from paddle.jit import to_static -from paddle.jit.dy2static.program_translator import ProgramTranslator PLACE = ( fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace() @@ -135,8 +134,7 @@ class TestNetWithDict(unittest.TestCase): return self.train(to_static=False) def train(self, to_static=False): - prog_trans = ProgramTranslator() - prog_trans.enable(to_static) + paddle.jit.enable_to_static(to_static) with fluid.dygraph.guard(PLACE): net = MainNetWithDict(batch_size=self.batch_size) ret = net(self.x) @@ -191,8 +189,7 @@ class TestDictPop(unittest.TestCase): return self._run(to_static=False) def _run(self, to_static): - prog_trans = ProgramTranslator() - prog_trans.enable(to_static) + paddle.jit.enable_to_static(to_static) result = self.dygraph_func(self.input) @@ -237,8 +234,7 @@ class TestDictPop3(TestNetWithDict): self.x = np.array([2, 2]).astype('float32') def train(self, to_static=False): - prog_trans = ProgramTranslator() - prog_trans.enable(to_static) + paddle.jit.enable_to_static(to_static) with fluid.dygraph.guard(PLACE): net = NetWithDictPop() ret = net(z=0, x=self.x, y=True) diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_error.py 
b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_error.py index b2c9dc1f75e9238804903eaa1e5476485b87125a..074d45f9ccb41ad95d1e60b3291174862a3ba228 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_error.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_error.py @@ -174,7 +174,6 @@ class TestErrorBase(unittest.TestCase): self.filepath = inspect.getfile(unwrap(self.func_call)) self.set_exception_type() self.set_message() - self.prog_trans = paddle.jit.ProgramTranslator() def set_input(self): self.input = np.ones([3, 2]) @@ -364,30 +363,6 @@ class TestErrorStaticLayerCallInRuntime2(TestErrorStaticLayerCallInRuntime): ] -# Situation 2: Call ProgramTranslator().get_output(...) to use Dynamic-to-Static -class TestErrorGetOutputInCompiletime(TestErrorStaticLayerCallInCompiletime): - def set_func_call(self): - self.func_call = lambda: self.prog_trans.get_output( - unwrap(self.func), self.input - ) - - -class TestErrorGetOutputInCompiletime_2( - TestErrorStaticLayerCallInCompiletime_2 -): - def set_func_call(self): - self.func_call = lambda: self.prog_trans.get_output( - unwrap(self.func), self.input - ) - - -class TestErrorGetOutputInRuntime(TestErrorStaticLayerCallInRuntime): - def set_func_call(self): - self.func_call = lambda: self.prog_trans.get_output( - unwrap(self.func), self.input - ) - - class TestJitSaveInCompiletime(TestErrorBase): def setUp(self): self.reset_flags_to_default() diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_fetch_feed.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_fetch_feed.py index 6cd2c8bd17465ac21a2438f89eaff8c2166eb8cc..b10b96a0329a7de656d2189662fc03b87e2b3950 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_fetch_feed.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_fetch_feed.py @@ -18,7 +18,6 @@ import numpy as np import paddle import paddle.fluid as fluid -from paddle.jit import ProgramTranslator from paddle.jit.api import to_static SEED = 2020 @@ -68,8 +67,7 @@ class TestPool2D(unittest.TestCase): self.data = np.random.random((1, 2, 4, 4)).astype('float32') def train(self, to_static=False): - program_translator = ProgramTranslator() - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) with fluid.dygraph.guard(): dy_layer = self.dygraph_class() diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_for_enumerate.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_for_enumerate.py index ed2480ab85232d53198b15e810dc826d10ddc678..daf6f0a9aca688e81834eb6e2367b7c7e127ba17 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_for_enumerate.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_for_enumerate.py @@ -20,11 +20,8 @@ import numpy as np import paddle import paddle.fluid as fluid -from paddle.jit import ProgramTranslator from paddle.static import InputSpec -program_translator = ProgramTranslator() - # 0. 
for in range var.numpy()[0] @paddle.jit.to_static @@ -363,7 +360,7 @@ class TestTransformBase(unittest.TestCase): ) def _run(self, to_static): - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) with fluid.dygraph.guard(): return self.dygraph_func(self.input) @@ -390,7 +387,7 @@ class TestTransform(TestTransformBase): class TestTransformForOriginalList(TestTransform): def _run(self, to_static): - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) with fluid.dygraph.guard(): return self.dygraph_func() diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_full_name_usage.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_full_name_usage.py index 0777279942ee0e41082e5e07f45bbbbef87b8ee8..a33263faa0b400f39ef1ac77e4952d06016aa12c 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_full_name_usage.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_full_name_usage.py @@ -78,14 +78,5 @@ class TestFullNameDecorator(unittest.TestCase): DoubleDecorated().double_decorated_func2(x) -class TestImportProgramTranslator(unittest.TestCase): - def test_diff_pkg_same_cls(self): - dygraph_prog_trans = paddle.jit.ProgramTranslator() - dy_to_stat_prog_trans = paddle.jit.ProgramTranslator() - full_pkg_prog_trans = paddle.jit.ProgramTranslator() - self.assertEqual(dygraph_prog_trans, dy_to_stat_prog_trans) - self.assertEqual(dygraph_prog_trans, full_pkg_prog_trans) - - if __name__ == '__main__': unittest.main() diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_grad.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_grad.py index e359514e4c8d693ee899decf61819372cae8aed9..7f2d2e19f66b128fd7e6a9bd02c85555b505852d 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_grad.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_grad.py @@ -74,10 +74,9 @@ class TestGrad(unittest.TestCase): self.x.stop_gradient = False def _run(self, func, to_static): - prog_trans = paddle.jit.ProgramTranslator() - prog_trans.enable(to_static) + paddle.jit.enable_to_static(to_static) ret = func(self.x).numpy() - prog_trans.enable(True) + paddle.jit.enable_to_static(True) return ret def test_forward(self): diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_grid_generator.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_grid_generator.py index f46ae0eb6bfa489c9914bf9d3ba84adcdee4840a..e4bf77d8b9a485a5f0ab6d9471eb8ab810aa6192 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_grid_generator.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_grid_generator.py @@ -136,8 +136,7 @@ class TestGridGenerator(unittest.TestCase): self.x = paddle.uniform(shape=[1, 20, 2], dtype='float32') def _run(self, to_static): - prog_trans = paddle.jit.ProgramTranslator() - prog_trans.enable(to_static) + paddle.jit.enable_to_static(to_static) net = GridGenerator(40, 20) ret = net(self.x, [32, 100]) diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_ifelse.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_ifelse.py index d06ba1b2ec26fb140a2cc76f1a8c4fef0fef603b..72ed077af3339ec3d8088677aab6f74553d2cf98 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_ifelse.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_ifelse.py @@ -44,7 +44,6 @@ from ifelse_simple_func import ( import paddle import paddle.fluid.core as core import 
paddle.nn.functional as F -from paddle.jit.dy2static.program_translator import ProgramTranslator from paddle.jit.dy2static.utils import Dygraph2StaticException np.random.seed(1) @@ -64,10 +63,10 @@ class TestDy2staticException(unittest.TestCase): def test_error(self): if self.dyfunc: with self.assertRaisesRegex(Dygraph2StaticException, self.error): - ProgramTranslator().enable(True) + paddle.jit.enable_to_static(True) self.assertTrue(paddle.jit.to_static(self.dyfunc)(self.x)) paddle.fluid.dygraph.base._in_declarative_mode_ = False - ProgramTranslator().enable(False) + paddle.jit.enable_to_static(False) class TestDygraphIfElse(unittest.TestCase): @@ -254,8 +253,7 @@ class TestDygraphIfElseNet(unittest.TestCase): return self._run(to_static=False) def _run(self, to_static=False): - prog_trans = ProgramTranslator() - prog_trans.enable(to_static) + paddle.jit.enable_to_static(to_static) with fluid.dygraph.guard(place): net = self.Net() @@ -364,8 +362,7 @@ class TestDiffModeNet(unittest.TestCase): self.Net = DiffModeNet1 def _run(self, mode, to_static): - prog_trans = ProgramTranslator() - prog_trans.enable(to_static) + paddle.jit.enable_to_static(to_static) net = self.Net(mode) ret = net(self.x, self.y) @@ -423,10 +420,10 @@ class TestDy2StIfElseRetInt1(unittest.TestCase): self.out = self.get_dy2stat_out() def get_dy2stat_out(self): - ProgramTranslator().enable(True) + paddle.jit.enable_to_static(True) static_func = paddle.jit.to_static(self.dyfunc) out = static_func(self.x) - ProgramTranslator().enable(False) + paddle.jit.enable_to_static(False) return out def test_ast_to_func(self): @@ -457,7 +454,7 @@ class TestDy2StIfElseRetInt4(TestDy2StIfElseRetInt1): self.dyfunc = dyfunc_ifelse_ret_int4 def test_ast_to_func(self): - ProgramTranslator().enable(True) + paddle.jit.enable_to_static(True) with self.assertRaises(Dygraph2StaticException): static_func = paddle.jit.to_static(self.dyfunc) out = static_func(self.x) @@ -467,7 +464,7 @@ class TestDy2StIfElseRetInt4(TestDy2StIfElseRetInt1): # an exception is thrown during Dy2St, making the `_in_declarative_mode_` # a wrong value. So We need set `_in_declarative_mode_` to False manually. 
paddle.fluid.dygraph.base._in_declarative_mode_ = False - ProgramTranslator().enable(False) + paddle.jit.enable_to_static(False) class IfElseNet(paddle.nn.Layer): diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_isinstance.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_isinstance.py index 490dda5bc404938b7ddc19c9f65415b6ac7d8b77..cb4c156b855b19e54fa6e485a6da072c98cd96b2 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_isinstance.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_isinstance.py @@ -75,8 +75,7 @@ class SequentialLayer(nn.Layer): def train(model, to_static): - prog_trans = paddle.jit.ProgramTranslator.get_instance() - prog_trans.enable(to_static) + paddle.jit.enable_to_static(to_static) x = paddle.ones(shape=[2, 3], dtype='int32') out = model(x) diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_lac.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_lac.py index 8e877f39bfe9565359011ee2fd110de8b7780164..94e1dba49313a0dd2092ba2199a845e66cc106be 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_lac.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_lac.py @@ -27,13 +27,11 @@ import paddle.fluid as fluid from paddle import _legacy_C_ops from paddle.fluid.dygraph import to_variable from paddle.fluid.framework import _non_static_mode -from paddle.jit import ProgramTranslator from paddle.jit.api import to_static from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX SEED = 2020 -program_translator = ProgramTranslator() # Add InputSpec to make unittest run faster. input_specs = [ paddle.static.InputSpec([None, None], 'int64'), @@ -542,7 +540,7 @@ class TestLACModel(unittest.TestCase): self.dy_param_path = os.path.join(self.temp_dir.name, 'lac_dy_param') def train(self, args, to_static): - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) place = ( fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() @@ -656,7 +654,7 @@ class TestLACModel(unittest.TestCase): def predict_dygraph(self, batch): words, targets, length = batch - program_translator.enable(False) + paddle.jit.enable_to_static(False) with fluid.dygraph.guard(self.place): model = LexNet(self.args) # load dygraph trained parameters diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_logical.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_logical.py index fd335c41ba24ef6bb0ea4a1e61d19b4a705d37d1..2161b2c4edef18898e11836a9c298bdbc44755c9 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_logical.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_logical.py @@ -21,12 +21,9 @@ import numpy as np import paddle import paddle.fluid as fluid -from paddle.jit import ProgramTranslator from paddle.jit.dy2static.logical_transformer import cmpop_node_to_str from paddle.utils import gast -program_translator = ProgramTranslator() - SEED = 2020 np.random.seed(22) @@ -186,7 +183,7 @@ class TestLogicalBase(unittest.TestCase): ) def _run(self, to_static): - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) with fluid.dygraph.guard(self.place): result = self.dygraph_func(self.input) return result.numpy() diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_lstm.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_lstm.py index 447488c5ef90ba21dd1973d59cef5d6e4a12193b..1c114a50914cefda2077e3ad62e26d7860857530 100644 
--- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_lstm.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_lstm.py @@ -52,7 +52,7 @@ class TestLstm(unittest.TestCase): self.temp_dir.cleanup() def run_lstm(self, to_static): - paddle.jit.ProgramTranslator().enable(to_static) + paddle.jit.enable_to_static(to_static) paddle.disable_static() paddle.static.default_main_program().random_seed = 1001 @@ -70,7 +70,7 @@ class TestLstm(unittest.TestCase): np.testing.assert_allclose(dygraph_out, static_out, rtol=1e-05) def test_save_in_eval(self, with_training=True): - paddle.jit.ProgramTranslator().enable(True) + paddle.jit.enable_to_static(True) net = Net(12, 2) x = paddle.randn((2, 10, 12)) if with_training: @@ -141,7 +141,7 @@ class TestSaveInEvalMode(unittest.TestCase): self.temp_dir.cleanup() def test_save_in_eval(self): - paddle.jit.ProgramTranslator().enable(True) + paddle.jit.enable_to_static(True) net = LinearNet() x = paddle.randn((2, 10)) x.stop_gradient = False diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_mobile_net.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_mobile_net.py index 6521dc31fc8caf48c96306e9ee5b738b25b219f6..d708dc1eadfedd2a14f9cc08782325eb2c4ee560 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_mobile_net.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_mobile_net.py @@ -24,7 +24,6 @@ import paddle import paddle.fluid as fluid from paddle.fluid.initializer import MSRA from paddle.fluid.param_attr import ParamAttr -from paddle.jit import ProgramTranslator from paddle.jit.api import to_static from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX from paddle.nn import BatchNorm, Linear @@ -36,7 +35,6 @@ if fluid.is_compiled_with_cuda(): fluid.set_flags({'FLAGS_cudnn_deterministic': True}) SEED = 2020 -program_translator = ProgramTranslator() class ConvBNLayer(fluid.dygraph.Layer): @@ -494,7 +492,7 @@ class Args: def train_mobilenet(args, to_static): - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) with fluid.dygraph.guard(args.place): np.random.seed(SEED) @@ -605,7 +603,7 @@ def predict_static(args, data): def predict_dygraph(args, data): - program_translator.enable(False) + paddle.jit.enable_to_static(False) with fluid.dygraph.guard(args.place): if args.model == "MobileNetV1": model = MobileNetV1(class_dim=args.class_dim, scale=1.0) diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_param_guard.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_param_guard.py index 9270b50b28d339ff85ded594735902ea480e2113..064d6a1857461b460775356d68bf8b188604895a 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_param_guard.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_param_guard.py @@ -17,7 +17,7 @@ import unittest import numpy as np import paddle -from paddle.jit import ProgramTranslator, to_static +from paddle.jit import to_static class NetWithParameterList(paddle.nn.Layer): @@ -53,12 +53,11 @@ class TestParameterList(unittest.TestCase): def setUp(self): self.seed = 2021 self.iter_num = 5 - self.prog_trans = ProgramTranslator() def train(self, is_iter, to_static): paddle.seed(self.seed) np.random.seed(self.seed) - self.prog_trans.enable(to_static) + paddle.jit.enable_to_static(to_static) if is_iter: net = NetWithParameterList(10, 3) else: @@ -110,7 +109,6 @@ class TestRawParameterList(unittest.TestCase): def setUp(self): self.seed = 2021 
self.iter_num = 5 - self.prog_trans = ProgramTranslator() def init_net(self): self.net = NetWithRawParamList(10, 3) @@ -118,7 +116,7 @@ class TestRawParameterList(unittest.TestCase): def train(self, to_static): paddle.seed(self.seed) np.random.seed(self.seed) - self.prog_trans.enable(to_static) + paddle.jit.enable_to_static(to_static) self.init_net() sgd = paddle.optimizer.SGD(0.1, parameters=self.net.parameters()) diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_partial_program.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_partial_program.py index cd172d8c97c2f8806ad8ac809bfc74bfe811aa3b..36d3ebcc945fa832f09abd7149b1d5d5ad817740 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_partial_program.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_partial_program.py @@ -20,7 +20,6 @@ from test_fetch_feed import Linear import paddle import paddle.fluid as fluid from paddle.fluid.layers.utils import flatten -from paddle.jit import ProgramTranslator from paddle.jit.api import to_static SEED = 2020 @@ -130,7 +129,6 @@ class TestWithNestedOutput(unittest.TestCase): class TestWithTrainAndEval(unittest.TestCase): def test_switch_eval_and_train(self): - program_translator = ProgramTranslator() with fluid.dygraph.guard(): linear_net = Linear() diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_print.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_print.py index 21de18a9f531dd97e8ecdb68262919fac78198f8..5663e1c96f3f581380ca422417a1bf5c1b82a671 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_print.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_print.py @@ -18,9 +18,7 @@ import numpy import paddle import paddle.fluid as fluid -from paddle.jit import ProgramTranslator, to_static - -program_translator = ProgramTranslator() +from paddle.jit import to_static # 1. 
print Tensor @@ -99,7 +97,7 @@ class TestPrintBase(unittest.TestCase): raise NotImplementedError("Print test should implement set_test_func") def _run(self, to_static): - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) with fluid.dygraph.guard(): self.dygraph_func(self.input) diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_program_translator.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_program_translator.py index db7e29782c72b2b057269675d090cb8190193a02..007b9916ebdcd40cca1a28085498d96cfb688d4b 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_program_translator.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_program_translator.py @@ -19,7 +19,6 @@ import unittest import astor import numpy as np from ifelse_simple_func import ( - dyfunc_with_if_else, dyfunc_with_if_else_early_return1, dyfunc_with_if_else_early_return2, ) @@ -27,7 +26,6 @@ from ifelse_simple_func import ( import paddle import paddle.fluid as fluid import paddle.jit.dy2static as _jst -from paddle.jit import ProgramTranslator from paddle.jit.api import to_static from paddle.jit.dy2static.utils import func_to_source_code from paddle.utils import gast @@ -213,121 +211,25 @@ class NetWithError(fluid.dygraph.layers.Layer): return y -class TestDygraphToStaticCode(unittest.TestCase): - def setUp(self): - # set to print all string diff when assertEqual fails - self.maxDiff = None - - def test_decorator(self): - program_translator = ProgramTranslator() - code = program_translator.get_code(dyfunc_with_if_else) - print(code) - answer = get_source_code(StaticCode1.dyfunc_with_if_else) - self.assertEqual( - answer.replace('\n', '').replace(' ', ''), - code.replace('\n', '').replace(' ', ''), - ) - - def test_program_translator(self): - answer = get_source_code(StaticCode2.dyfunc_with_if_else) - program_translator = ProgramTranslator() - code = program_translator.get_code(dyfunc_with_if_else) - print(code) - self.assertEqual( - answer.replace('\n', '').replace(' ', ''), - code.replace('\n', '').replace(' ', ''), - ) - - class TestEnableDeclarative(unittest.TestCase): def setUp(self): self.x = np.random.randn(30, 10, 32).astype('float32') self.weight = np.random.randn(32, 64).astype('float32') - self.program_translator = ProgramTranslator() def test_raise_error(self): with fluid.dygraph.guard(): - self.program_translator.enable(True) + paddle.jit.enable_to_static(True) net = NetWithError() with self.assertRaises(ValueError): net(fluid.dygraph.to_variable(self.x)) - def test_enable_disable_get_output(self): - self.program_translator.enable(True) - with fluid.dygraph.guard(): - static_output = self.program_translator.get_output( - simple_func, self.x, self.weight - ) - - self.program_translator.enable(False) - with fluid.dygraph.guard(): - dygraph_output = self.program_translator.get_output( - simple_func, self.x, self.weight - ) - np.testing.assert_allclose( - static_output.numpy(), - dygraph_output.numpy(), - rtol=1e-05, - atol=1e-4, - ) - - def test_enable_disable_get_func(self): - - self.program_translator.enable(True) - with fluid.dygraph.guard(): - static_func = self.program_translator.get_func(simple_func) - self.assertTrue(callable(static_func)) - static_output = static_func(self.x, self.weight) - self.assertTrue(isinstance(static_output, fluid.Variable)) - - self.program_translator.enable(False) - with fluid.dygraph.guard(): - dygraph_func = self.program_translator.get_func(simple_func) - 
self.assertTrue(callable(dygraph_func)) - dygraph_output = dygraph_func(self.x, self.weight) - self.assertTrue( - isinstance( - dygraph_output, - (fluid.core.VarBase, fluid.core.eager.Tensor), - ) - ) - - def test_enable_disable_get_program(self): - - self.program_translator.enable(True) - static_output = self.program_translator.get_program( - simple_func, self.x, self.weight - ) - self.assertTrue(isinstance(static_output, tuple)) - self.assertEqual(len(static_output), 4) - self.assertTrue(isinstance(static_output[0], fluid.Program)) - self.assertTrue(isinstance(static_output[1], fluid.Program)) - # Check all inputs and outputs are Variable - for var in static_output[2]: - self.assertTrue(isinstance(var, fluid.Variable)) - - for var in static_output[3]: - self.assertTrue(isinstance(var, fluid.Variable)) - - self.program_translator.enable(False) - with fluid.dygraph.guard(): - dygraph_output = self.program_translator.get_program( - simple_func, self.x, self.weight - ) - self.assertTrue( - isinstance( - dygraph_output, - (fluid.core.VarBase, fluid.core.eager.Tensor), - ) - ) - def test_enable_disable_declarative(self): - self.program_translator.enable(True) + paddle.jit.enable_to_static(True) with fluid.dygraph.guard(): static_output = decorated_simple_func(self.x, self.weight) - self.program_translator.enable(False) + paddle.jit.enable_to_static(False) with fluid.dygraph.guard(): dygraph_output = decorated_simple_func(self.x, self.weight) np.testing.assert_allclose( @@ -346,28 +248,6 @@ class Net(fluid.dygraph.layers.Layer): return x + 1 -class TestErrorWithInitFromStaticMode(unittest.TestCase): - def setUp(self): - self.program_translator = ProgramTranslator() - self.x = np.random.randn(10, 32).astype('float32') - - def test_raise_error(self): - # disable imperative - paddle.enable_static() - net = Net() - - self.program_translator.enable(True) - with self.assertRaisesRegex( - RuntimeError, "only available in dynamic mode" - ): - self.program_translator.get_output(net.forward, self.x) - - with self.assertRaisesRegex( - RuntimeError, "only available in dynamic mode" - ): - self.program_translator.get_program(net.forward, self.x) - - class SwitchModeNet(paddle.nn.Layer): def __init__(self): super().__init__() diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_ptb_lm.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_ptb_lm.py index 1ddd8e7065f819d94f19f67900a2af432b62b4fa..53687ca6c1ea5b1cb59a28fc2fe462b198414430 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_ptb_lm.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_ptb_lm.py @@ -22,14 +22,11 @@ import paddle import paddle.fluid as fluid from paddle.fluid.dygraph.base import to_variable from paddle.fluid.optimizer import SGDOptimizer -from paddle.jit import ProgramTranslator from paddle.jit.api import to_static PRINT_STEP = 20 SEED = 2020 -program_translator = ProgramTranslator() - class SimpleLSTMRNN(fluid.Layer): def __init__( @@ -319,12 +316,12 @@ def train(place): def train_dygraph(place): - program_translator.enable(False) + paddle.jit.enable_to_static(False) return train(place) def train_static(place): - program_translator.enable(True) + paddle.jit.enable_to_static(True) return train(place) diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_ptb_lm_v2.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_ptb_lm_v2.py index b28aa1a1c291f8d138362d75c152b1c66f17271d..b065a51bd56d8150861507d268bd6c79f9b71bf4 100644 --- 
a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_ptb_lm_v2.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_ptb_lm_v2.py @@ -23,8 +23,6 @@ import paddle PRINT_STEP = 20 SEED = 2020 -program_translator = paddle.jit.ProgramTranslator() - class SimpleLSTMRNN(paddle.nn.Layer): def __init__( @@ -319,12 +317,12 @@ def train(place): def train_dygraph(place): - program_translator.enable(False) + paddle.jit.enable_to_static(False) return train(place) def train_static(place): - program_translator.enable(True) + paddle.jit.enable_to_static(True) return train(place) diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_reinforcement_learning.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_reinforcement_learning.py index 3eeb6f169121b4b4baf02475cfed58d4685f5c84..2716e54d03fddd3d0fc7cbfc30378cb2daf8f2e0 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_reinforcement_learning.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_reinforcement_learning.py @@ -23,11 +23,9 @@ import paddle import paddle.fluid as fluid import paddle.nn.functional as F from paddle.fluid.dygraph import Layer, to_variable -from paddle.jit import ProgramTranslator from paddle.jit.api import to_static SEED = 2020 -program_translator = ProgramTranslator() class Policy(Layer): @@ -61,7 +59,7 @@ class Args: def train(args, place, to_static): - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) env = gym.make('CartPole-v0') env.seed(SEED) diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet.py index 9e8a854b5b402f25e11a3d33d1ee3dc2c0120321..7972904d80cb1e16c75b3178df0f5416084c143e 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet.py @@ -23,7 +23,6 @@ from predictor_utils import PredictorTools import paddle import paddle.fluid as fluid -from paddle.jit import ProgramTranslator from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX from paddle.nn import BatchNorm @@ -39,7 +38,6 @@ place = ( fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace() ) -program_translator = ProgramTranslator() if fluid.is_compiled_with_cuda(): fluid.set_flags({'FLAGS_cudnn_deterministic': True}) @@ -323,7 +321,7 @@ class ResNetHelper: return total_loss.numpy() def predict_dygraph(self, data): - program_translator.enable(False) + paddle.jit.enable_to_static(False) with fluid.dygraph.guard(place): resnet = ResNet() @@ -382,7 +380,7 @@ class TestResnet(unittest.TestCase): self.resnet_helper = ResNetHelper() def train(self, to_static): - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) return self.resnet_helper.train(to_static) def verify_predict(self): diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet_amp.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet_amp.py index 22c045dc05c503cd8e9d276f6775d083bc7c8e5e..8b91b4189570ccd6051872873e806da3bd9888ec 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet_amp.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet_amp.py @@ -20,7 +20,6 @@ from test_resnet import SEED, ResNet, optimizer_setting import paddle import paddle.fluid as fluid -from paddle.jit import ProgramTranslator # NOTE: Reduce batch_size from 8 to 2 to avoid 
unittest timeout. batch_size = 2 @@ -29,7 +28,6 @@ place = ( fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace() ) -program_translator = ProgramTranslator() if fluid.is_compiled_with_cuda(): fluid.set_flags({'FLAGS_cudnn_deterministic': True}) @@ -115,7 +113,7 @@ def train(to_static, build_strategy=None): class TestResnet(unittest.TestCase): def train(self, to_static): - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) return train(to_static) def test_resnet(self): diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet_pure_fp16.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet_pure_fp16.py index 6cbde55c974fb8b340b1bca0b02402b2d7288491..1f9c1d1104696d5475c9b82ebb424394f0a91165 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet_pure_fp16.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet_pure_fp16.py @@ -20,13 +20,11 @@ from test_resnet import SEED, ResNet, optimizer_setting import paddle import paddle.fluid as fluid -from paddle.jit import ProgramTranslator # NOTE: Reduce batch_size from 8 to 2 to avoid unittest timeout. batch_size = 2 epoch_num = 1 -program_translator = ProgramTranslator() if fluid.is_compiled_with_cuda(): fluid.set_flags({'FLAGS_cudnn_deterministic': True}) @@ -114,7 +112,7 @@ def train(to_static, build_strategy=None): class TestResnet(unittest.TestCase): def train(self, to_static): - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) build_strategy = paddle.static.BuildStrategy() # Why set `build_strategy.enable_inplace = False` here? # Because we find that this PASS strategy of PE makes dy2st training loss unstable. diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet_v2.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet_v2.py index 13a267cb3b7a8d98f07d6465762aa64605f2761a..45004d42e2c809fdbe56464731812a959ff35bd1 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet_v2.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_resnet_v2.py @@ -35,7 +35,6 @@ place = ( paddle.CUDAPlace(0) if paddle.is_compiled_with_cuda() else paddle.CPUPlace() ) -program_translator = paddle.jit.ProgramTranslator() if paddle.is_compiled_with_cuda(): paddle.fluid.set_flags({'FLAGS_cudnn_deterministic': True}) @@ -319,7 +318,7 @@ class TestResnet(unittest.TestCase): return total_loss.numpy() def predict_dygraph(self, data): - program_translator.enable(False) + paddle.jit.enable_to_static(False) paddle.disable_static(place) resnet = ResNet() @@ -380,7 +379,7 @@ class TestResnet(unittest.TestCase): return out def train(self, to_static): - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) return self.do_train(to_static) def verify_predict(self): diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_return.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_return.py index b4ec4fb8fd5afea62670299561b0cafb7702ab56..48c60795c4c38b702243aee23946544c367d01b3 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_return.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_return.py @@ -20,7 +20,7 @@ from ifelse_simple_func import dyfunc_with_if_else import paddle import paddle.fluid as fluid import paddle.fluid.core as core -from paddle.jit import ProgramTranslator, to_static +from paddle.jit import to_static from paddle.jit.dy2static.utils import 
Dygraph2StaticException SEED = 2020 @@ -272,13 +272,12 @@ class TestReturnBase(unittest.TestCase): else fluid.CPUPlace() ) self.init_dygraph_func() - self.program_translator = ProgramTranslator() def init_dygraph_func(self): self.dygraph_func = test_return_base def _run(self, to_static=False): - self.program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) with fluid.dygraph.guard(): res = self.dygraph_func(self.input) if isinstance(res, (tuple, list)): diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_save_inference_model.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_save_inference_model.py index 721e56e4d64f4ade32292ba35c2ed4252a5a3f7d..92bc890dcc4d228466d376d134831cb84d723979 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_save_inference_model.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_save_inference_model.py @@ -20,7 +20,6 @@ import numpy as np import paddle import paddle.fluid as fluid -from paddle.jit import ProgramTranslator from paddle.jit.api import to_static from paddle.jit.dy2static.partial_program import partial_program_from from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX @@ -32,7 +31,6 @@ np.random.seed(SEED) place = ( fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace() ) -program_translator = ProgramTranslator() class SimpleFcLayer(fluid.dygraph.Layer): @@ -148,8 +146,7 @@ class TestDyToStaticSaveInferenceModel(unittest.TestCase): class TestPartialProgramRaiseError(unittest.TestCase): def test_param_type(self): - program_translator = ProgramTranslator() - program_translator.enable(True) + paddle.jit.enable_to_static(True) x_data = np.random.random((20, 20)).astype('float32') with fluid.dygraph.guard(fluid.CPUPlace()): diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_save_load.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_save_load.py index c2d67514e3987147a0a8ad3e513b51027e46abc2..a7c031536ca19efbe3c43d7de63d59384cd1aa2c 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_save_load.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_save_load.py @@ -22,7 +22,6 @@ from test_fetch_feed import Linear import paddle import paddle.fluid as fluid from paddle.fluid.optimizer import AdamOptimizer -from paddle.jit import ProgramTranslator np.random.seed(2020) @@ -42,13 +41,12 @@ class TestDyToStaticSaveLoad(unittest.TestCase): self.temp_dir.cleanup() def test_save_load_same_result(self): - program_translator = ProgramTranslator() x_data = np.random.randn(30, 10, 32).astype('float32') batch_num = 3 with fluid.dygraph.guard(place): - program_translator.enable(True) + paddle.jit.enable_to_static(True) x = fluid.dygraph.to_variable(x_data) net = Linear(32, 64) adam = AdamOptimizer( @@ -81,7 +79,7 @@ class TestDyToStaticSaveLoad(unittest.TestCase): x = fluid.dygraph.to_variable(x_data) # predict output - program_translator.enable(False) + paddle.jit.enable_to_static(False) dygraph_out, dygraph_loss = dygraph_net(x) np.testing.assert_allclose( diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_se_resnet.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_se_resnet.py index 51e6b7068621aa8966fd6195710f89b935f230f7..e01b77af7655b534818b3f5bfbf8a8b51a97411d 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_se_resnet.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_se_resnet.py @@ -25,7 
+25,6 @@ from predictor_utils import PredictorTools import paddle import paddle.fluid as fluid from paddle.fluid.dygraph.base import to_variable -from paddle.jit import ProgramTranslator from paddle.jit.api import to_static from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX from paddle.nn import BatchNorm, Linear @@ -374,8 +373,7 @@ class TestSeResnet(unittest.TestCase): self.temp_dir.cleanup() def train(self, train_reader, to_static): - program_translator = ProgramTranslator() - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) np.random.seed(SEED) @@ -473,8 +471,7 @@ class TestSeResnet(unittest.TestCase): ) def predict_dygraph(self, data): - program_translator = ProgramTranslator() - program_translator.enable(False) + paddle.jit.enable_to_static(False) with fluid.dygraph.guard(place): se_resnext = SeResNeXt() diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_sentiment.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_sentiment.py index 95cf1365cbc4abe9f09fde594cf3bbf33d041975..4cc493bf476428cf86271e7f1214f14cdbad2f5a 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_sentiment.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_sentiment.py @@ -20,12 +20,10 @@ from test_lac import DynamicGRU import paddle import paddle.fluid as fluid from paddle.fluid.dygraph import to_variable -from paddle.jit import ProgramTranslator from paddle.jit.api import to_static from paddle.nn import Embedding, Linear SEED = 2020 -program_translator = ProgramTranslator() # Note: Set True to eliminate randomness. # 1. For one operation, cuDNN has several algorithms, @@ -304,7 +302,7 @@ class Args: def train(args, to_static): - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) place = ( fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_seq2seq.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_seq2seq.py index 060909dbc1ac9e1b2a2184a2c368dbf3ecbe90fc..8294b7965f11f5d2db7bc9603e4844917d7163c2 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_seq2seq.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_seq2seq.py @@ -23,13 +23,11 @@ from seq2seq_utils import Seq2SeqModelHyperParams, get_data_iter import paddle import paddle.fluid as fluid -from paddle.jit import ProgramTranslator from paddle.nn import ClipGradByGlobalNorm place = ( fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace() ) -program_translator = ProgramTranslator() STEP_NUM = 10 PRINT_STEP = 2 @@ -197,14 +195,14 @@ class TestSeq2seq(unittest.TestCase): self.temp_dir.cleanup() def run_dygraph(self, mode="train", attn_model=False): - program_translator.enable(False) + paddle.jit.enable_to_static(False) if mode == "train": return train(self.args, attn_model) else: return infer(self.args, attn_model) def run_static(self, mode="train", attn_model=False): - program_translator.enable(True) + paddle.jit.enable_to_static(True) if mode == "train": return train(self.args, attn_model) else: diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_simnet.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_simnet.py index f5ce0774d292062df3fe11cfa297299a6ec57f72..83512ba8826fcf479393fd502c3aa1d58721467f 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_simnet.py +++ 
b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_simnet.py @@ -21,7 +21,6 @@ from simnet_dygraph_model import BOW, HingeLoss import paddle import paddle.fluid as fluid -from paddle.jit import ProgramTranslator SEED = 102 random.seed(SEED) @@ -104,8 +103,7 @@ def train(conf_dict, to_static): """ train process """ - program_translator = ProgramTranslator() - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) # Get device if fluid.is_compiled_with_cuda(): diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_simnet_v2.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_simnet_v2.py index 46afc02f641e783f19e79e6588a40df5e0e56501..3e8cb4c10b3d4c4ca2cd9dd08e901bc177b86e7a 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_simnet_v2.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_simnet_v2.py @@ -102,8 +102,7 @@ def train(conf_dict, to_static): """ train process """ - program_translator = paddle.jit.ProgramTranslator() - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) # Get device if paddle.is_compiled_with_cuda(): diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_slice.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_slice.py index 87eb6e51e74a50f2117e47833cece6ab8adaac05..d0837245460030b3f23b5254fd99992b680342f0 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_slice.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_slice.py @@ -23,7 +23,6 @@ from paddle.static import InputSpec SEED = 2020 np.random.seed(SEED) -prog_trans = paddle.jit.ProgramTranslator() @paddle.jit.to_static @@ -130,7 +129,7 @@ class TestSliceWithoutControlFlow(unittest.TestCase): return self._run(to_static=False) def _run(self, to_static): - prog_trans.enable(to_static) + paddle.jit.enable_to_static(to_static) res = self.dygraph_func(self.input) return res.numpy() @@ -177,7 +176,7 @@ class TestSetValueWithLayerAndSave(unittest.TestCase): self.temp_dir.cleanup() def test_set_value_with_save(self): - prog_trans.enable(True) + paddle.jit.enable_to_static(True) model = LayerWithSetValue(input_dim=10, hidden=1) x = paddle.full(shape=[5, 10], fill_value=5.0, dtype="float32") paddle.jit.save( diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_tensor_methods.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_tensor_methods.py index 307e8f422de6e61a9a9eb9462f46637573e5ddc0..b1a512c08fddd1a991d1766011e1796769c249d6 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_tensor_methods.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_tensor_methods.py @@ -28,8 +28,7 @@ def tensor_clone(x): class TestTensorClone(unittest.TestCase): def _run(self, to_static): - prog_trans = paddle.jit.ProgramTranslator() - prog_trans.enable(to_static) + paddle.jit.enable_to_static(to_static) x = paddle.ones([1, 2, 3]) return tensor_clone(x).numpy() @@ -48,8 +47,7 @@ def tensor_numpy(x): class TestTensorDygraphOnlyMethodError(unittest.TestCase): def _run(self, to_static): - prog_trans = paddle.jit.ProgramTranslator() - prog_trans.enable(to_static) + paddle.jit.enable_to_static(to_static) x = paddle.zeros([2, 2]) y = tensor_numpy(x) return y.numpy() @@ -69,8 +67,7 @@ def tensor_item(x): class TestTensorItem(unittest.TestCase): def _run(self, to_static): - prog_trans = paddle.jit.ProgramTranslator() - prog_trans.enable(to_static) + paddle.jit.enable_to_static(to_static) x = 
paddle.ones([1]) if to_static: return tensor_item(x).numpy() @@ -92,8 +89,7 @@ def tensor_size(x): class TestTensorSize(unittest.TestCase): def _run(self, to_static): - prog_trans = paddle.jit.ProgramTranslator() - prog_trans.enable(to_static) + paddle.jit.enable_to_static(to_static) x = paddle.ones([1, 2, 3]) if not to_static: return tensor_size(x) diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_tsm.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_tsm.py index 5e0c971c6ca1a156026815d6b81e91986c48dee7..66e882e477e741f2234c3627b85220ee2491d1aa 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_tsm.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_tsm.py @@ -24,7 +24,6 @@ from tsm_config_utils import merge_configs, parse_config, print_configs import paddle import paddle.fluid as fluid from paddle.fluid.dygraph import to_variable -from paddle.jit import ProgramTranslator from paddle.jit.api import to_static from paddle.nn import BatchNorm, Linear @@ -290,8 +289,7 @@ def create_optimizer(cfg, params): def train(args, fake_data_reader, to_static): - program_translator = ProgramTranslator() - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) config = parse_config(args.config) train_config = merge_configs(config, 'train', vars(args)) diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_word2vec.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_word2vec.py index 61150231c578e10a9feb87e809bea57b82701557..560132565907e5b3cd3e53a109236ed05760a127 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_word2vec.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_word2vec.py @@ -20,7 +20,6 @@ import numpy as np import paddle import paddle.fluid as fluid -from paddle.jit import ProgramTranslator from paddle.jit.api import to_static from paddle.nn import Embedding @@ -278,8 +277,7 @@ total_steps = len(dataset) * epoch_num // batch_size def train(to_static): - program_translator = ProgramTranslator() - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) random.seed(0) np.random.seed(0) diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_yolov3.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_yolov3.py index d8e766839160665884bf9b6858657c2002a642de..b84196b421aede82e63336d9f93571ebcb556e35 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_yolov3.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_yolov3.py @@ -22,7 +22,6 @@ from yolov3 import YOLOv3, cfg import paddle import paddle.fluid as fluid from paddle.fluid.dygraph import to_variable -from paddle.jit import ProgramTranslator paddle.enable_static() random.seed(0) @@ -78,8 +77,7 @@ fake_data_reader = FakeDataReader() def train(to_static): - program_translator = ProgramTranslator() - program_translator.enable(to_static) + paddle.jit.enable_to_static(to_static) random.seed(0) np.random.seed(0) diff --git a/python/paddle/fluid/tests/unittests/test_base_layer.py b/python/paddle/fluid/tests/unittests/test_base_layer.py index bdb0d3076274fd9cb0adb2a69a68170b828e7115..05d9b71c1e4372dc7d7f870b433a1d17466c761d 100644 --- a/python/paddle/fluid/tests/unittests/test_base_layer.py +++ b/python/paddle/fluid/tests/unittests/test_base_layer.py @@ -20,7 +20,6 @@ import paddle import paddle.fluid as fluid from paddle.fluid.dygraph import to_variable from paddle.fluid.framework import EagerParamBase, 
ParamBase, in_dygraph_mode -from paddle.jit import ProgramTranslator class L1(fluid.Layer): @@ -339,11 +338,10 @@ class BufferNetWithModification(paddle.nn.Layer): class TestModifiedBuffer(unittest.TestCase): def funcsetUp(self): paddle.disable_static() - self.prog_trans = ProgramTranslator() self.shape = [10, 16] def _run(self, to_static=False): - self.prog_trans.enable(to_static) + paddle.jit.enable_to_static(to_static) x = paddle.ones([1], 'int32') net = BufferNetWithModification(self.shape) diff --git a/python/paddle/fluid/tests/unittests/test_directory_migration.py b/python/paddle/fluid/tests/unittests/test_directory_migration.py index 439104b69a18c48eb9dce39b4a7e1dec68dca4d8..948db941b183c3e750425b584e7bb7da8a13243f 100644 --- a/python/paddle/fluid/tests/unittests/test_directory_migration.py +++ b/python/paddle/fluid/tests/unittests/test_directory_migration.py @@ -49,7 +49,6 @@ class TestDirectory(unittest.TestCase): 'paddle.DataParallel', 'paddle.jit', 'paddle.jit.to_static', - 'paddle.jit.ProgramTranslator', 'paddle.jit.TranslatedLayer', 'paddle.jit.save', 'paddle.jit.load', @@ -143,7 +142,6 @@ class TestDirectory(unittest.TestCase): 'paddle.imperative.jit', 'paddle.imperative.TracedLayer', 'paddle.imperative.declarative', - 'paddle.imperative.ProgramTranslator', 'paddle.imperative.TranslatedLayer', 'paddle.imperative.jit.save', 'paddle.imperative.jit.load', diff --git a/python/paddle/fluid/tests/unittests/test_jit_layer.py b/python/paddle/fluid/tests/unittests/test_jit_layer.py index c670ac00aed6c382e2eb94ba744ce291f5b78680..93380400d2afbc980d554a15a0660cac4460d626 100644 --- a/python/paddle/fluid/tests/unittests/test_jit_layer.py +++ b/python/paddle/fluid/tests/unittests/test_jit_layer.py @@ -20,7 +20,6 @@ import numpy as np import paddle from paddle.fluid.framework import _dygraph_place_guard -from paddle.jit.dy2static.program_translator import ProgramTranslator from paddle.jit.layer import Layer from paddle.static import InputSpec @@ -61,11 +60,10 @@ class TestMultiLoad(unittest.TestCase): x = paddle.full([2, 4], 2) model = Net() - program_translator = ProgramTranslator() - program_translator.enable(False) + paddle.jit.enable_to_static(False) forward_out1 = model.forward(x) infer_out1 = model.infer(x) - program_translator.enable(True) + paddle.jit.enable_to_static(True) model_path = os.path.join(self.temp_dir.name, 'multi_program') paddle.jit.save(model, model_path, combine_params=True) diff --git a/python/paddle/jit/__init__.py b/python/paddle/jit/__init__.py index 00f5f609430db8aba4eff6780c16922f99fc87ac..f508f72478b00bf6b1074b22fd3ffbe5697f3542 100644 --- a/python/paddle/jit/__init__.py +++ b/python/paddle/jit/__init__.py @@ -18,10 +18,9 @@ from .api import load from .api import to_static from .api import not_to_static from .api import ignore_module -from .dy2static.logging_utils import set_code_level, set_verbosity +from .dy2static.program_translator import enable_to_static -from . 
import dy2static -from .dy2static.program_translator import ProgramTranslator +from .dy2static.logging_utils import set_code_level, set_verbosity from .translated_layer import TranslatedLayer __all__ = [ # noqa @@ -29,9 +28,9 @@ __all__ = [ # noqa 'load', 'to_static', 'ignore_module', - 'ProgramTranslator', 'TranslatedLayer', 'set_code_level', 'set_verbosity', 'not_to_static', + 'enable_to_static', ] diff --git a/python/paddle/jit/api.py b/python/paddle/jit/api.py index 88988016401c115d533da2e1b25f8e247b4a0fe8..ebef5f28654e6fae3004244b94cb1e442b286de9 100644 --- a/python/paddle/jit/api.py +++ b/python/paddle/jit/api.py @@ -75,8 +75,6 @@ from paddle.fluid.framework import ( from paddle.fluid.framework import dygraph_only, _non_static_mode from paddle.fluid.wrapped_decorator import wrap_decorator -__all__ = [] - def create_program_from_desc(program_desc): program = Program() @@ -160,7 +158,7 @@ def _dygraph_to_static_func_(dygraph_func): if _non_static_mode() or not program_translator.enable_to_static: logging_utils.warn( "The decorator 'dygraph_to_static_func' doesn't work in " - "dygraph mode or set ProgramTranslator.enable to False. " + "dygraph mode or when 'paddle.jit.enable_to_static' is set to False. " "We will just return dygraph output." ) return dygraph_func(*args, **kwargs) @@ -911,7 +909,7 @@ def save(layer, path, input_spec=None, **configs): prog_translator = ProgramTranslator() if not prog_translator.enable_to_static: raise RuntimeError( - "The paddle.jit.save doesn't work when setting ProgramTranslator.enable to False." + "The paddle.jit.save doesn't work when setting 'paddle.jit.enable_to_static' to False." ) if not ( diff --git a/python/paddle/jit/dy2static/__init__.py b/python/paddle/jit/dy2static/__init__.py index b55d5d672c2b118545d896ea2e0a5135ec76eb7c..b3e70f487003151dee80b708eed1e001fb64ab9f 100644 --- a/python/paddle/jit/dy2static/__init__.py +++ b/python/paddle/jit/dy2static/__init__.py @@ -12,10 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from .utils import ( - saw, - UndefinedVar, -) +from .utils import saw, UndefinedVar, ast_to_source_code from .convert_operators import convert_logical_and as And # noqa: F401 from .convert_operators import convert_var_dtype as AsDtype # noqa: F401 from .convert_operators import convert_assert as Assert # noqa: F401 diff --git a/python/paddle/jit/dy2static/program_translator.py b/python/paddle/jit/dy2static/program_translator.py index 131defe7d0bace08ee080a76df46e18b92af3ea7..7fd6b0ce7fe004819e210bd01e158e56a27e574d 100644 --- a/python/paddle/jit/dy2static/program_translator.py +++ b/python/paddle/jit/dy2static/program_translator.py @@ -416,9 +416,9 @@ class StaticFunction: # will show up **only once**. StaticFunction.__call__ will run many times, it is appropriate to # display this warning message only once. logging_utils.warn( - "The decorator '@paddle.jit.to_static' does NOT work when setting ProgramTranslator.enable to False. " + "The decorator '@paddle.jit.to_static' does NOT work when setting 'paddle.jit.enable_to_static' to False. " "We will just return dygraph output. 
If you would like to get static graph output, please call API " - "ProgramTranslator.enable(True)" + "paddle.jit.enable_to_static(True)" ) return self._call_dygraph_function(*args, **kwargs) @@ -1222,8 +1222,7 @@ class ProgramTranslator: return x_v - prog_trans = paddle.jit.ProgramTranslator() - prog_trans.enable(False) + paddle.jit.enable_to_static(False) x = paddle.ones([1, 2]) # ProgramTranslator is disabled so the func is run in dygraph @@ -1513,3 +1512,47 @@ class ProgramTranslator: """ return self._program_cache + + +def enable_to_static(enable_to_static_bool): + + """ + Enable or disable the conversion from imperative to static graph by + ProgramTranslator globally. + + Args: + enable_to_static_bool (bool): True or False to enable or disable converting to static graph. + + Returns: + None. + + Examples: + .. code-block:: python + + import paddle + + + @paddle.jit.to_static + def func(x): + if paddle.mean(x) > 0: + x_v = x - 1 + else: + x_v = x + 1 + return x_v + + + paddle.jit.enable_to_static(False) + + x = paddle.ones([1, 2]) + # ProgramTranslator is disabled so the func is run in dygraph + print(func(x)) # [[0. 0.]] + + """ + check_type( + enable_to_static_bool, + "enable_to_static_bool", + bool, + "paddle.jit.enable_to_static", + ) + _program_trans = ProgramTranslator() + _program_trans.enable(enable_to_static_bool) diff --git a/python/paddle/tests/test_model.py b/python/paddle/tests/test_model.py index 7094ea5d59a4d168b6f8bdc07424dee953fe96c2..9090c28306a05db45d5e93a8eaaff90ab32eff77 100644 --- a/python/paddle/tests/test_model.py +++ b/python/paddle/tests/test_model.py @@ -25,7 +25,6 @@ import paddle.vision.models as models from paddle import Model, fluid, to_tensor from paddle.hapi.model import prepare_distributed_context from paddle.io import Dataset, DistributedBatchSampler -from paddle.jit.dy2static.program_translator import ProgramTranslator from paddle.metric import Accuracy from paddle.nn import Conv2D, Linear, ReLU, Sequential from paddle.nn.layer.loss import CrossEntropyLoss @@ -826,8 +825,8 @@ class TestModelFunction(unittest.TestCase): for dynamic in [True, False]: paddle.disable_static() if dynamic else None - prog_translator = ProgramTranslator() - prog_translator.enable(False) if not dynamic else None + paddle.jit.enable_to_static(False) if not dynamic else None + net = LeNet() inputs = [InputSpec([None, 1, 28, 28], 'float32', 'x')] model = Model(net, inputs)