Unverified · Commit 2bb28f31 · Authored by Ryan · Committed by GitHub

[Dy2St] Remove ProgramTranslator (#49628)

* add enable_to_static and drop some methods of ProgramTranslator

* fix code style

* fix can't import enable_to_static and update unittest

* change unittest and roll back code of PT

* fix can't import as of utils

* roll back PT

* fix roll back

* add some unittests

* add unittest and fix code style bug in api.py

* finish all unittests

* remove ProgramTranslator

* fix code style

* restore test_program_translator

* api.py remove get_func

* TestDygraphToStaticCode

* fix check_type and import err

* roll back PT without get_code

* roll back PT with get_code

* convert_to_static

* fix import __all__
Parent 8f0adcb5
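The change in a nutshell: the `ProgramTranslator` singleton's `enable()` switch is replaced by the module-level `paddle.jit.enable_to_static()` function. A minimal before/after sketch (the toy function below is illustrative, not part of this diff):

    import paddle

    @paddle.jit.to_static
    def forward(x):
        return x + 1

    # Before this PR: paddle.jit.ProgramTranslator().enable(False)
    # After this PR:
    paddle.jit.enable_to_static(False)  # run decorated functions eagerly (dygraph)
    out = forward(paddle.to_tensor([1.0]))
    paddle.jit.enable_to_static(True)   # restore dynamic-to-static translation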
@@ -564,7 +564,7 @@ def _fake_interface_only_(func):
         raise AssertionError(
             "'%s' only can be called by `paddle.Tensor` in dynamic graph mode. Suggestions:\n"
             " 1. If you are in static graph mode, you can switch to dynamic graph mode by turning off `paddle.enable_static()` or calling `paddle.disable_static()`.\n"
-            " 2. If you are using `@paddle.jit.to_static`, you can turn off ProgramTranslator by calling `paddle.jit.ProgramTranslator().enable(False)`. "
+            " 2. If you are using `@paddle.jit.to_static`, you can call `paddle.jit.enable_to_static(False)`. "
             "If you have to translate dynamic graph to static graph, please use other API to replace '%s'."
             % (func.__name__, func.__name__)
         )
...
@@ -18,7 +18,6 @@ import numpy
 import paddle
 import paddle.fluid as fluid
-from paddle.jit import ProgramTranslator
 from paddle.jit.api import to_static
@@ -35,7 +34,7 @@ def dyfunc_assert_non_variable(x=True):
 class TestAssertVariable(unittest.TestCase):
     def _run(self, func, x, with_exception, to_static):
-        ProgramTranslator().enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         if with_exception:
             with self.assertRaises(BaseException):
                 with fluid.dygraph.guard():
...
@@ -24,10 +24,8 @@ from predictor_utils import PredictorTools
 import paddle
 import paddle.fluid as fluid
-from paddle.jit import ProgramTranslator
 from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX

-program_translator = ProgramTranslator()
 place = (
     fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace()
 )
@@ -127,11 +125,11 @@ class TestBert(unittest.TestCase):
         return loss, ppl

     def train_dygraph(self, bert_config, data_reader):
-        program_translator.enable(False)
+        paddle.jit.enable_to_static(False)
         return self.train(bert_config, data_reader, False)

     def train_static(self, bert_config, data_reader):
-        program_translator.enable(True)
+        paddle.jit.enable_to_static(True)
         return self.train(bert_config, data_reader, True)

     def predict_static(self, data):
@@ -157,7 +155,7 @@ class TestBert(unittest.TestCase):
         return pred_res

     def predict_dygraph(self, bert_config, data):
-        program_translator.enable(False)
+        paddle.jit.enable_to_static(False)
         with fluid.dygraph.guard(place):
             bert = PretrainModelLayer(
                 config=bert_config, weight_sharing=False, use_fp16=False
...
@@ -24,12 +24,11 @@ import paddle
 import paddle.fluid as fluid
 from paddle.fluid import ParamAttr
 from paddle.fluid.dygraph import to_variable
-from paddle.jit import ProgramTranslator, to_static
+from paddle.jit import to_static
 from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX

 SEED = 2000
 DATATYPE = 'float32'
-program_translator = ProgramTranslator()

 # Note: Set True to eliminate randomness.
 #     1. For one operation, cuDNN has several algorithms,
@@ -662,7 +661,7 @@ class TestTrain(unittest.TestCase):
         self.temp_dir.cleanup()

     def train_bmn(self, args, place, to_static):
-        program_translator.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         loss_data = []

         with fluid.dygraph.guard(place):
@@ -822,7 +821,7 @@ class TestTrain(unittest.TestCase):
                 break

     def predict_dygraph(self, data):
-        program_translator.enable(False)
+        paddle.jit.enable_to_static(False)
         with fluid.dygraph.guard(self.place):
             bmn = BMN(self.args)
             # load dygraph trained parameters
...
@@ -19,7 +19,6 @@ import numpy as np
 import paddle
 import paddle.fluid as fluid
 from paddle.jit.api import to_static
-from paddle.jit.dy2static.program_translator import ProgramTranslator
 from paddle.jit.dy2static.utils import Dygraph2StaticException

 SEED = 2020
@@ -35,10 +34,10 @@ class TestDy2staticException(unittest.TestCase):
     def test_error(self):
         if self.dyfunc:
             with self.assertRaisesRegex(Dygraph2StaticException, self.error):
-                ProgramTranslator().enable(True)
+                paddle.jit.enable_to_static(True)
                 self.assertTrue(to_static(self.dyfunc)(self.x))
         paddle.fluid.dygraph.base._in_declarative_mode_ = False
-        ProgramTranslator().enable(False)
+        paddle.jit.enable_to_static(False)

 def test_continue_in_for(x):
...
@@ -18,9 +18,6 @@ import numpy as np
 from test_resnet import ResNetHelper

 import paddle
-from paddle.jit import ProgramTranslator
-
-program_translator = ProgramTranslator()

 class TestResnetWithPass(unittest.TestCase):
@@ -35,7 +32,7 @@ class TestResnetWithPass(unittest.TestCase):
         paddle.fluid.set_flags({"FLAGS_max_inplace_grad_add": 8})

     def train(self, to_static):
-        program_translator.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         return self.resnet_helper.train(to_static, self.build_strategy)

     def verify_predict(self):
...
@@ -20,7 +20,6 @@ from test_fetch_feed import Linear, Pool2D
 import paddle
 import paddle.fluid as fluid
-from paddle.jit import ProgramTranslator
 from paddle.jit.api import to_static
 from paddle.jit.dy2static import convert_to_static
@@ -91,8 +90,7 @@ class TestCacheProgramWithOptimizer(unittest.TestCase):
         return self.train(to_static=False)

     def train(self, to_static=False):
-        prog_trans = ProgramTranslator()
-        prog_trans.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         with fluid.dygraph.guard(fluid.CPUPlace()):
             dygraph_net = self.dygraph_class()
...
@@ -21,11 +21,8 @@ from test_program_translator import get_source_code
 import paddle
 import paddle.fluid as fluid
 import paddle.jit.dy2static as _jst
-from paddle.jit import ProgramTranslator
 from paddle.jit.dy2static.convert_call_func import CONVERSION_OPTIONS

-program_translator = ProgramTranslator()
-
 SEED = 2020
 np.random.seed(SEED)
@@ -93,13 +90,13 @@ class TestRecursiveCall1(unittest.TestCase):
         self.dyfunc = nested_func

     def get_dygraph_output(self):
-        program_translator.enable(False)
+        paddle.jit.enable_to_static(False)
         with fluid.dygraph.guard():
             res = self.dyfunc(self.input).numpy()
             return res

     def get_static_output(self):
-        program_translator.enable(True)
+        paddle.jit.enable_to_static(True)
         with fluid.dygraph.guard():
             res = self.dyfunc(self.input).numpy()
             return res
@@ -193,11 +190,11 @@ class TestRecursiveCall2(unittest.TestCase):
         return res.numpy()

     def get_dygraph_output(self):
-        program_translator.enable(False)
+        paddle.jit.enable_to_static(False)
         return self._run()

     def get_static_output(self):
-        program_translator.enable(True)
+        paddle.jit.enable_to_static(True)
         return self._run()

     def test_transformed_static_result(self):
...
@@ -38,7 +38,6 @@ os.environ["CUDA_VISIBLE_DEVICES"] = "1"
 import paddle
 from paddle.fluid.dygraph import to_variable
-from paddle.jit import ProgramTranslator
 from paddle.jit.api import to_static
 from paddle.nn import BatchNorm
@@ -61,8 +60,6 @@ lambda_identity = 0.5
 IMAGE_SIZE = 64
 SEED = 2020

-program_translator = ProgramTranslator()
-

 class Cycle_Gan(fluid.dygraph.Layer):
     def __init__(self, input_channel, istrain=True):
@@ -560,7 +557,7 @@ def train(args, to_static):
         else fluid.CPUPlace()
     )

-    program_translator.enable(to_static)
+    paddle.jit.enable_to_static(to_static)

     with fluid.dygraph.guard(place):
         max_images_num = args.max_images_num
...
@@ -22,7 +22,6 @@ from test_basic_api_transformation import dyfunc_to_variable
 import paddle
 import paddle.fluid as fluid
 from paddle.fluid.dygraph import Layer, to_variable
-from paddle.jit import ProgramTranslator
 from paddle.jit.api import to_static
 from paddle.jit.dy2static.program_translator import (
     ConcreteProgram,
@@ -30,8 +29,6 @@ from paddle.jit.dy2static.program_translator import (
 )
 from paddle.static import InputSpec

-program_trans = ProgramTranslator()
-

 class SimpleNet(Layer):
     def __init__(self):
@@ -210,7 +207,7 @@ def foo_func(a, b, c=1, d=2):
 class TestDifferentInputSpecCacheProgram(unittest.TestCase):
     def setUp(self):
-        program_trans.enable(True)
+        paddle.jit.enable_to_static(True)

     def test_with_different_input(self):
         with fluid.dygraph.guard(fluid.CPUPlace()):
@@ -357,7 +354,7 @@ class TestDeclarativeAPI(unittest.TestCase):
         with self.assertRaises(RuntimeError):
             func(np.ones(5).astype("int32"))

-        program_trans.enable(False)
+        paddle.jit.enable_to_static(False)
         with self.assertRaises(AssertionError):
             # AssertionError: We Only support to_variable in imperative mode,
             # please use fluid.dygraph.guard() as context to run it in imperative Mode
@@ -367,7 +364,7 @@ class TestDeclarativeAPI(unittest.TestCase):
 class TestDecorateModelDirectly(unittest.TestCase):
     def setUp(self):
         paddle.disable_static()
-        program_trans.enable(True)
+        paddle.jit.enable_to_static(True)
         self.x = to_variable(np.ones([4, 10]).astype('float32'))

     def test_fake_input(self):
...
@@ -19,7 +19,6 @@ import numpy as np
 import paddle
 import paddle.fluid as fluid
 from paddle.jit import to_static
-from paddle.jit.dy2static.program_translator import ProgramTranslator

 PLACE = (
     fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace()
@@ -135,8 +134,7 @@ class TestNetWithDict(unittest.TestCase):
         return self.train(to_static=False)

     def train(self, to_static=False):
-        prog_trans = ProgramTranslator()
-        prog_trans.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         with fluid.dygraph.guard(PLACE):
             net = MainNetWithDict(batch_size=self.batch_size)
             ret = net(self.x)
@@ -191,8 +189,7 @@ class TestDictPop(unittest.TestCase):
         return self._run(to_static=False)

     def _run(self, to_static):
-        prog_trans = ProgramTranslator()
-        prog_trans.enable(to_static)
+        paddle.jit.enable_to_static(to_static)

         result = self.dygraph_func(self.input)
@@ -237,8 +234,7 @@ class TestDictPop3(TestNetWithDict):
         self.x = np.array([2, 2]).astype('float32')

     def train(self, to_static=False):
-        prog_trans = ProgramTranslator()
-        prog_trans.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         with fluid.dygraph.guard(PLACE):
             net = NetWithDictPop()
             ret = net(z=0, x=self.x, y=True)
...
@@ -174,7 +174,6 @@ class TestErrorBase(unittest.TestCase):
         self.filepath = inspect.getfile(unwrap(self.func_call))
         self.set_exception_type()
         self.set_message()
-        self.prog_trans = paddle.jit.ProgramTranslator()

     def set_input(self):
         self.input = np.ones([3, 2])
@@ -364,30 +363,6 @@ class TestErrorStaticLayerCallInRuntime2(TestErrorStaticLayerCallInRuntime):
         ]

-# Situation 2: Call ProgramTranslator().get_output(...) to use Dynamic-to-Static
-class TestErrorGetOutputInCompiletime(TestErrorStaticLayerCallInCompiletime):
-    def set_func_call(self):
-        self.func_call = lambda: self.prog_trans.get_output(
-            unwrap(self.func), self.input
-        )
-
-
-class TestErrorGetOutputInCompiletime_2(
-    TestErrorStaticLayerCallInCompiletime_2
-):
-    def set_func_call(self):
-        self.func_call = lambda: self.prog_trans.get_output(
-            unwrap(self.func), self.input
-        )
-
-
-class TestErrorGetOutputInRuntime(TestErrorStaticLayerCallInRuntime):
-    def set_func_call(self):
-        self.func_call = lambda: self.prog_trans.get_output(
-            unwrap(self.func), self.input
-        )
-
-
 class TestJitSaveInCompiletime(TestErrorBase):
     def setUp(self):
         self.reset_flags_to_default()
...
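Tests that exercised the removed `ProgramTranslator().get_output(...)` obtain the same behavior by enabling translation and calling the `to_static`-wrapped function directly. A minimal sketch, with an illustrative toy function:

    import paddle

    def double(x):
        return x * 2

    paddle.jit.enable_to_static(True)
    # was: prog_trans.get_output(double, x)
    out = paddle.jit.to_static(double)(paddle.to_tensor([2.0]))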
@@ -18,7 +18,6 @@ import numpy as np
 import paddle
 import paddle.fluid as fluid
-from paddle.jit import ProgramTranslator
 from paddle.jit.api import to_static

 SEED = 2020
@@ -68,8 +67,7 @@ class TestPool2D(unittest.TestCase):
         self.data = np.random.random((1, 2, 4, 4)).astype('float32')

     def train(self, to_static=False):
-        program_translator = ProgramTranslator()
-        program_translator.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         with fluid.dygraph.guard():
             dy_layer = self.dygraph_class()
...
@@ -20,11 +20,8 @@ import numpy as np
 import paddle
 import paddle.fluid as fluid
-from paddle.jit import ProgramTranslator
 from paddle.static import InputSpec

-program_translator = ProgramTranslator()
-
 # 0. for in range var.numpy()[0]
 @paddle.jit.to_static
@@ -363,7 +360,7 @@ class TestTransformBase(unittest.TestCase):
         )

     def _run(self, to_static):
-        program_translator.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         with fluid.dygraph.guard():
             return self.dygraph_func(self.input)
@@ -390,7 +387,7 @@ class TestTransform(TestTransformBase):
 class TestTransformForOriginalList(TestTransform):
     def _run(self, to_static):
-        program_translator.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         with fluid.dygraph.guard():
             return self.dygraph_func()
...
@@ -78,14 +78,5 @@ class TestFullNameDecorator(unittest.TestCase):
             DoubleDecorated().double_decorated_func2(x)

-class TestImportProgramTranslator(unittest.TestCase):
-    def test_diff_pkg_same_cls(self):
-        dygraph_prog_trans = paddle.jit.ProgramTranslator()
-        dy_to_stat_prog_trans = paddle.jit.ProgramTranslator()
-        full_pkg_prog_trans = paddle.jit.ProgramTranslator()
-        self.assertEqual(dygraph_prog_trans, dy_to_stat_prog_trans)
-        self.assertEqual(dygraph_prog_trans, full_pkg_prog_trans)
-
-
 if __name__ == '__main__':
     unittest.main()
@@ -74,10 +74,9 @@ class TestGrad(unittest.TestCase):
         self.x.stop_gradient = False

     def _run(self, func, to_static):
-        prog_trans = paddle.jit.ProgramTranslator()
-        prog_trans.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         ret = func(self.x).numpy()
-        prog_trans.enable(True)
+        paddle.jit.enable_to_static(True)
         return ret

     def test_forward(self):
...
@@ -136,8 +136,7 @@ class TestGridGenerator(unittest.TestCase):
         self.x = paddle.uniform(shape=[1, 20, 2], dtype='float32')

     def _run(self, to_static):
-        prog_trans = paddle.jit.ProgramTranslator()
-        prog_trans.enable(to_static)
+        paddle.jit.enable_to_static(to_static)

         net = GridGenerator(40, 20)
         ret = net(self.x, [32, 100])
...
@@ -44,7 +44,6 @@ from ifelse_simple_func import (
 import paddle
 import paddle.fluid.core as core
 import paddle.nn.functional as F
-from paddle.jit.dy2static.program_translator import ProgramTranslator
 from paddle.jit.dy2static.utils import Dygraph2StaticException

 np.random.seed(1)
@@ -64,10 +63,10 @@ class TestDy2staticException(unittest.TestCase):
     def test_error(self):
         if self.dyfunc:
             with self.assertRaisesRegex(Dygraph2StaticException, self.error):
-                ProgramTranslator().enable(True)
+                paddle.jit.enable_to_static(True)
                 self.assertTrue(paddle.jit.to_static(self.dyfunc)(self.x))
         paddle.fluid.dygraph.base._in_declarative_mode_ = False
-        ProgramTranslator().enable(False)
+        paddle.jit.enable_to_static(False)

 class TestDygraphIfElse(unittest.TestCase):
@@ -254,8 +253,7 @@ class TestDygraphIfElseNet(unittest.TestCase):
         return self._run(to_static=False)

     def _run(self, to_static=False):
-        prog_trans = ProgramTranslator()
-        prog_trans.enable(to_static)
+        paddle.jit.enable_to_static(to_static)

         with fluid.dygraph.guard(place):
             net = self.Net()
@@ -364,8 +362,7 @@ class TestDiffModeNet(unittest.TestCase):
         self.Net = DiffModeNet1

     def _run(self, mode, to_static):
-        prog_trans = ProgramTranslator()
-        prog_trans.enable(to_static)
+        paddle.jit.enable_to_static(to_static)

         net = self.Net(mode)
         ret = net(self.x, self.y)
@@ -423,10 +420,10 @@ class TestDy2StIfElseRetInt1(unittest.TestCase):
         self.out = self.get_dy2stat_out()

     def get_dy2stat_out(self):
-        ProgramTranslator().enable(True)
+        paddle.jit.enable_to_static(True)
         static_func = paddle.jit.to_static(self.dyfunc)
         out = static_func(self.x)
-        ProgramTranslator().enable(False)
+        paddle.jit.enable_to_static(False)
         return out

     def test_ast_to_func(self):
@@ -457,7 +454,7 @@ class TestDy2StIfElseRetInt4(TestDy2StIfElseRetInt1):
         self.dyfunc = dyfunc_ifelse_ret_int4

     def test_ast_to_func(self):
-        ProgramTranslator().enable(True)
+        paddle.jit.enable_to_static(True)
         with self.assertRaises(Dygraph2StaticException):
             static_func = paddle.jit.to_static(self.dyfunc)
             out = static_func(self.x)
@@ -467,7 +464,7 @@ class TestDy2StIfElseRetInt4(TestDy2StIfElseRetInt1):
         # an exception is thrown during Dy2St, making the `_in_declarative_mode_`
         # a wrong value. So We need set `_in_declarative_mode_` to False manually.
         paddle.fluid.dygraph.base._in_declarative_mode_ = False
-        ProgramTranslator().enable(False)
+        paddle.jit.enable_to_static(False)

 class IfElseNet(paddle.nn.Layer):
...
@@ -75,8 +75,7 @@ class SequentialLayer(nn.Layer):

 def train(model, to_static):
-    prog_trans = paddle.jit.ProgramTranslator.get_instance()
-    prog_trans.enable(to_static)
+    paddle.jit.enable_to_static(to_static)

     x = paddle.ones(shape=[2, 3], dtype='int32')
     out = model(x)
...
@@ -27,13 +27,11 @@ import paddle.fluid as fluid
 from paddle import _legacy_C_ops
 from paddle.fluid.dygraph import to_variable
 from paddle.fluid.framework import _non_static_mode
-from paddle.jit import ProgramTranslator
 from paddle.jit.api import to_static
 from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX

 SEED = 2020
-program_translator = ProgramTranslator()

 # Add InputSpec to make unittest run faster.
 input_specs = [
     paddle.static.InputSpec([None, None], 'int64'),
@@ -542,7 +540,7 @@ class TestLACModel(unittest.TestCase):
         self.dy_param_path = os.path.join(self.temp_dir.name, 'lac_dy_param')

     def train(self, args, to_static):
-        program_translator.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         place = (
             fluid.CUDAPlace(0)
             if fluid.is_compiled_with_cuda()
@@ -656,7 +654,7 @@ class TestLACModel(unittest.TestCase):
     def predict_dygraph(self, batch):
         words, targets, length = batch
-        program_translator.enable(False)
+        paddle.jit.enable_to_static(False)
         with fluid.dygraph.guard(self.place):
             model = LexNet(self.args)
             # load dygraph trained parameters
...
@@ -21,12 +21,9 @@ import numpy as np
 import paddle
 import paddle.fluid as fluid
-from paddle.jit import ProgramTranslator
 from paddle.jit.dy2static.logical_transformer import cmpop_node_to_str
 from paddle.utils import gast

-program_translator = ProgramTranslator()
-
 SEED = 2020
 np.random.seed(22)
@@ -186,7 +183,7 @@ class TestLogicalBase(unittest.TestCase):
         )

     def _run(self, to_static):
-        program_translator.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         with fluid.dygraph.guard(self.place):
             result = self.dygraph_func(self.input)
             return result.numpy()
...
@@ -52,7 +52,7 @@ class TestLstm(unittest.TestCase):
         self.temp_dir.cleanup()

     def run_lstm(self, to_static):
-        paddle.jit.ProgramTranslator().enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         paddle.disable_static()
         paddle.static.default_main_program().random_seed = 1001
@@ -70,7 +70,7 @@ class TestLstm(unittest.TestCase):
         np.testing.assert_allclose(dygraph_out, static_out, rtol=1e-05)

     def test_save_in_eval(self, with_training=True):
-        paddle.jit.ProgramTranslator().enable(True)
+        paddle.jit.enable_to_static(True)
         net = Net(12, 2)
         x = paddle.randn((2, 10, 12))
         if with_training:
@@ -141,7 +141,7 @@ class TestSaveInEvalMode(unittest.TestCase):
         self.temp_dir.cleanup()

     def test_save_in_eval(self):
-        paddle.jit.ProgramTranslator().enable(True)
+        paddle.jit.enable_to_static(True)
         net = LinearNet()
         x = paddle.randn((2, 10))
         x.stop_gradient = False
...
@@ -24,7 +24,6 @@ import paddle
 import paddle.fluid as fluid
 from paddle.fluid.initializer import MSRA
 from paddle.fluid.param_attr import ParamAttr
-from paddle.jit import ProgramTranslator
 from paddle.jit.api import to_static
 from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
 from paddle.nn import BatchNorm, Linear
@@ -36,7 +35,6 @@ if fluid.is_compiled_with_cuda():
     fluid.set_flags({'FLAGS_cudnn_deterministic': True})

 SEED = 2020
-program_translator = ProgramTranslator()

 class ConvBNLayer(fluid.dygraph.Layer):
@@ -494,7 +492,7 @@ class Args:

 def train_mobilenet(args, to_static):
-    program_translator.enable(to_static)
+    paddle.jit.enable_to_static(to_static)
     with fluid.dygraph.guard(args.place):

         np.random.seed(SEED)
@@ -605,7 +603,7 @@ def predict_static(args, data):

 def predict_dygraph(args, data):
-    program_translator.enable(False)
+    paddle.jit.enable_to_static(False)
     with fluid.dygraph.guard(args.place):
         if args.model == "MobileNetV1":
             model = MobileNetV1(class_dim=args.class_dim, scale=1.0)
...
@@ -17,7 +17,7 @@ import unittest
 import numpy as np

 import paddle
-from paddle.jit import ProgramTranslator, to_static
+from paddle.jit import to_static

 class NetWithParameterList(paddle.nn.Layer):
@@ -53,12 +53,11 @@ class TestParameterList(unittest.TestCase):
     def setUp(self):
         self.seed = 2021
         self.iter_num = 5
-        self.prog_trans = ProgramTranslator()

     def train(self, is_iter, to_static):
         paddle.seed(self.seed)
         np.random.seed(self.seed)
-        self.prog_trans.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         if is_iter:
             net = NetWithParameterList(10, 3)
         else:
@@ -110,7 +109,6 @@ class TestRawParameterList(unittest.TestCase):
     def setUp(self):
         self.seed = 2021
         self.iter_num = 5
-        self.prog_trans = ProgramTranslator()

     def init_net(self):
         self.net = NetWithRawParamList(10, 3)
@@ -118,7 +116,7 @@ class TestRawParameterList(unittest.TestCase):
     def train(self, to_static):
         paddle.seed(self.seed)
         np.random.seed(self.seed)
-        self.prog_trans.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         self.init_net()

         sgd = paddle.optimizer.SGD(0.1, parameters=self.net.parameters())
...
@@ -20,7 +20,6 @@ from test_fetch_feed import Linear
 import paddle
 import paddle.fluid as fluid
 from paddle.fluid.layers.utils import flatten
-from paddle.jit import ProgramTranslator
 from paddle.jit.api import to_static

 SEED = 2020
@@ -130,7 +129,6 @@ class TestWithNestedOutput(unittest.TestCase):

 class TestWithTrainAndEval(unittest.TestCase):
     def test_switch_eval_and_train(self):
-        program_translator = ProgramTranslator()

         with fluid.dygraph.guard():
             linear_net = Linear()
...
@@ -18,9 +18,7 @@ import numpy
 import paddle
 import paddle.fluid as fluid
-from paddle.jit import ProgramTranslator, to_static
-
-program_translator = ProgramTranslator()
+from paddle.jit import to_static

 # 1. print Tensor
@@ -99,7 +97,7 @@ class TestPrintBase(unittest.TestCase):
         raise NotImplementedError("Print test should implement set_test_func")

     def _run(self, to_static):
-        program_translator.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         with fluid.dygraph.guard():
             self.dygraph_func(self.input)
...
@@ -19,7 +19,6 @@ import unittest
 import astor
 import numpy as np
 from ifelse_simple_func import (
-    dyfunc_with_if_else,
     dyfunc_with_if_else_early_return1,
     dyfunc_with_if_else_early_return2,
 )
@@ -27,7 +26,6 @@ from ifelse_simple_func import (
 import paddle
 import paddle.fluid as fluid
 import paddle.jit.dy2static as _jst
-from paddle.jit import ProgramTranslator
 from paddle.jit.api import to_static
 from paddle.jit.dy2static.utils import func_to_source_code
 from paddle.utils import gast
@@ -213,121 +211,25 @@ class NetWithError(fluid.dygraph.layers.Layer):
         return y

-class TestDygraphToStaticCode(unittest.TestCase):
-    def setUp(self):
-        # set to print all string diff when assertEqual fails
-        self.maxDiff = None
-
-    def test_decorator(self):
-        program_translator = ProgramTranslator()
-        code = program_translator.get_code(dyfunc_with_if_else)
-        print(code)
-        answer = get_source_code(StaticCode1.dyfunc_with_if_else)
-        self.assertEqual(
-            answer.replace('\n', '').replace(' ', ''),
-            code.replace('\n', '').replace(' ', ''),
-        )
-
-    def test_program_translator(self):
-        answer = get_source_code(StaticCode2.dyfunc_with_if_else)
-        program_translator = ProgramTranslator()
-        code = program_translator.get_code(dyfunc_with_if_else)
-        print(code)
-        self.assertEqual(
-            answer.replace('\n', '').replace(' ', ''),
-            code.replace('\n', '').replace(' ', ''),
-        )
-
-
 class TestEnableDeclarative(unittest.TestCase):
     def setUp(self):
         self.x = np.random.randn(30, 10, 32).astype('float32')
         self.weight = np.random.randn(32, 64).astype('float32')
-        self.program_translator = ProgramTranslator()

     def test_raise_error(self):
         with fluid.dygraph.guard():
-            self.program_translator.enable(True)
+            paddle.jit.enable_to_static(True)
             net = NetWithError()
             with self.assertRaises(ValueError):
                 net(fluid.dygraph.to_variable(self.x))

-    def test_enable_disable_get_output(self):
-        self.program_translator.enable(True)
-        with fluid.dygraph.guard():
-            static_output = self.program_translator.get_output(
-                simple_func, self.x, self.weight
-            )
-
-        self.program_translator.enable(False)
-        with fluid.dygraph.guard():
-            dygraph_output = self.program_translator.get_output(
-                simple_func, self.x, self.weight
-            )
-            np.testing.assert_allclose(
-                static_output.numpy(),
-                dygraph_output.numpy(),
-                rtol=1e-05,
-                atol=1e-4,
-            )
-
-    def test_enable_disable_get_func(self):
-        self.program_translator.enable(True)
-        with fluid.dygraph.guard():
-            static_func = self.program_translator.get_func(simple_func)
-            self.assertTrue(callable(static_func))
-            static_output = static_func(self.x, self.weight)
-            self.assertTrue(isinstance(static_output, fluid.Variable))
-
-        self.program_translator.enable(False)
-        with fluid.dygraph.guard():
-            dygraph_func = self.program_translator.get_func(simple_func)
-            self.assertTrue(callable(dygraph_func))
-            dygraph_output = dygraph_func(self.x, self.weight)
-            self.assertTrue(
-                isinstance(
-                    dygraph_output,
-                    (fluid.core.VarBase, fluid.core.eager.Tensor),
-                )
-            )
-
-    def test_enable_disable_get_program(self):
-        self.program_translator.enable(True)
-        static_output = self.program_translator.get_program(
-            simple_func, self.x, self.weight
-        )
-        self.assertTrue(isinstance(static_output, tuple))
-        self.assertEqual(len(static_output), 4)
-        self.assertTrue(isinstance(static_output[0], fluid.Program))
-        self.assertTrue(isinstance(static_output[1], fluid.Program))
-        # Check all inputs and outputs are Variable
-        for var in static_output[2]:
-            self.assertTrue(isinstance(var, fluid.Variable))
-
-        for var in static_output[3]:
-            self.assertTrue(isinstance(var, fluid.Variable))
-
-        self.program_translator.enable(False)
-        with fluid.dygraph.guard():
-            dygraph_output = self.program_translator.get_program(
-                simple_func, self.x, self.weight
-            )
-            self.assertTrue(
-                isinstance(
-                    dygraph_output,
-                    (fluid.core.VarBase, fluid.core.eager.Tensor),
-                )
-            )
-
     def test_enable_disable_declarative(self):
-        self.program_translator.enable(True)
+        paddle.jit.enable_to_static(True)
         with fluid.dygraph.guard():
             static_output = decorated_simple_func(self.x, self.weight)

-        self.program_translator.enable(False)
+        paddle.jit.enable_to_static(False)
         with fluid.dygraph.guard():
             dygraph_output = decorated_simple_func(self.x, self.weight)
             np.testing.assert_allclose(
@@ -346,28 +248,6 @@ class Net(fluid.dygraph.layers.Layer):
         return x + 1

-class TestErrorWithInitFromStaticMode(unittest.TestCase):
-    def setUp(self):
-        self.program_translator = ProgramTranslator()
-        self.x = np.random.randn(10, 32).astype('float32')
-
-    def test_raise_error(self):
-        # disable imperative
-        paddle.enable_static()
-
-        net = Net()
-        self.program_translator.enable(True)
-        with self.assertRaisesRegex(
-            RuntimeError, "only available in dynamic mode"
-        ):
-            self.program_translator.get_output(net.forward, self.x)
-
-        with self.assertRaisesRegex(
-            RuntimeError, "only available in dynamic mode"
-        ):
-            self.program_translator.get_program(net.forward, self.x)
-
-
 class SwitchModeNet(paddle.nn.Layer):
     def __init__(self):
         super().__init__()
...
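The removed `get_func`, `get_output`, `get_program`, and `get_code` methods all have counterparts on the object returned by `paddle.jit.to_static`. The sketch below is a rough mapping under that assumption (attribute names follow Paddle's `StaticFunction`; the toy function and shapes are illustrative):

    import paddle

    def simple_func(x, w):
        return paddle.matmul(x, w)

    paddle.jit.enable_to_static(True)
    static_fn = paddle.jit.to_static(simple_func)  # roughly get_func(simple_func)
    x = paddle.randn([30, 10])
    w = paddle.randn([10, 64])
    out = static_fn(x, w)                          # roughly get_output(simple_func, x, w)
    code = static_fn.code                          # roughly get_code(simple_func)
    main_prog = static_fn.concrete_program.main_program  # roughly get_program(...)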
@@ -22,14 +22,11 @@ import paddle
 import paddle.fluid as fluid
 from paddle.fluid.dygraph.base import to_variable
 from paddle.fluid.optimizer import SGDOptimizer
-from paddle.jit import ProgramTranslator
 from paddle.jit.api import to_static

 PRINT_STEP = 20
 SEED = 2020

-program_translator = ProgramTranslator()
-

 class SimpleLSTMRNN(fluid.Layer):
     def __init__(
@@ -319,12 +316,12 @@ def train(place):

 def train_dygraph(place):
-    program_translator.enable(False)
+    paddle.jit.enable_to_static(False)
     return train(place)

 def train_static(place):
-    program_translator.enable(True)
+    paddle.jit.enable_to_static(True)
     return train(place)
...
@@ -23,8 +23,6 @@ import paddle
 PRINT_STEP = 20
 SEED = 2020

-program_translator = paddle.jit.ProgramTranslator()
-

 class SimpleLSTMRNN(paddle.nn.Layer):
     def __init__(
@@ -319,12 +317,12 @@ def train(place):

 def train_dygraph(place):
-    program_translator.enable(False)
+    paddle.jit.enable_to_static(False)
     return train(place)

 def train_static(place):
-    program_translator.enable(True)
+    paddle.jit.enable_to_static(True)
     return train(place)
...
@@ -23,11 +23,9 @@ import paddle
 import paddle.fluid as fluid
 import paddle.nn.functional as F
 from paddle.fluid.dygraph import Layer, to_variable
-from paddle.jit import ProgramTranslator
 from paddle.jit.api import to_static

 SEED = 2020
-program_translator = ProgramTranslator()

 class Policy(Layer):
@@ -61,7 +59,7 @@ class Args:

 def train(args, place, to_static):
-    program_translator.enable(to_static)
+    paddle.jit.enable_to_static(to_static)
     env = gym.make('CartPole-v0')
     env.seed(SEED)
...
@@ -23,7 +23,6 @@ from predictor_utils import PredictorTools
 import paddle
 import paddle.fluid as fluid
-from paddle.jit import ProgramTranslator
 from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
 from paddle.nn import BatchNorm
@@ -39,7 +38,6 @@ place = (
     fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace()
 )

-program_translator = ProgramTranslator()

 if fluid.is_compiled_with_cuda():
     fluid.set_flags({'FLAGS_cudnn_deterministic': True})
@@ -323,7 +321,7 @@ class ResNetHelper:
         return total_loss.numpy()

     def predict_dygraph(self, data):
-        program_translator.enable(False)
+        paddle.jit.enable_to_static(False)
         with fluid.dygraph.guard(place):
             resnet = ResNet()
@@ -382,7 +380,7 @@ class TestResnet(unittest.TestCase):
         self.resnet_helper = ResNetHelper()

     def train(self, to_static):
-        program_translator.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         return self.resnet_helper.train(to_static)

     def verify_predict(self):
...
@@ -20,7 +20,6 @@ from test_resnet import SEED, ResNet, optimizer_setting
 import paddle
 import paddle.fluid as fluid
-from paddle.jit import ProgramTranslator

 # NOTE: Reduce batch_size from 8 to 2 to avoid unittest timeout.
 batch_size = 2
@@ -29,7 +28,6 @@ place = (
     fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace()
 )

-program_translator = ProgramTranslator()

 if fluid.is_compiled_with_cuda():
     fluid.set_flags({'FLAGS_cudnn_deterministic': True})
@@ -115,7 +113,7 @@ def train(to_static, build_strategy=None):

 class TestResnet(unittest.TestCase):
     def train(self, to_static):
-        program_translator.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         return train(to_static)

     def test_resnet(self):
...
@@ -20,13 +20,11 @@ from test_resnet import SEED, ResNet, optimizer_setting
 import paddle
 import paddle.fluid as fluid
-from paddle.jit import ProgramTranslator

 # NOTE: Reduce batch_size from 8 to 2 to avoid unittest timeout.
 batch_size = 2
 epoch_num = 1

-program_translator = ProgramTranslator()

 if fluid.is_compiled_with_cuda():
     fluid.set_flags({'FLAGS_cudnn_deterministic': True})
@@ -114,7 +112,7 @@ def train(to_static, build_strategy=None):

 class TestResnet(unittest.TestCase):
     def train(self, to_static):
-        program_translator.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         build_strategy = paddle.static.BuildStrategy()
         # Why set `build_strategy.enable_inplace = False` here?
         # Because we find that this PASS strategy of PE makes dy2st training loss unstable.
...
@@ -35,7 +35,6 @@ place = (
     paddle.CUDAPlace(0) if paddle.is_compiled_with_cuda() else paddle.CPUPlace()
 )

-program_translator = paddle.jit.ProgramTranslator()

 if paddle.is_compiled_with_cuda():
     paddle.fluid.set_flags({'FLAGS_cudnn_deterministic': True})
@@ -319,7 +318,7 @@ class TestResnet(unittest.TestCase):
         return total_loss.numpy()

     def predict_dygraph(self, data):
-        program_translator.enable(False)
+        paddle.jit.enable_to_static(False)
         paddle.disable_static(place)
         resnet = ResNet()
@@ -380,7 +379,7 @@ class TestResnet(unittest.TestCase):
         return out

     def train(self, to_static):
-        program_translator.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         return self.do_train(to_static)

     def verify_predict(self):
...
@@ -20,7 +20,7 @@ from ifelse_simple_func import dyfunc_with_if_else
 import paddle
 import paddle.fluid as fluid
 import paddle.fluid.core as core
-from paddle.jit import ProgramTranslator, to_static
+from paddle.jit import to_static
 from paddle.jit.dy2static.utils import Dygraph2StaticException

 SEED = 2020
@@ -272,13 +272,12 @@ class TestReturnBase(unittest.TestCase):
             else fluid.CPUPlace()
         )
         self.init_dygraph_func()
-        self.program_translator = ProgramTranslator()

     def init_dygraph_func(self):
         self.dygraph_func = test_return_base

     def _run(self, to_static=False):
-        self.program_translator.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         with fluid.dygraph.guard():
             res = self.dygraph_func(self.input)
             if isinstance(res, (tuple, list)):
...
@@ -20,7 +20,6 @@ import numpy as np
 import paddle
 import paddle.fluid as fluid
-from paddle.jit import ProgramTranslator
 from paddle.jit.api import to_static
 from paddle.jit.dy2static.partial_program import partial_program_from
 from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
@@ -32,7 +31,6 @@ np.random.seed(SEED)
 place = (
     fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace()
 )
-program_translator = ProgramTranslator()

 class SimpleFcLayer(fluid.dygraph.Layer):
@@ -148,8 +146,7 @@ class TestDyToStaticSaveInferenceModel(unittest.TestCase):

 class TestPartialProgramRaiseError(unittest.TestCase):
     def test_param_type(self):
-        program_translator = ProgramTranslator()
-        program_translator.enable(True)
+        paddle.jit.enable_to_static(True)
         x_data = np.random.random((20, 20)).astype('float32')
         with fluid.dygraph.guard(fluid.CPUPlace()):
...
@@ -22,7 +22,6 @@ from test_fetch_feed import Linear
 import paddle
 import paddle.fluid as fluid
 from paddle.fluid.optimizer import AdamOptimizer
-from paddle.jit import ProgramTranslator

 np.random.seed(2020)
@@ -42,13 +41,12 @@ class TestDyToStaticSaveLoad(unittest.TestCase):
         self.temp_dir.cleanup()

     def test_save_load_same_result(self):
-        program_translator = ProgramTranslator()
         x_data = np.random.randn(30, 10, 32).astype('float32')
         batch_num = 3

         with fluid.dygraph.guard(place):
-            program_translator.enable(True)
+            paddle.jit.enable_to_static(True)
             x = fluid.dygraph.to_variable(x_data)
             net = Linear(32, 64)
             adam = AdamOptimizer(
@@ -81,7 +79,7 @@ class TestDyToStaticSaveLoad(unittest.TestCase):
             x = fluid.dygraph.to_variable(x_data)
             # predict output
-            program_translator.enable(False)
+            paddle.jit.enable_to_static(False)
             dygraph_out, dygraph_loss = dygraph_net(x)

         np.testing.assert_allclose(
...
@@ -25,7 +25,6 @@ from predictor_utils import PredictorTools
 import paddle
 import paddle.fluid as fluid
 from paddle.fluid.dygraph.base import to_variable
-from paddle.jit import ProgramTranslator
 from paddle.jit.api import to_static
 from paddle.jit.translated_layer import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
 from paddle.nn import BatchNorm, Linear
@@ -374,8 +373,7 @@ class TestSeResnet(unittest.TestCase):
         self.temp_dir.cleanup()

     def train(self, train_reader, to_static):
-        program_translator = ProgramTranslator()
-        program_translator.enable(to_static)
+        paddle.jit.enable_to_static(to_static)
         np.random.seed(SEED)
@@ -473,8 +471,7 @@ class TestSeResnet(unittest.TestCase):
         )

     def predict_dygraph(self, data):
-        program_translator = ProgramTranslator()
-        program_translator.enable(False)
+        paddle.jit.enable_to_static(False)
         with fluid.dygraph.guard(place):
             se_resnext = SeResNeXt()
...
@@ -20,12 +20,10 @@ from test_lac import DynamicGRU
 import paddle
 import paddle.fluid as fluid
 from paddle.fluid.dygraph import to_variable
-from paddle.jit import ProgramTranslator
 from paddle.jit.api import to_static
 from paddle.nn import Embedding, Linear

 SEED = 2020
-program_translator = ProgramTranslator()

 # Note: Set True to eliminate randomness.
 #     1. For one operation, cuDNN has several algorithms,
@@ -304,7 +302,7 @@ class Args:

 def train(args, to_static):
-    program_translator.enable(to_static)
+    paddle.jit.enable_to_static(to_static)
     place = (
         fluid.CUDAPlace(0)
         if fluid.is_compiled_with_cuda()
...
@@ -23,13 +23,11 @@ from seq2seq_utils import Seq2SeqModelHyperParams, get_data_iter
 import paddle
 import paddle.fluid as fluid
-from paddle.jit import ProgramTranslator
 from paddle.nn import ClipGradByGlobalNorm

 place = (
     fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace()
 )
-program_translator = ProgramTranslator()

 STEP_NUM = 10
 PRINT_STEP = 2
@@ -197,14 +195,14 @@ class TestSeq2seq(unittest.TestCase):
         self.temp_dir.cleanup()

     def run_dygraph(self, mode="train", attn_model=False):
-        program_translator.enable(False)
+        paddle.jit.enable_to_static(False)
         if mode == "train":
             return train(self.args, attn_model)
         else:
             return infer(self.args, attn_model)

     def run_static(self, mode="train", attn_model=False):
-        program_translator.enable(True)
+        paddle.jit.enable_to_static(True)
         if mode == "train":
             return train(self.args, attn_model)
         else:
...
@@ -21,7 +21,6 @@ from simnet_dygraph_model import BOW, HingeLoss
 import paddle
 import paddle.fluid as fluid
-from paddle.jit import ProgramTranslator

 SEED = 102
 random.seed(SEED)
@@ -104,8 +103,7 @@ def train(conf_dict, to_static):
""" """
train process train process
""" """
program_translator = ProgramTranslator() paddle.jit.enable_to_static(to_static)
program_translator.enable(to_static)
# Get device # Get device
if fluid.is_compiled_with_cuda(): if fluid.is_compiled_with_cuda():
......
...@@ -102,8 +102,7 @@ def train(conf_dict, to_static): ...@@ -102,8 +102,7 @@ def train(conf_dict, to_static):
""" """
train process train process
""" """
program_translator = paddle.jit.ProgramTranslator() paddle.jit.enable_to_static(to_static)
program_translator.enable(to_static)
# Get device # Get device
if paddle.is_compiled_with_cuda(): if paddle.is_compiled_with_cuda():
......
...@@ -23,7 +23,6 @@ from paddle.static import InputSpec ...@@ -23,7 +23,6 @@ from paddle.static import InputSpec
SEED = 2020 SEED = 2020
np.random.seed(SEED) np.random.seed(SEED)
prog_trans = paddle.jit.ProgramTranslator()
@paddle.jit.to_static @paddle.jit.to_static
...@@ -130,7 +129,7 @@ class TestSliceWithoutControlFlow(unittest.TestCase): ...@@ -130,7 +129,7 @@ class TestSliceWithoutControlFlow(unittest.TestCase):
return self._run(to_static=False) return self._run(to_static=False)
def _run(self, to_static): def _run(self, to_static):
prog_trans.enable(to_static) paddle.jit.enable_to_static(to_static)
res = self.dygraph_func(self.input) res = self.dygraph_func(self.input)
return res.numpy() return res.numpy()
...@@ -177,7 +176,7 @@ class TestSetValueWithLayerAndSave(unittest.TestCase): ...@@ -177,7 +176,7 @@ class TestSetValueWithLayerAndSave(unittest.TestCase):
self.temp_dir.cleanup() self.temp_dir.cleanup()
def test_set_value_with_save(self): def test_set_value_with_save(self):
prog_trans.enable(True) paddle.jit.enable_to_static(True)
model = LayerWithSetValue(input_dim=10, hidden=1) model = LayerWithSetValue(input_dim=10, hidden=1)
x = paddle.full(shape=[5, 10], fill_value=5.0, dtype="float32") x = paddle.full(shape=[5, 10], fill_value=5.0, dtype="float32")
paddle.jit.save( paddle.jit.save(
......
...@@ -28,8 +28,7 @@ def tensor_clone(x): ...@@ -28,8 +28,7 @@ def tensor_clone(x):
class TestTensorClone(unittest.TestCase): class TestTensorClone(unittest.TestCase):
def _run(self, to_static): def _run(self, to_static):
prog_trans = paddle.jit.ProgramTranslator() paddle.jit.enable_to_static(to_static)
prog_trans.enable(to_static)
x = paddle.ones([1, 2, 3]) x = paddle.ones([1, 2, 3])
return tensor_clone(x).numpy() return tensor_clone(x).numpy()
...@@ -48,8 +47,7 @@ def tensor_numpy(x): ...@@ -48,8 +47,7 @@ def tensor_numpy(x):
class TestTensorDygraphOnlyMethodError(unittest.TestCase): class TestTensorDygraphOnlyMethodError(unittest.TestCase):
def _run(self, to_static): def _run(self, to_static):
prog_trans = paddle.jit.ProgramTranslator() paddle.jit.enable_to_static(to_static)
prog_trans.enable(to_static)
x = paddle.zeros([2, 2]) x = paddle.zeros([2, 2])
y = tensor_numpy(x) y = tensor_numpy(x)
return y.numpy() return y.numpy()
...@@ -69,8 +67,7 @@ def tensor_item(x): ...@@ -69,8 +67,7 @@ def tensor_item(x):
class TestTensorItem(unittest.TestCase): class TestTensorItem(unittest.TestCase):
def _run(self, to_static): def _run(self, to_static):
prog_trans = paddle.jit.ProgramTranslator() paddle.jit.enable_to_static(to_static)
prog_trans.enable(to_static)
x = paddle.ones([1]) x = paddle.ones([1])
if to_static: if to_static:
return tensor_item(x).numpy() return tensor_item(x).numpy()
...@@ -92,8 +89,7 @@ def tensor_size(x): ...@@ -92,8 +89,7 @@ def tensor_size(x):
class TestTensorSize(unittest.TestCase): class TestTensorSize(unittest.TestCase):
def _run(self, to_static): def _run(self, to_static):
prog_trans = paddle.jit.ProgramTranslator() paddle.jit.enable_to_static(to_static)
prog_trans.enable(to_static)
x = paddle.ones([1, 2, 3]) x = paddle.ones([1, 2, 3])
if not to_static: if not to_static:
return tensor_size(x) return tensor_size(x)
......
...@@ -24,7 +24,6 @@ from tsm_config_utils import merge_configs, parse_config, print_configs ...@@ -24,7 +24,6 @@ from tsm_config_utils import merge_configs, parse_config, print_configs
import paddle import paddle
import paddle.fluid as fluid import paddle.fluid as fluid
from paddle.fluid.dygraph import to_variable from paddle.fluid.dygraph import to_variable
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static from paddle.jit.api import to_static
from paddle.nn import BatchNorm, Linear from paddle.nn import BatchNorm, Linear
...@@ -290,8 +289,7 @@ def create_optimizer(cfg, params): ...@@ -290,8 +289,7 @@ def create_optimizer(cfg, params):
def train(args, fake_data_reader, to_static): def train(args, fake_data_reader, to_static):
program_translator = ProgramTranslator() paddle.jit.enable_to_static(to_static)
program_translator.enable(to_static)
config = parse_config(args.config) config = parse_config(args.config)
train_config = merge_configs(config, 'train', vars(args)) train_config = merge_configs(config, 'train', vars(args))
......
...@@ -20,7 +20,6 @@ import numpy as np ...@@ -20,7 +20,6 @@ import numpy as np
import paddle import paddle
import paddle.fluid as fluid import paddle.fluid as fluid
from paddle.jit import ProgramTranslator
from paddle.jit.api import to_static from paddle.jit.api import to_static
from paddle.nn import Embedding from paddle.nn import Embedding
...@@ -278,8 +277,7 @@ total_steps = len(dataset) * epoch_num // batch_size ...@@ -278,8 +277,7 @@ total_steps = len(dataset) * epoch_num // batch_size
def train(to_static): def train(to_static):
program_translator = ProgramTranslator() paddle.jit.enable_to_static(to_static)
program_translator.enable(to_static)
random.seed(0) random.seed(0)
np.random.seed(0) np.random.seed(0)
......
...@@ -22,7 +22,6 @@ from yolov3 import YOLOv3, cfg ...@@ -22,7 +22,6 @@ from yolov3 import YOLOv3, cfg
import paddle import paddle
import paddle.fluid as fluid import paddle.fluid as fluid
from paddle.fluid.dygraph import to_variable from paddle.fluid.dygraph import to_variable
from paddle.jit import ProgramTranslator
paddle.enable_static() paddle.enable_static()
random.seed(0) random.seed(0)
...@@ -78,8 +77,7 @@ fake_data_reader = FakeDataReader() ...@@ -78,8 +77,7 @@ fake_data_reader = FakeDataReader()
def train(to_static): def train(to_static):
program_translator = ProgramTranslator() paddle.jit.enable_to_static(to_static)
program_translator.enable(to_static)
random.seed(0) random.seed(0)
np.random.seed(0) np.random.seed(0)
......
...@@ -20,7 +20,6 @@ import paddle ...@@ -20,7 +20,6 @@ import paddle
import paddle.fluid as fluid import paddle.fluid as fluid
from paddle.fluid.dygraph import to_variable from paddle.fluid.dygraph import to_variable
from paddle.fluid.framework import EagerParamBase, ParamBase, in_dygraph_mode from paddle.fluid.framework import EagerParamBase, ParamBase, in_dygraph_mode
from paddle.jit import ProgramTranslator
class L1(fluid.Layer): class L1(fluid.Layer):
...@@ -339,11 +338,10 @@ class BufferNetWithModification(paddle.nn.Layer): ...@@ -339,11 +338,10 @@ class BufferNetWithModification(paddle.nn.Layer):
class TestModifiedBuffer(unittest.TestCase): class TestModifiedBuffer(unittest.TestCase):
def funcsetUp(self): def funcsetUp(self):
paddle.disable_static() paddle.disable_static()
self.prog_trans = ProgramTranslator()
self.shape = [10, 16] self.shape = [10, 16]
def _run(self, to_static=False): def _run(self, to_static=False):
self.prog_trans.enable(to_static) paddle.jit.enable_to_static(to_static)
x = paddle.ones([1], 'int32') x = paddle.ones([1], 'int32')
net = BufferNetWithModification(self.shape) net = BufferNetWithModification(self.shape)
......
...@@ -49,7 +49,6 @@ class TestDirectory(unittest.TestCase): ...@@ -49,7 +49,6 @@ class TestDirectory(unittest.TestCase):
'paddle.DataParallel', 'paddle.DataParallel',
'paddle.jit', 'paddle.jit',
'paddle.jit.to_static', 'paddle.jit.to_static',
'paddle.jit.ProgramTranslator',
'paddle.jit.TranslatedLayer', 'paddle.jit.TranslatedLayer',
'paddle.jit.save', 'paddle.jit.save',
'paddle.jit.load', 'paddle.jit.load',
...@@ -143,7 +142,6 @@ class TestDirectory(unittest.TestCase): ...@@ -143,7 +142,6 @@ class TestDirectory(unittest.TestCase):
'paddle.imperative.jit', 'paddle.imperative.jit',
'paddle.imperative.TracedLayer', 'paddle.imperative.TracedLayer',
'paddle.imperative.declarative', 'paddle.imperative.declarative',
'paddle.imperative.ProgramTranslator',
'paddle.imperative.TranslatedLayer', 'paddle.imperative.TranslatedLayer',
'paddle.imperative.jit.save', 'paddle.imperative.jit.save',
'paddle.imperative.jit.load', 'paddle.imperative.jit.load',
......
...@@ -20,7 +20,6 @@ import numpy as np ...@@ -20,7 +20,6 @@ import numpy as np
import paddle import paddle
from paddle.fluid.framework import _dygraph_place_guard from paddle.fluid.framework import _dygraph_place_guard
from paddle.jit.dy2static.program_translator import ProgramTranslator
from paddle.jit.layer import Layer from paddle.jit.layer import Layer
from paddle.static import InputSpec from paddle.static import InputSpec
...@@ -61,11 +60,10 @@ class TestMultiLoad(unittest.TestCase): ...@@ -61,11 +60,10 @@ class TestMultiLoad(unittest.TestCase):
x = paddle.full([2, 4], 2) x = paddle.full([2, 4], 2)
model = Net() model = Net()
program_translator = ProgramTranslator() paddle.jit.enable_to_static(False)
program_translator.enable(False)
forward_out1 = model.forward(x) forward_out1 = model.forward(x)
infer_out1 = model.infer(x) infer_out1 = model.infer(x)
program_translator.enable(True) paddle.jit.enable_to_static(True)
model_path = os.path.join(self.temp_dir.name, 'multi_program') model_path = os.path.join(self.temp_dir.name, 'multi_program')
paddle.jit.save(model, model_path, combine_params=True) paddle.jit.save(model, model_path, combine_params=True)
......
...@@ -18,10 +18,9 @@ from .api import load ...@@ -18,10 +18,9 @@ from .api import load
from .api import to_static from .api import to_static
from .api import not_to_static from .api import not_to_static
from .api import ignore_module from .api import ignore_module
from .dy2static.logging_utils import set_code_level, set_verbosity from .dy2static.program_translator import enable_to_static
from . import dy2static from .dy2static.logging_utils import set_code_level, set_verbosity
from .dy2static.program_translator import ProgramTranslator
from .translated_layer import TranslatedLayer from .translated_layer import TranslatedLayer
__all__ = [ # noqa __all__ = [ # noqa
...@@ -29,9 +28,9 @@ __all__ = [ # noqa ...@@ -29,9 +28,9 @@ __all__ = [ # noqa
'load', 'load',
'to_static', 'to_static',
'ignore_module', 'ignore_module',
'ProgramTranslator',
'TranslatedLayer', 'TranslatedLayer',
'set_code_level', 'set_code_level',
'set_verbosity', 'set_verbosity',
'not_to_static', 'not_to_static',
'enable_to_static',
] ]
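With enable_to_static now imported into paddle.jit and listed in __all__, both access paths below should resolve to the same function; a quick sanity sketch:

import paddle
from paddle.jit import enable_to_static

assert enable_to_static is paddle.jit.enable_to_static
enable_to_static(True)  # conversion on, which is the default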
...@@ -75,8 +75,6 @@ from paddle.fluid.framework import ( ...@@ -75,8 +75,6 @@ from paddle.fluid.framework import (
from paddle.fluid.framework import dygraph_only, _non_static_mode from paddle.fluid.framework import dygraph_only, _non_static_mode
from paddle.fluid.wrapped_decorator import wrap_decorator from paddle.fluid.wrapped_decorator import wrap_decorator
__all__ = []
def create_program_from_desc(program_desc): def create_program_from_desc(program_desc):
program = Program() program = Program()
...@@ -160,7 +158,7 @@ def _dygraph_to_static_func_(dygraph_func): ...@@ -160,7 +158,7 @@ def _dygraph_to_static_func_(dygraph_func):
if _non_static_mode() or not program_translator.enable_to_static: if _non_static_mode() or not program_translator.enable_to_static:
logging_utils.warn( logging_utils.warn(
"The decorator 'dygraph_to_static_func' doesn't work in " "The decorator 'dygraph_to_static_func' doesn't work in "
"dygraph mode or set ProgramTranslator.enable to False. " "dygraph mode or set 'paddle.jit.enable_to_static' to False. "
"We will just return dygraph output." "We will just return dygraph output."
) )
return dygraph_func(*args, **kwargs) return dygraph_func(*args, **kwargs)
...@@ -911,7 +909,7 @@ def save(layer, path, input_spec=None, **configs): ...@@ -911,7 +909,7 @@ def save(layer, path, input_spec=None, **configs):
prog_translator = ProgramTranslator() prog_translator = ProgramTranslator()
if not prog_translator.enable_to_static: if not prog_translator.enable_to_static:
raise RuntimeError( raise RuntimeError(
"The paddle.jit.save doesn't work when setting ProgramTranslator.enable to False." "The paddle.jit.save doesn't work when setting 'paddle.jit.enable_to_static' to False."
) )
if not ( if not (
......
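As the guard above shows, paddle.jit.save raises once conversion is globally disabled. A sketch of that failure mode (the Net layer here is hypothetical):

import paddle
from paddle.static import InputSpec

class Net(paddle.nn.Layer):
    def forward(self, x):
        return x * 2

paddle.jit.enable_to_static(False)
try:
    paddle.jit.save(Net(), './net', input_spec=[InputSpec([None, 4], 'float32')])
except RuntimeError as e:
    print(e)  # the RuntimeError quoted in the diff above
finally:
    paddle.jit.enable_to_static(True)  # restore the default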
...@@ -12,10 +12,7 @@ ...@@ -12,10 +12,7 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from .utils import ( from .utils import saw, UndefinedVar, ast_to_source_code
saw,
UndefinedVar,
)
from .convert_operators import convert_logical_and as And # noqa: F401 from .convert_operators import convert_logical_and as And # noqa: F401
from .convert_operators import convert_var_dtype as AsDtype # noqa: F401 from .convert_operators import convert_var_dtype as AsDtype # noqa: F401
from .convert_operators import convert_assert as Assert # noqa: F401 from .convert_operators import convert_assert as Assert # noqa: F401
......
...@@ -416,9 +416,9 @@ class StaticFunction: ...@@ -416,9 +416,9 @@ class StaticFunction:
# will show up **only once**. StaticFunction.__call__ will run many times, it is appropriate to # will show up **only once**. StaticFunction.__call__ will run many times, it is appropriate to
# display this warning message only once. # display this warning message only once.
logging_utils.warn( logging_utils.warn(
"The decorator '@paddle.jit.to_static' does NOT work when setting ProgramTranslator.enable to False. " "The decorator '@paddle.jit.to_static' does NOT work when setting 'paddle.jit.enable_to_static' to False. "
"We will just return dygraph output. If you would like to get static graph output, please call API " "We will just return dygraph output. If you would like to get static graph output, please call API "
"ProgramTranslator.enable(True)" "paddle.jit.enable_to_static(True)"
) )
return self._call_dygraph_function(*args, **kwargs) return self._call_dygraph_function(*args, **kwargs)
...@@ -1222,8 +1222,7 @@ class ProgramTranslator: ...@@ -1222,8 +1222,7 @@ class ProgramTranslator:
return x_v return x_v
prog_trans = paddle.jit.ProgramTranslator() paddle.jit.enable_to_static(False)
prog_trans.enable(False)
x = paddle.ones([1, 2]) x = paddle.ones([1, 2])
# ProgramTranslator is disabled so the func is run in dygraph # ProgramTranslator is disabled so the func is run in dygraph
...@@ -1513,3 +1512,47 @@ class ProgramTranslator: ...@@ -1513,3 +1512,47 @@ class ProgramTranslator:
""" """
return self._program_cache return self._program_cache
def enable_to_static(enable_to_static_bool):
"""
Enable or disable the conversion from imperative (dynamic graph) to static graph globally.
Args:
enable_to_static_bool (bool): Set to True to enable, or False to disable, conversion to static graph.
Returns:
None.
Examples:
.. code-block:: python
import paddle
@paddle.jit.to_static
def func(x):
if paddle.mean(x) > 0:
x_v = x - 1
else:
x_v = x + 1
return x_v
paddle.jit.enable_to_static(False)
x = paddle.ones([1, 2])
# conversion to static graph is disabled, so func runs in dygraph mode
print(func(x)) # [[0. 0.]]
"""
check_type(
enable_to_static_bool,
"enable_to_static_bool",
bool,
"paddle.jit.enable_to_static",
)
_program_trans = ProgramTranslator()
_program_trans.enable(enable_to_static_bool)
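Note that the new function validates its argument via check_type, so a non-bool input fails fast instead of being truthy-coerced. Expected behavior, sketched under that assumption:

import paddle

paddle.jit.enable_to_static(False)  # disables dynamic-to-static conversion globally
paddle.jit.enable_to_static(True)   # re-enables it
# paddle.jit.enable_to_static(1)    # would raise TypeError from the check_type guard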
...@@ -25,7 +25,6 @@ import paddle.vision.models as models ...@@ -25,7 +25,6 @@ import paddle.vision.models as models
from paddle import Model, fluid, to_tensor from paddle import Model, fluid, to_tensor
from paddle.hapi.model import prepare_distributed_context from paddle.hapi.model import prepare_distributed_context
from paddle.io import Dataset, DistributedBatchSampler from paddle.io import Dataset, DistributedBatchSampler
from paddle.jit.dy2static.program_translator import ProgramTranslator
from paddle.metric import Accuracy from paddle.metric import Accuracy
from paddle.nn import Conv2D, Linear, ReLU, Sequential from paddle.nn import Conv2D, Linear, ReLU, Sequential
from paddle.nn.layer.loss import CrossEntropyLoss from paddle.nn.layer.loss import CrossEntropyLoss
...@@ -826,8 +825,8 @@ class TestModelFunction(unittest.TestCase): ...@@ -826,8 +825,8 @@ class TestModelFunction(unittest.TestCase):
for dynamic in [True, False]: for dynamic in [True, False]:
paddle.disable_static() if dynamic else None paddle.disable_static() if dynamic else None
prog_translator = ProgramTranslator() paddle.jit.enable_to_static(False) if not dynamic else None
prog_translator.enable(False) if not dynamic else None
net = LeNet() net = LeNet()
inputs = [InputSpec([None, 1, 28, 28], 'float32', 'x')] inputs = [InputSpec([None, 1, 28, 28], 'float32', 'x')]
model = Model(net, inputs) model = Model(net, inputs)
......