Unverified Commit ea830d43 authored by Aurelius84, committed by GitHub

[Fluid Clean] Migrate program_translate.py/jit.py into paddle.jit dir (#48240)

* [Fluid Clean] Migrate program_translate.py/jit.py into paddle.jit dir
Parent fd9c91c3
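For reference, a minimal sketch of the module-path mapping this commit applies across the touched files (paths taken from the hunks below; illustrative, not exhaustive):

# Old (fluid) location                                          -> New (paddle.jit) location
#   paddle.fluid.dygraph.jit                                    -> paddle.jit.api
#   paddle.fluid.dygraph.dygraph_to_static                      -> paddle.jit.dy2static
#   paddle.fluid.dygraph.dygraph_to_static.program_translator   -> paddle.jit.dy2static.program_translator

# Typical rewritten imports, as they appear in the hunks below:
from paddle.jit.api import declarative, dygraph_to_static_func, _SaveLoadConfig
from paddle.jit import ProgramTranslator, TracedLayer
from paddle.jit.dy2static.program_translator import StaticFunction, ProgramCache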
......@@ -75,6 +75,7 @@ paddle/fluid/operators/generated_op.cc
paddle/fluid/operators/generated_sparse_op.cc
paddle/phi/ops/compat/generated_sig.cc
paddle/phi/ops/compat/generated_sparse_sig.cc
paddle/phi/api/yaml/parsed_apis/
paddle/fluid/operators/generator/parsed_ops/
paddle/fluid/pybind/tmp_eager_op_function_impl.h
paddle/fluid/pybind/eager_op_function_impl.h
......
......@@ -20,7 +20,7 @@ from paddle.jit import to_static, not_to_static
from paddle.fluid.framework import Parameter
from paddle.fluid.framework import program_guard
from paddle.fluid.executor import global_scope
from paddle.fluid.dygraph.dygraph_to_static.program_translator import (
from paddle.jit.dy2static.program_translator import (
StaticFunction,
)
......
......@@ -691,16 +691,12 @@ class IpuDynamicPatcher:
Returns:
None
"""
from ..fluid.dygraph.dygraph_to_static.program_translator import (
ProgramCache,
)
from ..fluid.dygraph.dygraph_to_static.program_translator import (
from paddle.jit.dy2static.program_translator import (
CacheKey,
)
from ..fluid.dygraph.dygraph_to_static import logging_utils
from ..fluid.dygraph.dygraph_to_static.program_translator import (
ProgramCache,
MAX_TRACED_PROGRAM_COUNT,
)
from ..fluid.dygraph.dygraph_to_static import logging_utils
from ..fluid.dygraph.dygraph_to_static.partial_program import (
partial_program_from,
)
......
......@@ -36,8 +36,6 @@ from .checkpoint import *
from . import learning_rate_scheduler
from .learning_rate_scheduler import *
from . import jit
from .jit import *
from . import io
from .io import *
......@@ -46,7 +44,6 @@ from . import static_runner
from .static_runner import StaticModelRunner
from . import dygraph_to_static
from .dygraph_to_static import ProgramTranslator
from . import rnn
from .rnn import *
......@@ -66,8 +63,6 @@ __all__ += nn.__all__
__all__ += parallel.__all__
__all__ += checkpoint.__all__
__all__ += learning_rate_scheduler.__all__
__all__ += jit.__all__
__all__ += io.__all__
__all__ += rnn.__all__
__all__ += ['ProgramTranslator']
__all__ += amp.__all__
......@@ -31,7 +31,7 @@ from . import learning_rate_scheduler
import warnings
from .. import core
from .base import guard
from paddle.fluid.dygraph.jit import _SaveLoadConfig
from paddle.jit.api import _SaveLoadConfig
from paddle.fluid.dygraph.io import (
_construct_program_holders,
_construct_params_and_buffers,
......
......@@ -12,9 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from . import ast_transformer
from .ast_transformer import *
from . import static_analysis
from .static_analysis import *
......@@ -24,9 +21,6 @@ from .loop_transformer import *
from . import variable_trans_func
from .variable_trans_func import *
from . import program_translator
from .program_translator import *
from . import convert_call_func
from .convert_call_func import *
......@@ -36,10 +30,8 @@ from . import logging_utils
from .logging_utils import *
__all__ = []
__all__ += ast_transformer.__all__
__all__ += loop_transformer.__all__
__all__ += static_analysis.__all__
__all__ += variable_trans_func.__all__
__all__ += program_translator.__all__
__all__ += convert_call_func.__all__
__all__ += logging_utils.__all__
......@@ -36,15 +36,7 @@ from paddle.fluid.dygraph.dygraph_to_static.convert_operators import (
from paddle.fluid.dygraph.dygraph_to_static.logging_utils import (
TranslatorLogger,
)
from paddle.fluid.dygraph.dygraph_to_static.program_translator import (
StaticFunction,
)
from paddle.fluid.dygraph.dygraph_to_static.program_translator import (
convert_to_static,
)
from paddle.fluid.dygraph.dygraph_to_static.program_translator import (
unwrap_decorators,
)
from paddle.fluid.dygraph.dygraph_to_static.utils import is_paddle_func, unwrap
from paddle.fluid.dygraph.layers import Layer
......@@ -185,6 +177,13 @@ def convert_call(func):
# [1. 1. 1.]]
"""
# NOTE(Aurelius84): Fix it after all files migrating into jit.
from paddle.jit.dy2static.program_translator import (
convert_to_static,
unwrap_decorators,
StaticFunction,
)
translator_logger.log(
1, "Convert callable object: convert {}.".format(func)
)
......
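The function-local import above (see the NOTE) is a deferred import, typically used to avoid circular imports while modules are still being moved; a minimal, self-contained sketch of the pattern with hypothetical module names:

# a_mod.py -- hypothetical module, illustrating the deferred-import pattern only
def convert(obj):
    # Resolving b_mod at call time (not at module-import time) lets a_mod be
    # imported even if b_mod imports a_mod back during its own initialization.
    from b_mod import transform  # hypothetical helper
    return transform(obj)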
......@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from paddle.fluid.dygraph.jit import _SaveLoadConfig
from paddle.jit.api import _SaveLoadConfig
from paddle.fluid.dygraph.io import TranslatedLayer
......
......@@ -15,7 +15,7 @@
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import Embedding, Layer, Linear
from paddle.fluid.dygraph.jit import declarative
from paddle.jit.api import declarative
from transformer_dygraph_model import MultiHeadAttention, PrePostProcessLayer
......
......@@ -20,7 +20,7 @@ from paddle.fluid import ParamAttr
from paddle.fluid import layers
from paddle.fluid.dygraph import Layer
from paddle.fluid.dygraph.base import to_variable
from paddle.fluid.dygraph.jit import declarative
from paddle.jit.api import declarative
from paddle.fluid.dygraph.nn import Embedding
from seq2seq_utils import Seq2SeqModelHyperParams as args
......
......@@ -17,7 +17,7 @@ import paddle.fluid as fluid
import paddle.fluid.param_attr as attr
from functools import reduce
from paddle.fluid.dygraph import declarative
from paddle.jit.api import declarative
from paddle.fluid.dygraph import Embedding, Layer, Linear
from paddle.static import Variable
......
......@@ -17,8 +17,8 @@ import unittest
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
from paddle.fluid.dygraph.jit import declarative
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
@paddle.jit.to_static
......
......@@ -23,7 +23,7 @@ import paddle.fluid.dygraph as dygraph
from paddle import to_tensor
from paddle.fluid.dygraph import to_variable
from paddle.fluid.dygraph.jit import dygraph_to_static_func
from paddle.jit.api import dygraph_to_static_func
from paddle.fluid.dygraph.dygraph_to_static.utils import is_dygraph_api
SEED = 2020
......
......@@ -20,7 +20,7 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
from paddle.jit import ProgramTranslator
from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
from bert_dygraph_model import PretrainModelLayer
......@@ -118,7 +118,7 @@ class TestBert(unittest.TestCase):
step_idx += 1
if step_idx == STEP_NUM:
if to_static:
fluid.dygraph.jit.save(bert, self.model_save_prefix)
paddle.jit.save(bert, self.model_save_prefix)
else:
fluid.dygraph.save_dygraph(
bert.state_dict(), self.dy_state_dict_save_path
......@@ -194,7 +194,7 @@ class TestBert(unittest.TestCase):
def predict_dygraph_jit(self, data):
with fluid.dygraph.guard(place):
bert = fluid.dygraph.jit.load(self.model_save_prefix)
bert = paddle.jit.load(self.model_save_prefix)
bert.eval()
(
......
......@@ -22,7 +22,7 @@ from paddle.jit import to_static
import paddle.fluid as fluid
from paddle.fluid import ParamAttr
from paddle.fluid.dygraph import to_variable
from paddle.fluid.dygraph import ProgramTranslator
from paddle.jit import ProgramTranslator
from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
from predictor_utils import PredictorTools
......@@ -751,7 +751,7 @@ class TestTrain(unittest.TestCase):
if batch_id == args.train_batch_num:
if to_static:
fluid.dygraph.jit.save(bmn, self.model_save_prefix)
paddle.jit.save(bmn, self.model_save_prefix)
else:
fluid.dygraph.save_dygraph(
bmn.state_dict(), self.dy_param_path
......@@ -865,7 +865,7 @@ class TestTrain(unittest.TestCase):
def predict_dygraph_jit(self, data):
with fluid.dygraph.guard(self.place):
bmn = fluid.dygraph.jit.load(self.model_save_prefix)
bmn = paddle.jit.load(self.model_save_prefix)
bmn.eval()
x = to_variable(data)
......
......@@ -16,8 +16,8 @@ import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.jit import declarative
from paddle.fluid.dygraph.dygraph_to_static.program_translator import (
from paddle.jit.api import declarative
from paddle.jit.dy2static.program_translator import (
ProgramTranslator,
)
from paddle.fluid.dygraph.dygraph_to_static.utils import Dygraph2StaticException
......
......@@ -18,9 +18,9 @@ from collections import Counter
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.jit import declarative
from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
from paddle.fluid.dygraph.dygraph_to_static import convert_to_static
from paddle.jit.api import declarative
from paddle.jit import ProgramTranslator
from paddle.jit.dy2static import convert_to_static
from test_fetch_feed import Pool2D, Linear
......
......@@ -15,7 +15,7 @@
import unittest
import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph import declarative
from paddle.jit.api import declarative
SEED = 2020
np.random.seed(SEED)
......
......@@ -19,7 +19,7 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import ProgramTranslator
from paddle.jit import ProgramTranslator
from paddle.fluid.dygraph.dygraph_to_static.convert_call_func import (
CONVERSION_OPTIONS,
)
......
......@@ -36,7 +36,9 @@ os.environ["CUDA_VISIBLE_DEVICES"] = "1"
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import to_variable, declarative, ProgramTranslator
from paddle.fluid.dygraph import to_variable
from paddle.jit.api import declarative
from paddle.jit import ProgramTranslator
from paddle.fluid.dygraph.nn import Conv2DTranspose, BatchNorm
# Note: Set True to eliminate randomness.
......
......@@ -21,12 +21,11 @@ import paddle.fluid as fluid
from paddle.static import InputSpec
from paddle.fluid.dygraph import (
to_variable,
declarative,
ProgramTranslator,
Layer,
jit,
)
from paddle.fluid.dygraph.dygraph_to_static.program_translator import (
from paddle.jit.api import declarative
from paddle.jit import ProgramTranslator
from paddle.jit.dy2static.program_translator import (
ConcreteProgram,
StaticFunction,
)
......@@ -131,8 +130,8 @@ class TestInputSpec(unittest.TestCase):
# 2. test save load
net.inner_function(x)
jit.save(net, self.model_path)
infer_net = fluid.dygraph.jit.load(self.model_path)
paddle.jit.save(net, self.model_path)
infer_net = paddle.jit.load(self.model_path)
pred = infer_net(x)
np.testing.assert_allclose(out.numpy(), pred.numpy(), rtol=1e-05)
......
......@@ -15,7 +15,7 @@
import unittest
import paddle
import numpy as np
from paddle.fluid.dygraph.dygraph_to_static.program_translator import (
from paddle.jit.dy2static.program_translator import (
StaticFunction,
)
......
......@@ -18,7 +18,7 @@ import unittest
import paddle
import paddle.fluid as fluid
from paddle.jit import to_static
from paddle.fluid.dygraph.dygraph_to_static.program_translator import (
from paddle.jit.dy2static.program_translator import (
ProgramTranslator,
)
......
......@@ -16,8 +16,8 @@ import numpy as np
import unittest
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.jit import declarative
from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit import ProgramTranslator
SEED = 2020
......
......@@ -19,7 +19,7 @@ import tempfile
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
from paddle.jit import ProgramTranslator
from paddle.static import InputSpec
program_translator = ProgramTranslator()
......
......@@ -16,10 +16,10 @@ import numpy as np
import paddle
import paddle.fluid as fluid
import unittest
from paddle.fluid.dygraph import declarative
from paddle.jit import to_static
@fluid.dygraph.declarative
@paddle.jit.to_static
def dygraph_decorated_func(x):
x = fluid.dygraph.to_variable(x)
if paddle.mean(x) > 0:
......@@ -29,7 +29,7 @@ def dygraph_decorated_func(x):
return x_v
@fluid.dygraph.declarative
@paddle.jit.to_static
def jit_decorated_func(x):
x = fluid.dygraph.to_variable(x)
if paddle.mean(x) > 0:
......@@ -39,19 +39,19 @@ def jit_decorated_func(x):
return x_v
@fluid.dygraph.declarative
@paddle.jit.to_static
def decorated_call_decorated(x):
return jit_decorated_func(x)
class DoubleDecorated:
@classmethod
@declarative
@to_static
def double_decorated_func1(self, x):
return dygraph_decorated_func(x)
@classmethod
@fluid.dygraph.declarative
@paddle.jit.to_static
def double_decorated_func2(self, x):
return jit_decorated_func(x)
......@@ -78,13 +78,9 @@ class TestFullNameDecorator(unittest.TestCase):
class TestImportProgramTranslator(unittest.TestCase):
def test_diff_pkg_same_cls(self):
dygraph_prog_trans = fluid.dygraph.ProgramTranslator()
dy_to_stat_prog_trans = (
fluid.dygraph.dygraph_to_static.ProgramTranslator()
)
full_pkg_prog_trans = (
fluid.dygraph.dygraph_to_static.program_translator.ProgramTranslator()
)
dygraph_prog_trans = paddle.jit.ProgramTranslator()
dy_to_stat_prog_trans = paddle.jit.ProgramTranslator()
full_pkg_prog_trans = paddle.jit.ProgramTranslator()
self.assertEqual(dygraph_prog_trans, dy_to_stat_prog_trans)
self.assertEqual(dygraph_prog_trans, full_pkg_prog_trans)
......
......@@ -16,8 +16,8 @@ import numpy as np
import unittest
import paddle
from paddle.fluid.dygraph.jit import declarative
from paddle.fluid.dygraph.dygraph_to_static.program_translator import (
from paddle.jit.api import declarative
from paddle.jit.dy2static.program_translator import (
ProgramTranslator,
)
from paddle.fluid.dygraph.dygraph_to_static.utils import Dygraph2StaticException
......
......@@ -26,7 +26,8 @@ import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import to_variable
from paddle.fluid.dygraph import Embedding, Linear, GRUUnit
from paddle.fluid.dygraph import declarative, ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit import ProgramTranslator
from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
from paddle.fluid.framework import _non_static_mode
from paddle import _legacy_C_ops
......@@ -621,7 +622,7 @@ class TestLACModel(unittest.TestCase):
step += 1
# save inference model
if to_static:
fluid.dygraph.jit.save(
paddle.jit.save(
layer=model,
path=self.model_save_prefix,
input_spec=[input_specs[0], input_specs[-1]],
......@@ -706,7 +707,7 @@ class TestLACModel(unittest.TestCase):
def predict_dygraph_jit(self, batch):
words, targets, length = batch
with fluid.dygraph.guard(self.place):
model = fluid.dygraph.jit.load(self.model_save_prefix)
model = paddle.jit.load(self.model_save_prefix)
model.eval()
pred_res = model(to_variable(words), to_variable(length))
......
......@@ -17,7 +17,7 @@ import unittest
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import declarative
from paddle.jit.api import declarative
def call_lambda_as_func(x):
......
......@@ -17,7 +17,7 @@ import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import declarative
from paddle.jit.api import declarative
from paddle.fluid.dygraph.dygraph_to_static import convert_call
SEED = 2020
......
......@@ -18,7 +18,7 @@ import unittest
import paddle
import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph.jit import declarative
from paddle.jit.api import declarative
from paddle.fluid.layers.utils import map_structure
SEED = 2020
......
......@@ -22,7 +22,7 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import ProgramTranslator
from paddle.jit import ProgramTranslator
from paddle.fluid.dygraph.dygraph_to_static.logical_transformer import (
cmpop_node_to_str,
)
......
......@@ -20,7 +20,7 @@ import paddle.fluid as fluid
import unittest
from paddle.fluid.dygraph.dygraph_to_static.loop_transformer import NameVisitor
from paddle.fluid.dygraph.jit import declarative
from paddle.jit.api import declarative
SEED = 2020
np.random.seed(SEED)
......
......@@ -269,7 +269,7 @@ class TestMNISTWithToStatic(TestMNIST):
model_save_prefix = os.path.join(model_save_dir, 'mnist')
model_filename = "mnist" + INFER_MODEL_SUFFIX
params_filename = "mnist" + INFER_PARAMS_SUFFIX
fluid.dygraph.jit.save(
paddle.jit.save(
layer=model,
path=model_save_prefix,
input_spec=input_spec,
......@@ -325,7 +325,7 @@ class TestMNISTWithToStatic(TestMNIST):
return np.array(results[0])
def jit_load_and_run_inference_dygraph(self, model_path, inputs):
infer_net = fluid.dygraph.jit.load(model_path)
infer_net = paddle.jit.load(model_path)
pred = infer_net(inputs[0])
return pred.numpy()
......
......@@ -21,7 +21,8 @@ import paddle.fluid as fluid
from paddle.fluid.initializer import MSRA
from paddle.fluid.param_attr import ParamAttr
from paddle.fluid.dygraph.nn import Pool2D, BatchNorm, Linear
from paddle.fluid.dygraph import declarative, ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit import ProgramTranslator
from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
import unittest
......@@ -565,7 +566,7 @@ def train_mobilenet(args, to_static):
t_last = time.time()
if batch_id > args.train_step:
if to_static:
fluid.dygraph.jit.save(net, args.model_save_prefix)
paddle.jit.save(net, args.model_save_prefix)
else:
fluid.dygraph.save_dygraph(
net.state_dict(), args.dy_state_dict_save_path
......@@ -618,7 +619,7 @@ def predict_dygraph(args, data):
def predict_dygraph_jit(args, data):
with fluid.dygraph.guard(args.place):
model = fluid.dygraph.jit.load(args.model_save_prefix)
model = paddle.jit.load(args.model_save_prefix)
model.eval()
pred_res = model(data)
......
......@@ -15,7 +15,7 @@
import sys
import unittest
from paddle.fluid.dygraph.dygraph_to_static.ast_transformer import (
from paddle.jit.dy2static import (
DygraphToStaticAst,
)
from paddle.fluid.dygraph.dygraph_to_static.origin_info import (
......@@ -29,7 +29,7 @@ from paddle.fluid.dygraph.dygraph_to_static.origin_info import (
unwrap,
)
from paddle.fluid.dygraph.dygraph_to_static.utils import ast_to_func
from paddle.fluid.dygraph.jit import declarative
from paddle.jit.api import declarative
def simple_func(x):
......
......@@ -16,7 +16,8 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.layers.utils import flatten
from paddle.fluid.dygraph import declarative, ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit import ProgramTranslator
from test_fetch_feed import Linear
......
......@@ -16,8 +16,8 @@ import numpy
import unittest
import paddle.fluid as fluid
from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
from paddle.fluid.dygraph.jit import declarative
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
program_translator = ProgramTranslator()
......
......@@ -21,8 +21,8 @@ import unittest
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
from paddle.fluid.dygraph.jit import declarative
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.fluid.dygraph.dygraph_to_static.utils import func_to_source_code
import paddle.jit.dy2static as _jst
......
......@@ -19,9 +19,9 @@ import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
from paddle.jit import ProgramTranslator
from paddle.fluid.dygraph.base import to_variable
from paddle.fluid.dygraph.jit import declarative
from paddle.jit.api import declarative
from paddle.fluid.dygraph.nn import Embedding
from paddle.fluid.optimizer import SGDOptimizer
......
......@@ -20,7 +20,8 @@ import paddle
import paddle.fluid as fluid
import paddle.fluid.dygraph.nn as nn
from paddle.fluid.dygraph import to_variable, Layer
from paddle.fluid.dygraph import declarative, ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit import ProgramTranslator
import unittest
......
......@@ -22,7 +22,7 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import ProgramTranslator
from paddle.jit import ProgramTranslator
from paddle.fluid.dygraph.nn import BatchNorm, Linear, Pool2D
from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
......@@ -311,9 +311,7 @@ class ResNetHelper:
)
if batch_id == 10:
if to_static:
fluid.dygraph.jit.save(
resnet, self.model_save_prefix
)
paddle.jit.save(resnet, self.model_save_prefix)
else:
fluid.dygraph.save_dygraph(
resnet.state_dict(),
......@@ -364,7 +362,7 @@ class ResNetHelper:
def predict_dygraph_jit(self, data):
with fluid.dygraph.guard(place):
resnet = fluid.dygraph.jit.load(self.model_save_prefix)
resnet = paddle.jit.load(self.model_save_prefix)
resnet.eval()
pred_res = resnet(data)
......
......@@ -19,7 +19,7 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import ProgramTranslator
from paddle.jit import ProgramTranslator
from test_resnet import ResNet, optimizer_setting, SEED
# NOTE: Reduce batch_size from 8 to 2 to avoid unittest timeout.
......
......@@ -19,7 +19,7 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import ProgramTranslator
from paddle.jit import ProgramTranslator
from test_resnet import ResNet, optimizer_setting, SEED
# NOTE: Reduce batch_size from 8 to 2 to avoid unittest timeout.
......
......@@ -16,7 +16,7 @@ import unittest
import paddle
import numpy as np
from paddle.fluid.dygraph.dygraph_to_static.utils import func_to_source_code
from paddle.fluid.dygraph.dygraph_to_static.program_translator import (
from paddle.jit.dy2static.program_translator import (
StaticFunction,
)
......
......@@ -19,8 +19,8 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
from paddle.fluid.dygraph.jit import declarative
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from paddle.fluid.dygraph.dygraph_to_static.partial_program import (
partial_program_from,
)
......@@ -81,7 +81,7 @@ class TestDyToStaticSaveInferenceModel(unittest.TestCase):
infer_model_dir = os.path.join(
self.temp_dir.name, "test_dy2stat_inference_in_guard"
)
fluid.dygraph.jit.save(
paddle.jit.save(
layer=layer,
path=infer_model_prefix,
input_spec=[x],
......@@ -111,7 +111,7 @@ class TestDyToStaticSaveInferenceModel(unittest.TestCase):
)
model_filename = "model" + INFER_MODEL_SUFFIX
params_filename = "model" + INFER_PARAMS_SUFFIX
fluid.dygraph.jit.save(
paddle.jit.save(
layer=model,
path=infer_model_prefix,
input_spec=feed if feed else None,
......
......@@ -19,7 +19,7 @@ import tempfile
import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
from paddle.jit import ProgramTranslator
from paddle.fluid.optimizer import AdamOptimizer
from test_fetch_feed import Linear
......
......@@ -24,8 +24,8 @@ import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.base import to_variable
from paddle.fluid.dygraph.nn import BatchNorm, Linear, Pool2D
from paddle.fluid.dygraph import declarative
from paddle.fluid.dygraph import ProgramTranslator
from paddle.jit.api import declarative
from paddle.jit import ProgramTranslator
from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
from predictor_utils import PredictorTools
......@@ -458,7 +458,7 @@ class TestSeResnet(unittest.TestCase):
step_idx += 1
if step_idx == STEP_NUM:
if to_static:
fluid.dygraph.jit.save(
paddle.jit.save(
se_resnext,
self.model_save_prefix,
[img],
......@@ -520,7 +520,7 @@ class TestSeResnet(unittest.TestCase):
def predict_dygraph_jit(self, data):
with fluid.dygraph.guard(place):
se_resnext = fluid.dygraph.jit.load(self.model_save_prefix)
se_resnext = paddle.jit.load(self.model_save_prefix)
se_resnext.eval()
pred_res = se_resnext(data)
......
......@@ -18,7 +18,9 @@ import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.nn import Linear, Embedding
from paddle.fluid.dygraph import to_variable, ProgramTranslator, declarative
from paddle.fluid.dygraph import to_variable
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
from test_lac import DynamicGRU
......
......@@ -20,7 +20,7 @@ import unittest
import numpy as np
import paddle.fluid as fluid
from paddle.fluid.clip import GradientClipByGlobalNorm
from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
from paddle.jit import ProgramTranslator
from seq2seq_dygraph_model import BaseModel, AttentionModel
from seq2seq_utils import Seq2SeqModelHyperParams
......
......@@ -19,7 +19,7 @@ import paddle.fluid as fluid
import random
import unittest
from paddle.fluid.dygraph import ProgramTranslator
from paddle.jit import ProgramTranslator
from simnet_dygraph_model import BOW, HingeLoss
SEED = 102
......
......@@ -17,7 +17,7 @@ import numpy as np
import unittest
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.jit import declarative
from paddle.jit.api import declarative
def dyfunc_tensor_shape_1(x):
......
......@@ -20,7 +20,9 @@ import sys
import unittest
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import declarative, ProgramTranslator, to_variable
from paddle.jit.api import declarative
from paddle.jit import ProgramTranslator
from paddle.fluid.dygraph import to_variable
from paddle.fluid.dygraph.nn import BatchNorm, Linear, Pool2D
from tsm_config_utils import merge_configs, parse_config, print_configs
......
......@@ -16,7 +16,7 @@ import numpy as np
import paddle.fluid as fluid
import unittest
from paddle.fluid.dygraph.jit import declarative
from paddle.jit.api import declarative
SEED = 2020
np.random.seed(SEED)
......
......@@ -21,8 +21,8 @@ import unittest
import paddle
from paddle.fluid.dygraph.nn import Embedding
from paddle.fluid.dygraph import ProgramTranslator
from paddle.fluid.dygraph import declarative
from paddle.jit import ProgramTranslator
from paddle.jit.api import declarative
def fake_text():
......
......@@ -19,7 +19,7 @@ import unittest
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import ProgramTranslator
from paddle.jit import ProgramTranslator
from paddle.fluid.dygraph import to_variable
from yolov3 import cfg, YOLOv3
......
......@@ -23,7 +23,7 @@ from paddle.fluid.dygraph import (
Linear,
to_variable,
)
from paddle.fluid.dygraph.jit import dygraph_to_static_func
from paddle.jit.api import dygraph_to_static_func
from paddle.fluid.layers.utils import map_structure
import paddle
import paddle.nn.functional as F
......
......@@ -18,7 +18,7 @@ import sys
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import declarative
from paddle.jit.api import declarative
from paddle.fluid.param_attr import ParamAttr
from paddle.fluid.regularizer import L2Decay
......
......@@ -17,7 +17,7 @@ import unittest
import numpy as np
import paddle
from paddle.fluid.dygraph.dygraph_to_static.program_translator import (
from paddle.jit.dy2static.program_translator import (
ProgramCache,
)
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUD2STest
......
......@@ -20,7 +20,7 @@ from paddle.fluid.dygraph.nn import Embedding
import paddle.fluid.framework as framework
from paddle.fluid.optimizer import SGDOptimizer
from paddle.fluid.dygraph.base import to_variable
from paddle.fluid.dygraph import TracedLayer
from paddle.jit import TracedLayer
from test_imperative_base import new_program_scope
import numpy as np
from utils import DyGraphProgramDescTracerTestHelper, is_equal_program
......
......@@ -23,7 +23,7 @@ from paddle.fluid import Pool2D, BatchNorm, Linear
from paddle.fluid.dygraph.base import to_variable
from test_imperative_base import new_program_scope
from utils import DyGraphProgramDescTracerTestHelper, is_equal_program
from paddle.fluid.dygraph import TracedLayer
from paddle.jit import TracedLayer
from paddle.fluid.framework import _test_eager_guard, _in_legacy_dygraph
# NOTE(zhiqiu): run with FLAGS_cudnn_deterministic=1
......
......@@ -17,7 +17,7 @@ import paddle
import paddle.fluid as fluid
from paddle.fluid import Embedding, LayerNorm, Linear, Layer
from paddle.fluid.dygraph import to_variable, guard
from paddle.fluid.dygraph import TracedLayer
from paddle.jit import TracedLayer
from test_imperative_base import new_program_scope
from paddle.fluid.framework import _in_legacy_dygraph, _test_eager_guard
from paddle.fluid import core
......
......@@ -20,7 +20,7 @@ import numpy as np
from paddle.static import InputSpec
from paddle.fluid.framework import _dygraph_place_guard
from paddle.jit.layer import Layer
from paddle.fluid.dygraph.dygraph_to_static.program_translator import (
from paddle.jit.dy2static.program_translator import (
ProgramTranslator,
)
......
......@@ -16,7 +16,7 @@
import unittest
import paddle
from paddle.fluid.dygraph.jit import (
from paddle.jit.api import (
_run_save_pre_hooks,
_clear_save_pre_hooks,
_register_save_pre_hook,
......@@ -32,24 +32,20 @@ class TestPreSaveHooks(unittest.TestCase):
_counter += 1
remove_handler = _register_save_pre_hook(fake_func)
self.assertEqual(len(paddle.fluid.dygraph.jit._save_pre_hooks), 1)
self.assertTrue(
paddle.fluid.dygraph.jit._save_pre_hooks[0] is fake_func
)
self.assertEqual(len(paddle.jit.api._save_pre_hooks), 1)
self.assertTrue(paddle.jit.api._save_pre_hooks[0] is fake_func)
# Test of avoiding redundancy hanging
remove_handler = _register_save_pre_hook(fake_func)
self.assertEqual(len(paddle.fluid.dygraph.jit._save_pre_hooks), 1)
self.assertTrue(
paddle.fluid.dygraph.jit._save_pre_hooks[0] is fake_func
)
self.assertEqual(len(paddle.jit.api._save_pre_hooks), 1)
self.assertTrue(paddle.jit.api._save_pre_hooks[0] is fake_func)
remove_handler.remove()
self.assertEqual(len(paddle.fluid.dygraph.jit._save_pre_hooks), 0)
self.assertEqual(len(paddle.jit.api._save_pre_hooks), 0)
remove_handler = _register_save_pre_hook(fake_func)
_clear_save_pre_hooks()
self.assertEqual(len(paddle.fluid.dygraph.jit._save_pre_hooks), 0)
self.assertEqual(len(paddle.jit.api._save_pre_hooks), 0)
global _counter
_counter = 0
......
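The assertions above now target the hook registry that lives in paddle.jit.api. A condensed sketch of the round-trip they exercise (private helpers, shown only to illustrate the relocated module path):

import paddle
from paddle.jit.api import _register_save_pre_hook, _clear_save_pre_hooks

def fake_func(*args, **kwargs):
    pass

handle = _register_save_pre_hook(fake_func)  # first registration is recorded
_register_save_pre_hook(fake_func)           # duplicate registration is ignored
assert len(paddle.jit.api._save_pre_hooks) == 1
handle.remove()                              # handle-based removal
_clear_save_pre_hooks()
assert len(paddle.jit.api._save_pre_hooks) == 0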
......@@ -24,7 +24,7 @@ from paddle.static import InputSpec
import paddle.fluid as fluid
from paddle.fluid.layers.utils import flatten
from paddle.fluid.dygraph import Linear
from paddle.fluid.dygraph import declarative
from paddle.jit.api import declarative
from paddle.fluid.dygraph.io import INFER_PARAMS_INFO_SUFFIX
from paddle.fluid import unique_name
......
......@@ -16,7 +16,7 @@ import unittest
from paddle.fluid.framework import in_dygraph_mode
import paddle.fluid as fluid
import paddle.fluid.layers as layers
from paddle.fluid.dygraph.jit import TracedLayer
from paddle.jit.api import TracedLayer
import numpy as np
from paddle import _legacy_C_ops
......
......@@ -74,7 +74,7 @@ class TestTracedLayerErrMsg(unittest.TestCase):
None, [in_x]
)
self.assertEqual(
"The type of 'layer' in fluid.dygraph.jit.TracedLayer.trace must be fluid.dygraph.Layer, but received <{} 'NoneType'>.".format(
"The type of 'layer' in paddle.jit.TracedLayer.trace must be fluid.dygraph.Layer, but received <{} 'NoneType'>.".format(
self.type_str
),
str(e.exception),
......@@ -84,7 +84,7 @@ class TestTracedLayerErrMsg(unittest.TestCase):
self.layer, 3
)
self.assertEqual(
"The type of 'each element of inputs' in fluid.dygraph.jit.TracedLayer.trace must be fluid.Variable, but received <{} 'int'>.".format(
"The type of 'each element of inputs' in paddle.jit.TracedLayer.trace must be fluid.Variable, but received <{} 'int'>.".format(
self.type_str
),
str(e.exception),
......@@ -94,7 +94,7 @@ class TestTracedLayerErrMsg(unittest.TestCase):
self.layer, [True, 1]
)
self.assertEqual(
"The type of 'each element of inputs' in fluid.dygraph.jit.TracedLayer.trace must be fluid.Variable, but received <{} 'bool'>.".format(
"The type of 'each element of inputs' in paddle.jit.TracedLayer.trace must be fluid.Variable, but received <{} 'bool'>.".format(
self.type_str
),
str(e.exception),
......@@ -120,7 +120,7 @@ class TestTracedLayerErrMsg(unittest.TestCase):
with self.assertRaises(AssertionError) as e:
traced_layer.set_strategy(1, fluid.ExecutionStrategy())
self.assertEqual(
"The type of 'build_strategy' in fluid.dygraph.jit.TracedLayer.set_strategy must be fluid.BuildStrategy, but received <{} 'int'>.".format(
"The type of 'build_strategy' in paddle.jit.TracedLayer.set_strategy must be fluid.BuildStrategy, but received <{} 'int'>.".format(
self.type_str
),
str(e.exception),
......@@ -129,7 +129,7 @@ class TestTracedLayerErrMsg(unittest.TestCase):
with self.assertRaises(AssertionError) as e:
traced_layer.set_strategy(fluid.BuildStrategy(), False)
self.assertEqual(
"The type of 'exec_strategy' in fluid.dygraph.jit.TracedLayer.set_strategy must be fluid.ExecutionStrategy, but received <{} 'bool'>.".format(
"The type of 'exec_strategy' in paddle.jit.TracedLayer.set_strategy must be fluid.ExecutionStrategy, but received <{} 'bool'>.".format(
self.type_str
),
str(e.exception),
......@@ -158,7 +158,7 @@ class TestTracedLayerErrMsg(unittest.TestCase):
with self.assertRaises(TypeError) as e:
traced_layer.save_inference_model([0])
self.assertEqual(
"The type of 'path' in fluid.dygraph.jit.TracedLayer.save_inference_model must be <{} 'str'>, but received <{} 'list'>. ".format(
"The type of 'path' in paddle.jit.TracedLayer.save_inference_model must be <{} 'str'>, but received <{} 'list'>. ".format(
self.type_str, self.type_str
),
str(e.exception),
......@@ -166,7 +166,7 @@ class TestTracedLayerErrMsg(unittest.TestCase):
with self.assertRaises(TypeError) as e:
traced_layer.save_inference_model(path, [0], [None])
self.assertEqual(
"The type of 'each element of fetch' in fluid.dygraph.jit.TracedLayer.save_inference_model must be <{} 'int'>, but received <{} 'NoneType'>. ".format(
"The type of 'each element of fetch' in paddle.jit.TracedLayer.save_inference_model must be <{} 'int'>, but received <{} 'NoneType'>. ".format(
self.type_str, self.type_str
),
str(e.exception),
......@@ -174,7 +174,7 @@ class TestTracedLayerErrMsg(unittest.TestCase):
with self.assertRaises(TypeError) as e:
traced_layer.save_inference_model(path, [0], False)
self.assertEqual(
"The type of 'fetch' in fluid.dygraph.jit.TracedLayer.save_inference_model must be (<{} 'NoneType'>, <{} 'list'>), but received <{} 'bool'>. ".format(
"The type of 'fetch' in paddle.jit.TracedLayer.save_inference_model must be (<{} 'NoneType'>, <{} 'list'>), but received <{} 'bool'>. ".format(
self.type_str, self.type_str, self.type_str
),
str(e.exception),
......@@ -182,7 +182,7 @@ class TestTracedLayerErrMsg(unittest.TestCase):
with self.assertRaises(TypeError) as e:
traced_layer.save_inference_model(path, [None], [0])
self.assertEqual(
"The type of 'each element of feed' in fluid.dygraph.jit.TracedLayer.save_inference_model must be <{} 'int'>, but received <{} 'NoneType'>. ".format(
"The type of 'each element of feed' in paddle.jit.TracedLayer.save_inference_model must be <{} 'int'>, but received <{} 'NoneType'>. ".format(
self.type_str, self.type_str
),
str(e.exception),
......@@ -190,7 +190,7 @@ class TestTracedLayerErrMsg(unittest.TestCase):
with self.assertRaises(TypeError) as e:
traced_layer.save_inference_model(path, True, [0])
self.assertEqual(
"The type of 'feed' in fluid.dygraph.jit.TracedLayer.save_inference_model must be (<{} 'NoneType'>, <{} 'list'>), but received <{} 'bool'>. ".format(
"The type of 'feed' in paddle.jit.TracedLayer.save_inference_model must be (<{} 'NoneType'>, <{} 'list'>), but received <{} 'bool'>. ".format(
self.type_str, self.type_str, self.type_str
),
str(e.exception),
......
......@@ -33,7 +33,6 @@ from paddle.fluid.dygraph.io import (
_construct_params_and_buffers,
_construct_program_holders,
)
from paddle.fluid.dygraph.jit import _SaveLoadConfig
from paddle.fluid.framework import (
EagerParamBase,
ParamBase,
......@@ -52,6 +51,7 @@ from paddle.fluid.io import (
_pickle_loads_mac,
_unpack_saved_dict,
)
from paddle.jit.api import _SaveLoadConfig
__all__ = []
......
......@@ -17,7 +17,7 @@ import warnings
import paddle.nn as nn
import numpy as np
from .static_flops import static_flops, Table
from paddle.fluid.dygraph.dygraph_to_static.program_translator import (
from paddle.jit.dy2static.program_translator import (
unwrap_decorators,
)
......
......@@ -13,26 +13,26 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from ..fluid.dygraph.jit import save # noqa: F401
from ..fluid.dygraph.jit import load # noqa: F401
from ..fluid.dygraph.jit import TracedLayer # noqa: F401
from ..fluid.dygraph.jit import set_code_level # noqa: F401
from ..fluid.dygraph.jit import set_verbosity # noqa: F401
from ..fluid.dygraph.jit import declarative as to_static # noqa: F401
from ..fluid.dygraph.jit import not_to_static # noqa: F401
from ..fluid.dygraph import ProgramTranslator # noqa: F401
from ..fluid.dygraph.io import TranslatedLayer # noqa: F401
from .api import save
from .api import load
from .api import TracedLayer
from .api import set_code_level
from .api import set_verbosity
from .api import declarative as to_static
from .api import not_to_static
from .api import TranslatedLayer
from . import dy2static # noqa: F401
from . import dy2static
from .dy2static.program_translator import ProgramTranslator
__all__ = [ # noqa
'save',
'load',
'TracedLayer',
'to_static',
'ProgramTranslator',
'TranslatedLayer',
'set_code_level',
'set_verbosity',
'not_to_static',
'TracedLayer',
]
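With the rewritten __init__.py above, the public surface of paddle.jit is unchanged; only the defining modules moved. A quick sanity-check sketch:

import paddle

# Each public name still resolves, now from the paddle.jit package itself:
print(paddle.jit.to_static)            # declarative, defined in paddle/jit/api.py
print(paddle.jit.save, paddle.jit.load)
print(paddle.jit.TracedLayer, paddle.jit.TranslatedLayer)
print(paddle.jit.ProgramTranslator)    # from paddle.jit.dy2static.program_translator
print(paddle.jit.set_code_level, paddle.jit.set_verbosity, paddle.jit.not_to_static)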
......@@ -16,7 +16,6 @@
import os
import pickle
import warnings
import functools
from collections import OrderedDict
import inspect
import threading
......@@ -44,7 +43,7 @@ from paddle.fluid.dygraph.dygraph_to_static.logging_utils import (
set_code_level,
set_verbosity,
)
from paddle.fluid.dygraph.dygraph_to_static.program_translator import (
from paddle.jit.dy2static.program_translator import (
ProgramTranslator,
StaticFunction,
unwrap_decorators,
......@@ -102,7 +101,7 @@ def _extract_vars(inputs, result_list, err_tag='inputs'):
_extract_vars(var, result_list, err_tag)
else:
raise TypeError(
"The type of 'each element of {}' in fluid.dygraph.jit.TracedLayer.trace must be fluid.Variable, but received {}.".format(
"The type of 'each element of {}' in paddle.jit.TracedLayer.trace must be fluid.Variable, but received {}.".format(
err_tag, type(inputs)
)
)
......@@ -139,7 +138,7 @@ def _dygraph_to_static_func_(dygraph_func):
import paddle.fluid as fluid
import numpy as np
from paddle.fluid.dygraph.jit import dygraph_to_static_func
from paddle.jit.api import dygraph_to_static_func
@dygraph_to_static_func
def func(x):
......@@ -1569,6 +1568,8 @@ class TracedLayer:
Examples:
.. code-block:: python:
import os
os.environ['FLAGS_enable_eager_mode'] = '0'
import paddle
class ExampleLayer(paddle.nn.Layer):
......@@ -1596,7 +1597,7 @@ class TracedLayer:
"""
assert isinstance(
layer, Layer
), "The type of 'layer' in fluid.dygraph.jit.TracedLayer.trace must be fluid.dygraph.Layer, but received {}.".format(
), "The type of 'layer' in paddle.jit.TracedLayer.trace must be fluid.dygraph.Layer, but received {}.".format(
type(layer)
)
outs, prog, feed, fetch, parameters = _trace(layer, inputs)
......@@ -1619,6 +1620,8 @@ class TracedLayer:
Examples:
.. code-block:: python:
import os
os.environ['FLAGS_enable_eager_mode'] = '0'
import paddle
class ExampleLayer(paddle.nn.Layer):
......@@ -1647,12 +1650,12 @@ class TracedLayer:
assert self._compiled_program is None, "Cannot set strategy after run"
assert isinstance(
build_strategy, (type(None), BuildStrategy)
), "The type of 'build_strategy' in fluid.dygraph.jit.TracedLayer.set_strategy must be fluid.BuildStrategy, but received {}.".format(
), "The type of 'build_strategy' in paddle.jit.TracedLayer.set_strategy must be fluid.BuildStrategy, but received {}.".format(
type(build_strategy)
)
assert isinstance(
exec_strategy, (type(None), ExecutionStrategy)
), "The type of 'exec_strategy' in fluid.dygraph.jit.TracedLayer.set_strategy must be fluid.ExecutionStrategy, but received {}.".format(
), "The type of 'exec_strategy' in paddle.jit.TracedLayer.set_strategy must be fluid.ExecutionStrategy, but received {}.".format(
type(exec_strategy)
)
self._build_strategy = build_strategy
......@@ -1723,6 +1726,8 @@ class TracedLayer:
Examples:
.. code-block:: python:
import os
os.environ['FLAGS_enable_eager_mode'] = '0'
import numpy as np
import paddle
......@@ -1755,13 +1760,13 @@ class TracedLayer:
path,
"path",
str,
"fluid.dygraph.jit.TracedLayer.save_inference_model",
"paddle.jit.TracedLayer.save_inference_model",
)
check_type(
feed,
"feed",
(type(None), list),
"fluid.dygraph.jit.TracedLayer.save_inference_model",
"paddle.jit.TracedLayer.save_inference_model",
)
if isinstance(feed, list):
for f in feed:
......@@ -1769,13 +1774,13 @@ class TracedLayer:
f,
"each element of feed",
int,
"fluid.dygraph.jit.TracedLayer.save_inference_model",
"paddle.jit.TracedLayer.save_inference_model",
)
check_type(
fetch,
"fetch",
(type(None), list),
"fluid.dygraph.jit.TracedLayer.save_inference_model",
"paddle.jit.TracedLayer.save_inference_model",
)
if isinstance(fetch, list):
for f in fetch:
......@@ -1783,7 +1788,7 @@ class TracedLayer:
f,
"each element of fetch",
int,
"fluid.dygraph.jit.TracedLayer.save_inference_model",
"paddle.jit.TracedLayer.save_inference_model",
)
clip_extra = kwargs.get('clip_extra', True)
# path check
......
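The docstring snippets added above disable eager mode before tracing; a condensed end-to-end sketch consistent with those examples (the layer definition is illustrative):

import os
os.environ['FLAGS_enable_eager_mode'] = '0'  # TracedLayer still requires legacy dygraph mode
import paddle

class ExampleLayer(paddle.nn.Layer):
    def __init__(self):
        super().__init__()
        self._fc = paddle.nn.Linear(3, 10)

    def forward(self, input):
        return self._fc(input)

layer = ExampleLayer()
in_var = paddle.rand([2, 3], dtype='float32')
# trace returns the dygraph output plus a TracedLayer wrapping the static program
out_dygraph, static_layer = paddle.jit.TracedLayer.trace(layer, inputs=[in_var])
# type errors raised here now reference paddle.jit.TracedLayer.*, per this commit
static_layer.save_inference_model('./traced_example', feed=[0], fetch=[0])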
......@@ -32,5 +32,8 @@ from .convert_operators import indexable as Indexable # noqa: F401
from .variable_trans_func import create_bool_as_type # noqa: F401
from .variable_trans_func import to_static_variable # noqa: F401
from .convert_operators import convert_shape_compare # noqa: F401
from .assert_transformer import AssertTransformer
from .ast_transformer import DygraphToStaticAst
from .program_translator import convert_to_static
__all__ = []
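The re-exports added above make the dy2static building blocks importable from the package root, matching the rewritten test imports elsewhere in this diff, for example:

from paddle.jit.dy2static import DygraphToStaticAst, convert_to_static
from paddle.jit.dy2static import AssertTransformer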
......@@ -18,14 +18,13 @@
# See details in https://github.com/serge-sans-paille/gast/
import os
from paddle.utils import gast
from paddle.fluid.dygraph.dygraph_to_static.base_transformer import (
BaseTransformer,
)
from paddle.fluid.dygraph.dygraph_to_static.early_return_transformer import (
EarlyReturnTransformer,
)
from paddle.fluid.dygraph.dygraph_to_static.assert_transformer import (
from .assert_transformer import (
AssertTransformer,
)
from paddle.fluid.dygraph.dygraph_to_static.basic_api_transformer import (
......@@ -76,7 +75,6 @@ from paddle.fluid.dygraph.dygraph_to_static.decorator_transformer import (
from paddle.fluid.dygraph.dygraph_to_static import logging_utils
from paddle.fluid.dygraph.dygraph_to_static.utils import ast_to_source_code
from paddle.fluid.dygraph.dygraph_to_static.utils import get_attribute_full_name
__all__ = ['DygraphToStaticAst']
......
......@@ -26,7 +26,6 @@ from paddle.fluid.data_feeder import check_type
from paddle.fluid.layers.utils import flatten
from paddle.fluid.dygraph.base import param_guard
from paddle.fluid.dygraph.base import switch_to_static_graph
from paddle.fluid.dygraph.dygraph_to_static import DygraphToStaticAst
from paddle.fluid.dygraph.dygraph_to_static import error
from paddle.fluid.dygraph.dygraph_to_static import logging_utils
from paddle.fluid.dygraph.dygraph_to_static.origin_info import (
......@@ -59,7 +58,8 @@ from paddle.fluid.dygraph.dygraph_to_static.function_spec import (
get_buffers,
get_parameters,
)
from paddle.fluid.wrapped_decorator import signature_safe_contextmanager
from .ast_transformer import DygraphToStaticAst
__all__ = ['ProgramTranslator', 'convert_to_static']
......@@ -334,7 +334,7 @@ class StaticFunction:
def train(self):
if (
isinstance(self._class_instance, layers.Layer)
and self._class_instance.training == False
and self._class_instance.training is False
):
raise RuntimeError(
"Failed to switch train mode. {} is a Layer's method, "
......@@ -347,7 +347,7 @@ class StaticFunction:
def eval(self):
if (
isinstance(self._class_instance, layers.Layer)
and self._class_instance.training == True
and self._class_instance.training is True
):
raise RuntimeError(
"Failed to switch eval mode. {} is a Layer's method, "
......
......@@ -31,10 +31,10 @@ from paddle.metric import Accuracy
from paddle.vision.datasets import MNIST
from paddle.vision.models import LeNet
import paddle.vision.models as models
import paddle.fluid.dygraph.jit as jit
import paddle.jit as jit
from paddle.io import DistributedBatchSampler, Dataset
from paddle.hapi.model import prepare_distributed_context
from paddle.fluid.dygraph.dygraph_to_static.program_translator import (
from paddle.jit.dy2static.program_translator import (
ProgramTranslator,
)
......