diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/assert_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/assert_transformer.py index 4d5076108cd31ad6c6cde811b49c6042f17a1c3f..3d5ca1c13681615eab0ad47acfb8c42147a12289 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/assert_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/assert_transformer.py @@ -36,10 +36,8 @@ class AssertTransformer(gast.NodeTransformer): self.visit(self.root) def visit_Assert(self, node): - convert_assert_node = gast.parse( - '_jst.convert_assert({test}, {msg})'.format( - test=ast_to_source_code(node.test), - msg=ast_to_source_code(node.msg) - if node.msg else "")).body[0].value + convert_assert_node = gast.parse('_jst.Assert({test}, {msg})'.format( + test=ast_to_source_code(node.test), + msg=ast_to_source_code(node.msg) if node.msg else "")).body[0].value return gast.Expr(value=convert_assert_node) diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/call_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/call_transformer.py index c16d1ff17f70718c8450f93f9c728da512072e9d..b14977ced1db57e2924a1e23f7729564d8035162 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/call_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/call_transformer.py @@ -71,7 +71,7 @@ class CallTransformer(gast.NodeTransformer): if PDB_SET in func_str: return node - new_func_str = "_jst.convert_call({})".format(func_str) + new_func_str = "_jst.Call({})".format(func_str) new_func_ast = gast.parse(new_func_str).body[0].value node.func = new_func_ast diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/cast_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/cast_transformer.py index bf7791c788ccf919c5ce73792854557a94361f27..3b2d9be99ff009a0368ff24cd38418040125c62a 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/cast_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/cast_transformer.py @@ -39,8 +39,7 @@ class CastTransformer(gast.NodeTransformer): func_str = ast_to_source_code(node.func).strip() if func_str in self._castable_type and len(node.args) > 0: args_str = ast_to_source_code(node.args[0]).strip() - new_func_str = "_jst.convert_var_dtype({}, '{}')".format( - args_str, func_str) + new_func_str = "_jst.AsDtype({}, '{}')".format(args_str, func_str) new_node = gast.parse(new_func_str).body[0].value return new_node diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py index f4d19905975d95a79724b9d08cb95bc10a0cfdcc..1935629f54e865040900cb8eb934bfc3e86988d5 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/ifelse_transformer.py @@ -361,8 +361,8 @@ def parse_cond_return(parent_vars_dict, if_vars_dict, else_vars_dict, After transformed, q and z are created in parent scope. 
For example, x, y = 5, 10 - q = paddle.jit.dy2static.data_layer_not_check(name='q', shape=[-1], dtype='float32') - z = paddle.jit.dy2static.data_layer_not_check(name='z', shape=[-1], dtype='float32') + q = paddle.jit.dy2static.UndefinedVar('q') + z = paddle.jit.dy2static.UndefinedVar('z') def true_func(x, y, q): x = x+1 @@ -647,7 +647,7 @@ def create_convert_ifelse_node(return_name_ids, false_func_source = false_func.name convert_ifelse_layer = gast.parse( - '_jst.convert_ifelse(' + '_jst.IfElse(' '{pred}, {true_fn}, {false_fn}, {get_args}, {set_args}, {return_name_ids})' .format( pred=ast_to_source_code(pred), diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/list_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/list_transformer.py index 0951635162e5e6afdb4526e1b5233ee01b71c897..48fa9906828c032af2c1a3f70498a358daa01df1 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/list_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/list_transformer.py @@ -252,7 +252,7 @@ class ListTransformer(gast.NodeTransformer): # 2. pop stmt for a list or dict if len(args_str) == 1 # 3. pop stmt for a dict if len(args_str) == 2 if len(args_str) <= 2: - new_pop_str = "_jst.convert_pop({}, {})"\ + new_pop_str = "_jst.Pop({}, {})"\ .format(target_str, ",".join(args_str)) new_pop_node = gast.parse(new_pop_str).body[0].value return new_pop_node diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/logical_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/logical_transformer.py index 5cf8c610018ca7b5fe061d17295f9fd3b999588d..80f5bffe46d1bc0a4d48d6a7af55b35b0676bd6f 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/logical_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/logical_transformer.py @@ -43,7 +43,7 @@ class LogicalTransformer(gast.NodeTransformer): a = x > 1 and y < 1 Transformed code: - a = paddle.jit.dy2static.convert_logical_and(lambda:x>1, lambda:y<1) + a = _jst.And(lambda:x>1, lambda:y<1) """ def __init__(self, wrapper_root): @@ -57,7 +57,7 @@ class LogicalTransformer(gast.NodeTransformer): self.generic_visit(node) if isinstance(node.op, gast.Not): arg = ast_to_source_code(node.operand) - new_node_str = "_jst.convert_logical_not({})".format(arg) + new_node_str = "_jst.Not({})".format(arg) # NOTE: gast.parse returns Module(body=[expr(value=...)]) new_node = gast.parse(new_node_str).body[0].value return new_node @@ -66,9 +66,9 @@ def visit_BoolOp(self, node): self.generic_visit(node) if isinstance(node.op, gast.And): - new_node = self._create_bool_op_node(node.values, 'and') + new_node = self._create_bool_op_node(node.values, 'And') elif isinstance(node.op, gast.Or): - new_node = self._create_bool_op_node(node.values, 'or') + new_node = self._create_bool_op_node(node.values, 'Or') else: raise TypeError( "Only supports and/or syntax in control flow if statement.") @@ -95,7 +95,7 @@ nodes = [pre_logic_node] + [post_logic_node] args = [ast_to_source_code(child) for child in nodes] - new_node_str = "_jst.convert_logical_{}(lambda:{}, lambda:{})".format( + new_node_str = "_jst.{}(lambda:{}, lambda:{})".format( api_type, args[0], args[1]) # NOTE: gast.parse return Module(body=[expr(...)]) new_node = gast.parse(new_node_str).body[0].value diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py index 
fa401fa3e4bf8291107c62a57f0f10ab42d856da..832c502c0aa5cd582250dcf161ca3c243d931402 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/loop_transformer.py @@ -28,7 +28,7 @@ from paddle.fluid.dygraph.dygraph_to_static.utils import get_attribute_full_name from paddle.fluid.dygraph.dygraph_to_static.utils import ForLoopTuplePreTransformer from paddle.fluid.dygraph.dygraph_to_static.utils import ForNodeVisitor from paddle.fluid.dygraph.dygraph_to_static.utils import RenameTransformer -from paddle.fluid.dygraph.dygraph_to_static.variable_trans_func import create_static_variable_gast_node +from paddle.fluid.dygraph.dygraph_to_static.variable_trans_func import create_fill_constant_node __all__ = ['LoopTransformer', 'NameVisitor'] @@ -89,7 +89,7 @@ def create_while_nodes(condition_name, body_name, loop_var_names): else: assign_loop_var_names.append(name) - while_func_name = "_jst.convert_while_loop" + while_func_name = "_jst.While" while_node_str = "[{}] = {}({}, {}, [{}])".format( ",".join(assign_loop_var_names), while_func_name, condition_name, body_name, ",".join(loop_var_names)) @@ -672,7 +672,7 @@ class LoopTransformer(gast.NodeTransformer): # We need to create static variable for those variables for name in create_var_names: if "." not in name: - new_stmts.append(create_static_variable_gast_node(name)) + new_stmts.append(create_fill_constant_node(name)) # 4. append init statements new_stmts.extend(init_stmts) @@ -756,7 +756,7 @@ class LoopTransformer(gast.NodeTransformer): # We need to create static variable for those variables for name in create_var_names: if "." not in name: - new_stmts.append(create_static_variable_gast_node(name)) + new_stmts.append(create_fill_constant_node(name)) condition_func_node = gast.FunctionDef( name=unique_name.generate(WHILE_CONDITION_PREFIX), diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/print_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/print_transformer.py index f045d01c99bab018afa193ec00cc22106ec7d776..d7a889ad2fc9c641be3dfecfef7bf226e3e6f33f 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/print_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/print_transformer.py @@ -50,5 +50,5 @@ class PrintTransformer(gast.NodeTransformer): return gast.Expr(value=convert_print_node) def _create_print_node(self, print_args): - convert_print_func = gast.parse('_jst.convert_print').body[0].value + convert_print_func = gast.parse('_jst.Print').body[0].value return gast.Call(func=convert_print_func, args=print_args, keywords=[]) diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/tensor_shape_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/tensor_shape_transformer.py index 9c19b9fc257aace9126f0c1b35a185e26e43297a..b7a2087d1f24d893fa7af4ee45c6011f23bebf09 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/tensor_shape_transformer.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/tensor_shape_transformer.py @@ -14,22 +14,16 @@ from __future__ import print_function -import copy from paddle.utils import gast -from paddle.fluid import unique_name from paddle.fluid.dygraph.dygraph_to_static.utils import ast_to_source_code -from paddle.fluid.dygraph.dygraph_to_static.utils import slice_is_num -from paddle.fluid.dygraph.dygraph_to_static.utils import is_paddle_api -from paddle.fluid.dygraph.dygraph_to_static.utils import SplitAssignTransformer from paddle.fluid.dygraph.dygraph_to_static.static_analysis import 
AstNodeWrapper -from paddle.fluid.dygraph.dygraph_to_static.static_analysis import StaticAnalysisVisitor class TensorShapeTransformer(gast.NodeTransformer): """ This class transforms variable.shape into Static Graph Ast. - All 'xxx.shape' will be converted int '_jst.convert_shape(x)'. + All 'xxx.shape' will be converted int '_jst.Shape(x)'. """ def __init__(self, wrapper_root): @@ -48,7 +42,7 @@ class TensorShapeTransformer(gast.NodeTransformer): # NOTE(dev): we can deal with paddle.shape in this case, but it's # not pretty to modify into 'convert_shape(paddle)(x)[0]'. if args != 'paddle': - convert_shape_func = "_jst.convert_shape({})".format(args) + convert_shape_func = "_jst.Shape({})".format(args) shape_node = gast.parse(convert_shape_func).body[0].value return shape_node return node diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/utils.py b/python/paddle/fluid/dygraph/dygraph_to_static/utils.py index 2df8169a3efe1b5ceefbb6ab0656104873d1e3ae..8dd11c06e463fe74aa3ee0bd7404eb7136aaca48 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/utils.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/utils.py @@ -1178,7 +1178,7 @@ class ForNodeVisitor(object): else: iter_var_name = ast_to_source_code(self.iter_node).strip() - convert_len_node_source_str = '{} = _jst.convert_len({})'.format( + convert_len_node_source_str = '{} = _jst.Len({})'.format( self.iter_var_len_name, iter_var_name) convert_len_node = gast.parse(convert_len_node_source_str).body[0] diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py b/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py index e823813acaacbd974f36f51d85a5af63262fe0d1..92ef7a3f13d9bb86024299b871800a266680a420 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/variable_trans_func.py @@ -23,57 +23,11 @@ from paddle.fluid.framework import Variable from paddle.fluid.layer_helper import LayerHelper __all__ = [ - 'create_bool_as_type', 'create_fill_constant_node', - 'create_static_variable_gast_node', 'data_layer_not_check', - 'to_static_variable', 'to_static_variable_gast_node', 'create_undefined_var' + 'create_bool_as_type', 'create_fill_constant_node', 'to_static_variable', + 'create_undefined_var' ] -def data_layer_not_check(name, shape, dtype='float32', lod_level=0): - """ - This function creates a Tensor on the global block. The created Tensor - doesn't check the dtype and the shape of feed data because dygraph input - data can be various-length. This API is used in translating dygraph into - static graph. - - Note: - The default :code:`stop_gradient` attribute of the Tensor created by - this API is true, which means the gradient won't be passed backward - through the data Tensor. Set :code:`var.stop_gradient = False` If - user would like to pass backward gradient. - - Args: - name (str): The name/alias of the Tensor, see :ref:`api_guide_Name` - for more details. - shape (list|tuple): List|Tuple of integers declaring the shape. You can - set "None" at a dimension to indicate the dimension can be of any - size. For example, it is useful to set changeable batch size as "None" - dtype (np.dtype|VarType|str, optional): The type of the data. Supported - dtype: bool, float16, float32, float64, int8, int16, int32, int64, - uint8. Default: float32 - lod_level (int, optional): The LoD level of the LoDTensor. Usually users - don't have to set this value. 
For more details about when and how to - use LoD level, see :ref:`user_guide_lod_tensor` . Default: 0 - - Returns: - Tensor: The global Tensor that gives access to the data. - """ - helper = LayerHelper('data', **locals()) - shape = list(shape) - for i in six.moves.range(len(shape)): - if shape[i] is None: - shape[i] = -1 - - return helper.create_global_variable(name=name, - shape=shape, - dtype=dtype, - type=core.VarDesc.VarType.LOD_TENSOR, - stop_gradient=True, - lod_level=lod_level, - is_data=True, - need_check_feed=False) - - def create_undefined_var(name): func_code = "{} = _jst.UndefinedVar('{}')".format(name, name) return gast.parse(func_code).body[0] @@ -85,18 +39,7 @@ def create_nonlocal_stmt_node(names): return gast.parse(func_code).body[0] -def to_static_variable_gast_node(name): - func_code = "{} = _jst.to_static_variable({})".format(name, name) - return gast.parse(func_code).body[0] - - -def create_static_variable_gast_node(name): - func_code = "{} = _jst.data_layer_not_check(name='{}', shape=[-1], dtype='float32')".format( - name, unique_name.generate(name)) - return gast.parse(func_code).body[0] - - -def create_fill_constant_node(name, value): +def create_fill_constant_node(name, value=0): func_code = "{} = paddle.full(shape=[1], ".format(name) if isinstance(value, bool): func_code += "dtype='bool', fill_value={}, name='{}')".format( @@ -121,7 +64,6 @@ def to_static_variable(x): return paddle.full(shape=[1], dtype='bool', fill_value=x) if isinstance(x, float): return paddle.full(shape=[1], dtype='float64', fill_value=x) - if isinstance(x, six.integer_types): return paddle.full(shape=[1], dtype='int64', fill_value=x) diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/ifelse_simple_func.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/ifelse_simple_func.py index b5ba4c89ee2e3c0d8b03a514b0ce169a9dcfcf32..34264cac8a1be6b54d4667741adaac0c52b29148 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/ifelse_simple_func.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/ifelse_simple_func.py @@ -72,10 +72,8 @@ def dyfunc_with_if_else3(x): # The var is created only in one of If.body or If.orelse node, and it used as gast.Load firstly after gast.If node. # The transformed code: """ - q = paddle.jit.dy2static. - data_layer_not_check(name='q', shape=[-1], dtype='float32') - z = paddle.jit.dy2static. 
- data_layer_not_check(name='z', shape=[-1], dtype='float32') + q = paddle.jit.dy2static.UndefinedVar('q') + z = paddle.jit.dy2static.UndefinedVar('z') def true_fn_0(q, x, y): x = x + 1 diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_convert_call.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_convert_call.py index 38746337ce3cd59346d414d4a46b6f47c7a4e017..136d2d37db800b3bc1e9ad0b1265e92401e0b1d2 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_convert_call.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_convert_call.py @@ -266,7 +266,7 @@ class TestDynamicToStaticCode(unittest.TestCase): return get_source_code(self.answer_func) def _get_transformed_code(self): - transformed_func = _jst.convert_call(self.func) + transformed_func = _jst.Call(self.func) return get_source_code(transformed_func) def test_code(self): @@ -289,7 +289,7 @@ class TestDynamicToStaticCode2(TestDynamicToStaticCode): class StaticCode(): def func_convert_then_not_to_static(x): - y = _jst.convert_call(func_not_to_static)(x) + y = _jst.Call(func_not_to_static)(x) return y self.answer_func = StaticCode.func_convert_then_not_to_static diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_list.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_list.py index 55dff1c92bb208a8b0230b2aefa99fea1d7e9c39..f573960b5dba0a7c207bb8e427e812e85815352f 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_list.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_list.py @@ -277,6 +277,7 @@ class TestListInWhileLoop(TestListWithoutControlFlow): with fluid.dygraph.guard(): if to_static: + print(declarative(self.dygraph_func).code) res = declarative(self.dygraph_func)(self.input, self.iter_num) else: res = self.dygraph_func(self.input, self.iter_num) diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_program_translator.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_program_translator.py index 75bac135424aae50c09937b5dcd5c54f9fec9b6f..41968278f7bc062268cfa1453bd02c36785d6193 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_program_translator.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_program_translator.py @@ -90,7 +90,7 @@ class StaticCode1(): x_v = x_v + 1 return x_v - _jst.convert_ifelse( + _jst.IfElse( fluid.layers.mean(x_v)[0] > 5, true_fn_0, false_fn_0, get_args_0, set_args_0, ('x_v', )) @@ -115,8 +115,8 @@ class StaticCode1(): __return_value_0 = x_v return __return_value_0 - _jst.convert_ifelse(label is not None, true_fn_1, false_fn_1, - get_args_1, set_args_1, ('__return_value_0', )) + _jst.IfElse(label is not None, true_fn_1, false_fn_1, get_args_1, + set_args_1, ('__return_value_0', )) return __return_value_0 @@ -147,7 +147,7 @@ class StaticCode2(): x_v = x_v + 1 return x_v - _jst.convert_ifelse( + _jst.IfElse( fluid.layers.mean(x_v)[0] > 5, true_fn_2, false_fn_2, get_args_2, set_args_2, ('x_v', )) @@ -172,8 +172,8 @@ class StaticCode2(): __return_value_1 = x_v return __return_value_1 - _jst.convert_ifelse(label is not None, true_fn_3, false_fn_3, - get_args_3, set_args_3, ('__return_value_1', )) + _jst.IfElse(label is not None, true_fn_3, false_fn_3, get_args_3, + set_args_3, ('__return_value_1', )) return __return_value_1 diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_tensor_shape.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_tensor_shape.py index 
54c7866503cb23e570510445a81d259142e490cb..9b1cde6dcc5e11c271979e9e3985ee8742cd28a6 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_tensor_shape.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_tensor_shape.py @@ -275,7 +275,6 @@ class TestTensorShapeBasic(unittest.TestCase): self.expected_slice_op_num = 0 def _compute_op_num(self, program): - print(program) self.op_num = sum([len(block.ops) for block in program.blocks]) self.shape_op_num = 0 self.slice_op_num = 0 diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_variable_trans_func.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_variable_trans_func.py index 87379669714ba98373d87af31aaaef465a9391e8..4a064e91b24a99c907fe4d586a38b8d0c5583a5f 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_variable_trans_func.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_variable_trans_func.py @@ -22,30 +22,6 @@ import paddle.fluid as fluid from paddle.fluid.dygraph.dygraph_to_static.utils import ast_to_source_code from paddle.fluid.dygraph.dygraph_to_static.variable_trans_func import create_fill_constant_node -from paddle.fluid.dygraph.dygraph_to_static.variable_trans_func import data_layer_not_check - - -class TestDataLayerNotCheck(unittest.TestCase): - - def test_create_none_shape(self): - main_program = fluid.Program() - with fluid.program_guard(main_program): - d = data_layer_not_check(name="d", shape=(None, -1, 3)) - self.assertEqual(d.shape, (-1, -1, 3)) - self.assertEqual(d.name, "d") - - def test_feed_mismatch_shape(self): - main_program = fluid.Program() - with fluid.program_guard(main_program): - d = data_layer_not_check(name="d", shape=(1, 2, 3)) - feed_in_data = np.random.uniform(size=[1, 2, 4]).astype(np.float32) - place = fluid.CUDAPlace( - 0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace() - exe = fluid.Executor(place) - ret = exe.run(main_program, - feed={d.name: feed_in_data}, - fetch_list=[d.name]) - self.assertTrue(np.allclose(ret, feed_in_data)) class TestVariableTransFunc(unittest.TestCase): diff --git a/python/paddle/jit/dy2static/__init__.py b/python/paddle/jit/dy2static/__init__.py index 7f20c00024e558f5076e956c965a587b3431e7b6..0a51a3e265ede56e0362b40f8cb289fe88178042 100644 --- a/python/paddle/jit/dy2static/__init__.py +++ b/python/paddle/jit/dy2static/__init__.py @@ -14,25 +14,21 @@ from .base import saw from .base import UndefinedVar -from .convert_call_func import convert_call # noqa: F401 -from .convert_operators import cast_bool_if_necessary # noqa: F401 -from .convert_operators import convert_assert # noqa: F401 -from .convert_operators import convert_ifelse # noqa: F401 -from .convert_operators import convert_len # noqa: F401 -from .convert_operators import convert_logical_and # noqa: F401 -from .convert_operators import convert_logical_not # noqa: F401 -from .convert_operators import convert_logical_or # noqa: F401 -from .convert_operators import convert_pop # noqa: F401 -from .convert_operators import convert_print # noqa: F401 -from .convert_operators import convert_shape_compare # noqa: F401 -from .convert_operators import convert_var_dtype # noqa: F401 -from .convert_operators import convert_shape # noqa: F401 -from .convert_operators import convert_while_loop # noqa: F401 +from .convert_operators import convert_logical_and as And # noqa: F401 +from .convert_operators import convert_var_dtype as AsDtype # noqa: F401 +from .convert_operators import convert_assert as Assert # noqa: F401 +from 
.convert_call_func import convert_call as Call # noqa: F401 +from .convert_operators import convert_ifelse as IfElse # noqa: F401 +from .convert_operators import convert_len as Len # noqa: F401 +from .convert_operators import convert_logical_not as Not # noqa: F401 +from .convert_operators import convert_logical_or as Or # noqa: F401 +from .convert_operators import convert_pop as Pop # noqa: F401 +from .convert_operators import convert_print as Print # noqa: F401 +from .convert_operators import convert_shape as Shape # noqa: F401 +from .convert_operators import convert_while_loop as While # noqa: F401 + from .variable_trans_func import create_bool_as_type # noqa: F401 -from .variable_trans_func import create_fill_constant_node # noqa: F401 -from .variable_trans_func import create_static_variable_gast_node # noqa: F401 -from .variable_trans_func import data_layer_not_check # noqa: F401 from .variable_trans_func import to_static_variable # noqa: F401 -from .variable_trans_func import to_static_variable_gast_node # noqa: F401 +from .convert_operators import convert_shape_compare # noqa: F401 __all__ = [] diff --git a/python/paddle/jit/dy2static/variable_trans_func.py b/python/paddle/jit/dy2static/variable_trans_func.py index 9ce2bc2da381655e65225397831faa228c613ca6..49a0d6a31e5914a5d166339e984171e981008abb 100644 --- a/python/paddle/jit/dy2static/variable_trans_func.py +++ b/python/paddle/jit/dy2static/variable_trans_func.py @@ -15,10 +15,6 @@ from __future__ import print_function from ...fluid.dygraph.dygraph_to_static.variable_trans_func import create_bool_as_type # noqa: F401 -from ...fluid.dygraph.dygraph_to_static.variable_trans_func import create_fill_constant_node # noqa: F401 -from ...fluid.dygraph.dygraph_to_static.variable_trans_func import create_static_variable_gast_node # noqa: F401 -from ...fluid.dygraph.dygraph_to_static.variable_trans_func import data_layer_not_check # noqa: F401 from ...fluid.dygraph.dygraph_to_static.variable_trans_func import to_static_variable # noqa: F401 -from ...fluid.dygraph.dygraph_to_static.variable_trans_func import to_static_variable_gast_node # noqa: F401 __all__ = []
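
For readers tracking the rename, the mapping below is a quick-reference sketch derived from the updated python/paddle/jit/dy2static/__init__.py above; it simply restates the alias imports in that file. The assert loop is a hypothetical sanity check and assumes a Paddle build that already contains this patch.

import paddle.jit.dy2static as _jst  # assumes a build that includes this change

# Old public helper names mapped to the short aliases that the
# dygraph-to-static transformers now emit (taken from __init__.py above).
OLD_TO_NEW = {
    "convert_logical_and": "And",
    "convert_var_dtype": "AsDtype",
    "convert_assert": "Assert",
    "convert_call": "Call",
    "convert_ifelse": "IfElse",
    "convert_len": "Len",
    "convert_logical_not": "Not",
    "convert_logical_or": "Or",
    "convert_pop": "Pop",
    "convert_print": "Print",
    "convert_shape": "Shape",
    "convert_while_loop": "While",
}

# Every short alias should now be an attribute of paddle.jit.dy2static.
for old, new in OLD_TO_NEW.items():
    assert hasattr(_jst, new), "missing alias: {} -> {}".format(old, new)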
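
As a rough before/after illustration of what the transformers emit with the new names, the snippets below are adapted from the LogicalTransformer docstring, the TensorShapeTransformer docstring, and the updated test expectations in this diff. The variable names (x, y, a, s) are hypothetical, and the pairs are kept as plain source strings so the example stays self-contained rather than claiming to be real generated output.

# Logical operators: `and` / `or` / `not` become lazy _jst.And / _jst.Or / _jst.Not calls.
LOGICAL_BEFORE = "a = x > 1 and y < 1"
LOGICAL_AFTER = "a = _jst.And(lambda: x > 1, lambda: y < 1)"

# Tensor shape access: 'xxx.shape' reads are routed through _jst.Shape.
SHAPE_BEFORE = "s = x.shape"
SHAPE_AFTER = "s = _jst.Shape(x)"

# Function calls: callees are wrapped by _jst.Call before being invoked.
CALL_BEFORE = "y = func_not_to_static(x)"
CALL_AFTER = "y = _jst.Call(func_not_to_static)(x)"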