Commit 57c02ed1 authored by zhhsplendid

Modify paths, test=develop

Parent c2a5f5a8
@@ -78,7 +78,7 @@ class StaticCode1():
x_v = x_v + 1
return x_v
x_v = paddle.jit.dygraph_to_static.convert_operators.convert_ifelse(
x_v = paddle.jit.dy2static.convert_ifelse(
fluid.layers.mean(x_v)[0] > 5, true_fn_0, false_fn_0, (x_v, ),
(x_v, ), (x_v, ))
@@ -92,8 +92,7 @@ class StaticCode1():
def false_fn_1(__return_0, __return_value_0):
return __return_0, __return_value_0
__return_0, __return_value_0 = (
paddle.jit.dygraph_to_static.convert_operators.convert_ifelse(
__return_0, __return_value_0 = (paddle.jit.dy2static.convert_ifelse(
label is not None, true_fn_1, false_fn_1,
(__return_0, __return_value_0, label, x_v),
(__return_0, __return_value_0), (__return_0, __return_value_0)))
@@ -107,11 +106,9 @@ class StaticCode1():
def false_fn_2(__return_1, __return_value_0):
return __return_1, __return_value_0
__return_1, __return_value_0 = (
paddle.jit.dygraph_to_static.convert_operators.convert_ifelse(
paddle.jit.dygraph_to_static.convert_operators.
convert_logical_not(__return_0), true_fn_2, false_fn_2,
(__return_1, __return_value_0, x_v),
__return_1, __return_value_0 = (paddle.jit.dy2static.convert_ifelse(
paddle.jit.dy2static.convert_logical_not(__return_0), true_fn_2,
false_fn_2, (__return_1, __return_value_0, x_v),
(__return_1, __return_value_0), (__return_1, __return_value_0)))
return __return_value_0
@@ -133,7 +130,7 @@ class StaticCode2():
x_v = x_v + 1
return x_v
x_v = paddle.jit.dygraph_to_static.convert_operators.convert_ifelse(
x_v = paddle.jit.dy2static.convert_ifelse(
fluid.layers.mean(x_v)[0] > 5, true_fn_3, false_fn_3, (x_v, ),
(x_v, ), (x_v, ))
@@ -147,8 +144,7 @@ class StaticCode2():
def false_fn_4(__return_2, __return_value_1):
return __return_2, __return_value_1
__return_2, __return_value_1 = (
paddle.jit.dygraph_to_static.convert_operators.convert_ifelse(
__return_2, __return_value_1 = (paddle.jit.dy2static.convert_ifelse(
label is not None, true_fn_4, false_fn_4,
(__return_2, __return_value_1, label, x_v),
(__return_2, __return_value_1), (__return_2, __return_value_1)))
@@ -162,11 +158,9 @@ class StaticCode2():
def false_fn_5(__return_3, __return_value_1):
return __return_3, __return_value_1
__return_3, __return_value_1 = (
paddle.jit.dygraph_to_static.convert_operators.convert_ifelse(
paddle.jit.dygraph_to_static.convert_operators.
convert_logical_not(__return_2), true_fn_5, false_fn_5,
(__return_3, __return_value_1, x_v),
__return_3, __return_value_1 = (paddle.jit.dy2static.convert_ifelse(
paddle.jit.dy2static.convert_logical_not(__return_2), true_fn_5,
false_fn_5, (__return_3, __return_value_1, x_v),
(__return_3, __return_value_1), (__return_3, __return_value_1)))
return __return_value_1
......
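The generated static code above replaces a dynamic `if` with a call to `paddle.jit.dy2static.convert_ifelse(pred, true_fn, false_fn, ...)`, passing the predicate, both branch functions, and tuples of the variables each branch reads and returns. Below is a minimal sketch of that dispatch pattern with hypothetical branch bodies; it only covers plain Python predicates, while the real converter in the diff also lowers tensor predicates into a conditional block of the static graph and takes an extra tuple naming the returned variables.

def convert_ifelse_sketch(pred, true_fn, false_fn, true_args, false_args):
    # Simplified stand-in for paddle.jit.dy2static.convert_ifelse:
    # for a plain Python bool, call the chosen branch with the variables
    # it consumes; the real helper additionally handles Paddle Variables.
    return true_fn(*true_args) if pred else false_fn(*false_args)

def branch_true(x_v):    # hypothetical branch bodies, not taken from the diff
    return x_v - 1

def branch_false(x_v):
    return x_v + 1

x_v = 10
x_v = convert_ifelse_sketch(x_v > 5, branch_true, branch_false, (x_v,), (x_v,))
print(x_v)  # 9: the predicate 10 > 5 is True, so branch_true ran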
@@ -52,7 +52,7 @@ def dyfunc_tensor_shape_4(x):
def dyfunc_tensor_shape_5(x):
# `res = fluid.layers.reshape(x, shape=(-1, s))` to
# `res = fluid.layers.reshape(x, shape=(-1,
# paddle.jit.dygraph_to_static.convert_operators.convert_var_shape(x)[0]))`
# paddle.jit.dy2static.convert_var_shape(x)[0]))`
x = fluid.dygraph.to_variable(x)
s = x.shape[0]
res = fluid.layers.reshape(x, shape=(-1, s))
@@ -65,7 +65,7 @@ def dyfunc_with_if_1(x):
x_shape_0 = x.shape[0]
if x_shape_0 < 1:
# `res.shape[0]` is transformed into
# `paddle.jit.dygraph_to_static.convert_operators.convert_var_shape(res)[0]`
# `paddle.jit.dy2static.convert_var_shape(res)[0]`
if res.shape[0] > 1:
res = fluid.layers.fill_constant(
value=2, shape=x.shape, dtype="int32")
@@ -89,7 +89,7 @@ def dyfunc_with_if_2(x):
def dyfunc_with_for_1(x):
x = fluid.dygraph.to_variable(x)
res = fluid.layers.fill_constant(value=0, shape=[1], dtype="int32")
# `x.shape[0]` is transformed into `paddle.jit.dygraph_to_static.convert_operators.convert_var_shape(x)[0]`
# `x.shape[0]` is transformed into `paddle.jit.dy2static.convert_var_shape(x)[0]`
for i in range(x.shape[0]):
res += 1
return res
@@ -100,7 +100,7 @@ def dyfunc_with_for_2(x):
x_shape_0 = x.shape[0]
res = fluid.layers.fill_constant(value=0, shape=[1], dtype="int32")
# `x_shape_0` is transformed into `paddle.jit.dygraph_to_static.convert_operators.convert_var_shape(x)[0]`
# `x_shape_0` is transformed into `paddle.jit.dy2static.convert_var_shape(x)[0]`
for i in range(x_shape_0):
res += 1
return res
@@ -124,7 +124,7 @@ def dyfunc_with_for_3(x):
def dyfunc_with_while_1(x):
x = fluid.dygraph.to_variable(x)
res = fluid.layers.fill_constant(value=0, shape=[1], dtype="int32")
# `x.shape[0]` is transformed into `paddle.jit.dygraph_to_static.convert_operators.convert_var_shape(x)[0]`
# `x.shape[0]` is transformed into `paddle.jit.dy2static.convert_var_shape(x)[0]`
i = 1
while i < x.shape[0]:
res += 1
@@ -137,7 +137,7 @@ def dyfunc_with_while_2(x):
x_shape_0 = x.shape[0]
res = fluid.layers.fill_constant(value=0, shape=[1], dtype="int32")
i = 1
# `x_shape_0` is transformed into `paddle.jit.dygraph_to_static.convert_operators.convert_var_shape(x)[0]`
# `x_shape_0` is transformed into `paddle.jit.dy2static.convert_var_shape(x)[0]`
while i < x_shape_0:
res += 1
i = i + 2
......
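The comments in the hunks above note that shape reads such as `x.shape[0]`, and cached values like `x_shape_0`, are rewritten to `paddle.jit.dy2static.convert_var_shape(x)[0]`. The reason is that in a static graph a dimension may be unknown until runtime (reported as -1), so the converter may need a runtime shape op instead of the Python attribute. A rough sketch of that idea follows; it assumes the old `fluid.layers.shape` API and does not mirror Paddle's actual implementation.

import paddle.fluid as fluid

def convert_var_shape_sketch(x):
    # Illustrative stand-in for paddle.jit.dy2static.convert_var_shape:
    # if x is a static Variable with an unknown (-1) dimension, return a
    # runtime shape tensor; otherwise the Python-level shape is already exact.
    if isinstance(x, fluid.framework.Variable) and -1 in x.shape:
        return fluid.layers.shape(x)
    return x.shape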
@@ -23,7 +23,7 @@ from ..fluid.dygraph.jit import declarative as to_static #DEFINE_ALIAS
from ..fluid.dygraph import ProgramTranslator #DEFINE_ALIAS
from ..fluid.dygraph.io import TranslatedLayer #DEFINE_ALIAS
from . import dygraph_to_static
from . import dy2static
__all__ = [
'save', 'load', 'TracedLayer', 'to_static', 'ProgramTranslator',
......
@@ -15,6 +15,7 @@
from __future__ import print_function
from . import convert_operators
from .convert_operators import *
from . import convert_call_func
from .convert_call_func import *
@@ -23,5 +24,6 @@ from . import variable_trans_func
from .variable_trans_func import *
__all__ = []
__all__ += convert_operators.__all__
__all__ += convert_call_func.__all__
__all__ += variable_trans_func.__all__
@@ -28,6 +28,6 @@ from ...fluid.dygraph.dygraph_to_static.convert_operators import convert_while_l
__all__ = [
'cast_bool_if_necessary', 'convert_assert', 'convert_ifelse', 'convert_len',
'convert_logical_and', 'convert_logical_not', 'convert_logical_or',
'convert_logical_print', 'convert_var_dtype', 'convert_var_shape',
'convert_print', 'convert_var_dtype', 'convert_var_shape',
'convert_while_loop'
]
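The `__all__` list above also renames the exported `convert_logical_print` to `convert_print`. A quick illustrative check of the re-exported names under the new, shorter namespace, using only names that appear in this diff and assuming a Paddle build that contains this commit:

from paddle.jit import dy2static

# These names come from the __all__ lists in this diff; the package
# __init__ aggregates them via `__all__ += convert_operators.__all__`.
for name in ("convert_ifelse", "convert_var_shape", "convert_print"):
    assert name in dy2static.__all__, name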