From 67d03bed70ffca44bfca7fb9a952b96af51b6cf7 Mon Sep 17 00:00:00 2001
From: Aurelius84
Date: Wed, 26 Aug 2020 14:00:18 +0800
Subject: [PATCH] Fix jit.to_static usage (#26682)

---
 .../fluid/dygraph/dygraph_to_static/ast_transformer.py     | 2 +-
 .../fluid/tests/unittests/dygraph_to_static/test_assert.py | 3 ++-
 .../fluid/tests/unittests/dygraph_to_static/test_bmn.py    | 6 +++---
 .../fluid/tests/unittests/dygraph_to_static/test_dict.py   | 4 ++--
 .../fluid/tests/unittests/dygraph_to_static/test_mnist.py  | 7 +++----
 5 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/ast_transformer.py b/python/paddle/fluid/dygraph/dygraph_to_static/ast_transformer.py
index f859d40050..8297f16f60 100644
--- a/python/paddle/fluid/dygraph/dygraph_to_static/ast_transformer.py
+++ b/python/paddle/fluid/dygraph/dygraph_to_static/ast_transformer.py
@@ -38,7 +38,7 @@ from paddle.fluid.dygraph.dygraph_to_static.utils import get_attribute_full_name
 
 __all__ = ['DygraphToStaticAst']
 
-DECORATOR_NAMES = ['declarative', 'dygraph_to_static_func']
+DECORATOR_NAMES = ['declarative', 'to_static', 'dygraph_to_static_func']
 
 
 class DygraphToStaticAst(gast.NodeTransformer):
diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_assert.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_assert.py
index 68e6f32872..d4646833ea 100644
--- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_assert.py
+++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_assert.py
@@ -17,12 +17,13 @@ from __future__ import print_function
 import numpy
 import unittest
 
+import paddle
 import paddle.fluid as fluid
 from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
 from paddle.fluid.dygraph.jit import declarative
 
 
-@declarative
+@paddle.jit.to_static
 def dyfunc_assert_variable(x):
     x_v = fluid.dygraph.to_variable(x)
     assert x_v
diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_bmn.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_bmn.py
index a8cef6e28a..dd58a49bb5 100644
--- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_bmn.py
+++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_bmn.py
@@ -15,11 +15,11 @@
 import math
 import numpy as np
 import unittest
-
+from paddle.jit import to_static
 import paddle.fluid as fluid
 from paddle.fluid import ParamAttr
 from paddle.fluid.dygraph import to_variable
-from paddle.fluid.dygraph import declarative, ProgramTranslator
+from paddle.fluid.dygraph import ProgramTranslator
 from paddle.fluid.dygraph.io import VARIABLE_FILENAME
 
 from predictor_utils import PredictorTools
@@ -242,7 +242,7 @@ class BMN(fluid.dygraph.Layer):
             param_attr=ParamAttr(name="PEM_2d4_w"),
             bias_attr=ParamAttr(name="PEM_2d4_b"))
 
-    @declarative
+    @to_static
     def forward(self, x):
         # Base Module
         x = self.b_conv1(x)
diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_dict.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_dict.py
index c8051b3f24..af1e44ffe2 100644
--- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_dict.py
+++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_dict.py
@@ -19,7 +19,7 @@ import numpy as np
 import unittest
 
 import paddle.fluid as fluid
-from paddle.fluid.dygraph.jit import declarative
+from paddle.jit import to_static
 from paddle.fluid.dygraph.dygraph_to_static.program_translator import ProgramTranslator
 
 PLACE = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace(
@@ -76,7 +76,7 @@ class MainNetWithDict(fluid.dygraph.Layer):
         self.output_size = output_size
         self.sub_net = SubNetWithDict(hidden_size, output_size)
 
-    @declarative
+    @to_static
     def forward(self, input, max_len=4):
         input = fluid.dygraph.to_variable(input)
         cache = {
diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_mnist.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_mnist.py
index 8851374904..1ef3bd1bf1 100644
--- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_mnist.py
+++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_mnist.py
@@ -25,7 +25,6 @@ from paddle.fluid.dygraph.base import switch_to_static_graph
 from paddle.fluid.dygraph import to_variable
 from paddle.fluid.dygraph.nn import Conv2D, Linear, Pool2D
 from paddle.fluid.optimizer import AdamOptimizer
-from paddle.fluid.dygraph.jit import declarative
 from paddle.fluid.dygraph.io import VARIABLE_FILENAME
 from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
 
@@ -102,7 +101,7 @@ class MNIST(fluid.dygraph.Layer):
                     loc=0.0, scale=scale)),
             act="softmax")
 
-    @declarative
+    @paddle.jit.to_static
     def forward(self, inputs, label=None):
         x = self.inference(inputs)
         if label is not None:
@@ -134,7 +133,7 @@ class TestMNIST(unittest.TestCase):
             drop_last=True)
 
 
-class TestMNISTWithDeclarative(TestMNIST):
+class TestMNISTWithToStatic(TestMNIST):
     """
     Tests model if doesn't change the layers while decorated
     by `dygraph_to_static_output`. In this case, everything should
@@ -147,7 +146,7 @@ class TestMNISTWithDeclarative(TestMNIST):
     def train_dygraph(self):
        return self.train(to_static=False)
 
-    def test_mnist_declarative(self):
+    def test_mnist_to_static(self):
        dygraph_loss = self.train_dygraph()
        static_loss = self.train_static()
        self.assertTrue(
-- 
GitLab
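
For readers applying the same migration in their own code, the sketch below (not part of the patch) shows the two decorator spellings the tests switch to. It assumes a Paddle build that already exposes paddle.jit.to_static (the 2.0 beta line this commit targets); dyfunc_assert_variable mirrors the body from test_assert.py above, while the double helper is hypothetical and only illustrates the short alias form.

# Sketch only: illustrates the decorator forms adopted by this patch.
# Assumes a Paddle build that exposes paddle.jit.to_static.
import numpy

import paddle
import paddle.fluid as fluid
from paddle.jit import to_static  # short-import style used in test_bmn.py / test_dict.py


@paddle.jit.to_static  # fully-qualified style used in test_assert.py / test_mnist.py
def dyfunc_assert_variable(x):
    # Mirrors the test above: the body is transcribed to a static program when called.
    x_v = fluid.dygraph.to_variable(x)
    assert x_v


@to_static  # hypothetical helper, shown only for the alias spelling
def double(x):
    x_v = fluid.dygraph.to_variable(x)
    return x_v * 2


if __name__ == '__main__':
    with fluid.dygraph.guard():  # run under dygraph, as the unit tests do
        dyfunc_assert_variable(numpy.array([True]))
        print(double(numpy.ones([1]).astype('float32')))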