From 69cfb7a27b354d2771159e21a1f1b1ebc365dafc Mon Sep 17 00:00:00 2001
From: 0x45f <23097963+0x45f@users.noreply.github.com>
Date: Mon, 20 Dec 2021 12:08:42 +0800
Subject: [PATCH] [Dy2St]Skip windows for test_mnist_pure_fp16 (#38214)

---
 .../tests/unittests/dygraph_to_static/test_mnist_pure_fp16.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_mnist_pure_fp16.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_mnist_pure_fp16.py
index 1860362896..029e3e9a53 100644
--- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_mnist_pure_fp16.py
+++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_mnist_pure_fp16.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import os
 import paddle
 import unittest
 import numpy as np
@@ -32,7 +33,7 @@ class TestPureFP16(TestMNIST):
         return self.train(to_static=False)
 
     def test_mnist_to_static(self):
-        if paddle.fluid.is_compiled_with_cuda():
+        if paddle.fluid.is_compiled_with_cuda() and os.name != 'nt':
             dygraph_loss = self.train_dygraph()
             static_loss = self.train_static()
             # NOTE: In pure fp16 training, loss is not stable, so we enlarge atol here.
--
GitLab
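For reference, the gating pattern the patch introduces can be shown in isolation. The sketch below is a minimal, self-contained illustration, not Paddle's actual test code: it drops the `paddle.fluid.is_compiled_with_cuda()` half of the condition so it runs without Paddle installed, and `TestPureFP16Sketch` with its dummy `train` is a hypothetical stand-in for the real `TestMNIST`/`TestPureFP16` classes. On Windows `os.name` is `'nt'`, so the inline guard (the approach the patch takes) makes the test body a no-op there; the `unittest.skipIf` variant has the same effect but reports the test as skipped rather than trivially passed.

```python
import os
import unittest


class TestPureFP16Sketch(unittest.TestCase):
    """Hypothetical stand-in for the real TestMNIST/TestPureFP16 classes."""

    def train(self, to_static):
        # Placeholder for the real training loop; returns a dummy loss.
        return 0.0

    def test_inline_guard(self):
        # Same guard the patch adds: os.name is 'nt' on Windows, so the
        # body is silently skipped there and the test still "passes".
        if os.name != 'nt':
            dygraph_loss = self.train(to_static=False)
            static_loss = self.train(to_static=True)
            # Pure fp16 loss is noisy, so compare with a loose tolerance.
            self.assertAlmostEqual(dygraph_loss, static_loss, delta=1e-3)

    @unittest.skipIf(os.name == 'nt', "pure fp16 training is unstable on Windows")
    def test_skipif_variant(self):
        # Equivalent effect via the decorator, but the runner marks the
        # test as skipped on Windows instead of passing vacuously.
        self.assertAlmostEqual(
            self.train(to_static=False), self.train(to_static=True), delta=1e-3)


if __name__ == '__main__':
    unittest.main()
```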