#   Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import random
import unittest
from enum import Enum

import numpy as np

import paddle
import paddle.static

# Identity mapping from numpy dtype names to the fluid dtype strings
# accepted by paddle (the names happen to coincide).
map_np_dtype_to_fluid_dtype = {
    name: name
    for name in (
        "bool",
        "int8",
        "uint8",
        "int32",
        "int64",
        "float16",
        "float32",
        "float64",
    )
}


class ExecutionMode(Enum):
    """Backend a test case can execute on; ordered CPU -> IPU -> IPU fp16."""

    CPU_FP32 = 1
    IPU_FP32 = 2
    # fp16 is turned on via ipu_strategy.enable_fp16
    IPU_POPART_FP16 = 3

    def __lt__(self, other):
        # Members order by their numeric value.
        return self.value < other.value

    def __gt__(self, other):
        return other.value < self.value


def np_dtype_to_fluid_str(dtype: np.dtype) -> str:
    """Translate a numpy dtype object into its fluid dtype string."""
    name = dtype.name
    return map_np_dtype_to_fluid_dtype[name]


class IPUOpTest(unittest.TestCase):
    """Base class for single-op IPU unit tests.

    Seeds the RNGs for reproducibility, switches paddle into static
    graph mode, and provides tolerance defaults plus a ``check`` helper
    that compares CPU fp32, IPU fp32 and (optionally) IPU popart fp16
    outputs.
    """

    @classmethod
    def setUpClass(cls):
        # Save the global random states so tearDownClass can restore
        # them and this class's seeding does not leak into other tests.
        cls._np_rand_state = np.random.get_state()
        cls._py_rand_state = random.getstate()

        cls.SEED = 2021
        np.random.seed(cls.SEED)
        random.seed(cls.SEED)

        # Enable paddle static graph mode
        paddle.enable_static()

    @classmethod
    def tearDownClass(cls):
        """Restore random seeds"""
        np.random.set_state(cls._np_rand_state)
        random.setstate(cls._py_rand_state)

    @classmethod
    def use_ipumodel(cls):
        """Return True when the POPLAR_IPUMODEL env var enables the simulator.

        Bug fix: the original fell off the end (returning None) when the
        variable was set to a non-truthy value; now every path returns an
        explicit bool.
        """
        if 'POPLAR_IPUMODEL' not in os.environ:
            return False
        flag = os.environ['POPLAR_IPUMODEL']
        return flag.upper() in ['1', "TRUE"]

    def set_atol(self):
        # Tolerances used by `check`; fp16 comparison is much looser.
        self.atol = 1e-10
        self.rtol = 1e-6
        self.atol_fp16 = 1e-3
        self.rtol_fp16 = 1e-3

    def set_training(self):
        # Inference by default; a single epoch when training is enabled.
        self.is_training = False
        self.epoch = 1

    def check(self, outputs, check_shape=False):
        """Assert that IPU outputs match the CPU fp32 reference.

        Args:
            outputs: dict keyed by ExecutionMode; must contain CPU_FP32
                and IPU_FP32, and may contain IPU_POPART_FP16.
            check_shape: when True, also assert output shapes match.
        """
        cpu_fp32 = outputs[ExecutionMode.CPU_FP32]
        ipu_fp32 = outputs[ExecutionMode.IPU_FP32]
        max_diff = np.abs(cpu_fp32 - ipu_fp32).max()
        fp32_flag = np.allclose(
            cpu_fp32, ipu_fp32, rtol=self.rtol, atol=self.atol)
        self.assertTrue(fp32_flag, "max diff is %f" % (max_diff))

        if check_shape:
            self.assertTrue(cpu_fp32.shape == ipu_fp32.shape)

        ipu_popart_fp16 = None
        if ExecutionMode.IPU_POPART_FP16 in outputs.keys():
            ipu_popart_fp16 = outputs[ExecutionMode.IPU_POPART_FP16]
            # Compare in fp32 so the subtraction itself does not lose
            # precision beyond what the looser fp16 tolerances allow.
            max_diff = np.abs(ipu_popart_fp16.astype(np.float32) -
                              cpu_fp32).max()
            fp16_flag = np.allclose(
                ipu_popart_fp16.astype(np.float32),
                cpu_fp32,
                rtol=self.rtol_fp16,
                atol=self.atol_fp16)
            self.assertTrue(fp16_flag, "max diff is %f" % (max_diff))

            if check_shape:
                self.assertTrue(ipu_popart_fp16.shape == cpu_fp32.shape)