diff --git a/python/paddle/fluid/tests/unittests/test_python_bf16_numpy_datatype.py b/python/paddle/fluid/tests/unittests/test_python_bf16_numpy_datatype.py
new file mode 100644
index 0000000000000000000000000000000000000000..a58d7d35807c666ff16f50a1fe88d5064b10d161
--- /dev/null
+++ b/python/paddle/fluid/tests/unittests/test_python_bf16_numpy_datatype.py
@@ -0,0 +1,34 @@
+# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import numpy as np
+from paddle_bfloat import bfloat16
+import unittest
+
+
+class TestBF16DataType(unittest.TestCase):
+    def test_matmul(self):
+        a_bf16 = np.random.random((6, 7)).astype(bfloat16)
+        b_bf16 = np.random.random((7, 8)).astype(bfloat16)
+        c_bf16 = np.matmul(a_bf16, b_bf16)
+
+        a_fp32 = a_bf16.astype(np.float32)
+        b_fp32 = b_bf16.astype(np.float32)
+        c_fp32 = np.matmul(a_fp32, b_fp32)
+
+        self.assertTrue(np.allclose(c_bf16, c_fp32))
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/python/requirements.txt b/python/requirements.txt
index f2a4580a94e51fd400293ce64f59e9db4b1ff7ed..5f2b788a81a0ad5b8150ee065602e7b643591ea2 100644
--- a/python/requirements.txt
+++ b/python/requirements.txt
@@ -5,3 +5,4 @@ Pillow
 six
 decorator
 astor
+paddle_bfloat==0.1.2
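
For reference, here is a minimal sketch (not part of the patch) of how the `bfloat16` dtype from `paddle_bfloat` interoperates with NumPy, assuming `paddle_bfloat==0.1.2` registers `bfloat16` as a NumPy-compatible dtype, as the new test relies on. Because bfloat16 keeps float32's 8-bit exponent but only ~8 mantissa bits, its relative rounding error is on the order of 2**-8, so comparisons against float32 results generally need a looser tolerance than `np.allclose`'s defaults.

```python
# Minimal sketch, not part of the patch. Assumes paddle_bfloat==0.1.2 exposes
# `bfloat16` as a NumPy-compatible dtype (as the new unit test relies on).
import numpy as np
from paddle_bfloat import bfloat16

x_fp32 = np.array([1.0, 0.1, 3.14159, 123.456], dtype=np.float32)
x_bf16 = x_fp32.astype(bfloat16)        # rounds to ~8 mantissa bits
roundtrip = x_bf16.astype(np.float32)   # bfloat16 -> float32 is exact

# With ~8 mantissa bits the relative rounding error is about 2**-8 (~0.4%),
# so compare with a correspondingly loose tolerance rather than the
# np.allclose default of rtol=1e-5.
assert np.allclose(roundtrip, x_fp32, rtol=2**-7, atol=0.0)
print(x_bf16)
print(roundtrip)
```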