diff --git a/PaddleNLP/pretrain_language_models/BERT/utils/init.py b/PaddleNLP/pretrain_language_models/BERT/utils/init.py
index b6d15f9b04db9c8fb10137737b617445b3c78230..091e5c34e156922d94844daa215b83917b059d49 100644
--- a/PaddleNLP/pretrain_language_models/BERT/utils/init.py
+++ b/PaddleNLP/pretrain_language_models/BERT/utils/init.py
@@ -30,7 +30,7 @@ def cast_fp32_to_fp16(exe, main_program):
             param_t = fluid.global_scope().find_var(param.name).get_tensor()
             data = np.array(param_t)
             if param.name.find("layer_norm") == -1:
-                param_t.set(np.float16(data).view(np.uint16), exe.place)
+                param_t.set(np.float16(data), exe.place)
             master_param_var = fluid.global_scope().find_var(param.name +
                                                              ".master")
             if master_param_var is not None:
diff --git a/PaddleNLP/pretrain_language_models/XLNet/utils/init.py b/PaddleNLP/pretrain_language_models/XLNet/utils/init.py
index d495a42edffe644389d393b657d2c2e64bb5df0d..61e86a5d9f9552972ca1c83c0ed80cc8d32d37a1 100644
--- a/PaddleNLP/pretrain_language_models/XLNet/utils/init.py
+++ b/PaddleNLP/pretrain_language_models/XLNet/utils/init.py
@@ -32,7 +32,7 @@ def cast_fp32_to_fp16(exe, main_program):
             if param.name.find("layer_norm") == -1 and param.name.find(
                     "embedding") == -1:
                 print("shkip params", param.name)
-                param_t.set(np.float16(data).view(np.uint16), exe.place)
+                param_t.set(np.float16(data), exe.place)
             master_param_var = fluid.global_scope().find_var(param.name +
                                                              ".master")
            if master_param_var is not None:
diff --git a/dygraph/bert/utils/init.py b/dygraph/bert/utils/init.py
index 6b69d87e7d3a877cccedd7fd5f42788279ef1470..e0963eef8c815c30ea741a67fc61eda076fa7870 100644
--- a/dygraph/bert/utils/init.py
+++ b/dygraph/bert/utils/init.py
@@ -30,7 +30,7 @@ def cast_fp32_to_fp16(exe, main_program):
             param_t = fluid.global_scope().find_var(param.name).get_tensor()
             data = np.array(param_t)
             if param.name.find("layer_norm") == -1:
-                param_t.set(np.float16(data).view(np.uint16), exe.place)
+                param_t.set(np.float16(data), exe.place)
             master_param_var = fluid.global_scope().find_var(param.name +
                                                              ".master")
             if master_param_var is not None: