diff --git a/python/paddle/static/amp/fp16_utils.py b/python/paddle/static/amp/fp16_utils.py
index bfe0a146f233f42a6174bff052bdbaf91da3e20a..24a2a53d35a236572abb1e1bfaf2eee4dd3a5903 100644
--- a/python/paddle/static/amp/fp16_utils.py
+++ b/python/paddle/static/amp/fp16_utils.py
@@ -438,6 +438,9 @@ def cast_model_to_fp16(program, amp_lists=None, use_fp16_guard=True):
         "while",
         "while_grad",
         "cast",
+        "tensor_array_to_tensor",
+        "lod_array_length",
+        "write_to_array",
     }
     global_block = program.global_block()
     keep_fp32_ops = set()