"The `input_spec`: {} used to construct concrete_program is conflict with the `input_spec`: {} in `@paddle.jit.to_static`".format(
input_spec,self._function_spec.input_spec
)
)
            # NOTE(chenweihang): we should always translate the program based on the
            # `input_spec` decorated on forward if it is valid.
            desired_input_spec = self._function_spec.input_spec
            if input_spec is not None:
                logging_utils.warn(
                    "\n\nYou have specified `input_spec` both in function definition (higher priority) and `paddle.jit.save` (will be ignored.)\n\n\t Using: {}\n\n\t Ignore: {}\n".format(
                        desired_input_spec, input_spec
                    )
                )
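
        # `desired_input_spec` now prefers the spec declared on the decorated function;
        # it may still be None if neither the decorator nor the caller supplied one.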
        has_input_spec = desired_input_spec is not None
        if has_input_spec:
            concrete_program, _ = self.get_concrete_program(
                *desired_input_spec,
                with_hook=with_hook,
                is_train=self._is_train_mode(),
                is_prim_infer=is_prim_infer,
            )
            return concrete_program
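        # Neither the decorator nor the caller supplied an `input_spec`, so fall back to
        # whatever programs have already been traced by running the decorated function.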
        else:
            if cached_program_len != 0:
                logging_utils.warn(
                    "No input_spec is found, save cached program instead"
                )
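            # If nothing has been traced yet either, there is no program that could be saved.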
            else:
                raise ValueError(
                    "No valid transformed program for {}.\n\t Please specify `input_spec` in `@paddle.jit.to_static` or feed input tensors by calling the decorated function once.\n".format(
                        self._function_spec
                    )
                )

        if cached_program_len > 1:
            logging_utils.warn(
                "Current {} has more than one cached program: {}; the last traced program will be returned by default.".format(