调用 save_inference_model 保存模型报错 (calling save_inference_model to save the model raises an error)
Created by: lgone2000
def testsaveinference1():
    """Minimal repro: fluid.io.save_inference_model fails for this graph.

    Builds a one-input network, runs it once, then tries to save it as an
    inference model.  With a ``fluid.layers.fc`` output the save works; with
    the plain elementwise ``a + 1`` output it raises the reported error.

    Side effects: writes ``test/model`` and ``test/params`` on success.
    Assumes ``fluid`` (paddle.fluid) and ``np`` (numpy) are imported at
    module level.
    """
    # Placeholder input: shape [-1, 1] with append_batch_size=False, i.e. the
    # batch dimension is already part of the declared shape.
    a = fluid.layers.data(name='a', shape=[-1, 1], dtype='float32',
                          append_batch_size=False)

    # fc works:
    # d = fluid.layers.fc(a, size=1)
    # ...but replacing it with the elementwise add below triggers the error.
    d = a + 1

    place = fluid.CPUPlace()
    exe = fluid.Executor(place)
    exe.run(fluid.default_startup_program())

    # Run the graph once so the error is isolated to the save step, not the
    # forward pass itself.
    output = exe.run(fluid.default_main_program(),
                     feed={'a': np.ones([1, 1], np.float32)},
                     fetch_list=[d])
    print(output)  # Python 3 print function (original used the py2 statement)

    fluid.io.save_inference_model(
        dirname='test',
        feeded_var_names=['a'],
        target_vars=[d],
        executor=exe,
        main_program=fluid.default_main_program(),
        model_filename='model',
        params_filename='params')