Failed to load model file with the Paddle C API: getting "Invoke paddle error 3" in paddle_gradient_machine_create_for_inference(&gradient_machine, file_buf, file_size)
Created by: kelvict
We saved a model with the paddle.v2 Python API. We can load it back with the Python API, but loading it through the Paddle C API fails.
Code to save the Paddle model with the Python API
import random
import numpy as np
import paddle.v2 as paddle


def model_tf_2_paddle(input_w_mat, input_bias, feature_len, paddle_parameters,
                      cost, optimizer, inference_topo, count=None,
                      output_path_prefix="./"):
    # Build a small random dataset just so the trainer can run; the actual
    # parameter values come from the externally computed weights and bias.
    input_sample_num = 24
    self_build_data = []
    a0 = [1] * (feature_len + 1) * input_sample_num
    for index in xrange(len(a0)):
        a0[index] = 1 - random.random()
    a1 = np.array(a0)
    a2 = a1.reshape(input_sample_num, feature_len + 1)
    for item in a2:
        self_build_data.append((item[:-1], item[-1:]))

    def gen_data():
        for item in self_build_data:
            yield item[0], item[1]

    # Overwrite the fc layer's weight matrix and bias with the input values.
    paddle_parameters.set("___fc_layer_0__.w0", input_w_mat)
    paddle_parameters.set("___fc_layer_0__.wbias", input_bias)

    trainer = paddle.trainer.SGD(cost=cost, parameters=paddle_parameters,
                                 update_equation=optimizer)
    feeding = {'x': 0, 'y': 1}
    trainer.train(
        reader=paddle.batch(paddle.reader.shuffle(gen_data, buf_size=500),
                            batch_size=2),
        feeding=feeding,
        num_passes=0)

    if count is None:
        count_str = ""
    else:
        count_str = "_" + str(count)

    # Save the parameters and the inference topology as two separate files.
    para_info_tar_path = output_path_prefix + "./" + 'para_info' + count_str + ".tar"
    with open(para_info_tar_path, 'wb') as f:
        trainer.save_parameter_to_tar(f)
    topo_path = output_path_prefix + "./" + 'topo' + count_str + '.pkl'
    with open(topo_path, 'wb') as f:
        inference_topo.serialize_for_inference(f)
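For reference, loading the saved parameters back with the Python API works fine. This is roughly what we do (output_layer and test_batch are placeholders for our real topology and input data, which are defined elsewhere):

import paddle.v2 as paddle

paddle.init(use_gpu=False, trainer_count=1)
# Reload the parameters written by save_parameter_to_tar() above.
with open(para_info_tar_path, 'rb') as f:
    loaded_parameters = paddle.parameters.Parameters.from_tar(f)
# output_layer / test_batch stand in for our actual network output and data.
probs = paddle.infer(output_layer=output_layer,
                     parameters=loaded_parameters,
                     input=test_batch)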
C API code to load the model
// Read the whole model file into a buffer (`file` is opened earlier; see the note below).
fseek(file, 0L, SEEK_END);
int file_size = ftell(file);
fseek(file, 0L, SEEK_SET);
void* file_buf = malloc(file_size);
fread(file_buf, 1, file_size, file);
fclose(file);

// Hand the buffer to the C API to build the inference gradient machine.
paddle_gradient_machine gradient_machine;
CHECK(paddle_gradient_machine_create_for_inference(&gradient_machine, file_buf, file_size));
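Not shown above: earlier in the same function we initialize the C API and open the model file, roughly like this (model_path is a placeholder for the path we actually pass in; we include <paddle/capi.h> and link against the capi library):

// Sketch of the setup that runs before the snippet above.
CHECK(paddle_init(0, NULL));
FILE* file = fopen(model_path, "rb");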
Error Code
WARNING: 01-16 11:27:21: bsr * 7319 [CvmPredict][create_gradient_machine() .....3] Invoke paddle error 3 in paddle_gradient_machine_create_for_inference(&gradient_machine, file_buf, file_size)
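The "Invoke paddle error %d in ..." text comes from our CHECK macro, essentially the helper used in the C API examples (sketched below; our real macro also routes the message through our logging framework, hence the WARNING prefix above). As far as we can tell, error 3 corresponds to kPD_PROTOBUF_ERROR in the C API's paddle_error enum.

// Sketch of the CHECK helper: print the numeric paddle_error and the
// failing expression, then abort.
#define CHECK(stmt)                                                   \
  do {                                                                \
    paddle_error err = (stmt);                                        \
    if (err != kPD_NO_ERROR) {                                        \
      fprintf(stderr, "Invoke paddle error %d in %s\n", err, #stmt);  \
      exit(1);                                                        \
    }                                                                 \
  } while (0)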