From b02e229a4d04cef1c3ecf1b41016ad0fe16007c4 Mon Sep 17 00:00:00 2001
From: dongdaxiang
Date: Thu, 23 Apr 2020 16:17:43 +0800
Subject: [PATCH] add doc

---
 doc/INFERENCE_TO_SERVING.md    | 14 ++++++++++++++
 doc/INFERENCE_TO_SERVING_CN.md | 14 ++++++++++++++
 2 files changed, 28 insertions(+)
 create mode 100644 doc/INFERENCE_TO_SERVING.md
 create mode 100644 doc/INFERENCE_TO_SERVING_CN.md

diff --git a/doc/INFERENCE_TO_SERVING.md b/doc/INFERENCE_TO_SERVING.md
new file mode 100644
index 00000000..8334159e
--- /dev/null
+++ b/doc/INFERENCE_TO_SERVING.md
@@ -0,0 +1,14 @@
+# How to Convert Paddle Inference Model To Paddle Serving Format
+
+([简体中文](./INFERENCE_TO_SERVING_CN.md)|English)
+
+## Example
+
+``` python
+from paddle_serving_client.io import inference_model_to_serving
+inference_model_dir = "your_inference_model"
+serving_client_dir = "serving_client_dir"
+serving_server_dir = "serving_server_dir"
+feed_var_names, fetch_var_names = inference_model_to_serving(
+    inference_model_dir, serving_client_dir, serving_server_dir)
+```
diff --git a/doc/INFERENCE_TO_SERVING_CN.md b/doc/INFERENCE_TO_SERVING_CN.md
new file mode 100644
index 00000000..94d1def4
--- /dev/null
+++ b/doc/INFERENCE_TO_SERVING_CN.md
@@ -0,0 +1,14 @@
+# 如何从Paddle保存的预测模型转为Paddle Serving格式可部署的模型
+
+([English](./INFERENCE_TO_SERVING.md)|简体中文)
+
+## 示例
+
+``` python
+from paddle_serving_client.io import inference_model_to_serving
+inference_model_dir = "your_inference_model"
+serving_client_dir = "serving_client_dir"
+serving_server_dir = "serving_server_dir"
+feed_var_names, fetch_var_names = inference_model_to_serving(
+    inference_model_dir, serving_client_dir, serving_server_dir)
+```
--
GitLab