diff --git a/python/examples/imdb/get_data.sh b/python/examples/imdb/get_data.sh
new file mode 100644
index 0000000000000000000000000000000000000000..5fdb49396e3d03ddc86c514c8c6cb9bee8f013b0
--- /dev/null
+++ b/python/examples/imdb/get_data.sh
@@ -0,0 +1,2 @@
+wget 10.86.69.44:/home/work/incubate/text_classification_data.tar.gz
+tar -zxvf text_classification_data.tar.gz
diff --git a/python/examples/imdb/inference.conf b/python/examples/imdb/inference.conf
deleted file mode 100644
index bbb056b8914e457e7efcbaa59be5e994f87a9ca1..0000000000000000000000000000000000000000
--- a/python/examples/imdb/inference.conf
+++ /dev/null
@@ -1,6 +0,0 @@
-2 3
-words 1 -1 0
-label 1 1 0
-cost mean_0.tmp_0
-acc accuracy_0.tmp_0
-prediction fc_1.tmp_2
diff --git a/python/examples/imdb/local_train.py b/python/examples/imdb/local_train.py
index 9cf7e3400e0c6b8d5a267098664caef00c8568bc..52d6dfa2c26aaacbb12d197879ce69c701f82a9e 100644
--- a/python/examples/imdb/local_train.py
+++ b/python/examples/imdb/local_train.py
@@ -56,13 +56,13 @@ if __name__ == "__main__":
     exe.run(fluid.default_startup_program())
     epochs = 30
     save_dirname = "cnn_model"
+
     for i in range(epochs):
         exe.train_from_dataset(program=fluid.default_main_program(),
                                dataset=dataset, debug=False)
         logger.info("TRAIN --> pass: {}".format(i))
         fluid.io.save_inference_model("%s/epoch%d.model" % (save_dirname, i),
                                       [data.name, label.name], [acc], exe)
-        serving.io.save_model("%s/epoch%d.model" % (save_dirname, i),
-                              ["words", "label"], {"acc": acc}, exe)
-
-
+        serving.save_model("%s/epoch%d.model" % (save_dirname, i), "client_config{}".format(i),
+                           {"words": data, "label": label},
+                           {"acc": acc, "cost": avg_cost, "prediction": prediction})
diff --git a/python/paddle_serving/__init__.py b/python/paddle_serving/__init__.py
index 1b1ac92198b39c3d00c50af2e68baf68ca9ff75e..f8cab8c47308f39fe3963883e3090f33080ebd83 100644
--- a/python/paddle_serving/__init__.py
+++ b/python/paddle_serving/__init__.py
@@ -12,3 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from .serving_client import Client
+from .io import save_model
diff --git a/python/paddle_serving/io/__init__.py b/python/paddle_serving/io/__init__.py
index b2a9c21938767b995214b5ecc00ce7ca43befaff..e38fdabad3b56f16bb98b98241884b963704fcec 100644
--- a/python/paddle_serving/io/__init__.py
+++ b/python/paddle_serving/io/__init__.py
@@ -12,9 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
 from paddle.fluid import Executor
 from paddle.fluid.compiler import CompiledProgram
 from paddle.fluid.framework import Program
+from paddle.fluid.framework import default_main_program
+from paddle.fluid import CPUPlace
+from paddle.fluid.io import save_persistables
+import os


 def save_model(server_model_folder,
                client_config_folder,
@@ -30,10 +35,30 @@ def save_model(server_model_folder,
     if not isinstance(main_program, Program):
         raise TypeError("program should be as Program type or None")

-    executor = Executor(place=paddle.fluid.CPUPlace())
-    paddle.fluid.io.save_persistables(executor, server_model_folder,
-                                      main_program)
-
+    executor = Executor(place=CPUPlace())
+
+    save_persistables(executor, server_model_folder,
+                      main_program)
+
+    cmd = "mkdir -p {}".format(client_config_folder)
+    os.system(cmd)
+    with open("{}/client.conf".format(client_config_folder), "w") as fout:
+        fout.write("{} {}\n".format(len(feed_var_dict), len(fetch_var_dict)))
+        for key in feed_var_dict:
+            fout.write("{}".format(key))
+            if feed_var_dict[key].lod_level == 1:
+                fout.write(" 1 -1\n")
+            elif feed_var_dict[key].lod_level == 0:
+                fout.write(" {}".format(len(feed_var_dict[key].shape)))
+                for dim in feed_var_dict[key].shape:
+                    fout.write(" {}".format(dim))
+                fout.write("\n")
+        for key in fetch_var_dict:
+            fout.write("{} {}\n".format(key, fetch_var_dict[key].name))
+
+    cmd = "cp {}/client.conf {}/server.conf".format(
+        client_config_folder, server_model_folder)
+    os.system(cmd)
diff --git a/python/setup.py.in b/python/setup.py.in
index 313f78092b36ac49b403a7df8dc0ef5dfe5d8052..90d2fcd59ff83c3f16f01830f690b83277e2c326 100644
--- a/python/setup.py.in
+++ b/python/setup.py.in
@@ -34,12 +34,15 @@ REQUIRED_PACKAGES = [
 packages=['paddle_serving',
           'paddle_serving.serving_client',
-          'paddle_serving.proto']
+          'paddle_serving.proto',
+          'paddle_serving.io']

 package_data={'paddle_serving.serving_client': ['serving_client.so']}
 package_dir={'paddle_serving.serving_client':
              '${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving/serving_client',
              'paddle_serving.proto':
-             '${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving/proto'}
+             '${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving/proto',
+             'paddle_serving.io':
+             '${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving/io'}

 setup(
     name='paddle-serving-client',
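A minimal usage sketch of the new serving.save_model entry point, mirroring the call added in local_train.py above. The tiny bag-of-words network, the directory names "serving_model" and "client_config", and the assumption that save_model falls back to fluid.default_main_program() when no program is passed are illustrative only and are not taken from this patch.

# Illustrative sketch: drive paddle_serving.save_model with a toy network.
import paddle.fluid as fluid
import paddle_serving as serving

# Feed variables: "words" is a variable-length (LoD level 1) id sequence,
# "label" is a dense int64 tensor.
data = fluid.layers.data(name="words", shape=[1], dtype="int64", lod_level=1)
label = fluid.layers.data(name="label", shape=[1], dtype="int64")

# A toy bag-of-words classifier standing in for the IMDB CNN.
emb = fluid.layers.embedding(input=data, size=[100, 8], is_sparse=True)
bow = fluid.layers.sequence_pool(input=emb, pool_type="sum")
prediction = fluid.layers.fc(input=bow, size=2, act="softmax")
cost = fluid.layers.cross_entropy(input=prediction, label=label)
avg_cost = fluid.layers.mean(x=cost)
acc = fluid.layers.accuracy(input=prediction, label=label)

exe = fluid.Executor(fluid.CPUPlace())
exe.run(fluid.default_startup_program())

# Persistable parameters go to "serving_model/"; client.conf is written to
# "client_config/" and copied into the server folder as server.conf.
serving.save_model("serving_model", "client_config",
                   {"words": data, "label": label},
                   {"acc": acc, "cost": avg_cost, "prediction": prediction})

# client_config/client.conf then looks like the following (internal variable
# names and the dense-shape entries depend on the graph that was built):
#   2 3
#   words 1 -1
#   label 2 -1 1
#   acc <name of the accuracy output variable>
#   cost <name of the mean output variable>
#   prediction <name of the softmax fc output variable>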