提交 d7d9e5f9 编写于 作者: G guru4elephant

add server save_model

上级 871c7167
wget 10.86.69.44:/home/work/incubate/text_classification_data.tar.gz
tar -zxvf text_classification_data.tar.gz
2 3
words 1 -1 0
label 1 1 0
cost mean_0.tmp_0
acc accuracy_0.tmp_0
prediction fc_1.tmp_2
@@ -56,13 +56,13 @@ if __name__ == "__main__":
exe.run(fluid.default_startup_program())
epochs = 30
save_dirname = "cnn_model"
for i in range(epochs):
exe.train_from_dataset(program=fluid.default_main_program(),
dataset=dataset, debug=False)
logger.info("TRAIN --> pass: {}".format(i))
fluid.io.save_inference_model("%s/epoch%d.model" % (save_dirname, i),
[data.name, label.name], [acc], exe)
serving.io.save_model("%s/epoch%d.model" % (save_dirname, i),
["words", "label"], {"acc": acc}, exe)
serving.save_model("%s/epoch%d.model" % (save_dirname, i), "client_config{}".format(i),
{"words": data, "label": label},
{"acc": acc, "cost": avg_cost, "prediction": prediction})
@@ -12,3 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from .serving_client import Client
from .io import save_model
@@ -12,9 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from paddle.fluid import Executor
from paddle.fluid.compiler import CompiledProgram
from paddle.fluid.framework import Program
from paddle.fluid.framework import default_main_program
from paddle.fluid import CPUPlace
from paddle.fluid.io import save_persistables
import os
def save_model(server_model_folder,
client_config_folder,
@@ -30,10 +35,30 @@ def save_model(server_model_folder,
if not isinstance(main_program, Program):
raise TypeError("program should be as Program type or None")
executor = Executor(place=paddle.fluid.CPUPlace())
paddle.fluid.io.save_persistables(executor, server_model_folder,
main_program)
executor = Executor(place=CPUPlace())
save_persistables(executor, server_model_folder,
main_program)
cmd = "mkdir -p {}".format(client_config_folder)
os.system(cmd)
with open("{}/client.conf".format(client_config_folder), "w") as fout:
fout.write("{} {}\n".format(len(feed_var_dict), len(fetch_var_dict)))
for key in feed_var_dict:
fout.write("{}".format(key))
if feed_var_dict[key].lod_level == 1:
fout.write(" 1 -1\n")
elif feed_var_dict[key].lod_level == 0:
fout.write(" {}".format(len(feed_var_dict[key].shape)))
for dim in feed_var_dict[key].shape:
fout.write(" {}".format(dim))
fout.write("\n")
for key in fetch_var_dict:
fout.write("{} {}\n".format(key, fetch_var_dict[key].name))
cmd = "cp {}/client.conf {}/server.conf".format(
client_config_folder, server_model_folder)
os.system(cmd)
@@ -34,12 +34,15 @@ REQUIRED_PACKAGES = [
packages=['paddle_serving',
'paddle_serving.serving_client',
'paddle_serving.proto']
'paddle_serving.proto',
'paddle_serving.io']
package_data={'paddle_serving.serving_client': ['serving_client.so']}
package_dir={'paddle_serving.serving_client':
'${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving/serving_client',
'paddle_serving.proto':
'${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving/proto'}
'${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving/proto',
'paddle_serving.io':
'${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving/io'}
setup(
name='paddle-serving-client',
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册