Commit d8238185 authored by MRXLT, committed by MRXLT

fix python api

Parent c6338059
 FROM centos:centos6.10
-RUN export http_proxy=http://172.19.56.199:3128 \
-    && export https_proxy=http://172.19.56.199:3128 \
-    && yum -y install wget \
+RUN yum -y install wget \
     && wget http://people.centos.org/tru/devtools-2/devtools-2.repo -O /etc/yum.repos.d/devtoolset-2.repo \
     && yum -y install devtoolset-2-gcc devtoolset-2-gcc-c++ devtoolset-2-binutils \
     && source /opt/rh/devtoolset-2/enable \
@@ -14,6 +14,8 @@ op_seq_maker.add_op(general_infer_op)
 server = Server()
 server.set_op_sequence(op_seq_maker.get_op_sequence())
+server.set_num_threads(12)
 server.load_model_config(sys.argv[1])
-server.prepare_server(workdir="work_dir1", port=9292, device="cpu")
+port = int(sys.argv[2])
+server.prepare_server(workdir="work_dir1", port=port, device="cpu")
 server.run_server()
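The example above now reads the serving port from the command line instead of hard-coding 9292: argv[1] is the model config directory handed to load_model_config, argv[2] is the port. A minimal, hypothetical launch sketch under that convention (the script name and usage string are illustrative, not part of this commit):

```python
# Hypothetical wrapper showing the new calling convention of the example:
#   python test_server.py <model_config_dir> <port>
# Only the argument handling is sketched here; the Server setup is the code above.
import sys

def parse_args(argv):
    if len(argv) != 3:
        raise SystemExit("usage: python test_server.py <model_config_dir> <port>")
    model_dir, port = argv[1], int(argv[2])  # port must be an integer, as in the diff
    return model_dir, port

if __name__ == "__main__":
    model_dir, port = parse_args(sys.argv)
    print("model config: %s, serving port: %d" % (model_dir, port))
```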
@@ -104,7 +104,7 @@ class Client(object):
         predictor_sdk = SDKConfig()
         predictor_sdk.set_server_endpoints(endpoints)
         sdk_desc = predictor_sdk.gen_desc()
-        self.client_handle_.create_predictor_by_desc(sdk_desc)
+        self.client_handle_.create_predictor_by_desc(sdk_desc.SerializeToString())
 
     def get_feed_names(self):
         return self.feed_names_
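The client fix passes serialized protobuf bytes to the native handle instead of the Python message object, since the underlying binding expects a wire-format string. A minimal sketch of that pattern, using a stock protobuf message (Timestamp) as a stand-in for the SDK config proto produced by gen_desc():

```python
# Stand-in demonstration of message object vs. serialized bytes; Timestamp is
# used only because it ships with protobuf, the real sdk_desc is generated
# by Paddle Serving's SDKConfig.gen_desc().
from google.protobuf.timestamp_pb2 import Timestamp

desc = Timestamp(seconds=42)          # a protobuf message object
wire = desc.SerializeToString()       # bytes in protobuf wire format
assert isinstance(wire, bytes)

# A receiver (here, the C++ predictor factory) reconstructs the message
# from those bytes rather than touching the Python object directly.
parsed = Timestamp()
parsed.ParseFromString(wire)
assert parsed.seconds == 42
```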
@@ -176,7 +176,7 @@ class Server(object):
     def run_server(self):
         # just run server with system command
         # currently we do not load cube
-        command = "/home/users/dongdaxiang/github_develop/Serving/build_server/core/general-server/serving" \
+        command = "/home/xulongteng/github/Serving/build_server/core/general-server/serving" \
                   " -enable_model_toolkit " \
                   "-inferservice_path {} " \
                   "-inferservice_file {} " \
@@ -187,7 +187,8 @@ class Server(object):
"-resource_path {} " \
"-resource_file {} " \
"-workflow_path {} " \
"-workflow_file {} ".format(
"-workflow_file {} " \
"-bthread_concurrency {} ".format(
self.workdir,
self.infer_service_fn,
self.max_concurrency,
@@ -197,7 +198,8 @@ class Server(object):
                       self.workdir,
                       self.resource_fn,
                       self.workdir,
-                      self.workflow_fn)
+                      self.workflow_fn,
+                      self.num_threads,)
         os.system(command)
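The new -bthread_concurrency flag is filled from self.num_threads, which the example sets with server.set_num_threads(12). A simplified sketch of how the command string composes (the binary path and file names are placeholders; only the flags visible in this hunk are taken from the diff, and the real code also passes max_concurrency, resource, and model-toolkit paths):

```python
# Simplified reconstruction of the command assembly in run_server(); arguments
# map one-to-one onto the {} placeholders, mirroring str.format() in the diff.
def build_command(serving_bin, workdir, infer_service_fn, workflow_fn, num_threads):
    return (serving_bin +
            " -enable_model_toolkit"
            " -inferservice_path {}"
            " -inferservice_file {}"
            " -workflow_path {}"
            " -workflow_file {}"
            " -bthread_concurrency {}".format(
                workdir, infer_service_fn, workdir, workflow_fn, num_threads))

print(build_command("./serving", "work_dir1", "infer_service.prototxt",
                    "workflow.prototxt", 12))
```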