# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=doc-string-missing

from paddle_serving_server.pipeline import Op
from paddle_serving_server.pipeline import PipelineServer
import numpy as np
import logging

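# Log timestamp, level, and source location for every message; level is INFO.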
logging.basicConfig(
    format='%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
    datefmt='%Y-%m-%d %H:%M',
    #level=logging.DEBUG)
    level=logging.INFO)


# CombineOp merges the outputs of its upstream ops (bow and cnn) by
# averaging their "prediction" values in preprocess().
class CombineOp(Op):
    def preprocess(self, input_data):
        combined_prediction = 0
        for op_name, channeldata in input_data.items():
            data = channeldata.parse()
            logging.info("{}: {}".format(op_name, data["prediction"]))
            combined_prediction += data["prediction"]
        data = {"prediction": combined_prediction / 2}
        return data


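# Entry op of the DAG: it has no upstream op; bow_op and cnn_op both take it as input.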
read_op = Op(name="read", inputs=None)
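# Query the IMDB BoW model served at 127.0.0.1:9393 and fetch its "prediction" output.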
bow_op = Op(name="bow",
            inputs=[read_op],
            server_model="imdb_bow_model",
            server_port="9393",
            device="cpu",
            client_config="imdb_bow_client_conf/serving_client_conf.prototxt",
            server_name="127.0.0.1:9393",
            fetch_names=["prediction"],
            concurrency=1,
            timeout=0.1,
            retry=2)
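# Query the IMDB CNN model served at 127.0.0.1:9292 with the same request.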
cnn_op = Op(name="cnn",
            inputs=[read_op],
            server_model="imdb_cnn_model",
            server_port="9292",
            device="cpu",
            client_config="imdb_cnn_client_conf/serving_client_conf.prototxt",
            server_name="127.0.0.1:9292",
            fetch_names=["prediction"],
            concurrency=1,
            timeout=-1,
            retry=1)
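# Average the bow and cnn predictions with the CombineOp defined above.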
combine_op = CombineOp(
    name="combine", inputs=[bow_op, cnn_op], concurrency=1, timeout=-1, retry=1)

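# Build the pipeline server: register the DAG ops, bind to port 8080 with
# two workers, and start serving.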
pyserver = PipelineServer(
    use_multithread=True,
    client_type='grpc',
    use_future=False,
    profile=False,
    retry=1)
pyserver.add_ops([read_op, bow_op, cnn_op, combine_op])
pyserver.prepare_server(port=8080, worker_num=2)
pyserver.run_server()