diff --git a/python/examples/bert/bert_gpu_server.py b/python/examples/bert/bert_gpu_server.py
index 3fd64c345405e7ea031345f0694cce84029af385..7708a078636fd876c40e88d1441bc711d599f8a6 100644
--- a/python/examples/bert/bert_gpu_server.py
+++ b/python/examples/bert/bert_gpu_server.py
@@ -14,9 +14,9 @@
 import os
 import sys
 
-from paddle_serving_server_gpu import OpMaker
-from paddle_serving_server_gpu import OpSeqMaker
-from paddle_serving_server_gpu import Server
+from paddle_serving_server import OpMaker
+from paddle_serving_server import OpSeqMaker
+from paddle_serving_server import Server
 
 op_maker = OpMaker()
 read_op = op_maker.create('general_reader')
diff --git a/python/examples/bert/bert_web_service_gpu.py b/python/examples/bert/bert_web_service_gpu.py
index cbdd321c0932bf68c1e37f02f0c08e08a6c0e43e..fb332bca3b16ee6a2c8c25dc7ab8f1b70998e874 100644
--- a/python/examples/bert/bert_web_service_gpu.py
+++ b/python/examples/bert/bert_web_service_gpu.py
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # pylint: disable=doc-string-missing
-from paddle_serving_server_gpu.web_service import WebService
+from paddle_serving_server.web_service import WebService
 from paddle_serving_app.reader import ChineseBertReader
 import sys
 import os
diff --git a/python/examples/detection/faster_rcnn_hrnetv2p_w18_1x/README.md b/python/examples/detection/faster_rcnn_hrnetv2p_w18_1x/README.md
index 21ee05809042f5e6ee2e496306975f3ae18ed158..f25bd27766cfa64e20cc28c731f0a17e1f6dd826 100644
--- a/python/examples/detection/faster_rcnn_hrnetv2p_w18_1x/README.md
+++ b/python/examples/detection/faster_rcnn_hrnetv2p_w18_1x/README.md
@@ -10,7 +10,7 @@ wget --no-check-certificate https://paddle-serving.bj.bcebos.com/pddet_demo/2.0/
 
 ### Start the service
 ```
 tar xf faster_rcnn_hrnetv2p_w18_1x.tar
-python -m paddle_serving_server_gpu.serve --model serving_server --port 9494 --gpu_ids 0
+python -m paddle_serving_server.serve --model serving_server --port 9494 --gpu_ids 0
 ```
 This model support TensorRT, if you want a faster inference, please use `--use_trt`.
diff --git a/python/examples/detection/faster_rcnn_hrnetv2p_w18_1x/README_CN.md b/python/examples/detection/faster_rcnn_hrnetv2p_w18_1x/README_CN.md
index 30c455500c0d2a9cc5a68976e261292fee200c75..2c9048e10f32698cad0ded98643f804b91c810fc 100644
--- a/python/examples/detection/faster_rcnn_hrnetv2p_w18_1x/README_CN.md
+++ b/python/examples/detection/faster_rcnn_hrnetv2p_w18_1x/README_CN.md
@@ -11,7 +11,7 @@ wget --no-check-certificate https://paddle-serving.bj.bcebos.com/pddet_demo/2.0/
 
 ### 启动服务
 ```
 tar xf faster_rcnn_hrnetv2p_w18_1x.tar
-python -m paddle_serving_server_gpu.serve --model serving_server --port 9494 --gpu_ids 0
+python -m paddle_serving_server.serve --model serving_server --port 9494 --gpu_ids 0
 ```
 该模型支持TensorRT,如果想要更快的预测速度,可以开启`--use_trt`选项。
diff --git a/python/examples/detection/fcos_dcn_r50_fpn_1x_coco/README.md b/python/examples/detection/fcos_dcn_r50_fpn_1x_coco/README.md
index 251732c76e1996493eb7d785c721cd478f3b060b..d0cdb1582584cb7e0e95d00231c2c8a5fb33d464 100644
--- a/python/examples/detection/fcos_dcn_r50_fpn_1x_coco/README.md
+++ b/python/examples/detection/fcos_dcn_r50_fpn_1x_coco/README.md
@@ -10,7 +10,7 @@ wget --no-check-certificate https://paddle-serving.bj.bcebos.com/pddet_demo/2.0/
 
 ### Start the service
 ```
 tar xf fcos_dcn_r50_fpn_1x_coco.tar
-python -m paddle_serving_server_gpu.serve --model serving_server --port 9494 --gpu_ids 0
+python -m paddle_serving_server.serve --model serving_server --port 9494 --gpu_ids 0
 ```
 This model support TensorRT, if you want a faster inference, please use `--use_trt`.
@@ -18,4 +18,3 @@ This model support TensorRT, if you want a faster inference, please use `--use_t
 ```
 python test_client.py 000000570688.jpg
 ```
-
diff --git a/python/examples/detection/fcos_dcn_r50_fpn_1x_coco/README_CN.md b/python/examples/detection/fcos_dcn_r50_fpn_1x_coco/README_CN.md
index d4a65e072a069efcfe93053b92bdb764f5cbcc32..56c2505c8c7ee2be7627a2f6fd9e108868428805 100644
--- a/python/examples/detection/fcos_dcn_r50_fpn_1x_coco/README_CN.md
+++ b/python/examples/detection/fcos_dcn_r50_fpn_1x_coco/README_CN.md
@@ -11,7 +11,7 @@ wget --no-check-certificate https://paddle-serving.bj.bcebos.com/pddet_demo/2.0/
 
 ### 启动服务
 ```
 tar xf fcos_dcn_r50_fpn_1x_coco.tar
-python -m paddle_serving_server_gpu.serve --model serving_server --port 9494 --gpu_ids 0
+python -m paddle_serving_server.serve --model serving_server --port 9494 --gpu_ids 0
 ```
 该模型支持TensorRT,如果想要更快的预测速度,可以开启`--use_trt`选项。
@@ -20,4 +20,3 @@ python -m paddle_serving_server_gpu.serve --model serving_server --port 9494 --g
 ```
 python test_client.py 000000570688.jpg
 ```
-
diff --git a/python/examples/detection/ssd_vgg16_300_240e_voc/README.md b/python/examples/detection/ssd_vgg16_300_240e_voc/README.md
index f449dc45da135b353fdb60a591a306a6ef3d40c3..062941bfb8deff3a09c938e9c43cd2b710cbb0e5 100644
--- a/python/examples/detection/ssd_vgg16_300_240e_voc/README.md
+++ b/python/examples/detection/ssd_vgg16_300_240e_voc/README.md
@@ -10,7 +10,7 @@ wget --no-check-certificate https://paddle-serving.bj.bcebos.com/pddet_demo/2.0/
 
 ### Start the service
 ```
 tar xf ssd_vgg16_300_240e_voc.tar
-python -m paddle_serving_server_gpu.serve --model serving_server --port 9494 --gpu_ids 0
+python -m paddle_serving_server.serve --model serving_server --port 9494 --gpu_ids 0
 ```
 This model support TensorRT, if you want a faster inference, please use `--use_trt`.
@@ -18,4 +18,3 @@ This model support TensorRT, if you want a faster inference, please use `--use_t
 ```
 python test_client.py 000000570688.jpg
 ```
-
diff --git a/python/examples/detection/ssd_vgg16_300_240e_voc/README_CN.md b/python/examples/detection/ssd_vgg16_300_240e_voc/README_CN.md
index 332937cacc2f3bdf948a670de91dd20276473abc..32c19b5159a497e52df1c5fd01a87fd43f7d67e4 100644
--- a/python/examples/detection/ssd_vgg16_300_240e_voc/README_CN.md
+++ b/python/examples/detection/ssd_vgg16_300_240e_voc/README_CN.md
@@ -11,7 +11,7 @@ wget --no-check-certificate https://paddle-serving.bj.bcebos.com/pddet_demo/2.0/
 
 ### 启动服务
 ```
 tar xf ssd_vgg16_300_240e_voc.tar
-python -m paddle_serving_server_gpu.serve --model serving_server --port 9494 --gpu_ids 0
+python -m paddle_serving_server.serve --model serving_server --port 9494 --gpu_ids 0
 ```
 该模型支持TensorRT,如果想要更快的预测速度,可以开启`--use_trt`选项。
@@ -20,4 +20,3 @@ python -m paddle_serving_server_gpu.serve --model serving_server --port 9494 --g
 ```
 python test_client.py 000000570688.jpg
 ```
-
diff --git a/python/examples/fit_a_line/test_server.py b/python/examples/fit_a_line/test_server.py
index 958fb2b6bf2732f66ba1e7e6194e9ebeebbaa223..d055b309d7530ccbe928d50e2bcaba23fb1ddaff 100644
--- a/python/examples/fit_a_line/test_server.py
+++ b/python/examples/fit_a_line/test_server.py
@@ -13,10 +13,7 @@
 # limitations under the License.
 # pylint: disable=doc-string-missing
 
-try:
-    from paddle_serving_server_gpu.web_service import WebService
-except ImportError:
-    from paddle_serving_server.web_service import WebService
+from paddle_serving_server.web_service import WebService
 
 import numpy as np
diff --git a/python/examples/grpc_impl_example/fit_a_line/test_server_gpu.py b/python/examples/grpc_impl_example/fit_a_line/test_server_gpu.py
index 1547ee445f4f8ceebe58e6f9e4f05b92520911eb..62361d9994ca1c532a2f07384c8b089d3b0fad65 100644
--- a/python/examples/grpc_impl_example/fit_a_line/test_server_gpu.py
+++ b/python/examples/grpc_impl_example/fit_a_line/test_server_gpu.py
@@ -15,9 +15,9 @@
 import os
 import sys
 
-from paddle_serving_server_gpu import OpMaker
-from paddle_serving_server_gpu import OpSeqMaker
-from paddle_serving_server_gpu import MultiLangServer as Server
+from paddle_serving_server import OpMaker
+from paddle_serving_server import OpSeqMaker
+from paddle_serving_server import MultiLangServer as Server
 
 op_maker = OpMaker()
 read_op = op_maker.create('general_reader')
diff --git a/python/examples/imagenet/resnet50_web_service.py b/python/examples/imagenet/resnet50_web_service.py
index 7033103717621807ecd74093bf5eba8d31a9b877..ca111615deb9d240f9d8b042f1f7edb599a1b775 100644
--- a/python/examples/imagenet/resnet50_web_service.py
+++ b/python/examples/imagenet/resnet50_web_service.py
@@ -25,7 +25,7 @@ device = sys.argv[2]
 if device == "cpu":
     from paddle_serving_server.web_service import WebService
 else:
-    from paddle_serving_server_gpu.web_service import WebService
+    from paddle_serving_server.web_service import WebService
 
 
 class ImageService(WebService):
diff --git a/python/examples/imdb/text_classify_service.py b/python/examples/imdb/text_classify_service.py
index c973150eb3e3ac824825f81834c24f08c81973e6..ca1e26002baf0284f282add235706080f7902c33 100755
--- a/python/examples/imdb/text_classify_service.py
+++ b/python/examples/imdb/text_classify_service.py
@@ -13,11 +13,7 @@
 # limitations under the License.
 # pylint: disable=doc-string-missing
 
-try:
-    from paddle_serving_server.web_service import WebService
-except ImportError:
-    from paddle_serving_server_gpu.web_service import WebService
-
+from paddle_serving_server.web_service import WebService
 from paddle_serving_app.reader.imdb_reader import IMDBDataset
 import sys
 import numpy as np
diff --git a/python/examples/lac/lac_web_service.py b/python/examples/lac/lac_web_service.py
index 3cbf441eb1298e9068e22229a09e9c3efee37695..cf37f66294bd154324f2c7cacd1a35571b6c6350 100644
--- a/python/examples/lac/lac_web_service.py
+++ b/python/examples/lac/lac_web_service.py
@@ -12,10 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-try:
-    from paddle_serving_server.web_service import WebService
-except ImportError:
-    from paddle_serving_server_gpu.web_service import WebService
+from paddle_serving_server.web_service import WebService
 import sys
 from paddle_serving_app.reader import LACReader
 import numpy as np
diff --git a/python/examples/ocr/det_debugger_server.py b/python/examples/ocr/det_debugger_server.py
index ebaf0a3066c7e948b4ca37dda06c60423ad28da9..8c8305012368d1307ce75983233f6d3af8e35a76 100644
--- a/python/examples/ocr/det_debugger_server.py
+++ b/python/examples/ocr/det_debugger_server.py
@@ -22,7 +22,7 @@ from paddle_serving_app.reader import Sequential, ResizeByFactor
 from paddle_serving_app.reader import Div, Normalize, Transpose
 from paddle_serving_app.reader import DBPostProcess, FilterBoxes
 if sys.argv[1] == 'gpu':
-    from paddle_serving_server_gpu.web_service import WebService
+    from paddle_serving_server.web_service import WebService
 elif sys.argv[1] == 'cpu':
     from paddle_serving_server.web_service import WebService
 import time
diff --git a/python/examples/ocr/det_web_server.py b/python/examples/ocr/det_web_server.py
index e90efc7813e96e6b768a09b4ec9f5108c5878527..c72dc6af964694ee2d364add100d52930576b798 100644
--- a/python/examples/ocr/det_web_server.py
+++ b/python/examples/ocr/det_web_server.py
@@ -22,7 +22,7 @@ from paddle_serving_app.reader import Sequential, ResizeByFactor
 from paddle_serving_app.reader import Div, Normalize, Transpose
 from paddle_serving_app.reader import DBPostProcess, FilterBoxes
 if sys.argv[1] == 'gpu':
-    from paddle_serving_server_gpu.web_service import WebService
+    from paddle_serving_server.web_service import WebService
 elif sys.argv[1] == 'cpu':
     from paddle_serving_server.web_service import WebService
 import time
diff --git a/python/examples/ocr/ocr_debugger_server.py b/python/examples/ocr/ocr_debugger_server.py
index e29f1b47980530cc8a278da0d14ba69a43f4f6a0..13d29fe42e801870969cf19cbf830eda560c3f49 100644
--- a/python/examples/ocr/ocr_debugger_server.py
+++ b/python/examples/ocr/ocr_debugger_server.py
@@ -23,7 +23,7 @@ from paddle_serving_app.reader import Sequential, URL2Image, ResizeByFactor
 from paddle_serving_app.reader import Div, Normalize, Transpose
 from paddle_serving_app.reader import DBPostProcess, FilterBoxes, GetRotateCropImage, SortedBoxes
 if sys.argv[1] == 'gpu':
-    from paddle_serving_server_gpu.web_service import WebService
+    from paddle_serving_server.web_service import WebService
 elif sys.argv[1] == 'cpu':
     from paddle_serving_server.web_service import WebService
 from paddle_serving_app.local_predict import LocalPredictor
diff --git a/python/examples/ocr/rec_debugger_server.py b/python/examples/ocr/rec_debugger_server.py
index 29c0639776fa8c7338233ccf1ecf23fd263fb97f..d7d663fe3ba7dd2f157c5e856f55aa82d1159e0a 100644
--- a/python/examples/ocr/rec_debugger_server.py
+++ b/python/examples/ocr/rec_debugger_server.py
@@ -23,7 +23,7 @@ from paddle_serving_app.reader import Sequential, URL2Image, ResizeByFactor
 from paddle_serving_app.reader import Div, Normalize, Transpose
 from paddle_serving_app.reader import DBPostProcess, FilterBoxes, GetRotateCropImage, SortedBoxes
 if sys.argv[1] == 'gpu':
-    from paddle_serving_server_gpu.web_service import WebService
+    from paddle_serving_server.web_service import WebService
 elif sys.argv[1] == 'cpu':
     from paddle_serving_server.web_service import WebService
 import time
diff --git a/python/examples/ocr/rec_web_server.py b/python/examples/ocr/rec_web_server.py
index a3de120aff910f72a224a61cdc67d1ff50e65ab2..61669fddf34fe361460c6d18049905e7e3400db0 100644
--- a/python/examples/ocr/rec_web_server.py
+++ b/python/examples/ocr/rec_web_server.py
@@ -23,7 +23,7 @@ from paddle_serving_app.reader import Sequential, URL2Image, ResizeByFactor
 from paddle_serving_app.reader import Div, Normalize, Transpose
 from paddle_serving_app.reader import DBPostProcess, FilterBoxes, GetRotateCropImage, SortedBoxes
 if sys.argv[1] == 'gpu':
-    from paddle_serving_server_gpu.web_service import WebService
+    from paddle_serving_server.web_service import WebService
 elif sys.argv[1] == 'cpu':
     from paddle_serving_server.web_service import WebService
 import time
diff --git a/python/examples/pipeline/bert/pipeline_rpc_client.py b/python/examples/pipeline/bert/pipeline_rpc_client.py
index 11bdee54dd48500b4d8c7d674318ceb8909afd91..1ce1998a07670a30dff197d654d2192201c12928 100644
--- a/python/examples/pipeline/bert/pipeline_rpc_client.py
+++ b/python/examples/pipeline/bert/pipeline_rpc_client.py
@@ -1,27 +1,37 @@
+# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 import sys
 import os
 import yaml
 import requests
 import time
 import json
-try:
-    from paddle_serving_server_gpu.pipeline import PipelineClient
-except ImportError:
-    from paddle_serving_server.pipeline import PipelineClient
+from paddle_serving_server.pipeline import PipelineClient
 import numpy as np
-
 client = PipelineClient()
 client.connect(['127.0.0.1:9998'])
 
 batch_size = 101
 with open("data-c.txt", 'r') as fin:
-    lines = fin.readlines()
-    start_idx = 0
-    while start_idx < len(lines):
-        end_idx = min(len(lines), start_idx + batch_size)
-        feed = {}
-        for i in range(start_idx, end_idx):
-            feed[str(i - start_idx)] = lines[i]
-        ret = client.predict(feed_dict=feed, fetch=["res"])
-        print(ret)
-        start_idx += batch_size
+    lines = fin.readlines()
+    start_idx = 0
+    while start_idx < len(lines):
+        end_idx = min(len(lines), start_idx + batch_size)
+        feed = {}
+        for i in range(start_idx, end_idx):
+            feed[str(i - start_idx)] = lines[i]
+        ret = client.predict(feed_dict=feed, fetch=["res"])
+        print(ret)
+        start_idx += batch_size
diff --git a/python/examples/pipeline/bert/web_service.py b/python/examples/pipeline/bert/web_service.py
index da4ec0e6043919052eb41857e98c10f3dfc5a1d1..7f5128f95d772a8d108e5ab3a92314eee103235d 100644
--- a/python/examples/pipeline/bert/web_service.py
+++ b/python/examples/pipeline/bert/web_service.py
@@ -11,10 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-try:
-    from paddle_serving_server_gpu.web_service import WebService, Op
-except ImportError:
-    from paddle_serving_server.web_service import WebService, Op
+from paddle_serving_server.web_service import WebService, Op
 import logging
 import numpy as np
 import sys
diff --git a/python/examples/pipeline/imagenet/resnet50_web_service.py b/python/examples/pipeline/imagenet/resnet50_web_service.py
index b0a62c5131dd0384d6cbb14708a15c969f081b63..53a0b6d9c5d7290b709df9c5ba7a314d29bbd08d 100644
--- a/python/examples/pipeline/imagenet/resnet50_web_service.py
+++ b/python/examples/pipeline/imagenet/resnet50_web_service.py
@@ -13,10 +13,7 @@
 # limitations under the License.
 import sys
 from paddle_serving_app.reader import Sequential, URL2Image, Resize, CenterCrop, RGB2BGR, Transpose, Div, Normalize, Base64ToImage
-try:
-    from paddle_serving_server.web_service import WebService, Op
-except ImportError:
-    from paddle_serving_server_gpu.web_service import WebService, Op
+from paddle_serving_server.web_service import WebService, Op
 import logging
 import numpy as np
 import base64, cv2
diff --git a/python/examples/pipeline/imdb_model_ensemble/test_pipeline_server.py b/python/examples/pipeline/imdb_model_ensemble/test_pipeline_server.py
index 69a3e3460a7df18e580962284d7ca077f1f96de4..30317f0ef4fbeba82c3fa6a1551284b467e0adfd 100644
--- a/python/examples/pipeline/imdb_model_ensemble/test_pipeline_server.py
+++ b/python/examples/pipeline/imdb_model_ensemble/test_pipeline_server.py
@@ -15,18 +15,11 @@
 import numpy as np
 from paddle_serving_app.reader.imdb_reader import IMDBDataset
 import logging
-try:
-    from paddle_serving_server.web_service import WebService
-    from paddle_serving_server.pipeline import Op, RequestOp, ResponseOp
-    from paddle_serving_server.pipeline import PipelineServer
-    from paddle_serving_server.pipeline.proto import pipeline_service_pb2
-    from paddle_serving_server.pipeline.channel import ChannelDataErrcode
-except ImportError:
-    from paddle_serving_server_gpu.web_service import WebService
-    from paddle_serving_server_gpu.pipeline import Op, RequestOp, ResponseOp
-    from paddle_serving_server_gpu.pipeline import PipelineServer
-    from paddle_serving_server_gpu.pipeline.proto import pipeline_service_pb2
-    from paddle_serving_server_gpu.pipeline.channel import ChannelDataErrcode
+from paddle_serving_server.web_service import WebService
+from paddle_serving_server.pipeline import Op, RequestOp, ResponseOp
+from paddle_serving_server.pipeline import PipelineServer
+from paddle_serving_server.pipeline.proto import pipeline_service_pb2
+from paddle_serving_server.pipeline.channel import ChannelDataErrcode
 
 _LOGGER = logging.getLogger()
 user_handler = logging.StreamHandler()
diff --git a/python/examples/pipeline/ocr/web_service.py b/python/examples/pipeline/ocr/web_service.py
index 8e7c409170aa1a5b4354d64aa2caa920dd12899c..42a87d60d3d872fa203951537febca84c78937da 100644
--- a/python/examples/pipeline/ocr/web_service.py
+++ b/python/examples/pipeline/ocr/web_service.py
@@ -11,10 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-try:
-    from paddle_serving_server_gpu.web_service import WebService, Op
-except ImportError:
-    from paddle_serving_server.web_service import WebService, Op
+from paddle_serving_server.web_service import WebService, Op
 import logging
 import numpy as np
 import cv2
diff --git a/python/examples/pipeline/simple_web_service/web_service.py b/python/examples/pipeline/simple_web_service/web_service.py
index 6275076484965426386d919f6a7f56d020463a8c..ea3109cf998ab81ecf68f556c0254fe35b3f4091 100644
--- a/python/examples/pipeline/simple_web_service/web_service.py
+++ b/python/examples/pipeline/simple_web_service/web_service.py
@@ -11,10 +11,8 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-try:
-    from paddle_serving_server.web_service import WebService, Op
-except ImportError:
-    from paddle_serving_server_gpu.web_service import WebService, Op
+
+from paddle_serving_server.web_service import WebService, Op
 import logging
 import numpy as np
 import sys
diff --git a/python/examples/pipeline/simple_web_service/web_service_java.py b/python/examples/pipeline/simple_web_service/web_service_java.py
index 7c573c6be7e7846431f15160ed4f7d7b570f904c..a32ba0ee5417757d6c2210a2cfa2481d7a8a115c 100644
--- a/python/examples/pipeline/simple_web_service/web_service_java.py
+++ b/python/examples/pipeline/simple_web_service/web_service_java.py
@@ -11,10 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-try:
-    from paddle_serving_server.web_service import WebService, Op
-except ImportError:
-    from paddle_serving_server_gpu.web_service import WebService, Op
+from paddle_serving_server.web_service import WebService, Op
 import logging
 import numpy as np
 from numpy import array
diff --git a/python/examples/senta/senta_web_service.py b/python/examples/senta/senta_web_service.py
index 51e530695c8b41da4d6ca0f8128244197ff8a8ad..1e872f0eae0e9ecbfae820367e26db9e94f3cf86 100644
--- a/python/examples/senta/senta_web_service.py
+++ b/python/examples/senta/senta_web_service.py
@@ -16,11 +16,7 @@
 import os
 import sys
 import numpy as np
 
-try:
-    from paddle_serving_server.web_service import WebService
-except ImportError:
-    from paddle_serving_server_gpu.web_service import WebService
-
+from paddle_serving_server.web_service import WebService
 from paddle_serving_client import Client
 from paddle_serving_app.reader import LACReader, SentaReader
diff --git a/python/examples/xpu/fit_a_line_xpu/test_server.py b/python/examples/xpu/fit_a_line_xpu/test_server.py
index 27499f45ffa442fb3b26cc692695028619aa62b7..a7b7e096ce7ff6f58215bbb06d36f663e1bd32f2 100644
--- a/python/examples/xpu/fit_a_line_xpu/test_server.py
+++ b/python/examples/xpu/fit_a_line_xpu/test_server.py
@@ -13,11 +13,7 @@
 # limitations under the License.
 # pylint: disable=doc-string-missing
 
-try:
-    from paddle_serving_server.web_service import WebService
-except ImportError:
-    from paddle_serving_server_gpu.web_service import WebService
-
+from paddle_serving_server.web_service import WebService
 import numpy as np
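For reference only (not part of the patch): a minimal sketch of a GPU web service after this migration, using the unified `paddle_serving_server` package. The model directory `serving_server`, port 9494, and GPU card 0 mirror the README commands above; the `WebService` method names are assumed from the existing examples and may differ slightly across Paddle Serving versions.
```
# Minimal sketch of a GPU service after the paddle_serving_server_gpu ->
# paddle_serving_server migration. Model dir, port, and GPU id are placeholders
# taken from the README commands in this patch.
from paddle_serving_server.web_service import WebService

service = WebService(name="demo")
service.load_model_config("serving_server")   # exported serving_server directory
service.set_gpus("0")                         # same card as --gpu_ids 0
service.prepare_server(workdir="workdir", port=9494, device="gpu")
service.run_rpc_service()
service.run_web_service()
```
The CLI equivalent, as used in the detection READMEs above, is `python -m paddle_serving_server.serve --model serving_server --port 9494 --gpu_ids 0`.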