From 339f1418bd3d3a0a316cbe77b70c431775359772 Mon Sep 17 00:00:00 2001
From: zhangjun <ewalker@live.cn>
Date: Sat, 24 Apr 2021 14:45:21 +0000
Subject: [PATCH] fix: remove webservice from xpu example

---
 python/examples/xpu/ernie/README.md           |  4 +-
 python/examples/xpu/ernie/ernie_client.py     |  2 +-
 python/examples/xpu/fit_a_line_xpu/README.md  | 16 -------
 .../examples/xpu/fit_a_line_xpu/README_CN.md  | 17 --------
 .../test_multi_process_client.py              | 42 -------------------
 .../xpu/fit_a_line_xpu/test_server.py         | 37 ----------------
 6 files changed, 3 insertions(+), 115 deletions(-)
 delete mode 100644 python/examples/xpu/fit_a_line_xpu/test_multi_process_client.py
 delete mode 100644 python/examples/xpu/fit_a_line_xpu/test_server.py

diff --git a/python/examples/xpu/ernie/README.md b/python/examples/xpu/ernie/README.md
index 1ce982a9..ef001e80 100644
--- a/python/examples/xpu/ernie/README.md
+++ b/python/examples/xpu/ernie/README.md
@@ -11,8 +11,8 @@ python3 -m paddle_serving_client.convert --dirname ernie
 ```
 ### or, you can get the serving saved model directly
 ```
-wget https://paddle-serving.bj.bcebos.com/models/xpu/bert.tar.gz
-tar zxvf bert.tar.gz 
+wget https://paddle-serving.bj.bcebos.com/models/xpu/ernie.tar.gz
+tar zxvf ernie.tar.gz 
 ```
 ### Getting Dict and Sample Dataset
 
diff --git a/python/examples/xpu/ernie/ernie_client.py b/python/examples/xpu/ernie/ernie_client.py
index b02c9d0a..d5f2d25d 100644
--- a/python/examples/xpu/ernie/ernie_client.py
+++ b/python/examples/xpu/ernie/ernie_client.py
@@ -23,7 +23,7 @@ args = benchmark_args()
 
 reader = ChineseErnieReader({"max_seq_len": 128})
 fetch = ["save_infer_model/scale_0"]
-endpoint_list = ['127.0.0.1:7704']
+endpoint_list = ['127.0.0.1:12000']
 client = Client()
 client.load_client_config(args.model)
 client.connect(endpoint_list)
diff --git a/python/examples/xpu/fit_a_line_xpu/README.md b/python/examples/xpu/fit_a_line_xpu/README.md
index 04f2ea5f..b8108dec 100644
--- a/python/examples/xpu/fit_a_line_xpu/README.md
+++ b/python/examples/xpu/fit_a_line_xpu/README.md
@@ -29,19 +29,3 @@ The `paddlepaddle` package is used in `test_client.py`, and you may need to down
 python test_client.py uci_housing_client/serving_client_conf.prototxt
 ```
 
-
-
-## HTTP service
-
-### Start server
-
-Start a web service with default web service hosting modules:
-``` shell
-python test_server.py
-```
-
-### Client prediction
-
-``` shell
-curl -H "Content-Type:application/json" -X POST -d '{"feed":[{"x": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332]}], "fetch":["price"]}' http://127.0.0.1:9292/uci/prediction
-```
diff --git a/python/examples/xpu/fit_a_line_xpu/README_CN.md b/python/examples/xpu/fit_a_line_xpu/README_CN.md
index 5434f70e..19549aca 100644
--- a/python/examples/xpu/fit_a_line_xpu/README_CN.md
+++ b/python/examples/xpu/fit_a_line_xpu/README_CN.md
@@ -32,20 +32,3 @@ python -m paddle_serving_server.serve --model uci_housing_model --thread 10 --po
 python test_client.py uci_housing_client/serving_client_conf.prototxt
 ```
 
-
-
-## HTTP服务
-
-### 开启服务端
-
-通过下面的一行代码开启默认web服务:
-
-``` shell
-python test_server.py
-```
-
-### 客户端预测
-
-``` shell
-curl -H "Content-Type:application/json" -X POST -d '{"feed":[{"x": [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.0332]}], "fetch":["price"]}' http://127.0.0.1:9292/uci/prediction
-```
diff --git a/python/examples/xpu/fit_a_line_xpu/test_multi_process_client.py b/python/examples/xpu/fit_a_line_xpu/test_multi_process_client.py
deleted file mode 100644
index e6120266..00000000
--- a/python/examples/xpu/fit_a_line_xpu/test_multi_process_client.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from paddle_serving_client import Client
-from paddle_serving_client.utils import MultiThreadRunner
-import paddle
-import numpy as np
-
-
-def single_func(idx, resource):
-    client = Client()
-    client.load_client_config(
-        "./uci_housing_client/serving_client_conf.prototxt")
-    client.connect(["127.0.0.1:9293", "127.0.0.1:9292"])
-    x = [
-        0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584,
-        0.6283, 0.4919, 0.1856, 0.0795, -0.0332
-    ]
-    x = np.array(x)
-    for i in range(1000):
-        fetch_map = client.predict(feed={"x": x}, fetch=["price"])
-        if fetch_map is None:
-            return [[None]]
-    return [[0]]
-
-
-multi_thread_runner = MultiThreadRunner()
-thread_num = 4
-result = multi_thread_runner.run(single_func, thread_num, {})
-if None in result[0]:
-    exit(1)
diff --git a/python/examples/xpu/fit_a_line_xpu/test_server.py b/python/examples/xpu/fit_a_line_xpu/test_server.py
deleted file mode 100644
index a7b7e096..00000000
--- a/python/examples/xpu/fit_a_line_xpu/test_server.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# pylint: disable=doc-string-missing
-
-from paddle_serving_server.web_service import WebService
-import numpy as np
-
-
-class UciService(WebService):
-    def preprocess(self, feed=[], fetch=[]):
-        feed_batch = []
-        is_batch = True
-        new_data = np.zeros((len(feed), 1, 13)).astype("float32")
-        for i, ins in enumerate(feed):
-            nums = np.array(ins["x"]).reshape(1, 1, 13)
-            new_data[i] = nums
-        feed = {"x": new_data}
-        return feed, fetch, is_batch
-
-
-uci_service = UciService(name="uci")
-uci_service.load_model_config("uci_housing_model")
-uci_service.prepare_server(
-    workdir="workdir", port=9393, use_lite=True, use_xpu=True, ir_optim=True)
-uci_service.run_rpc_service()
-uci_service.run_web_service()
-- 
GitLab