Commit f678d788 authored by F felixhjh

When paddle is not installed, stop the env check process and prompt the user to install paddle

Parent b8ebeb2d
@@ -47,6 +47,7 @@ def run_test_cases(cases_list, case_type):
         elif res == 1:
             if case_name == "inference":
                 print("{} {} environment running failure. Please refer to https://www.paddlepaddle.org.cn/install/quick?docurl=/documentation/docs/zh/install/pip/linux-pip.html to configure environment".format(case_type, case_name))
+                os._exit(0)
             else:
                 print("{} {} environment running failure, if you need this environment, please refer to https://github.com/PaddlePaddle/Serving/blob/HEAD/doc/Compile_CN.md to configure environment".format(case_type, case_name))
...
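For context, the hunk above adds `os._exit(0)` so that a failed "inference" check stops the whole environment check instead of letting the remaining cases run without paddle. Below is a minimal, self-contained sketch of that pattern; `run_env_check` and `check_fn` are hypothetical names used only for illustration, not the repository's actual API, and the messages are shortened.

import os

def run_env_check(case_name, case_type, check_fn):
    # Hypothetical: check_fn() returns 0 on success, 1 on failure.
    res = check_fn()
    if res == 1:
        if case_name == "inference":
            # paddle itself is missing, so nothing else can pass either:
            # print the install hint and abort the whole check process.
            print("{} {} environment running failure. Please install paddle first: "
                  "https://www.paddlepaddle.org.cn/install/quick".format(case_type, case_name))
            os._exit(0)
        else:
            # other environments are optional; report and keep going
            print("{} {} environment running failure, see "
                  "https://github.com/PaddlePaddle/Serving/blob/HEAD/doc/Compile_CN.md".format(
                      case_type, case_name))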
@@ -8,10 +8,7 @@ import sys
 from paddle_serving_client import Client
 from paddle_serving_client.httpclient import HttpClient
-from paddle_serving_client.io import inference_model_to_serving
-from paddle_serving_app.reader import SegPostprocess
 from paddle_serving_app.reader import *
-import paddle.inference as paddle_infer
 from util import *
@@ -31,6 +28,11 @@ class TestFitALine(object):
         self.serving_util.release()

     def get_truth_val_by_inference(self):
+        try:
+            import paddle.inference as paddle_infer
+        except:
+            # when paddle is not installed, directly return
+            return
         data = np.array(
             [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795,
              -0.0332]).astype("float32")[np.newaxis, :]
...
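The change to `get_truth_val_by_inference` moves the `paddle.inference` import from module level into the method and returns early when paddle is unavailable, so the test module can still be imported and the env check can report a clear message instead of crashing on import. The sketch below is a stripped-down, standalone version of that guard (a plain function instead of a test method, with a shortened input vector); it is an illustration under those assumptions, not the test's full implementation.

import numpy as np

def get_truth_val_by_inference():
    # Import paddle lazily so this module can be loaded without paddle installed.
    try:
        import paddle.inference as paddle_infer
    except ImportError:
        # paddle is not installed: skip the ground-truth computation entirely
        return None
    # Shortened sample input; the real test uses the full 13-feature vector.
    data = np.array([0.0137, -0.1136, 0.2553]).astype("float32")[np.newaxis, :]
    # The real test would now build a paddle_infer.Config, create a predictor,
    # and run it on `data`; this sketch stops here and just returns the input.
    return data

With this guard, calling the method on a machine without paddle simply yields None rather than raising ImportError at import time, which matches the commit's intent.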
@@ -10,7 +10,6 @@ import sys
 from paddle_serving_server.pipeline import PipelineClient
 from paddle_serving_app.reader import CenterCrop, RGB2BGR, Transpose, Div, Normalize, RCNNPostprocess
 from paddle_serving_app.reader import Sequential, File2Image, Resize, Transpose, BGR2RGB, SegPostprocess
-import paddle.inference as paddle_infer
 from util import *
@@ -30,6 +29,11 @@ class TestUCIPipeline(object):
         self.serving_util.release()

     def get_truth_val_by_inference(self):
+        try:
+            import paddle.inference as paddle_infer
+        except:
+            # when paddle is not installed, directly return
+            return
         data = np.array(
             [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727, -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795,
              -0.0332]).astype("float32")[np.newaxis, :]
...