Commit 3201036a authored by MRXLT

fix demo

Parent 07d01908
......@@ -25,7 +25,7 @@ from paddlehub.common.logger import logger
import socket
from paddle_serving_client import Client
from paddle_serving_client.utils import benchmark_args
from paddle_serving_app import ChineseBertReader
from paddle_serving_app.reader import ChineseBertReader
args = benchmark_args()
......
......@@ -16,7 +16,7 @@
import sys
import time
import requests
from paddle_serving_app import IMDBDataset
from paddle_serving_app.reader import IMDBDataset
from paddle_serving_client import Client
from paddle_serving_client.utils import MultiThreadRunner
from paddle_serving_client.utils import benchmark_args
......
......@@ -13,7 +13,7 @@
# limitations under the License.
# pylint: disable=doc-string-missing
from paddle_serving_client import Client
from paddle_serving_app import IMDBDataset
from paddle_serving_app.reader import IMDBDataset
import sys
client = Client()
......
......@@ -14,7 +14,7 @@
# pylint: disable=doc-string-missing
from paddle_serving_server.web_service import WebService
from paddle_serving_app import IMDBDataset
from paddle_serving_app.reader import IMDBDataset
import sys
......
......@@ -14,7 +14,7 @@
from paddle_serving_app.reader import Sequential, File2Image, Resize, CenterCrop
from paddle_serving_app.reader import RGB2BGR, Transpose, Div, Normalize
from paddle_serving_app import Debugger
from paddle_serving_app.local_predict import Debugger
import sys
debugger = Debugger()
......
wget https://paddle-serving.bj.bcebos.com/paddle_hub_models/text/SentimentAnalysis/senta_bilstm.tar.gz --no-check-certificate
tar -xzvf senta_bilstm.tar.gz
wget https://paddle-serving.bj.bcebos.com/paddle_hub_models/text/LexicalAnalysis/lac_model.tar.gz --no-check-certificate
wget https://paddle-serving.bj.bcebos.com/paddle_hub_models/text/LexicalAnalysis/lac.tar.gz --no-check-certificate
tar -xzvf lac_model.tar.gz
wget https://paddle-serving.bj.bcebos.com/reader/lac/lac_dict.tar.gz --no-check-certificate
tar -xzvf lac_dict.tar.gz
......
......@@ -14,7 +14,7 @@
from paddle_serving_server_gpu.web_service import WebService
from paddle_serving_client import Client
from paddle_serving_app import LACReader, SentaReader
from paddle_serving_app.reader import LACReader, SentaReader
import os
import sys
from multiprocessing import Process
......
......@@ -158,7 +158,7 @@ Therefore, a local prediction tool is built into the paddle_serving_app, which i
Taking [fit_a_line prediction service](../examples/fit_a_line) as an example, the following code can be used to run local prediction.
```python
from paddle_serving_app import Debugger
from paddle_serving_app.local_predict import Debugger
import numpy as np
debugger = Debugger()
......
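For readers tracking this import change, below is a minimal sketch (not part of this commit) of how the relocated `Debugger` from `paddle_serving_app.local_predict` might be used for local prediction on the fit_a_line model. The model directory name (`uci_housing_model`), the feed name (`x`), the fetch name (`price`), and the exact `load_model_config`/`predict` signatures are assumptions based on the fit_a_line example and the surrounding snippet, and may need adjusting for your version.
```python
# Sketch only: names and signatures below are assumptions, not taken from this diff.
from paddle_serving_app.local_predict import Debugger
import numpy as np

debugger = Debugger()
# assumed: the exported fit_a_line (uci_housing) inference model directory
debugger.load_model_config("uci_housing_model", gpu=False)

# one sample of the 13-dimensional UCI housing features
data = np.array(
    [0.0137, -0.1136, 0.2553, -0.0692, 0.0582, -0.0727,
     -0.1583, -0.0584, 0.6283, 0.4919, 0.1856, 0.0795, -0.2686],
    dtype=np.float32)

# assumed feed/fetch variable names from the fit_a_line example
fetch_map = debugger.predict(feed={"x": data}, fetch=["price"])
print(fetch_map)
```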
......@@ -147,7 +147,7 @@ The server prediction op of the Paddle Serving framework uses the Paddle inference framework; when deploying
Taking the [fit_a_line prediction service](../examples/fit_a_line) as an example, local prediction can be run with the following code.
```python
from paddle_serving_app import Debugger
from paddle_serving_app.local_predict import Debugger
import numpy as np
debugger = Debugger()
......