未验证 提交 eb200541 编写于 作者: D Dong Daxiang 提交者: GitHub

Merge pull request #399 from guru4elephant/fix_go_problem

Fix go problem
......@@ -21,37 +21,10 @@ wget https://paddle-serving.bj.bcebos.com/data/text_classification/imdb_serving_
tar -xzf imdb_serving_example.tar.gz
```
### Server Side Code
```python
# test_server_go.py
import os
import sys
from paddle_serving_server import OpMaker
from paddle_serving_server import OpSeqMaker
from paddle_serving_server import Server
op_maker = OpMaker()
read_op = op_maker.create('general_text_reader')
general_infer_op = op_maker.create('general_infer')
general_response_op = op_maker.create('general_text_response')
op_seq_maker = OpSeqMaker()
op_seq_maker.add_op(read_op)
op_seq_maker.add_op(general_infer_op)
op_seq_maker.add_op(general_response_op)
server = Server()
server.set_op_sequence(op_seq_maker.get_op_sequence())
server.load_model_config(sys.argv[1])
server.prepare_server(workdir="work_dir1", port=9292, device="cpu")
server.run_server()
```
### Start Server
``` shell
python test_server_go.py ./serving_server_model/ 9292
python -m paddle_serving_server.serve --model ./serving_server_model/ --port 9292
```
### Client code example
......
......@@ -20,37 +20,10 @@ wget https://paddle-serving.bj.bcebos.com/data/text_classification/imdb_serving_
tar -xzf imdb_serving_example.tar.gz
```
### 服务器端代码
```python
# test_server_go.py
import os
import sys
from paddle_serving_server import OpMaker
from paddle_serving_server import OpSeqMaker
from paddle_serving_server import Server
op_maker = OpMaker()
read_op = op_maker.create('general_text_reader')
general_infer_op = op_maker.create('general_infer')
general_response_op = op_maker.create('general_text_response')
op_seq_maker = OpSeqMaker()
op_seq_maker.add_op(read_op)
op_seq_maker.add_op(general_infer_op)
op_seq_maker.add_op(general_response_op)
server = Server()
server.set_op_sequence(op_seq_maker.get_op_sequence())
server.load_model_config(sys.argv[1])
server.prepare_server(workdir="work_dir1", port=9292, device="cpu")
server.run_server()
```
### 启动服务器
```shell
python test_server_go.py ./serving_server_model/ 9292
python -m paddle_serving_server.serve --model ./serving_server_model/ --port 9292
```
### 客户端代码示例
......
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
# Directory containing conf.py and the .rst/.md sources.
SOURCEDIR = source
# Output directory for rendered docs (e.g. build/html).
BUILDDIR = build
# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
# Declare phony so files named "help"/"Makefile" never shadow these targets.
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
# Depending on Makefile keeps this pattern rule from matching the
# Makefile itself when make re-evaluates it.
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
:github_url: https://github.com/PaddlePaddle/Serving
.. mdinclude:: md/logo.md
Paddle Serving
==============
.. toctree::
:maxdepth: 1
:caption: Readme
:hidden:
.. _instruction: instruction.html
此差异已折叠。
sphinx==2.1.0
mistune
sphinx_rtd_theme
paddlepaddle>=1.6
......@@ -28,7 +28,7 @@ func main() {
var config_file_path string
config_file_path = os.Args[1]
handle := serving_client.LoadModelConfig(config_file_path)
handle = serving_client.Connect("127.0.0.1", "9393", handle)
handle = serving_client.Connect("127.0.0.1", "9292", handle)
test_file_path := os.Args[2]
fi, err := os.Open(test_file_path)
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册