提交 5bec34d9 编写于 作者: B barriery

fix cpu

上级 37763f55
......@@ -13,9 +13,9 @@
# limitations under the License.
# pylint: disable=doc-string-missing
try:
from paddle_serving_server import pipeline
except ImportError:
from paddle_serving_server_gpu import pipeline
except ImportError:
from paddle_serving_server import pipeline
import numpy as np
import logging
......
......@@ -11,8 +11,10 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from paddle_serving_server_gpu.web_service import DefaultPipelineWebService
try:
from paddle_serving_server_gpu.web_service import DefaultPipelineWebService
except ImportError:
from paddle_serving_server.web_service import DefaultPipelineWebService
import logging
import numpy as np
......@@ -40,6 +42,9 @@ class UciService(DefaultPipelineWebService):
# Demo entry point: configure and launch the UCI-housing pipeline web service.
# NOTE(review): this span is a diff hunk with the +/- markers and indentation
# stripped — the bare `set_gpus("0")` call below is the REMOVED line and the
# try/except that follows is its replacement (tolerate CPU-only builds where
# set_gpus is unsupported); both appear only because the page merged old and
# new lines. Verify against the real file before relying on this text.
uci_service = UciService(name="uci")
uci_service.init_separator()
uci_service.load_model_config("./uci_housing_model")
uci_service.set_gpus("0")
try:
uci_service.set_gpus("0")
# Best-effort: on a CPU build set_gpus may raise; ignore and run on CPU.
except Exception:
pass
# Blocking call: starts the HTTP service on port 18080 and does not return.
uci_service.prepare_server(workdir="workdir", port=18080)
uci_service.run_service()
......@@ -159,6 +159,10 @@ class Server(object):
self.mkl_flag = False
self.model_config_paths = None # for multi-model in a workflow
def get_fetch_list(self):
    """Return the alias names of every fetch variable in the loaded model config."""
    return [fetch_var.alias_name for fetch_var in self.model_conf.fetch_var]
def set_max_concurrency(self, concurrency):
    """Record the maximum number of concurrent requests the server allows."""
    self.max_concurrency = concurrency
......
......@@ -197,6 +197,10 @@ class Server(object):
self.gpuid = 0
self.model_config_paths = None # for multi-model in a workflow
def get_fetch_list(self):
    """Return the alias names of every fetch variable in the loaded model config."""
    return [fetch_var.alias_name for fetch_var in self.model_conf.fetch_var]
def set_max_concurrency(self, concurrency):
    """Record the maximum number of concurrent requests the server allows."""
    self.max_concurrency = concurrency
......
......@@ -16,9 +16,9 @@ import os
import logging
import multiprocessing
try:
from paddle_serving_server import OpMaker, OpSeqMaker, Server
except ImportError:
from paddle_serving_server_gpu import OpMaker, OpSeqMaker, Server
except ImportError:
from paddle_serving_server import OpMaker, OpSeqMaker, Server
from .util import AvailablePortGenerator, NameGenerator
_LOGGER = logging.getLogger(__name__)
......
......@@ -63,8 +63,10 @@ packages=['paddle_serving_client',
'paddle_serving_client.metric',
'paddle_serving_client.utils',
'paddle_serving_client.pipeline',
'paddle_serving_client.pipeline.proto']
package_data={'paddle_serving_client': ['serving_client.so','lib/*'],}
'paddle_serving_client.pipeline.proto',
'paddle_serving_client.pipeline.gateway',
'paddle_serving_client.pipeline.gateway.proto']
package_data={'paddle_serving_client': ['serving_client.so', 'lib/*', 'pipeline/gateway/libproxy_server.so'],}
package_dir={'paddle_serving_client':
'${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_client',
'paddle_serving_client.proto':
......@@ -78,7 +80,11 @@ package_dir={'paddle_serving_client':
'paddle_serving_client.pipeline':
'${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_client/pipeline',
'paddle_serving_client.pipeline.proto':
'${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_client/pipeline/proto'}
'${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_client/pipeline/proto',
'paddle_serving_client.pipeline.gateway':
'${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_client/pipeline/gateway',
'paddle_serving_client.pipeline.gateway.proto':
'${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_client/pipeline/gateway/proto'}
setup(
name='paddle-serving-client',
......
......@@ -35,7 +35,9 @@ REQUIRED_PACKAGES = [
packages=['paddle_serving_server',
'paddle_serving_server.proto',
'paddle_serving_server.pipeline',
'paddle_serving_server.pipeline.proto']
'paddle_serving_server.pipeline.proto',
'paddle_serving_server.pipeline.gateway',
'paddle_serving_server.pipeline.gateway.proto']
package_dir={'paddle_serving_server':
'${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_server',
......@@ -44,7 +46,13 @@ package_dir={'paddle_serving_server':
'paddle_serving_server.pipeline':
'${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_server/pipeline',
'paddle_serving_server.pipeline.proto':
'${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_server/pipeline/proto'}
'${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_server/pipeline/proto',
'paddle_serving_server.pipeline.gateway':
'${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_server/pipeline/gateway',
'paddle_serving_server.pipeline.gateway.proto':
'${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_server/pipeline/gateway/proto'}
package_data={'paddle_serving_server': ['pipeline/gateway/libproxy_server.so'],}
setup(
name='paddle-serving-server',
......@@ -56,6 +64,7 @@ setup(
author_email='guru4elephant@gmail.com',
install_requires=REQUIRED_PACKAGES,
packages=packages,
package_data=package_data,
package_dir=package_dir,
# PyPI package information.
classifiers=[
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册