Commit 1fb339d0 authored by barriery

fix cpu

Parent 23210349
@@ -13,9 +13,9 @@
 # limitations under the License.
 # pylint: disable=doc-string-missing
 try:
+    from paddle_serving_server import pipeline
+except ImportError:
     from paddle_serving_server_gpu import pipeline
-except ImportError:
-    from paddle_serving_server import pipeline
 import numpy as np
 import logging
...
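The reordered fallback above is the heart of the fix: the CPU wheel is tried first, so an environment without paddle_serving_server_gpu imports cleanly. A minimal sketch of the same pattern follows; the PKG marker is an illustrative addition, not part of the commit.

# Sketch of the import fallback used throughout this commit; PKG is an
# added marker showing which wheel actually satisfied the import.
try:
    from paddle_serving_server import pipeline  # CPU wheel
    PKG = "paddle_serving_server"
except ImportError:
    from paddle_serving_server_gpu import pipeline  # GPU wheel fallback
    PKG = "paddle_serving_server_gpu"

print("pipeline loaded from", PKG)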
@@ -11,8 +11,10 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from paddle_serving_server_gpu.web_service import DefaultPipelineWebService
+try:
+    from paddle_serving_server_gpu.web_service import DefaultPipelineWebService
+except ImportError:
+    from paddle_serving_server.web_service import DefaultPipelineWebService
 import logging
 import numpy as np
@@ -40,6 +42,9 @@ class UciService(DefaultPipelineWebService):
 uci_service = UciService(name="uci")
 uci_service.init_separator()
 uci_service.load_model_config("./uci_housing_model")
-uci_service.set_gpus("0")
+try:
+    uci_service.set_gpus("0")
+except Exception:
+    pass
 uci_service.prepare_server(workdir="workdir", port=18080)
 uci_service.run_service()
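Guarding set_gpus("0") with try/except lets this one example script run against either wheel: on the CPU build the call fails and is simply skipped. A hypothetical helper expressing the same idea (maybe_set_gpus is illustrative, not from the commit):

# Hypothetical wrapper for the guarded call above: apply a GPU setting
# when the installed build supports it, otherwise silently run on CPU.
def maybe_set_gpus(service, gpus="0"):
    try:
        service.set_gpus(gpus)
        return True   # GPU build: setting applied
    except Exception:
        return False  # CPU build: no usable set_gpus, stay on CPU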
@@ -159,6 +159,10 @@ class Server(object):
         self.mkl_flag = False
         self.model_config_paths = None  # for multi-model in a workflow
 
+    def get_fetch_list(self):
+        fetch_names = [var.alias_name for var in self.model_conf.fetch_var]
+        return fetch_names
+
     def set_max_concurrency(self, concurrency):
         self.max_concurrency = concurrency
...
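The new get_fetch_list() reads the alias names of the fetch variables out of the loaded model config, so callers no longer need to hard-code them. A sketch of the intended use, assuming a Server set up as in the uci example; the 'price' alias is illustrative:

# Assumed usage: load_model_config() populates self.model_conf, after
# which get_fetch_list() returns the fetch variable aliases.
from paddle_serving_server import Server

server = Server()
server.load_model_config("./uci_housing_model")
print(server.get_fetch_list())  # e.g. ['price'] for the uci housing model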
@@ -197,6 +197,10 @@ class Server(object):
         self.gpuid = 0
         self.model_config_paths = None  # for multi-model in a workflow
 
+    def get_fetch_list(self):
+        fetch_names = [var.alias_name for var in self.model_conf.fetch_var]
+        return fetch_names
+
     def set_max_concurrency(self, concurrency):
         self.max_concurrency = concurrency
...
@@ -16,9 +16,9 @@ import os
 import logging
 import multiprocessing
 try:
+    from paddle_serving_server import OpMaker, OpSeqMaker, Server
+except ImportError:
     from paddle_serving_server_gpu import OpMaker, OpSeqMaker, Server
-except ImportError:
-    from paddle_serving_server import OpMaker, OpSeqMaker, Server
 from .util import AvailablePortGenerator, NameGenerator
 
 _LOGGER = logging.getLogger(__name__)
...
@@ -63,8 +63,10 @@ packages=['paddle_serving_client',
           'paddle_serving_client.metric',
           'paddle_serving_client.utils',
           'paddle_serving_client.pipeline',
-          'paddle_serving_client.pipeline.proto']
-package_data={'paddle_serving_client': ['serving_client.so','lib/*'],}
+          'paddle_serving_client.pipeline.proto',
+          'paddle_serving_client.pipeline.gateway',
+          'paddle_serving_client.pipeline.gateway.proto']
+package_data={'paddle_serving_client': ['serving_client.so', 'lib/*', 'pipeline/gateway/libproxy_server.so'],}
 package_dir={'paddle_serving_client':
              '${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_client',
              'paddle_serving_client.proto':
@@ -78,7 +80,11 @@ package_dir={'paddle_serving_client':
              'paddle_serving_client.pipeline':
              '${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_client/pipeline',
              'paddle_serving_client.pipeline.proto':
-             '${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_client/pipeline/proto'}
+             '${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_client/pipeline/proto',
+             'paddle_serving_client.pipeline.gateway':
+             '${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_client/pipeline/gateway',
+             'paddle_serving_client.pipeline.gateway.proto':
+             '${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_client/pipeline/gateway/proto'}
 
 setup(
     name='paddle-serving-client',
...
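Both setup scripts rely on package_data to copy the non-Python gateway library into the wheel; files not listed there are silently dropped at build time. A reduced, self-contained sketch of the mechanism, where example_pkg and its paths are placeholders rather than the real package:

# Minimal standalone illustration of the packaging change: package_data
# maps a package name to non-Python files that must ship inside the wheel.
from setuptools import setup

setup(
    name='example-pkg',  # placeholder, not the real package name
    version='0.0.1',
    packages=['example_pkg',
              'example_pkg.pipeline',
              'example_pkg.pipeline.gateway'],
    package_data={'example_pkg': ['pipeline/gateway/libproxy_server.so']},
)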
@@ -35,7 +35,9 @@ REQUIRED_PACKAGES = [
 packages=['paddle_serving_server',
           'paddle_serving_server.proto',
           'paddle_serving_server.pipeline',
-          'paddle_serving_server.pipeline.proto']
+          'paddle_serving_server.pipeline.proto',
+          'paddle_serving_server.pipeline.gateway',
+          'paddle_serving_server.pipeline.gateway.proto']
 package_dir={'paddle_serving_server':
              '${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_server',
@@ -44,7 +46,13 @@ package_dir={'paddle_serving_server':
              'paddle_serving_server.pipeline':
              '${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_server/pipeline',
              'paddle_serving_server.pipeline.proto':
-             '${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_server/pipeline/proto'}
+             '${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_server/pipeline/proto',
+             'paddle_serving_server.pipeline.gateway':
+             '${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_server/pipeline/gateway',
+             'paddle_serving_server.pipeline.gateway.proto':
+             '${PADDLE_SERVING_BINARY_DIR}/python/paddle_serving_server/pipeline/gateway/proto'}
+package_data={'paddle_serving_server': ['pipeline/gateway/libproxy_server.so'],}
 
 setup(
     name='paddle-serving-server',
@@ -56,6 +64,7 @@ setup(
     author_email='guru4elephant@gmail.com',
     install_requires=REQUIRED_PACKAGES,
     packages=packages,
+    package_data=package_data,
     package_dir=package_dir,
     # PyPI package information.
     classifiers=[
...
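One way to confirm the new package_data took effect is to look for the shared object next to the installed package. This check is an assumption about the install layout, not part of the commit:

# Post-install sanity check (assumes the server wheel is installed):
# the gateway proxy library should sit inside the installed package.
import os
import paddle_serving_server

pkg_dir = os.path.dirname(paddle_serving_server.__file__)
so_path = os.path.join(pkg_dir, "pipeline", "gateway", "libproxy_server.so")
print(so_path, "present:", os.path.exists(so_path))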