Commit b6d3a797 authored by HexToString

fix client ip

Parent d870d335
@@ -98,7 +98,7 @@ java -cp paddle-serving-sdk-java-examples-0.0.1-jar-with-dependencies.jar Pipeli
 ### Notes
-1. In the examples the port is 9393 and the IP defaults to 0.0.0.0, i.e. the local machine; make sure the IP and port match the server side.
+1. In the examples the port is 9393 and the IP defaults to 127.0.0.1, i.e. the local machine; make sure the IP and port match the server side.
 2. Serving now provides a Pipeline mode (see [Pipeline Serving](../doc/PIPELINE_SERVING_CN.md) for details); the Pipeline Serving Client for Java has been released.
......
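As a minimal illustration of note 1 above (not part of this commit), the sketch below wires the Java client to a server assumed to be listening on 127.0.0.1:9393; the class name, config path, model, and server command are placeholders.

```java
import java.util.Arrays;
import java.util.List;

// Minimal sketch: point the client at the same address the server was started on.
// Assumes a local server such as:
//   python3 -m paddle_serving_server.serve --model uci_housing_model --port 9393
// (model name and command line are illustrative, not taken from this commit).
public class FitALineClientSketch {
    public static void main(String[] args) {
        String model_config_path = "uci_housing_client/serving_client_conf.prototxt"; // placeholder path
        List<String> fetch = Arrays.asList("price");

        Client client = new Client();
        client.setIP("127.0.0.1"); // loopback, i.e. the same machine as the server
        client.setPort("9393");    // must match the server's listening port
        client.loadClientConfig(model_config_path);
        // Building feed_data is model-specific and omitted here; once it is prepared,
        // inference follows the examples below:
        // String result = client.predict(feed_data, fetch, true, 0);
    }
}
```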
@@ -25,7 +25,7 @@ public class PaddleServingClientExample {
         List<String> fetch = Arrays.asList("price");
         Client client = new Client();
-        client.setIP("0.0.0.0");
+        client.setIP("127.0.0.1");
         client.setPort("9393");
         client.loadClientConfig(model_config_path);
         String result = client.predict(feed_data, fetch, true, 0);
@@ -49,7 +49,7 @@ public class PaddleServingClientExample {
         Client client = new Client();
         // Note: when crossing Docker containers, run with --net=host or access the other container's IP directly
-        client.setIP("0.0.0.0");
+        client.setIP("127.0.0.1");
         client.setPort("9393");
         client.set_http_proto(false);
         client.loadClientConfig(model_config_path);
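For the cross-container note in the hunk above, a hedged fragment: it assumes the server runs in another Docker container whose IP (here 172.17.0.3) was looked up with `docker inspect`; when both containers run with `--net=host`, 127.0.0.1 still works. `Client` and `model_config_path` are taken from the surrounding example.

```java
// Fragment only; the container IP below is a placeholder, not a value from this commit.
Client client = new Client();
client.setIP("172.17.0.3");   // IP of the serving container, or 127.0.0.1 with --net=host
client.setPort("9393");
client.set_http_proto(false); // carried over unchanged from the example above
client.loadClientConfig(model_config_path);
```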
@@ -73,7 +73,7 @@ public class PaddleServingClientExample {
         List<String> fetch = Arrays.asList("price");
         Client client = new Client();
-        client.setIP("0.0.0.0");
+        client.setIP("127.0.0.1");
         client.setPort("9393");
         client.loadClientConfig(model_config_path);
         client.set_use_grpc_client(true);
@@ -97,7 +97,7 @@ public class PaddleServingClientExample {
         List<String> fetch = Arrays.asList("price");
         Client client = new Client();
-        client.setIP("0.0.0.0");
+        client.setIP("127.0.0.1");
         client.setPort("9393");
         client.loadClientConfig(model_config_path);
         client.use_key(keyFilePath);
@@ -125,7 +125,7 @@ public class PaddleServingClientExample {
         List<String> fetch = Arrays.asList("price");
         Client client = new Client();
-        client.setIP("0.0.0.0");
+        client.setIP("127.0.0.1");
         client.setPort("9393");
         client.loadClientConfig(model_config_path);
         client.set_request_compress(true);
@@ -176,7 +176,7 @@ public class PaddleServingClientExample {
         }};
         List<String> fetch = Arrays.asList("save_infer_model/scale_0.tmp_0");
         Client client = new Client();
-        client.setIP("0.0.0.0");
+        client.setIP("127.0.0.1");
         client.setPort("9393");
         client.loadClientConfig(model_config_path);
         String result = client.predict(feed_data, fetch, true, 0);
@@ -198,7 +198,7 @@ public class PaddleServingClientExample {
         }};
         List<String> fetch = Arrays.asList("pooled_output");
         Client client = new Client();
-        client.setIP("0.0.0.0");
+        client.setIP("127.0.0.1");
         client.setPort("9393");
         client.loadClientConfig(model_config_path);
         String result = client.predict(feed_data, fetch, true, 0);
@@ -268,7 +268,7 @@ public class PaddleServingClientExample {
         }};
         List<String> fetch = Arrays.asList("prob");
         Client client = new Client();
-        client.setIP("0.0.0.0");
+        client.setIP("127.0.0.1");
         client.setPort("9393");
         client.loadClientConfig(model_config_path);
         String result = client.predict(feed_data, fetch, true, 0);
......
@@ -134,7 +134,7 @@ public class Client {
         feedTensorLen_ = null;
         feedNameToIndex_ = null;
         timeoutS_ = 200000;
-        ip = "0.0.0.0";
+        ip = "127.0.0.1";
         port = "9393";
         serverPort = "9393";
         serviceName = "/GeneralModelService/inference";
......
@@ -75,7 +75,7 @@ def data_bytes_number(datalist):
 # or call grpc_client_predict() directly
 class HttpClient(object):
     def __init__(self,
-                 ip="0.0.0.0",
+                 ip="127.0.0.1",
                  port="9393",
                  service_name="/GeneralModelService/inference"):
         self.feed_names_ = []
......
@@ -37,7 +37,7 @@ import socket
 def port_is_available(port):
     with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
         sock.settimeout(2)
-        result = sock.connect_ex(('0.0.0.0', port))
+        result = sock.connect_ex(('127.0.0.1', port))
     if result != 0:
         return True
     else:
......
@@ -537,7 +537,7 @@ class Server(object):
     def port_is_available(self, port):
         with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
             sock.settimeout(2)
-            result = sock.connect_ex(('0.0.0.0', port))
+            result = sock.connect_ex(('127.0.0.1', port))
         if result != 0:
             return True
         else:
......
@@ -33,7 +33,7 @@ from paddle_serving_server.serve import format_gpu_to_strlist
 def port_is_available(port):
     with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
         sock.settimeout(2)
-        result = sock.connect_ex(('0.0.0.0', port))
+        result = sock.connect_ex(('127.0.0.1', port))
     if result != 0:
         return True
     else:
......
@@ -39,7 +39,7 @@ class AvailablePortGenerator(object):
     def port_is_available(port):
         with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
             sock.settimeout(2)
-            result = sock.connect_ex(('0.0.0.0', port))
+            result = sock.connect_ex(('127.0.0.1', port))
         if result != 0:
             return True
         else:
......