// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto2";

option java_multiple_files = true;
option java_package = "io.paddle.serving.grpc";
option java_outer_classname = "ServingProto";

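// A single tensor. The payload travels either as raw bytes in `data` or in
// one of the typed repeated fields; `elem_type` identifies the element type,
// `shape` gives the dimensions, and `lod` carries Paddle's level-of-details
// (LoD) information for variable-length data.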
message Tensor {
  optional bytes data = 1;
  repeated int32 int_data = 2;
  repeated int64 int64_data = 3;
  repeated float float_data = 4;
  optional int32 elem_type = 5;
  repeated int32 shape = 6;
  repeated int32 lod = 7; // LoD info; currently only set for fetch tensors
};

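// One inference instance: one tensor per feed variable (FeedInst) or per
// fetched variable (FetchInst).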
message FeedInst { repeated Tensor tensor_array = 1; };

message FetchInst { repeated Tensor tensor_array = 1; };

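// A batch of feed instances plus the names of the variables to feed and to
// fetch. `is_python` appears to mark requests packed by the Python client so
// the server can decode the raw tensor bytes accordingly.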
message InferenceRequest {
  repeated FeedInst insts = 1;
  repeated string feed_var_names = 2;
  repeated string fetch_var_names = 3;
  required bool is_python = 4 [ default = false ];
};

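// One ModelOutput per engine, an error code, an optional tag, and an optional
// profile string (presumably serialized timing information).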
message InferenceResponse {
  repeated ModelOutput outputs = 1;
  optional string tag = 2;
  required int32 err_code = 3;
  optional string profile = 4;
};

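// The fetch instances produced by a single engine (model).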
message ModelOutput {
  repeated FetchInst insts = 1;
  optional string engine_name = 2;
}

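// Updates the service timeout, in milliseconds.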
message SetTimeoutRequest { required int32 timeout_ms = 1; }

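// Generic response carrying only an error code.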
message SimpleResponse { required int32 err_code = 1; }

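// Retrieves the serialized client-side configuration, so a client can
// configure itself directly from the server.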
message GetClientConfigRequest {}

message GetClientConfigResponse { required string client_config_str = 1; }

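// gRPC service exposing model inference, timeout configuration, and client
// configuration retrieval.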
service MultiLangGeneralModelService {
  rpc Inference(InferenceRequest) returns (InferenceResponse) {}
  rpc SetTimeout(SetTimeoutRequest) returns (SimpleResponse) {}
  rpc GetClientConfig(GetClientConfigRequest)
      returns (GetClientConfigResponse) {}
};
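
// A minimal client sketch using the Python stubs that protoc/grpcio-tools
// would generate from this file; the module names, server address, variable
// names ("x", "y") and elem_type value are illustrative assumptions only.
//
//   import grpc
//   import multi_lang_general_model_service_pb2 as pb2
//   import multi_lang_general_model_service_pb2_grpc as pb2_grpc
//
//   channel = grpc.insecure_channel("127.0.0.1:9393")  # hypothetical address
//   stub = pb2_grpc.MultiLangGeneralModelServiceStub(channel)
//   tensor = pb2.Tensor(float_data=[1.0, 2.0], shape=[1, 2],
//                       elem_type=1)                    # elem_type value assumed
//   request = pb2.InferenceRequest(
//       insts=[pb2.FeedInst(tensor_array=[tensor])],
//       feed_var_names=["x"], fetch_var_names=["y"], is_python=False)
//   response = stub.Inference(request)
//   print(response.err_code)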