inference.proto 1.19 KB
Newer Older
limm's avatar
limm committed
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
syntax = "proto3";

option java_multiple_files = true;
option java_package = "mmdeploy.snpe";
option java_outer_classname = "SNPEWrapper";
option objc_class_prefix = "SNPE";

package mmdeploy;

// The inference service definition: wraps a single SNPE model handle
// (init -> query outputs -> run inference -> destroy).
service Inference {

  // Liveness / connectivity check.
  // NOTE(review): using Empty as a request type prevents adding request
  // fields later without breaking the signature; per-method request
  // messages (EchoRequest, etc.) are preferred for new RPCs.
  rpc Echo(Empty) returns (Reply) {}

  // Initialize the model handle from a serialized model (see Model).
  rpc Init(Model) returns (Reply) {}

  // Get the output tensor names of the loaded model.
  rpc OutputNames(Empty) returns (Names) {}

  // Run inference on the given input tensors.
  // NOTE(review): an RPC sharing its name with the enclosing service is
  // legal but generates awkward identifiers in some languages; renaming
  // now would be source-breaking for existing clients, so left as-is.
  rpc Inference(TensorList) returns (Reply) {}

  // Destroy the model handle and release its resources.
  rpc Destroy(Empty) returns (Reply) {}
}

// Serialized model plus load-time configuration, sent to Init().
message Model {
  // Optional human-readable model name.
  optional string name = 1;
  // Raw model weights blob (presumably an SNPE .dlc binary — TODO confirm
  // with the server implementation). Note protobuf messages are capped at
  // 2 GB and many runtimes default-limit to 64 MB; very large models may
  // need chunked/streaming transfer.
  bytes weights = 2;
  // Target compute device for execution.
  // NOTE(review): proto3 convention calls for a DEVICE_UNSPECIFIED = 0
  // zero value and DEVICE_-prefixed names (unprefixed values can collide
  // across enums in the same scope); changing them now would break both
  // the wire default and generated code, so left as-is.
  enum Device {
    CPU = 0;  // also the implicit default when the field is unset
    GPU = 1;
    DSP = 2;
  }
  optional Device device = 3;
}

// Placeholder for RPCs that take or return no data.
// Local redefinition instead of importing google/protobuf/empty.proto,
// which keeps this file dependency-free.
// https://stackoverflow.com/questions/31768665/can-i-define-a-grpc-call-with-a-null-request-or-response
message Empty {}

// A single named tensor exchanged with the inference service.
message Tensor {
  // Tensor name, matching a model input/output name (see OutputNames()).
  string name = 1;

  // Element datatype as a free-form string; the accepted vocabulary
  // (e.g. "float32") is not defined in this file — TODO confirm with the
  // server implementation.
  optional string dtype = 2;

  // Raw tensor contents. Byte layout/endianness is determined by dtype
  // and the server implementation, not specified here.
  bytes data = 3;

  // Tensor dimensions; ordering convention (e.g. NCHW vs NHWC) is not
  // specified here — confirm against the model.
  repeated int32 shape = 4;
}

// An ordered collection of tensors; the request payload of Inference().
message TensorList {
  // The tensors themselves; matched to model inputs by Tensor.name.
  repeated Tensor data = 1;
}

// Generic response shared by Echo/Init/Inference/Destroy, carrying a
// status code, a diagnostic message, and any output tensors.
// NOTE(review): sharing one response message across unrelated RPCs makes
// later divergence a breaking change; per-method responses are preferred
// for new RPCs.
message Reply {
  // Status code; the success/error-code convention is not defined in
  // this file — TODO confirm with the server implementation.
  int32 status = 1;
  // Human-readable status or diagnostic text.
  string info = 2;
  // Output tensors — presumably populated by Inference() and empty for
  // the other RPCs; verify against the server implementation.
  repeated Tensor data = 3;
}

// A list of tensor names; the response of OutputNames().
message Names {
  // Output tensor names of the loaded model.
  repeated string names = 1;
}