27
27
# Default symbol database; generated descriptors are registered into it below.
_sym_db = _symbol_database.Default()
28
28
29
29
30
+ from tensorflow_serving .apis import get_model_metadata_pb2 as tensorflow__serving_dot_apis_dot_get__model__metadata__pb2
30
31
from tensorflow_serving .apis import predict_pb2 as tensorflow__serving_dot_apis_dot_predict__pb2
31
32
32
33
33
34
# File descriptor for tensorflow_serving/apis/prediction_service.proto.
# NOTE: this module is protoc-generated; the serialized_pb blob below is the
# wire-format FileDescriptorProto and must match the .proto exactly — do not
# edit it by hand.  It declares service PredictionService with two unary-unary
# methods, Predict and GetModelMetadata, and depends on the predict and
# get_model_metadata descriptor modules imported above.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='tensorflow_serving/apis/prediction_service.proto',
  package='tensorflow.serving',
  syntax='proto3',
  serialized_pb=_b('\n0tensorflow_serving/apis/prediction_service.proto\x12\x12tensorflow.serving\x1a\x30tensorflow_serving/apis/get_model_metadata.proto\x1a%tensorflow_serving/apis/predict.proto2\xd6\x01\n\x11PredictionService\x12R\n\x07Predict\x12\".tensorflow.serving.PredictRequest\x1a#.tensorflow.serving.PredictResponse\x12m\n\x10GetModelMetadata\x12+.tensorflow.serving.GetModelMetadataRequest\x1a,.tensorflow.serving.GetModelMetadataResponseB\x03\xf8\x01\x01\x62\x06proto3'),
  dependencies=[
      tensorflow__serving_dot_apis_dot_get__model__metadata__pb2.DESCRIPTOR,
      tensorflow__serving_dot_apis_dot_predict__pb2.DESCRIPTOR,
  ])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
41
42
42
43
54
55
55
56
56
57
class PredictionServiceStub (object ):
57
- """PredictionService provides access to machine-learned models loaded by
58
+ """open source marker; do not remove
59
+ PredictionService provides access to machine-learned models loaded by
58
60
model_servers.
59
61
"""
60
62
@@ -69,10 +71,16 @@ def __init__(self, channel):
69
71
request_serializer = tensorflow__serving_dot_apis_dot_predict__pb2 .PredictRequest .SerializeToString ,
70
72
response_deserializer = tensorflow__serving_dot_apis_dot_predict__pb2 .PredictResponse .FromString ,
71
73
)
74
+ self .GetModelMetadata = channel .unary_unary (
75
+ '/tensorflow.serving.PredictionService/GetModelMetadata' ,
76
+ request_serializer = tensorflow__serving_dot_apis_dot_get__model__metadata__pb2 .GetModelMetadataRequest .SerializeToString ,
77
+ response_deserializer = tensorflow__serving_dot_apis_dot_get__model__metadata__pb2 .GetModelMetadataResponse .FromString ,
78
+ )
72
79
73
80
74
81
class PredictionServiceServicer (object ):
75
- """PredictionService provides access to machine-learned models loaded by
82
+ """open source marker; do not remove
83
+ PredictionService provides access to machine-learned models loaded by
76
84
model_servers.
77
85
"""
78
86
@@ -83,6 +91,13 @@ def Predict(self, request, context):
83
91
context .set_details ('Method not implemented!' )
84
92
raise NotImplementedError ('Method not implemented!' )
85
93
94
def GetModelMetadata(self, request, context):
  """GetModelMetadata - provides access to metadata for loaded models.
  """
  # Generated placeholder: a concrete servicer overrides this handler.
  status_code = grpc.StatusCode.UNIMPLEMENTED
  details = 'Method not implemented!'
  context.set_code(status_code)
  context.set_details(details)
  raise NotImplementedError(details)
100
+
86
101
87
102
def add_PredictionServiceServicer_to_server (servicer , server ):
88
103
rpc_method_handlers = {
@@ -91,6 +106,11 @@ def add_PredictionServiceServicer_to_server(servicer, server):
91
106
request_deserializer = tensorflow__serving_dot_apis_dot_predict__pb2 .PredictRequest .FromString ,
92
107
response_serializer = tensorflow__serving_dot_apis_dot_predict__pb2 .PredictResponse .SerializeToString ,
93
108
),
109
+ 'GetModelMetadata' : grpc .unary_unary_rpc_method_handler (
110
+ servicer .GetModelMetadata ,
111
+ request_deserializer = tensorflow__serving_dot_apis_dot_get__model__metadata__pb2 .GetModelMetadataRequest .FromString ,
112
+ response_serializer = tensorflow__serving_dot_apis_dot_get__model__metadata__pb2 .GetModelMetadataResponse .SerializeToString ,
113
+ ),
94
114
}
95
115
generic_handler = grpc .method_handlers_generic_handler (
96
116
'tensorflow.serving.PredictionService' , rpc_method_handlers )
@@ -103,13 +123,18 @@ class BetaPredictionServiceServicer(object):
103
123
It is recommended to use the GA API (classes and functions in this
104
124
file not marked beta) for all further purposes. This class was generated
105
125
only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
106
- """PredictionService provides access to machine-learned models loaded by
126
+ """open source marker; do not remove
127
+ PredictionService provides access to machine-learned models loaded by
107
128
model_servers.
108
129
"""
109
130
def Predict (self , request , context ):
110
131
"""Predict -- provides access to loaded TensorFlow model.
111
132
"""
112
133
context .code (beta_interfaces .StatusCode .UNIMPLEMENTED )
134
def GetModelMetadata(self, request, context):
  """GetModelMetadata - provides access to metadata for loaded models.
  """
  # Beta-API placeholder: signal UNIMPLEMENTED through the legacy beta context.
  unimplemented = beta_interfaces.StatusCode.UNIMPLEMENTED
  context.code(unimplemented)
113
138
114
139
115
140
class BetaPredictionServiceStub (object ):
@@ -118,14 +143,20 @@ class BetaPredictionServiceStub(object):
118
143
It is recommended to use the GA API (classes and functions in this
119
144
file not marked beta) for all further purposes. This class was generated
120
145
only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
121
- """PredictionService provides access to machine-learned models loaded by
146
+ """open source marker; do not remove
147
+ PredictionService provides access to machine-learned models loaded by
122
148
model_servers.
123
149
"""
124
150
def Predict (self , request , timeout , metadata = None , with_call = False , protocol_options = None ):
125
151
"""Predict -- provides access to loaded TensorFlow model.
126
152
"""
127
153
raise NotImplementedError ()
128
154
Predict .future = None
155
def GetModelMetadata(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
  """GetModelMetadata - provides access to metadata for loaded models.
  """
  # The dynamic beta stub supplies the real transport-backed implementation;
  # this placeholder only fixes the call signature.
  raise NotImplementedError()
# No future-returning form is generated for this beta method.
GetModelMetadata.future = None
129
160
130
161
131
162
def beta_create_PredictionService_server (servicer , pool = None , pool_size = None , default_timeout = None , maximum_timeout = None ):
@@ -135,12 +166,15 @@ def beta_create_PredictionService_server(servicer, pool=None, pool_size=None, de
135
166
file not marked beta) for all further purposes. This function was
136
167
generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
137
168
request_deserializers = {
169
+ ('tensorflow.serving.PredictionService' , 'GetModelMetadata' ): tensorflow__serving_dot_apis_dot_get__model__metadata__pb2 .GetModelMetadataRequest .FromString ,
138
170
('tensorflow.serving.PredictionService' , 'Predict' ): tensorflow__serving_dot_apis_dot_predict__pb2 .PredictRequest .FromString ,
139
171
}
140
172
response_serializers = {
173
+ ('tensorflow.serving.PredictionService' , 'GetModelMetadata' ): tensorflow__serving_dot_apis_dot_get__model__metadata__pb2 .GetModelMetadataResponse .SerializeToString ,
141
174
('tensorflow.serving.PredictionService' , 'Predict' ): tensorflow__serving_dot_apis_dot_predict__pb2 .PredictResponse .SerializeToString ,
142
175
}
143
176
method_implementations = {
177
+ ('tensorflow.serving.PredictionService' , 'GetModelMetadata' ): face_utilities .unary_unary_inline (servicer .GetModelMetadata ),
144
178
('tensorflow.serving.PredictionService' , 'Predict' ): face_utilities .unary_unary_inline (servicer .Predict ),
145
179
}
146
180
server_options = beta_implementations .server_options (request_deserializers = request_deserializers , response_serializers = response_serializers , thread_pool = pool , thread_pool_size = pool_size , default_timeout = default_timeout , maximum_timeout = maximum_timeout )
@@ -154,13 +188,17 @@ def beta_create_PredictionService_stub(channel, host=None, metadata_transformer=
154
188
file not marked beta) for all further purposes. This function was
155
189
generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
156
190
request_serializers = {
191
+ ('tensorflow.serving.PredictionService' , 'GetModelMetadata' ): tensorflow__serving_dot_apis_dot_get__model__metadata__pb2 .GetModelMetadataRequest .SerializeToString ,
157
192
('tensorflow.serving.PredictionService' , 'Predict' ): tensorflow__serving_dot_apis_dot_predict__pb2 .PredictRequest .SerializeToString ,
158
193
}
159
194
response_deserializers = {
195
+ ('tensorflow.serving.PredictionService' , 'GetModelMetadata' ): tensorflow__serving_dot_apis_dot_get__model__metadata__pb2 .GetModelMetadataResponse .FromString ,
160
196
('tensorflow.serving.PredictionService' , 'Predict' ): tensorflow__serving_dot_apis_dot_predict__pb2 .PredictResponse .FromString ,
161
197
}
162
198
cardinalities = {
199
+ 'GetModelMetadata' : cardinality .Cardinality .UNARY_UNARY ,
163
200
'Predict' : cardinality .Cardinality .UNARY_UNARY ,
164
201
}
165
202
stub_options = beta_implementations .stub_options (host = host , metadata_transformer = metadata_transformer , request_serializers = request_serializers , response_deserializers = response_deserializers , thread_pool = pool , thread_pool_size = pool_size )
166
203
return beta_implementations .dynamic_stub (channel , 'tensorflow.serving.PredictionService' , cardinalities , options = stub_options )
204
# @@protoc_insertion_point(module_scope)