# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorflow_serving/apis/model_service.proto
# To regenerate, run:
# python -m grpc.tools.protoc --python_out=. --grpc_python_out=. -I. tensorflow_serving/apis/model_service.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pb2
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorflow_serving.apis import (
get_model_status_pb2 as tensorflow__serving_dot_apis_dot_get__model__status__pb2,
)
from tensorflow_serving.apis import (
model_management_pb2 as tensorflow__serving_dot_apis_dot_model__management__pb2,
)
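# File descriptor built from the serialized model_service.proto, declaring its
# two imported dependencies (get_model_status.proto and model_management.proto).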
DESCRIPTOR = _descriptor.FileDescriptor(
name='tensorflow_serving/apis/model_service.proto',
package='tensorflow.serving',
syntax='proto3',
serialized_pb=_b(
'\n+tensorflow_serving/apis/model_service.proto\x12\x12tensorflow.serving\x1a.tensorflow_serving/apis/get_model_status.proto\x1a.tensorflow_serving/apis/model_management.proto2\xe7\x01\n\x0cModelService\x12g\n\x0eGetModelStatus\x12).tensorflow.serving.GetModelStatusRequest\x1a*.tensorflow.serving.GetModelStatusResponse\x12n\n\x19HandleReloadConfigRequest\x12\'.tensorflow.serving.ReloadConfigRequest\x1a(.tensorflow.serving.ReloadConfigResponseB\x03\xf8\x01\x01\x62\x06proto3'
),
dependencies=[
tensorflow__serving_dot_apis_dot_get__model__status__pb2.DESCRIPTOR,
tensorflow__serving_dot_apis_dot_model__management__pb2.DESCRIPTOR,
])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\370\001\001'))
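# Service descriptor for ModelService and its two RPCs:
# GetModelStatus and HandleReloadConfigRequest.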
_MODELSERVICE = _descriptor.ServiceDescriptor(
name='ModelService',
full_name='tensorflow.serving.ModelService',
file=DESCRIPTOR,
index=0,
options=None,
serialized_start=164,
serialized_end=395,
methods=[
_descriptor.MethodDescriptor(
name='GetModelStatus',
full_name='tensorflow.serving.ModelService.GetModelStatus',
index=0,
containing_service=None,
input_type=tensorflow__serving_dot_apis_dot_get__model__status__pb2.
_GETMODELSTATUSREQUEST,
output_type=tensorflow__serving_dot_apis_dot_get__model__status__pb2.
_GETMODELSTATUSRESPONSE,
options=None,
),
_descriptor.MethodDescriptor(
name='HandleReloadConfigRequest',
full_name=
'tensorflow.serving.ModelService.HandleReloadConfigRequest',
index=1,
containing_service=None,
input_type=tensorflow__serving_dot_apis_dot_model__management__pb2.
_RELOADCONFIGREQUEST,
output_type=tensorflow__serving_dot_apis_dot_model__management__pb2.
_RELOADCONFIGRESPONSE,
options=None,
),
])
_sym_db.RegisterServiceDescriptor(_MODELSERVICE)
DESCRIPTOR.services_by_name['ModelService'] = _MODELSERVICE
# @@protoc_insertion_point(module_scope)
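
# ----------------------------------------------------------------------------
# Usage sketch (not part of the protoc output above): a minimal example of
# calling the ModelService described by this file. It assumes the companion
# stub module model_service_pb2_grpc (produced by the same protoc invocation
# with --grpc_python_out) and a ModelServer listening on localhost:8500; the
# address and the model name 'my_model' are illustrative placeholders.
if __name__ == '__main__':
    import grpc

    from tensorflow_serving.apis import get_model_status_pb2
    from tensorflow_serving.apis import model_service_pb2_grpc

    # Connect to a locally running ModelServer (assumed address).
    channel = grpc.insecure_channel('localhost:8500')
    stub = model_service_pb2_grpc.ModelServiceStub(channel)

    # Ask for the status of all loaded versions of a model.
    request = get_model_status_pb2.GetModelStatusRequest()
    request.model_spec.name = 'my_model'

    # GetModelStatus is the first RPC declared in the ModelService descriptor.
    response = stub.GetModelStatus(request, timeout=5.0)
    print(response)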