 from google.protobuf import runtime_version as _runtime_version
 from google.protobuf import symbol_database as _symbol_database
 from google.protobuf.internal import builder as _builder
-_runtime_version.ValidateProtobufRuntimeVersion(_runtime_version.Domain.PUBLIC, 5, 28, 2, '', 'app/mlinference/v1/ml_inference.proto')
+_runtime_version.ValidateProtobufRuntimeVersion(_runtime_version.Domain.PUBLIC, 5, 29, 1, '', 'app/mlinference/v1/ml_inference.proto')
 _sym_db = _symbol_database.Default()
 from ....app.data.v1 import data_pb2 as app_dot_data_dot_v1_dot_data__pb2
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%app/mlinference/v1/ml_inference.proto\x12\x17viam.app.mlinference.v1\x1a\x16app/data/v1/data.proto"\xd5\x01\n\x13GetInferenceRequest\x12(\n\x10registry_item_id\x18\x01 \x01(\tR\x0eregistryItemId\x122\n\x15registry_item_version\x18\x02 \x01(\tR\x13registryItemVersion\x127\n\tbinary_id\x18\x03 \x01(\x0b2\x1a.viam.app.data.v1.BinaryIDR\x08binaryId\x12\'\n\x0forganization_id\x18\x04 \x01(\tR\x0eorganizationId"\x16\n\x14GetInferenceResponse2\x81\x01\n\x12MLInferenceService\x12k\n\x0cGetInference\x12,.viam.app.mlinference.v1.GetInferenceRequest\x1a-.viam.app.mlinference.v1.GetInferenceResponseB$Z"go.viam.com/api/app/mlinference/v1b\x06proto3')
+from ....service.mlmodel.v1 import mlmodel_pb2 as service_dot_mlmodel_dot_v1_dot_mlmodel__pb2
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%app/mlinference/v1/ml_inference.proto\x12\x17viam.app.mlinference.v1\x1a\x16app/data/v1/data.proto\x1a service/mlmodel/v1/mlmodel.proto"\xd5\x01\n\x13GetInferenceRequest\x12(\n\x10registry_item_id\x18\x01 \x01(\tR\x0eregistryItemId\x122\n\x15registry_item_version\x18\x02 \x01(\tR\x13registryItemVersion\x127\n\tbinary_id\x18\x03 \x01(\x0b2\x1a.viam.app.data.v1.BinaryIDR\x08binaryId\x12\'\n\x0forganization_id\x18\x04 \x01(\tR\x0eorganizationId"\xa4\x01\n\x14GetInferenceResponse\x12K\n\x0eoutput_tensors\x18\x01 \x01(\x0b2$.viam.service.mlmodel.v1.FlatTensorsR\routputTensors\x12?\n\x0bannotations\x18\x02 \x01(\x0b2\x1d.viam.app.data.v1.AnnotationsR\x0bannotations2\x81\x01\n\x12MLInferenceService\x12k\n\x0cGetInference\x12,.viam.app.mlinference.v1.GetInferenceRequest\x1a-.viam.app.mlinference.v1.GetInferenceResponseB$Z"go.viam.com/api/app/mlinference/v1b\x06proto3')
 _globals = globals()
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
 _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'app.mlinference.v1.ml_inference_pb2', _globals)
 if not _descriptor._USE_C_DESCRIPTORS:
     _globals['DESCRIPTOR']._loaded_options = None
     _globals['DESCRIPTOR']._serialized_options = b'Z"go.viam.com/api/app/mlinference/v1'
-    _globals['_GETINFERENCEREQUEST']._serialized_start = 91
-    _globals['_GETINFERENCEREQUEST']._serialized_end = 304
-    _globals['_GETINFERENCERESPONSE']._serialized_start = 306
-    _globals['_GETINFERENCERESPONSE']._serialized_end = 328
-    _globals['_MLINFERENCESERVICE']._serialized_start = 331
-    _globals['_MLINFERENCESERVICE']._serialized_end = 460
+    _globals['_GETINFERENCEREQUEST']._serialized_start = 125
+    _globals['_GETINFERENCEREQUEST']._serialized_end = 338
+    _globals['_GETINFERENCERESPONSE']._serialized_start = 341
+    _globals['_GETINFERENCERESPONSE']._serialized_end = 505
+    _globals['_MLINFERENCESERVICE']._serialized_start = 508
+    _globals['_MLINFERENCESERVICE']._serialized_end = 637
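
The regenerated descriptor changes GetInferenceResponse from an empty message to one carrying output_tensors (viam.service.mlmodel.v1.FlatTensors) and annotations (viam.app.data.v1.Annotations), which is why mlmodel.proto is added as a dependency and the serialized offsets shift. A minimal sketch of working with the new fields is below; the absolute package prefix for the generated modules is an assumption (the pb2 files are imported package-relatively in this file), and only standard protobuf message APIs (CopyFrom, HasField) are used.

# Sketch only: the top-level package prefix for the generated modules is an
# assumption; adjust the imports to wherever the pb2 files live in your tree.
from app.mlinference.v1 import ml_inference_pb2
from service.mlmodel.v1 import mlmodel_pb2

# A handler might populate the response roughly like this.
response = ml_inference_pb2.GetInferenceResponse()
response.output_tensors.CopyFrom(mlmodel_pb2.FlatTensors())

# Singular message fields in proto3 support presence checks.
print(response.HasField('output_tensors'))  # True after CopyFrom
print(response.HasField('annotations'))     # False until it is assigned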