From aa5c7b93622b737d4fd35928d0e6725115aed41b Mon Sep 17 00:00:00 2001
From: SDKAuto
Date: Tue, 13 May 2025 17:21:43 +0000
Subject: [PATCH] CodeGen from PR 33810 in Azure/azure-rest-api-specs

Merge f56e10be6feb15ce5befdc769f63da0716f2cdfb into 8ace4664e1bb2c1d1a1158d26d553991fa6f16b5
---
 .../azure-mgmt-iotoperations/CHANGELOG.md | 450 +
 .../azure-mgmt-iotoperations/README.md | 6 +-
 .../azure-mgmt-iotoperations/_meta.json | 4 +-
 .../apiview-properties.json | 379 +
 .../azure/mgmt/iotoperations/_client.py | 61 +-
 .../mgmt/iotoperations/_configuration.py | 7 +-
 .../azure/mgmt/iotoperations/_patch.py | 9 +-
 .../mgmt/iotoperations/_utils/__init__.py | 6 +
 .../{_model_base.py => _utils/model_base.py} | 87 +-
 .../serialization.py} | 178 +-
 .../azure/mgmt/iotoperations/_validation.py | 50 +
 .../azure/mgmt/iotoperations/_version.py | 2 +-
 .../azure/mgmt/iotoperations/aio/_client.py | 63 +-
 .../mgmt/iotoperations/aio/_configuration.py | 7 +-
 .../azure/mgmt/iotoperations/aio/_patch.py | 9 +-
 .../iotoperations/aio/operations/__init__.py | 10 +
 .../aio/operations/_operations.py | 3370 ++++++-
 .../iotoperations/aio/operations/_patch.py | 9 +-
 .../mgmt/iotoperations/models/__init__.py | 256 +
 .../azure/mgmt/iotoperations/models/_enums.py | 207 +
 .../mgmt/iotoperations/models/_models.py | 8923 +++++++++++++---
 .../azure/mgmt/iotoperations/models/_patch.py | 9 +-
 .../mgmt/iotoperations/operations/__init__.py | 10 +
 .../iotoperations/operations/_operations.py | 6030 +++++++++--
 .../mgmt/iotoperations/operations/_patch.py | 9 +-
 ...nector_create_or_update_maximum_set_gen.py | 45 +
 .../akri_connector_delete_maximum_set_gen.py | 43 +
 .../akri_connector_get_maximum_set_gen.py | 44 +
 ...nector_list_by_template_maximum_set_gen.py | 44 +
 ...mplate_create_or_update_maximum_set_gen.py | 73 +
 ...nnector_template_delete_maximum_set_gen.py | 42 +
 ..._connector_template_get_maximum_set_gen.py | 43 +
 ...st_by_instance_resource_maximum_set_gen.py | 43 +
 ...andler_create_or_update_maximum_set_gen.py | 77 +
 ...iscovery_handler_delete_maximum_set_gen.py | 42 +
 ...i_discovery_handler_get_maximum_set_gen.py | 43 +
 ...st_by_instance_resource_maximum_set_gen.py | 43 +
 ...authentication_create_or_update_complex.py | 2 +-
 ...cation_create_or_update_maximum_set_gen.py | 2 +-
 ...r_authentication_delete_maximum_set_gen.py | 2 +-
 ...oker_authentication_get_maximum_set_gen.py | 2 +-
 ..._list_by_resource_group_maximum_set_gen.py | 2 +-
 ..._authorization_create_or_update_complex.py | 2 +-
 ...zation_create_or_update_maximum_set_gen.py | 2 +-
 ...r_authorization_create_or_update_simple.py | 2 +-
 ...er_authorization_delete_maximum_set_gen.py | 2 +-
 ...roker_authorization_get_maximum_set_gen.py | 2 +-
 ..._list_by_resource_group_maximum_set_gen.py | 2 +-
 .../broker_create_or_update_complex.py | 2 +-
 ...broker_create_or_update_maximum_set_gen.py | 2 +-
 .../broker_create_or_update_minimal.py | 2 +-
 .../broker_create_or_update_simple.py | 2 +-
 .../broker_delete_maximum_set_gen.py | 2 +-
 .../broker_get_maximum_set_gen.py | 2 +-
 ..._list_by_resource_group_maximum_set_gen.py | 2 +-
 ...roker_listener_create_or_update_complex.py | 2 +-
 ...stener_create_or_update_maximum_set_gen.py | 2 +-
 ...broker_listener_create_or_update_simple.py | 2 +-
 .../broker_listener_delete_maximum_set_gen.py | 2 +-
 .../broker_listener_get_maximum_set_gen.py | 2 +-
 ..._list_by_resource_group_maximum_set_gen.py | 2 +-
 ...ate_or_update_complex_contextualization.py | 2 +-
 ...flow_create_or_update_complex_event_hub.py | 2 +-
 ...taflow_create_or_update_filter_to_topic.py | 2 +-
 ...taflow_create_or_update_maximum_set_gen.py | 2 +-
 ...flow_create_or_update_simple_event_grid.py | 2 +-
 ...dataflow_create_or_update_simple_fabric.py | 2 +-
 .../dataflow_delete_maximum_set_gen.py | 2 +-
 ...taflow_endpoint_create_or_update_adlsv2.py | 2 +-
 .../dataflow_endpoint_create_or_update_adx.py | 2 +-
 .../dataflow_endpoint_create_or_update_aio.py | 2 +-
 ...ow_endpoint_create_or_update_event_grid.py | 2 +-
 ...low_endpoint_create_or_update_event_hub.py | 2 +-
 ...taflow_endpoint_create_or_update_fabric.py | 2 +-
 ...ataflow_endpoint_create_or_update_kafka.py | 2 +-
 ...endpoint_create_or_update_local_storage.py | 2 +-
 ...dpoint_create_or_update_maximum_set_gen.py | 2 +-
 ...dataflow_endpoint_create_or_update_mqtt.py | 2 +-
 ...ataflow_endpoint_delete_maximum_set_gen.py | 2 +-
 .../dataflow_endpoint_get_maximum_set_gen.py | 2 +-
 ..._list_by_resource_group_maximum_set_gen.py | 2 +-
 .../dataflow_get_maximum_set_gen.py | 2 +-
 ..._graph_create_or_update_maximum_set_gen.py | 61 +
 .../dataflow_graph_delete_maximum_set_gen.py | 43 +
 .../dataflow_graph_get_maximum_set_gen.py | 44 +
 ...ist_by_dataflow_profile_maximum_set_gen.py | 44 +
 ...ist_by_profile_resource_maximum_set_gen.py | 2 +-
 ...rofile_create_or_update_maximum_set_gen.py | 2 +-
 ...taflow_profile_create_or_update_minimal.py | 2 +-
 ...dataflow_profile_create_or_update_multi.py | 2 +-
 ...dataflow_profile_delete_maximum_set_gen.py | 2 +-
 .../dataflow_profile_get_maximum_set_gen.py | 2 +-
 ..._list_by_resource_group_maximum_set_gen.py | 2 +-
 ...stance_create_or_update_maximum_set_gen.py | 3 +-
 .../instance_delete_maximum_set_gen.py | 2 +-
 .../instance_get_maximum_set_gen.py | 2 +-
 ..._list_by_resource_group_maximum_set_gen.py | 2 +-
 ...ce_list_by_subscription_maximum_set_gen.py | 2 +-
 .../instance_update_maximum_set_gen.py | 2 +-
 .../operations_list_maximum_set_gen.py | 2 +-
 ...dpoint_create_or_update_maximum_set_gen.py | 50 +
 ...egistry_endpoint_delete_maximum_set_gen.py | 42 +
 .../registry_endpoint_get_maximum_set_gen.py | 43 +
 ...st_by_instance_resource_maximum_set_gen.py | 43 +
 ...erations_mgmt_akri_connector_operations.py | 85 +
 ...ns_mgmt_akri_connector_operations_async.py | 90 +
 ...mgmt_akri_connector_template_operations.py | 108 +
 ...kri_connector_template_operations_async.py | 113 +
 ..._mgmt_akri_discovery_handler_operations.py | 106 +
 ...akri_discovery_handler_operations_async.py | 111 +
 ...s_mgmt_broker_authentication_operations.py | 1 +
 ..._broker_authentication_operations_async.py | 1 +
 ...t_io_toperations_mgmt_broker_operations.py | 37 +-
 ...operations_mgmt_broker_operations_async.py | 37 +-
 ...tions_mgmt_dataflow_endpoint_operations.py | 7 +
 ...mgmt_dataflow_endpoint_operations_async.py | 7 +
 ...erations_mgmt_dataflow_graph_operations.py | 96 +
 ...ns_mgmt_dataflow_graph_operations_async.py | 101 +
 ...io_toperations_mgmt_instance_operations.py | 3 +
 ...erations_mgmt_instance_operations_async.py | 3 +
 ...tions_mgmt_registry_endpoint_operations.py | 86 +
 ...mgmt_registry_endpoint_operations_async.py | 91 +
 .../sdk_packaging.toml | 2 +-
 .../azure-mgmt-iotoperations/setup.py | 7 +-
 .../tsp-location.yaml | 2 +-
 125 files changed, 18758 insertions(+), 3558 deletions(-)
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/apiview-properties.json
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_utils/__init__.py
 rename sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/{_model_base.py => _utils/model_base.py} (94%)
 rename sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/{_serialization.py => _utils/serialization.py} (94%)
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_validation.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_create_or_update_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_delete_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_get_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_list_by_template_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_template_create_or_update_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_template_delete_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_template_get_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_template_list_by_instance_resource_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_discovery_handler_create_or_update_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_discovery_handler_delete_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_discovery_handler_get_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_discovery_handler_list_by_instance_resource_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_graph_create_or_update_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_graph_delete_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_graph_get_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_graph_list_by_dataflow_profile_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/registry_endpoint_create_or_update_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/registry_endpoint_delete_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/registry_endpoint_get_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/registry_endpoint_list_by_instance_resource_maximum_set_gen.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_connector_operations.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_connector_operations_async.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_connector_template_operations.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_connector_template_operations_async.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_discovery_handler_operations.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_discovery_handler_operations_async.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_dataflow_graph_operations.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_dataflow_graph_operations_async.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_registry_endpoint_operations.py
 create mode 100644 sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_registry_endpoint_operations_async.py

diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/CHANGELOG.md b/sdk/iotoperations/azure-mgmt-iotoperations/CHANGELOG.md
index 0a7e55f2f253..5bbee2ac7562 100644
--- a/sdk/iotoperations/azure-mgmt-iotoperations/CHANGELOG.md
+++ b/sdk/iotoperations/azure-mgmt-iotoperations/CHANGELOG.md
@@ -1,5 +1,455 @@
 # Release History
 
+## 1.1.0b1 (2025-05-13)
+
+### Features Added
+
+ - Client `IoTOperationsMgmtClient` added operation group `dataflow_graph`
+ - Client `IoTOperationsMgmtClient` added operation group `registry_endpoint`
+ - Client `IoTOperationsMgmtClient` added operation group `akri_connector_template`
+ - Client `IoTOperationsMgmtClient` added operation group `akri_connector`
+ - Client `IoTOperationsMgmtClient` added operation group `akri_discovery_handler`
+ - Model `BrokerAuthenticatorMethodX509` added property `additional_validation`
+ - Model `BrokerProperties` added property `persistence`
+ - Model `DataflowEndpointProperties` added property `host_type`
+ - Model `DataflowEndpointProperties` added property `open_telemetry_settings`
+ - Enum `EndpointType` added member `OPEN_TELEMETRY`
+ - Model `InstanceProperties` added property `default_secret_provider_class_ref`
+ - Model `InstanceProperties` added property `features`
+ - Model `InstanceProperties` added property `adr_namespace_ref`
+ - Model `VolumeClaimResourceRequirements` added property `claims`
+ - Added model `AkriConnectorProperties`
+ - Added model `AkriConnectorResource`
+ - Added model `AkriConnectorTemplateAioMetadata`
+ - Added model `AkriConnectorTemplateAllocation`
+ - Added enum `AkriConnectorTemplateAllocationPolicy`
+ - Added model `AkriConnectorTemplateBucketizedAllocation`
+ - Added model `AkriConnectorTemplateDeviceInboundEndpointConfigurationSchemaRefs`
+ - Added model `AkriConnectorTemplateDeviceInboundEndpointType`
+ - Added model `AkriConnectorTemplateDiagnostics`
+ - Added model `AkriConnectorTemplateHelmAdvancedConfiguration`
+ - Added model `AkriConnectorTemplateHelmAuthSecretRef`
+ - Added model `AkriConnectorTemplateHelmConfiguration`
+ - Added model `AkriConnectorTemplateHelmConfigurationSettings`
+ - Added model `AkriConnectorTemplateHelmContainerRegistry`
+ - Added model `AkriConnectorTemplateHelmContainerRegistrySettings`
+ - Added model `AkriConnectorTemplateHelmDeleteConfiguration`
+ - Added model `AkriConnectorTemplateHelmInstallConfiguration`
+ - Added model `AkriConnectorTemplateHelmRegistryEndpointRef`
+ - Added model `AkriConnectorTemplateHelmRegistrySettings`
+ - Added enum `AkriConnectorTemplateHelmRegistrySettingsType`
+ - Added model `AkriConnectorTemplateHelmUpgradeConfiguration`
+ - Added model `AkriConnectorTemplateManagedConfiguration`
+ - Added model `AkriConnectorTemplateManagedConfigurationSettings`
+ - Added enum `AkriConnectorTemplateManagedConfigurationType`
+ - Added model `AkriConnectorTemplatePersistentVolumeClaim`
+ - Added model `AkriConnectorTemplateProperties`
+ - Added model `AkriConnectorTemplateResource`
+ - Added model `AkriConnectorTemplateRuntimeConfiguration`
+ - Added enum `AkriConnectorTemplateRuntimeConfigurationType`
+ - Added model `AkriConnectorTemplateRuntimeImageConfiguration`
+ - Added model `AkriConnectorTemplateRuntimeImageConfigurationSettings`
+ - Added model `AkriConnectorTemplateRuntimeStatefulSetConfiguration`
+ - Added model `AkriConnectorTemplateTrustList`
+ - Added model `AkriConnectorsContainerRegistry`
+ - Added model `AkriConnectorsContainerRegistrySettings`
+ - Added model `AkriConnectorsDiagnosticsLogs`
+ - Added model `AkriConnectorsDigest`
+ - Added enum `AkriConnectorsImagePullPolicy`
+ - Added model `AkriConnectorsImagePullSecret`
+ - Added model `AkriConnectorsMqttAuthentication`
+ - Added enum `AkriConnectorsMqttAuthenticationMethod`
+ - Added model `AkriConnectorsMqttConnectionConfiguration`
+ - Added enum `AkriConnectorsMqttProtocolType`
+ - Added model `AkriConnectorsRegistryEndpointRef`
+ - Added model `AkriConnectorsRegistrySettings`
+ - Added enum `AkriConnectorsRegistrySettingsType`
+ - Added model `AkriConnectorsSecret`
+ - Added model `AkriConnectorsServiceAccountAuthentication`
+ - Added model `AkriConnectorsServiceAccountTokenSettings`
+ - Added model `AkriConnectorsTag`
+ - Added model `AkriConnectorsTagDigestSettings`
+ - Added enum `AkriConnectorsTagDigestType`
+ - Added model `AkriDiscoveryHandlerAioMetadata`
+ - Added model `AkriDiscoveryHandlerDiagnostics`
+ - Added model `AkriDiscoveryHandlerDiscoverableDeviceEndpointType`
+ - Added model `AkriDiscoveryHandlerImageConfiguration`
+ - Added model `AkriDiscoveryHandlerProperties`
+ - Added model `AkriDiscoveryHandlerResource`
+ - Added model `AkriDiscoveryHandlerSchedule`
+ - Added model `AkriDiscoveryHandlerScheduleContinuous`
+ - Added model `AkriDiscoveryHandlerScheduleCron`
+ - Added model `AkriDiscoveryHandlerScheduleRunOnce`
+ - Added enum `AkriDiscoveryHandlerScheduleType`
+ - Added model `AzureDeviceRegistryNamespaceRef`
+ - Added enum `BrokerAuthenticatorValidationMethods`
+ - Added model `BrokerPersistence`
+ - Added model `BrokerPersistenceDynamicSettings`
+ - Added model `BrokerPersistenceEncryption`
+ - Added enum `BrokerPersistencePolicyMode`
+ - Added model `BrokerRetainMessagesCustomPolicy`
+ - Added model `BrokerRetainMessagesDynamic`
+ - Added model `BrokerRetainMessagesPolicy`
+ - Added model `BrokerRetainMessagesSettings`
+ - Added model `BrokerStateStoreCustomPolicy`
+ - Added model `BrokerStateStoreDynamic`
+ - Added enum `BrokerStateStoreKeyType`
+ - Added model `BrokerStateStorePolicy`
+ - Added model `BrokerStateStorePolicyResources`
+ - Added model `BrokerStateStorePolicySettings`
+ - Added model `BrokerSubscriberQueueCustomPolicy`
+ - Added model `BrokerSubscriberQueueCustomPolicySettings`
+ - Added model `BrokerSubscriberQueueDynamic`
+ - Added model `BrokerSubscriberQueuePolicy`
+ - Added model `DatafloGraphDestinationNode`
+ - Added model `DataflowEndpointAuthenticationAnonymous`
+ - Added model `DataflowEndpointOpenTelemetry`
+ - Added enum `DataflowEnpointHostType`
+ - Added model `DataflowGraphConnectionInput`
+ - Added model `DataflowGraphConnectionOutput`
+ - Added model `DataflowGraphDestinationNodeSettings`
+ - Added model `DataflowGraphGraphNode`
+ - Added model `DataflowGraphGraphNodeConfiguration`
+ - Added model `DataflowGraphNode`
+ - Added model `DataflowGraphNodeConnection`
+ - Added model
`DataflowGraphNodeGraphSettings` + - Added enum `DataflowGraphNodeType` + - Added model `DataflowGraphProperties` + - Added model `DataflowGraphResource` + - Added model `DataflowGraphSchemaSettings` + - Added enum `DataflowGraphSerializationFormat` + - Added model `DataflowGraphSourceNode` + - Added model `DataflowGraphSourceSettings` + - Added model `DataflowOpenTelemetryAnonymousAuthentication` + - Added model `DataflowOpenTelemetryAuthentication` + - Added enum `DataflowOpenTelemetryAuthenticationMethod` + - Added model `DataflowOpenTelemetryServiceAccountAuthentication` + - Added model `DataflowOpenTelemetryX509CertificateAuthentication` + - Added model `InstanceFeature` + - Added enum `InstanceFeatureMode` + - Added model `RegistryEndpointAnonymousAuthentication` + - Added model `RegistryEndpointAnonymousSettings` + - Added model `RegistryEndpointArtifactPullSecretAuthentication` + - Added model `RegistryEndpointArtifactPullSecretSettings` + - Added model `RegistryEndpointAuthentication` + - Added enum `RegistryEndpointAuthenticationMethod` + - Added model `RegistryEndpointProperties` + - Added model `RegistryEndpointResource` + - Added model `RegistryEndpointSystemAssignedIdentityAuthentication` + - Added model `RegistryEndpointSystemAssignedManagedIdentitySettings` + - Added model `RegistryEndpointTrustedSettings` + - Added model `RegistryEndpointTrustedSigningKey` + - Added model `RegistryEndpointTrustedSigningKeyConfigMap` + - Added model `RegistryEndpointTrustedSigningKeySecret` + - Added enum `RegistryEndpointTrustedSigningKeyType` + - Added model `RegistryEndpointUserAssignedIdentityAuthentication` + - Added model `RegistryEndpointUserAssignedManagedIdentitySettings` + - Added model `SecretProviderClassRef` + - Added model `VolumeClaimResourceRequirementsClaims` + - Added model `AkriConnectorOperations` + - Added model `AkriConnectorTemplateOperations` + - Added model `AkriDiscoveryHandlerOperations` + - Added model `DataflowGraphOperations` + - Added model `RegistryEndpointOperations` + - Method `BrokerAuthenticationResource.__init__` has a new overload `def __init__(self: None, properties: Optional[_models.BrokerAuthenticationProperties], extended_location: Optional[_models.ExtendedLocation])` + - Method `BrokerAuthenticatorMethodX509.__init__` has a new overload `def __init__(self: None, authorization_attributes: Optional[Dict[str, _models.BrokerAuthenticatorMethodX509Attributes]], trusted_client_ca_cert: Optional[str], additional_validation: Optional[Union[str, _models.BrokerAuthenticatorValidationMethods]])` + - Method `BrokerAuthorizationResource.__init__` has a new overload `def __init__(self: None, properties: Optional[_models.BrokerAuthorizationProperties], extended_location: Optional[_models.ExtendedLocation])` + - Method `BrokerListenerResource.__init__` has a new overload `def __init__(self: None, properties: Optional[_models.BrokerListenerProperties], extended_location: Optional[_models.ExtendedLocation])` + - Method `BrokerProperties.__init__` has a new overload `def __init__(self: None, advanced: Optional[_models.AdvancedSettings], cardinality: Optional[_models.Cardinality], diagnostics: Optional[_models.BrokerDiagnostics], disk_backed_message_buffer: Optional[_models.DiskBackedMessageBuffer], generate_resource_limits: Optional[_models.GenerateResourceLimits], memory_profile: Optional[Union[str, _models.BrokerMemoryProfile]], persistence: Optional[_models.BrokerPersistence])` + - Method `BrokerResource.__init__` has a new overload `def __init__(self: None, 
properties: Optional[_models.BrokerProperties], extended_location: Optional[_models.ExtendedLocation])` + - Method `DataflowEndpointProperties.__init__` has a new overload `def __init__(self: None, endpoint_type: Union[str, _models.EndpointType], host_type: Optional[Union[str, _models.DataflowEnpointHostType]], data_explorer_settings: Optional[_models.DataflowEndpointDataExplorer], data_lake_storage_settings: Optional[_models.DataflowEndpointDataLakeStorage], fabric_one_lake_settings: Optional[_models.DataflowEndpointFabricOneLake], kafka_settings: Optional[_models.DataflowEndpointKafka], local_storage_settings: Optional[_models.DataflowEndpointLocalStorage], mqtt_settings: Optional[_models.DataflowEndpointMqtt], open_telemetry_settings: Optional[_models.DataflowEndpointOpenTelemetry])` + - Method `DataflowEndpointResource.__init__` has a new overload `def __init__(self: None, properties: Optional[_models.DataflowEndpointProperties], extended_location: Optional[_models.ExtendedLocation])` + - Method `DataflowProfileResource.__init__` has a new overload `def __init__(self: None, properties: Optional[_models.DataflowProfileProperties], extended_location: Optional[_models.ExtendedLocation])` + - Method `DataflowResource.__init__` has a new overload `def __init__(self: None, properties: Optional[_models.DataflowProperties], extended_location: Optional[_models.ExtendedLocation])` + - Method `InstanceProperties.__init__` has a new overload `def __init__(self: None, schema_registry_ref: _models.SchemaRegistryRef, description: Optional[str], default_secret_provider_class_ref: Optional[_models.SecretProviderClassRef], features: Optional[Dict[str, _models.InstanceFeature]], adr_namespace_ref: Optional[_models.AzureDeviceRegistryNamespaceRef])` + - Method `Operation.__init__` has a new overload `def __init__(self: None, display: Optional[_models.OperationDisplay])` + - Method `VolumeClaimResourceRequirements.__init__` has a new overload `def __init__(self: None, limits: Optional[Dict[str, str]], requests: Optional[Dict[str, str]], claims: Optional[List[_models.VolumeClaimResourceRequirementsClaims]])` + - Method `AkriConnectorResource.__init__` has a new overload `def __init__(self: None, properties: Optional[_models.AkriConnectorProperties], extended_location: Optional[_models.ExtendedLocation])` + - Method `AkriConnectorResource.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateAioMetadata.__init__` has a new overload `def __init__(self: None, aio_min_version: Optional[str], aio_max_version: Optional[str])` + - Method `AkriConnectorTemplateAioMetadata.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateAllocation.__init__` has a new overload `def __init__(self: None, policy: str)` + - Method `AkriConnectorTemplateAllocation.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateBucketizedAllocation.__init__` has a new overload `def __init__(self: None, bucket_size: int)` + - Method `AkriConnectorTemplateBucketizedAllocation.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateBucketizedAllocation.__init__` has a new overload `def __init__(self: None, policy: str)` + - Method `AkriConnectorTemplateBucketizedAllocation.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method 
`AkriConnectorTemplateDeviceInboundEndpointConfigurationSchemaRefs.__init__` has a new overload `def __init__(self: None, default_dataset_config_schema_ref: Optional[str], default_events_config_schema_ref: Optional[str], default_process_control_config_schema_ref: Optional[str], default_streams_config_schema_ref: Optional[str], additional_config_schema_ref: Optional[str])` + - Method `AkriConnectorTemplateDeviceInboundEndpointConfigurationSchemaRefs.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateDeviceInboundEndpointType.__init__` has a new overload `def __init__(self: None, endpoint_type: str, version: str, configuration_schema_refs: Optional[_models.AkriConnectorTemplateDeviceInboundEndpointConfigurationSchemaRefs])` + - Method `AkriConnectorTemplateDeviceInboundEndpointType.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateDiagnostics.__init__` has a new overload `def __init__(self: None, logs: _models.AkriConnectorsDiagnosticsLogs)` + - Method `AkriConnectorTemplateDiagnostics.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateHelmAdvancedConfiguration.__init__` has a new overload `def __init__(self: None, delete: Optional[_models.AkriConnectorTemplateHelmDeleteConfiguration], install: Optional[_models.AkriConnectorTemplateHelmInstallConfiguration], upgrade: Optional[_models.AkriConnectorTemplateHelmUpgradeConfiguration])` + - Method `AkriConnectorTemplateHelmAdvancedConfiguration.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateHelmAuthSecretRef.__init__` has a new overload `def __init__(self: None, secret_ref: str, password_key: str, username_key: str)` + - Method `AkriConnectorTemplateHelmAuthSecretRef.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateHelmConfiguration.__init__` has a new overload `def __init__(self: None, helm_configuration_settings: _models.AkriConnectorTemplateHelmConfigurationSettings)` + - Method `AkriConnectorTemplateHelmConfiguration.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateHelmConfiguration.__init__` has a new overload `def __init__(self: None, runtime_configuration_type: str)` + - Method `AkriConnectorTemplateHelmConfiguration.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateHelmConfigurationSettings.__init__` has a new overload `def __init__(self: None, release_name: str, version: str, registry_settings: Optional[_models.AkriConnectorTemplateHelmRegistrySettings], advanced_configuration: Optional[_models.AkriConnectorTemplateHelmAdvancedConfiguration], values_property: Optional[Dict[str, str]])` + - Method `AkriConnectorTemplateHelmConfigurationSettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateHelmContainerRegistry.__init__` has a new overload `def __init__(self: None, container_registry_settings: _models.AkriConnectorTemplateHelmContainerRegistrySettings)` + - Method `AkriConnectorTemplateHelmContainerRegistry.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateHelmContainerRegistry.__init__` has a new overload `def __init__(self: None, 
registry_settings_type: str)` + - Method `AkriConnectorTemplateHelmContainerRegistry.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateHelmContainerRegistrySettings.__init__` has a new overload `def __init__(self: None, registry: str, repository: str, auth_secret_ref: Optional[_models.AkriConnectorTemplateHelmAuthSecretRef])` + - Method `AkriConnectorTemplateHelmContainerRegistrySettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateHelmDeleteConfiguration.__init__` has a new overload `def __init__(self: None, timeout: Optional[int], wait_for_jobs: Optional[bool], atomic: Optional[bool], disable_hooks: Optional[bool], wait: Optional[bool])` + - Method `AkriConnectorTemplateHelmDeleteConfiguration.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateHelmInstallConfiguration.__init__` has a new overload `def __init__(self: None, timeout: Optional[int], wait_for_jobs: Optional[bool], atomic: Optional[bool], disable_hooks: Optional[bool], wait: Optional[bool])` + - Method `AkriConnectorTemplateHelmInstallConfiguration.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateHelmRegistryEndpointRef.__init__` has a new overload `def __init__(self: None, registry_endpoint_ref: str)` + - Method `AkriConnectorTemplateHelmRegistryEndpointRef.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateHelmRegistryEndpointRef.__init__` has a new overload `def __init__(self: None, registry_settings_type: str)` + - Method `AkriConnectorTemplateHelmRegistryEndpointRef.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateHelmRegistrySettings.__init__` has a new overload `def __init__(self: None, registry_settings_type: str)` + - Method `AkriConnectorTemplateHelmRegistrySettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateHelmUpgradeConfiguration.__init__` has a new overload `def __init__(self: None, timeout: Optional[int], wait_for_jobs: Optional[bool], atomic: Optional[bool], disable_hooks: Optional[bool], wait: Optional[bool])` + - Method `AkriConnectorTemplateHelmUpgradeConfiguration.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateManagedConfiguration.__init__` has a new overload `def __init__(self: None, managed_configuration_settings: _models.AkriConnectorTemplateManagedConfigurationSettings)` + - Method `AkriConnectorTemplateManagedConfiguration.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateManagedConfiguration.__init__` has a new overload `def __init__(self: None, runtime_configuration_type: str)` + - Method `AkriConnectorTemplateManagedConfiguration.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateManagedConfigurationSettings.__init__` has a new overload `def __init__(self: None, managed_configuration_type: str, allocation: Optional[_models.AkriConnectorTemplateAllocation], persistent_volume_claims: Optional[List[_models.AkriConnectorTemplatePersistentVolumeClaim]], additional_configuration: Optional[Dict[str, str]], 
persistent_volume_claim_templates: Optional[List[Dict[str, Any]]], secrets: Optional[List[_models.AkriConnectorsSecret]], trust_settings: Optional[_models.AkriConnectorTemplateTrustList])` + - Method `AkriConnectorTemplateManagedConfigurationSettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplatePersistentVolumeClaim.__init__` has a new overload `def __init__(self: None, claim_name: str, mount_path: str)` + - Method `AkriConnectorTemplatePersistentVolumeClaim.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateProperties.__init__` has a new overload `def __init__(self: None, runtime_configuration: _models.AkriConnectorTemplateRuntimeConfiguration, device_inbound_endpoint_types: List[_models.AkriConnectorTemplateDeviceInboundEndpointType], aio_metadata: Optional[_models.AkriConnectorTemplateAioMetadata], diagnostics: Optional[_models.AkriConnectorTemplateDiagnostics], mqtt_connection_configuration: Optional[_models.AkriConnectorsMqttConnectionConfiguration])` + - Method `AkriConnectorTemplateProperties.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateResource.__init__` has a new overload `def __init__(self: None, properties: Optional[_models.AkriConnectorTemplateProperties], extended_location: Optional[_models.ExtendedLocation])` + - Method `AkriConnectorTemplateResource.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateRuntimeConfiguration.__init__` has a new overload `def __init__(self: None, runtime_configuration_type: str)` + - Method `AkriConnectorTemplateRuntimeConfiguration.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateRuntimeImageConfiguration.__init__` has a new overload `def __init__(self: None, image_configuration_settings: _models.AkriConnectorTemplateRuntimeImageConfigurationSettings, allocation: Optional[_models.AkriConnectorTemplateAllocation], persistent_volume_claims: Optional[List[_models.AkriConnectorTemplatePersistentVolumeClaim]], additional_configuration: Optional[Dict[str, str]], persistent_volume_claim_templates: Optional[List[Dict[str, Any]]], secrets: Optional[List[_models.AkriConnectorsSecret]], trust_settings: Optional[_models.AkriConnectorTemplateTrustList])` + - Method `AkriConnectorTemplateRuntimeImageConfiguration.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateRuntimeImageConfiguration.__init__` has a new overload `def __init__(self: None, managed_configuration_type: str, allocation: Optional[_models.AkriConnectorTemplateAllocation], persistent_volume_claims: Optional[List[_models.AkriConnectorTemplatePersistentVolumeClaim]], additional_configuration: Optional[Dict[str, str]], persistent_volume_claim_templates: Optional[List[Dict[str, Any]]], secrets: Optional[List[_models.AkriConnectorsSecret]], trust_settings: Optional[_models.AkriConnectorTemplateTrustList])` + - Method `AkriConnectorTemplateRuntimeImageConfiguration.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateRuntimeImageConfigurationSettings.__init__` has a new overload `def __init__(self: None, image_name: str, image_pull_policy: Optional[Union[str, _models.AkriConnectorsImagePullPolicy]], replicas: Optional[int], 
registry_settings: Optional[_models.AkriConnectorsRegistrySettings], tag_digest_settings: Optional[_models.AkriConnectorsTagDigestSettings])` + - Method `AkriConnectorTemplateRuntimeImageConfigurationSettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateRuntimeStatefulSetConfiguration.__init__` has a new overload `def __init__(self: None, stateful_set_configuration_settings: Dict[str, Any], allocation: Optional[_models.AkriConnectorTemplateAllocation], persistent_volume_claims: Optional[List[_models.AkriConnectorTemplatePersistentVolumeClaim]], additional_configuration: Optional[Dict[str, str]], persistent_volume_claim_templates: Optional[List[Dict[str, Any]]], secrets: Optional[List[_models.AkriConnectorsSecret]], trust_settings: Optional[_models.AkriConnectorTemplateTrustList])` + - Method `AkriConnectorTemplateRuntimeStatefulSetConfiguration.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateRuntimeStatefulSetConfiguration.__init__` has a new overload `def __init__(self: None, managed_configuration_type: str, allocation: Optional[_models.AkriConnectorTemplateAllocation], persistent_volume_claims: Optional[List[_models.AkriConnectorTemplatePersistentVolumeClaim]], additional_configuration: Optional[Dict[str, str]], persistent_volume_claim_templates: Optional[List[Dict[str, Any]]], secrets: Optional[List[_models.AkriConnectorsSecret]], trust_settings: Optional[_models.AkriConnectorTemplateTrustList])` + - Method `AkriConnectorTemplateRuntimeStatefulSetConfiguration.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorTemplateTrustList.__init__` has a new overload `def __init__(self: None, trust_list_secret_ref: str)` + - Method `AkriConnectorTemplateTrustList.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsContainerRegistry.__init__` has a new overload `def __init__(self: None, container_registry_settings: _models.AkriConnectorsContainerRegistrySettings)` + - Method `AkriConnectorsContainerRegistry.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsContainerRegistry.__init__` has a new overload `def __init__(self: None, registry_settings_type: str)` + - Method `AkriConnectorsContainerRegistry.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsContainerRegistrySettings.__init__` has a new overload `def __init__(self: None, registry: str, image_pull_secrets: Optional[List[_models.AkriConnectorsImagePullSecret]])` + - Method `AkriConnectorsContainerRegistrySettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsDiagnosticsLogs.__init__` has a new overload `def __init__(self: None, level: Optional[str])` + - Method `AkriConnectorsDiagnosticsLogs.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsDigest.__init__` has a new overload `def __init__(self: None, digest: str)` + - Method `AkriConnectorsDigest.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsDigest.__init__` has a new overload `def __init__(self: None, tag_digest_type: str)` + - Method `AkriConnectorsDigest.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, 
Any])` + - Method `AkriConnectorsImagePullSecret.__init__` has a new overload `def __init__(self: None, secret_ref: str)` + - Method `AkriConnectorsImagePullSecret.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsMqttAuthentication.__init__` has a new overload `def __init__(self: None, method: str)` + - Method `AkriConnectorsMqttAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsMqttConnectionConfiguration.__init__` has a new overload `def __init__(self: None, authentication: Optional[_models.AkriConnectorsMqttAuthentication], host: Optional[str], protocol: Optional[Union[str, _models.AkriConnectorsMqttProtocolType]], keep_alive_seconds: Optional[int], max_inflight_messages: Optional[int], session_expiry_seconds: Optional[int], tls: Optional[_models.TlsProperties])` + - Method `AkriConnectorsMqttConnectionConfiguration.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsRegistryEndpointRef.__init__` has a new overload `def __init__(self: None, registry_endpoint_ref: str)` + - Method `AkriConnectorsRegistryEndpointRef.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsRegistryEndpointRef.__init__` has a new overload `def __init__(self: None, registry_settings_type: str)` + - Method `AkriConnectorsRegistryEndpointRef.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsRegistrySettings.__init__` has a new overload `def __init__(self: None, registry_settings_type: str)` + - Method `AkriConnectorsRegistrySettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsSecret.__init__` has a new overload `def __init__(self: None, secret_key: str, secret_alias: str, secret_ref: str)` + - Method `AkriConnectorsSecret.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsServiceAccountAuthentication.__init__` has a new overload `def __init__(self: None, service_account_token_settings: _models.AkriConnectorsServiceAccountTokenSettings)` + - Method `AkriConnectorsServiceAccountAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsServiceAccountAuthentication.__init__` has a new overload `def __init__(self: None, method: str)` + - Method `AkriConnectorsServiceAccountAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsServiceAccountTokenSettings.__init__` has a new overload `def __init__(self: None, audience: str)` + - Method `AkriConnectorsServiceAccountTokenSettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsTag.__init__` has a new overload `def __init__(self: None, tag: str)` + - Method `AkriConnectorsTag.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsTag.__init__` has a new overload `def __init__(self: None, tag_digest_type: str)` + - Method `AkriConnectorsTag.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorsTagDigestSettings.__init__` has a new overload `def __init__(self: None, tag_digest_type: str)` + - Method `AkriConnectorsTagDigestSettings.__init__` 
has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriDiscoveryHandlerAioMetadata.__init__` has a new overload `def __init__(self: None, aio_min_version: Optional[str], aio_max_version: Optional[str])` + - Method `AkriDiscoveryHandlerAioMetadata.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriDiscoveryHandlerDiagnostics.__init__` has a new overload `def __init__(self: None, logs: _models.AkriConnectorsDiagnosticsLogs)` + - Method `AkriDiscoveryHandlerDiagnostics.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriDiscoveryHandlerDiscoverableDeviceEndpointType.__init__` has a new overload `def __init__(self: None, endpoint_type: str, version: str)` + - Method `AkriDiscoveryHandlerDiscoverableDeviceEndpointType.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriDiscoveryHandlerImageConfiguration.__init__` has a new overload `def __init__(self: None, image_name: str, image_pull_policy: Optional[Union[str, _models.AkriConnectorsImagePullPolicy]], replicas: Optional[int], registry_settings: Optional[_models.AkriConnectorsRegistrySettings], tag_digest_settings: Optional[_models.AkriConnectorsTagDigestSettings])` + - Method `AkriDiscoveryHandlerImageConfiguration.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriDiscoveryHandlerProperties.__init__` has a new overload `def __init__(self: None, discoverable_device_endpoint_types: List[_models.AkriDiscoveryHandlerDiscoverableDeviceEndpointType], image_configuration: _models.AkriDiscoveryHandlerImageConfiguration, schedule: _models.AkriDiscoveryHandlerSchedule, aio_metadata: Optional[_models.AkriDiscoveryHandlerAioMetadata], additional_configuration: Optional[Dict[str, str]], diagnostics: Optional[_models.AkriDiscoveryHandlerDiagnostics], mode: Optional[Union[str, _models.OperationalMode]], mqtt_connection_configuration: Optional[_models.AkriConnectorsMqttConnectionConfiguration], secrets: Optional[List[_models.AkriConnectorsSecret]])` + - Method `AkriDiscoveryHandlerProperties.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriDiscoveryHandlerResource.__init__` has a new overload `def __init__(self: None, properties: Optional[_models.AkriDiscoveryHandlerProperties], extended_location: Optional[_models.ExtendedLocation])` + - Method `AkriDiscoveryHandlerResource.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriDiscoveryHandlerSchedule.__init__` has a new overload `def __init__(self: None, schedule_type: str)` + - Method `AkriDiscoveryHandlerSchedule.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriDiscoveryHandlerScheduleContinuous.__init__` has a new overload `def __init__(self: None, continuous: str)` + - Method `AkriDiscoveryHandlerScheduleContinuous.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriDiscoveryHandlerScheduleContinuous.__init__` has a new overload `def __init__(self: None, schedule_type: str)` + - Method `AkriDiscoveryHandlerScheduleContinuous.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriDiscoveryHandlerScheduleCron.__init__` has a new overload `def __init__(self: None, cron: str)` + - Method `AkriDiscoveryHandlerScheduleCron.__init__` has a new 
overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriDiscoveryHandlerScheduleCron.__init__` has a new overload `def __init__(self: None, schedule_type: str)` + - Method `AkriDiscoveryHandlerScheduleCron.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriDiscoveryHandlerScheduleRunOnce.__init__` has a new overload `def __init__(self: None, run_once: str)` + - Method `AkriDiscoveryHandlerScheduleRunOnce.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriDiscoveryHandlerScheduleRunOnce.__init__` has a new overload `def __init__(self: None, schedule_type: str)` + - Method `AkriDiscoveryHandlerScheduleRunOnce.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AzureDeviceRegistryNamespaceRef.__init__` has a new overload `def __init__(self: None, resource_id: str)` + - Method `AzureDeviceRegistryNamespaceRef.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerPersistence.__init__` has a new overload `def __init__(self: None, max_size: str, dynamic_settings: Optional[_models.BrokerPersistenceDynamicSettings], persistent_volume_claim_spec: Optional[_models.VolumeClaimSpec], retain: Optional[_models.BrokerRetainMessagesPolicy], state_store: Optional[_models.BrokerStateStorePolicy], subscriber_queue: Optional[_models.BrokerSubscriberQueuePolicy], encryption: Optional[_models.BrokerPersistenceEncryption])` + - Method `BrokerPersistence.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerPersistenceDynamicSettings.__init__` has a new overload `def __init__(self: None, user_property_key: str, user_property_value: str)` + - Method `BrokerPersistenceDynamicSettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerPersistenceEncryption.__init__` has a new overload `def __init__(self: None, mode: Union[str, _models.OperationalMode])` + - Method `BrokerPersistenceEncryption.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerRetainMessagesCustomPolicy.__init__` has a new overload `def __init__(self: None, retain_settings: _models.BrokerRetainMessagesSettings)` + - Method `BrokerRetainMessagesCustomPolicy.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerRetainMessagesCustomPolicy.__init__` has a new overload `def __init__(self: None, mode: str)` + - Method `BrokerRetainMessagesCustomPolicy.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerRetainMessagesDynamic.__init__` has a new overload `def __init__(self: None, mode: Union[str, _models.OperationalMode])` + - Method `BrokerRetainMessagesDynamic.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerRetainMessagesPolicy.__init__` has a new overload `def __init__(self: None, mode: str)` + - Method `BrokerRetainMessagesPolicy.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerRetainMessagesSettings.__init__` has a new overload `def __init__(self: None, topics: Optional[List[str]], dynamic: Optional[_models.BrokerRetainMessagesDynamic])` + - Method `BrokerRetainMessagesSettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method 
`BrokerStateStoreCustomPolicy.__init__` has a new overload `def __init__(self: None, state_store_settings: _models.BrokerStateStorePolicySettings)` + - Method `BrokerStateStoreCustomPolicy.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerStateStoreCustomPolicy.__init__` has a new overload `def __init__(self: None, mode: str)` + - Method `BrokerStateStoreCustomPolicy.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerStateStoreDynamic.__init__` has a new overload `def __init__(self: None, mode: Union[str, _models.OperationalMode])` + - Method `BrokerStateStoreDynamic.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerStateStorePolicy.__init__` has a new overload `def __init__(self: None, mode: str)` + - Method `BrokerStateStorePolicy.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerStateStorePolicyResources.__init__` has a new overload `def __init__(self: None, key_type: Union[str, _models.BrokerStateStoreKeyType], keys_property: List[str])` + - Method `BrokerStateStorePolicyResources.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerStateStorePolicySettings.__init__` has a new overload `def __init__(self: None, state_store_resources: Optional[List[_models.BrokerStateStorePolicyResources]], dynamic: Optional[_models.BrokerStateStoreDynamic])` + - Method `BrokerStateStorePolicySettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerSubscriberQueueCustomPolicy.__init__` has a new overload `def __init__(self: None, subscriber_queue_settings: _models.BrokerSubscriberQueueCustomPolicySettings)` + - Method `BrokerSubscriberQueueCustomPolicy.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerSubscriberQueueCustomPolicy.__init__` has a new overload `def __init__(self: None, mode: str)` + - Method `BrokerSubscriberQueueCustomPolicy.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerSubscriberQueueCustomPolicySettings.__init__` has a new overload `def __init__(self: None, subscriber_client_ids: Optional[List[str]], dynamic: Optional[_models.BrokerSubscriberQueueDynamic], topics: Optional[List[str]])` + - Method `BrokerSubscriberQueueCustomPolicySettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerSubscriberQueueDynamic.__init__` has a new overload `def __init__(self: None, mode: Union[str, _models.OperationalMode])` + - Method `BrokerSubscriberQueueDynamic.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `BrokerSubscriberQueuePolicy.__init__` has a new overload `def __init__(self: None, mode: str)` + - Method `BrokerSubscriberQueuePolicy.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DatafloGraphDestinationNode.__init__` has a new overload `def __init__(self: None, name: str, destination_settings: _models.DataflowGraphDestinationNodeSettings)` + - Method `DatafloGraphDestinationNode.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DatafloGraphDestinationNode.__init__` has a new overload `def __init__(self: None, name: str, type: str)` + - Method `DatafloGraphDestinationNode.__init__` has a new 
overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowEndpointOpenTelemetry.__init__` has a new overload `def __init__(self: None, host: str, authentication: _models.DataflowOpenTelemetryAuthentication, batching: Optional[_models.BatchingConfiguration], tls: Optional[_models.TlsProperties])` + - Method `DataflowEndpointOpenTelemetry.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowGraphConnectionInput.__init__` has a new overload `def __init__(self: None, name: str, schema: Optional[_models.DataflowGraphSchemaSettings])` + - Method `DataflowGraphConnectionInput.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowGraphConnectionOutput.__init__` has a new overload `def __init__(self: None, name: str)` + - Method `DataflowGraphConnectionOutput.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowGraphDestinationNodeSettings.__init__` has a new overload `def __init__(self: None, endpoint_ref: str, data_destination: str, output_schema_settings: Optional[_models.DataflowGraphSchemaSettings])` + - Method `DataflowGraphDestinationNodeSettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowGraphGraphNode.__init__` has a new overload `def __init__(self: None, name: str, graph_settings: _models.DataflowGraphNodeGraphSettings)` + - Method `DataflowGraphGraphNode.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowGraphGraphNode.__init__` has a new overload `def __init__(self: None, name: str, type: str)` + - Method `DataflowGraphGraphNode.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowGraphGraphNodeConfiguration.__init__` has a new overload `def __init__(self: None, key: str, value: str)` + - Method `DataflowGraphGraphNodeConfiguration.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowGraphNode.__init__` has a new overload `def __init__(self: None, name: str, type: str)` + - Method `DataflowGraphNode.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowGraphNodeConnection.__init__` has a new overload `def __init__(self: None, from_property: _models.DataflowGraphConnectionInput, to: _models.DataflowGraphConnectionOutput)` + - Method `DataflowGraphNodeConnection.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowGraphNodeGraphSettings.__init__` has a new overload `def __init__(self: None, registry_endpoint_ref: str, artifact: str, configuration: Optional[List[_models.DataflowGraphGraphNodeConfiguration]])` + - Method `DataflowGraphNodeGraphSettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowGraphProperties.__init__` has a new overload `def __init__(self: None, nodes: List[_models.DataflowGraphNode], node_connections: List[_models.DataflowGraphNodeConnection], mode: Optional[Union[str, _models.OperationalMode]], request_disk_persistence: Optional[Union[str, _models.OperationalMode]])` + - Method `DataflowGraphProperties.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowGraphResource.__init__` has a new overload `def __init__(self: None, properties: 
Optional[_models.DataflowGraphProperties], extended_location: Optional[_models.ExtendedLocation])` + - Method `DataflowGraphResource.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowGraphSchemaSettings.__init__` has a new overload `def __init__(self: None, schema_ref: str, serialization_format: Optional[Union[str, _models.DataflowGraphSerializationFormat]])` + - Method `DataflowGraphSchemaSettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowGraphSourceNode.__init__` has a new overload `def __init__(self: None, name: str, source_settings: _models.DataflowGraphSourceSettings)` + - Method `DataflowGraphSourceNode.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowGraphSourceNode.__init__` has a new overload `def __init__(self: None, name: str, type: str)` + - Method `DataflowGraphSourceNode.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowGraphSourceSettings.__init__` has a new overload `def __init__(self: None, endpoint_ref: str, data_sources: List[str])` + - Method `DataflowGraphSourceSettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowOpenTelemetryAnonymousAuthentication.__init__` has a new overload `def __init__(self: None, anonymous_settings: _models.DataflowEndpointAuthenticationAnonymous)` + - Method `DataflowOpenTelemetryAnonymousAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowOpenTelemetryAnonymousAuthentication.__init__` has a new overload `def __init__(self: None, method: str)` + - Method `DataflowOpenTelemetryAnonymousAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowOpenTelemetryAuthentication.__init__` has a new overload `def __init__(self: None, method: str)` + - Method `DataflowOpenTelemetryAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowOpenTelemetryServiceAccountAuthentication.__init__` has a new overload `def __init__(self: None, service_account_token_settings: _models.DataflowEndpointAuthenticationServiceAccountToken)` + - Method `DataflowOpenTelemetryServiceAccountAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowOpenTelemetryServiceAccountAuthentication.__init__` has a new overload `def __init__(self: None, method: str)` + - Method `DataflowOpenTelemetryServiceAccountAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowOpenTelemetryX509CertificateAuthentication.__init__` has a new overload `def __init__(self: None, x509_certificate_settings: _models.DataflowEndpointAuthenticationX509)` + - Method `DataflowOpenTelemetryX509CertificateAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `DataflowOpenTelemetryX509CertificateAuthentication.__init__` has a new overload `def __init__(self: None, method: str)` + - Method `DataflowOpenTelemetryX509CertificateAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `InstanceFeature.__init__` has a new overload `def __init__(self: None, mode: Optional[Union[str, _models.InstanceFeatureMode]], settings: 
Optional[Dict[str, Union[str, _models.OperationalMode]]])` + - Method `InstanceFeature.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointAnonymousAuthentication.__init__` has a new overload `def __init__(self: None, anonymous_settings: _models.RegistryEndpointAnonymousSettings)` + - Method `RegistryEndpointAnonymousAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointAnonymousAuthentication.__init__` has a new overload `def __init__(self: None, method: str)` + - Method `RegistryEndpointAnonymousAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointArtifactPullSecretAuthentication.__init__` has a new overload `def __init__(self: None, artifact_pull_secret_settings: _models.RegistryEndpointArtifactPullSecretSettings)` + - Method `RegistryEndpointArtifactPullSecretAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointArtifactPullSecretAuthentication.__init__` has a new overload `def __init__(self: None, method: str)` + - Method `RegistryEndpointArtifactPullSecretAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointArtifactPullSecretSettings.__init__` has a new overload `def __init__(self: None, secret_ref: str)` + - Method `RegistryEndpointArtifactPullSecretSettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointAuthentication.__init__` has a new overload `def __init__(self: None, method: str)` + - Method `RegistryEndpointAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointProperties.__init__` has a new overload `def __init__(self: None, host: str, authentication: _models.RegistryEndpointAuthentication, trust_settings: Optional[_models.RegistryEndpointTrustedSettings])` + - Method `RegistryEndpointProperties.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointResource.__init__` has a new overload `def __init__(self: None, properties: Optional[_models.RegistryEndpointProperties], extended_location: Optional[_models.ExtendedLocation])` + - Method `RegistryEndpointResource.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointSystemAssignedIdentityAuthentication.__init__` has a new overload `def __init__(self: None, system_assigned_managed_identity_settings: _models.RegistryEndpointSystemAssignedManagedIdentitySettings)` + - Method `RegistryEndpointSystemAssignedIdentityAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointSystemAssignedIdentityAuthentication.__init__` has a new overload `def __init__(self: None, method: str)` + - Method `RegistryEndpointSystemAssignedIdentityAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointSystemAssignedManagedIdentitySettings.__init__` has a new overload `def __init__(self: None, audience: Optional[str])` + - Method `RegistryEndpointSystemAssignedManagedIdentitySettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointTrustedSettings.__init__` has 
a new overload `def __init__(self: None, trusted_signing_keys: _models.RegistryEndpointTrustedSigningKey)` + - Method `RegistryEndpointTrustedSettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointTrustedSigningKey.__init__` has a new overload `def __init__(self: None, type: str)` + - Method `RegistryEndpointTrustedSigningKey.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointTrustedSigningKeyConfigMap.__init__` has a new overload `def __init__(self: None, config_map_ref: str)` + - Method `RegistryEndpointTrustedSigningKeyConfigMap.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointTrustedSigningKeyConfigMap.__init__` has a new overload `def __init__(self: None, type: str)` + - Method `RegistryEndpointTrustedSigningKeyConfigMap.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointTrustedSigningKeySecret.__init__` has a new overload `def __init__(self: None, secret_ref: str)` + - Method `RegistryEndpointTrustedSigningKeySecret.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointTrustedSigningKeySecret.__init__` has a new overload `def __init__(self: None, type: str)` + - Method `RegistryEndpointTrustedSigningKeySecret.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointUserAssignedIdentityAuthentication.__init__` has a new overload `def __init__(self: None, user_assigned_managed_identity_settings: _models.RegistryEndpointUserAssignedManagedIdentitySettings)` + - Method `RegistryEndpointUserAssignedIdentityAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointUserAssignedIdentityAuthentication.__init__` has a new overload `def __init__(self: None, method: str)` + - Method `RegistryEndpointUserAssignedIdentityAuthentication.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `RegistryEndpointUserAssignedManagedIdentitySettings.__init__` has a new overload `def __init__(self: None, client_id: str, tenant_id: str, scope: Optional[str])` + - Method `RegistryEndpointUserAssignedManagedIdentitySettings.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `SecretProviderClassRef.__init__` has a new overload `def __init__(self: None, resource_id: str)` + - Method `SecretProviderClassRef.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `VolumeClaimResourceRequirementsClaims.__init__` has a new overload `def __init__(self: None, name: str)` + - Method `VolumeClaimResourceRequirementsClaims.__init__` has a new overload `def __init__(self: None, mapping: Mapping[str, Any])` + - Method `AkriConnectorOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, instance_name: str, akri_connector_template_name: str, akri_connector_name: str, resource: AkriConnectorResource, content_type: str)` + - Method `AkriConnectorOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, instance_name: str, akri_connector_template_name: str, akri_connector_name: str, resource: JSON, content_type: str)` + - Method 
`AkriConnectorOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, instance_name: str, akri_connector_template_name: str, akri_connector_name: str, resource: IO[bytes], content_type: str)` + - Method `AkriConnectorTemplateOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, instance_name: str, akri_connector_template_name: str, resource: AkriConnectorTemplateResource, content_type: str)` + - Method `AkriConnectorTemplateOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, instance_name: str, akri_connector_template_name: str, resource: JSON, content_type: str)` + - Method `AkriConnectorTemplateOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, instance_name: str, akri_connector_template_name: str, resource: IO[bytes], content_type: str)` + - Method `AkriDiscoveryHandlerOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, instance_name: str, akri_discovery_handler_name: str, resource: AkriDiscoveryHandlerResource, content_type: str)` + - Method `AkriDiscoveryHandlerOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, instance_name: str, akri_discovery_handler_name: str, resource: JSON, content_type: str)` + - Method `AkriDiscoveryHandlerOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, instance_name: str, akri_discovery_handler_name: str, resource: IO[bytes], content_type: str)` + - Method `DataflowGraphOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, instance_name: str, dataflow_profile_name: str, dataflow_graph_name: str, resource: DataflowGraphResource, content_type: str)` + - Method `DataflowGraphOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, instance_name: str, dataflow_profile_name: str, dataflow_graph_name: str, resource: JSON, content_type: str)` + - Method `DataflowGraphOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, instance_name: str, dataflow_profile_name: str, dataflow_graph_name: str, resource: IO[bytes], content_type: str)` + - Method `RegistryEndpointOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, instance_name: str, registry_endpoint_name: str, resource: RegistryEndpointResource, content_type: str)` + - Method `RegistryEndpointOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, instance_name: str, registry_endpoint_name: str, resource: JSON, content_type: str)` + - Method `RegistryEndpointOperations.begin_create_or_update` has a new overload `def begin_create_or_update(self: None, resource_group_name: str, instance_name: str, registry_endpoint_name: str, resource: IO[bytes], content_type: str)` + ## 1.0.0 (2024-12-16) ### Other Changes diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/README.md b/sdk/iotoperations/azure-mgmt-iotoperations/README.md index 342b0ddcb20a..da0877345731 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/README.md +++ 
b/sdk/iotoperations/azure-mgmt-iotoperations/README.md @@ -1,7 +1,7 @@ # Microsoft Azure SDK for Python This is the Microsoft Azure Iotoperations Management Client Library. -This package has been tested with Python 3.8+. +This package has been tested with Python 3.9+. For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). ## _Disclaimer_ @@ -12,7 +12,7 @@ _Azure SDK Python packages support for Python 2.7 has ended 01 January 2022. For ### Prerequisites -- Python 3.8+ is required to use this package. +- Python 3.9+ is required to use this package. - [Azure subscription](https://azure.microsoft.com/free/) ### Install the package @@ -24,7 +24,7 @@ pip install azure-identity ### Authentication -By default, [Azure Active Directory](https://aka.ms/awps/aad) token authentication depends on correct configure of following environment variables. +By default, [Azure Active Directory](https://aka.ms/awps/aad) token authentication depends on correct configuration of the following environment variables. - `AZURE_CLIENT_ID` for Azure client ID. - `AZURE_TENANT_ID` for Azure tenant ID. diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/_meta.json b/sdk/iotoperations/azure-mgmt-iotoperations/_meta.json index 829a4296a3e7..5f818b889577 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/_meta.json +++ b/sdk/iotoperations/azure-mgmt-iotoperations/_meta.json @@ -1,6 +1,6 @@ { - "commit": "ab67c148ec716a0d0075770742d54468f128c72e", + "commit": "804678a63f4e15829793b12c133f7a2877c232b4", "repository_url": "https://github.com/Azure/azure-rest-api-specs", "typespec_src": "specification/iotoperations/IoTOperations.Management", - "@azure-tools/typespec-python": "0.37.0" + "@azure-tools/typespec-python": "0.44.2" } \ No newline at end of file diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/apiview-properties.json b/sdk/iotoperations/azure-mgmt-iotoperations/apiview-properties.json new file mode 100644 index 000000000000..55d67de143f7 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/apiview-properties.json @@ -0,0 +1,379 @@ +{ + "CrossLanguagePackageId": "Microsoft.IoTOperations", + "CrossLanguageDefinitionId": { + "azure.mgmt.iotoperations.models.AdvancedSettings": "Microsoft.IoTOperations.AdvancedSettings", + "azure.mgmt.iotoperations.models.AkriConnectorProperties": "Microsoft.IoTOperations.AkriConnectorProperties", + "azure.mgmt.iotoperations.models.Resource": "Azure.ResourceManager.CommonTypes.Resource", + "azure.mgmt.iotoperations.models.ProxyResource": "Azure.ResourceManager.CommonTypes.ProxyResource", + "azure.mgmt.iotoperations.models.AkriConnectorResource": "Microsoft.IoTOperations.AkriConnectorResource", + "azure.mgmt.iotoperations.models.AkriConnectorsRegistrySettings": "Microsoft.IoTOperations.AkriConnectorsRegistrySettings", + "azure.mgmt.iotoperations.models.AkriConnectorsContainerRegistry": "Microsoft.IoTOperations.AkriConnectorsContainerRegistry", + "azure.mgmt.iotoperations.models.AkriConnectorsContainerRegistrySettings": "Microsoft.IoTOperations.AkriConnectorsContainerRegistrySettings", + "azure.mgmt.iotoperations.models.AkriConnectorsDiagnosticsLogs": "Microsoft.IoTOperations.AkriConnectorsDiagnosticsLogs", + "azure.mgmt.iotoperations.models.AkriConnectorsTagDigestSettings": "Microsoft.IoTOperations.AkriConnectorsTagDigestSettings", + "azure.mgmt.iotoperations.models.AkriConnectorsDigest": "Microsoft.IoTOperations.AkriConnectorsDigest", + "azure.mgmt.iotoperations.models.AkriConnectorsImagePullSecret": 
"Microsoft.IoTOperations.AkriConnectorsImagePullSecret", + "azure.mgmt.iotoperations.models.AkriConnectorsMqttAuthentication": "Microsoft.IoTOperations.AkriConnectorsMqttAuthentication", + "azure.mgmt.iotoperations.models.AkriConnectorsMqttConnectionConfiguration": "Microsoft.IoTOperations.AkriConnectorsMqttConnectionConfiguration", + "azure.mgmt.iotoperations.models.AkriConnectorsRegistryEndpointRef": "Microsoft.IoTOperations.AkriConnectorsRegistryEndpointRef", + "azure.mgmt.iotoperations.models.AkriConnectorsSecret": "Microsoft.IoTOperations.AkriConnectorsSecret", + "azure.mgmt.iotoperations.models.AkriConnectorsServiceAccountAuthentication": "Microsoft.IoTOperations.AkriConnectorsServiceAccountAuthentication", + "azure.mgmt.iotoperations.models.AkriConnectorsServiceAccountTokenSettings": "Microsoft.IoTOperations.AkriConnectorsServiceAccountTokenSettings", + "azure.mgmt.iotoperations.models.AkriConnectorsTag": "Microsoft.IoTOperations.AkriConnectorsTag", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateAioMetadata": "Microsoft.IoTOperations.AkriConnectorTemplateAioMetadata", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateAllocation": "Microsoft.IoTOperations.AkriConnectorTemplateAllocation", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateBucketizedAllocation": "Microsoft.IoTOperations.AkriConnectorTemplateBucketizedAllocation", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateDeviceInboundEndpointConfigurationSchemaRefs": "Microsoft.IoTOperations.AkriConnectorTemplateDeviceInboundEndpointConfigurationSchemaRefs", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateDeviceInboundEndpointType": "Microsoft.IoTOperations.AkriConnectorTemplateDeviceInboundEndpointType", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateDiagnostics": "Microsoft.IoTOperations.AkriConnectorTemplateDiagnostics", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmAdvancedConfiguration": "Microsoft.IoTOperations.AkriConnectorTemplateHelmAdvancedConfiguration", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmAuthSecretRef": "Microsoft.IoTOperations.AkriConnectorTemplateHelmAuthSecretRef", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateRuntimeConfiguration": "Microsoft.IoTOperations.AkriConnectorTemplateRuntimeConfiguration", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmConfiguration": "Microsoft.IoTOperations.AkriConnectorTemplateHelmConfiguration", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmConfigurationSettings": "Microsoft.IoTOperations.AkriConnectorTemplateHelmConfigurationSettings", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmRegistrySettings": "Microsoft.IoTOperations.AkriConnectorTemplateHelmRegistrySettings", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmContainerRegistry": "Microsoft.IoTOperations.AkriConnectorTemplateHelmContainerRegistry", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmContainerRegistrySettings": "Microsoft.IoTOperations.AkriConnectorTemplateHelmContainerRegistrySettings", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmDeleteConfiguration": "Microsoft.IoTOperations.AkriConnectorTemplateHelmDeleteConfiguration", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmInstallConfiguration": "Microsoft.IoTOperations.AkriConnectorTemplateHelmInstallConfiguration", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmRegistryEndpointRef": "Microsoft.IoTOperations.AkriConnectorTemplateHelmRegistryEndpointRef", + 
"azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmUpgradeConfiguration": "Microsoft.IoTOperations.AkriConnectorTemplateHelmUpgradeConfiguration", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateManagedConfiguration": "Microsoft.IoTOperations.AkriConnectorTemplateManagedConfiguration", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateManagedConfigurationSettings": "Microsoft.IoTOperations.AkriConnectorTemplateManagedConfigurationSettings", + "azure.mgmt.iotoperations.models.AkriConnectorTemplatePersistentVolumeClaim": "Microsoft.IoTOperations.AkriConnectorTemplatePersistentVolumeClaim", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateProperties": "Microsoft.IoTOperations.AkriConnectorTemplateProperties", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateResource": "Microsoft.IoTOperations.AkriConnectorTemplateResource", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateRuntimeImageConfiguration": "Microsoft.IoTOperations.AkriConnectorTemplateRuntimeImageConfiguration", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateRuntimeImageConfigurationSettings": "Microsoft.IoTOperations.AkriConnectorTemplateRuntimeImageConfigurationSettings", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateRuntimeStatefulSetConfiguration": "Microsoft.IoTOperations.AkriConnectorTemplateRuntimeStatefulSetConfiguration", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateTrustList": "Microsoft.IoTOperations.AkriConnectorTemplateTrustList", + "azure.mgmt.iotoperations.models.AkriDiscoveryHandlerAioMetadata": "Microsoft.IoTOperations.AkriDiscoveryHandlerAioMetadata", + "azure.mgmt.iotoperations.models.AkriDiscoveryHandlerDiagnostics": "Microsoft.IoTOperations.AkriDiscoveryHandlerDiagnostics", + "azure.mgmt.iotoperations.models.AkriDiscoveryHandlerDiscoverableDeviceEndpointType": "Microsoft.IoTOperations.AkriDiscoveryHandlerDiscoverableDeviceEndpointType", + "azure.mgmt.iotoperations.models.AkriDiscoveryHandlerImageConfiguration": "Microsoft.IoTOperations.AkriDiscoveryHandlerImageConfiguration", + "azure.mgmt.iotoperations.models.AkriDiscoveryHandlerProperties": "Microsoft.IoTOperations.AkriDiscoveryHandlerProperties", + "azure.mgmt.iotoperations.models.AkriDiscoveryHandlerResource": "Microsoft.IoTOperations.AkriDiscoveryHandlerResource", + "azure.mgmt.iotoperations.models.AkriDiscoveryHandlerSchedule": "Microsoft.IoTOperations.AkriDiscoveryHandlerSchedule", + "azure.mgmt.iotoperations.models.AkriDiscoveryHandlerScheduleContinuous": "Microsoft.IoTOperations.AkriDiscoveryHandlerScheduleContinuous", + "azure.mgmt.iotoperations.models.AkriDiscoveryHandlerScheduleCron": "Microsoft.IoTOperations.AkriDiscoveryHandlerScheduleCron", + "azure.mgmt.iotoperations.models.AkriDiscoveryHandlerScheduleRunOnce": "Microsoft.IoTOperations.AkriDiscoveryHandlerScheduleRunOnce", + "azure.mgmt.iotoperations.models.AuthorizationConfig": "Microsoft.IoTOperations.AuthorizationConfig", + "azure.mgmt.iotoperations.models.AuthorizationRule": "Microsoft.IoTOperations.AuthorizationRule", + "azure.mgmt.iotoperations.models.AzureDeviceRegistryNamespaceRef": "Microsoft.IoTOperations.AzureDeviceRegistryNamespaceRef", + "azure.mgmt.iotoperations.models.BackendChain": "Microsoft.IoTOperations.BackendChain", + "azure.mgmt.iotoperations.models.BatchingConfiguration": "Microsoft.IoTOperations.BatchingConfiguration", + "azure.mgmt.iotoperations.models.BrokerAuthenticationProperties": "Microsoft.IoTOperations.BrokerAuthenticationProperties", + 
"azure.mgmt.iotoperations.models.BrokerAuthenticationResource": "Microsoft.IoTOperations.BrokerAuthenticationResource", + "azure.mgmt.iotoperations.models.BrokerAuthenticatorCustomAuth": "Microsoft.IoTOperations.BrokerAuthenticatorCustomAuth", + "azure.mgmt.iotoperations.models.BrokerAuthenticatorMethodCustom": "Microsoft.IoTOperations.BrokerAuthenticatorMethodCustom", + "azure.mgmt.iotoperations.models.BrokerAuthenticatorMethods": "Microsoft.IoTOperations.BrokerAuthenticatorMethods", + "azure.mgmt.iotoperations.models.BrokerAuthenticatorMethodSat": "Microsoft.IoTOperations.BrokerAuthenticatorMethodSat", + "azure.mgmt.iotoperations.models.BrokerAuthenticatorMethodX509": "Microsoft.IoTOperations.BrokerAuthenticatorMethodX509", + "azure.mgmt.iotoperations.models.BrokerAuthenticatorMethodX509Attributes": "Microsoft.IoTOperations.BrokerAuthenticatorMethodX509Attributes", + "azure.mgmt.iotoperations.models.BrokerAuthorizationProperties": "Microsoft.IoTOperations.BrokerAuthorizationProperties", + "azure.mgmt.iotoperations.models.BrokerAuthorizationResource": "Microsoft.IoTOperations.BrokerAuthorizationResource", + "azure.mgmt.iotoperations.models.BrokerDiagnostics": "Microsoft.IoTOperations.BrokerDiagnostics", + "azure.mgmt.iotoperations.models.BrokerListenerProperties": "Microsoft.IoTOperations.BrokerListenerProperties", + "azure.mgmt.iotoperations.models.BrokerListenerResource": "Microsoft.IoTOperations.BrokerListenerResource", + "azure.mgmt.iotoperations.models.BrokerPersistence": "Microsoft.IoTOperations.BrokerPersistence", + "azure.mgmt.iotoperations.models.BrokerPersistenceDynamicSettings": "Microsoft.IoTOperations.BrokerPersistenceDynamicSettings", + "azure.mgmt.iotoperations.models.BrokerPersistenceEncryption": "Microsoft.IoTOperations.BrokerPersistenceEncryption", + "azure.mgmt.iotoperations.models.BrokerProperties": "Microsoft.IoTOperations.BrokerProperties", + "azure.mgmt.iotoperations.models.BrokerResource": "Microsoft.IoTOperations.BrokerResource", + "azure.mgmt.iotoperations.models.BrokerResourceRule": "Microsoft.IoTOperations.BrokerResourceRule", + "azure.mgmt.iotoperations.models.BrokerRetainMessagesPolicy": "Microsoft.IoTOperations.BrokerRetainMessagesPolicy", + "azure.mgmt.iotoperations.models.BrokerRetainMessagesCustomPolicy": "Microsoft.IoTOperations.BrokerRetainMessagesCustomPolicy", + "azure.mgmt.iotoperations.models.BrokerRetainMessagesDynamic": "Microsoft.IoTOperations.BrokerRetainMessagesDynamic", + "azure.mgmt.iotoperations.models.BrokerRetainMessagesSettings": "Microsoft.IoTOperations.BrokerRetainMessagesSettings", + "azure.mgmt.iotoperations.models.BrokerStateStorePolicy": "Microsoft.IoTOperations.BrokerStateStorePolicy", + "azure.mgmt.iotoperations.models.BrokerStateStoreCustomPolicy": "Microsoft.IoTOperations.BrokerStateStoreCustomPolicy", + "azure.mgmt.iotoperations.models.BrokerStateStoreDynamic": "Microsoft.IoTOperations.BrokerStateStoreDynamic", + "azure.mgmt.iotoperations.models.BrokerStateStorePolicyResources": "Microsoft.IoTOperations.BrokerStateStorePolicyResources", + "azure.mgmt.iotoperations.models.BrokerStateStorePolicySettings": "Microsoft.IoTOperations.BrokerStateStorePolicySettings", + "azure.mgmt.iotoperations.models.BrokerSubscriberQueuePolicy": "Microsoft.IoTOperations.BrokerSubscriberQueuePolicy", + "azure.mgmt.iotoperations.models.BrokerSubscriberQueueCustomPolicy": "Microsoft.IoTOperations.BrokerSubscriberQueueCustomPolicy", + "azure.mgmt.iotoperations.models.BrokerSubscriberQueueCustomPolicySettings": 
"Microsoft.IoTOperations.BrokerSubscriberQueueCustomPolicySettings", + "azure.mgmt.iotoperations.models.BrokerSubscriberQueueDynamic": "Microsoft.IoTOperations.BrokerSubscriberQueueDynamic", + "azure.mgmt.iotoperations.models.Cardinality": "Microsoft.IoTOperations.Cardinality", + "azure.mgmt.iotoperations.models.CertManagerCertificateSpec": "Microsoft.IoTOperations.CertManagerCertificateSpec", + "azure.mgmt.iotoperations.models.CertManagerCertOptions": "Microsoft.IoTOperations.CertManagerCertOptions", + "azure.mgmt.iotoperations.models.CertManagerIssuerRef": "Microsoft.IoTOperations.CertManagerIssuerRef", + "azure.mgmt.iotoperations.models.CertManagerPrivateKey": "Microsoft.IoTOperations.CertManagerPrivateKey", + "azure.mgmt.iotoperations.models.ClientConfig": "Microsoft.IoTOperations.ClientConfig", + "azure.mgmt.iotoperations.models.DataflowGraphNode": "Microsoft.IoTOperations.DataflowGraphNode", + "azure.mgmt.iotoperations.models.DatafloGraphDestinationNode": "Microsoft.IoTOperations.DatafloGraphDestinationNode", + "azure.mgmt.iotoperations.models.DataflowBuiltInTransformationDataset": "Microsoft.IoTOperations.DataflowBuiltInTransformationDataset", + "azure.mgmt.iotoperations.models.DataflowBuiltInTransformationFilter": "Microsoft.IoTOperations.DataflowBuiltInTransformationFilter", + "azure.mgmt.iotoperations.models.DataflowBuiltInTransformationMap": "Microsoft.IoTOperations.DataflowBuiltInTransformationMap", + "azure.mgmt.iotoperations.models.DataflowBuiltInTransformationSettings": "Microsoft.IoTOperations.DataflowBuiltInTransformationSettings", + "azure.mgmt.iotoperations.models.DataflowDestinationOperationSettings": "Microsoft.IoTOperations.DataflowDestinationOperationSettings", + "azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationAccessToken": "Microsoft.IoTOperations.DataflowEndpointAuthenticationAccessToken", + "azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationAnonymous": "Microsoft.IoTOperations.DataflowEndpointAuthenticationAnonymous", + "azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationSasl": "Microsoft.IoTOperations.DataflowEndpointAuthenticationSasl", + "azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationServiceAccountToken": "Microsoft.IoTOperations.DataflowEndpointAuthenticationServiceAccountToken", + "azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity": "Microsoft.IoTOperations.DataflowEndpointAuthenticationSystemAssignedManagedIdentity", + "azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationUserAssignedManagedIdentity": "Microsoft.IoTOperations.DataflowEndpointAuthenticationUserAssignedManagedIdentity", + "azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationX509": "Microsoft.IoTOperations.DataflowEndpointAuthenticationX509", + "azure.mgmt.iotoperations.models.DataflowEndpointDataExplorer": "Microsoft.IoTOperations.DataflowEndpointDataExplorer", + "azure.mgmt.iotoperations.models.DataflowEndpointDataExplorerAuthentication": "Microsoft.IoTOperations.DataflowEndpointDataExplorerAuthentication", + "azure.mgmt.iotoperations.models.DataflowEndpointDataLakeStorage": "Microsoft.IoTOperations.DataflowEndpointDataLakeStorage", + "azure.mgmt.iotoperations.models.DataflowEndpointDataLakeStorageAuthentication": "Microsoft.IoTOperations.DataflowEndpointDataLakeStorageAuthentication", + "azure.mgmt.iotoperations.models.DataflowEndpointFabricOneLake": "Microsoft.IoTOperations.DataflowEndpointFabricOneLake", + 
"azure.mgmt.iotoperations.models.DataflowEndpointFabricOneLakeAuthentication": "Microsoft.IoTOperations.DataflowEndpointFabricOneLakeAuthentication", + "azure.mgmt.iotoperations.models.DataflowEndpointFabricOneLakeNames": "Microsoft.IoTOperations.DataflowEndpointFabricOneLakeNames", + "azure.mgmt.iotoperations.models.DataflowEndpointKafka": "Microsoft.IoTOperations.DataflowEndpointKafka", + "azure.mgmt.iotoperations.models.DataflowEndpointKafkaAuthentication": "Microsoft.IoTOperations.DataflowEndpointKafkaAuthentication", + "azure.mgmt.iotoperations.models.DataflowEndpointKafkaBatching": "Microsoft.IoTOperations.DataflowEndpointKafkaBatching", + "azure.mgmt.iotoperations.models.DataflowEndpointLocalStorage": "Microsoft.IoTOperations.DataflowEndpointLocalStorage", + "azure.mgmt.iotoperations.models.DataflowEndpointMqtt": "Microsoft.IoTOperations.DataflowEndpointMqtt", + "azure.mgmt.iotoperations.models.DataflowEndpointMqttAuthentication": "Microsoft.IoTOperations.DataflowEndpointMqttAuthentication", + "azure.mgmt.iotoperations.models.DataflowEndpointOpenTelemetry": "Microsoft.IoTOperations.DataflowEndpointOpenTelemetry", + "azure.mgmt.iotoperations.models.DataflowEndpointProperties": "Microsoft.IoTOperations.DataflowEndpointProperties", + "azure.mgmt.iotoperations.models.DataflowEndpointResource": "Microsoft.IoTOperations.DataflowEndpointResource", + "azure.mgmt.iotoperations.models.DataflowGraphConnectionInput": "Microsoft.IoTOperations.DataflowGraphConnectionInput", + "azure.mgmt.iotoperations.models.DataflowGraphConnectionOutput": "Microsoft.IoTOperations.DataflowGraphConnectionOutput", + "azure.mgmt.iotoperations.models.DataflowGraphDestinationNodeSettings": "Microsoft.IoTOperations.DataflowGraphDestinationNodeSettings", + "azure.mgmt.iotoperations.models.DataflowGraphGraphNode": "Microsoft.IoTOperations.DataflowGraphGraphNode", + "azure.mgmt.iotoperations.models.DataflowGraphGraphNodeConfiguration": "Microsoft.IoTOperations.DataflowGraphGraphNodeConfiguration", + "azure.mgmt.iotoperations.models.DataflowGraphNodeConnection": "Microsoft.IoTOperations.DataflowGraphNodeConnection", + "azure.mgmt.iotoperations.models.DataflowGraphNodeGraphSettings": "Microsoft.IoTOperations.DataflowGraphNodeGraphSettings", + "azure.mgmt.iotoperations.models.DataflowGraphProperties": "Microsoft.IoTOperations.DataflowGraphProperties", + "azure.mgmt.iotoperations.models.DataflowGraphResource": "Microsoft.IoTOperations.DataflowGraphResource", + "azure.mgmt.iotoperations.models.DataflowGraphSchemaSettings": "Microsoft.IoTOperations.DataflowGraphSchemaSettings", + "azure.mgmt.iotoperations.models.DataflowGraphSourceNode": "Microsoft.IoTOperations.DataflowGraphSourceNode", + "azure.mgmt.iotoperations.models.DataflowGraphSourceSettings": "Microsoft.IoTOperations.DataflowGraphSourceSettings", + "azure.mgmt.iotoperations.models.DataflowOpenTelemetryAuthentication": "Microsoft.IoTOperations.DataflowOpenTelemetryAuthentication", + "azure.mgmt.iotoperations.models.DataflowOpenTelemetryAnonymousAuthentication": "Microsoft.IoTOperations.DataflowOpenTelemetryAnonymousAuthentication", + "azure.mgmt.iotoperations.models.DataflowOpenTelemetryServiceAccountAuthentication": "Microsoft.IoTOperations.DataflowOpenTelemetryServiceAccountAuthentication", + "azure.mgmt.iotoperations.models.DataflowOpenTelemetryX509CertificateAuthentication": "Microsoft.IoTOperations.DataflowOpenTelemetryX509CertificateAuthentication", + "azure.mgmt.iotoperations.models.DataflowOperation": "Microsoft.IoTOperations.DataflowOperation", + 
"azure.mgmt.iotoperations.models.DataflowProfileProperties": "Microsoft.IoTOperations.DataflowProfileProperties", + "azure.mgmt.iotoperations.models.DataflowProfileResource": "Microsoft.IoTOperations.DataflowProfileResource", + "azure.mgmt.iotoperations.models.DataflowProperties": "Microsoft.IoTOperations.DataflowProperties", + "azure.mgmt.iotoperations.models.DataflowResource": "Microsoft.IoTOperations.DataflowResource", + "azure.mgmt.iotoperations.models.DataflowSourceOperationSettings": "Microsoft.IoTOperations.DataflowSourceOperationSettings", + "azure.mgmt.iotoperations.models.DiagnosticsLogs": "Microsoft.IoTOperations.DiagnosticsLogs", + "azure.mgmt.iotoperations.models.DiskBackedMessageBuffer": "Microsoft.IoTOperations.DiskBackedMessageBuffer", + "azure.mgmt.iotoperations.models.ErrorAdditionalInfo": "Azure.ResourceManager.CommonTypes.ErrorAdditionalInfo", + "azure.mgmt.iotoperations.models.ErrorDetail": "Azure.ResourceManager.CommonTypes.ErrorDetail", + "azure.mgmt.iotoperations.models.ErrorResponse": "Azure.ResourceManager.CommonTypes.ErrorResponse", + "azure.mgmt.iotoperations.models.ExtendedLocation": "Microsoft.IoTOperations.ExtendedLocation", + "azure.mgmt.iotoperations.models.Frontend": "Microsoft.IoTOperations.Frontend", + "azure.mgmt.iotoperations.models.GenerateResourceLimits": "Microsoft.IoTOperations.GenerateResourceLimits", + "azure.mgmt.iotoperations.models.InstanceFeature": "Microsoft.IoTOperations.InstanceFeature", + "azure.mgmt.iotoperations.models.InstancePatchModel": "Microsoft.IoTOperations.InstancePatchModel", + "azure.mgmt.iotoperations.models.InstanceProperties": "Microsoft.IoTOperations.InstanceProperties", + "azure.mgmt.iotoperations.models.TrackedResource": "Azure.ResourceManager.CommonTypes.TrackedResource", + "azure.mgmt.iotoperations.models.InstanceResource": "Microsoft.IoTOperations.InstanceResource", + "azure.mgmt.iotoperations.models.KubernetesReference": "Microsoft.IoTOperations.KubernetesReference", + "azure.mgmt.iotoperations.models.ListenerPort": "Microsoft.IoTOperations.ListenerPort", + "azure.mgmt.iotoperations.models.LocalKubernetesReference": "Microsoft.IoTOperations.LocalKubernetesReference", + "azure.mgmt.iotoperations.models.ManagedServiceIdentity": "Azure.ResourceManager.CommonTypes.ManagedServiceIdentity", + "azure.mgmt.iotoperations.models.Metrics": "Microsoft.IoTOperations.Metrics", + "azure.mgmt.iotoperations.models.Operation": "Azure.ResourceManager.CommonTypes.Operation", + "azure.mgmt.iotoperations.models.OperationDisplay": "Azure.ResourceManager.CommonTypes.OperationDisplay", + "azure.mgmt.iotoperations.models.PrincipalDefinition": "Microsoft.IoTOperations.PrincipalDefinition", + "azure.mgmt.iotoperations.models.ProfileDiagnostics": "Microsoft.IoTOperations.ProfileDiagnostics", + "azure.mgmt.iotoperations.models.RegistryEndpointAuthentication": "Microsoft.IoTOperations.RegistryEndpointAuthentication", + "azure.mgmt.iotoperations.models.RegistryEndpointAnonymousAuthentication": "Microsoft.IoTOperations.RegistryEndpointAnonymousAuthentication", + "azure.mgmt.iotoperations.models.RegistryEndpointAnonymousSettings": "Microsoft.IoTOperations.RegistryEndpointAnonymousSettings", + "azure.mgmt.iotoperations.models.RegistryEndpointArtifactPullSecretAuthentication": "Microsoft.IoTOperations.RegistryEndpointArtifactPullSecretAuthentication", + "azure.mgmt.iotoperations.models.RegistryEndpointArtifactPullSecretSettings": "Microsoft.IoTOperations.RegistryEndpointArtifactPullSecretSettings", + 
"azure.mgmt.iotoperations.models.RegistryEndpointProperties": "Microsoft.IoTOperations.RegistryEndpointProperties", + "azure.mgmt.iotoperations.models.RegistryEndpointResource": "Microsoft.IoTOperations.RegistryEndpointResource", + "azure.mgmt.iotoperations.models.RegistryEndpointSystemAssignedIdentityAuthentication": "Microsoft.IoTOperations.RegistryEndpointSystemAssignedIdentityAuthentication", + "azure.mgmt.iotoperations.models.RegistryEndpointSystemAssignedManagedIdentitySettings": "Microsoft.IoTOperations.RegistryEndpointSystemAssignedManagedIdentitySettings", + "azure.mgmt.iotoperations.models.RegistryEndpointTrustedSettings": "Microsoft.IoTOperations.RegistryEndpointTrustedSettings", + "azure.mgmt.iotoperations.models.RegistryEndpointTrustedSigningKey": "Microsoft.IoTOperations.RegistryEndpointTrustedSigningKey", + "azure.mgmt.iotoperations.models.RegistryEndpointTrustedSigningKeyConfigMap": "Microsoft.IoTOperations.RegistryEndpointTrustedSigningKeyConfigMap", + "azure.mgmt.iotoperations.models.RegistryEndpointTrustedSigningKeySecret": "Microsoft.IoTOperations.RegistryEndpointTrustedSigningKeySecret", + "azure.mgmt.iotoperations.models.RegistryEndpointUserAssignedIdentityAuthentication": "Microsoft.IoTOperations.RegistryEndpointUserAssignedIdentityAuthentication", + "azure.mgmt.iotoperations.models.RegistryEndpointUserAssignedManagedIdentitySettings": "Microsoft.IoTOperations.RegistryEndpointUserAssignedManagedIdentitySettings", + "azure.mgmt.iotoperations.models.SanForCert": "Microsoft.IoTOperations.SanForCert", + "azure.mgmt.iotoperations.models.SchemaRegistryRef": "Microsoft.IoTOperations.SchemaRegistryRef", + "azure.mgmt.iotoperations.models.SecretProviderClassRef": "Microsoft.IoTOperations.SecretProviderClassRef", + "azure.mgmt.iotoperations.models.SelfCheck": "Microsoft.IoTOperations.SelfCheck", + "azure.mgmt.iotoperations.models.SelfTracing": "Microsoft.IoTOperations.SelfTracing", + "azure.mgmt.iotoperations.models.StateStoreResourceRule": "Microsoft.IoTOperations.StateStoreResourceRule", + "azure.mgmt.iotoperations.models.SubscriberQueueLimit": "Microsoft.IoTOperations.SubscriberQueueLimit", + "azure.mgmt.iotoperations.models.SystemData": "Azure.ResourceManager.CommonTypes.SystemData", + "azure.mgmt.iotoperations.models.TlsCertMethod": "Microsoft.IoTOperations.TlsCertMethod", + "azure.mgmt.iotoperations.models.TlsProperties": "Microsoft.IoTOperations.TlsProperties", + "azure.mgmt.iotoperations.models.Traces": "Microsoft.IoTOperations.Traces", + "azure.mgmt.iotoperations.models.UserAssignedIdentity": "Azure.ResourceManager.CommonTypes.UserAssignedIdentity", + "azure.mgmt.iotoperations.models.VolumeClaimResourceRequirements": "Microsoft.IoTOperations.VolumeClaimResourceRequirements", + "azure.mgmt.iotoperations.models.VolumeClaimResourceRequirementsClaims": "Microsoft.IoTOperations.VolumeClaimResourceRequirementsClaims", + "azure.mgmt.iotoperations.models.VolumeClaimSpec": "Microsoft.IoTOperations.VolumeClaimSpec", + "azure.mgmt.iotoperations.models.VolumeClaimSpecSelector": "Microsoft.IoTOperations.VolumeClaimSpecSelector", + "azure.mgmt.iotoperations.models.VolumeClaimSpecSelectorMatchExpressions": "Microsoft.IoTOperations.VolumeClaimSpecSelectorMatchExpressions", + "azure.mgmt.iotoperations.models.X509ManualCertificate": "Microsoft.IoTOperations.X509ManualCertificate", + "azure.mgmt.iotoperations.models.Origin": "Azure.ResourceManager.CommonTypes.Origin", + "azure.mgmt.iotoperations.models.ActionType": "Azure.ResourceManager.CommonTypes.ActionType", + 
"azure.mgmt.iotoperations.models.CreatedByType": "Azure.ResourceManager.CommonTypes.createdByType", + "azure.mgmt.iotoperations.models.ProvisioningState": "Microsoft.IoTOperations.ProvisioningState", + "azure.mgmt.iotoperations.models.InstanceFeatureMode": "Microsoft.IoTOperations.InstanceFeatureMode", + "azure.mgmt.iotoperations.models.OperationalMode": "Microsoft.IoTOperations.OperationalMode", + "azure.mgmt.iotoperations.models.ExtendedLocationType": "Microsoft.IoTOperations.ExtendedLocationType", + "azure.mgmt.iotoperations.models.ManagedServiceIdentityType": "Azure.ResourceManager.CommonTypes.ManagedServiceIdentityType", + "azure.mgmt.iotoperations.models.SubscriberMessageDropStrategy": "Microsoft.IoTOperations.SubscriberMessageDropStrategy", + "azure.mgmt.iotoperations.models.PrivateKeyAlgorithm": "Microsoft.IoTOperations.PrivateKeyAlgorithm", + "azure.mgmt.iotoperations.models.PrivateKeyRotationPolicy": "Microsoft.IoTOperations.PrivateKeyRotationPolicy", + "azure.mgmt.iotoperations.models.OperatorValues": "Microsoft.IoTOperations.OperatorValues", + "azure.mgmt.iotoperations.models.BrokerMemoryProfile": "Microsoft.IoTOperations.BrokerMemoryProfile", + "azure.mgmt.iotoperations.models.BrokerPersistencePolicyMode": "Microsoft.IoTOperations.BrokerPersistencePolicyMode", + "azure.mgmt.iotoperations.models.BrokerStateStoreKeyType": "Microsoft.IoTOperations.BrokerStateStoreKeyType", + "azure.mgmt.iotoperations.models.BrokerProtocolType": "Microsoft.IoTOperations.BrokerProtocolType", + "azure.mgmt.iotoperations.models.TlsCertMethodMode": "Microsoft.IoTOperations.TlsCertMethodMode", + "azure.mgmt.iotoperations.models.CertManagerIssuerKind": "Microsoft.IoTOperations.CertManagerIssuerKind", + "azure.mgmt.iotoperations.models.ServiceType": "Microsoft.IoTOperations.ServiceType", + "azure.mgmt.iotoperations.models.BrokerAuthenticationMethod": "Microsoft.IoTOperations.BrokerAuthenticationMethod", + "azure.mgmt.iotoperations.models.BrokerAuthenticatorValidationMethods": "Microsoft.IoTOperations.BrokerAuthenticatorValidationMethods", + "azure.mgmt.iotoperations.models.BrokerResourceDefinitionMethods": "Microsoft.IoTOperations.BrokerResourceDefinitionMethods", + "azure.mgmt.iotoperations.models.StateStoreResourceKeyTypes": "Microsoft.IoTOperations.StateStoreResourceKeyTypes", + "azure.mgmt.iotoperations.models.StateStoreResourceDefinitionMethods": "Microsoft.IoTOperations.StateStoreResourceDefinitionMethods", + "azure.mgmt.iotoperations.models.OperationType": "Microsoft.IoTOperations.OperationType", + "azure.mgmt.iotoperations.models.SourceSerializationFormat": "Microsoft.IoTOperations.SourceSerializationFormat", + "azure.mgmt.iotoperations.models.TransformationSerializationFormat": "Microsoft.IoTOperations.TransformationSerializationFormat", + "azure.mgmt.iotoperations.models.FilterType": "Microsoft.IoTOperations.FilterType", + "azure.mgmt.iotoperations.models.DataflowMappingType": "Microsoft.IoTOperations.DataflowMappingType", + "azure.mgmt.iotoperations.models.EndpointType": "Microsoft.IoTOperations.EndpointType", + "azure.mgmt.iotoperations.models.DataflowEnpointHostType": "Microsoft.IoTOperations.DataflowEnpointHostType", + "azure.mgmt.iotoperations.models.DataExplorerAuthMethod": "Microsoft.IoTOperations.DataExplorerAuthMethod", + "azure.mgmt.iotoperations.models.DataLakeStorageAuthMethod": "Microsoft.IoTOperations.DataLakeStorageAuthMethod", + "azure.mgmt.iotoperations.models.FabricOneLakeAuthMethod": "Microsoft.IoTOperations.FabricOneLakeAuthMethod", + 
"azure.mgmt.iotoperations.models.DataflowEndpointFabricPathType": "Microsoft.IoTOperations.DataflowEndpointFabricPathType", + "azure.mgmt.iotoperations.models.KafkaAuthMethod": "Microsoft.IoTOperations.KafkaAuthMethod", + "azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationSaslType": "Microsoft.IoTOperations.DataflowEndpointAuthenticationSaslType", + "azure.mgmt.iotoperations.models.DataflowEndpointKafkaCompression": "Microsoft.IoTOperations.DataflowEndpointKafkaCompression", + "azure.mgmt.iotoperations.models.DataflowEndpointKafkaAcks": "Microsoft.IoTOperations.DataflowEndpointKafkaAcks", + "azure.mgmt.iotoperations.models.DataflowEndpointKafkaPartitionStrategy": "Microsoft.IoTOperations.DataflowEndpointKafkaPartitionStrategy", + "azure.mgmt.iotoperations.models.CloudEventAttributeType": "Microsoft.IoTOperations.CloudEventAttributeType", + "azure.mgmt.iotoperations.models.MqttAuthMethod": "Microsoft.IoTOperations.MqttAuthMethod", + "azure.mgmt.iotoperations.models.MqttRetainType": "Microsoft.IoTOperations.MqttRetainType", + "azure.mgmt.iotoperations.models.DataflowOpenTelemetryAuthenticationMethod": "Microsoft.IoTOperations.DataflowOpenTelemetryAuthenticationMethod", + "azure.mgmt.iotoperations.models.DataflowGraphNodeType": "Microsoft.IoTOperations.DataflowGraphNodeType", + "azure.mgmt.iotoperations.models.DataflowGraphSerializationFormat": "Microsoft.IoTOperations.DataflowGraphSerializationFormat", + "azure.mgmt.iotoperations.models.RegistryEndpointAuthenticationMethod": "Microsoft.IoTOperations.RegistryEndpointAuthenticationMethod", + "azure.mgmt.iotoperations.models.RegistryEndpointTrustedSigningKeyType": "Microsoft.IoTOperations.RegistryEndpointTrustedSigningKeyType", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateRuntimeConfigurationType": "Microsoft.IoTOperations.AkriConnectorTemplateRuntimeConfigurationType", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmRegistrySettingsType": "Microsoft.IoTOperations.AkriConnectorTemplateHelmRegistrySettingsType", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateManagedConfigurationType": "Microsoft.IoTOperations.AkriConnectorTemplateManagedConfigurationType", + "azure.mgmt.iotoperations.models.AkriConnectorTemplateAllocationPolicy": "Microsoft.IoTOperations.AkriConnectorTemplateAllocationPolicy", + "azure.mgmt.iotoperations.models.AkriConnectorsImagePullPolicy": "Microsoft.IoTOperations.AkriConnectorsImagePullPolicy", + "azure.mgmt.iotoperations.models.AkriConnectorsRegistrySettingsType": "Microsoft.IoTOperations.AkriConnectorsRegistrySettingsType", + "azure.mgmt.iotoperations.models.AkriConnectorsTagDigestType": "Microsoft.IoTOperations.AkriConnectorsTagDigestType", + "azure.mgmt.iotoperations.models.AkriConnectorsMqttAuthenticationMethod": "Microsoft.IoTOperations.AkriConnectorsMqttAuthenticationMethod", + "azure.mgmt.iotoperations.models.AkriConnectorsMqttProtocolType": "Microsoft.IoTOperations.AkriConnectorsMqttProtocolType", + "azure.mgmt.iotoperations.models.AkriDiscoveryHandlerScheduleType": "Microsoft.IoTOperations.AkriDiscoveryHandlerScheduleType", + "azure.mgmt.iotoperations.operations.Operations.list": "Azure.ResourceManager.Operations.list", + "azure.mgmt.iotoperations.aio.operations.Operations.list": "Azure.ResourceManager.Operations.list", + "azure.mgmt.iotoperations.operations.InstanceOperations.get": "Microsoft.IoTOperations.Instance.get", + "azure.mgmt.iotoperations.aio.operations.InstanceOperations.get": "Microsoft.IoTOperations.Instance.get", + 
"azure.mgmt.iotoperations.operations.InstanceOperations.begin_create_or_update": "Microsoft.IoTOperations.Instance.createOrUpdate", + "azure.mgmt.iotoperations.aio.operations.InstanceOperations.begin_create_or_update": "Microsoft.IoTOperations.Instance.createOrUpdate", + "azure.mgmt.iotoperations.operations.InstanceOperations.update": "Microsoft.IoTOperations.Instance.update", + "azure.mgmt.iotoperations.aio.operations.InstanceOperations.update": "Microsoft.IoTOperations.Instance.update", + "azure.mgmt.iotoperations.operations.InstanceOperations.begin_delete": "Microsoft.IoTOperations.Instance.delete", + "azure.mgmt.iotoperations.aio.operations.InstanceOperations.begin_delete": "Microsoft.IoTOperations.Instance.delete", + "azure.mgmt.iotoperations.operations.InstanceOperations.list_by_resource_group": "Microsoft.IoTOperations.Instance.listByResourceGroup", + "azure.mgmt.iotoperations.aio.operations.InstanceOperations.list_by_resource_group": "Microsoft.IoTOperations.Instance.listByResourceGroup", + "azure.mgmt.iotoperations.operations.InstanceOperations.list_by_subscription": "Microsoft.IoTOperations.Instance.listBySubscription", + "azure.mgmt.iotoperations.aio.operations.InstanceOperations.list_by_subscription": "Microsoft.IoTOperations.Instance.listBySubscription", + "azure.mgmt.iotoperations.operations.BrokerOperations.get": "Microsoft.IoTOperations.Broker.get", + "azure.mgmt.iotoperations.aio.operations.BrokerOperations.get": "Microsoft.IoTOperations.Broker.get", + "azure.mgmt.iotoperations.operations.BrokerOperations.begin_create_or_update": "Microsoft.IoTOperations.Broker.createOrUpdate", + "azure.mgmt.iotoperations.aio.operations.BrokerOperations.begin_create_or_update": "Microsoft.IoTOperations.Broker.createOrUpdate", + "azure.mgmt.iotoperations.operations.BrokerOperations.begin_delete": "Microsoft.IoTOperations.Broker.delete", + "azure.mgmt.iotoperations.aio.operations.BrokerOperations.begin_delete": "Microsoft.IoTOperations.Broker.delete", + "azure.mgmt.iotoperations.operations.BrokerOperations.list_by_resource_group": "Microsoft.IoTOperations.Broker.listByResourceGroup", + "azure.mgmt.iotoperations.aio.operations.BrokerOperations.list_by_resource_group": "Microsoft.IoTOperations.Broker.listByResourceGroup", + "azure.mgmt.iotoperations.operations.BrokerListenerOperations.get": "Microsoft.IoTOperations.BrokerListener.get", + "azure.mgmt.iotoperations.aio.operations.BrokerListenerOperations.get": "Microsoft.IoTOperations.BrokerListener.get", + "azure.mgmt.iotoperations.operations.BrokerListenerOperations.begin_create_or_update": "Microsoft.IoTOperations.BrokerListener.createOrUpdate", + "azure.mgmt.iotoperations.aio.operations.BrokerListenerOperations.begin_create_or_update": "Microsoft.IoTOperations.BrokerListener.createOrUpdate", + "azure.mgmt.iotoperations.operations.BrokerListenerOperations.begin_delete": "Microsoft.IoTOperations.BrokerListener.delete", + "azure.mgmt.iotoperations.aio.operations.BrokerListenerOperations.begin_delete": "Microsoft.IoTOperations.BrokerListener.delete", + "azure.mgmt.iotoperations.operations.BrokerListenerOperations.list_by_resource_group": "Microsoft.IoTOperations.BrokerListener.listByResourceGroup", + "azure.mgmt.iotoperations.aio.operations.BrokerListenerOperations.list_by_resource_group": "Microsoft.IoTOperations.BrokerListener.listByResourceGroup", + "azure.mgmt.iotoperations.operations.BrokerAuthenticationOperations.get": "Microsoft.IoTOperations.BrokerAuthentication.get", + 
"azure.mgmt.iotoperations.aio.operations.BrokerAuthenticationOperations.get": "Microsoft.IoTOperations.BrokerAuthentication.get", + "azure.mgmt.iotoperations.operations.BrokerAuthenticationOperations.begin_create_or_update": "Microsoft.IoTOperations.BrokerAuthentication.createOrUpdate", + "azure.mgmt.iotoperations.aio.operations.BrokerAuthenticationOperations.begin_create_or_update": "Microsoft.IoTOperations.BrokerAuthentication.createOrUpdate", + "azure.mgmt.iotoperations.operations.BrokerAuthenticationOperations.begin_delete": "Microsoft.IoTOperations.BrokerAuthentication.delete", + "azure.mgmt.iotoperations.aio.operations.BrokerAuthenticationOperations.begin_delete": "Microsoft.IoTOperations.BrokerAuthentication.delete", + "azure.mgmt.iotoperations.operations.BrokerAuthenticationOperations.list_by_resource_group": "Microsoft.IoTOperations.BrokerAuthentication.listByResourceGroup", + "azure.mgmt.iotoperations.aio.operations.BrokerAuthenticationOperations.list_by_resource_group": "Microsoft.IoTOperations.BrokerAuthentication.listByResourceGroup", + "azure.mgmt.iotoperations.operations.BrokerAuthorizationOperations.get": "Microsoft.IoTOperations.BrokerAuthorization.get", + "azure.mgmt.iotoperations.aio.operations.BrokerAuthorizationOperations.get": "Microsoft.IoTOperations.BrokerAuthorization.get", + "azure.mgmt.iotoperations.operations.BrokerAuthorizationOperations.begin_create_or_update": "Microsoft.IoTOperations.BrokerAuthorization.createOrUpdate", + "azure.mgmt.iotoperations.aio.operations.BrokerAuthorizationOperations.begin_create_or_update": "Microsoft.IoTOperations.BrokerAuthorization.createOrUpdate", + "azure.mgmt.iotoperations.operations.BrokerAuthorizationOperations.begin_delete": "Microsoft.IoTOperations.BrokerAuthorization.delete", + "azure.mgmt.iotoperations.aio.operations.BrokerAuthorizationOperations.begin_delete": "Microsoft.IoTOperations.BrokerAuthorization.delete", + "azure.mgmt.iotoperations.operations.BrokerAuthorizationOperations.list_by_resource_group": "Microsoft.IoTOperations.BrokerAuthorization.listByResourceGroup", + "azure.mgmt.iotoperations.aio.operations.BrokerAuthorizationOperations.list_by_resource_group": "Microsoft.IoTOperations.BrokerAuthorization.listByResourceGroup", + "azure.mgmt.iotoperations.operations.DataflowProfileOperations.get": "Microsoft.IoTOperations.DataflowProfile.get", + "azure.mgmt.iotoperations.aio.operations.DataflowProfileOperations.get": "Microsoft.IoTOperations.DataflowProfile.get", + "azure.mgmt.iotoperations.operations.DataflowProfileOperations.begin_create_or_update": "Microsoft.IoTOperations.DataflowProfile.createOrUpdate", + "azure.mgmt.iotoperations.aio.operations.DataflowProfileOperations.begin_create_or_update": "Microsoft.IoTOperations.DataflowProfile.createOrUpdate", + "azure.mgmt.iotoperations.operations.DataflowProfileOperations.begin_delete": "Microsoft.IoTOperations.DataflowProfile.delete", + "azure.mgmt.iotoperations.aio.operations.DataflowProfileOperations.begin_delete": "Microsoft.IoTOperations.DataflowProfile.delete", + "azure.mgmt.iotoperations.operations.DataflowProfileOperations.list_by_resource_group": "Microsoft.IoTOperations.DataflowProfile.listByResourceGroup", + "azure.mgmt.iotoperations.aio.operations.DataflowProfileOperations.list_by_resource_group": "Microsoft.IoTOperations.DataflowProfile.listByResourceGroup", + "azure.mgmt.iotoperations.operations.DataflowOperations.get": "Microsoft.IoTOperations.Dataflow.get", + "azure.mgmt.iotoperations.aio.operations.DataflowOperations.get": 
"Microsoft.IoTOperations.Dataflow.get", + "azure.mgmt.iotoperations.operations.DataflowOperations.begin_create_or_update": "Microsoft.IoTOperations.Dataflow.createOrUpdate", + "azure.mgmt.iotoperations.aio.operations.DataflowOperations.begin_create_or_update": "Microsoft.IoTOperations.Dataflow.createOrUpdate", + "azure.mgmt.iotoperations.operations.DataflowOperations.begin_delete": "Microsoft.IoTOperations.Dataflow.delete", + "azure.mgmt.iotoperations.aio.operations.DataflowOperations.begin_delete": "Microsoft.IoTOperations.Dataflow.delete", + "azure.mgmt.iotoperations.operations.DataflowOperations.list_by_resource_group": "Microsoft.IoTOperations.Dataflow.listByResourceGroup", + "azure.mgmt.iotoperations.aio.operations.DataflowOperations.list_by_resource_group": "Microsoft.IoTOperations.Dataflow.listByResourceGroup", + "azure.mgmt.iotoperations.operations.DataflowEndpointOperations.get": "Microsoft.IoTOperations.DataflowEndpoint.get", + "azure.mgmt.iotoperations.aio.operations.DataflowEndpointOperations.get": "Microsoft.IoTOperations.DataflowEndpoint.get", + "azure.mgmt.iotoperations.operations.DataflowEndpointOperations.begin_create_or_update": "Microsoft.IoTOperations.DataflowEndpoint.createOrUpdate", + "azure.mgmt.iotoperations.aio.operations.DataflowEndpointOperations.begin_create_or_update": "Microsoft.IoTOperations.DataflowEndpoint.createOrUpdate", + "azure.mgmt.iotoperations.operations.DataflowEndpointOperations.begin_delete": "Microsoft.IoTOperations.DataflowEndpoint.delete", + "azure.mgmt.iotoperations.aio.operations.DataflowEndpointOperations.begin_delete": "Microsoft.IoTOperations.DataflowEndpoint.delete", + "azure.mgmt.iotoperations.operations.DataflowEndpointOperations.list_by_resource_group": "Microsoft.IoTOperations.DataflowEndpoint.listByResourceGroup", + "azure.mgmt.iotoperations.aio.operations.DataflowEndpointOperations.list_by_resource_group": "Microsoft.IoTOperations.DataflowEndpoint.listByResourceGroup", + "azure.mgmt.iotoperations.operations.DataflowGraphOperations.get": "Microsoft.IoTOperations.DataflowGraph.get", + "azure.mgmt.iotoperations.aio.operations.DataflowGraphOperations.get": "Microsoft.IoTOperations.DataflowGraph.get", + "azure.mgmt.iotoperations.operations.DataflowGraphOperations.begin_create_or_update": "Microsoft.IoTOperations.DataflowGraph.createOrUpdate", + "azure.mgmt.iotoperations.aio.operations.DataflowGraphOperations.begin_create_or_update": "Microsoft.IoTOperations.DataflowGraph.createOrUpdate", + "azure.mgmt.iotoperations.operations.DataflowGraphOperations.begin_delete": "Microsoft.IoTOperations.DataflowGraph.delete", + "azure.mgmt.iotoperations.aio.operations.DataflowGraphOperations.begin_delete": "Microsoft.IoTOperations.DataflowGraph.delete", + "azure.mgmt.iotoperations.operations.DataflowGraphOperations.list_by_dataflow_profile": "Microsoft.IoTOperations.DataflowGraph.listByDataflowProfile", + "azure.mgmt.iotoperations.aio.operations.DataflowGraphOperations.list_by_dataflow_profile": "Microsoft.IoTOperations.DataflowGraph.listByDataflowProfile", + "azure.mgmt.iotoperations.operations.RegistryEndpointOperations.get": "Microsoft.IoTOperations.RegistryEndpoint.get", + "azure.mgmt.iotoperations.aio.operations.RegistryEndpointOperations.get": "Microsoft.IoTOperations.RegistryEndpoint.get", + "azure.mgmt.iotoperations.operations.RegistryEndpointOperations.begin_create_or_update": "Microsoft.IoTOperations.RegistryEndpoint.createOrUpdate", + "azure.mgmt.iotoperations.aio.operations.RegistryEndpointOperations.begin_create_or_update": 
"Microsoft.IoTOperations.RegistryEndpoint.createOrUpdate", + "azure.mgmt.iotoperations.operations.RegistryEndpointOperations.begin_delete": "Microsoft.IoTOperations.RegistryEndpoint.delete", + "azure.mgmt.iotoperations.aio.operations.RegistryEndpointOperations.begin_delete": "Microsoft.IoTOperations.RegistryEndpoint.delete", + "azure.mgmt.iotoperations.operations.RegistryEndpointOperations.list_by_instance_resource": "Microsoft.IoTOperations.RegistryEndpoint.listByInstanceResource", + "azure.mgmt.iotoperations.aio.operations.RegistryEndpointOperations.list_by_instance_resource": "Microsoft.IoTOperations.RegistryEndpoint.listByInstanceResource", + "azure.mgmt.iotoperations.operations.AkriConnectorTemplateOperations.get": "Microsoft.IoTOperations.AkriConnectorTemplate.get", + "azure.mgmt.iotoperations.aio.operations.AkriConnectorTemplateOperations.get": "Microsoft.IoTOperations.AkriConnectorTemplate.get", + "azure.mgmt.iotoperations.operations.AkriConnectorTemplateOperations.begin_create_or_update": "Microsoft.IoTOperations.AkriConnectorTemplate.createOrUpdate", + "azure.mgmt.iotoperations.aio.operations.AkriConnectorTemplateOperations.begin_create_or_update": "Microsoft.IoTOperations.AkriConnectorTemplate.createOrUpdate", + "azure.mgmt.iotoperations.operations.AkriConnectorTemplateOperations.begin_delete": "Microsoft.IoTOperations.AkriConnectorTemplate.delete", + "azure.mgmt.iotoperations.aio.operations.AkriConnectorTemplateOperations.begin_delete": "Microsoft.IoTOperations.AkriConnectorTemplate.delete", + "azure.mgmt.iotoperations.operations.AkriConnectorTemplateOperations.list_by_instance_resource": "Microsoft.IoTOperations.AkriConnectorTemplate.listByInstanceResource", + "azure.mgmt.iotoperations.aio.operations.AkriConnectorTemplateOperations.list_by_instance_resource": "Microsoft.IoTOperations.AkriConnectorTemplate.listByInstanceResource", + "azure.mgmt.iotoperations.operations.AkriConnectorOperations.get": "Microsoft.IoTOperations.AkriConnector.get", + "azure.mgmt.iotoperations.aio.operations.AkriConnectorOperations.get": "Microsoft.IoTOperations.AkriConnector.get", + "azure.mgmt.iotoperations.operations.AkriConnectorOperations.begin_create_or_update": "Microsoft.IoTOperations.AkriConnector.createOrUpdate", + "azure.mgmt.iotoperations.aio.operations.AkriConnectorOperations.begin_create_or_update": "Microsoft.IoTOperations.AkriConnector.createOrUpdate", + "azure.mgmt.iotoperations.operations.AkriConnectorOperations.begin_delete": "Microsoft.IoTOperations.AkriConnector.delete", + "azure.mgmt.iotoperations.aio.operations.AkriConnectorOperations.begin_delete": "Microsoft.IoTOperations.AkriConnector.delete", + "azure.mgmt.iotoperations.operations.AkriConnectorOperations.list_by_template": "Microsoft.IoTOperations.AkriConnector.listByTemplate", + "azure.mgmt.iotoperations.aio.operations.AkriConnectorOperations.list_by_template": "Microsoft.IoTOperations.AkriConnector.listByTemplate", + "azure.mgmt.iotoperations.operations.AkriDiscoveryHandlerOperations.get": "Microsoft.IoTOperations.AkriDiscoveryHandler.get", + "azure.mgmt.iotoperations.aio.operations.AkriDiscoveryHandlerOperations.get": "Microsoft.IoTOperations.AkriDiscoveryHandler.get", + "azure.mgmt.iotoperations.operations.AkriDiscoveryHandlerOperations.begin_create_or_update": "Microsoft.IoTOperations.AkriDiscoveryHandler.createOrUpdate", + "azure.mgmt.iotoperations.aio.operations.AkriDiscoveryHandlerOperations.begin_create_or_update": "Microsoft.IoTOperations.AkriDiscoveryHandler.createOrUpdate", + 
"azure.mgmt.iotoperations.operations.AkriDiscoveryHandlerOperations.begin_delete": "Microsoft.IoTOperations.AkriDiscoveryHandler.delete", + "azure.mgmt.iotoperations.aio.operations.AkriDiscoveryHandlerOperations.begin_delete": "Microsoft.IoTOperations.AkriDiscoveryHandler.delete", + "azure.mgmt.iotoperations.operations.AkriDiscoveryHandlerOperations.list_by_instance_resource": "Microsoft.IoTOperations.AkriDiscoveryHandler.listByInstanceResource", + "azure.mgmt.iotoperations.aio.operations.AkriDiscoveryHandlerOperations.list_by_instance_resource": "Microsoft.IoTOperations.AkriDiscoveryHandler.listByInstanceResource" + } +} \ No newline at end of file diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_client.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_client.py index 6a329384b530..a65a335d6bdd 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_client.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_client.py @@ -7,26 +7,33 @@ # -------------------------------------------------------------------------- from copy import deepcopy -from typing import Any, TYPE_CHECKING +from typing import Any, Optional, TYPE_CHECKING, cast from typing_extensions import Self from azure.core.pipeline import policies from azure.core.rest import HttpRequest, HttpResponse +from azure.core.settings import settings from azure.mgmt.core import ARMPipelineClient from azure.mgmt.core.policies import ARMAutoResourceProviderRegistrationPolicy +from azure.mgmt.core.tools import get_arm_endpoints from ._configuration import IoTOperationsMgmtClientConfiguration -from ._serialization import Deserializer, Serializer +from ._utils.serialization import Deserializer, Serializer from .operations import ( + AkriConnectorOperations, + AkriConnectorTemplateOperations, + AkriDiscoveryHandlerOperations, BrokerAuthenticationOperations, BrokerAuthorizationOperations, BrokerListenerOperations, BrokerOperations, DataflowEndpointOperations, + DataflowGraphOperations, DataflowOperations, DataflowProfileOperations, InstanceOperations, Operations, + RegistryEndpointOperations, ) if TYPE_CHECKING: @@ -56,30 +63,49 @@ class IoTOperationsMgmtClient: # pylint: disable=too-many-instance-attributes :vartype dataflow: azure.mgmt.iotoperations.operations.DataflowOperations :ivar dataflow_endpoint: DataflowEndpointOperations operations :vartype dataflow_endpoint: azure.mgmt.iotoperations.operations.DataflowEndpointOperations + :ivar dataflow_graph: DataflowGraphOperations operations + :vartype dataflow_graph: azure.mgmt.iotoperations.operations.DataflowGraphOperations + :ivar registry_endpoint: RegistryEndpointOperations operations + :vartype registry_endpoint: azure.mgmt.iotoperations.operations.RegistryEndpointOperations + :ivar akri_connector_template: AkriConnectorTemplateOperations operations + :vartype akri_connector_template: + azure.mgmt.iotoperations.operations.AkriConnectorTemplateOperations + :ivar akri_connector: AkriConnectorOperations operations + :vartype akri_connector: azure.mgmt.iotoperations.operations.AkriConnectorOperations + :ivar akri_discovery_handler: AkriDiscoveryHandlerOperations operations + :vartype akri_discovery_handler: + azure.mgmt.iotoperations.operations.AkriDiscoveryHandlerOperations :param credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The ID of the target subscription. 
The value must be an UUID. Required. :type subscription_id: str - :param base_url: Service host. Default value is "https://management.azure.com". + :param base_url: Service host. Default value is None. :type base_url: str - :keyword api_version: The API version to use for this operation. Default value is "2024-11-01". - Note that overriding this default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Default value is + "2025-07-01-preview". Note that overriding this default value may result in unsupported + behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ def __init__( - self, - credential: "TokenCredential", - subscription_id: str, - base_url: str = "https://management.azure.com", - **kwargs: Any + self, credential: "TokenCredential", subscription_id: str, base_url: Optional[str] = None, **kwargs: Any ) -> None: _endpoint = "{endpoint}" + _cloud = kwargs.pop("cloud_setting", None) or settings.current.azure_cloud # type: ignore + _endpoints = get_arm_endpoints(_cloud) + if not base_url: + base_url = _endpoints["resource_manager"] + credential_scopes = kwargs.pop("credential_scopes", _endpoints["credential_scopes"]) self._config = IoTOperationsMgmtClientConfiguration( - credential=credential, subscription_id=subscription_id, base_url=base_url, **kwargs + credential=credential, + subscription_id=subscription_id, + base_url=cast(str, base_url), + credential_scopes=credential_scopes, + **kwargs ) + _policies = kwargs.pop("policies", None) if _policies is None: _policies = [ @@ -98,7 +124,7 @@ def __init__( policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, self._config.http_logging_policy, ] - self._client: ARMPipelineClient = ARMPipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + self._client: ARMPipelineClient = ARMPipelineClient(base_url=cast(str, _endpoint), policies=_policies, **kwargs) self._serialize = Serializer() self._deserialize = Deserializer() @@ -120,6 +146,17 @@ def __init__( self.dataflow_endpoint = DataflowEndpointOperations( self._client, self._config, self._serialize, self._deserialize ) + self.dataflow_graph = DataflowGraphOperations(self._client, self._config, self._serialize, self._deserialize) + self.registry_endpoint = RegistryEndpointOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.akri_connector_template = AkriConnectorTemplateOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.akri_connector = AkriConnectorOperations(self._client, self._config, self._serialize, self._deserialize) + self.akri_discovery_handler = AkriDiscoveryHandlerOperations( + self._client, self._config, self._serialize, self._deserialize + ) def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. 
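
For orientation, a minimal usage sketch of the regenerated synchronous client shown above, assuming placeholder subscription, resource group, instance, and dataflow profile names; DefaultAzureCredential comes from azure.identity, and the keyword parameter names on the two list calls are assumptions modeled on the generated get/create operations in this patch. Leaving base_url unset lets the client resolve the ARM endpoint and credential scopes from the active cloud setting, and api_version defaults to "2025-07-01-preview" unless overridden.

from azure.identity import DefaultAzureCredential
from azure.mgmt.iotoperations import IoTOperationsMgmtClient

# Placeholder identifiers for illustration only.
subscription_id = "00000000-0000-0000-0000-000000000000"
resource_group_name = "my-resource-group"
instance_name = "my-aio-instance"
dataflow_profile_name = "my-dataflow-profile"

# base_url is omitted, so the ARM endpoint and credential scopes are resolved
# from the active cloud setting; api_version defaults to "2025-07-01-preview".
client = IoTOperationsMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id=subscription_id,
)

# New operation groups added in this version of the client.
for graph in client.dataflow_graph.list_by_dataflow_profile(
    resource_group_name=resource_group_name,
    instance_name=instance_name,
    dataflow_profile_name=dataflow_profile_name,
):
    print(graph.name)

for endpoint in client.registry_endpoint.list_by_instance_resource(
    resource_group_name=resource_group_name,
    instance_name=instance_name,
):
    print(endpoint.name)

The asynchronous client in azure.mgmt.iotoperations.aio exposes the same operation groups with awaitable methods and async paging.
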
diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_configuration.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_configuration.py index 8e328b39280f..a1c8245c2422 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_configuration.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_configuration.py @@ -29,8 +29,9 @@ class IoTOperationsMgmtClientConfiguration: # pylint: disable=too-many-instance :type subscription_id: str :param base_url: Service host. Default value is "https://management.azure.com". :type base_url: str - :keyword api_version: The API version to use for this operation. Default value is "2024-11-01". - Note that overriding this default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Default value is + "2025-07-01-preview". Note that overriding this default value may result in unsupported + behavior. :paramtype api_version: str """ @@ -41,7 +42,7 @@ def __init__( base_url: str = "https://management.azure.com", **kwargs: Any ) -> None: - api_version: str = kwargs.pop("api_version", "2024-11-01") + api_version: str = kwargs.pop("api_version", "2025-07-01-preview") if credential is None: raise ValueError("Parameter 'credential' must not be None.") diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_patch.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_patch.py index f7dd32510333..8bcb627aa475 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_patch.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_patch.py @@ -1,7 +1,8 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_utils/__init__.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_utils/__init__.py new file mode 100644 index 000000000000..8026245c2abc --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_utils/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_model_base.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_utils/model_base.py similarity index 94% rename from sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_model_base.py rename to sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_utils/model_base.py index 7f73b97b23ef..49d5c7259389 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_model_base.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_utils/model_base.py @@ -2,8 +2,9 @@ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- # pylint: disable=protected-access, broad-except @@ -21,6 +22,7 @@ from datetime import datetime, date, time, timedelta, timezone from json import JSONEncoder import xml.etree.ElementTree as ET +from collections.abc import MutableMapping from typing_extensions import Self import isodate from azure.core.exceptions import DeserializationError @@ -28,11 +30,6 @@ from azure.core.pipeline import PipelineResponse from azure.core.serialization import _Null -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping - _LOGGER = logging.getLogger(__name__) __all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] @@ -347,7 +344,7 @@ def _get_model(module_name: str, model_name: str): _UNSET = object() -class _MyMutableMapping(MutableMapping[str, typing.Any]): # pylint: disable=unsubscriptable-object +class _MyMutableMapping(MutableMapping[str, typing.Any]): def __init__(self, data: typing.Dict[str, typing.Any]) -> None: self._data = data @@ -373,50 +370,97 @@ def __ne__(self, other: typing.Any) -> bool: return not self.__eq__(other) def keys(self) -> typing.KeysView[str]: + """ + :returns: a set-like object providing a view on D's keys + :rtype: ~typing.KeysView + """ return self._data.keys() def values(self) -> typing.ValuesView[typing.Any]: + """ + :returns: an object providing a view on D's values + :rtype: ~typing.ValuesView + """ return self._data.values() def items(self) -> typing.ItemsView[str, typing.Any]: + """ + :returns: set-like object providing a view on D's items + :rtype: ~typing.ItemsView + """ return self._data.items() def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Get the value for key if key is in the dictionary, else default. + :param str key: The key to look up. + :param any default: The value to return if key is not in the dictionary. Defaults to None + :returns: D[k] if k in D, else d. + :rtype: any + """ try: return self[key] except KeyError: return default @typing.overload - def pop(self, key: str) -> typing.Any: ... + def pop(self, key: str) -> typing.Any: ... # pylint: disable=arguments-differ @typing.overload - def pop(self, key: str, default: _T) -> _T: ... + def pop(self, key: str, default: _T) -> _T: ... 
# pylint: disable=signature-differs @typing.overload - def pop(self, key: str, default: typing.Any) -> typing.Any: ... + def pop(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Removes specified key and return the corresponding value. + :param str key: The key to pop. + :param any default: The value to return if key is not in the dictionary + :returns: The value corresponding to the key. + :rtype: any + :raises KeyError: If key is not found and default is not given. + """ if default is _UNSET: return self._data.pop(key) return self._data.pop(key, default) def popitem(self) -> typing.Tuple[str, typing.Any]: + """ + Removes and returns some (key, value) pair + :returns: The (key, value) pair. + :rtype: tuple + :raises KeyError: if D is empty. + """ return self._data.popitem() def clear(self) -> None: + """ + Remove all items from D. + """ self._data.clear() - def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: + def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: # pylint: disable=arguments-differ + """ + Updates D from mapping/iterable E and F. + :param any args: Either a mapping object or an iterable of key-value pairs. + """ self._data.update(*args, **kwargs) @typing.overload def setdefault(self, key: str, default: None = None) -> None: ... @typing.overload - def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... + def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Same as calling D.get(k, d), and setting D[k]=d if k not found + :param str key: The key to look up. + :param any default: The value to set if key is not in the dictionary + :returns: D[k] if k in D, else d. + :rtype: any + """ if default is _UNSET: return self._data.setdefault(key) return self._data.setdefault(key, default) @@ -597,7 +641,7 @@ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items()) cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") - return super().__new__(cls) # pylint: disable=no-value-for-parameter + return super().__new__(cls) def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: for base in cls.__bases__: @@ -633,7 +677,7 @@ def _deserialize(cls, data, exist_discriminators): discriminator_value = data.find(xml_name).text # pyright: ignore else: discriminator_value = data.get(discriminator._rest_name) - mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member return mapped_cls._deserialize(data, exist_discriminators) def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]: @@ -910,6 +954,19 @@ def _failsafe_deserialize( return None +def _failsafe_deserialize_xml( + deserializer: typing.Any, + value: typing.Any, +) -> typing.Any: + try: + return _deserialize_xml(deserializer, value) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. 
Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + class _RestField: def __init__( self, diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_serialization.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_utils/serialization.py similarity index 94% rename from sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_serialization.py rename to sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_utils/serialization.py index b24ab2885450..eb86ea23c965 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_serialization.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_utils/serialization.py @@ -1,28 +1,10 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 # -------------------------------------------------------------------------- -# # Copyright (c) Microsoft Corporation. All rights reserved. -# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. -# +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- # pyright: reportUnnecessaryTypeIgnoreComment=false @@ -48,9 +30,7 @@ IO, Mapping, Callable, - TypeVar, MutableMapping, - Type, List, ) @@ -61,13 +41,13 @@ import xml.etree.ElementTree as ET import isodate # type: ignore +from typing_extensions import Self from azure.core.exceptions import DeserializationError, SerializationError from azure.core.serialization import NULL as CoreNull _BOM = codecs.BOM_UTF8.decode(encoding="utf-8") -ModelType = TypeVar("ModelType", bound="Model") JSON = MutableMapping[str, Any] @@ -185,73 +165,7 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], except NameError: _long_type = int - -class UTC(datetime.tzinfo): - """Time Zone info for handling UTC""" - - def utcoffset(self, dt): - """UTF offset for UTC is 0. - - :param datetime.datetime dt: The datetime - :returns: The offset - :rtype: datetime.timedelta - """ - return datetime.timedelta(0) - - def tzname(self, dt): - """Timestamp representation. 
- - :param datetime.datetime dt: The datetime - :returns: The timestamp representation - :rtype: str - """ - return "Z" - - def dst(self, dt): - """No daylight saving for UTC. - - :param datetime.datetime dt: The datetime - :returns: The daylight saving time - :rtype: datetime.timedelta - """ - return datetime.timedelta(hours=1) - - -try: - from datetime import timezone as _FixedOffset # type: ignore -except ImportError: # Python 2.7 - - class _FixedOffset(datetime.tzinfo): # type: ignore - """Fixed offset in minutes east from UTC. - Copy/pasted from Python doc - :param datetime.timedelta offset: offset in timedelta format - """ - - def __init__(self, offset) -> None: - self.__offset = offset - - def utcoffset(self, dt): - return self.__offset - - def tzname(self, dt): - return str(self.__offset.total_seconds() / 3600) - - def __repr__(self): - return "".format(self.tzname(None)) - - def dst(self, dt): - return datetime.timedelta(0) - - def __getinitargs__(self): - return (self.__offset,) - - -try: - from datetime import timezone - - TZ_UTC = timezone.utc -except ImportError: - TZ_UTC = UTC() # type: ignore +TZ_UTC = datetime.timezone.utc _FLATTEN = re.compile(r"(? ModelType: + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: """Parse a str using the RestAPI syntax and return a model. :param str data: A str using RestAPI structure. JSON by default. :param str content_type: JSON by default, set application/xml if XML. :returns: An instance of this model - :raises: DeserializationError if something went wrong - :rtype: ModelType + :raises DeserializationError: if something went wrong + :rtype: Self """ deserializer = Deserializer(cls._infer_class_models()) return deserializer(cls.__name__, data, content_type=content_type) # type: ignore @classmethod def from_dict( - cls: Type[ModelType], + cls, data: Any, key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, content_type: Optional[str] = None, - ) -> ModelType: + ) -> Self: """Parse a dict using given key extractor return a model. By default consider key @@ -479,8 +393,8 @@ def from_dict( :param function key_extractors: A key extractor function. :param str content_type: JSON by default, set application/xml if XML. :returns: An instance of this model - :raises: DeserializationError if something went wrong - :rtype: ModelType + :raises DeserializationError: if something went wrong + :rtype: Self """ deserializer = Deserializer(cls._infer_class_models()) deserializer.key_extractors = ( # type: ignore @@ -626,7 +540,7 @@ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, to :param object target_obj: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str, dict - :raises: SerializationError if serialization fails. + :raises SerializationError: if serialization fails. :returns: The serialized data. """ key_transformer = kwargs.get("key_transformer", self.key_transformer) @@ -736,8 +650,8 @@ def body(self, data, data_type, **kwargs): :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: dict - :raises: SerializationError if serialization fails. - :raises: ValueError if data is None + :raises SerializationError: if serialization fails. + :raises ValueError: if data is None :returns: The serialized request body """ @@ -781,8 +695,8 @@ def url(self, name, data, data_type, **kwargs): :param str data_type: The type to be serialized from. 
:rtype: str :returns: The serialized URL path - :raises: TypeError if serialization fails. - :raises: ValueError if data is None + :raises TypeError: if serialization fails. + :raises ValueError: if data is None """ try: output = self.serialize_data(data, data_type, **kwargs) @@ -805,8 +719,8 @@ def query(self, name, data, data_type, **kwargs): :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str, list - :raises: TypeError if serialization fails. - :raises: ValueError if data is None + :raises TypeError: if serialization fails. + :raises ValueError: if data is None :returns: The serialized query parameter """ try: @@ -835,8 +749,8 @@ def header(self, name, data, data_type, **kwargs): :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str - :raises: TypeError if serialization fails. - :raises: ValueError if data is None + :raises TypeError: if serialization fails. + :raises ValueError: if data is None :returns: The serialized header """ try: @@ -855,9 +769,9 @@ def serialize_data(self, data, data_type, **kwargs): :param object data: The data to be serialized. :param str data_type: The type to be serialized from. - :raises: AttributeError if required data is None. - :raises: ValueError if data is None - :raises: SerializationError if serialization fails. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. :returns: The serialized data. :rtype: str, int, float, bool, dict, list """ @@ -1192,7 +1106,7 @@ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument :param Datetime attr: Object to be serialized. :rtype: str - :raises: TypeError if format invalid. + :raises TypeError: if format invalid. :return: serialized rfc """ try: @@ -1218,7 +1132,7 @@ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument :param Datetime attr: Object to be serialized. :rtype: str - :raises: SerializationError if format invalid. + :raises SerializationError: if format invalid. :return: serialized iso """ if isinstance(attr, str): @@ -1251,7 +1165,7 @@ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument :param Datetime attr: Object to be serialized. :rtype: int - :raises: SerializationError if format invalid + :raises SerializationError: if format invalid :return: serialied unix """ if isinstance(attr, int): @@ -1429,7 +1343,7 @@ def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument # Iter and wrapped, should have found one node only (the wrap one) if len(children) != 1: raise DeserializationError( - "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( xml_name ) ) @@ -1488,7 +1402,7 @@ def __call__(self, target_obj, response_data, content_type=None): :param str target_obj: Target data type to deserialize to. :param requests.Response response_data: REST response object. :param str content_type: Swagger "produces" if available. - :raises: DeserializationError if deserialization fails. + :raises DeserializationError: if deserialization fails. :return: Deserialized object. 
:rtype: object """ @@ -1502,7 +1416,7 @@ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return :param str target_obj: Target data type to deserialize to. :param object data: Object to deserialize. - :raises: DeserializationError if deserialization fails. + :raises DeserializationError: if deserialization fails. :return: Deserialized object. :rtype: object """ @@ -1717,7 +1631,7 @@ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return- :param str data: The response string to be deserialized. :param str data_type: The type to deserialize to. - :raises: DeserializationError if deserialization fails. + :raises DeserializationError: if deserialization fails. :return: Deserialized object. :rtype: object """ @@ -1799,7 +1713,7 @@ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return :param dict attr: Dictionary to be deserialized. :return: Deserialized object. :rtype: dict - :raises: TypeError if non-builtin datatype encountered. + :raises TypeError: if non-builtin datatype encountered. """ if attr is None: return None @@ -1845,7 +1759,7 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return :param str data_type: deserialization data type. :return: Deserialized basic type. :rtype: str, int, float or bool - :raises: TypeError if string format is not valid. + :raises TypeError: if string format is not valid. """ # If we're here, data is supposed to be a basic type. # If it's still an XML node, take the text @@ -1936,7 +1850,7 @@ def deserialize_bytearray(attr): :param str attr: response string to be deserialized. :return: Deserialized bytearray :rtype: bytearray - :raises: TypeError if string format invalid. + :raises TypeError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1949,7 +1863,7 @@ def deserialize_base64(attr): :param str attr: response string to be deserialized. :return: Deserialized base64 string :rtype: bytearray - :raises: TypeError if string format invalid. + :raises TypeError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1964,7 +1878,7 @@ def deserialize_decimal(attr): :param str attr: response string to be deserialized. :return: Deserialized decimal - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. :rtype: decimal """ if isinstance(attr, ET.Element): @@ -1982,7 +1896,7 @@ def deserialize_long(attr): :param str attr: response string to be deserialized. :return: Deserialized int :rtype: long or int - :raises: ValueError if string format invalid. + :raises ValueError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1995,7 +1909,7 @@ def deserialize_duration(attr): :param str attr: response string to be deserialized. :return: Deserialized duration :rtype: TimeDelta - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -2013,7 +1927,7 @@ def deserialize_date(attr): :param str attr: response string to be deserialized. :return: Deserialized date :rtype: Date - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -2029,7 +1943,7 @@ def deserialize_time(attr): :param str attr: response string to be deserialized. 
:return: Deserialized time :rtype: datetime.time - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -2044,14 +1958,14 @@ def deserialize_rfc(attr): :param str attr: response string to be deserialized. :return: Deserialized RFC datetime :rtype: Datetime - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text try: parsed_date = email.utils.parsedate_tz(attr) # type: ignore date_obj = datetime.datetime( - *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) ) if not date_obj.tzinfo: date_obj = date_obj.astimezone(tz=TZ_UTC) @@ -2067,7 +1981,7 @@ def deserialize_iso(attr): :param str attr: response string to be deserialized. :return: Deserialized ISO datetime :rtype: Datetime - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -2105,7 +2019,7 @@ def deserialize_unix(attr): :param int attr: Object to be serialized. :return: Deserialized datetime :rtype: Datetime - :raises: DeserializationError if format invalid + :raises DeserializationError: if format invalid """ if isinstance(attr, ET.Element): attr = int(attr.text) # type: ignore diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_validation.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_validation.py new file mode 100644 index 000000000000..752b2822f9d3 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_validation.py @@ -0,0 +1,50 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import functools + + +def api_version_validation(**kwargs): + params_added_on = kwargs.pop("params_added_on", {}) + method_added_on = kwargs.pop("method_added_on", "") + + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + try: + # this assumes the client has an _api_version attribute + client = args[0] + client_api_version = client._config.api_version # pylint: disable=protected-access + except AttributeError: + return func(*args, **kwargs) + + if method_added_on > client_api_version: + raise ValueError( + f"'{func.__name__}' is not available in API version " + f"{client_api_version}. Pass service API version {method_added_on} or newer to your client." + ) + + unsupported = { + parameter: api_version + for api_version, parameters in params_added_on.items() + for parameter in parameters + if parameter in kwargs and api_version > client_api_version + } + if unsupported: + raise ValueError( + "".join( + [ + f"'{param}' is not available in API version {client_api_version}. 
" + f"Use service API version {version} or newer.\n" + for param, version in unsupported.items() + ] + ) + ) + return func(*args, **kwargs) + + return wrapper + + return decorator diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_version.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_version.py index 0ec13ea52bbf..a1f432eddc4e 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_version.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "1.0.0" +VERSION = "1.1.0b1" diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/_client.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/_client.py index 070e23fb5792..788e7ce6d2d7 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/_client.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/_client.py @@ -7,26 +7,33 @@ # -------------------------------------------------------------------------- from copy import deepcopy -from typing import Any, Awaitable, TYPE_CHECKING +from typing import Any, Awaitable, Optional, TYPE_CHECKING, cast from typing_extensions import Self from azure.core.pipeline import policies from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.settings import settings from azure.mgmt.core import AsyncARMPipelineClient from azure.mgmt.core.policies import AsyncARMAutoResourceProviderRegistrationPolicy +from azure.mgmt.core.tools import get_arm_endpoints -from .._serialization import Deserializer, Serializer +from .._utils.serialization import Deserializer, Serializer from ._configuration import IoTOperationsMgmtClientConfiguration from .operations import ( + AkriConnectorOperations, + AkriConnectorTemplateOperations, + AkriDiscoveryHandlerOperations, BrokerAuthenticationOperations, BrokerAuthorizationOperations, BrokerListenerOperations, BrokerOperations, DataflowEndpointOperations, + DataflowGraphOperations, DataflowOperations, DataflowProfileOperations, InstanceOperations, Operations, + RegistryEndpointOperations, ) if TYPE_CHECKING: @@ -56,30 +63,49 @@ class IoTOperationsMgmtClient: # pylint: disable=too-many-instance-attributes :vartype dataflow: azure.mgmt.iotoperations.aio.operations.DataflowOperations :ivar dataflow_endpoint: DataflowEndpointOperations operations :vartype dataflow_endpoint: azure.mgmt.iotoperations.aio.operations.DataflowEndpointOperations + :ivar dataflow_graph: DataflowGraphOperations operations + :vartype dataflow_graph: azure.mgmt.iotoperations.aio.operations.DataflowGraphOperations + :ivar registry_endpoint: RegistryEndpointOperations operations + :vartype registry_endpoint: azure.mgmt.iotoperations.aio.operations.RegistryEndpointOperations + :ivar akri_connector_template: AkriConnectorTemplateOperations operations + :vartype akri_connector_template: + azure.mgmt.iotoperations.aio.operations.AkriConnectorTemplateOperations + :ivar akri_connector: AkriConnectorOperations operations + :vartype akri_connector: azure.mgmt.iotoperations.aio.operations.AkriConnectorOperations + :ivar akri_discovery_handler: AkriDiscoveryHandlerOperations operations + :vartype akri_discovery_handler: + azure.mgmt.iotoperations.aio.operations.AkriDiscoveryHandlerOperations :param 
credential: Credential used to authenticate requests to the service. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The ID of the target subscription. The value must be an UUID. Required. :type subscription_id: str - :param base_url: Service host. Default value is "https://management.azure.com". + :param base_url: Service host. Default value is None. :type base_url: str - :keyword api_version: The API version to use for this operation. Default value is "2024-11-01". - Note that overriding this default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Default value is + "2025-07-01-preview". Note that overriding this default value may result in unsupported + behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - base_url: str = "https://management.azure.com", - **kwargs: Any + self, credential: "AsyncTokenCredential", subscription_id: str, base_url: Optional[str] = None, **kwargs: Any ) -> None: _endpoint = "{endpoint}" + _cloud = kwargs.pop("cloud_setting", None) or settings.current.azure_cloud # type: ignore + _endpoints = get_arm_endpoints(_cloud) + if not base_url: + base_url = _endpoints["resource_manager"] + credential_scopes = kwargs.pop("credential_scopes", _endpoints["credential_scopes"]) self._config = IoTOperationsMgmtClientConfiguration( - credential=credential, subscription_id=subscription_id, base_url=base_url, **kwargs + credential=credential, + subscription_id=subscription_id, + base_url=cast(str, base_url), + credential_scopes=credential_scopes, + **kwargs ) + _policies = kwargs.pop("policies", None) if _policies is None: _policies = [ @@ -98,7 +124,9 @@ def __init__( policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, self._config.http_logging_policy, ] - self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + self._client: AsyncARMPipelineClient = AsyncARMPipelineClient( + base_url=cast(str, _endpoint), policies=_policies, **kwargs + ) self._serialize = Serializer() self._deserialize = Deserializer() @@ -120,6 +148,17 @@ def __init__( self.dataflow_endpoint = DataflowEndpointOperations( self._client, self._config, self._serialize, self._deserialize ) + self.dataflow_graph = DataflowGraphOperations(self._client, self._config, self._serialize, self._deserialize) + self.registry_endpoint = RegistryEndpointOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.akri_connector_template = AkriConnectorTemplateOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.akri_connector = AkriConnectorOperations(self._client, self._config, self._serialize, self._deserialize) + self.akri_discovery_handler = AkriDiscoveryHandlerOperations( + self._client, self._config, self._serialize, self._deserialize + ) def send_request( self, request: HttpRequest, *, stream: bool = False, **kwargs: Any diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/_configuration.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/_configuration.py index 559f07852993..5cf9a8b25082 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/_configuration.py +++ 
b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/_configuration.py @@ -29,8 +29,9 @@ class IoTOperationsMgmtClientConfiguration: # pylint: disable=too-many-instance :type subscription_id: str :param base_url: Service host. Default value is "https://management.azure.com". :type base_url: str - :keyword api_version: The API version to use for this operation. Default value is "2024-11-01". - Note that overriding this default value may result in unsupported behavior. + :keyword api_version: The API version to use for this operation. Default value is + "2025-07-01-preview". Note that overriding this default value may result in unsupported + behavior. :paramtype api_version: str """ @@ -41,7 +42,7 @@ def __init__( base_url: str = "https://management.azure.com", **kwargs: Any ) -> None: - api_version: str = kwargs.pop("api_version", "2024-11-01") + api_version: str = kwargs.pop("api_version", "2025-07-01-preview") if credential is None: raise ValueError("Parameter 'credential' must not be None.") diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/_patch.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/_patch.py index f7dd32510333..8bcb627aa475 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/_patch.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/_patch.py @@ -1,7 +1,8 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. 
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/operations/__init__.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/operations/__init__.py index ab557d4ab2d6..8285d4d727f7 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/operations/__init__.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/operations/__init__.py @@ -21,6 +21,11 @@ from ._operations import DataflowProfileOperations # type: ignore from ._operations import DataflowOperations # type: ignore from ._operations import DataflowEndpointOperations # type: ignore +from ._operations import DataflowGraphOperations # type: ignore +from ._operations import RegistryEndpointOperations # type: ignore +from ._operations import AkriConnectorTemplateOperations # type: ignore +from ._operations import AkriConnectorOperations # type: ignore +from ._operations import AkriDiscoveryHandlerOperations # type: ignore from ._patch import __all__ as _patch_all from ._patch import * @@ -36,6 +41,11 @@ "DataflowProfileOperations", "DataflowOperations", "DataflowEndpointOperations", + "DataflowGraphOperations", + "RegistryEndpointOperations", + "AkriConnectorTemplateOperations", + "AkriConnectorOperations", + "AkriDiscoveryHandlerOperations", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/operations/_operations.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/operations/_operations.py index 1ef33e3f4aef..ab73bb7bde71 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/operations/_operations.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/operations/_operations.py @@ -6,12 +6,13 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase import json -import sys from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, IO, List, Optional, TypeVar, Union, cast, overload import urllib.parse +from azure.core import AsyncPipelineClient from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, @@ -33,8 +34,22 @@ from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models -from ..._model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize +from ..._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize +from ..._utils.serialization import Deserializer, Serializer +from ..._validation import api_version_validation from ...operations._operations import ( + build_akri_connector_create_or_update_request, + build_akri_connector_delete_request, + build_akri_connector_get_request, + build_akri_connector_list_by_template_request, + build_akri_connector_template_create_or_update_request, + build_akri_connector_template_delete_request, + build_akri_connector_template_get_request, + build_akri_connector_template_list_by_instance_resource_request, + build_akri_discovery_handler_create_or_update_request, + build_akri_discovery_handler_delete_request, + build_akri_discovery_handler_get_request, + build_akri_discovery_handler_list_by_instance_resource_request, build_broker_authentication_create_or_update_request, build_broker_authentication_delete_request, build_broker_authentication_get_request, @@ -58,6 +73,10 @@ build_dataflow_endpoint_get_request, build_dataflow_endpoint_list_by_resource_group_request, build_dataflow_get_request, + build_dataflow_graph_create_or_update_request, + build_dataflow_graph_delete_request, + build_dataflow_graph_get_request, + build_dataflow_graph_list_by_dataflow_profile_request, build_dataflow_list_by_resource_group_request, build_dataflow_profile_create_or_update_request, build_dataflow_profile_delete_request, @@ -70,15 +89,16 @@ build_instance_list_by_subscription_request, build_instance_update_request, build_operations_list_request, + build_registry_endpoint_create_or_update_request, + build_registry_endpoint_delete_request, + build_registry_endpoint_get_request, + build_registry_endpoint_list_by_instance_resource_request, ) +from .._configuration import IoTOperationsMgmtClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object +JSON = MutableMapping[str, Any] class Operations: @@ -93,10 +113,10 @@ class Operations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]: @@ -158,7 +178,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Operation], deserialized["value"]) + list_of_elem = _deserialize(List[_models.Operation], deserialized.get("value", [])) if 
cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -194,10 +214,10 @@ class InstanceOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async async def get(self, resource_group_name: str, instance_name: str, **kwargs: Any) -> _models.InstanceResource: @@ -845,7 +865,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.InstanceResource], deserialized["value"]) + list_of_elem = _deserialize(List[_models.InstanceResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -930,7 +950,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.InstanceResource], deserialized["value"]) + list_of_elem = _deserialize(List[_models.InstanceResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -966,10 +986,10 @@ class BrokerOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async async def get( @@ -1480,7 +1500,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.BrokerResource], deserialized["value"]) + list_of_elem = _deserialize(List[_models.BrokerResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -1516,10 +1536,10 @@ class BrokerListenerOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config 
= input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async async def get( @@ -2059,7 +2079,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.BrokerListenerResource], deserialized["value"]) + list_of_elem = _deserialize(List[_models.BrokerListenerResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -2095,10 +2115,10 @@ class BrokerAuthenticationOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async async def get( @@ -2640,7 +2660,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.BrokerAuthenticationResource], deserialized["value"]) + list_of_elem = _deserialize(List[_models.BrokerAuthenticationResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -2676,10 +2696,10 @@ class BrokerAuthorizationOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async async def get( @@ -3221,7 +3241,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = 
pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.BrokerAuthorizationResource], deserialized["value"]) + list_of_elem = _deserialize(List[_models.BrokerAuthorizationResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -3257,10 +3277,10 @@ class DataflowProfileOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async async def get( @@ -3775,7 +3795,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.DataflowProfileResource], deserialized["value"]) + list_of_elem = _deserialize(List[_models.DataflowProfileResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -3811,10 +3831,10 @@ class DataflowOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async async def get( @@ -4365,7 +4385,7 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.DataflowResource], deserialized["value"]) + list_of_elem = _deserialize(List[_models.DataflowResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) @@ -4401,10 +4421,10 @@ class DataflowEndpointOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = 
input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async async def get( @@ -4920,7 +4940,3251 @@ def prepare_request(next_link=None): async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.DataflowEndpointResource], deserialized["value"]) + list_of_elem = _deserialize(List[_models.DataflowEndpointResource], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class DataflowGraphOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.iotoperations.aio.IoTOperationsMgmtClient`'s + :attr:`dataflow_graph` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "dataflow_profile_name", + "dataflow_graph_name", + "accept", + ] + }, + ) + async def get( + self, + resource_group_name: str, + instance_name: str, + dataflow_profile_name: str, + dataflow_graph_name: str, + **kwargs: Any + ) -> _models.DataflowGraphResource: + """Get a DataflowGraphResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param dataflow_graph_name: Name of Instance dataflowEndpoint resource. Required. + :type dataflow_graph_name: str + :return: DataflowGraphResource. 
The DataflowGraphResource is compatible with MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.DataflowGraphResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.DataflowGraphResource] = kwargs.pop("cls", None) + + _request = build_dataflow_graph_get_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + dataflow_profile_name=dataflow_profile_name, + dataflow_graph_name=dataflow_graph_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.DataflowGraphResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "dataflow_profile_name", + "dataflow_graph_name", + "content_type", + "accept", + ] + }, + ) + async def _create_or_update_initial( + self, + resource_group_name: str, + instance_name: str, + dataflow_profile_name: str, + dataflow_graph_name: str, + resource: Union[_models.DataflowGraphResource, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_dataflow_graph_create_or_update_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + dataflow_profile_name=dataflow_profile_name, + 
dataflow_graph_name=dataflow_graph_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + dataflow_profile_name: str, + dataflow_graph_name: str, + resource: _models.DataflowGraphResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DataflowGraphResource]: + """Create a DataflowGraphResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param dataflow_graph_name: Name of Instance dataflowEndpoint resource. Required. + :type dataflow_graph_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.iotoperations.models.DataflowGraphResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns DataflowGraphResource. The + DataflowGraphResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.DataflowGraphResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + dataflow_profile_name: str, + dataflow_graph_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DataflowGraphResource]: + """Create a DataflowGraphResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. 
+ :type instance_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param dataflow_graph_name: Name of Instance dataflowEndpoint resource. Required. + :type dataflow_graph_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns DataflowGraphResource. The + DataflowGraphResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.DataflowGraphResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + dataflow_profile_name: str, + dataflow_graph_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DataflowGraphResource]: + """Create a DataflowGraphResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param dataflow_graph_name: Name of Instance dataflowEndpoint resource. Required. + :type dataflow_graph_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns DataflowGraphResource. The + DataflowGraphResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.DataflowGraphResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "dataflow_profile_name", + "dataflow_graph_name", + "content_type", + "accept", + ] + }, + ) + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + dataflow_profile_name: str, + dataflow_graph_name: str, + resource: Union[_models.DataflowGraphResource, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.DataflowGraphResource]: + """Create a DataflowGraphResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param dataflow_graph_name: Name of Instance dataflowEndpoint resource. Required. + :type dataflow_graph_name: str + :param resource: Resource create parameters. Is one of the following types: + DataflowGraphResource, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.iotoperations.models.DataflowGraphResource or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns DataflowGraphResource. 
The + DataflowGraphResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.DataflowGraphResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DataflowGraphResource] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + dataflow_profile_name=dataflow_profile_name, + dataflow_graph_name=dataflow_graph_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.DataflowGraphResource, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.DataflowGraphResource].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.DataflowGraphResource]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "dataflow_profile_name", + "dataflow_graph_name", + "accept", + ] + }, + ) + async def _delete_initial( + self, + resource_group_name: str, + instance_name: str, + dataflow_profile_name: str, + dataflow_graph_name: str, + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_dataflow_graph_delete_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + dataflow_profile_name=dataflow_profile_name, + dataflow_graph_name=dataflow_graph_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", 
self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "dataflow_profile_name", + "dataflow_graph_name", + "accept", + ] + }, + ) + async def begin_delete( + self, + resource_group_name: str, + instance_name: str, + dataflow_profile_name: str, + dataflow_graph_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a DataflowGraphResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param dataflow_graph_name: Name of Instance dataflowEndpoint resource. Required. 
+ :type dataflow_graph_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + dataflow_profile_name=dataflow_profile_name, + dataflow_graph_name=dataflow_graph_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "dataflow_profile_name", + "accept", + ] + }, + ) + def list_by_dataflow_profile( + self, resource_group_name: str, instance_name: str, dataflow_profile_name: str, **kwargs: Any + ) -> AsyncIterable["_models.DataflowGraphResource"]: + """List DataflowGraphResource resources by DataflowProfileResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. 
+ :type dataflow_profile_name: str + :return: An iterator like instance of DataflowGraphResource + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iotoperations.models.DataflowGraphResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.DataflowGraphResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_dataflow_graph_list_by_dataflow_profile_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + dataflow_profile_name=dataflow_profile_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.DataflowGraphResource], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class RegistryEndpointOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.iotoperations.aio.IoTOperationsMgmtClient`'s + :attr:`registry_endpoint` attribute. 
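For reference, a minimal usage sketch of the newly added `dataflow_graph` operation group. This is illustrative only: the subscription ID, resource names, and request payload below are placeholders, and the credential/client setup follows the usual Azure SDK pattern rather than anything defined in this patch.

```python
# Hedged sketch, not generated code: names and the payload shape are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.iotoperations.aio import IoTOperationsMgmtClient


async def main() -> None:
    credential = DefaultAzureCredential()
    async with IoTOperationsMgmtClient(credential, "<subscription-id>") as client:
        # Long-running create/update; the body may be a model, a JSON dict, or bytes.
        poller = await client.dataflow_graph.begin_create_or_update(
            resource_group_name="<resource-group>",
            instance_name="<instance>",
            dataflow_profile_name="<profile>",
            dataflow_graph_name="<graph>",
            resource={"properties": {}},  # placeholder payload
        )
        graph = await poller.result()
        print(graph)

        # Page through all dataflow graphs under the profile.
        async for item in client.dataflow_graph.list_by_dataflow_profile(
            "<resource-group>", "<instance>", "<profile>"
        ):
            print(item)
    await credential.close()


asyncio.run(main())
```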
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "registry_endpoint_name", + "accept", + ] + }, + ) + async def get( + self, resource_group_name: str, instance_name: str, registry_endpoint_name: str, **kwargs: Any + ) -> _models.RegistryEndpointResource: + """Get a RegistryEndpointResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param registry_endpoint_name: Name of RegistryEndpoint resource. Required. + :type registry_endpoint_name: str + :return: RegistryEndpointResource. The RegistryEndpointResource is compatible with + MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.RegistryEndpointResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.RegistryEndpointResource] = kwargs.pop("cls", None) + + _request = build_registry_endpoint_get_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + registry_endpoint_name=registry_endpoint_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.RegistryEndpointResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + 
"registry_endpoint_name", + "content_type", + "accept", + ] + }, + ) + async def _create_or_update_initial( + self, + resource_group_name: str, + instance_name: str, + registry_endpoint_name: str, + resource: Union[_models.RegistryEndpointResource, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_registry_endpoint_create_or_update_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + registry_endpoint_name=registry_endpoint_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + registry_endpoint_name: str, + resource: _models.RegistryEndpointResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.RegistryEndpointResource]: + """Create a RegistryEndpointResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param registry_endpoint_name: Name of RegistryEndpoint resource. Required. + :type registry_endpoint_name: str + :param resource: Resource create parameters. Required. 
+ :type resource: ~azure.mgmt.iotoperations.models.RegistryEndpointResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns RegistryEndpointResource. The + RegistryEndpointResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.RegistryEndpointResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + registry_endpoint_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.RegistryEndpointResource]: + """Create a RegistryEndpointResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param registry_endpoint_name: Name of RegistryEndpoint resource. Required. + :type registry_endpoint_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns RegistryEndpointResource. The + RegistryEndpointResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.RegistryEndpointResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + registry_endpoint_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.RegistryEndpointResource]: + """Create a RegistryEndpointResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param registry_endpoint_name: Name of RegistryEndpoint resource. Required. + :type registry_endpoint_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns RegistryEndpointResource. 
The + RegistryEndpointResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.RegistryEndpointResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "registry_endpoint_name", + "content_type", + "accept", + ] + }, + ) + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + registry_endpoint_name: str, + resource: Union[_models.RegistryEndpointResource, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.RegistryEndpointResource]: + """Create a RegistryEndpointResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param registry_endpoint_name: Name of RegistryEndpoint resource. Required. + :type registry_endpoint_name: str + :param resource: Resource create parameters. Is one of the following types: + RegistryEndpointResource, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.iotoperations.models.RegistryEndpointResource or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns RegistryEndpointResource. The + RegistryEndpointResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.RegistryEndpointResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.RegistryEndpointResource] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + registry_endpoint_name=registry_endpoint_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.RegistryEndpointResource, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.RegistryEndpointResource].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + 
deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.RegistryEndpointResource]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "registry_endpoint_name", + "accept", + ] + }, + ) + async def _delete_initial( + self, resource_group_name: str, instance_name: str, registry_endpoint_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_registry_endpoint_delete_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + registry_endpoint_name=registry_endpoint_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "registry_endpoint_name", + "accept", + ] + }, + ) + async def begin_delete( + self, resource_group_name: str, instance_name: str, registry_endpoint_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a RegistryEndpointResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param registry_endpoint_name: Name of RegistryEndpoint resource. Required. 
+ :type registry_endpoint_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + registry_endpoint_name=registry_endpoint_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": ["api_version", "subscription_id", "resource_group_name", "instance_name", "accept"] + }, + ) + def list_by_instance_resource( + self, resource_group_name: str, instance_name: str, **kwargs: Any + ) -> AsyncIterable["_models.RegistryEndpointResource"]: + """List RegistryEndpointResource resources by InstanceResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. 
+ :type instance_name: str + :return: An iterator like instance of RegistryEndpointResource + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iotoperations.models.RegistryEndpointResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.RegistryEndpointResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_registry_endpoint_list_by_instance_resource_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.RegistryEndpointResource], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class AkriConnectorTemplateOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.iotoperations.aio.IoTOperationsMgmtClient`'s + :attr:`akri_connector_template` attribute. 
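Similarly, a hedged sketch for the new `registry_endpoint` operation group; resource names and the request body are placeholders, only the method names and parameters come from the generated surface above.

```python
# Hedged sketch, not generated code: all names and the request body are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.iotoperations.aio import IoTOperationsMgmtClient


async def show_registry_endpoints() -> None:
    credential = DefaultAzureCredential()
    async with IoTOperationsMgmtClient(credential, "<subscription-id>") as client:
        # LRO create/update followed by a point read.
        poller = await client.registry_endpoint.begin_create_or_update(
            resource_group_name="<resource-group>",
            instance_name="<instance>",
            registry_endpoint_name="<endpoint>",
            resource={"properties": {}},  # placeholder payload
        )
        await poller.result()

        endpoint = await client.registry_endpoint.get(
            "<resource-group>", "<instance>", "<endpoint>"
        )
        print(endpoint)

        # Enumerate every registry endpoint on the instance.
        async for item in client.registry_endpoint.list_by_instance_resource(
            "<resource-group>", "<instance>"
        ):
            print(item)
    await credential.close()


asyncio.run(show_registry_endpoints())
```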
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "accept", + ] + }, + ) + async def get( + self, resource_group_name: str, instance_name: str, akri_connector_template_name: str, **kwargs: Any + ) -> _models.AkriConnectorTemplateResource: + """Get a AkriConnectorTemplateResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :return: AkriConnectorTemplateResource. The AkriConnectorTemplateResource is compatible with + MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.AkriConnectorTemplateResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.AkriConnectorTemplateResource] = kwargs.pop("cls", None) + + _request = build_akri_connector_template_get_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + akri_connector_template_name=akri_connector_template_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.AkriConnectorTemplateResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + 
"api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "content_type", + "accept", + ] + }, + ) + async def _create_or_update_initial( + self, + resource_group_name: str, + instance_name: str, + akri_connector_template_name: str, + resource: Union[_models.AkriConnectorTemplateResource, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_akri_connector_template_create_or_update_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + akri_connector_template_name=akri_connector_template_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + akri_connector_template_name: str, + resource: _models.AkriConnectorTemplateResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.AkriConnectorTemplateResource]: + """Create a AkriConnectorTemplateResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. 
+ :type akri_connector_template_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.iotoperations.models.AkriConnectorTemplateResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns AkriConnectorTemplateResource. The + AkriConnectorTemplateResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.AkriConnectorTemplateResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + akri_connector_template_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.AkriConnectorTemplateResource]: + """Create a AkriConnectorTemplateResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns AkriConnectorTemplateResource. The + AkriConnectorTemplateResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.AkriConnectorTemplateResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + akri_connector_template_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.AkriConnectorTemplateResource]: + """Create a AkriConnectorTemplateResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns AkriConnectorTemplateResource. 
The + AkriConnectorTemplateResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.AkriConnectorTemplateResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "content_type", + "accept", + ] + }, + ) + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + akri_connector_template_name: str, + resource: Union[_models.AkriConnectorTemplateResource, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.AkriConnectorTemplateResource]: + """Create a AkriConnectorTemplateResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :param resource: Resource create parameters. Is one of the following types: + AkriConnectorTemplateResource, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.iotoperations.models.AkriConnectorTemplateResource or JSON or + IO[bytes] + :return: An instance of AsyncLROPoller that returns AkriConnectorTemplateResource. The + AkriConnectorTemplateResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.AkriConnectorTemplateResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.AkriConnectorTemplateResource] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + akri_connector_template_name=akri_connector_template_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.AkriConnectorTemplateResource, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.AkriConnectorTemplateResource].from_continuation_token( + 
polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.AkriConnectorTemplateResource]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "accept", + ] + }, + ) + async def _delete_initial( + self, resource_group_name: str, instance_name: str, akri_connector_template_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_akri_connector_template_delete_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + akri_connector_template_name=akri_connector_template_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "accept", + ] + }, + ) + async def begin_delete( + self, resource_group_name: str, instance_name: str, akri_connector_template_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a AkriConnectorTemplateResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. 
+ :type akri_connector_template_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + akri_connector_template_name=akri_connector_template_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": ["api_version", "subscription_id", "resource_group_name", "instance_name", "accept"] + }, + ) + def list_by_instance_resource( + self, resource_group_name: str, instance_name: str, **kwargs: Any + ) -> AsyncIterable["_models.AkriConnectorTemplateResource"]: + """List AkriConnectorTemplateResource resources by InstanceResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. 
+ :type instance_name: str + :return: An iterator like instance of AkriConnectorTemplateResource + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iotoperations.models.AkriConnectorTemplateResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.AkriConnectorTemplateResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_akri_connector_template_list_by_instance_resource_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.AkriConnectorTemplateResource], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class AkriConnectorOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.iotoperations.aio.IoTOperationsMgmtClient`'s + :attr:`akri_connector` attribute. 
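For orientation, a minimal usage sketch of the AkriConnectorTemplate operations generated above, written against the async client. The operation-group attribute name akri_connector_template is inferred from the generator's naming convention (the class docstrings nearby confirm akri_connector and akri_discovery_handler), azure-identity is assumed for the credential, and the subscription ID, resource names, and empty request body are placeholders rather than values from this patch.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.iotoperations.aio import IoTOperationsMgmtClient


async def create_and_list_templates() -> None:
    async with DefaultAzureCredential() as credential:
        async with IoTOperationsMgmtClient(credential, "<subscription-id>") as client:
            # begin_create_or_update returns an AsyncLROPoller; result() keeps
            # polling (Azure-AsyncOperation / Retry-After) until the service
            # reports completion, then deserializes the final resource.
            poller = await client.akri_connector_template.begin_create_or_update(
                resource_group_name="my-rg",
                instance_name="my-instance",
                akri_connector_template_name="my-template",
                resource={},  # placeholder: a JSON body or AkriConnectorTemplateResource
            )
            template = await poller.result()
            print(template.name)

            # list_by_instance_resource returns an AsyncItemPaged that follows
            # nextLink transparently, so it is iterated rather than awaited.
            async for item in client.akri_connector_template.list_by_instance_resource(
                resource_group_name="my-rg", instance_name="my-instance"
            ):
                print(item.name)


asyncio.run(create_and_list_templates())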
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "akri_connector_name", + "accept", + ] + }, + ) + async def get( + self, + resource_group_name: str, + instance_name: str, + akri_connector_template_name: str, + akri_connector_name: str, + **kwargs: Any + ) -> _models.AkriConnectorResource: + """Get a AkriConnectorResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :param akri_connector_name: Name of AkriConnector resource. Required. + :type akri_connector_name: str + :return: AkriConnectorResource. The AkriConnectorResource is compatible with MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.AkriConnectorResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.AkriConnectorResource] = kwargs.pop("cls", None) + + _request = build_akri_connector_get_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + akri_connector_template_name=akri_connector_template_name, + akri_connector_name=akri_connector_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.AkriConnectorResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return 
deserialized # type: ignore + + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "akri_connector_name", + "content_type", + "accept", + ] + }, + ) + async def _create_or_update_initial( + self, + resource_group_name: str, + instance_name: str, + akri_connector_template_name: str, + akri_connector_name: str, + resource: Union[_models.AkriConnectorResource, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_akri_connector_create_or_update_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + akri_connector_template_name=akri_connector_template_name, + akri_connector_name=akri_connector_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + akri_connector_template_name: str, + akri_connector_name: str, + resource: _models.AkriConnectorResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.AkriConnectorResource]: + """Create a AkriConnectorResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
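The get and _create_or_update_initial bodies above route 401/404/409/304 through error_map before falling back to a generic HttpResponseError carrying the deserialized ErrorResponse in ARM format. A sketch of how that surfaces to a caller, assuming the same async client as before and placeholder names:

import asyncio

from azure.core.exceptions import HttpResponseError, ResourceNotFoundError
from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.iotoperations.aio import IoTOperationsMgmtClient


async def show_error_handling() -> None:
    async with DefaultAzureCredential() as credential:
        async with IoTOperationsMgmtClient(credential, "<subscription-id>") as client:
            try:
                await client.akri_connector.get(
                    resource_group_name="my-rg",
                    instance_name="my-instance",
                    akri_connector_template_name="my-template",
                    akri_connector_name="does-not-exist",
                )
            except ResourceNotFoundError as exc:
                # 404 is translated by map_error before the generic branch runs.
                print(exc.status_code, exc.reason)
            except HttpResponseError as exc:
                # Other failures carry the ARM error body (code/message) when present.
                print(exc.error.code if exc.error else exc.message)


asyncio.run(show_error_handling())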
+ :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :param akri_connector_name: Name of AkriConnector resource. Required. + :type akri_connector_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.iotoperations.models.AkriConnectorResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns AkriConnectorResource. The + AkriConnectorResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.AkriConnectorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + akri_connector_template_name: str, + akri_connector_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.AkriConnectorResource]: + """Create a AkriConnectorResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :param akri_connector_name: Name of AkriConnector resource. Required. + :type akri_connector_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns AkriConnectorResource. The + AkriConnectorResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.AkriConnectorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + akri_connector_template_name: str, + akri_connector_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.AkriConnectorResource]: + """Create a AkriConnectorResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :param akri_connector_name: Name of AkriConnector resource. Required. + :type akri_connector_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns AkriConnectorResource. 
The + AkriConnectorResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.AkriConnectorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "akri_connector_name", + "content_type", + "accept", + ] + }, + ) + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + akri_connector_template_name: str, + akri_connector_name: str, + resource: Union[_models.AkriConnectorResource, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.AkriConnectorResource]: + """Create a AkriConnectorResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :param akri_connector_name: Name of AkriConnector resource. Required. + :type akri_connector_name: str + :param resource: Resource create parameters. Is one of the following types: + AkriConnectorResource, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.iotoperations.models.AkriConnectorResource or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns AkriConnectorResource. The + AkriConnectorResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.AkriConnectorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.AkriConnectorResource] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + akri_connector_template_name=akri_connector_template_name, + akri_connector_name=akri_connector_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.AkriConnectorResource, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if 
cont_token: + return AsyncLROPoller[_models.AkriConnectorResource].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.AkriConnectorResource]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "akri_connector_name", + "accept", + ] + }, + ) + async def _delete_initial( + self, + resource_group_name: str, + instance_name: str, + akri_connector_template_name: str, + akri_connector_name: str, + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_akri_connector_delete_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + akri_connector_template_name=akri_connector_template_name, + akri_connector_name=akri_connector_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "akri_connector_name", + "accept", + ] + }, + ) + async def begin_delete( + self, + resource_group_name: str, + instance_name: str, + akri_connector_template_name: str, + akri_connector_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a AkriConnectorResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :param akri_connector_name: Name of AkriConnector resource. Required. + :type akri_connector_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + akri_connector_template_name=akri_connector_template_name, + akri_connector_name=akri_connector_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "accept", + ] + }, + ) + def list_by_template( + self, resource_group_name: str, instance_name: str, akri_connector_template_name: str, **kwargs: Any + ) -> AsyncIterable["_models.AkriConnectorResource"]: + """List AkriConnectorResource resources by AkriConnectorTemplateResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. 
+ :type akri_connector_template_name: str + :return: An iterator like instance of AkriConnectorResource + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iotoperations.models.AkriConnectorResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.AkriConnectorResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_akri_connector_list_by_template_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + akri_connector_template_name=akri_connector_template_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.AkriConnectorResource], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class AkriDiscoveryHandlerOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.iotoperations.aio.IoTOperationsMgmtClient`'s + :attr:`akri_discovery_handler` attribute. 
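A usage sketch for the AkriConnector operations above. The attribute name akri_connector is taken from the class docstring; the resource names are placeholders, azure-identity is assumed for the credential, and the continuation-token resume at the end exercises the cont_token branch shown in begin_delete.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.iotoperations.aio import IoTOperationsMgmtClient


async def delete_and_list_connectors() -> None:
    async with DefaultAzureCredential() as credential:
        async with IoTOperationsMgmtClient(credential, "<subscription-id>") as client:
            # Connectors are children of a connector template, so every call
            # takes the template name alongside the connector name.
            poller = await client.akri_connector.begin_delete(
                resource_group_name="my-rg",
                instance_name="my-instance",
                akri_connector_template_name="my-template",
                akri_connector_name="my-connector",
            )

            # The poller state can be captured and resumed later; passing
            # continuation_token skips the initial DELETE and goes straight to
            # polling (the required path arguments must still be supplied).
            token = poller.continuation_token()
            resumed = await client.akri_connector.begin_delete(
                resource_group_name="my-rg",
                instance_name="my-instance",
                akri_connector_template_name="my-template",
                akri_connector_name="my-connector",
                continuation_token=token,
            )
            await resumed.result()  # AsyncLROPoller[None]: completes with no payload

            # Remaining connectors under the same template.
            async for item in client.akri_connector.list_by_template(
                resource_group_name="my-rg",
                instance_name="my-instance",
                akri_connector_template_name="my-template",
            ):
                print(item.name)


asyncio.run(delete_and_list_connectors())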
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_discovery_handler_name", + "accept", + ] + }, + ) + async def get( + self, resource_group_name: str, instance_name: str, akri_discovery_handler_name: str, **kwargs: Any + ) -> _models.AkriDiscoveryHandlerResource: + """Get a AkriDiscoveryHandlerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_discovery_handler_name: Name of AkriDiscoveryHandler resource. Required. + :type akri_discovery_handler_name: str + :return: AkriDiscoveryHandlerResource. The AkriDiscoveryHandlerResource is compatible with + MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.AkriDiscoveryHandlerResource] = kwargs.pop("cls", None) + + _request = build_akri_discovery_handler_get_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + akri_discovery_handler_name=akri_discovery_handler_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.AkriDiscoveryHandlerResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", 
+ "subscription_id", + "resource_group_name", + "instance_name", + "akri_discovery_handler_name", + "content_type", + "accept", + ] + }, + ) + async def _create_or_update_initial( + self, + resource_group_name: str, + instance_name: str, + akri_discovery_handler_name: str, + resource: Union[_models.AkriDiscoveryHandlerResource, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_akri_discovery_handler_create_or_update_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + akri_discovery_handler_name=akri_discovery_handler_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + akri_discovery_handler_name: str, + resource: _models.AkriDiscoveryHandlerResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.AkriDiscoveryHandlerResource]: + """Create a AkriDiscoveryHandlerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_discovery_handler_name: Name of AkriDiscoveryHandler resource. Required. 
+ :type akri_discovery_handler_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns AkriDiscoveryHandlerResource. The + AkriDiscoveryHandlerResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + akri_discovery_handler_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.AkriDiscoveryHandlerResource]: + """Create a AkriDiscoveryHandlerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_discovery_handler_name: Name of AkriDiscoveryHandler resource. Required. + :type akri_discovery_handler_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns AkriDiscoveryHandlerResource. The + AkriDiscoveryHandlerResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + akri_discovery_handler_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.AkriDiscoveryHandlerResource]: + """Create a AkriDiscoveryHandlerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_discovery_handler_name: Name of AkriDiscoveryHandler resource. Required. + :type akri_discovery_handler_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns AkriDiscoveryHandlerResource. 
The + AkriDiscoveryHandlerResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_discovery_handler_name", + "content_type", + "accept", + ] + }, + ) + async def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + akri_discovery_handler_name: str, + resource: Union[_models.AkriDiscoveryHandlerResource, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.AkriDiscoveryHandlerResource]: + """Create a AkriDiscoveryHandlerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_discovery_handler_name: Name of AkriDiscoveryHandler resource. Required. + :type akri_discovery_handler_name: str + :param resource: Resource create parameters. Is one of the following types: + AkriDiscoveryHandlerResource, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerResource or JSON or + IO[bytes] + :return: An instance of AsyncLROPoller that returns AkriDiscoveryHandlerResource. The + AkriDiscoveryHandlerResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.AkriDiscoveryHandlerResource] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + akri_discovery_handler_name=akri_discovery_handler_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.AkriDiscoveryHandlerResource, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.AkriDiscoveryHandlerResource].from_continuation_token( + polling_method=polling_method, 
+ continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.AkriDiscoveryHandlerResource]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_discovery_handler_name", + "accept", + ] + }, + ) + async def _delete_initial( + self, resource_group_name: str, instance_name: str, akri_discovery_handler_name: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_akri_discovery_handler_delete_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + akri_discovery_handler_name=akri_discovery_handler_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_discovery_handler_name", + "accept", + ] + }, + ) + async def begin_delete( + self, resource_group_name: str, instance_name: str, akri_discovery_handler_name: str, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete a AkriDiscoveryHandlerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param akri_discovery_handler_name: Name of AkriDiscoveryHandler resource. Required. 
+ :type akri_discovery_handler_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + akri_discovery_handler_name=akri_discovery_handler_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": ["api_version", "subscription_id", "resource_group_name", "instance_name", "accept"] + }, + ) + def list_by_instance_resource( + self, resource_group_name: str, instance_name: str, **kwargs: Any + ) -> AsyncIterable["_models.AkriDiscoveryHandlerResource"]: + """List AkriDiscoveryHandlerResource resources by InstanceResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. 
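The begin_delete body above also handles the standard polling knobs: polling_interval feeds the AsyncARMPolling delay, and polling=False swaps in AsyncNoPolling. A sketch with placeholder names, assuming the attribute name akri_discovery_handler from the class docstring:

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.iotoperations.aio import IoTOperationsMgmtClient


async def delete_discovery_handlers() -> None:
    async with DefaultAzureCredential() as credential:
        async with IoTOperationsMgmtClient(credential, "<subscription-id>") as client:
            # polling_interval sets the requested delay between polls; a
            # service-supplied Retry-After still takes precedence.
            poller = await client.akri_discovery_handler.begin_delete(
                resource_group_name="my-rg",
                instance_name="my-instance",
                akri_discovery_handler_name="my-handler",
                polling_interval=5,
            )
            await poller.result()

            # polling=False selects AsyncNoPolling: result() returns after the
            # initial 202/204 response without waiting for the LRO to finish.
            fire_and_forget = await client.akri_discovery_handler.begin_delete(
                resource_group_name="my-rg",
                instance_name="my-instance",
                akri_discovery_handler_name="another-handler",
                polling=False,
            )
            await fire_and_forget.result()


asyncio.run(delete_discovery_handlers())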
+ :type instance_name: str + :return: An iterator like instance of AkriDiscoveryHandlerResource + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.AkriDiscoveryHandlerResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_akri_discovery_handler_list_by_instance_resource_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.AkriDiscoveryHandlerResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, AsyncList(list_of_elem) diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/operations/_patch.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/operations/_patch.py index f7dd32510333..8bcb627aa475 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/operations/_patch.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/aio/operations/_patch.py @@ -1,7 +1,8 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. 
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/models/__init__.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/models/__init__.py index 048fa26a0622..e2c24745fa05 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/models/__init__.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/models/__init__.py @@ -15,8 +15,62 @@ from ._models import ( # type: ignore AdvancedSettings, + AkriConnectorProperties, + AkriConnectorResource, + AkriConnectorTemplateAioMetadata, + AkriConnectorTemplateAllocation, + AkriConnectorTemplateBucketizedAllocation, + AkriConnectorTemplateDeviceInboundEndpointConfigurationSchemaRefs, + AkriConnectorTemplateDeviceInboundEndpointType, + AkriConnectorTemplateDiagnostics, + AkriConnectorTemplateHelmAdvancedConfiguration, + AkriConnectorTemplateHelmAuthSecretRef, + AkriConnectorTemplateHelmConfiguration, + AkriConnectorTemplateHelmConfigurationSettings, + AkriConnectorTemplateHelmContainerRegistry, + AkriConnectorTemplateHelmContainerRegistrySettings, + AkriConnectorTemplateHelmDeleteConfiguration, + AkriConnectorTemplateHelmInstallConfiguration, + AkriConnectorTemplateHelmRegistryEndpointRef, + AkriConnectorTemplateHelmRegistrySettings, + AkriConnectorTemplateHelmUpgradeConfiguration, + AkriConnectorTemplateManagedConfiguration, + AkriConnectorTemplateManagedConfigurationSettings, + AkriConnectorTemplatePersistentVolumeClaim, + AkriConnectorTemplateProperties, + AkriConnectorTemplateResource, + AkriConnectorTemplateRuntimeConfiguration, + AkriConnectorTemplateRuntimeImageConfiguration, + AkriConnectorTemplateRuntimeImageConfigurationSettings, + AkriConnectorTemplateRuntimeStatefulSetConfiguration, + AkriConnectorTemplateTrustList, + AkriConnectorsContainerRegistry, + AkriConnectorsContainerRegistrySettings, + AkriConnectorsDiagnosticsLogs, + AkriConnectorsDigest, + AkriConnectorsImagePullSecret, + AkriConnectorsMqttAuthentication, + AkriConnectorsMqttConnectionConfiguration, + AkriConnectorsRegistryEndpointRef, + AkriConnectorsRegistrySettings, + AkriConnectorsSecret, + AkriConnectorsServiceAccountAuthentication, + AkriConnectorsServiceAccountTokenSettings, + AkriConnectorsTag, + AkriConnectorsTagDigestSettings, + AkriDiscoveryHandlerAioMetadata, + AkriDiscoveryHandlerDiagnostics, + AkriDiscoveryHandlerDiscoverableDeviceEndpointType, + AkriDiscoveryHandlerImageConfiguration, + AkriDiscoveryHandlerProperties, + AkriDiscoveryHandlerResource, + AkriDiscoveryHandlerSchedule, + AkriDiscoveryHandlerScheduleContinuous, + AkriDiscoveryHandlerScheduleCron, + AkriDiscoveryHandlerScheduleRunOnce, AuthorizationConfig, AuthorizationRule, + AzureDeviceRegistryNamespaceRef, BackendChain, BatchingConfiguration, BrokerAuthenticationProperties, @@ -32,21 +86,39 @@ BrokerDiagnostics, BrokerListenerProperties, BrokerListenerResource, + BrokerPersistence, + BrokerPersistenceDynamicSettings, + BrokerPersistenceEncryption, BrokerProperties, BrokerResource, BrokerResourceRule, + BrokerRetainMessagesCustomPolicy, + BrokerRetainMessagesDynamic, + BrokerRetainMessagesPolicy, + BrokerRetainMessagesSettings, + BrokerStateStoreCustomPolicy, + BrokerStateStoreDynamic, + BrokerStateStorePolicy, + BrokerStateStorePolicyResources, + BrokerStateStorePolicySettings, + BrokerSubscriberQueueCustomPolicy, + BrokerSubscriberQueueCustomPolicySettings, + BrokerSubscriberQueueDynamic, + 
BrokerSubscriberQueuePolicy, Cardinality, CertManagerCertOptions, CertManagerCertificateSpec, CertManagerIssuerRef, CertManagerPrivateKey, ClientConfig, + DatafloGraphDestinationNode, DataflowBuiltInTransformationDataset, DataflowBuiltInTransformationFilter, DataflowBuiltInTransformationMap, DataflowBuiltInTransformationSettings, DataflowDestinationOperationSettings, DataflowEndpointAuthenticationAccessToken, + DataflowEndpointAuthenticationAnonymous, DataflowEndpointAuthenticationSasl, DataflowEndpointAuthenticationServiceAccountToken, DataflowEndpointAuthenticationSystemAssignedManagedIdentity, @@ -65,8 +137,26 @@ DataflowEndpointLocalStorage, DataflowEndpointMqtt, DataflowEndpointMqttAuthentication, + DataflowEndpointOpenTelemetry, DataflowEndpointProperties, DataflowEndpointResource, + DataflowGraphConnectionInput, + DataflowGraphConnectionOutput, + DataflowGraphDestinationNodeSettings, + DataflowGraphGraphNode, + DataflowGraphGraphNodeConfiguration, + DataflowGraphNode, + DataflowGraphNodeConnection, + DataflowGraphNodeGraphSettings, + DataflowGraphProperties, + DataflowGraphResource, + DataflowGraphSchemaSettings, + DataflowGraphSourceNode, + DataflowGraphSourceSettings, + DataflowOpenTelemetryAnonymousAuthentication, + DataflowOpenTelemetryAuthentication, + DataflowOpenTelemetryServiceAccountAuthentication, + DataflowOpenTelemetryX509CertificateAuthentication, DataflowOperation, DataflowProfileProperties, DataflowProfileResource, @@ -81,6 +171,7 @@ ExtendedLocation, Frontend, GenerateResourceLimits, + InstanceFeature, InstancePatchModel, InstanceProperties, InstanceResource, @@ -94,9 +185,25 @@ PrincipalDefinition, ProfileDiagnostics, ProxyResource, + RegistryEndpointAnonymousAuthentication, + RegistryEndpointAnonymousSettings, + RegistryEndpointArtifactPullSecretAuthentication, + RegistryEndpointArtifactPullSecretSettings, + RegistryEndpointAuthentication, + RegistryEndpointProperties, + RegistryEndpointResource, + RegistryEndpointSystemAssignedIdentityAuthentication, + RegistryEndpointSystemAssignedManagedIdentitySettings, + RegistryEndpointTrustedSettings, + RegistryEndpointTrustedSigningKey, + RegistryEndpointTrustedSigningKeyConfigMap, + RegistryEndpointTrustedSigningKeySecret, + RegistryEndpointUserAssignedIdentityAuthentication, + RegistryEndpointUserAssignedManagedIdentitySettings, Resource, SanForCert, SchemaRegistryRef, + SecretProviderClassRef, SelfCheck, SelfTracing, StateStoreResourceRule, @@ -108,6 +215,7 @@ TrackedResource, UserAssignedIdentity, VolumeClaimResourceRequirements, + VolumeClaimResourceRequirementsClaims, VolumeClaimSpec, VolumeClaimSpecSelector, VolumeClaimSpecSelectorMatchExpressions, @@ -116,10 +224,23 @@ from ._enums import ( # type: ignore ActionType, + AkriConnectorTemplateAllocationPolicy, + AkriConnectorTemplateHelmRegistrySettingsType, + AkriConnectorTemplateManagedConfigurationType, + AkriConnectorTemplateRuntimeConfigurationType, + AkriConnectorsImagePullPolicy, + AkriConnectorsMqttAuthenticationMethod, + AkriConnectorsMqttProtocolType, + AkriConnectorsRegistrySettingsType, + AkriConnectorsTagDigestType, + AkriDiscoveryHandlerScheduleType, BrokerAuthenticationMethod, + BrokerAuthenticatorValidationMethods, BrokerMemoryProfile, + BrokerPersistencePolicyMode, BrokerProtocolType, BrokerResourceDefinitionMethods, + BrokerStateStoreKeyType, CertManagerIssuerKind, CloudEventAttributeType, CreatedByType, @@ -130,11 +251,16 @@ DataflowEndpointKafkaAcks, DataflowEndpointKafkaCompression, DataflowEndpointKafkaPartitionStrategy, + DataflowEnpointHostType, 
+ DataflowGraphNodeType, + DataflowGraphSerializationFormat, DataflowMappingType, + DataflowOpenTelemetryAuthenticationMethod, EndpointType, ExtendedLocationType, FabricOneLakeAuthMethod, FilterType, + InstanceFeatureMode, KafkaAuthMethod, ManagedServiceIdentityType, MqttAuthMethod, @@ -146,6 +272,8 @@ PrivateKeyAlgorithm, PrivateKeyRotationPolicy, ProvisioningState, + RegistryEndpointAuthenticationMethod, + RegistryEndpointTrustedSigningKeyType, ServiceType, SourceSerializationFormat, StateStoreResourceDefinitionMethods, @@ -160,8 +288,62 @@ __all__ = [ "AdvancedSettings", + "AkriConnectorProperties", + "AkriConnectorResource", + "AkriConnectorTemplateAioMetadata", + "AkriConnectorTemplateAllocation", + "AkriConnectorTemplateBucketizedAllocation", + "AkriConnectorTemplateDeviceInboundEndpointConfigurationSchemaRefs", + "AkriConnectorTemplateDeviceInboundEndpointType", + "AkriConnectorTemplateDiagnostics", + "AkriConnectorTemplateHelmAdvancedConfiguration", + "AkriConnectorTemplateHelmAuthSecretRef", + "AkriConnectorTemplateHelmConfiguration", + "AkriConnectorTemplateHelmConfigurationSettings", + "AkriConnectorTemplateHelmContainerRegistry", + "AkriConnectorTemplateHelmContainerRegistrySettings", + "AkriConnectorTemplateHelmDeleteConfiguration", + "AkriConnectorTemplateHelmInstallConfiguration", + "AkriConnectorTemplateHelmRegistryEndpointRef", + "AkriConnectorTemplateHelmRegistrySettings", + "AkriConnectorTemplateHelmUpgradeConfiguration", + "AkriConnectorTemplateManagedConfiguration", + "AkriConnectorTemplateManagedConfigurationSettings", + "AkriConnectorTemplatePersistentVolumeClaim", + "AkriConnectorTemplateProperties", + "AkriConnectorTemplateResource", + "AkriConnectorTemplateRuntimeConfiguration", + "AkriConnectorTemplateRuntimeImageConfiguration", + "AkriConnectorTemplateRuntimeImageConfigurationSettings", + "AkriConnectorTemplateRuntimeStatefulSetConfiguration", + "AkriConnectorTemplateTrustList", + "AkriConnectorsContainerRegistry", + "AkriConnectorsContainerRegistrySettings", + "AkriConnectorsDiagnosticsLogs", + "AkriConnectorsDigest", + "AkriConnectorsImagePullSecret", + "AkriConnectorsMqttAuthentication", + "AkriConnectorsMqttConnectionConfiguration", + "AkriConnectorsRegistryEndpointRef", + "AkriConnectorsRegistrySettings", + "AkriConnectorsSecret", + "AkriConnectorsServiceAccountAuthentication", + "AkriConnectorsServiceAccountTokenSettings", + "AkriConnectorsTag", + "AkriConnectorsTagDigestSettings", + "AkriDiscoveryHandlerAioMetadata", + "AkriDiscoveryHandlerDiagnostics", + "AkriDiscoveryHandlerDiscoverableDeviceEndpointType", + "AkriDiscoveryHandlerImageConfiguration", + "AkriDiscoveryHandlerProperties", + "AkriDiscoveryHandlerResource", + "AkriDiscoveryHandlerSchedule", + "AkriDiscoveryHandlerScheduleContinuous", + "AkriDiscoveryHandlerScheduleCron", + "AkriDiscoveryHandlerScheduleRunOnce", "AuthorizationConfig", "AuthorizationRule", + "AzureDeviceRegistryNamespaceRef", "BackendChain", "BatchingConfiguration", "BrokerAuthenticationProperties", @@ -177,21 +359,39 @@ "BrokerDiagnostics", "BrokerListenerProperties", "BrokerListenerResource", + "BrokerPersistence", + "BrokerPersistenceDynamicSettings", + "BrokerPersistenceEncryption", "BrokerProperties", "BrokerResource", "BrokerResourceRule", + "BrokerRetainMessagesCustomPolicy", + "BrokerRetainMessagesDynamic", + "BrokerRetainMessagesPolicy", + "BrokerRetainMessagesSettings", + "BrokerStateStoreCustomPolicy", + "BrokerStateStoreDynamic", + "BrokerStateStorePolicy", + "BrokerStateStorePolicyResources", + 
"BrokerStateStorePolicySettings", + "BrokerSubscriberQueueCustomPolicy", + "BrokerSubscriberQueueCustomPolicySettings", + "BrokerSubscriberQueueDynamic", + "BrokerSubscriberQueuePolicy", "Cardinality", "CertManagerCertOptions", "CertManagerCertificateSpec", "CertManagerIssuerRef", "CertManagerPrivateKey", "ClientConfig", + "DatafloGraphDestinationNode", "DataflowBuiltInTransformationDataset", "DataflowBuiltInTransformationFilter", "DataflowBuiltInTransformationMap", "DataflowBuiltInTransformationSettings", "DataflowDestinationOperationSettings", "DataflowEndpointAuthenticationAccessToken", + "DataflowEndpointAuthenticationAnonymous", "DataflowEndpointAuthenticationSasl", "DataflowEndpointAuthenticationServiceAccountToken", "DataflowEndpointAuthenticationSystemAssignedManagedIdentity", @@ -210,8 +410,26 @@ "DataflowEndpointLocalStorage", "DataflowEndpointMqtt", "DataflowEndpointMqttAuthentication", + "DataflowEndpointOpenTelemetry", "DataflowEndpointProperties", "DataflowEndpointResource", + "DataflowGraphConnectionInput", + "DataflowGraphConnectionOutput", + "DataflowGraphDestinationNodeSettings", + "DataflowGraphGraphNode", + "DataflowGraphGraphNodeConfiguration", + "DataflowGraphNode", + "DataflowGraphNodeConnection", + "DataflowGraphNodeGraphSettings", + "DataflowGraphProperties", + "DataflowGraphResource", + "DataflowGraphSchemaSettings", + "DataflowGraphSourceNode", + "DataflowGraphSourceSettings", + "DataflowOpenTelemetryAnonymousAuthentication", + "DataflowOpenTelemetryAuthentication", + "DataflowOpenTelemetryServiceAccountAuthentication", + "DataflowOpenTelemetryX509CertificateAuthentication", "DataflowOperation", "DataflowProfileProperties", "DataflowProfileResource", @@ -226,6 +444,7 @@ "ExtendedLocation", "Frontend", "GenerateResourceLimits", + "InstanceFeature", "InstancePatchModel", "InstanceProperties", "InstanceResource", @@ -239,9 +458,25 @@ "PrincipalDefinition", "ProfileDiagnostics", "ProxyResource", + "RegistryEndpointAnonymousAuthentication", + "RegistryEndpointAnonymousSettings", + "RegistryEndpointArtifactPullSecretAuthentication", + "RegistryEndpointArtifactPullSecretSettings", + "RegistryEndpointAuthentication", + "RegistryEndpointProperties", + "RegistryEndpointResource", + "RegistryEndpointSystemAssignedIdentityAuthentication", + "RegistryEndpointSystemAssignedManagedIdentitySettings", + "RegistryEndpointTrustedSettings", + "RegistryEndpointTrustedSigningKey", + "RegistryEndpointTrustedSigningKeyConfigMap", + "RegistryEndpointTrustedSigningKeySecret", + "RegistryEndpointUserAssignedIdentityAuthentication", + "RegistryEndpointUserAssignedManagedIdentitySettings", "Resource", "SanForCert", "SchemaRegistryRef", + "SecretProviderClassRef", "SelfCheck", "SelfTracing", "StateStoreResourceRule", @@ -253,15 +488,29 @@ "TrackedResource", "UserAssignedIdentity", "VolumeClaimResourceRequirements", + "VolumeClaimResourceRequirementsClaims", "VolumeClaimSpec", "VolumeClaimSpecSelector", "VolumeClaimSpecSelectorMatchExpressions", "X509ManualCertificate", "ActionType", + "AkriConnectorTemplateAllocationPolicy", + "AkriConnectorTemplateHelmRegistrySettingsType", + "AkriConnectorTemplateManagedConfigurationType", + "AkriConnectorTemplateRuntimeConfigurationType", + "AkriConnectorsImagePullPolicy", + "AkriConnectorsMqttAuthenticationMethod", + "AkriConnectorsMqttProtocolType", + "AkriConnectorsRegistrySettingsType", + "AkriConnectorsTagDigestType", + "AkriDiscoveryHandlerScheduleType", "BrokerAuthenticationMethod", + "BrokerAuthenticatorValidationMethods", "BrokerMemoryProfile", + 
"BrokerPersistencePolicyMode", "BrokerProtocolType", "BrokerResourceDefinitionMethods", + "BrokerStateStoreKeyType", "CertManagerIssuerKind", "CloudEventAttributeType", "CreatedByType", @@ -272,11 +521,16 @@ "DataflowEndpointKafkaAcks", "DataflowEndpointKafkaCompression", "DataflowEndpointKafkaPartitionStrategy", + "DataflowEnpointHostType", + "DataflowGraphNodeType", + "DataflowGraphSerializationFormat", "DataflowMappingType", + "DataflowOpenTelemetryAuthenticationMethod", "EndpointType", "ExtendedLocationType", "FabricOneLakeAuthMethod", "FilterType", + "InstanceFeatureMode", "KafkaAuthMethod", "ManagedServiceIdentityType", "MqttAuthMethod", @@ -288,6 +542,8 @@ "PrivateKeyAlgorithm", "PrivateKeyRotationPolicy", "ProvisioningState", + "RegistryEndpointAuthenticationMethod", + "RegistryEndpointTrustedSigningKeyType", "ServiceType", "SourceSerializationFormat", "StateStoreResourceDefinitionMethods", diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/models/_enums.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/models/_enums.py index 494bf7b4726e..c5509b3fab30 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/models/_enums.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/models/_enums.py @@ -19,6 +19,94 @@ class ActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Actions are for internal-only APIs.""" +class AkriConnectorsImagePullPolicy(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Image pull policy.""" + + ALWAYS = "Always" + """Always pull the image.""" + IF_NOT_PRESENT = "IfNotPresent" + """IfNotPresent pull the image.""" + NEVER = "Never" + """Never pull the image.""" + + +class AkriConnectorsMqttAuthenticationMethod(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AkriConnectorsMqttAuthenticationMethod properties.""" + + SERVICE_ACCOUNT_TOKEN = "ServiceAccountToken" + """Service Account Token authentication.""" + + +class AkriConnectorsMqttProtocolType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Mqtt protocol types.""" + + MQTT = "Mqtt" + """Mqtt protocol.""" + + +class AkriConnectorsRegistrySettingsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AkriConnectorsRegistrySettings properties.""" + + REGISTRY_ENDPOINT_REF = "RegistryEndpointRef" + """A Registry Endpoint reference.""" + CONTAINER_REGISTRY = "ContainerRegistry" + """A Container Registry reference.""" + + +class AkriConnectorsTagDigestType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AkriConnectorsTagDigestType values.""" + + TAG = "Tag" + """Indicates that a tag should be specified.""" + DIGEST = "Digest" + """Indicates that a digest should be specified.""" + + +class AkriConnectorTemplateAllocationPolicy(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AkriConnectorTemplateAllocationPolicy properties.""" + + BUCKETIZED = "Bucketized" + """Bucketized allocation policy.""" + + +class AkriConnectorTemplateHelmRegistrySettingsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AkriConnectorTemplateHelmRegistrySettingsType values.""" + + REGISTRY_ENDPOINT_REF = "RegistryEndpointRef" + """A Registry Endpoint reference.""" + CONTAINER_REGISTRY = "ContainerRegistry" + """A Container Registry reference.""" + + +class AkriConnectorTemplateManagedConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Managed configuration types.""" + + IMAGE_CONFIGURATION = "ImageConfiguration" + """Image Configuration Type.""" + STATEFUL_SET_CONFIGURATION = 
"StatefulSetConfiguration" + """StatefulSet Configuration Type.""" + + +class AkriConnectorTemplateRuntimeConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Runtime configuration types.""" + + HELM_CONFIGURATION = "HelmConfiguration" + """Helm Configuration Type.""" + MANAGED_CONFIGURATION = "ManagedConfiguration" + """Managed Configuration Type.""" + + +class AkriDiscoveryHandlerScheduleType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AkriDiscoveryHandlerScheduleType properties.""" + + CRON = "Cron" + """The schedule is a cron expression.""" + RUN_ONCE = "RunOnce" + """The discovery handler should run once.""" + CONTINUOUS = "Continuous" + """The discovery handler should run continuously.""" + + class BrokerAuthenticationMethod(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Broker Authentication Mode.""" @@ -30,6 +118,15 @@ class BrokerAuthenticationMethod(str, Enum, metaclass=CaseInsensitiveEnumMeta): """X.509 authentication configuration.""" +class BrokerAuthenticatorValidationMethods(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """X509 authentication validation methods.""" + + NONE = "None" + """No additional validation is performed""" + AZURE_DEVICE_REGISTRY = "AzureDeviceRegistry" + """Additional validation is performed using the Azure Device Registry.""" + + class BrokerMemoryProfile(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The memory profile settings of the Broker.""" @@ -43,6 +140,17 @@ class BrokerMemoryProfile(str, Enum, metaclass=CaseInsensitiveEnumMeta): """High memory profile.""" +class BrokerPersistencePolicyMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Broker Persistence Policy Mode values.""" + + ALL = "All" + """Policy mode for All.""" + NONE = "None" + """Policy mode for None.""" + CUSTOM = "Custom" + """Indicates that the policy is a custom policy.""" + + class BrokerProtocolType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Broker Protocol types.""" @@ -63,6 +171,18 @@ class BrokerResourceDefinitionMethods(str, Enum, metaclass=CaseInsensitiveEnumMe """Allowed Subscribing to Broker""" +class BrokerStateStoreKeyType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Broker State Store Key Type properties.""" + + PATTERN = "Pattern" + """Used for glob-style pattern matching.""" + STRING = "String" + """Used to do exact match, for example, when a key contains characters that might be otherwise + matched as a pattern (*, ?, [0-9]).""" + BINARY = "Binary" + """Used to match a binary key.""" + + class CertManagerIssuerKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): """CertManagerIssuerKind properties.""" @@ -161,6 +281,47 @@ class DataflowEndpointKafkaPartitionStrategy(str, Enum, metaclass=CaseInsensitiv """PROPERTY Option""" +class DataflowEnpointHostType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """DataflowEndpoint Host Type properties.""" + + FABRIC_RT = "FabricRT" + """Fabric Real-Time Type""" + EVENT_GRID = "EventGrid" + """EventGrid Type""" + LOCAL_BROKER = "LocalBroker" + """Local MQTT Type""" + EVENTHUB = "Eventhub" + """EventHub Type""" + CUSTOM_MQTT = "CustomMqtt" + """Custom MQTT Type""" + CUSTOM_KAFKA = "CustomKafka" + """Custom Kafka Type""" + + +class DataflowGraphNodeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """DataflowGraph node types.""" + + SOURCE = "Source" + """Dataflow source node.""" + GRAPH = "Graph" + """Dataflow graph node.""" + DESTINATION = "Destination" + """Dataflow destination node.""" + + +class DataflowGraphSerializationFormat(str, Enum, 
metaclass=CaseInsensitiveEnumMeta): + """Serialization format for dataflow graph.""" + + DELTA = "Delta" + """Delta Format""" + JSON = "Json" + """JSON Format""" + PARQUET = "Parquet" + """Parquet Format""" + AVRO = "Avro" + """Avro serialization format.""" + + class DataflowMappingType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Dataflow type mapping properties.""" @@ -176,6 +337,17 @@ class DataflowMappingType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Built in function type""" +class DataflowOpenTelemetryAuthenticationMethod(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Dataflow OpenTelemetry authentication method values.""" + + SERVICE_ACCOUNT_TOKEN = "ServiceAccountToken" + """Uses serviceaccount token.""" + X509_CERTIFICATE = "X509Certificate" + """Uses x509 certificate.""" + ANONYMOUS = "Anonymous" + """Connects anonymously.""" + + class DataLakeStorageAuthMethod(str, Enum, metaclass=CaseInsensitiveEnumMeta): """DataflowEndpoint Data Lake Storage Authentication Method properties.""" @@ -202,6 +374,8 @@ class EndpointType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Local Storage Type""" MQTT = "Mqtt" """Broker Type""" + OPEN_TELEMETRY = "OpenTelemetry" + """OpenTelemetry Type""" class ExtendedLocationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -227,6 +401,17 @@ class FilterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Filter type""" +class InstanceFeatureMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The enum defining mode of a feature.""" + + STABLE = "Stable" + """Opt in to enable a stable feature""" + PREVIEW = "Preview" + """Opt in to enable a preview feature""" + DISABLED = "Disabled" + """Opt out of a feature""" + + class KafkaAuthMethod(str, Enum, metaclass=CaseInsensitiveEnumMeta): """DataflowEndpoint Kafka Authentication Method properties.""" @@ -374,6 +559,28 @@ class ProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Resource has been Accepted.""" +class RegistryEndpointAuthenticationMethod(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The authentication method.""" + + SYSTEM_ASSIGNED_MANAGED_IDENTITY = "SystemAssignedManagedIdentity" + """SystemAssignedManagedIdentity type""" + USER_ASSIGNED_MANAGED_IDENTITY = "UserAssignedManagedIdentity" + """UserAssignedManagedIdentity type""" + ANONYMOUS = "Anonymous" + """Anonymous Option""" + ARTIFACT_PULL_SECRET = "ArtifactPullSecret" + """Artifact Pull Secret authentication""" + + +class RegistryEndpointTrustedSigningKeyType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """RegistryEndpointTrustedSigningKeyType values.""" + + SECRET = "Secret" + """Trust settings stored in a Kubernetes Secret.""" + CONFIG_MAP = "ConfigMap" + """Trust settings stored in a Kubernetes ConfigMap.""" + + class ServiceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Kubernetes Service Types supported by Listener.""" diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/models/_models.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/models/_models.py index 6b3c3b2a8144..dcfc63f8faf4 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/models/_models.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/models/_models.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. 
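Quick orientation for the new enums (illustrative only, not part of the generated patch): they are string-valued enums built on azure.core's CaseInsensitiveEnumMeta, so members compare equal to their wire values and lookup by member name is case-insensitive.

from azure.mgmt.iotoperations.models import (
    AkriConnectorsImagePullPolicy,
    DataflowGraphSerializationFormat,
)

# String-valued members compare equal to the wire value they serialize to.
assert AkriConnectorsImagePullPolicy.IF_NOT_PRESENT == "IfNotPresent"
assert DataflowGraphSerializationFormat.PARQUET == "Parquet"

# CaseInsensitiveEnumMeta makes lookup by member name case-insensitive.
assert AkriConnectorsImagePullPolicy["if_not_present"] is AkriConnectorsImagePullPolicy.IF_NOT_PRESENT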
All rights reserved. @@ -9,16 +9,30 @@ # pylint: disable=useless-super-delegation import datetime -from typing import Any, Dict, List, Mapping, Optional, TYPE_CHECKING, Union, overload - -from .. import _model_base -from .._model_base import rest_field +from typing import Any, Dict, List, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload + +from .._utils.model_base import Model as _Model, rest_discriminator, rest_field +from ._enums import ( + AkriConnectorTemplateAllocationPolicy, + AkriConnectorTemplateHelmRegistrySettingsType, + AkriConnectorTemplateManagedConfigurationType, + AkriConnectorTemplateRuntimeConfigurationType, + AkriConnectorsMqttAuthenticationMethod, + AkriConnectorsRegistrySettingsType, + AkriConnectorsTagDigestType, + AkriDiscoveryHandlerScheduleType, + BrokerPersistencePolicyMode, + DataflowGraphNodeType, + DataflowOpenTelemetryAuthenticationMethod, + RegistryEndpointAuthenticationMethod, + RegistryEndpointTrustedSigningKeyType, +) if TYPE_CHECKING: from .. import models as _models -class AdvancedSettings(_model_base.Model): +class AdvancedSettings(_Model): """Broker Advanced Settings. :ivar clients: Configurations related to All Clients. @@ -30,14 +44,16 @@ class AdvancedSettings(_model_base.Model): :vartype internal_certs: ~azure.mgmt.iotoperations.models.CertManagerCertOptions """ - clients: Optional["_models.ClientConfig"] = rest_field() + clients: Optional["_models.ClientConfig"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Configurations related to All Clients.""" encrypt_internal_traffic: Optional[Union[str, "_models.OperationalMode"]] = rest_field( - name="encryptInternalTraffic" + name="encryptInternalTraffic", visibility=["read", "create", "update", "delete", "query"] ) """The setting to enable or disable encryption of internal Traffic. Known values are: \"Enabled\" and \"Disabled\".""" - internal_certs: Optional["_models.CertManagerCertOptions"] = rest_field(name="internalCerts") + internal_certs: Optional["_models.CertManagerCertOptions"] = rest_field( + name="internalCerts", visibility=["read", "create", "update", "delete", "query"] + ) """Certificate rotation and private key configuration.""" @overload @@ -60,203 +76,26 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class AuthorizationConfig(_model_base.Model): - """Broker AuthorizationConfig properties. - - :ivar cache: Enable caching of the authorization rules. Known values are: "Enabled" and - "Disabled". - :vartype cache: str or ~azure.mgmt.iotoperations.models.OperationalMode - :ivar rules: The authorization rules to follow. If no rule is set, but Authorization Resource - is used that would mean DenyAll. - :vartype rules: list[~azure.mgmt.iotoperations.models.AuthorizationRule] - """ - - cache: Optional[Union[str, "_models.OperationalMode"]] = rest_field() - """Enable caching of the authorization rules. Known values are: \"Enabled\" and \"Disabled\".""" - rules: Optional[List["_models.AuthorizationRule"]] = rest_field() - """The authorization rules to follow. If no rule is set, but Authorization Resource is used that - would mean DenyAll.""" - - @overload - def __init__( - self, - *, - cache: Optional[Union[str, "_models.OperationalMode"]] = None, - rules: Optional[List["_models.AuthorizationRule"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class AuthorizationRule(_model_base.Model): - """AuthorizationConfig Rule Properties. - - - :ivar broker_resources: Give access to Broker methods and topics. Required. - :vartype broker_resources: list[~azure.mgmt.iotoperations.models.BrokerResourceRule] - :ivar principals: Give access to clients based on the following properties. Required. - :vartype principals: ~azure.mgmt.iotoperations.models.PrincipalDefinition - :ivar state_store_resources: Give access to state store resources. - :vartype state_store_resources: list[~azure.mgmt.iotoperations.models.StateStoreResourceRule] - """ - - broker_resources: List["_models.BrokerResourceRule"] = rest_field(name="brokerResources") - """Give access to Broker methods and topics. Required.""" - principals: "_models.PrincipalDefinition" = rest_field() - """Give access to clients based on the following properties. Required.""" - state_store_resources: Optional[List["_models.StateStoreResourceRule"]] = rest_field(name="stateStoreResources") - """Give access to state store resources.""" - - @overload - def __init__( - self, - *, - broker_resources: List["_models.BrokerResourceRule"], - principals: "_models.PrincipalDefinition", - state_store_resources: Optional[List["_models.StateStoreResourceRule"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class BackendChain(_model_base.Model): - """Desired properties of the backend instances of the broker. - - - :ivar partitions: The desired number of physical backend partitions. Required. - :vartype partitions: int - :ivar redundancy_factor: The desired numbers of backend replicas (pods) in a physical - partition. Required. - :vartype redundancy_factor: int - :ivar workers: Number of logical backend workers per replica (pod). - :vartype workers: int - """ - - partitions: int = rest_field() - """The desired number of physical backend partitions. Required.""" - redundancy_factor: int = rest_field(name="redundancyFactor") - """The desired numbers of backend replicas (pods) in a physical partition. Required.""" - workers: Optional[int] = rest_field() - """Number of logical backend workers per replica (pod).""" - - @overload - def __init__( - self, - *, - partitions: int, - redundancy_factor: int, - workers: Optional[int] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class BatchingConfiguration(_model_base.Model): - """Batching configuration. - - :ivar latency_seconds: Batching latency in seconds. - :vartype latency_seconds: int - :ivar max_messages: Maximum number of messages in a batch. - :vartype max_messages: int - """ - - latency_seconds: Optional[int] = rest_field(name="latencySeconds") - """Batching latency in seconds.""" - max_messages: Optional[int] = rest_field(name="maxMessages") - """Maximum number of messages in a batch.""" - - @overload - def __init__( - self, - *, - latency_seconds: Optional[int] = None, - max_messages: Optional[int] = None, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class BrokerAuthenticationProperties(_model_base.Model): - """BrokerAuthentication Resource properties. - - Readonly variables are only populated by the server, and will be ignored when sending a request. +class AkriConnectorProperties(_Model): + """AkriConnector properties. - - :ivar authentication_methods: Defines a set of Broker authentication methods to be used on - ``BrokerListeners``. For each array element one authenticator type supported. Required. - :vartype authentication_methods: - list[~azure.mgmt.iotoperations.models.BrokerAuthenticatorMethods] :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState """ - authentication_methods: List["_models.BrokerAuthenticatorMethods"] = rest_field(name="authenticationMethods") - """Defines a set of Broker authentication methods to be used on ``BrokerListeners``. For each - array element one authenticator type supported. Required.""" provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( name="provisioningState", visibility=["read"] ) """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" - @overload - def __init__( - self, - *, - authentication_methods: List["_models.BrokerAuthenticatorMethods"], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - -class Resource(_model_base.Model): - """Common fields that are returned in the response for all Azure Resource Manager resources. - - Readonly variables are only populated by the server, and will be ignored when sending a request. +class Resource(_Model): + """Resource. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -270,7 +109,7 @@ class Resource(_model_base.Model): id: Optional[str] = rest_field(visibility=["read"]) """Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long""" + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.""" name: Optional[str] = rest_field(visibility=["read"]) """The name of the resource.""" type: Optional[str] = rest_field(visibility=["read"]) @@ -281,13 +120,10 @@ class Resource(_model_base.Model): class ProxyResource(Resource): - """The resource model definition for a Azure Resource Manager proxy resource. 
It will not have - tags and a location. - - Readonly variables are only populated by the server, and will be ignored when sending a request. + """Proxy Resource. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -300,14 +136,11 @@ class ProxyResource(Resource): """ -class BrokerAuthenticationResource(ProxyResource): - """Instance broker authentication resource. - - Readonly variables are only populated by the server, and will be ignored when sending a request. - +class AkriConnectorResource(ProxyResource): + """AkriConnector resource. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -318,22 +151,26 @@ class BrokerAuthenticationResource(ProxyResource): information. :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData :ivar properties: The resource-specific properties for this resource. - :vartype properties: ~azure.mgmt.iotoperations.models.BrokerAuthenticationProperties - :ivar extended_location: Edge location of the resource. Required. + :vartype properties: ~azure.mgmt.iotoperations.models.AkriConnectorProperties + :ivar extended_location: Edge location of the resource. :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation """ - properties: Optional["_models.BrokerAuthenticationProperties"] = rest_field() + properties: Optional["_models.AkriConnectorProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) """The resource-specific properties for this resource.""" - extended_location: "_models.ExtendedLocation" = rest_field(name="extendedLocation", visibility=["read", "create"]) - """Edge location of the resource. Required.""" + extended_location: Optional["_models.ExtendedLocation"] = rest_field( + name="extendedLocation", visibility=["read", "create"] + ) + """Edge location of the resource.""" @overload def __init__( self, *, - extended_location: "_models.ExtendedLocation", - properties: Optional["_models.BrokerAuthenticationProperties"] = None, + properties: Optional["_models.AkriConnectorProperties"] = None, + extended_location: Optional["_models.ExtendedLocation"] = None, ) -> None: ... @overload @@ -347,22 +184,29 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BrokerAuthenticatorCustomAuth(_model_base.Model): - """Custom Authentication properties. +class AkriConnectorsRegistrySettings(_Model): + """AkriConnectorsRegistrySettings properties. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AkriConnectorsContainerRegistry, AkriConnectorsRegistryEndpointRef - :ivar x509: X509 Custom Auth type details. Required. - :vartype x509: ~azure.mgmt.iotoperations.models.X509ManualCertificate + :ivar registry_settings_type: Required. 
Known values are: "RegistryEndpointRef" and + "ContainerRegistry". + :vartype registry_settings_type: str or + ~azure.mgmt.iotoperations.models.AkriConnectorsRegistrySettingsType """ - x509: "_models.X509ManualCertificate" = rest_field() - """X509 Custom Auth type details. Required.""" + __mapping__: Dict[str, _Model] = {} + registry_settings_type: str = rest_discriminator( + name="registrySettingsType", visibility=["read", "create", "update", "delete", "query"] + ) + """Required. Known values are: \"RegistryEndpointRef\" and \"ContainerRegistry\".""" @overload def __init__( self, *, - x509: "_models.X509ManualCertificate", + registry_settings_type: str, ) -> None: ... @overload @@ -376,40 +220,29 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BrokerAuthenticatorMethodCustom(_model_base.Model): - """Custom method for BrokerAuthentication. - +class AkriConnectorsContainerRegistry(AkriConnectorsRegistrySettings, discriminator="ContainerRegistry"): + """AkriConnectorsContainerRegistry properties. - :ivar auth: Optional authentication needed for authenticating with the custom authentication - server. - :vartype auth: ~azure.mgmt.iotoperations.models.BrokerAuthenticatorCustomAuth - :ivar ca_cert_config_map: Optional CA certificate for validating the custom authentication - server's certificate. - :vartype ca_cert_config_map: str - :ivar endpoint: Endpoint of the custom authentication server. Must be an HTTPS endpoint. - Required. - :vartype endpoint: str - :ivar headers: Additional HTTP headers to pass to the custom authentication server. - :vartype headers: dict[str, str] + :ivar registry_settings_type: The registry settings type. Required. A Container Registry + reference. + :vartype registry_settings_type: str or ~azure.mgmt.iotoperations.models.CONTAINER_REGISTRY + :ivar container_registry_settings: The registry settings for the container registry. Required. + :vartype container_registry_settings: + ~azure.mgmt.iotoperations.models.AkriConnectorsContainerRegistrySettings """ - auth: Optional["_models.BrokerAuthenticatorCustomAuth"] = rest_field() - """Optional authentication needed for authenticating with the custom authentication server.""" - ca_cert_config_map: Optional[str] = rest_field(name="caCertConfigMap") - """Optional CA certificate for validating the custom authentication server's certificate.""" - endpoint: str = rest_field() - """Endpoint of the custom authentication server. Must be an HTTPS endpoint. Required.""" - headers: Optional[Dict[str, str]] = rest_field() - """Additional HTTP headers to pass to the custom authentication server.""" + registry_settings_type: Literal[AkriConnectorsRegistrySettingsType.CONTAINER_REGISTRY] = rest_discriminator(name="registrySettingsType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The registry settings type. Required. A Container Registry reference.""" + container_registry_settings: "_models.AkriConnectorsContainerRegistrySettings" = rest_field( + name="containerRegistrySettings", visibility=["read", "create", "update", "delete", "query"] + ) + """The registry settings for the container registry. Required.""" @overload def __init__( self, *, - endpoint: str, - auth: Optional["_models.BrokerAuthenticatorCustomAuth"] = None, - ca_cert_config_map: Optional[str] = None, - headers: Optional[Dict[str, str]] = None, + container_registry_settings: "_models.AkriConnectorsContainerRegistrySettings", ) -> None: ... 
@overload @@ -420,45 +253,34 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - + super().__init__(*args, registry_settings_type=AkriConnectorsRegistrySettingsType.CONTAINER_REGISTRY, **kwargs) -class BrokerAuthenticatorMethods(_model_base.Model): - """Set of broker authentication policies. Only one method is supported for each entry. +class AkriConnectorsContainerRegistrySettings(_Model): + """AkriConnectorsContainerRegistry properties. - :ivar method: Custom authentication configuration. Required. Known values are: "Custom", - "ServiceAccountToken", and "X509". - :vartype method: str or ~azure.mgmt.iotoperations.models.BrokerAuthenticationMethod - :ivar custom_settings: Custom authentication configuration. - :vartype custom_settings: ~azure.mgmt.iotoperations.models.BrokerAuthenticatorMethodCustom - :ivar service_account_token_settings: ServiceAccountToken authentication configuration. - :vartype service_account_token_settings: - ~azure.mgmt.iotoperations.models.BrokerAuthenticatorMethodSat - :ivar x509_settings: X.509 authentication configuration. - :vartype x509_settings: ~azure.mgmt.iotoperations.models.BrokerAuthenticatorMethodX509 + :ivar registry: The container registry to use for the artifact. Required. + :vartype registry: str + :ivar image_pull_secrets: Optional list of references to secrets in the same namespace to use + for pulling the connector image. + :vartype image_pull_secrets: + list[~azure.mgmt.iotoperations.models.AkriConnectorsImagePullSecret] """ - method: Union[str, "_models.BrokerAuthenticationMethod"] = rest_field() - """Custom authentication configuration. Required. Known values are: \"Custom\", - \"ServiceAccountToken\", and \"X509\".""" - custom_settings: Optional["_models.BrokerAuthenticatorMethodCustom"] = rest_field(name="customSettings") - """Custom authentication configuration.""" - service_account_token_settings: Optional["_models.BrokerAuthenticatorMethodSat"] = rest_field( - name="serviceAccountTokenSettings" + registry: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The container registry to use for the artifact. Required.""" + image_pull_secrets: Optional[List["_models.AkriConnectorsImagePullSecret"]] = rest_field( + name="imagePullSecrets", visibility=["read", "create", "update", "delete", "query"] ) - """ServiceAccountToken authentication configuration.""" - x509_settings: Optional["_models.BrokerAuthenticatorMethodX509"] = rest_field(name="x509Settings") - """X.509 authentication configuration.""" + """Optional list of references to secrets in the same namespace to use for pulling the connector + image.""" @overload def __init__( self, *, - method: Union[str, "_models.BrokerAuthenticationMethod"], - custom_settings: Optional["_models.BrokerAuthenticatorMethodCustom"] = None, - service_account_token_settings: Optional["_models.BrokerAuthenticatorMethodSat"] = None, - x509_settings: Optional["_models.BrokerAuthenticatorMethodX509"] = None, + registry: str, + image_pull_secrets: Optional[List["_models.AkriConnectorsImagePullSecret"]] = None, ) -> None: ... @overload @@ -472,22 +294,21 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BrokerAuthenticatorMethodSat(_model_base.Model): - """Service Account Token for BrokerAuthentication. - +class AkriConnectorsDiagnosticsLogs(_Model): + """AkriConnectorsDiagnostic Log properties. 
- :ivar audiences: List of allowed audience. Required. - :vartype audiences: list[str] + :ivar level: The log level. Examples - 'debug', 'info', 'warn', 'error', 'trace'. + :vartype level: str """ - audiences: List[str] = rest_field() - """List of allowed audience. Required.""" + level: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The log level. Examples - 'debug', 'info', 'warn', 'error', 'trace'.""" @overload def __init__( self, *, - audiences: List[str], + level: Optional[str] = None, ) -> None: ... @overload @@ -501,29 +322,27 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BrokerAuthenticatorMethodX509(_model_base.Model): - """X509 for BrokerAuthentication. +class AkriConnectorsTagDigestSettings(_Model): + """AkriConnectorsTagDigestSettings properties. - :ivar authorization_attributes: X509 authorization attributes properties. - :vartype authorization_attributes: dict[str, - ~azure.mgmt.iotoperations.models.BrokerAuthenticatorMethodX509Attributes] - :ivar trusted_client_ca_cert: Name of the trusted client ca cert resource. - :vartype trusted_client_ca_cert: str + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AkriConnectorsDigest, AkriConnectorsTag + + :ivar tag_digest_type: The tag or digest type. Required. Known values are: "Tag" and "Digest". + :vartype tag_digest_type: str or ~azure.mgmt.iotoperations.models.AkriConnectorsTagDigestType """ - authorization_attributes: Optional[Dict[str, "_models.BrokerAuthenticatorMethodX509Attributes"]] = rest_field( - name="authorizationAttributes" + __mapping__: Dict[str, _Model] = {} + tag_digest_type: str = rest_discriminator( + name="tagDigestType", visibility=["read", "create", "update", "delete", "query"] ) - """X509 authorization attributes properties.""" - trusted_client_ca_cert: Optional[str] = rest_field(name="trustedClientCaCert") - """Name of the trusted client ca cert resource.""" + """The tag or digest type. Required. Known values are: \"Tag\" and \"Digest\".""" @overload def __init__( self, *, - authorization_attributes: Optional[Dict[str, "_models.BrokerAuthenticatorMethodX509Attributes"]] = None, - trusted_client_ca_cert: Optional[str] = None, + tag_digest_type: str, ) -> None: ... @overload @@ -537,27 +356,26 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BrokerAuthenticatorMethodX509Attributes(_model_base.Model): - """BrokerAuthenticatorMethodX509Attributes properties. - +class AkriConnectorsDigest(AkriConnectorsTagDigestSettings, discriminator="Digest"): + """AkriConnectorsDigest properties. - :ivar attributes: Attributes object. Required. - :vartype attributes: dict[str, str] - :ivar subject: Subject of the X509 attribute. Required. - :vartype subject: str + :ivar tag_digest_type: The tag or digest type. Required. Indicates that a digest should be + specified. + :vartype tag_digest_type: str or ~azure.mgmt.iotoperations.models.DIGEST + :ivar digest: The digest of the image. Required. + :vartype digest: str """ - attributes: Dict[str, str] = rest_field() - """Attributes object. Required.""" - subject: str = rest_field() - """Subject of the X509 attribute. Required.""" + tag_digest_type: Literal[AkriConnectorsTagDigestType.DIGEST] = rest_discriminator(name="tagDigestType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The tag or digest type. Required. 
Indicates that a digest should be specified.""" + digest: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The digest of the image. Required.""" @overload def __init__( self, *, - attributes: Dict[str, str], - subject: str, + digest: str, ) -> None: ... @overload @@ -568,36 +386,24 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) + super().__init__(*args, tag_digest_type=AkriConnectorsTagDigestType.DIGEST, **kwargs) -class BrokerAuthorizationProperties(_model_base.Model): - """BrokerAuthorization Resource properties. - - Readonly variables are only populated by the server, and will be ignored when sending a request. +class AkriConnectorsImagePullSecret(_Model): + """AkriConnectorsImagePullSecret properties. - - :ivar authorization_policies: The list of authorization policies supported by the Authorization - Resource. Required. - :vartype authorization_policies: ~azure.mgmt.iotoperations.models.AuthorizationConfig - :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", - "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". - :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState + :ivar secret_ref: The name of the image pull secret. Required. + :vartype secret_ref: str """ - authorization_policies: "_models.AuthorizationConfig" = rest_field(name="authorizationPolicies") - """The list of authorization policies supported by the Authorization Resource. Required.""" - provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( - name="provisioningState", visibility=["read"] - ) - """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", - \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" + secret_ref: str = rest_field(name="secretRef", visibility=["read", "create", "update", "delete", "query"]) + """The name of the image pull secret. Required.""" @overload def __init__( self, *, - authorization_policies: "_models.AuthorizationConfig", + secret_ref: str, ) -> None: ... @overload @@ -611,40 +417,26 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BrokerAuthorizationResource(ProxyResource): - """Instance broker authorizations resource. - - Readonly variables are only populated by the server, and will be ignored when sending a request. +class AkriConnectorsMqttAuthentication(_Model): + """AkriConnectorsMqttAuthentication properties. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AkriConnectorsServiceAccountAuthentication - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData - :ivar properties: The resource-specific properties for this resource. 
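Construction sketch for the polymorphic registry settings defined above (illustrative only, not part of the generated patch). It uses only keyword arguments visible in this diff; each subclass fills in the registrySettingsType discriminator automatically in its __init__.

from azure.mgmt.iotoperations.models import (
    AkriConnectorsContainerRegistry,
    AkriConnectorsContainerRegistrySettings,
    AkriConnectorsImagePullSecret,
    AkriConnectorsRegistryEndpointRef,
)

# Variant 1: reference a RegistryEndpoint resource by name.
endpoint_ref = AkriConnectorsRegistryEndpointRef(registry_endpoint_ref="my-registry-endpoint")

# Variant 2: point directly at a container registry, optionally with image pull secrets.
container_registry = AkriConnectorsContainerRegistry(
    container_registry_settings=AkriConnectorsContainerRegistrySettings(
        registry="myregistry.azurecr.io",  # placeholder registry host
        image_pull_secrets=[AkriConnectorsImagePullSecret(secret_ref="acr-pull-secret")],
    )
)

# The discriminator is set by the subclass constructors, so either variant can be
# passed wherever AkriConnectorsRegistrySettings is expected.
assert endpoint_ref.registry_settings_type == "RegistryEndpointRef"
assert container_registry.registry_settings_type == "ContainerRegistry"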
- :vartype properties: ~azure.mgmt.iotoperations.models.BrokerAuthorizationProperties - :ivar extended_location: Edge location of the resource. Required. - :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation + :ivar method: The authentication method for the MQTT connection. Required. + "ServiceAccountToken" + :vartype method: str or ~azure.mgmt.iotoperations.models.AkriConnectorsMqttAuthenticationMethod """ - properties: Optional["_models.BrokerAuthorizationProperties"] = rest_field() - """The resource-specific properties for this resource.""" - extended_location: "_models.ExtendedLocation" = rest_field(name="extendedLocation", visibility=["read", "create"]) - """Edge location of the resource. Required.""" + __mapping__: Dict[str, _Model] = {} + method: str = rest_discriminator(name="method", visibility=["read", "create", "update", "delete", "query"]) + """The authentication method for the MQTT connection. Required. \"ServiceAccountToken\"""" @overload def __init__( self, *, - extended_location: "_models.ExtendedLocation", - properties: Optional["_models.BrokerAuthorizationProperties"] = None, + method: str, ) -> None: ... @overload @@ -658,36 +450,65 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BrokerDiagnostics(_model_base.Model): - """Broker Diagnostic Setting properties. +class AkriConnectorsMqttConnectionConfiguration(_Model): # pylint: disable=name-too-long + """AkriConnectorsMqttConnectionConfiguration properties. - :ivar logs: Diagnostic log settings for the resource. - :vartype logs: ~azure.mgmt.iotoperations.models.DiagnosticsLogs - :ivar metrics: The metrics settings for the resource. - :vartype metrics: ~azure.mgmt.iotoperations.models.Metrics - :ivar self_check: The self check properties. - :vartype self_check: ~azure.mgmt.iotoperations.models.SelfCheck - :ivar traces: The trace properties. - :vartype traces: ~azure.mgmt.iotoperations.models.Traces + :ivar authentication: Authentication properties. + :vartype authentication: ~azure.mgmt.iotoperations.models.AkriConnectorsMqttAuthentication + :ivar host: Host of the Broker in the form of :. + :vartype host: str + :ivar protocol: The protocol to use for the connection. Currently only ``mqtt`` is supported. + "Mqtt" + :vartype protocol: str or ~azure.mgmt.iotoperations.models.AkriConnectorsMqttProtocolType + :ivar keep_alive_seconds: KeepAlive for connection in seconds. + :vartype keep_alive_seconds: int + :ivar max_inflight_messages: The max number of messages to keep in flight. For subscribe, this + is the receive maximum. For publish, this is the maximum number of messages to send before + waiting for an ack. + :vartype max_inflight_messages: int + :ivar session_expiry_seconds: Session expiry in seconds. + :vartype session_expiry_seconds: int + :ivar tls: TLS configuration. 
+ :vartype tls: ~azure.mgmt.iotoperations.models.TlsProperties """ - logs: Optional["_models.DiagnosticsLogs"] = rest_field() - """Diagnostic log settings for the resource.""" - metrics: Optional["_models.Metrics"] = rest_field() - """The metrics settings for the resource.""" - self_check: Optional["_models.SelfCheck"] = rest_field(name="selfCheck") - """The self check properties.""" - traces: Optional["_models.Traces"] = rest_field() - """The trace properties.""" + authentication: Optional["_models.AkriConnectorsMqttAuthentication"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Authentication properties.""" + host: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Host of the Broker in the form of :.""" + protocol: Optional[Union[str, "_models.AkriConnectorsMqttProtocolType"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The protocol to use for the connection. Currently only ``mqtt`` is supported. \"Mqtt\"""" + keep_alive_seconds: Optional[int] = rest_field( + name="keepAliveSeconds", visibility=["read", "create", "update", "delete", "query"] + ) + """KeepAlive for connection in seconds.""" + max_inflight_messages: Optional[int] = rest_field( + name="maxInflightMessages", visibility=["read", "create", "update", "delete", "query"] + ) + """The max number of messages to keep in flight. For subscribe, this is the receive maximum. For + publish, this is the maximum number of messages to send before waiting for an ack.""" + session_expiry_seconds: Optional[int] = rest_field( + name="sessionExpirySeconds", visibility=["read", "create", "update", "delete", "query"] + ) + """Session expiry in seconds.""" + tls: Optional["_models.TlsProperties"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """TLS configuration.""" @overload def __init__( self, *, - logs: Optional["_models.DiagnosticsLogs"] = None, - metrics: Optional["_models.Metrics"] = None, - self_check: Optional["_models.SelfCheck"] = None, - traces: Optional["_models.Traces"] = None, + authentication: Optional["_models.AkriConnectorsMqttAuthentication"] = None, + host: Optional[str] = None, + protocol: Optional[Union[str, "_models.AkriConnectorsMqttProtocolType"]] = None, + keep_alive_seconds: Optional[int] = None, + max_inflight_messages: Optional[int] = None, + session_expiry_seconds: Optional[int] = None, + tls: Optional["_models.TlsProperties"] = None, ) -> None: ... @overload @@ -701,45 +522,28 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BrokerListenerProperties(_model_base.Model): - """Defines a Broker listener. A listener is a collection of ports on which the broker accepts - connections from clients. - - Readonly variables are only populated by the server, and will be ignored when sending a request. - +class AkriConnectorsRegistryEndpointRef(AkriConnectorsRegistrySettings, discriminator="RegistryEndpointRef"): + """AkriConnectorsRegistryEndpointRef properties. - :ivar service_name: Kubernetes Service name of this listener. - :vartype service_name: str - :ivar ports: Ports on which this listener accepts client connections. Required. - :vartype ports: list[~azure.mgmt.iotoperations.models.ListenerPort] - :ivar service_type: Kubernetes Service type of this listener. Known values are: "ClusterIp", - "LoadBalancer", and "NodePort". 
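Minimal construction sketch for the MQTT connection configuration above (illustrative only, not part of the generated patch; the host and timing values are placeholders).

from azure.mgmt.iotoperations.models import AkriConnectorsMqttConnectionConfiguration

# Every field is an optional keyword argument mapped to its wire name
# (for example keep_alive_seconds -> keepAliveSeconds).
mqtt_config = AkriConnectorsMqttConnectionConfiguration(
    host="aio-broker:18883",  # "<hostname>:<port>" of the Broker (placeholder)
    keep_alive_seconds=60,
    max_inflight_messages=100,
    session_expiry_seconds=3600,
)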
- :vartype service_type: str or ~azure.mgmt.iotoperations.models.ServiceType - :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", - "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". - :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState + :ivar registry_settings_type: The registry endpoint reference. Required. A Registry Endpoint + reference. + :vartype registry_settings_type: str or ~azure.mgmt.iotoperations.models.REGISTRY_ENDPOINT_REF + :ivar registry_endpoint_ref: The name of the registry endpoint. Required. + :vartype registry_endpoint_ref: str """ - service_name: Optional[str] = rest_field(name="serviceName") - """Kubernetes Service name of this listener.""" - ports: List["_models.ListenerPort"] = rest_field() - """Ports on which this listener accepts client connections. Required.""" - service_type: Optional[Union[str, "_models.ServiceType"]] = rest_field(name="serviceType") - """Kubernetes Service type of this listener. Known values are: \"ClusterIp\", \"LoadBalancer\", - and \"NodePort\".""" - provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( - name="provisioningState", visibility=["read"] + registry_settings_type: Literal[AkriConnectorsRegistrySettingsType.REGISTRY_ENDPOINT_REF] = rest_discriminator(name="registrySettingsType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The registry endpoint reference. Required. A Registry Endpoint reference.""" + registry_endpoint_ref: str = rest_field( + name="registryEndpointRef", visibility=["read", "create", "update", "delete", "query"] ) - """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", - \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" + """The name of the registry endpoint. Required.""" @overload def __init__( self, *, - ports: List["_models.ListenerPort"], - service_name: Optional[str] = None, - service_type: Optional[Union[str, "_models.ServiceType"]] = None, + registry_endpoint_ref: str, ) -> None: ... @overload @@ -750,43 +554,36 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - + super().__init__( + *args, registry_settings_type=AkriConnectorsRegistrySettingsType.REGISTRY_ENDPOINT_REF, **kwargs + ) -class BrokerListenerResource(ProxyResource): - """Instance broker resource. - Readonly variables are only populated by the server, and will be ignored when sending a request. +class AkriConnectorsSecret(_Model): + """AkriConnectorsSecret properties. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData - :ivar properties: The resource-specific properties for this resource. - :vartype properties: ~azure.mgmt.iotoperations.models.BrokerListenerProperties - :ivar extended_location: Edge location of the resource. Required. 
- :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation + :ivar secret_key: The key in the secret to be mounted. Required. + :vartype secret_key: str + :ivar secret_alias: The application-defined alias for the secret. Required. + :vartype secret_alias: str + :ivar secret_ref: The name of the secret to be mounted. Required. + :vartype secret_ref: str """ - properties: Optional["_models.BrokerListenerProperties"] = rest_field() - """The resource-specific properties for this resource.""" - extended_location: "_models.ExtendedLocation" = rest_field(name="extendedLocation", visibility=["read", "create"]) - """Edge location of the resource. Required.""" + secret_key: str = rest_field(name="secretKey", visibility=["read", "create", "update", "delete", "query"]) + """The key in the secret to be mounted. Required.""" + secret_alias: str = rest_field(name="secretAlias", visibility=["read", "create", "update", "delete", "query"]) + """The application-defined alias for the secret. Required.""" + secret_ref: str = rest_field(name="secretRef", visibility=["read", "create", "update", "delete", "query"]) + """The name of the secret to be mounted. Required.""" @overload def __init__( self, *, - extended_location: "_models.ExtendedLocation", - properties: Optional["_models.BrokerListenerProperties"] = None, + secret_key: str, + secret_alias: str, + secret_ref: str, ) -> None: ... @overload @@ -800,68 +597,33 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BrokerProperties(_model_base.Model): - """Broker Resource properties. - - Readonly variables are only populated by the server, and will be ignored when sending a request. +class AkriConnectorsServiceAccountAuthentication( + AkriConnectorsMqttAuthentication, discriminator="ServiceAccountToken" +): # pylint: disable=name-too-long + """AkriConnectorsServiceAccountAuthentication properties. - :ivar advanced: Advanced settings of Broker. - :vartype advanced: ~azure.mgmt.iotoperations.models.AdvancedSettings - :ivar cardinality: The cardinality details of the broker. - :vartype cardinality: ~azure.mgmt.iotoperations.models.Cardinality - :ivar diagnostics: Spec defines the desired identities of Broker diagnostics settings. - :vartype diagnostics: ~azure.mgmt.iotoperations.models.BrokerDiagnostics - :ivar disk_backed_message_buffer: Settings of Disk Backed Message Buffer. - :vartype disk_backed_message_buffer: ~azure.mgmt.iotoperations.models.DiskBackedMessageBuffer - :ivar generate_resource_limits: This setting controls whether Kubernetes CPU resource limits - are requested. Increasing the number of replicas or workers proportionally increases the amount - of CPU resources requested. If this setting is enabled and there are insufficient CPU - resources, an error will be emitted. - :vartype generate_resource_limits: ~azure.mgmt.iotoperations.models.GenerateResourceLimits - :ivar memory_profile: Memory profile of Broker. Known values are: "Tiny", "Low", "Medium", and - "High". - :vartype memory_profile: str or ~azure.mgmt.iotoperations.models.BrokerMemoryProfile - :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", - "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". - :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState + :ivar method: The authentication method for the MQTT connection. Required. Service Account + Token authentication. 
+ :vartype method: str or ~azure.mgmt.iotoperations.models.SERVICE_ACCOUNT_TOKEN + :ivar service_account_token_settings: The service account token for the MQTT connection. + Required. + :vartype service_account_token_settings: + ~azure.mgmt.iotoperations.models.AkriConnectorsServiceAccountTokenSettings """ - advanced: Optional["_models.AdvancedSettings"] = rest_field(visibility=["read", "create"]) - """Advanced settings of Broker.""" - cardinality: Optional["_models.Cardinality"] = rest_field(visibility=["read", "create"]) - """The cardinality details of the broker.""" - diagnostics: Optional["_models.BrokerDiagnostics"] = rest_field() - """Spec defines the desired identities of Broker diagnostics settings.""" - disk_backed_message_buffer: Optional["_models.DiskBackedMessageBuffer"] = rest_field( - name="diskBackedMessageBuffer", visibility=["read", "create"] - ) - """Settings of Disk Backed Message Buffer.""" - generate_resource_limits: Optional["_models.GenerateResourceLimits"] = rest_field( - name="generateResourceLimits", visibility=["read", "create"] - ) - """This setting controls whether Kubernetes CPU resource limits are requested. Increasing the - number of replicas or workers proportionally increases the amount of CPU resources requested. - If this setting is enabled and there are insufficient CPU resources, an error will be emitted.""" - memory_profile: Optional[Union[str, "_models.BrokerMemoryProfile"]] = rest_field( - name="memoryProfile", visibility=["read", "create"] + method: Literal[AkriConnectorsMqttAuthenticationMethod.SERVICE_ACCOUNT_TOKEN] = rest_discriminator(name="method", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The authentication method for the MQTT connection. Required. Service Account Token + authentication.""" + service_account_token_settings: "_models.AkriConnectorsServiceAccountTokenSettings" = rest_field( + name="serviceAccountTokenSettings", visibility=["read", "create", "update", "delete", "query"] ) - """Memory profile of Broker. Known values are: \"Tiny\", \"Low\", \"Medium\", and \"High\".""" - provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( - name="provisioningState", visibility=["read"] - ) - """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", - \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" + """The service account token for the MQTT connection. Required.""" @overload def __init__( self, *, - advanced: Optional["_models.AdvancedSettings"] = None, - cardinality: Optional["_models.Cardinality"] = None, - diagnostics: Optional["_models.BrokerDiagnostics"] = None, - disk_backed_message_buffer: Optional["_models.DiskBackedMessageBuffer"] = None, - generate_resource_limits: Optional["_models.GenerateResourceLimits"] = None, - memory_profile: Optional[Union[str, "_models.BrokerMemoryProfile"]] = None, + service_account_token_settings: "_models.AkriConnectorsServiceAccountTokenSettings", ) -> None: ... @overload @@ -872,43 +634,24 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class BrokerResource(ProxyResource): - """Instance broker resource. + super().__init__(*args, method=AkriConnectorsMqttAuthenticationMethod.SERVICE_ACCOUNT_TOKEN, **kwargs) - Readonly variables are only populated by the server, and will be ignored when sending a request. 
+class AkriConnectorsServiceAccountTokenSettings(_Model): # pylint: disable=name-too-long + """AkriConnectorsServiceAccountTokenSettings properties. - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData - :ivar properties: The resource-specific properties for this resource. - :vartype properties: ~azure.mgmt.iotoperations.models.BrokerProperties - :ivar extended_location: Edge location of the resource. Required. - :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation + :ivar audience: The audience for the service account token. Required. + :vartype audience: str """ - properties: Optional["_models.BrokerProperties"] = rest_field() - """The resource-specific properties for this resource.""" - extended_location: "_models.ExtendedLocation" = rest_field(name="extendedLocation", visibility=["read", "create"]) - """Edge location of the resource. Required.""" + audience: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The audience for the service account token. Required.""" @overload def __init__( self, *, - extended_location: "_models.ExtendedLocation", - properties: Optional["_models.BrokerProperties"] = None, + audience: str, ) -> None: ... @overload @@ -922,41 +665,26 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class BrokerResourceRule(_model_base.Model): - """Broker Resource Rule properties. This defines the objects that represent the actions or topics, - such as - method.Connect, method.Publish, etc. - +class AkriConnectorsTag(AkriConnectorsTagDigestSettings, discriminator="Tag"): + """AkriConnectorsTag properties. - :ivar method: Give access for a Broker method (i.e., Connect, Subscribe, or Publish). Required. - Known values are: "Connect", "Publish", and "Subscribe". - :vartype method: str or ~azure.mgmt.iotoperations.models.BrokerResourceDefinitionMethods - :ivar client_ids: A list of client IDs that match the clients. The client IDs are - case-sensitive and must match the client IDs provided by the clients during connection. This - subfield may be set if the method is Connect. - :vartype client_ids: list[str] - :ivar topics: A list of topics or topic patterns that match the topics that the clients can - publish or subscribe to. This subfield is required if the method is Publish or Subscribe. - :vartype topics: list[str] + :ivar tag_digest_type: The tag or digest type. Required. Indicates that a tag should be + specified. + :vartype tag_digest_type: str or ~azure.mgmt.iotoperations.models.TAG + :ivar tag: The tag of the image. Required. + :vartype tag: str """ - method: Union[str, "_models.BrokerResourceDefinitionMethods"] = rest_field() - """Give access for a Broker method (i.e., Connect, Subscribe, or Publish). Required. Known values - are: \"Connect\", \"Publish\", and \"Subscribe\".""" - client_ids: Optional[List[str]] = rest_field(name="clientIds") - """A list of client IDs that match the clients. 
The client IDs are case-sensitive and must match - the client IDs provided by the clients during connection. This subfield may be set if the - method is Connect.""" - topics: Optional[List[str]] = rest_field() - """A list of topics or topic patterns that match the topics that the clients can publish or - subscribe to. This subfield is required if the method is Publish or Subscribe.""" + tag_digest_type: Literal[AkriConnectorsTagDigestType.TAG] = rest_discriminator(name="tagDigestType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The tag or digest type. Required. Indicates that a tag should be specified.""" + tag: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The tag of the image. Required.""" @overload def __init__( self, *, - method: Union[str, "_models.BrokerResourceDefinitionMethods"], - client_ids: Optional[List[str]] = None, - topics: Optional[List[str]] = None, + tag: str, ) -> None: ... @overload @@ -967,30 +695,33 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) + super().__init__(*args, tag_digest_type=AkriConnectorsTagDigestType.TAG, **kwargs) -class Cardinality(_model_base.Model): - """Cardinality properties. - +class AkriConnectorTemplateAioMetadata(_Model): + """AkriConnectorTemplateAioMetadata properties. - :ivar backend_chain: The backend broker desired properties. Required. - :vartype backend_chain: ~azure.mgmt.iotoperations.models.BackendChain - :ivar frontend: The frontend desired properties. Required. - :vartype frontend: ~azure.mgmt.iotoperations.models.Frontend + :ivar aio_min_version: The minimum version of AIO required for the connector. + :vartype aio_min_version: str + :ivar aio_max_version: The maximum version of AIO required for the connector. + :vartype aio_max_version: str """ - backend_chain: "_models.BackendChain" = rest_field(name="backendChain") - """The backend broker desired properties. Required.""" - frontend: "_models.Frontend" = rest_field() - """The frontend desired properties. Required.""" + aio_min_version: Optional[str] = rest_field( + name="aioMinVersion", visibility=["read", "create", "update", "delete", "query"] + ) + """The minimum version of AIO required for the connector.""" + aio_max_version: Optional[str] = rest_field( + name="aioMaxVersion", visibility=["read", "create", "update", "delete", "query"] + ) + """The maximum version of AIO required for the connector.""" @overload def __init__( self, *, - backend_chain: "_models.BackendChain", - frontend: "_models.Frontend", + aio_min_version: Optional[str] = None, + aio_max_version: Optional[str] = None, ) -> None: ... @overload @@ -1004,54 +735,25 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class CertManagerCertificateSpec(_model_base.Model): - """Automatic TLS server certificate management with cert-manager. +class AkriConnectorTemplateAllocation(_Model): + """AkriConnectorTemplateAllocation properties. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AkriConnectorTemplateBucketizedAllocation - :ivar duration: Lifetime of certificate. Must be specified using a Go time.Duration format - (h|m|s). E.g. 240h for 240 hours and 45m for 45 minutes. - :vartype duration: str - :ivar secret_name: Secret for storing server certificate. Any existing data will be - overwritten. 
This is a reference to the secret through an identifying name, not the secret - itself. - :vartype secret_name: str - :ivar renew_before: When to begin renewing certificate. Must be specified using a Go - time.Duration format (h|m|s). E.g. 240h for 240 hours and 45m for 45 minutes. - :vartype renew_before: str - :ivar issuer_ref: cert-manager issuerRef. Required. - :vartype issuer_ref: ~azure.mgmt.iotoperations.models.CertManagerIssuerRef - :ivar private_key: Type of certificate private key. - :vartype private_key: ~azure.mgmt.iotoperations.models.CertManagerPrivateKey - :ivar san: Additional Subject Alternative Names (SANs) to include in the certificate. - :vartype san: ~azure.mgmt.iotoperations.models.SanForCert + :ivar policy: The allocation policy type. Required. "Bucketized" + :vartype policy: str or ~azure.mgmt.iotoperations.models.AkriConnectorTemplateAllocationPolicy """ - duration: Optional[str] = rest_field() - """Lifetime of certificate. Must be specified using a Go time.Duration format (h|m|s). E.g. 240h - for 240 hours and 45m for 45 minutes.""" - secret_name: Optional[str] = rest_field(name="secretName") - """Secret for storing server certificate. Any existing data will be overwritten. This is a - reference to the secret through an identifying name, not the secret itself.""" - renew_before: Optional[str] = rest_field(name="renewBefore") - """When to begin renewing certificate. Must be specified using a Go time.Duration format (h|m|s). - E.g. 240h for 240 hours and 45m for 45 minutes.""" - issuer_ref: "_models.CertManagerIssuerRef" = rest_field(name="issuerRef") - """cert-manager issuerRef. Required.""" - private_key: Optional["_models.CertManagerPrivateKey"] = rest_field(name="privateKey") - """Type of certificate private key.""" - san: Optional["_models.SanForCert"] = rest_field() - """Additional Subject Alternative Names (SANs) to include in the certificate.""" + __mapping__: Dict[str, _Model] = {} + policy: str = rest_discriminator(name="policy", visibility=["read", "create", "update", "delete", "query"]) + """The allocation policy type. Required. \"Bucketized\"""" @overload def __init__( self, *, - issuer_ref: "_models.CertManagerIssuerRef", - duration: Optional[str] = None, - secret_name: Optional[str] = None, - renew_before: Optional[str] = None, - private_key: Optional["_models.CertManagerPrivateKey"] = None, - san: Optional["_models.SanForCert"] = None, + policy: str, ) -> None: ... @overload @@ -1065,36 +767,27 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class CertManagerCertOptions(_model_base.Model): - """Cert Manager Cert properties. - +class AkriConnectorTemplateBucketizedAllocation( + AkriConnectorTemplateAllocation, discriminator="Bucketized" +): # pylint: disable=name-too-long + """AkriConnectorTemplateBucketizedAllocation properties. - :ivar duration: Lifetime of certificate. Must be specified using a Go time.Duration format - (h|m|s). E.g. 240h for 240 hours and 45m for 45 minutes. Required. - :vartype duration: str - :ivar renew_before: When to begin renewing certificate. Must be specified using a Go - time.Duration format (h|m|s). E.g. 240h for 240 hours and 45m for 45 minutes. Required. - :vartype renew_before: str - :ivar private_key: Configuration of certificate private key. Required. - :vartype private_key: ~azure.mgmt.iotoperations.models.CertManagerPrivateKey + :ivar policy: The allocation policy type. Required. Bucketized allocation policy. 
+ :vartype policy: str or ~azure.mgmt.iotoperations.models.BUCKETIZED + :ivar bucket_size: The bucketized allocation of AEPs for connectors. Required. + :vartype bucket_size: int """ - duration: str = rest_field() - """Lifetime of certificate. Must be specified using a Go time.Duration format (h|m|s). E.g. 240h - for 240 hours and 45m for 45 minutes. Required.""" - renew_before: str = rest_field(name="renewBefore") - """When to begin renewing certificate. Must be specified using a Go time.Duration format (h|m|s). - E.g. 240h for 240 hours and 45m for 45 minutes. Required.""" - private_key: "_models.CertManagerPrivateKey" = rest_field(name="privateKey") - """Configuration of certificate private key. Required.""" + policy: Literal[AkriConnectorTemplateAllocationPolicy.BUCKETIZED] = rest_discriminator(name="policy", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The allocation policy type. Required. Bucketized allocation policy.""" + bucket_size: int = rest_field(name="bucketSize", visibility=["read", "create", "update", "delete", "query"]) + """The bucketized allocation of AEPs for connectors. Required.""" @overload def __init__( self, *, - duration: str, - renew_before: str, - private_key: "_models.CertManagerPrivateKey", + bucket_size: int, ) -> None: ... @overload @@ -1105,37 +798,57 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class CertManagerIssuerRef(_model_base.Model): - """Cert-Manager issuerRef properties. - - - :ivar group: group of issuer. Required. - :vartype group: str - :ivar kind: kind of issuer (Issuer or ClusterIssuer). Required. Known values are: "Issuer" and - "ClusterIssuer". - :vartype kind: str or ~azure.mgmt.iotoperations.models.CertManagerIssuerKind - :ivar name: name of issuer. Required. - :vartype name: str + super().__init__(*args, policy=AkriConnectorTemplateAllocationPolicy.BUCKETIZED, **kwargs) + + +class AkriConnectorTemplateDeviceInboundEndpointConfigurationSchemaRefs(_Model): # pylint: disable=name-too-long + """AkriConnectorTemplateDeviceInboundEndpointConfigurationSchemaRefs properties. + + :ivar default_dataset_config_schema_ref: The default configuration schema reference for + datasets. + :vartype default_dataset_config_schema_ref: str + :ivar default_events_config_schema_ref: The default configuration schema reference for events. + :vartype default_events_config_schema_ref: str + :ivar default_process_control_config_schema_ref: The default configuration schema reference for + process control. + :vartype default_process_control_config_schema_ref: str + :ivar default_streams_config_schema_ref: The default configuration schema reference for + streams. + :vartype default_streams_config_schema_ref: str + :ivar additional_config_schema_ref: The additional configuration schema reference. + :vartype additional_config_schema_ref: str """ - group: str = rest_field() - """group of issuer. Required.""" - kind: Union[str, "_models.CertManagerIssuerKind"] = rest_field() - """kind of issuer (Issuer or ClusterIssuer). Required. Known values are: \"Issuer\" and - \"ClusterIssuer\".""" - name: str = rest_field() - """name of issuer. 
Required.""" + default_dataset_config_schema_ref: Optional[str] = rest_field( + name="defaultDatasetConfigSchemaRef", visibility=["read", "create", "update", "delete", "query"] + ) + """The default configuration schema reference for datasets.""" + default_events_config_schema_ref: Optional[str] = rest_field( + name="defaultEventsConfigSchemaRef", visibility=["read", "create", "update", "delete", "query"] + ) + """The default configuration schema reference for events.""" + default_process_control_config_schema_ref: Optional[str] = rest_field( + name="defaultProcessControlConfigSchemaRef", visibility=["read", "create", "update", "delete", "query"] + ) + """The default configuration schema reference for process control.""" + default_streams_config_schema_ref: Optional[str] = rest_field( + name="defaultStreamsConfigSchemaRef", visibility=["read", "create", "update", "delete", "query"] + ) + """The default configuration schema reference for streams.""" + additional_config_schema_ref: Optional[str] = rest_field( + name="additionalConfigSchemaRef", visibility=["read", "create", "update", "delete", "query"] + ) + """The additional configuration schema reference.""" @overload def __init__( self, *, - group: str, - kind: Union[str, "_models.CertManagerIssuerKind"], - name: str, + default_dataset_config_schema_ref: Optional[str] = None, + default_events_config_schema_ref: Optional[str] = None, + default_process_control_config_schema_ref: Optional[str] = None, + default_streams_config_schema_ref: Optional[str] = None, + additional_config_schema_ref: Optional[str] = None, ) -> None: ... @overload @@ -1149,30 +862,37 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class CertManagerPrivateKey(_model_base.Model): - """Cert Manager private key properties. - +class AkriConnectorTemplateDeviceInboundEndpointType(_Model): # pylint: disable=name-too-long + """AkriConnectorTemplateDeviceInboundEndpointType properties. - :ivar algorithm: algorithm for private key. Required. Known values are: "Ec256", "Ec384", - "Ec521", "Ed25519", "Rsa2048", "Rsa4096", and "Rsa8192". - :vartype algorithm: str or ~azure.mgmt.iotoperations.models.PrivateKeyAlgorithm - :ivar rotation_policy: cert-manager private key rotationPolicy. Required. Known values are: - "Always" and "Never". - :vartype rotation_policy: str or ~azure.mgmt.iotoperations.models.PrivateKeyRotationPolicy + :ivar endpoint_type: The type of the device inbound endpoint. Required. + :vartype endpoint_type: str + :ivar version: The version of the device inbound endpoint. Required. + :vartype version: str + :ivar configuration_schema_refs: The configuration schema references for the device inbound + endpoint. + :vartype configuration_schema_refs: + ~azure.mgmt.iotoperations.models.AkriConnectorTemplateDeviceInboundEndpointConfigurationSchemaRefs """ - algorithm: Union[str, "_models.PrivateKeyAlgorithm"] = rest_field() - """algorithm for private key. Required. Known values are: \"Ec256\", \"Ec384\", \"Ec521\", - \"Ed25519\", \"Rsa2048\", \"Rsa4096\", and \"Rsa8192\".""" - rotation_policy: Union[str, "_models.PrivateKeyRotationPolicy"] = rest_field(name="rotationPolicy") - """cert-manager private key rotationPolicy. Required. Known values are: \"Always\" and \"Never\".""" + endpoint_type: str = rest_field(name="endpointType", visibility=["read", "create", "update", "delete", "query"]) + """The type of the device inbound endpoint. 
Required.""" + version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The version of the device inbound endpoint. Required.""" + configuration_schema_refs: Optional["_models.AkriConnectorTemplateDeviceInboundEndpointConfigurationSchemaRefs"] = ( + rest_field(name="configurationSchemaRefs", visibility=["read", "create", "update", "delete", "query"]) + ) + """The configuration schema references for the device inbound endpoint.""" @overload def __init__( self, *, - algorithm: Union[str, "_models.PrivateKeyAlgorithm"], - rotation_policy: Union[str, "_models.PrivateKeyRotationPolicy"], + endpoint_type: str, + version: str, + configuration_schema_refs: Optional[ + "_models.AkriConnectorTemplateDeviceInboundEndpointConfigurationSchemaRefs" + ] = None, ) -> None: ... @overload @@ -1186,47 +906,23 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ClientConfig(_model_base.Model): - """The settings of Client Config. +class AkriConnectorTemplateDiagnostics(_Model): + """AkriConnectorTemplateDiagnostics properties. - :ivar max_session_expiry_seconds: Upper bound of Session Expiry Interval, in seconds. - :vartype max_session_expiry_seconds: int - :ivar max_message_expiry_seconds: Upper bound of Message Expiry Interval, in seconds. - :vartype max_message_expiry_seconds: int - :ivar max_packet_size_bytes: Max message size for a packet in Bytes. - :vartype max_packet_size_bytes: int - :ivar subscriber_queue_limit: The limit on the number of queued messages for a subscriber. - :vartype subscriber_queue_limit: ~azure.mgmt.iotoperations.models.SubscriberQueueLimit - :ivar max_receive_maximum: Upper bound of Receive Maximum that a client can request in the - CONNECT packet. - :vartype max_receive_maximum: int - :ivar max_keep_alive_seconds: Upper bound of a client's Keep Alive, in seconds. - :vartype max_keep_alive_seconds: int + :ivar logs: The log settings for the Connector template. Required. + :vartype logs: ~azure.mgmt.iotoperations.models.AkriConnectorsDiagnosticsLogs """ - max_session_expiry_seconds: Optional[int] = rest_field(name="maxSessionExpirySeconds") - """Upper bound of Session Expiry Interval, in seconds.""" - max_message_expiry_seconds: Optional[int] = rest_field(name="maxMessageExpirySeconds") - """Upper bound of Message Expiry Interval, in seconds.""" - max_packet_size_bytes: Optional[int] = rest_field(name="maxPacketSizeBytes") - """Max message size for a packet in Bytes.""" - subscriber_queue_limit: Optional["_models.SubscriberQueueLimit"] = rest_field(name="subscriberQueueLimit") - """The limit on the number of queued messages for a subscriber.""" - max_receive_maximum: Optional[int] = rest_field(name="maxReceiveMaximum") - """Upper bound of Receive Maximum that a client can request in the CONNECT packet.""" - max_keep_alive_seconds: Optional[int] = rest_field(name="maxKeepAliveSeconds") - """Upper bound of a client's Keep Alive, in seconds.""" + logs: "_models.AkriConnectorsDiagnosticsLogs" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The log settings for the Connector template. 
Required.""" @overload def __init__( self, *, - max_session_expiry_seconds: Optional[int] = None, - max_message_expiry_seconds: Optional[int] = None, - max_packet_size_bytes: Optional[int] = None, - subscriber_queue_limit: Optional["_models.SubscriberQueueLimit"] = None, - max_receive_maximum: Optional[int] = None, - max_keep_alive_seconds: Optional[int] = None, + logs: "_models.AkriConnectorsDiagnosticsLogs", ) -> None: ... @overload @@ -1240,45 +936,39 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowBuiltInTransformationDataset(_model_base.Model): - """Dataflow BuiltIn Transformation dataset properties. - +class AkriConnectorTemplateHelmAdvancedConfiguration(_Model): # pylint: disable=name-too-long + """AkriConnectorTemplateHelmAdvancedConfiguration properties. - :ivar key: The key of the dataset. Required. - :vartype key: str - :ivar description: A user provided optional description of the dataset. - :vartype description: str - :ivar schema_ref: The reference to the schema that describes the dataset. Allowed: JSON - Schema/draft-7. - :vartype schema_ref: str - :ivar inputs: List of fields for enriching from the Broker State Store. Required. - :vartype inputs: list[str] - :ivar expression: Condition to enrich data from Broker State Store. Example: $1 < 0 || $1 > $2 - (Assuming inputs section $1 and $2 are provided). - :vartype expression: str + :ivar delete: Delete operation configuration for the Helm chart. + :vartype delete: ~azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmDeleteConfiguration + :ivar install: Install operation configuration for the Helm chart. + :vartype install: + ~azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmInstallConfiguration + :ivar upgrade: Upgrade operation configuration for the Helm chart. + :vartype upgrade: + ~azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmUpgradeConfiguration """ - key: str = rest_field() - """The key of the dataset. Required.""" - description: Optional[str] = rest_field() - """A user provided optional description of the dataset.""" - schema_ref: Optional[str] = rest_field(name="schemaRef") - """The reference to the schema that describes the dataset. Allowed: JSON Schema/draft-7.""" - inputs: List[str] = rest_field() - """List of fields for enriching from the Broker State Store. Required.""" - expression: Optional[str] = rest_field() - """Condition to enrich data from Broker State Store. 
Example: $1 < 0 || $1 > $2 (Assuming inputs - section $1 and $2 are provided).""" + delete: Optional["_models.AkriConnectorTemplateHelmDeleteConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Delete operation configuration for the Helm chart.""" + install: Optional["_models.AkriConnectorTemplateHelmInstallConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Install operation configuration for the Helm chart.""" + upgrade: Optional["_models.AkriConnectorTemplateHelmUpgradeConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Upgrade operation configuration for the Helm chart.""" @overload def __init__( self, *, - key: str, - inputs: List[str], - description: Optional[str] = None, - schema_ref: Optional[str] = None, - expression: Optional[str] = None, + delete: Optional["_models.AkriConnectorTemplateHelmDeleteConfiguration"] = None, + install: Optional["_models.AkriConnectorTemplateHelmInstallConfiguration"] = None, + upgrade: Optional["_models.AkriConnectorTemplateHelmUpgradeConfiguration"] = None, ) -> None: ... @overload @@ -1292,41 +982,31 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowBuiltInTransformationFilter(_model_base.Model): - """Dataflow BuiltIn Transformation filter properties. - +class AkriConnectorTemplateHelmAuthSecretRef(_Model): + """AkriConnectorTemplateHelmAuthSecretRef properties. - :ivar type: The type of dataflow operation. "Filter" - :vartype type: str or ~azure.mgmt.iotoperations.models.FilterType - :ivar description: A user provided optional description of the filter. - :vartype description: str - :ivar inputs: List of fields for filtering in JSON path expression. Required. - :vartype inputs: list[str] - :ivar expression: Condition to filter data. Can reference input fields with {n} where n is the - index of the input field starting from 1. Example: $1 < 0 || $1 > $2 (Assuming inputs section - $1 and $2 are provided). Required. - :vartype expression: str + :ivar secret_ref: The name of the secret. Required. + :vartype secret_ref: str + :ivar password_key: The key of the password in the secret. Required. + :vartype password_key: str + :ivar username_key: The key of the username in the secret. Required. + :vartype username_key: str """ - type: Optional[Union[str, "_models.FilterType"]] = rest_field() - """The type of dataflow operation. \"Filter\"""" - description: Optional[str] = rest_field() - """A user provided optional description of the filter.""" - inputs: List[str] = rest_field() - """List of fields for filtering in JSON path expression. Required.""" - expression: str = rest_field() - """Condition to filter data. Can reference input fields with {n} where n is the index of the input - field starting from 1. Example: $1 < 0 || $1 > $2 (Assuming inputs section $1 and $2 are - provided). Required.""" + secret_ref: str = rest_field(name="secretRef", visibility=["read", "create", "update", "delete", "query"]) + """The name of the secret. Required.""" + password_key: str = rest_field(name="passwordKey", visibility=["read", "create", "update", "delete", "query"]) + """The key of the password in the secret. Required.""" + username_key: str = rest_field(name="usernameKey", visibility=["read", "create", "update", "delete", "query"]) + """The key of the username in the secret. 
Required.""" @overload def __init__( self, *, - inputs: List[str], - expression: str, - type: Optional[Union[str, "_models.FilterType"]] = None, - description: Optional[str] = None, + secret_ref: str, + password_key: str, + username_key: str, ) -> None: ... @overload @@ -1340,46 +1020,30 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowBuiltInTransformationMap(_model_base.Model): - """Dataflow BuiltIn Transformation map properties. +class AkriConnectorTemplateRuntimeConfiguration(_Model): # pylint: disable=name-too-long + """AkriConnectorTemplateRuntimeConfiguration properties. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AkriConnectorTemplateHelmConfiguration, AkriConnectorTemplateManagedConfiguration - :ivar type: Type of transformation. Known values are: "NewProperties", "Rename", "Compute", - "PassThrough", and "BuiltInFunction". - :vartype type: str or ~azure.mgmt.iotoperations.models.DataflowMappingType - :ivar description: A user provided optional description of the mapping function. - :vartype description: str - :ivar inputs: List of fields for mapping in JSON path expression. Required. - :vartype inputs: list[str] - :ivar expression: Modify the inputs field(s) to the final output field. Example: $1 * 2.2 - (Assuming inputs section $1 is provided). - :vartype expression: str - :ivar output: Where and how the input fields to be organized in the output record. Required. - :vartype output: str + :ivar runtime_configuration_type: Runtime configuration type for the Connector template. + Required. Known values are: "HelmConfiguration" and "ManagedConfiguration". + :vartype runtime_configuration_type: str or + ~azure.mgmt.iotoperations.models.AkriConnectorTemplateRuntimeConfigurationType """ - type: Optional[Union[str, "_models.DataflowMappingType"]] = rest_field() - """Type of transformation. Known values are: \"NewProperties\", \"Rename\", \"Compute\", - \"PassThrough\", and \"BuiltInFunction\".""" - description: Optional[str] = rest_field() - """A user provided optional description of the mapping function.""" - inputs: List[str] = rest_field() - """List of fields for mapping in JSON path expression. Required.""" - expression: Optional[str] = rest_field() - """Modify the inputs field(s) to the final output field. Example: $1 * 2.2 (Assuming inputs - section $1 is provided).""" - output: str = rest_field() - """Where and how the input fields to be organized in the output record. Required.""" + __mapping__: Dict[str, _Model] = {} + runtime_configuration_type: str = rest_discriminator( + name="runtimeConfigurationType", visibility=["read", "create", "update", "delete", "query"] + ) + """Runtime configuration type for the Connector template. Required. Known values are: + \"HelmConfiguration\" and \"ManagedConfiguration\".""" @overload def __init__( self, *, - inputs: List[str], - output: str, - type: Optional[Union[str, "_models.DataflowMappingType"]] = None, - description: Optional[str] = None, - expression: Optional[str] = None, + runtime_configuration_type: str, ) -> None: ... @overload @@ -1393,49 +1057,31 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowBuiltInTransformationSettings(_model_base.Model): - """Dataflow BuiltIn Transformation properties. 
+class AkriConnectorTemplateHelmConfiguration( + AkriConnectorTemplateRuntimeConfiguration, discriminator="HelmConfiguration" +): + """AkriConnectorTemplateHelmConfiguration properties. - :ivar serialization_format: Serialization format. Optional; defaults to JSON. Allowed value - JSON Schema/draft-7, Parquet. Default: Json. Known values are: "Delta", "Json", and "Parquet". - :vartype serialization_format: str or - ~azure.mgmt.iotoperations.models.TransformationSerializationFormat - :ivar schema_ref: Reference to the schema that describes the output of the transformation. - :vartype schema_ref: str - :ivar datasets: Enrich data from Broker State Store. Dataset references a key in Broker State - Store. - :vartype datasets: list[~azure.mgmt.iotoperations.models.DataflowBuiltInTransformationDataset] - :ivar filter: Filters input record or datapoints based on condition. - :vartype filter: list[~azure.mgmt.iotoperations.models.DataflowBuiltInTransformationFilter] - :ivar map: Maps input to output message. - :vartype map: list[~azure.mgmt.iotoperations.models.DataflowBuiltInTransformationMap] + :ivar runtime_configuration_type: The runtime configuration type for the Connector template. + Required. Helm Configuration Type. + :vartype runtime_configuration_type: str or ~azure.mgmt.iotoperations.models.HELM_CONFIGURATION + :ivar helm_configuration_settings: The Helm configuration settings. Required. + :vartype helm_configuration_settings: + ~azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmConfigurationSettings """ - serialization_format: Optional[Union[str, "_models.TransformationSerializationFormat"]] = rest_field( - name="serializationFormat" + runtime_configuration_type: Literal[AkriConnectorTemplateRuntimeConfigurationType.HELM_CONFIGURATION] = rest_discriminator(name="runtimeConfigurationType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The runtime configuration type for the Connector template. Required. Helm Configuration Type.""" + helm_configuration_settings: "_models.AkriConnectorTemplateHelmConfigurationSettings" = rest_field( + name="helmConfigurationSettings", visibility=["read", "create", "update", "delete", "query"] ) - """Serialization format. Optional; defaults to JSON. Allowed value JSON Schema/draft-7, Parquet. - Default: Json. Known values are: \"Delta\", \"Json\", and \"Parquet\".""" - schema_ref: Optional[str] = rest_field(name="schemaRef") - """Reference to the schema that describes the output of the transformation.""" - datasets: Optional[List["_models.DataflowBuiltInTransformationDataset"]] = rest_field() - """Enrich data from Broker State Store. Dataset references a key in Broker State Store.""" - filter: Optional[List["_models.DataflowBuiltInTransformationFilter"]] = rest_field() - """Filters input record or datapoints based on condition.""" - map: Optional[List["_models.DataflowBuiltInTransformationMap"]] = rest_field() - """Maps input to output message.""" + """The Helm configuration settings. 
Required.""" @overload def __init__( self, *, - serialization_format: Optional[Union[str, "_models.TransformationSerializationFormat"]] = None, - schema_ref: Optional[str] = None, - datasets: Optional[List["_models.DataflowBuiltInTransformationDataset"]] = None, - filter: Optional[ - List["_models.DataflowBuiltInTransformationFilter"] - ] = None, # pylint: disable=redefined-builtin - map: Optional[List["_models.DataflowBuiltInTransformationMap"]] = None, + helm_configuration_settings: "_models.AkriConnectorTemplateHelmConfigurationSettings", ) -> None: ... @overload @@ -1446,34 +1092,58 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - + super().__init__( + *args, runtime_configuration_type=AkriConnectorTemplateRuntimeConfigurationType.HELM_CONFIGURATION, **kwargs + ) -class DataflowDestinationOperationSettings(_model_base.Model): - """Dataflow Destination Operation properties. +class AkriConnectorTemplateHelmConfigurationSettings(_Model): # pylint: disable=name-too-long + """AkriConnectorTemplateHelmConfiguration properties. - :ivar endpoint_ref: Reference to the Endpoint CR. Can be of Broker, Kafka, Fabric, ADLS, ADX - type. Required. - :vartype endpoint_ref: str - :ivar data_destination: Destination location, can be a topic or table name. Supports dynamic - values with $topic, $systemProperties, $userProperties, $payload, $context, and $subscription. - Required. - :vartype data_destination: str + :ivar registry_settings: The registry settings for the helm chart to be used. + :vartype registry_settings: + ~azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmRegistrySettings + :ivar release_name: The release name of the Helm chart. Required. + :vartype release_name: str + :ivar version: The version of the Helm chart. Required. + :vartype version: str + :ivar advanced_configuration: Advanced configuration for the Helm chart. + Install, upgrade, and uninstall options for the helm chart such as atomic, wait, timeout, + ``wait_for_jobs``, and ``disable_hooks``. + :vartype advanced_configuration: + ~azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmAdvancedConfiguration + :ivar values_property: A map of values to pass to the helm chart. + :vartype values_property: dict[str, str] """ - endpoint_ref: str = rest_field(name="endpointRef") - """Reference to the Endpoint CR. Can be of Broker, Kafka, Fabric, ADLS, ADX type. Required.""" - data_destination: str = rest_field(name="dataDestination") - """Destination location, can be a topic or table name. Supports dynamic values with $topic, - $systemProperties, $userProperties, $payload, $context, and $subscription. Required.""" + registry_settings: Optional["_models.AkriConnectorTemplateHelmRegistrySettings"] = rest_field( + name="registrySettings", visibility=["read", "create", "update", "delete", "query"] + ) + """The registry settings for the helm chart to be used.""" + release_name: str = rest_field(name="releaseName", visibility=["read", "create", "update", "delete", "query"]) + """The release name of the Helm chart. Required.""" + version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The version of the Helm chart. Required.""" + advanced_configuration: Optional["_models.AkriConnectorTemplateHelmAdvancedConfiguration"] = rest_field( + name="advancedConfiguration", visibility=["read", "create", "update", "delete", "query"] + ) + """Advanced configuration for the Helm chart. 
+ Install, upgrade, and uninstall options for the helm chart such as atomic, wait, timeout, + ``wait_for_jobs``, and ``disable_hooks``.""" + values_property: Optional[Dict[str, str]] = rest_field( + name="values", visibility=["read", "create", "update", "delete", "query"] + ) + """A map of values to pass to the helm chart.""" @overload def __init__( self, *, - endpoint_ref: str, - data_destination: str, + release_name: str, + version: str, + registry_settings: Optional["_models.AkriConnectorTemplateHelmRegistrySettings"] = None, + advanced_configuration: Optional["_models.AkriConnectorTemplateHelmAdvancedConfiguration"] = None, + values_property: Optional[Dict[str, str]] = None, ) -> None: ... @overload @@ -1487,22 +1157,29 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowEndpointAuthenticationAccessToken(_model_base.Model): # pylint: disable=name-too-long - """DataflowEndpoint Authentication Access Token properties. +class AkriConnectorTemplateHelmRegistrySettings(_Model): # pylint: disable=name-too-long + """AkriConnectorTemplateHelmRegistrySettings properties. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AkriConnectorTemplateHelmContainerRegistry, AkriConnectorTemplateHelmRegistryEndpointRef - :ivar secret_ref: Token secret name. Required. - :vartype secret_ref: str + :ivar registry_settings_type: Required. Known values are: "RegistryEndpointRef" and + "ContainerRegistry". + :vartype registry_settings_type: str or + ~azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmRegistrySettingsType """ - secret_ref: str = rest_field(name="secretRef") - """Token secret name. Required.""" + __mapping__: Dict[str, _Model] = {} + registry_settings_type: str = rest_discriminator( + name="registrySettingsType", visibility=["read", "create", "update", "delete", "query"] + ) + """Required. Known values are: \"RegistryEndpointRef\" and \"ContainerRegistry\".""" @overload def __init__( self, *, - secret_ref: str, + registry_settings_type: str, ) -> None: ... @overload @@ -1516,30 +1193,31 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowEndpointAuthenticationSasl(_model_base.Model): - """DataflowEndpoint Authentication Sasl properties. - +class AkriConnectorTemplateHelmContainerRegistry( + AkriConnectorTemplateHelmRegistrySettings, discriminator="ContainerRegistry" +): # pylint: disable=name-too-long + """AkriConnectorTemplateHelmContainerRegistry properties. - :ivar sasl_type: Type of SASL authentication. Can be PLAIN, SCRAM-SHA-256, or SCRAM-SHA-512. - Required. Known values are: "Plain", "ScramSha256", and "ScramSha512". - :vartype sasl_type: str or - ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationSaslType - :ivar secret_ref: Token secret name. Required. - :vartype secret_ref: str + :ivar registry_settings_type: The container registry reference. Required. A Container Registry + reference. + :vartype registry_settings_type: str or ~azure.mgmt.iotoperations.models.CONTAINER_REGISTRY + :ivar container_registry_settings: The registry settings for the container registry. Required. + :vartype container_registry_settings: + ~azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmContainerRegistrySettings """ - sasl_type: Union[str, "_models.DataflowEndpointAuthenticationSaslType"] = rest_field(name="saslType") - """Type of SASL authentication. Can be PLAIN, SCRAM-SHA-256, or SCRAM-SHA-512. Required. 
Known - values are: \"Plain\", \"ScramSha256\", and \"ScramSha512\".""" - secret_ref: str = rest_field(name="secretRef") - """Token secret name. Required.""" + registry_settings_type: Literal[AkriConnectorTemplateHelmRegistrySettingsType.CONTAINER_REGISTRY] = rest_discriminator(name="registrySettingsType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The container registry reference. Required. A Container Registry reference.""" + container_registry_settings: "_models.AkriConnectorTemplateHelmContainerRegistrySettings" = rest_field( + name="containerRegistrySettings", visibility=["read", "create", "update", "delete", "query"] + ) + """The registry settings for the container registry. Required.""" @overload def __init__( self, *, - sasl_type: Union[str, "_models.DataflowEndpointAuthenticationSaslType"], - secret_ref: str, + container_registry_settings: "_models.AkriConnectorTemplateHelmContainerRegistrySettings", ) -> None: ... @overload @@ -1550,27 +1228,40 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class DataflowEndpointAuthenticationServiceAccountToken(_model_base.Model): # pylint: disable=name-too-long - """Service Account Token for BrokerAuthentication. - - - :ivar audience: Audience of the service account. Optional, defaults to the broker internal - service account audience. Required. - :vartype audience: str + super().__init__( + *args, registry_settings_type=AkriConnectorTemplateHelmRegistrySettingsType.CONTAINER_REGISTRY, **kwargs + ) + + +class AkriConnectorTemplateHelmContainerRegistrySettings(_Model): # pylint: disable=name-too-long + """AkriConnectorTemplateHelmContainerRegistrySettings properties. + + :ivar registry: The registry to use for the Helm chart. Required. + :vartype registry: str + :ivar repository: The repository to use for the Helm chart. Required. + :vartype repository: str + :ivar auth_secret_ref: Optional reference to a secret in the same namespace to use for pulling + the Helm chart. + :vartype auth_secret_ref: + ~azure.mgmt.iotoperations.models.AkriConnectorTemplateHelmAuthSecretRef """ - audience: str = rest_field() - """Audience of the service account. Optional, defaults to the broker internal service account - audience. Required.""" + registry: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The registry to use for the Helm chart. Required.""" + repository: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The repository to use for the Helm chart. Required.""" + auth_secret_ref: Optional["_models.AkriConnectorTemplateHelmAuthSecretRef"] = rest_field( + name="authSecretRef", visibility=["read", "create", "update", "delete", "query"] + ) + """Optional reference to a secret in the same namespace to use for pulling the Helm chart.""" @overload def __init__( self, *, - audience: str, + registry: str, + repository: str, + auth_secret_ref: Optional["_models.AkriConnectorTemplateHelmAuthSecretRef"] = None, ) -> None: ... @overload @@ -1584,23 +1275,45 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowEndpointAuthenticationSystemAssignedManagedIdentity(_model_base.Model): # pylint: disable=name-too-long - """DataflowEndpoint Authentication SystemAssignedManagedIdentity properties. 
+class AkriConnectorTemplateHelmDeleteConfiguration(_Model): # pylint: disable=name-too-long + """AkriConnectorTemplateHelmDeleteConfiguration properties. - :ivar audience: Audience of the service to authenticate against. Optional; defaults to the - audience for Service host configuration. - :vartype audience: str + :ivar timeout: The timeout for the operation in seconds. + :vartype timeout: int + :ivar wait_for_jobs: The wait for jobs flag. + :vartype wait_for_jobs: bool + :ivar atomic: Atomic flag for the operation. + :vartype atomic: bool + :ivar disable_hooks: Disable hooks flag for the operation. + :vartype disable_hooks: bool + :ivar wait: The wait flag for the operation. + :vartype wait: bool """ - audience: Optional[str] = rest_field() - """Audience of the service to authenticate against. Optional; defaults to the audience for Service - host configuration.""" + timeout: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The timeout for the operation in seconds.""" + wait_for_jobs: Optional[bool] = rest_field( + name="waitForJobs", visibility=["read", "create", "update", "delete", "query"] + ) + """The wait for jobs flag.""" + atomic: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Atomic flag for the operation.""" + disable_hooks: Optional[bool] = rest_field( + name="disableHooks", visibility=["read", "create", "update", "delete", "query"] + ) + """Disable hooks flag for the operation.""" + wait: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The wait flag for the operation.""" @overload def __init__( self, *, - audience: Optional[str] = None, + timeout: Optional[int] = None, + wait_for_jobs: Optional[bool] = None, + atomic: Optional[bool] = None, + disable_hooks: Optional[bool] = None, + wait: Optional[bool] = None, ) -> None: ... @overload @@ -1614,33 +1327,45 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowEndpointAuthenticationUserAssignedManagedIdentity(_model_base.Model): # pylint: disable=name-too-long - """DataflowEndpoint Authentication UserAssignedManagedIdentity properties. - +class AkriConnectorTemplateHelmInstallConfiguration(_Model): # pylint: disable=name-too-long + """AkriConnectorTemplateHelmInstallConfiguration properties. - :ivar client_id: Client ID for the user-assigned managed identity. Required. - :vartype client_id: str - :ivar scope: Resource identifier (application ID URI) of the resource, affixed with the - .default suffix. - :vartype scope: str - :ivar tenant_id: Tenant ID. Required. - :vartype tenant_id: str + :ivar timeout: The timeout for the operation in seconds. + :vartype timeout: int + :ivar wait_for_jobs: The wait for jobs flag. + :vartype wait_for_jobs: bool + :ivar atomic: Atomic flag for the operation. + :vartype atomic: bool + :ivar disable_hooks: Disable hooks flag for the operation. + :vartype disable_hooks: bool + :ivar wait: The wait flag for the operation. + :vartype wait: bool """ - client_id: str = rest_field(name="clientId") - """Client ID for the user-assigned managed identity. Required.""" - scope: Optional[str] = rest_field() - """Resource identifier (application ID URI) of the resource, affixed with the .default suffix.""" - tenant_id: str = rest_field(name="tenantId") - """Tenant ID. 
Required.""" + timeout: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The timeout for the operation in seconds.""" + wait_for_jobs: Optional[bool] = rest_field( + name="waitForJobs", visibility=["read", "create", "update", "delete", "query"] + ) + """The wait for jobs flag.""" + atomic: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Atomic flag for the operation.""" + disable_hooks: Optional[bool] = rest_field( + name="disableHooks", visibility=["read", "create", "update", "delete", "query"] + ) + """Disable hooks flag for the operation.""" + wait: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The wait flag for the operation.""" @overload def __init__( self, *, - client_id: str, - tenant_id: str, - scope: Optional[str] = None, + timeout: Optional[int] = None, + wait_for_jobs: Optional[bool] = None, + atomic: Optional[bool] = None, + disable_hooks: Optional[bool] = None, + wait: Optional[bool] = None, ) -> None: ... @overload @@ -1654,22 +1379,30 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowEndpointAuthenticationX509(_model_base.Model): - """DataflowEndpoint Authentication X509 properties. - +class AkriConnectorTemplateHelmRegistryEndpointRef( + AkriConnectorTemplateHelmRegistrySettings, discriminator="RegistryEndpointRef" +): # pylint: disable=name-too-long + """AkriConnectorTemplateHelmRegistryEndpointRef properties. - :ivar secret_ref: Secret reference of the X.509 certificate. Required. - :vartype secret_ref: str + :ivar registry_settings_type: The registry endpoint reference. Required. A Registry Endpoint + reference. + :vartype registry_settings_type: str or ~azure.mgmt.iotoperations.models.REGISTRY_ENDPOINT_REF + :ivar registry_endpoint_ref: The name of the registry endpoint. Required. + :vartype registry_endpoint_ref: str """ - secret_ref: str = rest_field(name="secretRef") - """Secret reference of the X.509 certificate. Required.""" + registry_settings_type: Literal[AkriConnectorTemplateHelmRegistrySettingsType.REGISTRY_ENDPOINT_REF] = rest_discriminator(name="registrySettingsType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The registry endpoint reference. Required. A Registry Endpoint reference.""" + registry_endpoint_ref: str = rest_field( + name="registryEndpointRef", visibility=["read", "create", "update", "delete", "query"] + ) + """The name of the registry endpoint. Required.""" @overload def __init__( self, *, - secret_ref: str, + registry_endpoint_ref: str, ) -> None: ... @overload @@ -1680,45 +1413,50 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class DataflowEndpointDataExplorer(_model_base.Model): - """Azure Data Explorer endpoint properties. - - - :ivar authentication: Authentication configuration. NOTE - only authentication property is - allowed per entry. Required. - :vartype authentication: - ~azure.mgmt.iotoperations.models.DataflowEndpointDataExplorerAuthentication - :ivar database: Database name. Required. - :vartype database: str - :ivar host: Host of the Azure Data Explorer in the form of - :code:``.:code:``.kusto.windows.net . Required. - :vartype host: str - :ivar batching: Azure Data Explorer endpoint batching configuration. 
- :vartype batching: ~azure.mgmt.iotoperations.models.BatchingConfiguration + super().__init__( + *args, registry_settings_type=AkriConnectorTemplateHelmRegistrySettingsType.REGISTRY_ENDPOINT_REF, **kwargs + ) + + +class AkriConnectorTemplateHelmUpgradeConfiguration(_Model): # pylint: disable=name-too-long + """AkriConnectorTemplateHelmUpgradeConfiguration properties. + + :ivar timeout: The timeout for the operation in seconds. + :vartype timeout: int + :ivar wait_for_jobs: The wait for jobs flag. + :vartype wait_for_jobs: bool + :ivar atomic: Atomic flag for the operation. + :vartype atomic: bool + :ivar disable_hooks: Disable hooks flag for the operation. + :vartype disable_hooks: bool + :ivar wait: The wait flag for the operation. + :vartype wait: bool """ - authentication: "_models.DataflowEndpointDataExplorerAuthentication" = rest_field() - """Authentication configuration. NOTE - only authentication property is allowed per entry. - Required.""" - database: str = rest_field() - """Database name. Required.""" - host: str = rest_field() - """Host of the Azure Data Explorer in the form of - :code:``.:code:``.kusto.windows.net . Required.""" - batching: Optional["_models.BatchingConfiguration"] = rest_field() - """Azure Data Explorer endpoint batching configuration.""" + timeout: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The timeout for the operation in seconds.""" + wait_for_jobs: Optional[bool] = rest_field( + name="waitForJobs", visibility=["read", "create", "update", "delete", "query"] + ) + """The wait for jobs flag.""" + atomic: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Atomic flag for the operation.""" + disable_hooks: Optional[bool] = rest_field( + name="disableHooks", visibility=["read", "create", "update", "delete", "query"] + ) + """Disable hooks flag for the operation.""" + wait: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The wait flag for the operation.""" @overload def __init__( self, *, - authentication: "_models.DataflowEndpointDataExplorerAuthentication", - database: str, - host: str, - batching: Optional["_models.BatchingConfiguration"] = None, + timeout: Optional[int] = None, + wait_for_jobs: Optional[bool] = None, + atomic: Optional[bool] = None, + disable_hooks: Optional[bool] = None, + wait: Optional[bool] = None, ) -> None: ... @overload @@ -1732,46 +1470,33 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowEndpointDataExplorerAuthentication(_model_base.Model): # pylint: disable=name-too-long - """Azure Data Explorer Authentication properties. NOTE - only authentication property is allowed - per entry. +class AkriConnectorTemplateManagedConfiguration( + AkriConnectorTemplateRuntimeConfiguration, discriminator="ManagedConfiguration" +): # pylint: disable=name-too-long + """AkriConnectorTemplateManagedConfiguration properties. - - :ivar method: Mode of Authentication. Required. Known values are: - "SystemAssignedManagedIdentity" and "UserAssignedManagedIdentity". - :vartype method: str or ~azure.mgmt.iotoperations.models.DataExplorerAuthMethod - :ivar system_assigned_managed_identity_settings: System-assigned managed identity - authentication. 
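Both registry-settings variants share the registrySettingsType discriminator, and each subclass fills it in automatically in __init__, so callers only supply the variant-specific payload. A sketch with placeholder names:

from azure.mgmt.iotoperations import models

# Variant A: point at a RegistryEndpoint resource by name (placeholder name).
by_endpoint = models.AkriConnectorTemplateHelmRegistryEndpointRef(
    registry_endpoint_ref="my-registry-endpoint",
)
# Variant B: inline container registry settings (see the earlier sketch).
inline = models.AkriConnectorTemplateHelmContainerRegistrySettings(
    registry="myregistry.azurecr.io",
    repository="connectors/my-connector-chart",
)
# Either object can be used wherever AkriConnectorTemplateHelmRegistrySettings is expected.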
- :vartype system_assigned_managed_identity_settings: - ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity - :ivar user_assigned_managed_identity_settings: User-assigned managed identity authentication. - :vartype user_assigned_managed_identity_settings: - ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationUserAssignedManagedIdentity + :ivar runtime_configuration_type: The runtime configuration type for the Connector template. + Required. Managed Configuration Type. + :vartype runtime_configuration_type: str or + ~azure.mgmt.iotoperations.models.MANAGED_CONFIGURATION + :ivar managed_configuration_settings: The managed configuration settings. Required. + :vartype managed_configuration_settings: + ~azure.mgmt.iotoperations.models.AkriConnectorTemplateManagedConfigurationSettings """ - method: Union[str, "_models.DataExplorerAuthMethod"] = rest_field() - """Mode of Authentication. Required. Known values are: \"SystemAssignedManagedIdentity\" and - \"UserAssignedManagedIdentity\".""" - system_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" - ] = rest_field(name="systemAssignedManagedIdentitySettings") - """System-assigned managed identity authentication.""" - user_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" - ] = rest_field(name="userAssignedManagedIdentitySettings") - """User-assigned managed identity authentication.""" + runtime_configuration_type: Literal[AkriConnectorTemplateRuntimeConfigurationType.MANAGED_CONFIGURATION] = rest_discriminator(name="runtimeConfigurationType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The runtime configuration type for the Connector template. Required. Managed Configuration + Type.""" + managed_configuration_settings: "_models.AkriConnectorTemplateManagedConfigurationSettings" = rest_field( + name="managedConfigurationSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """The managed configuration settings. Required.""" @overload def __init__( self, *, - method: Union[str, "_models.DataExplorerAuthMethod"], - system_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" - ] = None, - user_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" - ] = None, + managed_configuration_settings: "_models.AkriConnectorTemplateManagedConfigurationSettings", ) -> None: ... @overload @@ -1782,39 +1507,91 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class DataflowEndpointDataLakeStorage(_model_base.Model): - """Azure Data Lake endpoint properties. - - - :ivar authentication: Authentication configuration. NOTE - only authentication property is - allowed per entry. Required. - :vartype authentication: - ~azure.mgmt.iotoperations.models.DataflowEndpointDataLakeStorageAuthentication - :ivar host: Host of the Azure Data Lake in the form of :code:``.blob.core.windows.net - . Required. - :vartype host: str - :ivar batching: Azure Data Lake endpoint batching configuration. 
- :vartype batching: ~azure.mgmt.iotoperations.models.BatchingConfiguration + super().__init__( + *args, + runtime_configuration_type=AkriConnectorTemplateRuntimeConfigurationType.MANAGED_CONFIGURATION, + **kwargs, + ) + + +class AkriConnectorTemplateManagedConfigurationSettings(_Model): # pylint: disable=name-too-long + """AkriConnectorTemplateManagedConfiguration properties. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AkriConnectorTemplateRuntimeImageConfiguration, + AkriConnectorTemplateRuntimeStatefulSetConfiguration + + :ivar managed_configuration_type: The type of the managed configuration. Required. Known values + are: "ImageConfiguration" and "StatefulSetConfiguration". + :vartype managed_configuration_type: str or + ~azure.mgmt.iotoperations.models.AkriConnectorTemplateManagedConfigurationType + :ivar allocation: Allocation settings for the managed configuration. + :vartype allocation: ~azure.mgmt.iotoperations.models.AkriConnectorTemplateAllocation + :ivar persistent_volume_claims: The persistent volume claims for the managed configuration. + :vartype persistent_volume_claims: + list[~azure.mgmt.iotoperations.models.AkriConnectorTemplatePersistentVolumeClaim] + :ivar additional_configuration: Additional configuration for the image of the managed + configuration. + :vartype additional_configuration: dict[str, str] + :ivar persistent_volume_claim_templates: The persistent volume claim templates for the managed + configuration. + See + `https://raw.githubusercontent.com/kubernetes/kubernetes/refs/heads/master/api/openapi-spec/v3/apis__apps__v1_openapi.json + `_. + :vartype persistent_volume_claim_templates: list[dict[str, any]] + :ivar secrets: Connector secrets that will be mounted onto all connector instances. + :vartype secrets: list[~azure.mgmt.iotoperations.models.AkriConnectorsSecret] + :ivar trust_settings: Trust list for the connector. This is used to specify the certificates + that all connector instances should trust. + :vartype trust_settings: ~azure.mgmt.iotoperations.models.AkriConnectorTemplateTrustList """ - authentication: "_models.DataflowEndpointDataLakeStorageAuthentication" = rest_field() - """Authentication configuration. NOTE - only authentication property is allowed per entry. - Required.""" - host: str = rest_field() - """Host of the Azure Data Lake in the form of :code:``.blob.core.windows.net . Required.""" - batching: Optional["_models.BatchingConfiguration"] = rest_field() - """Azure Data Lake endpoint batching configuration.""" + __mapping__: Dict[str, _Model] = {} + managed_configuration_type: str = rest_discriminator( + name="managedConfigurationType", visibility=["read", "create", "update", "delete", "query"] + ) + """The type of the managed configuration. Required. 
Known values are: \"ImageConfiguration\" and + \"StatefulSetConfiguration\".""" + allocation: Optional["_models.AkriConnectorTemplateAllocation"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Allocation settings for the managed configuration.""" + persistent_volume_claims: Optional[List["_models.AkriConnectorTemplatePersistentVolumeClaim"]] = rest_field( + name="persistentVolumeClaims", visibility=["read", "create", "update", "delete", "query"] + ) + """The persistent volume claims for the managed configuration.""" + additional_configuration: Optional[Dict[str, str]] = rest_field( + name="additionalConfiguration", visibility=["read", "create", "update", "delete", "query"] + ) + """Additional configuration for the image of the managed configuration.""" + persistent_volume_claim_templates: Optional[List[Dict[str, Any]]] = rest_field( + name="persistentVolumeClaimTemplates", visibility=["read", "create", "update", "delete", "query"] + ) + """The persistent volume claim templates for the managed configuration. + See + `https://raw.githubusercontent.com/kubernetes/kubernetes/refs/heads/master/api/openapi-spec/v3/apis__apps__v1_openapi.json + `_.""" + secrets: Optional[List["_models.AkriConnectorsSecret"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Connector secrets that will be mounted onto all connector instances.""" + trust_settings: Optional["_models.AkriConnectorTemplateTrustList"] = rest_field( + name="trustSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Trust list for the connector. This is used to specify the certificates that all connector + instances should trust.""" @overload def __init__( self, *, - authentication: "_models.DataflowEndpointDataLakeStorageAuthentication", - host: str, - batching: Optional["_models.BatchingConfiguration"] = None, + managed_configuration_type: str, + allocation: Optional["_models.AkriConnectorTemplateAllocation"] = None, + persistent_volume_claims: Optional[List["_models.AkriConnectorTemplatePersistentVolumeClaim"]] = None, + additional_configuration: Optional[Dict[str, str]] = None, + persistent_volume_claim_templates: Optional[List[Dict[str, Any]]] = None, + secrets: Optional[List["_models.AkriConnectorsSecret"]] = None, + trust_settings: Optional["_models.AkriConnectorTemplateTrustList"] = None, ) -> None: ... @overload @@ -1828,54 +1605,26 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowEndpointDataLakeStorageAuthentication(_model_base.Model): # pylint: disable=name-too-long - """Azure Data Lake endpoint Authentication properties. NOTE Enum - Only one method is supported - for one entry. +class AkriConnectorTemplatePersistentVolumeClaim(_Model): # pylint: disable=name-too-long + """AkriConnectorTemplatePersistentVolumeClaim properties. - - :ivar method: Mode of Authentication. Required. Known values are: - "SystemAssignedManagedIdentity", "UserAssignedManagedIdentity", and "AccessToken". - :vartype method: str or ~azure.mgmt.iotoperations.models.DataLakeStorageAuthMethod - :ivar access_token_settings: SAS token authentication. - :vartype access_token_settings: - ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationAccessToken - :ivar system_assigned_managed_identity_settings: System-assigned managed identity - authentication. 
- :vartype system_assigned_managed_identity_settings: - ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity - :ivar user_assigned_managed_identity_settings: User-assigned managed identity authentication. - :vartype user_assigned_managed_identity_settings: - ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationUserAssignedManagedIdentity + :ivar claim_name: The name of the persistent volume claim. Required. + :vartype claim_name: str + :ivar mount_path: The mount path for the persistent volume claim. Required. + :vartype mount_path: str """ - method: Union[str, "_models.DataLakeStorageAuthMethod"] = rest_field() - """Mode of Authentication. Required. Known values are: \"SystemAssignedManagedIdentity\", - \"UserAssignedManagedIdentity\", and \"AccessToken\".""" - access_token_settings: Optional["_models.DataflowEndpointAuthenticationAccessToken"] = rest_field( - name="accessTokenSettings" - ) - """SAS token authentication.""" - system_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" - ] = rest_field(name="systemAssignedManagedIdentitySettings") - """System-assigned managed identity authentication.""" - user_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" - ] = rest_field(name="userAssignedManagedIdentitySettings") - """User-assigned managed identity authentication.""" + claim_name: str = rest_field(name="claimName", visibility=["read", "create", "update", "delete", "query"]) + """The name of the persistent volume claim. Required.""" + mount_path: str = rest_field(name="mountPath", visibility=["read", "create", "update", "delete", "query"]) + """The mount path for the persistent volume claim. Required.""" @overload def __init__( self, *, - method: Union[str, "_models.DataLakeStorageAuthMethod"], - access_token_settings: Optional["_models.DataflowEndpointAuthenticationAccessToken"] = None, - system_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" - ] = None, - user_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" - ] = None, + claim_name: str, + mount_path: str, ) -> None: ... @overload @@ -1889,50 +1638,62 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowEndpointFabricOneLake(_model_base.Model): - """Microsoft Fabric endpoint properties. - +class AkriConnectorTemplateProperties(_Model): + """AkriConnectorTemplate properties. - :ivar authentication: Authentication configuration. NOTE - only one authentication property is - allowed per entry. Required. - :vartype authentication: - ~azure.mgmt.iotoperations.models.DataflowEndpointFabricOneLakeAuthentication - :ivar names: Names of the workspace and lakehouse. Required. - :vartype names: ~azure.mgmt.iotoperations.models.DataflowEndpointFabricOneLakeNames - :ivar one_lake_path_type: Type of location of the data in the workspace. Can be either tables - or files. Required. Known values are: "Files" and "Tables". - :vartype one_lake_path_type: str or - ~azure.mgmt.iotoperations.models.DataflowEndpointFabricPathType - :ivar host: Host of the Microsoft Fabric in the form of - https://:code:``.fabric.microsoft.com. Required. - :vartype host: str - :ivar batching: Batching configuration. 
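AkriConnectorTemplatePersistentVolumeClaim can be built with keyword arguments or, like every generated model, from a raw JSON mapping keyed by the REST wire names (claimName, mountPath). A sketch with placeholder values:

from azure.mgmt.iotoperations import models

pvc = models.AkriConnectorTemplatePersistentVolumeClaim(
    claim_name="connector-state-pvc",  # placeholder PVC name
    mount_path="/var/lib/connector",   # placeholder mount path
)
# Equivalent construction from raw JSON:
pvc_from_json = models.AkriConnectorTemplatePersistentVolumeClaim(
    {"claimName": "connector-state-pvc", "mountPath": "/var/lib/connector"}
)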
- :vartype batching: ~azure.mgmt.iotoperations.models.BatchingConfiguration + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". + :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState + :ivar aio_metadata: Metadata about AIO. + :vartype aio_metadata: ~azure.mgmt.iotoperations.models.AkriConnectorTemplateAioMetadata + :ivar runtime_configuration: The runtime configuration for the Connector template. Required. + :vartype runtime_configuration: + ~azure.mgmt.iotoperations.models.AkriConnectorTemplateRuntimeConfiguration + :ivar diagnostics: Diagnostics settings for the Connector template. + :vartype diagnostics: ~azure.mgmt.iotoperations.models.AkriConnectorTemplateDiagnostics + :ivar device_inbound_endpoint_types: Device inbound endpoint types. Required. + :vartype device_inbound_endpoint_types: + list[~azure.mgmt.iotoperations.models.AkriConnectorTemplateDeviceInboundEndpointType] + :ivar mqtt_connection_configuration: Mqtt connection configuration settings. + :vartype mqtt_connection_configuration: + ~azure.mgmt.iotoperations.models.AkriConnectorsMqttConnectionConfiguration """ - authentication: "_models.DataflowEndpointFabricOneLakeAuthentication" = rest_field() - """Authentication configuration. NOTE - only one authentication property is allowed per entry. - Required.""" - names: "_models.DataflowEndpointFabricOneLakeNames" = rest_field() - """Names of the workspace and lakehouse. Required.""" - one_lake_path_type: Union[str, "_models.DataflowEndpointFabricPathType"] = rest_field(name="oneLakePathType") - """Type of location of the data in the workspace. Can be either tables or files. Required. Known - values are: \"Files\" and \"Tables\".""" - host: str = rest_field() - """Host of the Microsoft Fabric in the form of https://:code:``.fabric.microsoft.com. - Required.""" - batching: Optional["_models.BatchingConfiguration"] = rest_field() - """Batching configuration.""" + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" + aio_metadata: Optional["_models.AkriConnectorTemplateAioMetadata"] = rest_field( + name="aioMetadata", visibility=["read", "create", "update", "delete", "query"] + ) + """Metadata about AIO.""" + runtime_configuration: "_models.AkriConnectorTemplateRuntimeConfiguration" = rest_field( + name="runtimeConfiguration", visibility=["read", "create", "update", "delete", "query"] + ) + """The runtime configuration for the Connector template. Required.""" + diagnostics: Optional["_models.AkriConnectorTemplateDiagnostics"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Diagnostics settings for the Connector template.""" + device_inbound_endpoint_types: List["_models.AkriConnectorTemplateDeviceInboundEndpointType"] = rest_field( + name="deviceInboundEndpointTypes", visibility=["read", "create", "update", "delete", "query"] + ) + """Device inbound endpoint types. 
Required.""" + mqtt_connection_configuration: Optional["_models.AkriConnectorsMqttConnectionConfiguration"] = rest_field( + name="mqttConnectionConfiguration", visibility=["read", "create", "update", "delete", "query"] + ) + """Mqtt connection configuration settings.""" @overload def __init__( self, *, - authentication: "_models.DataflowEndpointFabricOneLakeAuthentication", - names: "_models.DataflowEndpointFabricOneLakeNames", - one_lake_path_type: Union[str, "_models.DataflowEndpointFabricPathType"], - host: str, - batching: Optional["_models.BatchingConfiguration"] = None, + runtime_configuration: "_models.AkriConnectorTemplateRuntimeConfiguration", + device_inbound_endpoint_types: List["_models.AkriConnectorTemplateDeviceInboundEndpointType"], + aio_metadata: Optional["_models.AkriConnectorTemplateAioMetadata"] = None, + diagnostics: Optional["_models.AkriConnectorTemplateDiagnostics"] = None, + mqtt_connection_configuration: Optional["_models.AkriConnectorsMqttConnectionConfiguration"] = None, ) -> None: ... @overload @@ -1946,46 +1707,41 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowEndpointFabricOneLakeAuthentication(_model_base.Model): # pylint: disable=name-too-long - """Microsoft Fabric endpoint. Authentication properties. NOTE - Only one method is supported for - one entry. - +class AkriConnectorTemplateResource(ProxyResource): + """AkriConnectorTemplate resource. - :ivar method: Mode of Authentication. Required. Known values are: - "SystemAssignedManagedIdentity" and "UserAssignedManagedIdentity". - :vartype method: str or ~azure.mgmt.iotoperations.models.FabricOneLakeAuthMethod - :ivar system_assigned_managed_identity_settings: System-assigned managed identity - authentication. - :vartype system_assigned_managed_identity_settings: - ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity - :ivar user_assigned_managed_identity_settings: User-assigned managed identity authentication. - :vartype user_assigned_managed_identity_settings: - ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationUserAssignedManagedIdentity + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.iotoperations.models.AkriConnectorTemplateProperties + :ivar extended_location: Edge location of the resource. + :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation """ - method: Union[str, "_models.FabricOneLakeAuthMethod"] = rest_field() - """Mode of Authentication. Required. 
Known values are: \"SystemAssignedManagedIdentity\" and - \"UserAssignedManagedIdentity\".""" - system_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" - ] = rest_field(name="systemAssignedManagedIdentitySettings") - """System-assigned managed identity authentication.""" - user_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" - ] = rest_field(name="userAssignedManagedIdentitySettings") - """User-assigned managed identity authentication.""" + properties: Optional["_models.AkriConnectorTemplateProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + extended_location: Optional["_models.ExtendedLocation"] = rest_field( + name="extendedLocation", visibility=["read", "create"] + ) + """Edge location of the resource.""" @overload def __init__( self, *, - method: Union[str, "_models.FabricOneLakeAuthMethod"], - system_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" - ] = None, - user_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" - ] = None, + properties: Optional["_models.AkriConnectorTemplateProperties"] = None, + extended_location: Optional["_models.ExtendedLocation"] = None, ) -> None: ... @overload @@ -1999,27 +1755,57 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowEndpointFabricOneLakeNames(_model_base.Model): - """Microsoft Fabric endpoint Names properties. - - - :ivar lakehouse_name: Lakehouse name. Required. - :vartype lakehouse_name: str - :ivar workspace_name: Workspace name. Required. - :vartype workspace_name: str +class AkriConnectorTemplateRuntimeImageConfiguration( + AkriConnectorTemplateManagedConfigurationSettings, discriminator="ImageConfiguration" +): # pylint: disable=name-too-long + """AkriConnectorTemplateRuntimeImageConfiguration properties. + + :ivar allocation: Allocation settings for the managed configuration. + :vartype allocation: ~azure.mgmt.iotoperations.models.AkriConnectorTemplateAllocation + :ivar persistent_volume_claims: The persistent volume claims for the managed configuration. + :vartype persistent_volume_claims: + list[~azure.mgmt.iotoperations.models.AkriConnectorTemplatePersistentVolumeClaim] + :ivar additional_configuration: Additional configuration for the image of the managed + configuration. + :vartype additional_configuration: dict[str, str] + :ivar persistent_volume_claim_templates: The persistent volume claim templates for the managed + configuration. + See + `https://raw.githubusercontent.com/kubernetes/kubernetes/refs/heads/master/api/openapi-spec/v3/apis__apps__v1_openapi.json + `_. + :vartype persistent_volume_claim_templates: list[dict[str, any]] + :ivar secrets: Connector secrets that will be mounted onto all connector instances. + :vartype secrets: list[~azure.mgmt.iotoperations.models.AkriConnectorsSecret] + :ivar trust_settings: Trust list for the connector. This is used to specify the certificates + that all connector instances should trust. + :vartype trust_settings: ~azure.mgmt.iotoperations.models.AkriConnectorTemplateTrustList + :ivar managed_configuration_type: The managed configuration type for the Connector template. + Required. Image Configuration Type. 
+ :vartype managed_configuration_type: str or + ~azure.mgmt.iotoperations.models.IMAGE_CONFIGURATION + :ivar image_configuration_settings: The image configuration settings. Required. + :vartype image_configuration_settings: + ~azure.mgmt.iotoperations.models.AkriConnectorTemplateRuntimeImageConfigurationSettings """ - lakehouse_name: str = rest_field(name="lakehouseName") - """Lakehouse name. Required.""" - workspace_name: str = rest_field(name="workspaceName") - """Workspace name. Required.""" + managed_configuration_type: Literal[AkriConnectorTemplateManagedConfigurationType.IMAGE_CONFIGURATION] = rest_discriminator(name="managedConfigurationType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The managed configuration type for the Connector template. Required. Image Configuration Type.""" + image_configuration_settings: "_models.AkriConnectorTemplateRuntimeImageConfigurationSettings" = rest_field( + name="imageConfigurationSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """The image configuration settings. Required.""" @overload def __init__( self, *, - lakehouse_name: str, - workspace_name: str, + image_configuration_settings: "_models.AkriConnectorTemplateRuntimeImageConfigurationSettings", + allocation: Optional["_models.AkriConnectorTemplateAllocation"] = None, + persistent_volume_claims: Optional[List["_models.AkriConnectorTemplatePersistentVolumeClaim"]] = None, + additional_configuration: Optional[Dict[str, str]] = None, + persistent_volume_claim_templates: Optional[List[Dict[str, Any]]] = None, + secrets: Optional[List["_models.AkriConnectorsSecret"]] = None, + trust_settings: Optional["_models.AkriConnectorTemplateTrustList"] = None, ) -> None: ... @overload @@ -2030,89 +1816,123 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) + super().__init__( + *args, + managed_configuration_type=AkriConnectorTemplateManagedConfigurationType.IMAGE_CONFIGURATION, + **kwargs, + ) + + +class AkriConnectorTemplateRuntimeImageConfigurationSettings(_Model): # pylint: disable=name-too-long + """AkriConnectorTemplateRuntimeImageConfiguration properties. + + :ivar image_name: The image name without any registry reference, tag or digest. Required. + :vartype image_name: str + :ivar image_pull_policy: The pull policy of the image. Known values are: "Always", + "IfNotPresent", and "Never". + :vartype image_pull_policy: str or + ~azure.mgmt.iotoperations.models.AkriConnectorsImagePullPolicy + :ivar replicas: The number of replicas to be set up. + :vartype replicas: int + :ivar registry_settings: The registry settings for the image. You can omit this field if using + the default docker hub repository or using a local image. + :vartype registry_settings: ~azure.mgmt.iotoperations.models.AkriConnectorsRegistrySettings + :ivar tag_digest_settings: Optional image tag or digest. If not specified, the default tag is + ``latest``. + :vartype tag_digest_settings: ~azure.mgmt.iotoperations.models.AkriConnectorsTagDigestSettings + """ + image_name: str = rest_field(name="imageName", visibility=["read", "create", "update", "delete", "query"]) + """The image name without any registry reference, tag or digest. Required.""" + image_pull_policy: Optional[Union[str, "_models.AkriConnectorsImagePullPolicy"]] = rest_field( + name="imagePullPolicy", visibility=["read", "create", "update", "delete", "query"] + ) + """The pull policy of the image. 
Known values are: \"Always\", \"IfNotPresent\", and \"Never\".""" + replicas: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of replicas to be set up.""" + registry_settings: Optional["_models.AkriConnectorsRegistrySettings"] = rest_field( + name="registrySettings", visibility=["read", "create", "update", "delete", "query"] + ) + """The registry settings for the image. You can omit this field if using the default docker hub + repository or using a local image.""" + tag_digest_settings: Optional["_models.AkriConnectorsTagDigestSettings"] = rest_field( + name="tagDigestSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Optional image tag or digest. If not specified, the default tag is ``latest``.""" -class DataflowEndpointKafka(_model_base.Model): - """Kafka endpoint properties. + @overload + def __init__( + self, + *, + image_name: str, + image_pull_policy: Optional[Union[str, "_models.AkriConnectorsImagePullPolicy"]] = None, + replicas: Optional[int] = None, + registry_settings: Optional["_models.AkriConnectorsRegistrySettings"] = None, + tag_digest_settings: Optional["_models.AkriConnectorsTagDigestSettings"] = None, + ) -> None: ... + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ - :ivar authentication: Authentication configuration. NOTE - only authentication property is - allowed per entry. Required. - :vartype authentication: ~azure.mgmt.iotoperations.models.DataflowEndpointKafkaAuthentication - :ivar consumer_group_id: Consumer group ID. - :vartype consumer_group_id: str - :ivar host: Kafka endpoint host. Required. - :vartype host: str - :ivar batching: Batching configuration. - :vartype batching: ~azure.mgmt.iotoperations.models.DataflowEndpointKafkaBatching - :ivar copy_mqtt_properties: Copy Broker properties. No effect if the endpoint is used as a - source or if the dataflow doesn't have an Broker source. Known values are: "Enabled" and - "Disabled". - :vartype copy_mqtt_properties: str or ~azure.mgmt.iotoperations.models.OperationalMode - :ivar compression: Compression. Can be none, gzip, lz4, or snappy. No effect if the endpoint is - used as a source. Known values are: "None", "Gzip", "Snappy", and "Lz4". - :vartype compression: str or ~azure.mgmt.iotoperations.models.DataflowEndpointKafkaCompression - :ivar kafka_acks: Kafka acks. Can be all, one, or zero. No effect if the endpoint is used as a - source. Known values are: "Zero", "One", and "All". - :vartype kafka_acks: str or ~azure.mgmt.iotoperations.models.DataflowEndpointKafkaAcks - :ivar partition_strategy: Partition handling strategy. Can be default or static. No effect if - the endpoint is used as a source. Known values are: "Default", "Static", "Topic", and - "Property". - :vartype partition_strategy: str or - ~azure.mgmt.iotoperations.models.DataflowEndpointKafkaPartitionStrategy - :ivar tls: TLS configuration. - :vartype tls: ~azure.mgmt.iotoperations.models.TlsProperties - :ivar cloud_event_attributes: Cloud event mapping config. Known values are: "Propagate" and - "CreateOrRemap". 
- :vartype cloud_event_attributes: str or - ~azure.mgmt.iotoperations.models.CloudEventAttributeType + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AkriConnectorTemplateRuntimeStatefulSetConfiguration( + AkriConnectorTemplateManagedConfigurationSettings, discriminator="StatefulSetConfiguration" +): # pylint: disable=name-too-long + """AkriConnectorTemplateRuntimeStatefulSetConfiguration properties. + + :ivar allocation: Allocation settings for the managed configuration. + :vartype allocation: ~azure.mgmt.iotoperations.models.AkriConnectorTemplateAllocation + :ivar persistent_volume_claims: The persistent volume claims for the managed configuration. + :vartype persistent_volume_claims: + list[~azure.mgmt.iotoperations.models.AkriConnectorTemplatePersistentVolumeClaim] + :ivar additional_configuration: Additional configuration for the image of the managed + configuration. + :vartype additional_configuration: dict[str, str] + :ivar persistent_volume_claim_templates: The persistent volume claim templates for the managed + configuration. + See + `https://raw.githubusercontent.com/kubernetes/kubernetes/refs/heads/master/api/openapi-spec/v3/apis__apps__v1_openapi.json + `_. + :vartype persistent_volume_claim_templates: list[dict[str, any]] + :ivar secrets: Connector secrets that will be mounted onto all connector instances. + :vartype secrets: list[~azure.mgmt.iotoperations.models.AkriConnectorsSecret] + :ivar trust_settings: Trust list for the connector. This is used to specify the certificates + that all connector instances should trust. + :vartype trust_settings: ~azure.mgmt.iotoperations.models.AkriConnectorTemplateTrustList + :ivar managed_configuration_type: The managed configuration type for the Connector template. + Required. StatefulSet Configuration Type. + :vartype managed_configuration_type: str or + ~azure.mgmt.iotoperations.models.STATEFUL_SET_CONFIGURATION + :ivar stateful_set_configuration_settings: The stateful set configuration settings. Required. + :vartype stateful_set_configuration_settings: dict[str, any] """ - authentication: "_models.DataflowEndpointKafkaAuthentication" = rest_field() - """Authentication configuration. NOTE - only authentication property is allowed per entry. - Required.""" - consumer_group_id: Optional[str] = rest_field(name="consumerGroupId") - """Consumer group ID.""" - host: str = rest_field() - """Kafka endpoint host. Required.""" - batching: Optional["_models.DataflowEndpointKafkaBatching"] = rest_field() - """Batching configuration.""" - copy_mqtt_properties: Optional[Union[str, "_models.OperationalMode"]] = rest_field(name="copyMqttProperties") - """Copy Broker properties. No effect if the endpoint is used as a source or if the dataflow - doesn't have an Broker source. Known values are: \"Enabled\" and \"Disabled\".""" - compression: Optional[Union[str, "_models.DataflowEndpointKafkaCompression"]] = rest_field() - """Compression. Can be none, gzip, lz4, or snappy. No effect if the endpoint is used as a source. - Known values are: \"None\", \"Gzip\", \"Snappy\", and \"Lz4\".""" - kafka_acks: Optional[Union[str, "_models.DataflowEndpointKafkaAcks"]] = rest_field(name="kafkaAcks") - """Kafka acks. Can be all, one, or zero. No effect if the endpoint is used as a source. Known - values are: \"Zero\", \"One\", and \"All\".""" - partition_strategy: Optional[Union[str, "_models.DataflowEndpointKafkaPartitionStrategy"]] = rest_field( - name="partitionStrategy" - ) - """Partition handling strategy. 
Can be default or static. No effect if the endpoint is used as a - source. Known values are: \"Default\", \"Static\", \"Topic\", and \"Property\".""" - tls: Optional["_models.TlsProperties"] = rest_field() - """TLS configuration.""" - cloud_event_attributes: Optional[Union[str, "_models.CloudEventAttributeType"]] = rest_field( - name="cloudEventAttributes" + managed_configuration_type: Literal[AkriConnectorTemplateManagedConfigurationType.STATEFUL_SET_CONFIGURATION] = rest_discriminator(name="managedConfigurationType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The managed configuration type for the Connector template. Required. StatefulSet Configuration + Type.""" + stateful_set_configuration_settings: Dict[str, Any] = rest_field( + name="statefulSetConfigurationSettings", visibility=["read", "create", "update", "delete", "query"] ) - """Cloud event mapping config. Known values are: \"Propagate\" and \"CreateOrRemap\".""" + """The stateful set configuration settings. Required.""" @overload def __init__( self, *, - authentication: "_models.DataflowEndpointKafkaAuthentication", - host: str, - consumer_group_id: Optional[str] = None, - batching: Optional["_models.DataflowEndpointKafkaBatching"] = None, - copy_mqtt_properties: Optional[Union[str, "_models.OperationalMode"]] = None, - compression: Optional[Union[str, "_models.DataflowEndpointKafkaCompression"]] = None, - kafka_acks: Optional[Union[str, "_models.DataflowEndpointKafkaAcks"]] = None, - partition_strategy: Optional[Union[str, "_models.DataflowEndpointKafkaPartitionStrategy"]] = None, - tls: Optional["_models.TlsProperties"] = None, - cloud_event_attributes: Optional[Union[str, "_models.CloudEventAttributeType"]] = None, + stateful_set_configuration_settings: Dict[str, Any], + allocation: Optional["_models.AkriConnectorTemplateAllocation"] = None, + persistent_volume_claims: Optional[List["_models.AkriConnectorTemplatePersistentVolumeClaim"]] = None, + additional_configuration: Optional[Dict[str, str]] = None, + persistent_volume_claim_templates: Optional[List[Dict[str, Any]]] = None, + secrets: Optional[List["_models.AkriConnectorsSecret"]] = None, + trust_settings: Optional["_models.AkriConnectorTemplateTrustList"] = None, ) -> None: ... @overload @@ -2123,63 +1943,30 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - + super().__init__( + *args, + managed_configuration_type=AkriConnectorTemplateManagedConfigurationType.STATEFUL_SET_CONFIGURATION, + **kwargs, + ) -class DataflowEndpointKafkaAuthentication(_model_base.Model): - """Kafka endpoint Authentication properties. NOTE - only authentication property is allowed per - entry. +class AkriConnectorTemplateTrustList(_Model): + """AkriConnectorTemplateTrustList properties. - :ivar method: Mode of Authentication. Required. Known values are: - "SystemAssignedManagedIdentity", "UserAssignedManagedIdentity", "Sasl", "X509Certificate", and - "Anonymous". - :vartype method: str or ~azure.mgmt.iotoperations.models.KafkaAuthMethod - :ivar system_assigned_managed_identity_settings: System-assigned managed identity - authentication. - :vartype system_assigned_managed_identity_settings: - ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity - :ivar user_assigned_managed_identity_settings: User-assigned managed identity authentication. 
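The StatefulSet variant takes an opaque settings dictionary instead of a typed image payload, and either variant can then be wrapped in AkriConnectorTemplateManagedConfiguration to serve as the template's runtime configuration. A sketch; the settings payload is a placeholder:

from azure.mgmt.iotoperations import models

stateful_set_configuration = models.AkriConnectorTemplateRuntimeStatefulSetConfiguration(
    stateful_set_configuration_settings={"serviceName": "my-connector"},  # placeholder payload
)
managed_configuration = models.AkriConnectorTemplateManagedConfiguration(
    managed_configuration_settings=stateful_set_configuration,
)
# managed_configuration is an AkriConnectorTemplateRuntimeConfiguration; the
# "ManagedConfiguration" discriminator value is filled in automatically.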
- :vartype user_assigned_managed_identity_settings: - ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationUserAssignedManagedIdentity - :ivar sasl_settings: SASL authentication. - :vartype sasl_settings: ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationSasl - :ivar x509_certificate_settings: X.509 certificate authentication. - :vartype x509_certificate_settings: - ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationX509 + :ivar trust_list_secret_ref: The secret reference for certificates to trust. Required. + :vartype trust_list_secret_ref: str """ - method: Union[str, "_models.KafkaAuthMethod"] = rest_field() - """Mode of Authentication. Required. Known values are: \"SystemAssignedManagedIdentity\", - \"UserAssignedManagedIdentity\", \"Sasl\", \"X509Certificate\", and \"Anonymous\".""" - system_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" - ] = rest_field(name="systemAssignedManagedIdentitySettings") - """System-assigned managed identity authentication.""" - user_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" - ] = rest_field(name="userAssignedManagedIdentitySettings") - """User-assigned managed identity authentication.""" - sasl_settings: Optional["_models.DataflowEndpointAuthenticationSasl"] = rest_field(name="saslSettings") - """SASL authentication.""" - x509_certificate_settings: Optional["_models.DataflowEndpointAuthenticationX509"] = rest_field( - name="x509CertificateSettings" + trust_list_secret_ref: str = rest_field( + name="trustListSecretRef", visibility=["read", "create", "update", "delete", "query"] ) - """X.509 certificate authentication.""" + """The secret reference for certificates to trust. Required.""" @overload def __init__( self, *, - method: Union[str, "_models.KafkaAuthMethod"], - system_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" - ] = None, - user_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" - ] = None, - sasl_settings: Optional["_models.DataflowEndpointAuthenticationSasl"] = None, - x509_certificate_settings: Optional["_models.DataflowEndpointAuthenticationX509"] = None, + trust_list_secret_ref: str, ) -> None: ... @overload @@ -2193,36 +1980,30 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowEndpointKafkaBatching(_model_base.Model): - """Kafka endpoint Batching properties. +class AkriDiscoveryHandlerAioMetadata(_Model): + """Metadata about AIO. - :ivar mode: Mode for batching. Known values are: "Enabled" and "Disabled". - :vartype mode: str or ~azure.mgmt.iotoperations.models.OperationalMode - :ivar latency_ms: Batching latency in milliseconds. - :vartype latency_ms: int - :ivar max_bytes: Maximum number of bytes in a batch. - :vartype max_bytes: int - :ivar max_messages: Maximum number of messages in a batch. - :vartype max_messages: int + :ivar aio_min_version: The minimum version of AIO required for the connector. + :vartype aio_min_version: str + :ivar aio_max_version: The maximum version of AIO required for the connector. + :vartype aio_max_version: str """ - mode: Optional[Union[str, "_models.OperationalMode"]] = rest_field() - """Mode for batching. 
Known values are: \"Enabled\" and \"Disabled\".""" - latency_ms: Optional[int] = rest_field(name="latencyMs") - """Batching latency in milliseconds.""" - max_bytes: Optional[int] = rest_field(name="maxBytes") - """Maximum number of bytes in a batch.""" - max_messages: Optional[int] = rest_field(name="maxMessages") - """Maximum number of messages in a batch.""" + aio_min_version: Optional[str] = rest_field( + name="aioMinVersion", visibility=["read", "create", "update", "delete", "query"] + ) + """The minimum version of AIO required for the connector.""" + aio_max_version: Optional[str] = rest_field( + name="aioMaxVersion", visibility=["read", "create", "update", "delete", "query"] + ) + """The maximum version of AIO required for the connector.""" @overload def __init__( self, *, - mode: Optional[Union[str, "_models.OperationalMode"]] = None, - latency_ms: Optional[int] = None, - max_bytes: Optional[int] = None, - max_messages: Optional[int] = None, + aio_min_version: Optional[str] = None, + aio_max_version: Optional[str] = None, ) -> None: ... @overload @@ -2236,22 +2017,23 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowEndpointLocalStorage(_model_base.Model): - """Local persistent volume endpoint properties. - +class AkriDiscoveryHandlerDiagnostics(_Model): + """AkriDiscoveryHandlerDiagnostics properties. - :ivar persistent_volume_claim_ref: Persistent volume claim name. Required. - :vartype persistent_volume_claim_ref: str + :ivar logs: The log settings for the Connector template. Required. + :vartype logs: ~azure.mgmt.iotoperations.models.AkriConnectorsDiagnosticsLogs """ - persistent_volume_claim_ref: str = rest_field(name="persistentVolumeClaimRef") - """Persistent volume claim name. Required.""" + logs: "_models.AkriConnectorsDiagnosticsLogs" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The log settings for the Connector template. Required.""" @overload def __init__( self, *, - persistent_volume_claim_ref: str, + logs: "_models.AkriConnectorsDiagnosticsLogs", ) -> None: ... @overload @@ -2265,85 +2047,26 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowEndpointMqtt(_model_base.Model): - """Broker endpoint properties. - +class AkriDiscoveryHandlerDiscoverableDeviceEndpointType(_Model): # pylint: disable=name-too-long + """AkriDiscoveryHandlerDiscoverableDeviceEndpointType properties. - :ivar authentication: authentication properties. DEFAULT: kubernetes.audience=aio-internal. - NOTE - Enum field only property is allowed. Required. - :vartype authentication: ~azure.mgmt.iotoperations.models.DataflowEndpointMqttAuthentication - :ivar client_id_prefix: Client ID prefix. Client ID generated by the dataflow is - :code:``-TBD. Optional; no prefix if omitted. - :vartype client_id_prefix: str - :ivar host: Host of the Broker in the form of :code:``::code:``. Optional; - connects to Broker if omitted. - :vartype host: str - :ivar protocol: Enable or disable websockets. Known values are: "Mqtt" and "WebSockets". - :vartype protocol: str or ~azure.mgmt.iotoperations.models.BrokerProtocolType - :ivar keep_alive_seconds: Broker KeepAlive for connection in seconds. - :vartype keep_alive_seconds: int - :ivar retain: Whether or not to keep the retain setting. Known values are: "Keep" and "Never". - :vartype retain: str or ~azure.mgmt.iotoperations.models.MqttRetainType - :ivar max_inflight_messages: The max number of messages to keep in flight. 
For subscribe, this - is the receive maximum. For publish, this is the maximum number of messages to send before - waiting for an ack. - :vartype max_inflight_messages: int - :ivar qos: Qos for Broker connection. - :vartype qos: int - :ivar session_expiry_seconds: Session expiry in seconds. - :vartype session_expiry_seconds: int - :ivar tls: TLS configuration. - :vartype tls: ~azure.mgmt.iotoperations.models.TlsProperties - :ivar cloud_event_attributes: Cloud event mapping config. Known values are: "Propagate" and - "CreateOrRemap". - :vartype cloud_event_attributes: str or - ~azure.mgmt.iotoperations.models.CloudEventAttributeType + :ivar endpoint_type: The type of the endpoint. Required. + :vartype endpoint_type: str + :ivar version: The version of the endpoint. Required. + :vartype version: str """ - authentication: "_models.DataflowEndpointMqttAuthentication" = rest_field() - """authentication properties. DEFAULT: kubernetes.audience=aio-internal. NOTE - Enum field only - property is allowed. Required.""" - client_id_prefix: Optional[str] = rest_field(name="clientIdPrefix") - """Client ID prefix. Client ID generated by the dataflow is :code:``-TBD. Optional; no - prefix if omitted.""" - host: Optional[str] = rest_field() - """Host of the Broker in the form of :code:``::code:``. Optional; connects to - Broker if omitted.""" - protocol: Optional[Union[str, "_models.BrokerProtocolType"]] = rest_field() - """Enable or disable websockets. Known values are: \"Mqtt\" and \"WebSockets\".""" - keep_alive_seconds: Optional[int] = rest_field(name="keepAliveSeconds") - """Broker KeepAlive for connection in seconds.""" - retain: Optional[Union[str, "_models.MqttRetainType"]] = rest_field() - """Whether or not to keep the retain setting. Known values are: \"Keep\" and \"Never\".""" - max_inflight_messages: Optional[int] = rest_field(name="maxInflightMessages") - """The max number of messages to keep in flight. For subscribe, this is the receive maximum. For - publish, this is the maximum number of messages to send before waiting for an ack.""" - qos: Optional[int] = rest_field() - """Qos for Broker connection.""" - session_expiry_seconds: Optional[int] = rest_field(name="sessionExpirySeconds") - """Session expiry in seconds.""" - tls: Optional["_models.TlsProperties"] = rest_field() - """TLS configuration.""" - cloud_event_attributes: Optional[Union[str, "_models.CloudEventAttributeType"]] = rest_field( - name="cloudEventAttributes" - ) - """Cloud event mapping config. Known values are: \"Propagate\" and \"CreateOrRemap\".""" + endpoint_type: str = rest_field(name="endpointType", visibility=["read", "create", "update", "delete", "query"]) + """The type of the endpoint. Required.""" + version: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The version of the endpoint. Required.""" @overload def __init__( self, *, - authentication: "_models.DataflowEndpointMqttAuthentication", - client_id_prefix: Optional[str] = None, - host: Optional[str] = None, - protocol: Optional[Union[str, "_models.BrokerProtocolType"]] = None, - keep_alive_seconds: Optional[int] = None, - retain: Optional[Union[str, "_models.MqttRetainType"]] = None, - max_inflight_messages: Optional[int] = None, - qos: Optional[int] = None, - session_expiry_seconds: Optional[int] = None, - tls: Optional["_models.TlsProperties"] = None, - cloud_event_attributes: Optional[Union[str, "_models.CloudEventAttributeType"]] = None, + endpoint_type: str, + version: str, ) -> None: ... 
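A discovery handler advertises the endpoint types it can discover as simple type/version pairs, alongside optional AIO version constraints. A sketch with placeholder values:

from azure.mgmt.iotoperations import models

discoverable_endpoint = models.AkriDiscoveryHandlerDiscoverableDeviceEndpointType(
    endpoint_type="Microsoft.Example",  # placeholder endpoint type identifier
    version="1.0",                      # placeholder version
)
aio_metadata = models.AkriDiscoveryHandlerAioMetadata(
    aio_min_version="1.0.0",  # placeholder minimum supported AIO version
)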
@overload @@ -2357,65 +2080,52 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowEndpointMqttAuthentication(_model_base.Model): - """Mqtt endpoint Authentication properties. NOTE - only authentication property is allowed per - entry. +class AkriDiscoveryHandlerImageConfiguration(_Model): + """AkriDiscoveryHandler Image configuration properties. - - :ivar method: Mode of Authentication. Required. Known values are: - "SystemAssignedManagedIdentity", "UserAssignedManagedIdentity", "ServiceAccountToken", - "X509Certificate", and "Anonymous". - :vartype method: str or ~azure.mgmt.iotoperations.models.MqttAuthMethod - :ivar system_assigned_managed_identity_settings: System-assigned managed identity - authentication. - :vartype system_assigned_managed_identity_settings: - ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity - :ivar user_assigned_managed_identity_settings: User-assigned managed identity authentication. - :vartype user_assigned_managed_identity_settings: - ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationUserAssignedManagedIdentity - :ivar service_account_token_settings: Kubernetes service account token authentication. Default - audience if not set is aio-internal. - :vartype service_account_token_settings: - ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationServiceAccountToken - :ivar x509_certificate_settings: X.509 certificate authentication. - :vartype x509_certificate_settings: - ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationX509 + :ivar image_name: The image name without any registry reference, tag or digest. Required. + :vartype image_name: str + :ivar image_pull_policy: The pull policy of the image. Known values are: "Always", + "IfNotPresent", and "Never". + :vartype image_pull_policy: str or + ~azure.mgmt.iotoperations.models.AkriConnectorsImagePullPolicy + :ivar replicas: The number of replicas to be set up. + :vartype replicas: int + :ivar registry_settings: The registry settings for the image. You can omit this field if using + the default docker hub repository or using a local image. + :vartype registry_settings: ~azure.mgmt.iotoperations.models.AkriConnectorsRegistrySettings + :ivar tag_digest_settings: Optional image tag or digest. If not specified, the default tag is + ``latest``. + :vartype tag_digest_settings: ~azure.mgmt.iotoperations.models.AkriConnectorsTagDigestSettings """ - method: Union[str, "_models.MqttAuthMethod"] = rest_field() - """Mode of Authentication. Required. 
Known values are: \"SystemAssignedManagedIdentity\", - \"UserAssignedManagedIdentity\", \"ServiceAccountToken\", \"X509Certificate\", and - \"Anonymous\".""" - system_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" - ] = rest_field(name="systemAssignedManagedIdentitySettings") - """System-assigned managed identity authentication.""" - user_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" - ] = rest_field(name="userAssignedManagedIdentitySettings") - """User-assigned managed identity authentication.""" - service_account_token_settings: Optional["_models.DataflowEndpointAuthenticationServiceAccountToken"] = rest_field( - name="serviceAccountTokenSettings" + image_name: str = rest_field(name="imageName", visibility=["read", "create", "update", "delete", "query"]) + """The image name without any registry reference, tag or digest. Required.""" + image_pull_policy: Optional[Union[str, "_models.AkriConnectorsImagePullPolicy"]] = rest_field( + name="imagePullPolicy", visibility=["read", "create", "update", "delete", "query"] ) - """Kubernetes service account token authentication. Default audience if not set is aio-internal.""" - x509_certificate_settings: Optional["_models.DataflowEndpointAuthenticationX509"] = rest_field( - name="x509CertificateSettings" + """The pull policy of the image. Known values are: \"Always\", \"IfNotPresent\", and \"Never\".""" + replicas: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The number of replicas to be set up.""" + registry_settings: Optional["_models.AkriConnectorsRegistrySettings"] = rest_field( + name="registrySettings", visibility=["read", "create", "update", "delete", "query"] ) - """X.509 certificate authentication.""" + """The registry settings for the image. You can omit this field if using the default docker hub + repository or using a local image.""" + tag_digest_settings: Optional["_models.AkriConnectorsTagDigestSettings"] = rest_field( + name="tagDigestSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Optional image tag or digest. If not specified, the default tag is ``latest``.""" @overload def __init__( self, *, - method: Union[str, "_models.MqttAuthMethod"], - system_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" - ] = None, - user_assigned_managed_identity_settings: Optional[ - "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" - ] = None, - service_account_token_settings: Optional["_models.DataflowEndpointAuthenticationServiceAccountToken"] = None, - x509_certificate_settings: Optional["_models.DataflowEndpointAuthenticationX509"] = None, + image_name: str, + image_pull_policy: Optional[Union[str, "_models.AkriConnectorsImagePullPolicy"]] = None, + replicas: Optional[int] = None, + registry_settings: Optional["_models.AkriConnectorsRegistrySettings"] = None, + tag_digest_settings: Optional["_models.AkriConnectorsTagDigestSettings"] = None, ) -> None: ... @overload @@ -2429,71 +2139,90 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowEndpointProperties(_model_base.Model): - """DataflowEndpoint Resource properties. NOTE - Only one type of endpoint is supported for one - Resource. - - Readonly variables are only populated by the server, and will be ignored when sending a request. 
+class AkriDiscoveryHandlerProperties(_Model): + """AkriDiscoveryHandler properties. + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". + :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState + :ivar aio_metadata: Metadata about AIO. + :vartype aio_metadata: ~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerAioMetadata + :ivar additional_configuration: Additional configuration for the AkriDiscoveryHandler. + :vartype additional_configuration: dict[str, str] + :ivar diagnostics: Diagnostics settings for the AkriDiscoveryHandler. + :vartype diagnostics: ~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerDiagnostics + :ivar mode: Mode of the AkriDiscoveryHandler. Known values are: "Enabled" and "Disabled". + :vartype mode: str or ~azure.mgmt.iotoperations.models.OperationalMode + :ivar discoverable_device_endpoint_types: Device inbound endpoint types. Required. + :vartype discoverable_device_endpoint_types: + list[~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerDiscoverableDeviceEndpointType] + :ivar mqtt_connection_configuration: Mqtt connection configuration settings. + :vartype mqtt_connection_configuration: + ~azure.mgmt.iotoperations.models.AkriConnectorsMqttConnectionConfiguration + :ivar image_configuration: The image configuration for the AkriDiscoveryHandler. Required. + :vartype image_configuration: + ~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerImageConfiguration + :ivar schedule: Schedule for the AkriDiscoveryHandler. Required. + :vartype schedule: ~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerSchedule + :ivar secrets: Secrets that will be mounted onto discovery handler. + :vartype secrets: list[~azure.mgmt.iotoperations.models.AkriConnectorsSecret] + """ - :ivar endpoint_type: Endpoint Type. Required. Known values are: "DataExplorer", - "DataLakeStorage", "FabricOneLake", "Kafka", "LocalStorage", and "Mqtt". - :vartype endpoint_type: str or ~azure.mgmt.iotoperations.models.EndpointType - :ivar data_explorer_settings: Azure Data Explorer endpoint. - :vartype data_explorer_settings: ~azure.mgmt.iotoperations.models.DataflowEndpointDataExplorer - :ivar data_lake_storage_settings: Azure Data Lake endpoint. - :vartype data_lake_storage_settings: - ~azure.mgmt.iotoperations.models.DataflowEndpointDataLakeStorage - :ivar fabric_one_lake_settings: Microsoft Fabric endpoint. - :vartype fabric_one_lake_settings: - ~azure.mgmt.iotoperations.models.DataflowEndpointFabricOneLake - :ivar kafka_settings: Kafka endpoint. - :vartype kafka_settings: ~azure.mgmt.iotoperations.models.DataflowEndpointKafka - :ivar local_storage_settings: Local persistent volume endpoint. - :vartype local_storage_settings: ~azure.mgmt.iotoperations.models.DataflowEndpointLocalStorage - :ivar mqtt_settings: Broker endpoint. - :vartype mqtt_settings: ~azure.mgmt.iotoperations.models.DataflowEndpointMqtt - :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", - "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". - :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState - """ - - endpoint_type: Union[str, "_models.EndpointType"] = rest_field(name="endpointType") - """Endpoint Type. Required. 
Known values are: \"DataExplorer\", \"DataLakeStorage\", - \"FabricOneLake\", \"Kafka\", \"LocalStorage\", and \"Mqtt\".""" - data_explorer_settings: Optional["_models.DataflowEndpointDataExplorer"] = rest_field(name="dataExplorerSettings") - """Azure Data Explorer endpoint.""" - data_lake_storage_settings: Optional["_models.DataflowEndpointDataLakeStorage"] = rest_field( - name="dataLakeStorageSettings" - ) - """Azure Data Lake endpoint.""" - fabric_one_lake_settings: Optional["_models.DataflowEndpointFabricOneLake"] = rest_field( - name="fabricOneLakeSettings" - ) - """Microsoft Fabric endpoint.""" - kafka_settings: Optional["_models.DataflowEndpointKafka"] = rest_field(name="kafkaSettings") - """Kafka endpoint.""" - local_storage_settings: Optional["_models.DataflowEndpointLocalStorage"] = rest_field(name="localStorageSettings") - """Local persistent volume endpoint.""" - mqtt_settings: Optional["_models.DataflowEndpointMqtt"] = rest_field(name="mqttSettings") - """Broker endpoint.""" provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( name="provisioningState", visibility=["read"] ) """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" + aio_metadata: Optional["_models.AkriDiscoveryHandlerAioMetadata"] = rest_field( + name="aioMetadata", visibility=["read", "create", "update", "delete", "query"] + ) + """Metadata about AIO.""" + additional_configuration: Optional[Dict[str, str]] = rest_field( + name="additionalConfiguration", visibility=["read", "create", "update", "delete", "query"] + ) + """Additional configuration for the AkriDiscoveryHandler.""" + diagnostics: Optional["_models.AkriDiscoveryHandlerDiagnostics"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Diagnostics settings for the AkriDiscoveryHandler.""" + mode: Optional[Union[str, "_models.OperationalMode"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Mode of the AkriDiscoveryHandler. Known values are: \"Enabled\" and \"Disabled\".""" + discoverable_device_endpoint_types: List["_models.AkriDiscoveryHandlerDiscoverableDeviceEndpointType"] = rest_field( + name="discoverableDeviceEndpointTypes", visibility=["read", "create", "update", "delete", "query"] + ) + """Device inbound endpoint types. Required.""" + mqtt_connection_configuration: Optional["_models.AkriConnectorsMqttConnectionConfiguration"] = rest_field( + name="mqttConnectionConfiguration", visibility=["read", "create", "update", "delete", "query"] + ) + """Mqtt connection configuration settings.""" + image_configuration: "_models.AkriDiscoveryHandlerImageConfiguration" = rest_field( + name="imageConfiguration", visibility=["read", "create", "update", "delete", "query"] + ) + """The image configuration for the AkriDiscoveryHandler. Required.""" + schedule: "_models.AkriDiscoveryHandlerSchedule" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Schedule for the AkriDiscoveryHandler. 
Required.""" + secrets: Optional[List["_models.AkriConnectorsSecret"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Secrets that will be mounted onto discovery handler.""" @overload def __init__( self, *, - endpoint_type: Union[str, "_models.EndpointType"], - data_explorer_settings: Optional["_models.DataflowEndpointDataExplorer"] = None, - data_lake_storage_settings: Optional["_models.DataflowEndpointDataLakeStorage"] = None, - fabric_one_lake_settings: Optional["_models.DataflowEndpointFabricOneLake"] = None, - kafka_settings: Optional["_models.DataflowEndpointKafka"] = None, - local_storage_settings: Optional["_models.DataflowEndpointLocalStorage"] = None, - mqtt_settings: Optional["_models.DataflowEndpointMqtt"] = None, + discoverable_device_endpoint_types: List["_models.AkriDiscoveryHandlerDiscoverableDeviceEndpointType"], + image_configuration: "_models.AkriDiscoveryHandlerImageConfiguration", + schedule: "_models.AkriDiscoveryHandlerSchedule", + aio_metadata: Optional["_models.AkriDiscoveryHandlerAioMetadata"] = None, + additional_configuration: Optional[Dict[str, str]] = None, + diagnostics: Optional["_models.AkriDiscoveryHandlerDiagnostics"] = None, + mode: Optional[Union[str, "_models.OperationalMode"]] = None, + mqtt_connection_configuration: Optional["_models.AkriConnectorsMqttConnectionConfiguration"] = None, + secrets: Optional[List["_models.AkriConnectorsSecret"]] = None, ) -> None: ... @overload @@ -2507,14 +2236,11 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowEndpointResource(ProxyResource): - """Instance dataflowEndpoint resource. - - Readonly variables are only populated by the server, and will be ignored when sending a request. - +class AkriDiscoveryHandlerResource(ProxyResource): + """AkriDiscoveryHandler resource. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -2525,22 +2251,26 @@ class DataflowEndpointResource(ProxyResource): information. :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData :ivar properties: The resource-specific properties for this resource. - :vartype properties: ~azure.mgmt.iotoperations.models.DataflowEndpointProperties - :ivar extended_location: Edge location of the resource. Required. + :vartype properties: ~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerProperties + :ivar extended_location: Edge location of the resource. :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation """ - properties: Optional["_models.DataflowEndpointProperties"] = rest_field() + properties: Optional["_models.AkriDiscoveryHandlerProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) """The resource-specific properties for this resource.""" - extended_location: "_models.ExtendedLocation" = rest_field(name="extendedLocation", visibility=["read", "create"]) - """Edge location of the resource. 
Required.""" + extended_location: Optional["_models.ExtendedLocation"] = rest_field( + name="extendedLocation", visibility=["read", "create"] + ) + """Edge location of the resource.""" @overload def __init__( self, *, - extended_location: "_models.ExtendedLocation", - properties: Optional["_models.DataflowEndpointProperties"] = None, + properties: Optional["_models.AkriDiscoveryHandlerProperties"] = None, + extended_location: Optional["_models.ExtendedLocation"] = None, ) -> None: ... @overload @@ -2554,50 +2284,30 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowOperation(_model_base.Model): - """Dataflow Operation properties. NOTE - One only method is allowed to be used for one entry. +class AkriDiscoveryHandlerSchedule(_Model): + """AkriDiscoveryHandlerSchedule properties. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AkriDiscoveryHandlerScheduleContinuous, AkriDiscoveryHandlerScheduleCron, + AkriDiscoveryHandlerScheduleRunOnce - :ivar operation_type: Type of operation. Required. Known values are: "Source", "Destination", - and "BuiltInTransformation". - :vartype operation_type: str or ~azure.mgmt.iotoperations.models.OperationType - :ivar name: Optional user provided name of the transformation. - :vartype name: str - :ivar source_settings: Source configuration. - :vartype source_settings: ~azure.mgmt.iotoperations.models.DataflowSourceOperationSettings - :ivar built_in_transformation_settings: Built In Transformation configuration. - :vartype built_in_transformation_settings: - ~azure.mgmt.iotoperations.models.DataflowBuiltInTransformationSettings - :ivar destination_settings: Destination configuration. - :vartype destination_settings: - ~azure.mgmt.iotoperations.models.DataflowDestinationOperationSettings + :ivar schedule_type: Schedule type. Required. Known values are: "Cron", "RunOnce", and + "Continuous". + :vartype schedule_type: str or + ~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerScheduleType """ - operation_type: Union[str, "_models.OperationType"] = rest_field(name="operationType") - """Type of operation. Required. Known values are: \"Source\", \"Destination\", and - \"BuiltInTransformation\".""" - name: Optional[str] = rest_field() - """Optional user provided name of the transformation.""" - source_settings: Optional["_models.DataflowSourceOperationSettings"] = rest_field(name="sourceSettings") - """Source configuration.""" - built_in_transformation_settings: Optional["_models.DataflowBuiltInTransformationSettings"] = rest_field( - name="builtInTransformationSettings" - ) - """Built In Transformation configuration.""" - destination_settings: Optional["_models.DataflowDestinationOperationSettings"] = rest_field( - name="destinationSettings" + __mapping__: Dict[str, _Model] = {} + schedule_type: str = rest_discriminator( + name="scheduleType", visibility=["read", "create", "update", "delete", "query"] ) - """Destination configuration.""" + """Schedule type. Required. Known values are: \"Cron\", \"RunOnce\", and \"Continuous\".""" @overload def __init__( self, *, - operation_type: Union[str, "_models.OperationType"], - name: Optional[str] = None, - source_settings: Optional["_models.DataflowSourceOperationSettings"] = None, - built_in_transformation_settings: Optional["_models.DataflowBuiltInTransformationSettings"] = None, - destination_settings: Optional["_models.DataflowDestinationOperationSettings"] = None, + schedule_type: str, ) -> None: ... 
@overload @@ -2611,38 +2321,25 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowProfileProperties(_model_base.Model): - """DataflowProfile Resource properties. - - Readonly variables are only populated by the server, and will be ignored when sending a request. +class AkriDiscoveryHandlerScheduleContinuous(AkriDiscoveryHandlerSchedule, discriminator="Continuous"): + """AkriDiscoveryHandlerScheduleContinuous properties. - :ivar diagnostics: Spec defines the desired identities of NBC diagnostics settings. - :vartype diagnostics: ~azure.mgmt.iotoperations.models.ProfileDiagnostics - :ivar instance_count: To manually scale the dataflow profile, specify the maximum number of - instances you want to run. - :vartype instance_count: int - :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", - "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". - :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState + :ivar schedule_type: Schedule type. Required. The discovery handler should run continuously. + :vartype schedule_type: str or ~azure.mgmt.iotoperations.models.CONTINUOUS + :ivar continuous: The time to run the discovery handler. Required. + :vartype continuous: str """ - diagnostics: Optional["_models.ProfileDiagnostics"] = rest_field() - """Spec defines the desired identities of NBC diagnostics settings.""" - instance_count: Optional[int] = rest_field(name="instanceCount") - """To manually scale the dataflow profile, specify the maximum number of instances you want to - run.""" - provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( - name="provisioningState", visibility=["read"] - ) - """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", - \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" + schedule_type: Literal[AkriDiscoveryHandlerScheduleType.CONTINUOUS] = rest_discriminator(name="scheduleType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Schedule type. Required. The discovery handler should run continuously.""" + continuous: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The time to run the discovery handler. Required.""" @overload def __init__( self, *, - diagnostics: Optional["_models.ProfileDiagnostics"] = None, - instance_count: Optional[int] = None, + continuous: str, ) -> None: ... @overload @@ -2653,43 +2350,28 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - + super().__init__(*args, schedule_type=AkriDiscoveryHandlerScheduleType.CONTINUOUS, **kwargs) -class DataflowProfileResource(ProxyResource): - """Instance dataflowProfile resource. - Readonly variables are only populated by the server, and will be ignored when sending a request. +class AkriDiscoveryHandlerScheduleCron(AkriDiscoveryHandlerSchedule, discriminator="Cron"): + """AkriDiscoveryHandlerScheduleCron properties. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. 
"Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData - :ivar properties: The resource-specific properties for this resource. - :vartype properties: ~azure.mgmt.iotoperations.models.DataflowProfileProperties - :ivar extended_location: Edge location of the resource. Required. - :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation + :ivar schedule_type: Schedule type. Required. The schedule is a cron expression. + :vartype schedule_type: str or ~azure.mgmt.iotoperations.models.CRON + :ivar cron: The cron expression for the schedule. Required. + :vartype cron: str """ - properties: Optional["_models.DataflowProfileProperties"] = rest_field() - """The resource-specific properties for this resource.""" - extended_location: "_models.ExtendedLocation" = rest_field(name="extendedLocation", visibility=["read", "create"]) - """Edge location of the resource. Required.""" + schedule_type: Literal[AkriDiscoveryHandlerScheduleType.CRON] = rest_discriminator(name="scheduleType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Schedule type. Required. The schedule is a cron expression.""" + cron: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The cron expression for the schedule. Required.""" @overload def __init__( self, *, - extended_location: "_models.ExtendedLocation", - properties: Optional["_models.DataflowProfileProperties"] = None, + cron: str, ) -> None: ... @overload @@ -2700,44 +2382,28 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - + super().__init__(*args, schedule_type=AkriDiscoveryHandlerScheduleType.CRON, **kwargs) -class DataflowProperties(_model_base.Model): - """Dataflow Resource properties. - - Readonly variables are only populated by the server, and will be ignored when sending a request. +class AkriDiscoveryHandlerScheduleRunOnce(AkriDiscoveryHandlerSchedule, discriminator="RunOnce"): + """AkriDiscoveryHandlerScheduleRunOnce properties. - :ivar mode: Mode for Dataflow. Optional; defaults to Enabled. Known values are: "Enabled" and - "Disabled". - :vartype mode: str or ~azure.mgmt.iotoperations.models.OperationalMode - :ivar operations: List of operations including source and destination references as well as - transformation. Required. - :vartype operations: list[~azure.mgmt.iotoperations.models.DataflowOperation] - :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", - "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". - :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState + :ivar schedule_type: Schedule type. Required. The discovery handler should run once. + :vartype schedule_type: str or ~azure.mgmt.iotoperations.models.RUN_ONCE + :ivar run_once: The time to run the discovery handler. Required. + :vartype run_once: str """ - mode: Optional[Union[str, "_models.OperationalMode"]] = rest_field() - """Mode for Dataflow. Optional; defaults to Enabled. Known values are: \"Enabled\" and - \"Disabled\".""" - operations: List["_models.DataflowOperation"] = rest_field() - """List of operations including source and destination references as well as transformation. 
- Required.""" - provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( - name="provisioningState", visibility=["read"] - ) - """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", - \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" + schedule_type: Literal[AkriDiscoveryHandlerScheduleType.RUN_ONCE] = rest_discriminator(name="scheduleType", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Schedule type. Required. The discovery handler should run once.""" + run_once: str = rest_field(name="runOnce", visibility=["read", "create", "update", "delete", "query"]) + """The time to run the discovery handler. Required.""" @overload def __init__( self, *, - operations: List["_models.DataflowOperation"], - mode: Optional[Union[str, "_models.OperationalMode"]] = None, + run_once: str, ) -> None: ... @overload @@ -2748,43 +2414,36 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - + super().__init__(*args, schedule_type=AkriDiscoveryHandlerScheduleType.RUN_ONCE, **kwargs) -class DataflowResource(ProxyResource): - """Instance dataflowProfile dataflow resource. - - Readonly variables are only populated by the server, and will be ignored when sending a request. +class AuthorizationConfig(_Model): + """Broker AuthorizationConfig properties. - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData - :ivar properties: The resource-specific properties for this resource. - :vartype properties: ~azure.mgmt.iotoperations.models.DataflowProperties - :ivar extended_location: Edge location of the resource. Required. - :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation + :ivar cache: Enable caching of the authorization rules. Known values are: "Enabled" and + "Disabled". + :vartype cache: str or ~azure.mgmt.iotoperations.models.OperationalMode + :ivar rules: The authorization rules to follow. If no rule is set, but Authorization Resource + is used that would mean DenyAll. + :vartype rules: list[~azure.mgmt.iotoperations.models.AuthorizationRule] """ - properties: Optional["_models.DataflowProperties"] = rest_field() - """The resource-specific properties for this resource.""" - extended_location: "_models.ExtendedLocation" = rest_field(name="extendedLocation", visibility=["read", "create"]) - """Edge location of the resource. Required.""" + cache: Optional[Union[str, "_models.OperationalMode"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Enable caching of the authorization rules. Known values are: \"Enabled\" and \"Disabled\".""" + rules: Optional[List["_models.AuthorizationRule"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The authorization rules to follow. 
If no rule is set, but Authorization Resource is used that + would mean DenyAll.""" @overload def __init__( self, *, - extended_location: "_models.ExtendedLocation", - properties: Optional["_models.DataflowProperties"] = None, + cache: Optional[Union[str, "_models.OperationalMode"]] = None, + rules: Optional[List["_models.AuthorizationRule"]] = None, ) -> None: ... @overload @@ -2798,51 +2457,35 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DataflowSourceOperationSettings(_model_base.Model): - """Dataflow Source Operation properties. - +class AuthorizationRule(_Model): + """AuthorizationConfig Rule Properties. - :ivar endpoint_ref: Reference to the Dataflow Endpoint resource. Can only be of Broker and - Kafka type. Required. - :vartype endpoint_ref: str - :ivar asset_ref: Reference to the resource in Azure Device Registry where the data in the - endpoint originates from. - :vartype asset_ref: str - :ivar serialization_format: Content is a JSON Schema. Allowed: JSON Schema/draft-7. "Json" - :vartype serialization_format: str or - ~azure.mgmt.iotoperations.models.SourceSerializationFormat - :ivar schema_ref: Schema CR reference. Data will be deserialized according to the schema, and - dropped if it doesn't match. - :vartype schema_ref: str - :ivar data_sources: List of source locations. Can be Broker or Kafka topics. Supports wildcards - # and +. Required. - :vartype data_sources: list[str] + :ivar broker_resources: Give access to Broker methods and topics. Required. + :vartype broker_resources: list[~azure.mgmt.iotoperations.models.BrokerResourceRule] + :ivar principals: Give access to clients based on the following properties. Required. + :vartype principals: ~azure.mgmt.iotoperations.models.PrincipalDefinition + :ivar state_store_resources: Give access to state store resources. + :vartype state_store_resources: list[~azure.mgmt.iotoperations.models.StateStoreResourceRule] """ - endpoint_ref: str = rest_field(name="endpointRef") - """Reference to the Dataflow Endpoint resource. Can only be of Broker and Kafka type. Required.""" - asset_ref: Optional[str] = rest_field(name="assetRef") - """Reference to the resource in Azure Device Registry where the data in the endpoint originates - from.""" - serialization_format: Optional[Union[str, "_models.SourceSerializationFormat"]] = rest_field( - name="serializationFormat" + broker_resources: List["_models.BrokerResourceRule"] = rest_field( + name="brokerResources", visibility=["read", "create", "update", "delete", "query"] ) - """Content is a JSON Schema. Allowed: JSON Schema/draft-7. \"Json\"""" - schema_ref: Optional[str] = rest_field(name="schemaRef") - """Schema CR reference. Data will be deserialized according to the schema, and dropped if it - doesn't match.""" - data_sources: List[str] = rest_field(name="dataSources") - """List of source locations. Can be Broker or Kafka topics. Supports wildcards # and +. Required.""" + """Give access to Broker methods and topics. Required.""" + principals: "_models.PrincipalDefinition" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Give access to clients based on the following properties. 
Required.""" + state_store_resources: Optional[List["_models.StateStoreResourceRule"]] = rest_field( + name="stateStoreResources", visibility=["read", "create", "update", "delete", "query"] + ) + """Give access to state store resources.""" @overload def __init__( self, *, - endpoint_ref: str, - data_sources: List[str], - asset_ref: Optional[str] = None, - serialization_format: Optional[Union[str, "_models.SourceSerializationFormat"]] = None, - schema_ref: Optional[str] = None, + broker_resources: List["_models.BrokerResourceRule"], + principals: "_models.PrincipalDefinition", + state_store_resources: Optional[List["_models.StateStoreResourceRule"]] = None, ) -> None: ... @overload @@ -2856,21 +2499,21 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DiagnosticsLogs(_model_base.Model): - """Diagnostic Log properties. +class AzureDeviceRegistryNamespaceRef(_Model): + """Azure Device Registry Namespace reference. - :ivar level: The log level. Examples - 'debug', 'info', 'warn', 'error', 'trace'. - :vartype level: str + :ivar resource_id: The resource ID of the Azure Device Registry Namespace. Required. + :vartype resource_id: str """ - level: Optional[str] = rest_field() - """The log level. Examples - 'debug', 'info', 'warn', 'error', 'trace'.""" + resource_id: str = rest_field(name="resourceId", visibility=["read", "create", "update", "delete", "query"]) + """The resource ID of the Azure Device Registry Namespace. Required.""" @overload def __init__( self, *, - level: Optional[str] = None, + resource_id: str, ) -> None: ... @overload @@ -2884,50 +2527,71 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class DiskBackedMessageBuffer(_model_base.Model): - """DiskBackedMessageBuffer properties. +class BackendChain(_Model): + """Desired properties of the backend instances of the broker. + :ivar partitions: The desired number of physical backend partitions. Required. + :vartype partitions: int + :ivar redundancy_factor: The desired numbers of backend replicas (pods) in a physical + partition. Required. + :vartype redundancy_factor: int + :ivar workers: Number of logical backend workers per replica (pod). + :vartype workers: int + """ - :ivar max_size: The max size of the message buffer on disk. If a PVC template is specified - using one of ephemeralVolumeClaimSpec or persistentVolumeClaimSpec, then this size is used as - the request and limit sizes of that template. If neither ephemeralVolumeClaimSpec nor - persistentVolumeClaimSpec are specified, then an emptyDir volume is mounted with this size as - its limit. See https://kubernetes.io/docs/concepts/storage/volumes/#emptydir for details. - Required. - :vartype max_size: str - :ivar ephemeral_volume_claim_spec: Use the specified persistent volume claim template to mount - a "generic ephemeral volume" for the message buffer. See - https://kubernetes.io/docs/concepts/storage/ephemeral-volumes/#generic-ephemeral-volumes for - details. - :vartype ephemeral_volume_claim_spec: ~azure.mgmt.iotoperations.models.VolumeClaimSpec - :ivar persistent_volume_claim_spec: Use the specified persistent volume claim template to mount - a persistent volume for the message buffer. - :vartype persistent_volume_claim_spec: ~azure.mgmt.iotoperations.models.VolumeClaimSpec + partitions: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The desired number of physical backend partitions. 
Required.""" + redundancy_factor: int = rest_field( + name="redundancyFactor", visibility=["read", "create", "update", "delete", "query"] + ) + """The desired numbers of backend replicas (pods) in a physical partition. Required.""" + workers: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Number of logical backend workers per replica (pod).""" + + @overload + def __init__( + self, + *, + partitions: int, + redundancy_factor: int, + workers: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BatchingConfiguration(_Model): + """Batching configuration. + + :ivar latency_seconds: Batching latency in seconds. + :vartype latency_seconds: int + :ivar max_messages: Maximum number of messages in a batch. + :vartype max_messages: int """ - max_size: str = rest_field(name="maxSize") - """The max size of the message buffer on disk. If a PVC template is specified using one of - ephemeralVolumeClaimSpec or persistentVolumeClaimSpec, then this size is used as the request - and limit sizes of that template. If neither ephemeralVolumeClaimSpec nor - persistentVolumeClaimSpec are specified, then an emptyDir volume is mounted with this size as - its limit. See https://kubernetes.io/docs/concepts/storage/volumes/#emptydir for details. - Required.""" - ephemeral_volume_claim_spec: Optional["_models.VolumeClaimSpec"] = rest_field(name="ephemeralVolumeClaimSpec") - """Use the specified persistent volume claim template to mount a \"generic ephemeral volume\" for - the message buffer. See - https://kubernetes.io/docs/concepts/storage/ephemeral-volumes/#generic-ephemeral-volumes for - details.""" - persistent_volume_claim_spec: Optional["_models.VolumeClaimSpec"] = rest_field(name="persistentVolumeClaimSpec") - """Use the specified persistent volume claim template to mount a persistent volume for the message - buffer.""" + latency_seconds: Optional[int] = rest_field( + name="latencySeconds", visibility=["read", "create", "update", "delete", "query"] + ) + """Batching latency in seconds.""" + max_messages: Optional[int] = rest_field( + name="maxMessages", visibility=["read", "create", "update", "delete", "query"] + ) + """Maximum number of messages in a batch.""" @overload def __init__( self, *, - max_size: str, - ephemeral_volume_claim_spec: Optional["_models.VolumeClaimSpec"] = None, - persistent_volume_claim_spec: Optional["_models.VolumeClaimSpec"] = None, + latency_seconds: Optional[int] = None, + max_messages: Optional[int] = None, ) -> None: ... @overload @@ -2941,70 +2605,5012 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ErrorAdditionalInfo(_model_base.Model): - """The resource management error additional info. +class BrokerAuthenticationProperties(_Model): + """BrokerAuthentication Resource properties. + + :ivar authentication_methods: Defines a set of Broker authentication methods to be used on + ``BrokerListeners``. For each array element one authenticator type supported. Required. + :vartype authentication_methods: + list[~azure.mgmt.iotoperations.models.BrokerAuthenticatorMethods] + :ivar provisioning_state: The status of the last operation. 
Known values are: "Succeeded", + "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". + :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState + """ - Readonly variables are only populated by the server, and will be ignored when sending a request. + authentication_methods: List["_models.BrokerAuthenticatorMethods"] = rest_field( + name="authenticationMethods", visibility=["read", "create", "update", "delete", "query"] + ) + """Defines a set of Broker authentication methods to be used on ``BrokerListeners``. For each + array element one authenticator type supported. Required.""" + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" - :ivar type: The additional info type. + @overload + def __init__( + self, + *, + authentication_methods: List["_models.BrokerAuthenticatorMethods"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerAuthenticationResource(ProxyResource): + """Instance broker authentication resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". :vartype type: str - :ivar info: The additional info. - :vartype info: any + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.iotoperations.models.BrokerAuthenticationProperties + :ivar extended_location: Edge location of the resource. + :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation """ - type: Optional[str] = rest_field(visibility=["read"]) - """The additional info type.""" - info: Optional[Any] = rest_field(visibility=["read"]) - """The additional info.""" + properties: Optional["_models.BrokerAuthenticationProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + extended_location: Optional["_models.ExtendedLocation"] = rest_field( + name="extendedLocation", visibility=["read", "create"] + ) + """Edge location of the resource.""" + @overload + def __init__( + self, + *, + properties: Optional["_models.BrokerAuthenticationProperties"] = None, + extended_location: Optional["_models.ExtendedLocation"] = None, + ) -> None: ... -class ErrorDetail(_model_base.Model): - """The error detail. + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ - Readonly variables are only populated by the server, and will be ignored when sending a request. 
+ def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) - :ivar code: The error code. - :vartype code: str - :ivar message: The error message. - :vartype message: str - :ivar target: The error target. - :vartype target: str - :ivar details: The error details. - :vartype details: list[~azure.mgmt.iotoperations.models.ErrorDetail] - :ivar additional_info: The error additional info. - :vartype additional_info: list[~azure.mgmt.iotoperations.models.ErrorAdditionalInfo] + +class BrokerAuthenticatorCustomAuth(_Model): + """Custom Authentication properties. + + :ivar x509: X509 Custom Auth type details. Required. + :vartype x509: ~azure.mgmt.iotoperations.models.X509ManualCertificate + """ + + x509: "_models.X509ManualCertificate" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """X509 Custom Auth type details. Required.""" + + @overload + def __init__( + self, + *, + x509: "_models.X509ManualCertificate", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerAuthenticatorMethodCustom(_Model): + """Custom method for BrokerAuthentication. + + :ivar auth: Optional authentication needed for authenticating with the custom authentication + server. + :vartype auth: ~azure.mgmt.iotoperations.models.BrokerAuthenticatorCustomAuth + :ivar ca_cert_config_map: Optional CA certificate for validating the custom authentication + server's certificate. + :vartype ca_cert_config_map: str + :ivar endpoint: Endpoint of the custom authentication server. Must be an HTTPS endpoint. + Required. + :vartype endpoint: str + :ivar headers: Additional HTTP headers to pass to the custom authentication server. + :vartype headers: dict[str, str] + """ + + auth: Optional["_models.BrokerAuthenticatorCustomAuth"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Optional authentication needed for authenticating with the custom authentication server.""" + ca_cert_config_map: Optional[str] = rest_field( + name="caCertConfigMap", visibility=["read", "create", "update", "delete", "query"] + ) + """Optional CA certificate for validating the custom authentication server's certificate.""" + endpoint: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Endpoint of the custom authentication server. Must be an HTTPS endpoint. Required.""" + headers: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Additional HTTP headers to pass to the custom authentication server.""" + + @overload + def __init__( + self, + *, + endpoint: str, + auth: Optional["_models.BrokerAuthenticatorCustomAuth"] = None, + ca_cert_config_map: Optional[str] = None, + headers: Optional[Dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerAuthenticatorMethods(_Model): + """Set of broker authentication policies. Only one method is supported for each entry. + + :ivar method: Custom authentication configuration. Required. Known values are: "Custom", + "ServiceAccountToken", and "X509". 
+ :vartype method: str or ~azure.mgmt.iotoperations.models.BrokerAuthenticationMethod + :ivar custom_settings: Custom authentication configuration. + :vartype custom_settings: ~azure.mgmt.iotoperations.models.BrokerAuthenticatorMethodCustom + :ivar service_account_token_settings: ServiceAccountToken authentication configuration. + :vartype service_account_token_settings: + ~azure.mgmt.iotoperations.models.BrokerAuthenticatorMethodSat + :ivar x509_settings: X.509 authentication configuration. + :vartype x509_settings: ~azure.mgmt.iotoperations.models.BrokerAuthenticatorMethodX509 + """ + + method: Union[str, "_models.BrokerAuthenticationMethod"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Custom authentication configuration. Required. Known values are: \"Custom\", + \"ServiceAccountToken\", and \"X509\".""" + custom_settings: Optional["_models.BrokerAuthenticatorMethodCustom"] = rest_field( + name="customSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Custom authentication configuration.""" + service_account_token_settings: Optional["_models.BrokerAuthenticatorMethodSat"] = rest_field( + name="serviceAccountTokenSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """ServiceAccountToken authentication configuration.""" + x509_settings: Optional["_models.BrokerAuthenticatorMethodX509"] = rest_field( + name="x509Settings", visibility=["read", "create", "update", "delete", "query"] + ) + """X.509 authentication configuration.""" + + @overload + def __init__( + self, + *, + method: Union[str, "_models.BrokerAuthenticationMethod"], + custom_settings: Optional["_models.BrokerAuthenticatorMethodCustom"] = None, + service_account_token_settings: Optional["_models.BrokerAuthenticatorMethodSat"] = None, + x509_settings: Optional["_models.BrokerAuthenticatorMethodX509"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerAuthenticatorMethodSat(_Model): + """Service Account Token for BrokerAuthentication. + + :ivar audiences: List of allowed audience. Required. + :vartype audiences: list[str] + """ + + audiences: List[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of allowed audience. Required.""" + + @overload + def __init__( + self, + *, + audiences: List[str], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerAuthenticatorMethodX509(_Model): + """X509 for BrokerAuthentication. + + :ivar authorization_attributes: X509 authorization attributes properties. + :vartype authorization_attributes: dict[str, + ~azure.mgmt.iotoperations.models.BrokerAuthenticatorMethodX509Attributes] + :ivar trusted_client_ca_cert: Name of the trusted client ca cert resource. + :vartype trusted_client_ca_cert: str + :ivar additional_validation: X509 authentication attributes properties. Known values are: + "None" and "AzureDeviceRegistry". 
+ :vartype additional_validation: str or + ~azure.mgmt.iotoperations.models.BrokerAuthenticatorValidationMethods + """ + + authorization_attributes: Optional[Dict[str, "_models.BrokerAuthenticatorMethodX509Attributes"]] = rest_field( + name="authorizationAttributes", visibility=["read", "create", "update", "delete", "query"] + ) + """X509 authorization attributes properties.""" + trusted_client_ca_cert: Optional[str] = rest_field( + name="trustedClientCaCert", visibility=["read", "create", "update", "delete", "query"] + ) + """Name of the trusted client ca cert resource.""" + additional_validation: Optional[Union[str, "_models.BrokerAuthenticatorValidationMethods"]] = rest_field( + name="additionalValidation", visibility=["read", "create", "update", "delete", "query"] + ) + """X509 authentication attributes properties. Known values are: \"None\" and + \"AzureDeviceRegistry\".""" + + @overload + def __init__( + self, + *, + authorization_attributes: Optional[Dict[str, "_models.BrokerAuthenticatorMethodX509Attributes"]] = None, + trusted_client_ca_cert: Optional[str] = None, + additional_validation: Optional[Union[str, "_models.BrokerAuthenticatorValidationMethods"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerAuthenticatorMethodX509Attributes(_Model): + """BrokerAuthenticatorMethodX509Attributes properties. + + :ivar attributes: Attributes object. Required. + :vartype attributes: dict[str, str] + :ivar subject: Subject of the X509 attribute. Required. + :vartype subject: str + """ + + attributes: Dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Attributes object. Required.""" + subject: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Subject of the X509 attribute. Required.""" + + @overload + def __init__( + self, + *, + attributes: Dict[str, str], + subject: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerAuthorizationProperties(_Model): + """BrokerAuthorization Resource properties. + + :ivar authorization_policies: The list of authorization policies supported by the Authorization + Resource. Required. + :vartype authorization_policies: ~azure.mgmt.iotoperations.models.AuthorizationConfig + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". + :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState + """ + + authorization_policies: "_models.AuthorizationConfig" = rest_field( + name="authorizationPolicies", visibility=["read", "create", "update", "delete", "query"] + ) + """The list of authorization policies supported by the Authorization Resource. Required.""" + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. 
Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" + + @overload + def __init__( + self, + *, + authorization_policies: "_models.AuthorizationConfig", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerAuthorizationResource(ProxyResource): + """Instance broker authorizations resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.iotoperations.models.BrokerAuthorizationProperties + :ivar extended_location: Edge location of the resource. + :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation + """ + + properties: Optional["_models.BrokerAuthorizationProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + extended_location: Optional["_models.ExtendedLocation"] = rest_field( + name="extendedLocation", visibility=["read", "create"] + ) + """Edge location of the resource.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.BrokerAuthorizationProperties"] = None, + extended_location: Optional["_models.ExtendedLocation"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerDiagnostics(_Model): + """Broker Diagnostic Setting properties. + + :ivar logs: Diagnostic log settings for the resource. + :vartype logs: ~azure.mgmt.iotoperations.models.DiagnosticsLogs + :ivar metrics: The metrics settings for the resource. + :vartype metrics: ~azure.mgmt.iotoperations.models.Metrics + :ivar self_check: The self check properties. + :vartype self_check: ~azure.mgmt.iotoperations.models.SelfCheck + :ivar traces: The trace properties. 
+ :vartype traces: ~azure.mgmt.iotoperations.models.Traces + """ + + logs: Optional["_models.DiagnosticsLogs"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Diagnostic log settings for the resource.""" + metrics: Optional["_models.Metrics"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The metrics settings for the resource.""" + self_check: Optional["_models.SelfCheck"] = rest_field( + name="selfCheck", visibility=["read", "create", "update", "delete", "query"] + ) + """The self check properties.""" + traces: Optional["_models.Traces"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The trace properties.""" + + @overload + def __init__( + self, + *, + logs: Optional["_models.DiagnosticsLogs"] = None, + metrics: Optional["_models.Metrics"] = None, + self_check: Optional["_models.SelfCheck"] = None, + traces: Optional["_models.Traces"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerListenerProperties(_Model): + """Defines a Broker listener. A listener is a collection of ports on which the broker accepts + connections from clients. + + :ivar service_name: Kubernetes Service name of this listener. + :vartype service_name: str + :ivar ports: Ports on which this listener accepts client connections. Required. + :vartype ports: list[~azure.mgmt.iotoperations.models.ListenerPort] + :ivar service_type: Kubernetes Service type of this listener. Known values are: "ClusterIp", + "LoadBalancer", and "NodePort". + :vartype service_type: str or ~azure.mgmt.iotoperations.models.ServiceType + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". + :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState + """ + + service_name: Optional[str] = rest_field( + name="serviceName", visibility=["read", "create", "update", "delete", "query"] + ) + """Kubernetes Service name of this listener.""" + ports: List["_models.ListenerPort"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Ports on which this listener accepts client connections. Required.""" + service_type: Optional[Union[str, "_models.ServiceType"]] = rest_field( + name="serviceType", visibility=["read", "create", "update", "delete", "query"] + ) + """Kubernetes Service type of this listener. Known values are: \"ClusterIp\", \"LoadBalancer\", + and \"NodePort\".""" + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" + + @overload + def __init__( + self, + *, + ports: List["_models.ListenerPort"], + service_name: Optional[str] = None, + service_type: Optional[Union[str, "_models.ServiceType"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class BrokerListenerResource(ProxyResource):
+    """Instance broker resource.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+     "Microsoft.Storage/storageAccounts".
+    :vartype type: str
+    :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy
+     information.
+    :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData
+    :ivar properties: The resource-specific properties for this resource.
+    :vartype properties: ~azure.mgmt.iotoperations.models.BrokerListenerProperties
+    :ivar extended_location: Edge location of the resource.
+    :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation
+    """
+
+    properties: Optional["_models.BrokerListenerProperties"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The resource-specific properties for this resource."""
+    extended_location: Optional["_models.ExtendedLocation"] = rest_field(
+        name="extendedLocation", visibility=["read", "create"]
+    )
+    """Edge location of the resource."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        properties: Optional["_models.BrokerListenerProperties"] = None,
+        extended_location: Optional["_models.ExtendedLocation"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class BrokerPersistence(_Model):
+    """Disk persistence configuration.
+
+    When persistence is enabled, certain items (non-performance-critical data) selected for
+    persistence will reside only on disk. Below are the affected items:
+
+
+
+    * Retained messages will be stored on disk only.
+    * WILL messages will be stored on disk only.
+    * DSS key/value pairs will be stored on disk only, except for performance-critical items like
+      timed locks, which remain in both disk and memory for improved performance.
+
+    Optional. Everything is in-memory if not set.
+    Note: if configured, all MQTT session states are written to disk.
+
+    :ivar dynamic_settings: Client sets the specified user property key/value in the
+     CONNECT/SUBSCRIBE/PUBLISH.
+     Optionally, if the customer specifies a configurable user property, it will work to enable
+     persistence dynamically. The default user property key is 'aio-persistence' and value 'true'.
+    :vartype dynamic_settings: ~azure.mgmt.iotoperations.models.BrokerPersistenceDynamicSettings
+    :ivar max_size: The max size of the message buffer on disk. If a PVC template is specified
+     using persistentVolumeClaimSpec, then this size is used as the request and limit sizes of that
+     template. If a PVC template isn't specified, then local-path provisioner is requested with this
+     size limit. Required.
+    :vartype max_size: str
+    :ivar persistent_volume_claim_spec: Use the specified persistent volume claim template to mount
+     a persistent volume. Same object as in diskBackedMessageBuffer, but with a limitation that
+     access modes field must be set to ``ReadWriteOncePod``.
+
+     If unset, a default PVC with default properties will be used. Among other things this PVC will
+     use the cluster default storage class, which may or may not be using a local path provisioner.
+     User is opting in to sub-optimal behavior if they leave this unset or set it without the
+     storage class field, and their cluster default is not a local path class.
+    :vartype persistent_volume_claim_spec: ~azure.mgmt.iotoperations.models.VolumeClaimSpec
+    :ivar retain: Controls which topic's retained messages should be persisted to disk.
+    :vartype retain: ~azure.mgmt.iotoperations.models.BrokerRetainMessagesPolicy
+    :ivar state_store: Controls which keys should be persisted to disk for the state store.
+    :vartype state_store: ~azure.mgmt.iotoperations.models.BrokerStateStorePolicy
+    :ivar subscriber_queue: Controls which subscriber message queues should be persisted to disk.
+     Important: to facilitate reconnection, session state metadata are ALWAYS written to disk if any
+     persistence setting is specified, even if this section isn't set.
+    :vartype subscriber_queue: ~azure.mgmt.iotoperations.models.BrokerSubscriberQueuePolicy
+    :ivar encryption: Controls settings related to encryption of the persistence database.
+     Optional, defaults to enabling encryption.
+    :vartype encryption: ~azure.mgmt.iotoperations.models.BrokerPersistenceEncryption
+    """
+
+    dynamic_settings: Optional["_models.BrokerPersistenceDynamicSettings"] = rest_field(
+        name="dynamicSettings", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Client sets the specified user property key/value in the CONNECT/SUBSCRIBE/PUBLISH.
+    Optionally, if the customer specifies a configurable user property, it will work to enable
+    persistence dynamically. The default user property key is 'aio-persistence' and value 'true'."""
+    max_size: str = rest_field(name="maxSize", visibility=["read", "create"])
+    """The max size of the message buffer on disk. If a PVC template is specified using
+    persistentVolumeClaimSpec, then this size is used as the request and limit sizes of that
+    template. If a PVC template isn't specified, then local-path provisioner is requested with this
+    size limit. Required."""
+    persistent_volume_claim_spec: Optional["_models.VolumeClaimSpec"] = rest_field(
+        name="persistentVolumeClaimSpec", visibility=["read", "create"]
+    )
+    """Use the specified persistent volume claim template to mount a persistent volume. Same object as
+    in diskBackedMessageBuffer, but with a limitation that access modes field must be set to
+    ``ReadWriteOncePod``.
+
+    If unset, a default PVC with default properties will be used. Among other things this PVC will
+    use the cluster default storage class, which may or may not be using a local path provisioner.
+ User is opting in to sub-optimal behavior if they leave this unset or set it without the + storage class field, and their cluster default is not a local path class.""" + retain: Optional["_models.BrokerRetainMessagesPolicy"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Controls which topic's retained messages should be persisted to disk.""" + state_store: Optional["_models.BrokerStateStorePolicy"] = rest_field( + name="stateStore", visibility=["read", "create", "update", "delete", "query"] + ) + """Controls which keys should be persisted to disk for the state store.""" + subscriber_queue: Optional["_models.BrokerSubscriberQueuePolicy"] = rest_field( + name="subscriberQueue", visibility=["read", "create", "update", "delete", "query"] + ) + """Controls which subscriber message queues should be persisted to disk. Important: to facilitate + reconnection, session state metadata are ALWAYS written to disk if any persistence setting is + specified, even if this section isn't set.""" + encryption: Optional["_models.BrokerPersistenceEncryption"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Controls settings related to encryption of the persistence database. Optional, defaults to + enabling encryption.""" + + @overload + def __init__( + self, + *, + max_size: str, + dynamic_settings: Optional["_models.BrokerPersistenceDynamicSettings"] = None, + persistent_volume_claim_spec: Optional["_models.VolumeClaimSpec"] = None, + retain: Optional["_models.BrokerRetainMessagesPolicy"] = None, + state_store: Optional["_models.BrokerStateStorePolicy"] = None, + subscriber_queue: Optional["_models.BrokerSubscriberQueuePolicy"] = None, + encryption: Optional["_models.BrokerPersistenceEncryption"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerPersistenceDynamicSettings(_Model): + """Dynamic settings of the persistence. + + :ivar user_property_key: The user property key to enable persistence. Required. + :vartype user_property_key: str + :ivar user_property_value: The user property value to enable persistence. Required. + :vartype user_property_value: str + """ + + user_property_key: str = rest_field( + name="userPropertyKey", visibility=["read", "create", "update", "delete", "query"] + ) + """The user property key to enable persistence. Required.""" + user_property_value: str = rest_field( + name="userPropertyValue", visibility=["read", "create", "update", "delete", "query"] + ) + """The user property value to enable persistence. Required.""" + + @overload + def __init__( + self, + *, + user_property_key: str, + user_property_value: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerPersistenceEncryption(_Model): + """Broker Persistence Encryption properties. + + :ivar mode: Determines if encryption is enabled. Required. Known values are: "Enabled" and + "Disabled". 
+ :vartype mode: str or ~azure.mgmt.iotoperations.models.OperationalMode + """ + + mode: Union[str, "_models.OperationalMode"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Determines if encryption is enabled. Required. Known values are: \"Enabled\" and \"Disabled\".""" + + @overload + def __init__( + self, + *, + mode: Union[str, "_models.OperationalMode"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerProperties(_Model): + """Broker Resource properties. + + :ivar advanced: Advanced settings of Broker. + :vartype advanced: ~azure.mgmt.iotoperations.models.AdvancedSettings + :ivar cardinality: The cardinality details of the broker. + :vartype cardinality: ~azure.mgmt.iotoperations.models.Cardinality + :ivar diagnostics: Spec defines the desired identities of Broker diagnostics settings. + :vartype diagnostics: ~azure.mgmt.iotoperations.models.BrokerDiagnostics + :ivar disk_backed_message_buffer: Settings of Disk Backed Message Buffer. + :vartype disk_backed_message_buffer: ~azure.mgmt.iotoperations.models.DiskBackedMessageBuffer + :ivar generate_resource_limits: This setting controls whether Kubernetes CPU resource limits + are requested. Increasing the number of replicas or workers proportionally increases the amount + of CPU resources requested. If this setting is enabled and there are insufficient CPU + resources, an error will be emitted. + :vartype generate_resource_limits: ~azure.mgmt.iotoperations.models.GenerateResourceLimits + :ivar memory_profile: Memory profile of Broker. Known values are: "Tiny", "Low", "Medium", and + "High". + :vartype memory_profile: str or ~azure.mgmt.iotoperations.models.BrokerMemoryProfile + :ivar persistence: The persistence settings of the Broker. + :vartype persistence: ~azure.mgmt.iotoperations.models.BrokerPersistence + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". + :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState + """ + + advanced: Optional["_models.AdvancedSettings"] = rest_field(visibility=["read", "create"]) + """Advanced settings of Broker.""" + cardinality: Optional["_models.Cardinality"] = rest_field(visibility=["read", "create"]) + """The cardinality details of the broker.""" + diagnostics: Optional["_models.BrokerDiagnostics"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Spec defines the desired identities of Broker diagnostics settings.""" + disk_backed_message_buffer: Optional["_models.DiskBackedMessageBuffer"] = rest_field( + name="diskBackedMessageBuffer", visibility=["read", "create"] + ) + """Settings of Disk Backed Message Buffer.""" + generate_resource_limits: Optional["_models.GenerateResourceLimits"] = rest_field( + name="generateResourceLimits", visibility=["read", "create"] + ) + """This setting controls whether Kubernetes CPU resource limits are requested. Increasing the + number of replicas or workers proportionally increases the amount of CPU resources requested. 
+ If this setting is enabled and there are insufficient CPU resources, an error will be emitted.""" + memory_profile: Optional[Union[str, "_models.BrokerMemoryProfile"]] = rest_field( + name="memoryProfile", visibility=["read", "create"] + ) + """Memory profile of Broker. Known values are: \"Tiny\", \"Low\", \"Medium\", and \"High\".""" + persistence: Optional["_models.BrokerPersistence"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The persistence settings of the Broker.""" + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" + + @overload + def __init__( + self, + *, + advanced: Optional["_models.AdvancedSettings"] = None, + cardinality: Optional["_models.Cardinality"] = None, + diagnostics: Optional["_models.BrokerDiagnostics"] = None, + disk_backed_message_buffer: Optional["_models.DiskBackedMessageBuffer"] = None, + generate_resource_limits: Optional["_models.GenerateResourceLimits"] = None, + memory_profile: Optional[Union[str, "_models.BrokerMemoryProfile"]] = None, + persistence: Optional["_models.BrokerPersistence"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerResource(ProxyResource): + """Instance broker resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.iotoperations.models.BrokerProperties + :ivar extended_location: Edge location of the resource. + :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation + """ + + properties: Optional["_models.BrokerProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + extended_location: Optional["_models.ExtendedLocation"] = rest_field( + name="extendedLocation", visibility=["read", "create"] + ) + """Edge location of the resource.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.BrokerProperties"] = None, + extended_location: Optional["_models.ExtendedLocation"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerResourceRule(_Model): + """Broker Resource Rule properties. 
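# A minimal sketch of how the broker models above compose, assuming they are imported from
# azure.mgmt.iotoperations.models as generated in this package; all values are placeholders.
from azure.mgmt.iotoperations import models as _models

broker = _models.BrokerResource(
    properties=_models.BrokerProperties(
        memory_profile="Medium",  # known values per the docstring: "Tiny", "Low", "Medium", "High"
        persistence=_models.BrokerPersistence(max_size="5G"),
    )
)
# Every model also accepts its raw JSON shape through the mapping overload shown above,
# using the wire names declared via rest_field (e.g. "memoryProfile"):
broker_from_json = _models.BrokerResource({"properties": {"memoryProfile": "Medium"}})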
This defines the objects that represent the actions or topics, + such as - method.Connect, method.Publish, etc. + + :ivar method: Give access for a Broker method (i.e., Connect, Subscribe, or Publish). Required. + Known values are: "Connect", "Publish", and "Subscribe". + :vartype method: str or ~azure.mgmt.iotoperations.models.BrokerResourceDefinitionMethods + :ivar client_ids: A list of client IDs that match the clients. The client IDs are + case-sensitive and must match the client IDs provided by the clients during connection. This + subfield may be set if the method is Connect. + :vartype client_ids: list[str] + :ivar topics: A list of topics or topic patterns that match the topics that the clients can + publish or subscribe to. This subfield is required if the method is Publish or Subscribe. + :vartype topics: list[str] + """ + + method: Union[str, "_models.BrokerResourceDefinitionMethods"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Give access for a Broker method (i.e., Connect, Subscribe, or Publish). Required. Known values + are: \"Connect\", \"Publish\", and \"Subscribe\".""" + client_ids: Optional[List[str]] = rest_field( + name="clientIds", visibility=["read", "create", "update", "delete", "query"] + ) + """A list of client IDs that match the clients. The client IDs are case-sensitive and must match + the client IDs provided by the clients during connection. This subfield may be set if the + method is Connect.""" + topics: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A list of topics or topic patterns that match the topics that the clients can publish or + subscribe to. This subfield is required if the method is Publish or Subscribe.""" + + @overload + def __init__( + self, + *, + method: Union[str, "_models.BrokerResourceDefinitionMethods"], + client_ids: Optional[List[str]] = None, + topics: Optional[List[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerRetainMessagesPolicy(_Model): + """Broker Retain policy properties. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + BrokerRetainMessagesCustomPolicy + + :ivar mode: 'All' to persist all retain messages, 'None' to not persist any, 'Custom' to + persist only the specified topics. Required. Known values are: "All", "None", and "Custom". + :vartype mode: str or ~azure.mgmt.iotoperations.models.BrokerPersistencePolicyMode + """ + + __mapping__: Dict[str, _Model] = {} + mode: str = rest_discriminator(name="mode", visibility=["read", "create", "update", "delete", "query"]) + """'All' to persist all retain messages, 'None' to not persist any, 'Custom' to persist only the + specified topics. Required. Known values are: \"All\", \"None\", and \"Custom\".""" + + @overload + def __init__( + self, + *, + mode: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerRetainMessagesCustomPolicy(BrokerRetainMessagesPolicy, discriminator="Custom"): + """Custom Broker Retain Message Policy. 
+ + :ivar mode: The mode of the policy. Required. Indicates that the policy is a custom policy. + :vartype mode: str or ~azure.mgmt.iotoperations.models.CUSTOM + :ivar retain_settings: Settings for the policy. Required. + :vartype retain_settings: ~azure.mgmt.iotoperations.models.BrokerRetainMessagesSettings + """ + + mode: Literal[BrokerPersistencePolicyMode.CUSTOM] = rest_discriminator(name="mode", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The mode of the policy. Required. Indicates that the policy is a custom policy.""" + retain_settings: "_models.BrokerRetainMessagesSettings" = rest_field( + name="retainSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Settings for the policy. Required.""" + + @overload + def __init__( + self, + *, + retain_settings: "_models.BrokerRetainMessagesSettings", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, mode=BrokerPersistencePolicyMode.CUSTOM, **kwargs) + + +class BrokerRetainMessagesDynamic(_Model): + """Dynamic settings of BrokerRetainMessagesCustomPolicy. + + :ivar mode: Mode of the BrokerRetainMessagesCustomPolicy. Required. Known values are: "Enabled" + and "Disabled". + :vartype mode: str or ~azure.mgmt.iotoperations.models.OperationalMode + """ + + mode: Union[str, "_models.OperationalMode"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Mode of the BrokerRetainMessagesCustomPolicy. Required. Known values are: \"Enabled\" and + \"Disabled\".""" + + @overload + def __init__( + self, + *, + mode: Union[str, "_models.OperationalMode"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerRetainMessagesSettings(_Model): + """Broker Retain Messages properties. + + :ivar topics: List of topics under which retained messages would be persisted to disk. + Wildcards # and + supported. + :vartype topics: list[str] + :ivar dynamic: Controls if MQTT clients can request for disk persistence via ``MQTTv5`` user + property. Works in addition to other groups (logical OR). + :vartype dynamic: ~azure.mgmt.iotoperations.models.BrokerRetainMessagesDynamic + """ + + topics: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of topics under which retained messages would be persisted to disk. Wildcards # and + + supported.""" + dynamic: Optional["_models.BrokerRetainMessagesDynamic"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Controls if MQTT clients can request for disk persistence via ``MQTTv5`` user property. Works + in addition to other groups (logical OR).""" + + @overload + def __init__( + self, + *, + topics: Optional[List[str]] = None, + dynamic: Optional["_models.BrokerRetainMessagesDynamic"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerStateStorePolicy(_Model): + """Broker State Store Policy. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + BrokerStateStoreCustomPolicy + + :ivar mode: 'All' to persist all keys, 'None' to not persist any, 'Custom' to persist only the + specified keys. Required. Known values are: "All", "None", and "Custom". + :vartype mode: str or ~azure.mgmt.iotoperations.models.BrokerPersistencePolicyMode + """ + + __mapping__: Dict[str, _Model] = {} + mode: str = rest_discriminator(name="mode", visibility=["read", "create", "update", "delete", "query"]) + """'All' to persist all keys, 'None' to not persist any, 'Custom' to persist only the specified + keys. Required. Known values are: \"All\", \"None\", and \"Custom\".""" + + @overload + def __init__( + self, + *, + mode: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerStateStoreCustomPolicy(BrokerStateStorePolicy, discriminator="Custom"): + """Broker State Store Custom Policy. + + :ivar mode: The mode of the policy. Required. Indicates that the policy is a custom policy. + :vartype mode: str or ~azure.mgmt.iotoperations.models.CUSTOM + :ivar state_store_settings: Settings for the policy. Required. + :vartype state_store_settings: ~azure.mgmt.iotoperations.models.BrokerStateStorePolicySettings + """ + + mode: Literal[BrokerPersistencePolicyMode.CUSTOM] = rest_discriminator(name="mode", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The mode of the policy. Required. Indicates that the policy is a custom policy.""" + state_store_settings: "_models.BrokerStateStorePolicySettings" = rest_field( + name="stateStoreSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Settings for the policy. Required.""" + + @overload + def __init__( + self, + *, + state_store_settings: "_models.BrokerStateStorePolicySettings", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, mode=BrokerPersistencePolicyMode.CUSTOM, **kwargs) + + +class BrokerStateStoreDynamic(_Model): + """Dynamic settings of BrokerStateStoreCustomPolicy. + + :ivar mode: Mode of the BrokerStateStoreCustomPolicy. Required. Known values are: "Enabled" and + "Disabled". + :vartype mode: str or ~azure.mgmt.iotoperations.models.OperationalMode + """ + + mode: Union[str, "_models.OperationalMode"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Mode of the BrokerStateStoreCustomPolicy. Required. Known values are: \"Enabled\" and + \"Disabled\".""" + + @overload + def __init__( + self, + *, + mode: Union[str, "_models.OperationalMode"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class BrokerStateStorePolicyResources(_Model):
+    """Broker State Store Policy Resources properties.
+
+    :ivar key_type: The key to persist to disk. Required. Known values are: "Pattern", "String",
+     and "Binary".
+    :vartype key_type: str or ~azure.mgmt.iotoperations.models.BrokerStateStoreKeyType
+    :ivar keys_property: List of keys to persist to disk. Required.
+    :vartype keys_property: list[str]
+    """
+
+    key_type: Union[str, "_models.BrokerStateStoreKeyType"] = rest_field(
+        name="keyType", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The key to persist to disk. Required. Known values are: \"Pattern\", \"String\", and
+    \"Binary\"."""
+    keys_property: List[str] = rest_field(name="keys", visibility=["read", "create", "update", "delete", "query"])
+    """List of keys to persist to disk. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        key_type: Union[str, "_models.BrokerStateStoreKeyType"],
+        keys_property: List[str],
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class BrokerStateStorePolicySettings(_Model):
+    """Broker State Store Custom Policy Settings.
+
+    :ivar state_store_resources: List of keys and key types to persist to disk.
+    :vartype state_store_resources:
+     list[~azure.mgmt.iotoperations.models.BrokerStateStorePolicyResources]
+    :ivar dynamic: Controls if MQTT clients can request for disk persistence via ``MQTTv5`` user
+     property. Works in addition to other groups (logical OR).
+    :vartype dynamic: ~azure.mgmt.iotoperations.models.BrokerStateStoreDynamic
+    """
+
+    state_store_resources: Optional[List["_models.BrokerStateStorePolicyResources"]] = rest_field(
+        name="stateStoreResources", visibility=["read", "create", "update", "delete", "query"]
+    )
+    """List of keys and key types to persist to disk."""
+    dynamic: Optional["_models.BrokerStateStoreDynamic"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Controls if MQTT clients can request for disk persistence via ``MQTTv5`` user property. Works
+    in addition to other groups (logical OR)."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        state_store_resources: Optional[List["_models.BrokerStateStorePolicyResources"]] = None,
+        dynamic: Optional["_models.BrokerStateStoreDynamic"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class BrokerSubscriberQueuePolicy(_Model):
+    """Broker Subscriber Queue Policy properties.
+
+    You probably want to use the sub-classes and not this class directly. Known sub-classes are:
+    BrokerSubscriberQueueCustomPolicy
+
+    :ivar mode: 'All' to persist all subscriber queues, 'None' to not persist any, 'Custom' to
+     persist only the specified queues. Required. Known values are: "All", "None", and "Custom".
+ :vartype mode: str or ~azure.mgmt.iotoperations.models.BrokerPersistencePolicyMode + """ + + __mapping__: Dict[str, _Model] = {} + mode: str = rest_discriminator(name="mode", visibility=["read", "create", "update", "delete", "query"]) + """'All' to persist all subscriber queues, 'None' to not persist any, 'Custom' to persist only the + specified queues. Required. Known values are: \"All\", \"None\", and \"Custom\".""" + + @overload + def __init__( + self, + *, + mode: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerSubscriberQueueCustomPolicy(BrokerSubscriberQueuePolicy, discriminator="Custom"): + """Custom Subscriber Queue Policy Properties. + + :ivar mode: The mode of the policy. Required. Indicates that the policy is a custom policy. + :vartype mode: str or ~azure.mgmt.iotoperations.models.CUSTOM + :ivar subscriber_queue_settings: Custom policy, required if mode is Custom. Subscriber queues + from all groups are persisted to disk (logical OR). Required. + :vartype subscriber_queue_settings: + ~azure.mgmt.iotoperations.models.BrokerSubscriberQueueCustomPolicySettings + """ + + mode: Literal[BrokerPersistencePolicyMode.CUSTOM] = rest_discriminator(name="mode", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The mode of the policy. Required. Indicates that the policy is a custom policy.""" + subscriber_queue_settings: "_models.BrokerSubscriberQueueCustomPolicySettings" = rest_field( + name="subscriberQueueSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Custom policy, required if mode is Custom. Subscriber queues from all groups are persisted to + disk (logical OR). Required.""" + + @overload + def __init__( + self, + *, + subscriber_queue_settings: "_models.BrokerSubscriberQueueCustomPolicySettings", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, mode=BrokerPersistencePolicyMode.CUSTOM, **kwargs) + + +class BrokerSubscriberQueueCustomPolicySettings(_Model): # pylint: disable=name-too-long + """Broker Subscriber Queue Custom Policy properties. + + :ivar subscriber_client_ids: List of client IDs of the subscribers, wildcard * supported. + :vartype subscriber_client_ids: list[str] + :ivar dynamic: Controls if MQTT clients can request for disk persistence via ``MQTTv5`` user + property. Works in addition to other groups (logical OR). + :vartype dynamic: ~azure.mgmt.iotoperations.models.BrokerSubscriberQueueDynamic + :ivar topics: List of topics under which messages would be persisted to disk for each + subscriber. Wildcards # and + supported. + :vartype topics: list[str] + """ + + subscriber_client_ids: Optional[List[str]] = rest_field( + name="subscriberClientIds", visibility=["read", "create", "update", "delete", "query"] + ) + """List of client IDs of the subscribers, wildcard * supported.""" + dynamic: Optional["_models.BrokerSubscriberQueueDynamic"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Controls if MQTT clients can request for disk persistence via ``MQTTv5`` user property. 
Works + in addition to other groups (logical OR).""" + topics: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of topics under which messages would be persisted to disk for each subscriber. Wildcards # + and + supported.""" + + @overload + def __init__( + self, + *, + subscriber_client_ids: Optional[List[str]] = None, + dynamic: Optional["_models.BrokerSubscriberQueueDynamic"] = None, + topics: Optional[List[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class BrokerSubscriberQueueDynamic(_Model): + """Dynamic settings of BrokerSubscriberQueueCustomPolicy. + + :ivar mode: Mode of the BrokerSubscriberQueueCustomPolicy. Required. Known values are: + "Enabled" and "Disabled". + :vartype mode: str or ~azure.mgmt.iotoperations.models.OperationalMode + """ + + mode: Union[str, "_models.OperationalMode"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Mode of the BrokerSubscriberQueueCustomPolicy. Required. Known values are: \"Enabled\" and + \"Disabled\".""" + + @overload + def __init__( + self, + *, + mode: Union[str, "_models.OperationalMode"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Cardinality(_Model): + """Cardinality properties. + + :ivar backend_chain: The backend broker desired properties. Required. + :vartype backend_chain: ~azure.mgmt.iotoperations.models.BackendChain + :ivar frontend: The frontend desired properties. Required. + :vartype frontend: ~azure.mgmt.iotoperations.models.Frontend + """ + + backend_chain: "_models.BackendChain" = rest_field( + name="backendChain", visibility=["read", "create", "update", "delete", "query"] + ) + """The backend broker desired properties. Required.""" + frontend: "_models.Frontend" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The frontend desired properties. Required.""" + + @overload + def __init__( + self, + *, + backend_chain: "_models.BackendChain", + frontend: "_models.Frontend", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CertManagerCertificateSpec(_Model): + """Automatic TLS server certificate management with cert-manager. + + :ivar duration: Lifetime of certificate. Must be specified using a Go time.Duration format + (h|m|s). E.g. 240h for 240 hours and 45m for 45 minutes. + :vartype duration: str + :ivar secret_name: Secret for storing server certificate. Any existing data will be + overwritten. This is a reference to the secret through an identifying name, not the secret + itself. + :vartype secret_name: str + :ivar renew_before: When to begin renewing certificate. Must be specified using a Go + time.Duration format (h|m|s). E.g. 240h for 240 hours and 45m for 45 minutes. + :vartype renew_before: str + :ivar issuer_ref: cert-manager issuerRef. Required. 
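# A sketch of the disk persistence hierarchy defined above, assuming the models are imported
# from azure.mgmt.iotoperations.models; topic filters, keys, and client IDs are placeholders.
from azure.mgmt.iotoperations import models as _models

persistence = _models.BrokerPersistence(
    max_size="5G",  # required; used as the PVC request/limit or the local-path size limit
    retain=_models.BrokerRetainMessagesCustomPolicy(
        retain_settings=_models.BrokerRetainMessagesSettings(topics=["factory/#"])
    ),
    state_store=_models.BrokerStateStoreCustomPolicy(
        state_store_settings=_models.BrokerStateStorePolicySettings(
            state_store_resources=[
                _models.BrokerStateStorePolicyResources(key_type="Pattern", keys_property=["alerts*"])
            ]
        )
    ),
    subscriber_queue=_models.BrokerSubscriberQueueCustomPolicy(
        subscriber_queue_settings=_models.BrokerSubscriberQueueCustomPolicySettings(
            subscriber_client_ids=["line-1-*"]
        )
    ),
)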
+ :vartype issuer_ref: ~azure.mgmt.iotoperations.models.CertManagerIssuerRef + :ivar private_key: Type of certificate private key. + :vartype private_key: ~azure.mgmt.iotoperations.models.CertManagerPrivateKey + :ivar san: Additional Subject Alternative Names (SANs) to include in the certificate. + :vartype san: ~azure.mgmt.iotoperations.models.SanForCert + """ + + duration: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Lifetime of certificate. Must be specified using a Go time.Duration format (h|m|s). E.g. 240h + for 240 hours and 45m for 45 minutes.""" + secret_name: Optional[str] = rest_field( + name="secretName", visibility=["read", "create", "update", "delete", "query"] + ) + """Secret for storing server certificate. Any existing data will be overwritten. This is a + reference to the secret through an identifying name, not the secret itself.""" + renew_before: Optional[str] = rest_field( + name="renewBefore", visibility=["read", "create", "update", "delete", "query"] + ) + """When to begin renewing certificate. Must be specified using a Go time.Duration format (h|m|s). + E.g. 240h for 240 hours and 45m for 45 minutes.""" + issuer_ref: "_models.CertManagerIssuerRef" = rest_field( + name="issuerRef", visibility=["read", "create", "update", "delete", "query"] + ) + """cert-manager issuerRef. Required.""" + private_key: Optional["_models.CertManagerPrivateKey"] = rest_field( + name="privateKey", visibility=["read", "create", "update", "delete", "query"] + ) + """Type of certificate private key.""" + san: Optional["_models.SanForCert"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Additional Subject Alternative Names (SANs) to include in the certificate.""" + + @overload + def __init__( + self, + *, + issuer_ref: "_models.CertManagerIssuerRef", + duration: Optional[str] = None, + secret_name: Optional[str] = None, + renew_before: Optional[str] = None, + private_key: Optional["_models.CertManagerPrivateKey"] = None, + san: Optional["_models.SanForCert"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CertManagerCertOptions(_Model): + """Cert Manager Cert properties. + + :ivar duration: Lifetime of certificate. Must be specified using a Go time.Duration format + (h|m|s). E.g. 240h for 240 hours and 45m for 45 minutes. Required. + :vartype duration: str + :ivar renew_before: When to begin renewing certificate. Must be specified using a Go + time.Duration format (h|m|s). E.g. 240h for 240 hours and 45m for 45 minutes. Required. + :vartype renew_before: str + :ivar private_key: Configuration of certificate private key. Required. + :vartype private_key: ~azure.mgmt.iotoperations.models.CertManagerPrivateKey + """ + + duration: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Lifetime of certificate. Must be specified using a Go time.Duration format (h|m|s). E.g. 240h + for 240 hours and 45m for 45 minutes. Required.""" + renew_before: str = rest_field(name="renewBefore", visibility=["read", "create", "update", "delete", "query"]) + """When to begin renewing certificate. Must be specified using a Go time.Duration format (h|m|s). + E.g. 240h for 240 hours and 45m for 45 minutes. 
Required.""" + private_key: "_models.CertManagerPrivateKey" = rest_field( + name="privateKey", visibility=["read", "create", "update", "delete", "query"] + ) + """Configuration of certificate private key. Required.""" + + @overload + def __init__( + self, + *, + duration: str, + renew_before: str, + private_key: "_models.CertManagerPrivateKey", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CertManagerIssuerRef(_Model): + """Cert-Manager issuerRef properties. + + :ivar group: group of issuer. Required. + :vartype group: str + :ivar kind: kind of issuer (Issuer or ClusterIssuer). Required. Known values are: "Issuer" and + "ClusterIssuer". + :vartype kind: str or ~azure.mgmt.iotoperations.models.CertManagerIssuerKind + :ivar name: name of issuer. Required. + :vartype name: str + """ + + group: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """group of issuer. Required.""" + kind: Union[str, "_models.CertManagerIssuerKind"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """kind of issuer (Issuer or ClusterIssuer). Required. Known values are: \"Issuer\" and + \"ClusterIssuer\".""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """name of issuer. Required.""" + + @overload + def __init__( + self, + *, + group: str, + kind: Union[str, "_models.CertManagerIssuerKind"], + name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CertManagerPrivateKey(_Model): + """Cert Manager private key properties. + + :ivar algorithm: algorithm for private key. Required. Known values are: "Ec256", "Ec384", + "Ec521", "Ed25519", "Rsa2048", "Rsa4096", and "Rsa8192". + :vartype algorithm: str or ~azure.mgmt.iotoperations.models.PrivateKeyAlgorithm + :ivar rotation_policy: cert-manager private key rotationPolicy. Required. Known values are: + "Always" and "Never". + :vartype rotation_policy: str or ~azure.mgmt.iotoperations.models.PrivateKeyRotationPolicy + """ + + algorithm: Union[str, "_models.PrivateKeyAlgorithm"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """algorithm for private key. Required. Known values are: \"Ec256\", \"Ec384\", \"Ec521\", + \"Ed25519\", \"Rsa2048\", \"Rsa4096\", and \"Rsa8192\".""" + rotation_policy: Union[str, "_models.PrivateKeyRotationPolicy"] = rest_field( + name="rotationPolicy", visibility=["read", "create", "update", "delete", "query"] + ) + """cert-manager private key rotationPolicy. Required. Known values are: \"Always\" and \"Never\".""" + + @overload + def __init__( + self, + *, + algorithm: Union[str, "_models.PrivateKeyAlgorithm"], + rotation_policy: Union[str, "_models.PrivateKeyRotationPolicy"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ClientConfig(_Model): + """The settings of Client Config. 
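# A short sketch of the cert-manager certificate models above, assuming they are imported
# from azure.mgmt.iotoperations.models; the issuer group/name and durations are placeholders.
from azure.mgmt.iotoperations import models as _models

cert_spec = _models.CertManagerCertificateSpec(
    issuer_ref=_models.CertManagerIssuerRef(group="cert-manager.io", kind="ClusterIssuer", name="my-issuer"),
    duration="240h",  # Go time.Duration format (h|m|s), per the docstrings above
    renew_before="45m",
    private_key=_models.CertManagerPrivateKey(algorithm="Ec256", rotation_policy="Always"),
)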
+ + :ivar max_session_expiry_seconds: Upper bound of Session Expiry Interval, in seconds. + :vartype max_session_expiry_seconds: int + :ivar max_message_expiry_seconds: Upper bound of Message Expiry Interval, in seconds. + :vartype max_message_expiry_seconds: int + :ivar max_packet_size_bytes: Max message size for a packet in Bytes. + :vartype max_packet_size_bytes: int + :ivar subscriber_queue_limit: The limit on the number of queued messages for a subscriber. + :vartype subscriber_queue_limit: ~azure.mgmt.iotoperations.models.SubscriberQueueLimit + :ivar max_receive_maximum: Upper bound of Receive Maximum that a client can request in the + CONNECT packet. + :vartype max_receive_maximum: int + :ivar max_keep_alive_seconds: Upper bound of a client's Keep Alive, in seconds. + :vartype max_keep_alive_seconds: int + """ + + max_session_expiry_seconds: Optional[int] = rest_field( + name="maxSessionExpirySeconds", visibility=["read", "create", "update", "delete", "query"] + ) + """Upper bound of Session Expiry Interval, in seconds.""" + max_message_expiry_seconds: Optional[int] = rest_field( + name="maxMessageExpirySeconds", visibility=["read", "create", "update", "delete", "query"] + ) + """Upper bound of Message Expiry Interval, in seconds.""" + max_packet_size_bytes: Optional[int] = rest_field( + name="maxPacketSizeBytes", visibility=["read", "create", "update", "delete", "query"] + ) + """Max message size for a packet in Bytes.""" + subscriber_queue_limit: Optional["_models.SubscriberQueueLimit"] = rest_field( + name="subscriberQueueLimit", visibility=["read", "create", "update", "delete", "query"] + ) + """The limit on the number of queued messages for a subscriber.""" + max_receive_maximum: Optional[int] = rest_field( + name="maxReceiveMaximum", visibility=["read", "create", "update", "delete", "query"] + ) + """Upper bound of Receive Maximum that a client can request in the CONNECT packet.""" + max_keep_alive_seconds: Optional[int] = rest_field( + name="maxKeepAliveSeconds", visibility=["read", "create", "update", "delete", "query"] + ) + """Upper bound of a client's Keep Alive, in seconds.""" + + @overload + def __init__( + self, + *, + max_session_expiry_seconds: Optional[int] = None, + max_message_expiry_seconds: Optional[int] = None, + max_packet_size_bytes: Optional[int] = None, + subscriber_queue_limit: Optional["_models.SubscriberQueueLimit"] = None, + max_receive_maximum: Optional[int] = None, + max_keep_alive_seconds: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowGraphNode(_Model): + """DataflowGraph node properties. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + DatafloGraphDestinationNode, DataflowGraphGraphNode, DataflowGraphSourceNode + + :ivar name: Name of the node. Required. + :vartype name: str + :ivar type: Type of the node. Required. Known values are: "Source", "Graph", and "Destination". + :vartype type: str or ~azure.mgmt.iotoperations.models.DataflowGraphNodeType + """ + + __mapping__: Dict[str, _Model] = {} + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the node. 
Required.""" + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """Type of the node. Required. Known values are: \"Source\", \"Graph\", and \"Destination\".""" + + @overload + def __init__( + self, + *, + name: str, + type: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DatafloGraphDestinationNode(DataflowGraphNode, discriminator="Destination"): + """DataflowGraph destination node properties. + + :ivar name: Name of the node. Required. + :vartype name: str + :ivar type: Type of the destination node. Required. Dataflow destination node. + :vartype type: str or ~azure.mgmt.iotoperations.models.DESTINATION + :ivar destination_settings: Destination configuration. Required. + :vartype destination_settings: + ~azure.mgmt.iotoperations.models.DataflowGraphDestinationNodeSettings + """ + + type: Literal[DataflowGraphNodeType.DESTINATION] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Type of the destination node. Required. Dataflow destination node.""" + destination_settings: "_models.DataflowGraphDestinationNodeSettings" = rest_field( + name="destinationSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Destination configuration. Required.""" + + @overload + def __init__( + self, + *, + name: str, + destination_settings: "_models.DataflowGraphDestinationNodeSettings", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type=DataflowGraphNodeType.DESTINATION, **kwargs) + + +class DataflowBuiltInTransformationDataset(_Model): + """Dataflow BuiltIn Transformation dataset properties. + + :ivar key: The key of the dataset. Required. + :vartype key: str + :ivar description: A user provided optional description of the dataset. + :vartype description: str + :ivar schema_ref: The reference to the schema that describes the dataset. Allowed: JSON + Schema/draft-7. + :vartype schema_ref: str + :ivar inputs: List of fields for enriching from the Broker State Store. Required. + :vartype inputs: list[str] + :ivar expression: Condition to enrich data from Broker State Store. Example: $1 < 0 || $1 > $2 + (Assuming inputs section $1 and $2 are provided). + :vartype expression: str + """ + + key: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The key of the dataset. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A user provided optional description of the dataset.""" + schema_ref: Optional[str] = rest_field(name="schemaRef", visibility=["read", "create", "update", "delete", "query"]) + """The reference to the schema that describes the dataset. Allowed: JSON Schema/draft-7.""" + inputs: List[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of fields for enriching from the Broker State Store. Required.""" + expression: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Condition to enrich data from Broker State Store. 
Example: $1 < 0 || $1 > $2 (Assuming inputs + section $1 and $2 are provided).""" + + @overload + def __init__( + self, + *, + key: str, + inputs: List[str], + description: Optional[str] = None, + schema_ref: Optional[str] = None, + expression: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowBuiltInTransformationFilter(_Model): + """Dataflow BuiltIn Transformation filter properties. + + :ivar type: The type of dataflow operation. "Filter" + :vartype type: str or ~azure.mgmt.iotoperations.models.FilterType + :ivar description: A user provided optional description of the filter. + :vartype description: str + :ivar inputs: List of fields for filtering in JSON path expression. Required. + :vartype inputs: list[str] + :ivar expression: Condition to filter data. Can reference input fields with {n} where n is the + index of the input field starting from 1. Example: $1 < 0 || $1 > $2 (Assuming inputs section + $1 and $2 are provided). Required. + :vartype expression: str + """ + + type: Optional[Union[str, "_models.FilterType"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The type of dataflow operation. \"Filter\"""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A user provided optional description of the filter.""" + inputs: List[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of fields for filtering in JSON path expression. Required.""" + expression: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Condition to filter data. Can reference input fields with {n} where n is the index of the input + field starting from 1. Example: $1 < 0 || $1 > $2 (Assuming inputs section $1 and $2 are + provided). Required.""" + + @overload + def __init__( + self, + *, + inputs: List[str], + expression: str, + type: Optional[Union[str, "_models.FilterType"]] = None, + description: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowBuiltInTransformationMap(_Model): + """Dataflow BuiltIn Transformation map properties. + + :ivar type: Type of transformation. Known values are: "NewProperties", "Rename", "Compute", + "PassThrough", and "BuiltInFunction". + :vartype type: str or ~azure.mgmt.iotoperations.models.DataflowMappingType + :ivar description: A user provided optional description of the mapping function. + :vartype description: str + :ivar inputs: List of fields for mapping in JSON path expression. Required. + :vartype inputs: list[str] + :ivar expression: Modify the inputs field(s) to the final output field. Example: $1 * 2.2 + (Assuming inputs section $1 is provided). + :vartype expression: str + :ivar output: Where and how the input fields to be organized in the output record. Required. + :vartype output: str + """ + + type: Optional[Union[str, "_models.DataflowMappingType"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Type of transformation. 
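# A brief sketch of a built-in transformation filter, assuming the model is imported from
# azure.mgmt.iotoperations.models; the input field paths are placeholders.
from azure.mgmt.iotoperations import models as _models

temperature_filter = _models.DataflowBuiltInTransformationFilter(
    inputs=["temperature.value", "temperature.max"],
    # $1 and $2 refer to the first and second entries of `inputs`, mirroring the docstring example.
    expression="$1 < 0 || $1 > $2",
)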
Known values are: \"NewProperties\", \"Rename\", \"Compute\", + \"PassThrough\", and \"BuiltInFunction\".""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A user provided optional description of the mapping function.""" + inputs: List[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of fields for mapping in JSON path expression. Required.""" + expression: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Modify the inputs field(s) to the final output field. Example: $1 * 2.2 (Assuming inputs + section $1 is provided).""" + output: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Where and how the input fields to be organized in the output record. Required.""" + + @overload + def __init__( + self, + *, + inputs: List[str], + output: str, + type: Optional[Union[str, "_models.DataflowMappingType"]] = None, + description: Optional[str] = None, + expression: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowBuiltInTransformationSettings(_Model): + """Dataflow BuiltIn Transformation properties. + + :ivar serialization_format: Serialization format. Optional; defaults to JSON. Allowed value + JSON Schema/draft-7, Parquet. Default: Json. Known values are: "Delta", "Json", and "Parquet". + :vartype serialization_format: str or + ~azure.mgmt.iotoperations.models.TransformationSerializationFormat + :ivar schema_ref: Reference to the schema that describes the output of the transformation. + :vartype schema_ref: str + :ivar datasets: Enrich data from Broker State Store. Dataset references a key in Broker State + Store. + :vartype datasets: list[~azure.mgmt.iotoperations.models.DataflowBuiltInTransformationDataset] + :ivar filter: Filters input record or datapoints based on condition. + :vartype filter: list[~azure.mgmt.iotoperations.models.DataflowBuiltInTransformationFilter] + :ivar map: Maps input to output message. + :vartype map: list[~azure.mgmt.iotoperations.models.DataflowBuiltInTransformationMap] + """ + + serialization_format: Optional[Union[str, "_models.TransformationSerializationFormat"]] = rest_field( + name="serializationFormat", visibility=["read", "create", "update", "delete", "query"] + ) + """Serialization format. Optional; defaults to JSON. Allowed value JSON Schema/draft-7, Parquet. + Default: Json. Known values are: \"Delta\", \"Json\", and \"Parquet\".""" + schema_ref: Optional[str] = rest_field(name="schemaRef", visibility=["read", "create", "update", "delete", "query"]) + """Reference to the schema that describes the output of the transformation.""" + datasets: Optional[List["_models.DataflowBuiltInTransformationDataset"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Enrich data from Broker State Store. 
Dataset references a key in Broker State Store.""" + filter: Optional[List["_models.DataflowBuiltInTransformationFilter"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Filters input record or datapoints based on condition.""" + map: Optional[List["_models.DataflowBuiltInTransformationMap"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Maps input to output message.""" + + @overload + def __init__( + self, + *, + serialization_format: Optional[Union[str, "_models.TransformationSerializationFormat"]] = None, + schema_ref: Optional[str] = None, + datasets: Optional[List["_models.DataflowBuiltInTransformationDataset"]] = None, + filter: Optional[ + List["_models.DataflowBuiltInTransformationFilter"] + ] = None, # pylint: disable=redefined-builtin + map: Optional[List["_models.DataflowBuiltInTransformationMap"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowDestinationOperationSettings(_Model): + """Dataflow Destination Operation properties. + + :ivar endpoint_ref: Reference to the Endpoint CR. Can be of Broker, Kafka, Fabric, ADLS, ADX + type. Required. + :vartype endpoint_ref: str + :ivar data_destination: Destination location, can be a topic or table name. Supports dynamic + values with $topic, $systemProperties, $userProperties, $payload, $context, and $subscription. + Required. + :vartype data_destination: str + """ + + endpoint_ref: str = rest_field(name="endpointRef", visibility=["read", "create", "update", "delete", "query"]) + """Reference to the Endpoint CR. Can be of Broker, Kafka, Fabric, ADLS, ADX type. Required.""" + data_destination: str = rest_field( + name="dataDestination", visibility=["read", "create", "update", "delete", "query"] + ) + """Destination location, can be a topic or table name. Supports dynamic values with $topic, + $systemProperties, $userProperties, $payload, $context, and $subscription. Required.""" + + @overload + def __init__( + self, + *, + endpoint_ref: str, + data_destination: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointAuthenticationAccessToken(_Model): # pylint: disable=name-too-long + """DataflowEndpoint Authentication Access Token properties. + + :ivar secret_ref: Token secret name. Required. + :vartype secret_ref: str + """ + + secret_ref: str = rest_field(name="secretRef", visibility=["read", "create", "update", "delete", "query"]) + """Token secret name. Required.""" + + @overload + def __init__( + self, + *, + secret_ref: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointAuthenticationAnonymous(_Model): + """DataflowEndpoint Anonymous Authentication properties.""" + + +class DataflowEndpointAuthenticationSasl(_Model): + """DataflowEndpoint Authentication Sasl properties. 
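# A sketch combining the transformation and destination settings above, assuming the models are
# imported from azure.mgmt.iotoperations.models; the endpoint reference and topic are placeholders.
from azure.mgmt.iotoperations import models as _models

transformation = _models.DataflowBuiltInTransformationSettings(
    serialization_format="Json",  # known values per the docstring: "Delta", "Json", "Parquet"
    map=[
        _models.DataflowBuiltInTransformationMap(
            inputs=["temperature.value"],
            expression="$1 * 2.2",  # mirrors the docstring example
            output="temperatureF",
        )
    ],
)
destination = _models.DataflowDestinationOperationSettings(
    endpoint_ref="my-dataflow-endpoint",
    data_destination="telemetry/processed",
)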
+ + :ivar sasl_type: Type of SASL authentication. Can be PLAIN, SCRAM-SHA-256, or SCRAM-SHA-512. + Required. Known values are: "Plain", "ScramSha256", and "ScramSha512". + :vartype sasl_type: str or + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationSaslType + :ivar secret_ref: Token secret name. Required. + :vartype secret_ref: str + """ + + sasl_type: Union[str, "_models.DataflowEndpointAuthenticationSaslType"] = rest_field( + name="saslType", visibility=["read", "create", "update", "delete", "query"] + ) + """Type of SASL authentication. Can be PLAIN, SCRAM-SHA-256, or SCRAM-SHA-512. Required. Known + values are: \"Plain\", \"ScramSha256\", and \"ScramSha512\".""" + secret_ref: str = rest_field(name="secretRef", visibility=["read", "create", "update", "delete", "query"]) + """Token secret name. Required.""" + + @overload + def __init__( + self, + *, + sasl_type: Union[str, "_models.DataflowEndpointAuthenticationSaslType"], + secret_ref: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointAuthenticationServiceAccountToken(_Model): # pylint: disable=name-too-long + """Service Account Token for BrokerAuthentication. + + :ivar audience: Audience of the service account. Optional, defaults to the broker internal + service account audience. Required. + :vartype audience: str + """ + + audience: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Audience of the service account. Optional, defaults to the broker internal service account + audience. Required.""" + + @overload + def __init__( + self, + *, + audience: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointAuthenticationSystemAssignedManagedIdentity(_Model): # pylint: disable=name-too-long + """DataflowEndpoint Authentication SystemAssignedManagedIdentity properties. + + :ivar audience: Audience of the service to authenticate against. Optional; defaults to the + audience for Service host configuration. + :vartype audience: str + """ + + audience: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Audience of the service to authenticate against. Optional; defaults to the audience for Service + host configuration.""" + + @overload + def __init__( + self, + *, + audience: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointAuthenticationUserAssignedManagedIdentity(_Model): # pylint: disable=name-too-long + """DataflowEndpoint Authentication UserAssignedManagedIdentity properties. + + :ivar client_id: Client ID for the user-assigned managed identity. Required. + :vartype client_id: str + :ivar scope: Resource identifier (application ID URI) of the resource, affixed with the + .default suffix. + :vartype scope: str + :ivar tenant_id: Tenant ID. Required. 
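# --- Editor's usage sketch (illustrative; not part of the generated patch) ---
# The two authentication helpers above in isolation; the secret name is a
# placeholder, and the audience matches the aio-internal default noted later
# in this file.
from azure.mgmt.iotoperations import models

sasl_auth = models.DataflowEndpointAuthenticationSasl(
    sasl_type="ScramSha256",      # known values: "Plain", "ScramSha256", "ScramSha512"
    secret_ref="my-sasl-secret",  # token secret name
)

sat_auth = models.DataflowEndpointAuthenticationServiceAccountToken(
    audience="aio-internal",      # broker-internal service account audience
)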
+ :vartype tenant_id: str + """ + + client_id: str = rest_field(name="clientId", visibility=["read", "create", "update", "delete", "query"]) + """Client ID for the user-assigned managed identity. Required.""" + scope: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Resource identifier (application ID URI) of the resource, affixed with the .default suffix.""" + tenant_id: str = rest_field(name="tenantId", visibility=["read", "create", "update", "delete", "query"]) + """Tenant ID. Required.""" + + @overload + def __init__( + self, + *, + client_id: str, + tenant_id: str, + scope: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointAuthenticationX509(_Model): + """DataflowEndpoint Authentication X509 properties. + + :ivar secret_ref: Secret reference of the X.509 certificate. Required. + :vartype secret_ref: str + """ + + secret_ref: str = rest_field(name="secretRef", visibility=["read", "create", "update", "delete", "query"]) + """Secret reference of the X.509 certificate. Required.""" + + @overload + def __init__( + self, + *, + secret_ref: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointDataExplorer(_Model): + """Azure Data Explorer endpoint properties. + + :ivar authentication: Authentication configuration. NOTE - only authentication property is + allowed per entry. Required. + :vartype authentication: + ~azure.mgmt.iotoperations.models.DataflowEndpointDataExplorerAuthentication + :ivar database: Database name. Required. + :vartype database: str + :ivar host: Host of the Azure Data Explorer in the form of ..kusto.windows.net + . Required. + :vartype host: str + :ivar batching: Azure Data Explorer endpoint batching configuration. + :vartype batching: ~azure.mgmt.iotoperations.models.BatchingConfiguration + """ + + authentication: "_models.DataflowEndpointDataExplorerAuthentication" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Authentication configuration. NOTE - only authentication property is allowed per entry. + Required.""" + database: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Database name. Required.""" + host: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Host of the Azure Data Explorer in the form of ..kusto.windows.net . Required.""" + batching: Optional["_models.BatchingConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Azure Data Explorer endpoint batching configuration.""" + + @overload + def __init__( + self, + *, + authentication: "_models.DataflowEndpointDataExplorerAuthentication", + database: str, + host: str, + batching: Optional["_models.BatchingConfiguration"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
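# --- Editor's usage sketch (illustrative; not part of the generated patch) ---
# User-assigned managed identity and X.509 authentication settings; the GUIDs,
# scope URI, and secret name are placeholders.
from azure.mgmt.iotoperations import models

uami_auth = models.DataflowEndpointAuthenticationUserAssignedManagedIdentity(
    client_id="00000000-0000-0000-0000-000000000000",  # client ID of the identity
    tenant_id="11111111-1111-1111-1111-111111111111",
    scope="https://eventgrid.azure.net/.default",      # optional: app ID URI + ".default"
)

x509_auth = models.DataflowEndpointAuthenticationX509(
    secret_ref="my-client-certificate",  # secret reference of the X.509 certificate
)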
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointDataExplorerAuthentication(_Model): # pylint: disable=name-too-long + """Azure Data Explorer Authentication properties. NOTE - only authentication property is allowed + per entry. + + :ivar method: Mode of Authentication. Required. Known values are: + "SystemAssignedManagedIdentity" and "UserAssignedManagedIdentity". + :vartype method: str or ~azure.mgmt.iotoperations.models.DataExplorerAuthMethod + :ivar system_assigned_managed_identity_settings: System-assigned managed identity + authentication. + :vartype system_assigned_managed_identity_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity + :ivar user_assigned_managed_identity_settings: User-assigned managed identity authentication. + :vartype user_assigned_managed_identity_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationUserAssignedManagedIdentity + """ + + method: Union[str, "_models.DataExplorerAuthMethod"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Mode of Authentication. Required. Known values are: \"SystemAssignedManagedIdentity\" and + \"UserAssignedManagedIdentity\".""" + system_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" + ] = rest_field( + name="systemAssignedManagedIdentitySettings", visibility=["read", "create", "update", "delete", "query"] + ) + """System-assigned managed identity authentication.""" + user_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" + ] = rest_field( + name="userAssignedManagedIdentitySettings", visibility=["read", "create", "update", "delete", "query"] + ) + """User-assigned managed identity authentication.""" + + @overload + def __init__( + self, + *, + method: Union[str, "_models.DataExplorerAuthMethod"], + system_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" + ] = None, + user_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" + ] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointDataLakeStorage(_Model): + """Azure Data Lake endpoint properties. + + :ivar authentication: Authentication configuration. NOTE - only authentication property is + allowed per entry. Required. + :vartype authentication: + ~azure.mgmt.iotoperations.models.DataflowEndpointDataLakeStorageAuthentication + :ivar host: Host of the Azure Data Lake in the form of .blob.core.windows.net . + Required. + :vartype host: str + :ivar batching: Azure Data Lake endpoint batching configuration. + :vartype batching: ~azure.mgmt.iotoperations.models.BatchingConfiguration + """ + + authentication: "_models.DataflowEndpointDataLakeStorageAuthentication" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Authentication configuration. NOTE - only authentication property is allowed per entry. 
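# --- Editor's usage sketch (illustrative; not part of the generated patch) ---
# An Azure Data Explorer endpoint using system-assigned managed identity; the
# host and database names are examples only.
from azure.mgmt.iotoperations import models

adx_endpoint = models.DataflowEndpointDataExplorer(
    host="mycluster.westeurope.kusto.windows.net",  # example ADX cluster host
    database="telemetrydb",
    authentication=models.DataflowEndpointDataExplorerAuthentication(
        method="SystemAssignedManagedIdentity",     # or "UserAssignedManagedIdentity"
        system_assigned_managed_identity_settings=(
            models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity()
        ),
    ),
)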
+ Required.""" + host: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Host of the Azure Data Lake in the form of .blob.core.windows.net . Required.""" + batching: Optional["_models.BatchingConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Azure Data Lake endpoint batching configuration.""" + + @overload + def __init__( + self, + *, + authentication: "_models.DataflowEndpointDataLakeStorageAuthentication", + host: str, + batching: Optional["_models.BatchingConfiguration"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointDataLakeStorageAuthentication(_Model): # pylint: disable=name-too-long + """Azure Data Lake endpoint Authentication properties. NOTE Enum - Only one method is supported + for one entry. + + :ivar method: Mode of Authentication. Required. Known values are: + "SystemAssignedManagedIdentity", "UserAssignedManagedIdentity", and "AccessToken". + :vartype method: str or ~azure.mgmt.iotoperations.models.DataLakeStorageAuthMethod + :ivar access_token_settings: SAS token authentication. + :vartype access_token_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationAccessToken + :ivar system_assigned_managed_identity_settings: System-assigned managed identity + authentication. + :vartype system_assigned_managed_identity_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity + :ivar user_assigned_managed_identity_settings: User-assigned managed identity authentication. + :vartype user_assigned_managed_identity_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationUserAssignedManagedIdentity + """ + + method: Union[str, "_models.DataLakeStorageAuthMethod"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Mode of Authentication. Required. 
Known values are: \"SystemAssignedManagedIdentity\", + \"UserAssignedManagedIdentity\", and \"AccessToken\".""" + access_token_settings: Optional["_models.DataflowEndpointAuthenticationAccessToken"] = rest_field( + name="accessTokenSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """SAS token authentication.""" + system_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" + ] = rest_field( + name="systemAssignedManagedIdentitySettings", visibility=["read", "create", "update", "delete", "query"] + ) + """System-assigned managed identity authentication.""" + user_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" + ] = rest_field( + name="userAssignedManagedIdentitySettings", visibility=["read", "create", "update", "delete", "query"] + ) + """User-assigned managed identity authentication.""" + + @overload + def __init__( + self, + *, + method: Union[str, "_models.DataLakeStorageAuthMethod"], + access_token_settings: Optional["_models.DataflowEndpointAuthenticationAccessToken"] = None, + system_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" + ] = None, + user_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" + ] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointFabricOneLake(_Model): + """Microsoft Fabric endpoint properties. + + :ivar authentication: Authentication configuration. NOTE - only one authentication property is + allowed per entry. Required. + :vartype authentication: + ~azure.mgmt.iotoperations.models.DataflowEndpointFabricOneLakeAuthentication + :ivar names: Names of the workspace and lakehouse. Required. + :vartype names: ~azure.mgmt.iotoperations.models.DataflowEndpointFabricOneLakeNames + :ivar one_lake_path_type: Type of location of the data in the workspace. Can be either tables + or files. Required. Known values are: "Files" and "Tables". + :vartype one_lake_path_type: str or + ~azure.mgmt.iotoperations.models.DataflowEndpointFabricPathType + :ivar host: Host of the Microsoft Fabric in the form of https://.fabric.microsoft.com. + Required. + :vartype host: str + :ivar batching: Batching configuration. + :vartype batching: ~azure.mgmt.iotoperations.models.BatchingConfiguration + """ + + authentication: "_models.DataflowEndpointFabricOneLakeAuthentication" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Authentication configuration. NOTE - only one authentication property is allowed per entry. + Required.""" + names: "_models.DataflowEndpointFabricOneLakeNames" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Names of the workspace and lakehouse. Required.""" + one_lake_path_type: Union[str, "_models.DataflowEndpointFabricPathType"] = rest_field( + name="oneLakePathType", visibility=["read", "create", "update", "delete", "query"] + ) + """Type of location of the data in the workspace. Can be either tables or files. Required. 
Known + values are: \"Files\" and \"Tables\".""" + host: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Host of the Microsoft Fabric in the form of https://.fabric.microsoft.com. Required.""" + batching: Optional["_models.BatchingConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Batching configuration.""" + + @overload + def __init__( + self, + *, + authentication: "_models.DataflowEndpointFabricOneLakeAuthentication", + names: "_models.DataflowEndpointFabricOneLakeNames", + one_lake_path_type: Union[str, "_models.DataflowEndpointFabricPathType"], + host: str, + batching: Optional["_models.BatchingConfiguration"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointFabricOneLakeAuthentication(_Model): # pylint: disable=name-too-long + """Microsoft Fabric endpoint. Authentication properties. NOTE - Only one method is supported for + one entry. + + :ivar method: Mode of Authentication. Required. Known values are: + "SystemAssignedManagedIdentity" and "UserAssignedManagedIdentity". + :vartype method: str or ~azure.mgmt.iotoperations.models.FabricOneLakeAuthMethod + :ivar system_assigned_managed_identity_settings: System-assigned managed identity + authentication. + :vartype system_assigned_managed_identity_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity + :ivar user_assigned_managed_identity_settings: User-assigned managed identity authentication. + :vartype user_assigned_managed_identity_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationUserAssignedManagedIdentity + """ + + method: Union[str, "_models.FabricOneLakeAuthMethod"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Mode of Authentication. Required. Known values are: \"SystemAssignedManagedIdentity\" and + \"UserAssignedManagedIdentity\".""" + system_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" + ] = rest_field( + name="systemAssignedManagedIdentitySettings", visibility=["read", "create", "update", "delete", "query"] + ) + """System-assigned managed identity authentication.""" + user_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" + ] = rest_field( + name="userAssignedManagedIdentitySettings", visibility=["read", "create", "update", "delete", "query"] + ) + """User-assigned managed identity authentication.""" + + @overload + def __init__( + self, + *, + method: Union[str, "_models.FabricOneLakeAuthMethod"], + system_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" + ] = None, + user_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" + ] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointFabricOneLakeNames(_Model): + """Microsoft Fabric endpoint Names properties. + + :ivar lakehouse_name: Lakehouse name. Required. + :vartype lakehouse_name: str + :ivar workspace_name: Workspace name. Required. + :vartype workspace_name: str + """ + + lakehouse_name: str = rest_field(name="lakehouseName", visibility=["read", "create", "update", "delete", "query"]) + """Lakehouse name. Required.""" + workspace_name: str = rest_field(name="workspaceName", visibility=["read", "create", "update", "delete", "query"]) + """Workspace name. Required.""" + + @overload + def __init__( + self, + *, + lakehouse_name: str, + workspace_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointKafka(_Model): + """Kafka endpoint properties. + + :ivar authentication: Authentication configuration. NOTE - only authentication property is + allowed per entry. Required. + :vartype authentication: ~azure.mgmt.iotoperations.models.DataflowEndpointKafkaAuthentication + :ivar consumer_group_id: Consumer group ID. + :vartype consumer_group_id: str + :ivar host: Kafka endpoint host. Required. + :vartype host: str + :ivar batching: Batching configuration. + :vartype batching: ~azure.mgmt.iotoperations.models.DataflowEndpointKafkaBatching + :ivar copy_mqtt_properties: Copy Broker properties. No effect if the endpoint is used as a + source or if the dataflow doesn't have an Broker source. Known values are: "Enabled" and + "Disabled". + :vartype copy_mqtt_properties: str or ~azure.mgmt.iotoperations.models.OperationalMode + :ivar compression: Compression. Can be none, gzip, lz4, or snappy. No effect if the endpoint is + used as a source. Known values are: "None", "Gzip", "Snappy", and "Lz4". + :vartype compression: str or ~azure.mgmt.iotoperations.models.DataflowEndpointKafkaCompression + :ivar kafka_acks: Kafka acks. Can be all, one, or zero. No effect if the endpoint is used as a + source. Known values are: "Zero", "One", and "All". + :vartype kafka_acks: str or ~azure.mgmt.iotoperations.models.DataflowEndpointKafkaAcks + :ivar partition_strategy: Partition handling strategy. Can be default or static. No effect if + the endpoint is used as a source. Known values are: "Default", "Static", "Topic", and + "Property". + :vartype partition_strategy: str or + ~azure.mgmt.iotoperations.models.DataflowEndpointKafkaPartitionStrategy + :ivar tls: TLS configuration. + :vartype tls: ~azure.mgmt.iotoperations.models.TlsProperties + :ivar cloud_event_attributes: Cloud event mapping config. Known values are: "Propagate" and + "CreateOrRemap". + :vartype cloud_event_attributes: str or + ~azure.mgmt.iotoperations.models.CloudEventAttributeType + """ + + authentication: "_models.DataflowEndpointKafkaAuthentication" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Authentication configuration. NOTE - only authentication property is allowed per entry. 
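# --- Editor's usage sketch (illustrative; not part of the generated patch) ---
# A Microsoft Fabric OneLake endpoint writing to lakehouse tables; the host,
# workspace, and lakehouse names are examples only.
from azure.mgmt.iotoperations import models

fabric_endpoint = models.DataflowEndpointFabricOneLake(
    host="https://onelake.dfs.fabric.microsoft.com",  # example *.fabric.microsoft.com host
    one_lake_path_type="Tables",                      # known values: "Files", "Tables"
    names=models.DataflowEndpointFabricOneLakeNames(
        workspace_name="my-workspace",
        lakehouse_name="my-lakehouse",
    ),
    authentication=models.DataflowEndpointFabricOneLakeAuthentication(
        method="SystemAssignedManagedIdentity",
        system_assigned_managed_identity_settings=(
            models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity()
        ),
    ),
)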
+ Required.""" + consumer_group_id: Optional[str] = rest_field( + name="consumerGroupId", visibility=["read", "create", "update", "delete", "query"] + ) + """Consumer group ID.""" + host: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Kafka endpoint host. Required.""" + batching: Optional["_models.DataflowEndpointKafkaBatching"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Batching configuration.""" + copy_mqtt_properties: Optional[Union[str, "_models.OperationalMode"]] = rest_field( + name="copyMqttProperties", visibility=["read", "create", "update", "delete", "query"] + ) + """Copy Broker properties. No effect if the endpoint is used as a source or if the dataflow + doesn't have an Broker source. Known values are: \"Enabled\" and \"Disabled\".""" + compression: Optional[Union[str, "_models.DataflowEndpointKafkaCompression"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Compression. Can be none, gzip, lz4, or snappy. No effect if the endpoint is used as a source. + Known values are: \"None\", \"Gzip\", \"Snappy\", and \"Lz4\".""" + kafka_acks: Optional[Union[str, "_models.DataflowEndpointKafkaAcks"]] = rest_field( + name="kafkaAcks", visibility=["read", "create", "update", "delete", "query"] + ) + """Kafka acks. Can be all, one, or zero. No effect if the endpoint is used as a source. Known + values are: \"Zero\", \"One\", and \"All\".""" + partition_strategy: Optional[Union[str, "_models.DataflowEndpointKafkaPartitionStrategy"]] = rest_field( + name="partitionStrategy", visibility=["read", "create", "update", "delete", "query"] + ) + """Partition handling strategy. Can be default or static. No effect if the endpoint is used as a + source. Known values are: \"Default\", \"Static\", \"Topic\", and \"Property\".""" + tls: Optional["_models.TlsProperties"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """TLS configuration.""" + cloud_event_attributes: Optional[Union[str, "_models.CloudEventAttributeType"]] = rest_field( + name="cloudEventAttributes", visibility=["read", "create", "update", "delete", "query"] + ) + """Cloud event mapping config. Known values are: \"Propagate\" and \"CreateOrRemap\".""" + + @overload + def __init__( + self, + *, + authentication: "_models.DataflowEndpointKafkaAuthentication", + host: str, + consumer_group_id: Optional[str] = None, + batching: Optional["_models.DataflowEndpointKafkaBatching"] = None, + copy_mqtt_properties: Optional[Union[str, "_models.OperationalMode"]] = None, + compression: Optional[Union[str, "_models.DataflowEndpointKafkaCompression"]] = None, + kafka_acks: Optional[Union[str, "_models.DataflowEndpointKafkaAcks"]] = None, + partition_strategy: Optional[Union[str, "_models.DataflowEndpointKafkaPartitionStrategy"]] = None, + tls: Optional["_models.TlsProperties"] = None, + cloud_event_attributes: Optional[Union[str, "_models.CloudEventAttributeType"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointKafkaAuthentication(_Model): + """Kafka endpoint Authentication properties. NOTE - only authentication property is allowed per + entry. + + :ivar method: Mode of Authentication. Required. 
Known values are: + "SystemAssignedManagedIdentity", "UserAssignedManagedIdentity", "Sasl", "X509Certificate", and + "Anonymous". + :vartype method: str or ~azure.mgmt.iotoperations.models.KafkaAuthMethod + :ivar system_assigned_managed_identity_settings: System-assigned managed identity + authentication. + :vartype system_assigned_managed_identity_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity + :ivar user_assigned_managed_identity_settings: User-assigned managed identity authentication. + :vartype user_assigned_managed_identity_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationUserAssignedManagedIdentity + :ivar sasl_settings: SASL authentication. + :vartype sasl_settings: ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationSasl + :ivar x509_certificate_settings: X.509 certificate authentication. + :vartype x509_certificate_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationX509 + """ + + method: Union[str, "_models.KafkaAuthMethod"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Mode of Authentication. Required. Known values are: \"SystemAssignedManagedIdentity\", + \"UserAssignedManagedIdentity\", \"Sasl\", \"X509Certificate\", and \"Anonymous\".""" + system_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" + ] = rest_field( + name="systemAssignedManagedIdentitySettings", visibility=["read", "create", "update", "delete", "query"] + ) + """System-assigned managed identity authentication.""" + user_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" + ] = rest_field( + name="userAssignedManagedIdentitySettings", visibility=["read", "create", "update", "delete", "query"] + ) + """User-assigned managed identity authentication.""" + sasl_settings: Optional["_models.DataflowEndpointAuthenticationSasl"] = rest_field( + name="saslSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """SASL authentication.""" + x509_certificate_settings: Optional["_models.DataflowEndpointAuthenticationX509"] = rest_field( + name="x509CertificateSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """X.509 certificate authentication.""" + + @overload + def __init__( + self, + *, + method: Union[str, "_models.KafkaAuthMethod"], + system_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" + ] = None, + user_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" + ] = None, + sasl_settings: Optional["_models.DataflowEndpointAuthenticationSasl"] = None, + x509_certificate_settings: Optional["_models.DataflowEndpointAuthenticationX509"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointKafkaBatching(_Model): + """Kafka endpoint Batching properties. + + :ivar mode: Mode for batching. Known values are: "Enabled" and "Disabled". + :vartype mode: str or ~azure.mgmt.iotoperations.models.OperationalMode + :ivar latency_ms: Batching latency in milliseconds. 
+ :vartype latency_ms: int + :ivar max_bytes: Maximum number of bytes in a batch. + :vartype max_bytes: int + :ivar max_messages: Maximum number of messages in a batch. + :vartype max_messages: int + """ + + mode: Optional[Union[str, "_models.OperationalMode"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Mode for batching. Known values are: \"Enabled\" and \"Disabled\".""" + latency_ms: Optional[int] = rest_field(name="latencyMs", visibility=["read", "create", "update", "delete", "query"]) + """Batching latency in milliseconds.""" + max_bytes: Optional[int] = rest_field(name="maxBytes", visibility=["read", "create", "update", "delete", "query"]) + """Maximum number of bytes in a batch.""" + max_messages: Optional[int] = rest_field( + name="maxMessages", visibility=["read", "create", "update", "delete", "query"] + ) + """Maximum number of messages in a batch.""" + + @overload + def __init__( + self, + *, + mode: Optional[Union[str, "_models.OperationalMode"]] = None, + latency_ms: Optional[int] = None, + max_bytes: Optional[int] = None, + max_messages: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointLocalStorage(_Model): + """Local persistent volume endpoint properties. + + :ivar persistent_volume_claim_ref: Persistent volume claim name. Required. + :vartype persistent_volume_claim_ref: str + """ + + persistent_volume_claim_ref: str = rest_field( + name="persistentVolumeClaimRef", visibility=["read", "create", "update", "delete", "query"] + ) + """Persistent volume claim name. Required.""" + + @overload + def __init__( + self, + *, + persistent_volume_claim_ref: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointMqtt(_Model): + """Broker endpoint properties. + + :ivar authentication: authentication properties. DEFAULT: kubernetes.audience=aio-internal. + NOTE - Enum field only property is allowed. Required. + :vartype authentication: ~azure.mgmt.iotoperations.models.DataflowEndpointMqttAuthentication + :ivar client_id_prefix: Client ID prefix. Client ID generated by the dataflow is -TBD. + Optional; no prefix if omitted. + :vartype client_id_prefix: str + :ivar host: Host of the Broker in the form of :. Optional; connects to Broker + if omitted. + :vartype host: str + :ivar protocol: Enable or disable websockets. Known values are: "Mqtt" and "WebSockets". + :vartype protocol: str or ~azure.mgmt.iotoperations.models.BrokerProtocolType + :ivar keep_alive_seconds: Broker KeepAlive for connection in seconds. + :vartype keep_alive_seconds: int + :ivar retain: Whether or not to keep the retain setting. Known values are: "Keep" and "Never". + :vartype retain: str or ~azure.mgmt.iotoperations.models.MqttRetainType + :ivar max_inflight_messages: The max number of messages to keep in flight. For subscribe, this + is the receive maximum. For publish, this is the maximum number of messages to send before + waiting for an ack. + :vartype max_inflight_messages: int + :ivar qos: Qos for Broker connection. 
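# --- Editor's usage sketch (illustrative; not part of the generated patch) ---
# A Kafka endpoint with SASL authentication and explicit batching settings; the
# host and secret name are examples (an Event Hubs Kafka-compatible host here).
from azure.mgmt.iotoperations import models

kafka_endpoint = models.DataflowEndpointKafka(
    host="mynamespace.servicebus.windows.net:9093",  # example Kafka bootstrap host
    consumer_group_id="$Default",
    authentication=models.DataflowEndpointKafkaAuthentication(
        method="Sasl",  # other known values: managed identities, "X509Certificate", "Anonymous"
        sasl_settings=models.DataflowEndpointAuthenticationSasl(
            sasl_type="Plain", secret_ref="my-sasl-secret"
        ),
    ),
    batching=models.DataflowEndpointKafkaBatching(
        mode="Enabled", latency_ms=5, max_bytes=1_000_000, max_messages=100_000
    ),
    kafka_acks="All",              # known values: "Zero", "One", "All"
    compression="Gzip",            # known values: "None", "Gzip", "Snappy", "Lz4"
    partition_strategy="Default",  # known values: "Default", "Static", "Topic", "Property"
)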
+ :vartype qos: int + :ivar session_expiry_seconds: Session expiry in seconds. + :vartype session_expiry_seconds: int + :ivar tls: TLS configuration. + :vartype tls: ~azure.mgmt.iotoperations.models.TlsProperties + :ivar cloud_event_attributes: Cloud event mapping config. Known values are: "Propagate" and + "CreateOrRemap". + :vartype cloud_event_attributes: str or + ~azure.mgmt.iotoperations.models.CloudEventAttributeType + """ + + authentication: "_models.DataflowEndpointMqttAuthentication" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """authentication properties. DEFAULT: kubernetes.audience=aio-internal. NOTE - Enum field only + property is allowed. Required.""" + client_id_prefix: Optional[str] = rest_field( + name="clientIdPrefix", visibility=["read", "create", "update", "delete", "query"] + ) + """Client ID prefix. Client ID generated by the dataflow is -TBD. Optional; no prefix if + omitted.""" + host: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Host of the Broker in the form of :. Optional; connects to Broker if omitted.""" + protocol: Optional[Union[str, "_models.BrokerProtocolType"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Enable or disable websockets. Known values are: \"Mqtt\" and \"WebSockets\".""" + keep_alive_seconds: Optional[int] = rest_field( + name="keepAliveSeconds", visibility=["read", "create", "update", "delete", "query"] + ) + """Broker KeepAlive for connection in seconds.""" + retain: Optional[Union[str, "_models.MqttRetainType"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Whether or not to keep the retain setting. Known values are: \"Keep\" and \"Never\".""" + max_inflight_messages: Optional[int] = rest_field( + name="maxInflightMessages", visibility=["read", "create", "update", "delete", "query"] + ) + """The max number of messages to keep in flight. For subscribe, this is the receive maximum. For + publish, this is the maximum number of messages to send before waiting for an ack.""" + qos: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Qos for Broker connection.""" + session_expiry_seconds: Optional[int] = rest_field( + name="sessionExpirySeconds", visibility=["read", "create", "update", "delete", "query"] + ) + """Session expiry in seconds.""" + tls: Optional["_models.TlsProperties"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """TLS configuration.""" + cloud_event_attributes: Optional[Union[str, "_models.CloudEventAttributeType"]] = rest_field( + name="cloudEventAttributes", visibility=["read", "create", "update", "delete", "query"] + ) + """Cloud event mapping config. Known values are: \"Propagate\" and \"CreateOrRemap\".""" + + @overload + def __init__( + self, + *, + authentication: "_models.DataflowEndpointMqttAuthentication", + client_id_prefix: Optional[str] = None, + host: Optional[str] = None, + protocol: Optional[Union[str, "_models.BrokerProtocolType"]] = None, + keep_alive_seconds: Optional[int] = None, + retain: Optional[Union[str, "_models.MqttRetainType"]] = None, + max_inflight_messages: Optional[int] = None, + qos: Optional[int] = None, + session_expiry_seconds: Optional[int] = None, + tls: Optional["_models.TlsProperties"] = None, + cloud_event_attributes: Optional[Union[str, "_models.CloudEventAttributeType"]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointMqttAuthentication(_Model): + """Mqtt endpoint Authentication properties. NOTE - only authentication property is allowed per + entry. + + :ivar method: Mode of Authentication. Required. Known values are: + "SystemAssignedManagedIdentity", "UserAssignedManagedIdentity", "ServiceAccountToken", + "X509Certificate", and "Anonymous". + :vartype method: str or ~azure.mgmt.iotoperations.models.MqttAuthMethod + :ivar system_assigned_managed_identity_settings: System-assigned managed identity + authentication. + :vartype system_assigned_managed_identity_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity + :ivar user_assigned_managed_identity_settings: User-assigned managed identity authentication. + :vartype user_assigned_managed_identity_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationUserAssignedManagedIdentity + :ivar service_account_token_settings: Kubernetes service account token authentication. Default + audience if not set is aio-internal. + :vartype service_account_token_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationServiceAccountToken + :ivar x509_certificate_settings: X.509 certificate authentication. + :vartype x509_certificate_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationX509 + """ + + method: Union[str, "_models.MqttAuthMethod"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Mode of Authentication. Required. Known values are: \"SystemAssignedManagedIdentity\", + \"UserAssignedManagedIdentity\", \"ServiceAccountToken\", \"X509Certificate\", and + \"Anonymous\".""" + system_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" + ] = rest_field( + name="systemAssignedManagedIdentitySettings", visibility=["read", "create", "update", "delete", "query"] + ) + """System-assigned managed identity authentication.""" + user_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" + ] = rest_field( + name="userAssignedManagedIdentitySettings", visibility=["read", "create", "update", "delete", "query"] + ) + """User-assigned managed identity authentication.""" + service_account_token_settings: Optional["_models.DataflowEndpointAuthenticationServiceAccountToken"] = rest_field( + name="serviceAccountTokenSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Kubernetes service account token authentication. 
Default audience if not set is aio-internal.""" + x509_certificate_settings: Optional["_models.DataflowEndpointAuthenticationX509"] = rest_field( + name="x509CertificateSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """X.509 certificate authentication.""" + + @overload + def __init__( + self, + *, + method: Union[str, "_models.MqttAuthMethod"], + system_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationSystemAssignedManagedIdentity" + ] = None, + user_assigned_managed_identity_settings: Optional[ + "_models.DataflowEndpointAuthenticationUserAssignedManagedIdentity" + ] = None, + service_account_token_settings: Optional["_models.DataflowEndpointAuthenticationServiceAccountToken"] = None, + x509_certificate_settings: Optional["_models.DataflowEndpointAuthenticationX509"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointOpenTelemetry(_Model): + """OpenTelemetry endpoint properties. + + :ivar host: Host of the OpenTelemetry in the form of :. Required. + :vartype host: str + :ivar batching: Batching configuration. + :vartype batching: ~azure.mgmt.iotoperations.models.BatchingConfiguration + :ivar tls: TLS configuration. + :vartype tls: ~azure.mgmt.iotoperations.models.TlsProperties + :ivar authentication: Authentication properties for OpenTelemetry endpoints. Required. + :vartype authentication: ~azure.mgmt.iotoperations.models.DataflowOpenTelemetryAuthentication + """ + + host: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Host of the OpenTelemetry in the form of :. Required.""" + batching: Optional["_models.BatchingConfiguration"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Batching configuration.""" + tls: Optional["_models.TlsProperties"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """TLS configuration.""" + authentication: "_models.DataflowOpenTelemetryAuthentication" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Authentication properties for OpenTelemetry endpoints. Required.""" + + @overload + def __init__( + self, + *, + host: str, + authentication: "_models.DataflowOpenTelemetryAuthentication", + batching: Optional["_models.BatchingConfiguration"] = None, + tls: Optional["_models.TlsProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointProperties(_Model): + """DataflowEndpoint Resource properties. NOTE - Only one type of endpoint is supported for one + Resource. + + :ivar endpoint_type: Endpoint Type. Required. Known values are: "DataExplorer", + "DataLakeStorage", "FabricOneLake", "Kafka", "LocalStorage", "Mqtt", and "OpenTelemetry". + :vartype endpoint_type: str or ~azure.mgmt.iotoperations.models.EndpointType + :ivar host_type: The type of the Kafka host. E.g FabricRT, EventGrid. Known values are: + "FabricRT", "EventGrid", "LocalBroker", "Eventhub", "CustomMqtt", and "CustomKafka". 
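# --- Editor's usage sketch (illustrative; not part of the generated patch) ---
# A Broker (MQTT) endpoint using Kubernetes service account token auth; the
# host value is an example and may be omitted to connect to the local Broker.
from azure.mgmt.iotoperations import models

mqtt_endpoint = models.DataflowEndpointMqtt(
    host="aio-broker:18883",   # example host:port value; optional per the docstring above
    protocol="Mqtt",           # known values: "Mqtt", "WebSockets"
    qos=1,
    keep_alive_seconds=60,
    retain="Keep",             # known values: "Keep", "Never"
    authentication=models.DataflowEndpointMqttAuthentication(
        method="ServiceAccountToken",
        service_account_token_settings=models.DataflowEndpointAuthenticationServiceAccountToken(
            audience="aio-internal"  # default audience per the docstring above
        ),
    ),
)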
+ :vartype host_type: str or ~azure.mgmt.iotoperations.models.DataflowEnpointHostType + :ivar data_explorer_settings: Azure Data Explorer endpoint. + :vartype data_explorer_settings: ~azure.mgmt.iotoperations.models.DataflowEndpointDataExplorer + :ivar data_lake_storage_settings: Azure Data Lake endpoint. + :vartype data_lake_storage_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointDataLakeStorage + :ivar fabric_one_lake_settings: Microsoft Fabric endpoint. + :vartype fabric_one_lake_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointFabricOneLake + :ivar kafka_settings: Kafka endpoint. + :vartype kafka_settings: ~azure.mgmt.iotoperations.models.DataflowEndpointKafka + :ivar local_storage_settings: Local persistent volume endpoint. + :vartype local_storage_settings: ~azure.mgmt.iotoperations.models.DataflowEndpointLocalStorage + :ivar mqtt_settings: Broker endpoint. + :vartype mqtt_settings: ~azure.mgmt.iotoperations.models.DataflowEndpointMqtt + :ivar open_telemetry_settings: OpenTelemetry endpoint. + :vartype open_telemetry_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointOpenTelemetry + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". + :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState + """ + + endpoint_type: Union[str, "_models.EndpointType"] = rest_field( + name="endpointType", visibility=["read", "create", "update", "delete", "query"] + ) + """Endpoint Type. Required. Known values are: \"DataExplorer\", \"DataLakeStorage\", + \"FabricOneLake\", \"Kafka\", \"LocalStorage\", \"Mqtt\", and \"OpenTelemetry\".""" + host_type: Optional[Union[str, "_models.DataflowEnpointHostType"]] = rest_field( + name="hostType", visibility=["read", "create", "update", "delete", "query"] + ) + """The type of the Kafka host. E.g FabricRT, EventGrid. 
Known values are: \"FabricRT\", + \"EventGrid\", \"LocalBroker\", \"Eventhub\", \"CustomMqtt\", and \"CustomKafka\".""" + data_explorer_settings: Optional["_models.DataflowEndpointDataExplorer"] = rest_field( + name="dataExplorerSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Azure Data Explorer endpoint.""" + data_lake_storage_settings: Optional["_models.DataflowEndpointDataLakeStorage"] = rest_field( + name="dataLakeStorageSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Azure Data Lake endpoint.""" + fabric_one_lake_settings: Optional["_models.DataflowEndpointFabricOneLake"] = rest_field( + name="fabricOneLakeSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Microsoft Fabric endpoint.""" + kafka_settings: Optional["_models.DataflowEndpointKafka"] = rest_field( + name="kafkaSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Kafka endpoint.""" + local_storage_settings: Optional["_models.DataflowEndpointLocalStorage"] = rest_field( + name="localStorageSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Local persistent volume endpoint.""" + mqtt_settings: Optional["_models.DataflowEndpointMqtt"] = rest_field( + name="mqttSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Broker endpoint.""" + open_telemetry_settings: Optional["_models.DataflowEndpointOpenTelemetry"] = rest_field( + name="openTelemetrySettings", visibility=["read", "create", "update", "delete", "query"] + ) + """OpenTelemetry endpoint.""" + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" + + @overload + def __init__( + self, + *, + endpoint_type: Union[str, "_models.EndpointType"], + host_type: Optional[Union[str, "_models.DataflowEnpointHostType"]] = None, + data_explorer_settings: Optional["_models.DataflowEndpointDataExplorer"] = None, + data_lake_storage_settings: Optional["_models.DataflowEndpointDataLakeStorage"] = None, + fabric_one_lake_settings: Optional["_models.DataflowEndpointFabricOneLake"] = None, + kafka_settings: Optional["_models.DataflowEndpointKafka"] = None, + local_storage_settings: Optional["_models.DataflowEndpointLocalStorage"] = None, + mqtt_settings: Optional["_models.DataflowEndpointMqtt"] = None, + open_telemetry_settings: Optional["_models.DataflowEndpointOpenTelemetry"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowEndpointResource(ProxyResource): + """Instance dataflowEndpoint resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. 
+ :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.iotoperations.models.DataflowEndpointProperties + :ivar extended_location: Edge location of the resource. + :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation + """ + + properties: Optional["_models.DataflowEndpointProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + extended_location: Optional["_models.ExtendedLocation"] = rest_field( + name="extendedLocation", visibility=["read", "create"] + ) + """Edge location of the resource.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.DataflowEndpointProperties"] = None, + extended_location: Optional["_models.ExtendedLocation"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowGraphConnectionInput(_Model): + """DataflowGraph DataflowGraphNode Connection Input. + + :ivar name: Name of the source node. Required. + :vartype name: str + :ivar schema: Schema settings for the source node. + :vartype schema: ~azure.mgmt.iotoperations.models.DataflowGraphSchemaSettings + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the source node. Required.""" + schema: Optional["_models.DataflowGraphSchemaSettings"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Schema settings for the source node.""" + + @overload + def __init__( + self, + *, + name: str, + schema: Optional["_models.DataflowGraphSchemaSettings"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowGraphConnectionOutput(_Model): + """DataflowGraph DataflowGraphNode Connection Output. + + :ivar name: Name of the destination node. Required. + :vartype name: str + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the destination node. Required.""" + + @overload + def __init__( + self, + *, + name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowGraphDestinationNodeSettings(_Model): + """DataflowGraph destination node settings. + + :ivar endpoint_ref: The endpoint reference for the destination. Required. + :vartype endpoint_ref: str + :ivar data_destination: Data destination at the endpoint. Required. + :vartype data_destination: str + :ivar output_schema_settings: Output schema settings. + :vartype output_schema_settings: ~azure.mgmt.iotoperations.models.DataflowGraphSchemaSettings + """ + + endpoint_ref: str = rest_field(name="endpointRef", visibility=["read", "create", "update", "delete", "query"]) + """The endpoint reference for the destination. 
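# --- Editor's usage sketch (illustrative; not part of the generated patch) ---
# Wrapping endpoint settings in a DataflowEndpointResource. A local-storage
# endpoint is used because it is the smallest; the PVC name is an example.
# extended_location is omitted here since the ExtendedLocation model is not
# shown in this section.
from azure.mgmt.iotoperations import models

endpoint_resource = models.DataflowEndpointResource(
    properties=models.DataflowEndpointProperties(
        endpoint_type="LocalStorage",  # known values: "DataExplorer", "DataLakeStorage",
                                       # "FabricOneLake", "Kafka", "LocalStorage", "Mqtt", "OpenTelemetry"
        local_storage_settings=models.DataflowEndpointLocalStorage(
            persistent_volume_claim_ref="my-pvc"
        ),
    ),
)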
Required.""" + data_destination: str = rest_field( + name="dataDestination", visibility=["read", "create", "update", "delete", "query"] + ) + """Data destination at the endpoint. Required.""" + output_schema_settings: Optional["_models.DataflowGraphSchemaSettings"] = rest_field( + name="outputSchemaSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Output schema settings.""" + + @overload + def __init__( + self, + *, + endpoint_ref: str, + data_destination: str, + output_schema_settings: Optional["_models.DataflowGraphSchemaSettings"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowGraphGraphNode(DataflowGraphNode, discriminator="Graph"): + """DataflowGraph graph node properties. + + :ivar name: Name of the node. Required. + :vartype name: str + :ivar type: Type of the graph node. Required. Dataflow graph node. + :vartype type: str or ~azure.mgmt.iotoperations.models.GRAPH + :ivar graph_settings: Graph configuration. Required. + :vartype graph_settings: ~azure.mgmt.iotoperations.models.DataflowGraphNodeGraphSettings + """ + + type: Literal[DataflowGraphNodeType.GRAPH] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Type of the graph node. Required. Dataflow graph node.""" + graph_settings: "_models.DataflowGraphNodeGraphSettings" = rest_field( + name="graphSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Graph configuration. Required.""" + + @overload + def __init__( + self, + *, + name: str, + graph_settings: "_models.DataflowGraphNodeGraphSettings", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type=DataflowGraphNodeType.GRAPH, **kwargs) + + +class DataflowGraphGraphNodeConfiguration(_Model): + """DataflowGraph graph node configuration. + + :ivar key: Key of the configuration. Required. + :vartype key: str + :ivar value: Value of the configuration. Required. + :vartype value: str + """ + + key: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Key of the configuration. Required.""" + value: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Value of the configuration. Required.""" + + @overload + def __init__( + self, + *, + key: str, + value: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowGraphNodeConnection(_Model): + """DataflowGraph DataflowGraphNode Connection. + + :ivar from_property: Information about the source node. Required. + :vartype from_property: ~azure.mgmt.iotoperations.models.DataflowGraphConnectionInput + :ivar to: Information about the destination node. Required. 
+ :vartype to: ~azure.mgmt.iotoperations.models.DataflowGraphConnectionOutput + """ + + from_property: "_models.DataflowGraphConnectionInput" = rest_field( + name="from", visibility=["read", "create", "update", "delete", "query"] + ) + """Information about the source node. Required.""" + to: "_models.DataflowGraphConnectionOutput" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Information about the destination node. Required.""" + + @overload + def __init__( + self, + *, + from_property: "_models.DataflowGraphConnectionInput", + to: "_models.DataflowGraphConnectionOutput", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowGraphNodeGraphSettings(_Model): + """DataflowGraph graph node settings. + + :ivar registry_endpoint_ref: Reference to the registry endpoint for pulling the artifact. + Required. + :vartype registry_endpoint_ref: str + :ivar artifact: The artifact name and version to pull. Required. + :vartype artifact: str + :ivar configuration: Configuration key-value pairs. + :vartype configuration: + list[~azure.mgmt.iotoperations.models.DataflowGraphGraphNodeConfiguration] + """ + + registry_endpoint_ref: str = rest_field( + name="registryEndpointRef", visibility=["read", "create", "update", "delete", "query"] + ) + """Reference to the registry endpoint for pulling the artifact. Required.""" + artifact: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The artifact name and version to pull. Required.""" + configuration: Optional[List["_models.DataflowGraphGraphNodeConfiguration"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Configuration key-value pairs.""" + + @overload + def __init__( + self, + *, + registry_endpoint_ref: str, + artifact: str, + configuration: Optional[List["_models.DataflowGraphGraphNodeConfiguration"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowGraphProperties(_Model): + """DataflowGraph properties. + + :ivar mode: The mode of the dataflow graph. Known values are: "Enabled" and "Disabled". + :vartype mode: str or ~azure.mgmt.iotoperations.models.OperationalMode + :ivar request_disk_persistence: Disk persistence mode. Known values are: "Enabled" and + "Disabled". + :vartype request_disk_persistence: str or ~azure.mgmt.iotoperations.models.OperationalMode + :ivar nodes: List of nodes in the dataflow graph. Required. + :vartype nodes: list[~azure.mgmt.iotoperations.models.DataflowGraphNode] + :ivar node_connections: List of connections between nodes in the dataflow graph. Required. + :vartype node_connections: list[~azure.mgmt.iotoperations.models.DataflowGraphNodeConnection] + :ivar provisioning_state: The provisioning state of the dataflow graph. Known values are: + "Succeeded", "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". 
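# --- Editor's note (illustrative; not part of the generated patch) ---
# "from" is a Python keyword, so the generated model surfaces the wire property
# "from" as `from_property`; the raw-JSON overload still uses the wire name.
from azure.mgmt.iotoperations import models

conn = models.DataflowGraphNodeConnection(
    from_property=models.DataflowGraphConnectionInput(name="source"),
    to=models.DataflowGraphConnectionOutput(name="graph"),
)

# Equivalent construction via the single-positional raw-JSON overload:
conn_from_json = models.DataflowGraphNodeConnection(
    {"from": {"name": "source"}, "to": {"name": "graph"}}
)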
+ :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState + """ + + mode: Optional[Union[str, "_models.OperationalMode"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The mode of the dataflow graph. Known values are: \"Enabled\" and \"Disabled\".""" + request_disk_persistence: Optional[Union[str, "_models.OperationalMode"]] = rest_field( + name="requestDiskPersistence", visibility=["read", "create", "update", "delete", "query"] + ) + """Disk persistence mode. Known values are: \"Enabled\" and \"Disabled\".""" + nodes: List["_models.DataflowGraphNode"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of nodes in the dataflow graph. Required.""" + node_connections: List["_models.DataflowGraphNodeConnection"] = rest_field( + name="nodeConnections", visibility=["read", "create", "update", "delete", "query"] + ) + """List of connections between nodes in the dataflow graph. Required.""" + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The provisioning state of the dataflow graph. Known values are: \"Succeeded\", \"Failed\", + \"Canceled\", \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" + + @overload + def __init__( + self, + *, + nodes: List["_models.DataflowGraphNode"], + node_connections: List["_models.DataflowGraphNodeConnection"], + mode: Optional[Union[str, "_models.OperationalMode"]] = None, + request_disk_persistence: Optional[Union[str, "_models.OperationalMode"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowGraphResource(ProxyResource): + """Instance dataflowEndpoint resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.iotoperations.models.DataflowGraphProperties + :ivar extended_location: Edge location of the resource. + :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation + """ + + properties: Optional["_models.DataflowGraphProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + extended_location: Optional["_models.ExtendedLocation"] = rest_field( + name="extendedLocation", visibility=["read", "create"] + ) + """Edge location of the resource.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.DataflowGraphProperties"] = None, + extended_location: Optional["_models.ExtendedLocation"] = None, + ) -> None: ... 
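A minimal usage sketch of the graph-node models above, assuming the generated package is installed; the registry endpoint name, artifact reference, and configuration values below are placeholders:

    # Sketch: building a "Graph" node from the generated models. Names are placeholders.
    from azure.mgmt.iotoperations import models

    graph_node = models.DataflowGraphGraphNode(
        name="wasm-transform",
        graph_settings=models.DataflowGraphNodeGraphSettings(
            registry_endpoint_ref="my-registry-endpoint",  # placeholder registry endpoint name
            artifact="graphs/temperature-filter:1.0.0",    # placeholder artifact name and version
            configuration=[
                models.DataflowGraphGraphNodeConfiguration(key="threshold", value="30"),
            ],
        ),
    )
    # The "type" discriminator is set to Graph automatically by the subclass constructor.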
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowGraphSchemaSettings(_Model): + """DataflowGraph output schema settings. + + :ivar serialization_format: Output serialization format. Known values are: "Delta", "Json", + "Parquet", and "Avro". + :vartype serialization_format: str or + ~azure.mgmt.iotoperations.models.DataflowGraphSerializationFormat + :ivar schema_ref: Reference to the schema that describes the output of the transformation. + Required. + :vartype schema_ref: str + """ + + serialization_format: Optional[Union[str, "_models.DataflowGraphSerializationFormat"]] = rest_field( + name="serializationFormat", visibility=["read", "create", "update", "delete", "query"] + ) + """Output serialization format. Known values are: \"Delta\", \"Json\", \"Parquet\", and \"Avro\".""" + schema_ref: str = rest_field(name="schemaRef", visibility=["read", "create", "update", "delete", "query"]) + """Reference to the schema that describes the output of the transformation. Required.""" + + @overload + def __init__( + self, + *, + schema_ref: str, + serialization_format: Optional[Union[str, "_models.DataflowGraphSerializationFormat"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowGraphSourceNode(DataflowGraphNode, discriminator="Source"): + """DataflowGraph source node properties. + + :ivar name: Name of the node. Required. + :vartype name: str + :ivar type: Type of the source node. Required. Dataflow source node. + :vartype type: str or ~azure.mgmt.iotoperations.models.SOURCE + :ivar source_settings: Source configuration. Required. + :vartype source_settings: ~azure.mgmt.iotoperations.models.DataflowGraphSourceSettings + """ + + type: Literal[DataflowGraphNodeType.SOURCE] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """Type of the source node. Required. Dataflow source node.""" + source_settings: "_models.DataflowGraphSourceSettings" = rest_field( + name="sourceSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Source configuration. Required.""" + + @overload + def __init__( + self, + *, + name: str, + source_settings: "_models.DataflowGraphSourceSettings", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, type=DataflowGraphNodeType.SOURCE, **kwargs) + + +class DataflowGraphSourceSettings(_Model): + """DataflowGraph source node settings. + + :ivar endpoint_ref: The endpoint reference for the source. Required. + :vartype endpoint_ref: str + :ivar data_sources: List of data sources. Required. + :vartype data_sources: list[str] + """ + + endpoint_ref: str = rest_field(name="endpointRef", visibility=["read", "create", "update", "delete", "query"]) + """The endpoint reference for the source. 
Required.""" + data_sources: List[str] = rest_field(name="dataSources", visibility=["read", "create", "update", "delete", "query"]) + """List of data sources. Required.""" + + @overload + def __init__( + self, + *, + endpoint_ref: str, + data_sources: List[str], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowOpenTelemetryAuthentication(_Model): + """Dataflow OpenTelemetry authentication properties. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + DataflowOpenTelemetryAnonymousAuthentication, + DataflowOpenTelemetryServiceAccountAuthentication, + DataflowOpenTelemetryX509CertificateAuthentication + + :ivar method: The authentication method. Required. Known values are: "ServiceAccountToken", + "X509Certificate", and "Anonymous". + :vartype method: str or + ~azure.mgmt.iotoperations.models.DataflowOpenTelemetryAuthenticationMethod + """ + + __mapping__: Dict[str, _Model] = {} + method: str = rest_discriminator(name="method", visibility=["read", "create", "update", "delete", "query"]) + """The authentication method. Required. Known values are: \"ServiceAccountToken\", + \"X509Certificate\", and \"Anonymous\".""" + + @overload + def __init__( + self, + *, + method: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowOpenTelemetryAnonymousAuthentication( + DataflowOpenTelemetryAuthentication, discriminator="Anonymous" +): # pylint: disable=name-too-long + """DataflowOpenTelemetryAnonymousAuthentication properties. + + :ivar method: The authentication method. Required. Connects anonymously. + :vartype method: str or ~azure.mgmt.iotoperations.models.ANONYMOUS + :ivar anonymous_settings: Settings for the anonymous connection. Required. + :vartype anonymous_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationAnonymous + """ + + method: Literal[DataflowOpenTelemetryAuthenticationMethod.ANONYMOUS] = rest_discriminator(name="method", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The authentication method. Required. Connects anonymously.""" + anonymous_settings: "_models.DataflowEndpointAuthenticationAnonymous" = rest_field( + name="anonymousSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Settings for the anonymous connection. Required.""" + + @overload + def __init__( + self, + *, + anonymous_settings: "_models.DataflowEndpointAuthenticationAnonymous", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, method=DataflowOpenTelemetryAuthenticationMethod.ANONYMOUS, **kwargs) + + +class DataflowOpenTelemetryServiceAccountAuthentication( + DataflowOpenTelemetryAuthentication, discriminator="ServiceAccountToken" +): # pylint: disable=name-too-long + """DataflowOpenTelemetryServiceAccountAuthentication properties. 
+ + :ivar method: The authentication method. Required. Uses serviceaccount token. + :vartype method: str or ~azure.mgmt.iotoperations.models.SERVICE_ACCOUNT_TOKEN + :ivar service_account_token_settings: Kubernetes service account token authentication. + Required. + :vartype service_account_token_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationServiceAccountToken + """ + + method: Literal[DataflowOpenTelemetryAuthenticationMethod.SERVICE_ACCOUNT_TOKEN] = rest_discriminator(name="method", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The authentication method. Required. Uses serviceaccount token.""" + service_account_token_settings: "_models.DataflowEndpointAuthenticationServiceAccountToken" = rest_field( + name="serviceAccountTokenSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Kubernetes service account token authentication. Required.""" + + @overload + def __init__( + self, + *, + service_account_token_settings: "_models.DataflowEndpointAuthenticationServiceAccountToken", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, method=DataflowOpenTelemetryAuthenticationMethod.SERVICE_ACCOUNT_TOKEN, **kwargs) + + +class DataflowOpenTelemetryX509CertificateAuthentication( + DataflowOpenTelemetryAuthentication, discriminator="X509Certificate" +): # pylint: disable=name-too-long + """DataflowOpenTelemetryX509CertificateAuthentication properties. + + :ivar method: The authentication method. Required. Uses x509 certificate. + :vartype method: str or ~azure.mgmt.iotoperations.models.X509_CERTIFICATE + :ivar x509_certificate_settings: X.509 certificate authentication settings. Required. + :vartype x509_certificate_settings: + ~azure.mgmt.iotoperations.models.DataflowEndpointAuthenticationX509 + """ + + method: Literal[DataflowOpenTelemetryAuthenticationMethod.X509_CERTIFICATE] = rest_discriminator(name="method", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The authentication method. Required. Uses x509 certificate.""" + x509_certificate_settings: "_models.DataflowEndpointAuthenticationX509" = rest_field( + name="x509CertificateSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """X.509 certificate authentication settings. Required.""" + + @overload + def __init__( + self, + *, + x509_certificate_settings: "_models.DataflowEndpointAuthenticationX509", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, method=DataflowOpenTelemetryAuthenticationMethod.X509_CERTIFICATE, **kwargs) + + +class DataflowOperation(_Model): + """Dataflow Operation properties. NOTE - One only method is allowed to be used for one entry. + + :ivar operation_type: Type of operation. Required. Known values are: "Source", "Destination", + and "BuiltInTransformation". + :vartype operation_type: str or ~azure.mgmt.iotoperations.models.OperationType + :ivar name: Optional user provided name of the transformation. + :vartype name: str + :ivar source_settings: Source configuration. 
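The OpenTelemetry authentication models above form a discriminated union keyed on the `method` field. A small sketch of how a caller might branch on the concrete subclass of a deserialized value; the helper name is illustrative only:

    # Sketch: branching on the discriminated OpenTelemetry authentication union.
    # Constructing a concrete subclass sets the "method" discriminator automatically.
    from azure.mgmt.iotoperations import models

    def describe_auth(auth: models.DataflowOpenTelemetryAuthentication) -> str:
        if isinstance(auth, models.DataflowOpenTelemetryServiceAccountAuthentication):
            return "service account token"
        if isinstance(auth, models.DataflowOpenTelemetryX509CertificateAuthentication):
            return "x509 certificate"
        if isinstance(auth, models.DataflowOpenTelemetryAnonymousAuthentication):
            return "anonymous"
        return f"unknown method: {auth.method}"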
+ :vartype source_settings: ~azure.mgmt.iotoperations.models.DataflowSourceOperationSettings + :ivar built_in_transformation_settings: Built In Transformation configuration. + :vartype built_in_transformation_settings: + ~azure.mgmt.iotoperations.models.DataflowBuiltInTransformationSettings + :ivar destination_settings: Destination configuration. + :vartype destination_settings: + ~azure.mgmt.iotoperations.models.DataflowDestinationOperationSettings + """ + + operation_type: Union[str, "_models.OperationType"] = rest_field( + name="operationType", visibility=["read", "create", "update", "delete", "query"] + ) + """Type of operation. Required. Known values are: \"Source\", \"Destination\", and + \"BuiltInTransformation\".""" + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional user provided name of the transformation.""" + source_settings: Optional["_models.DataflowSourceOperationSettings"] = rest_field( + name="sourceSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Source configuration.""" + built_in_transformation_settings: Optional["_models.DataflowBuiltInTransformationSettings"] = rest_field( + name="builtInTransformationSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Built In Transformation configuration.""" + destination_settings: Optional["_models.DataflowDestinationOperationSettings"] = rest_field( + name="destinationSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Destination configuration.""" + + @overload + def __init__( + self, + *, + operation_type: Union[str, "_models.OperationType"], + name: Optional[str] = None, + source_settings: Optional["_models.DataflowSourceOperationSettings"] = None, + built_in_transformation_settings: Optional["_models.DataflowBuiltInTransformationSettings"] = None, + destination_settings: Optional["_models.DataflowDestinationOperationSettings"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowProfileProperties(_Model): + """DataflowProfile Resource properties. + + :ivar diagnostics: Spec defines the desired identities of NBC diagnostics settings. + :vartype diagnostics: ~azure.mgmt.iotoperations.models.ProfileDiagnostics + :ivar instance_count: To manually scale the dataflow profile, specify the maximum number of + instances you want to run. + :vartype instance_count: int + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". + :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState + """ + + diagnostics: Optional["_models.ProfileDiagnostics"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Spec defines the desired identities of NBC diagnostics settings.""" + instance_count: Optional[int] = rest_field( + name="instanceCount", visibility=["read", "create", "update", "delete", "query"] + ) + """To manually scale the dataflow profile, specify the maximum number of instances you want to + run.""" + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. 
Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" + + @overload + def __init__( + self, + *, + diagnostics: Optional["_models.ProfileDiagnostics"] = None, + instance_count: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowProfileResource(ProxyResource): + """Instance dataflowProfile resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.iotoperations.models.DataflowProfileProperties + :ivar extended_location: Edge location of the resource. + :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation + """ + + properties: Optional["_models.DataflowProfileProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + extended_location: Optional["_models.ExtendedLocation"] = rest_field( + name="extendedLocation", visibility=["read", "create"] + ) + """Edge location of the resource.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.DataflowProfileProperties"] = None, + extended_location: Optional["_models.ExtendedLocation"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowProperties(_Model): + """Dataflow Resource properties. + + :ivar mode: Mode for Dataflow. Optional; defaults to Enabled. Known values are: "Enabled" and + "Disabled". + :vartype mode: str or ~azure.mgmt.iotoperations.models.OperationalMode + :ivar operations: List of operations including source and destination references as well as + transformation. Required. + :vartype operations: list[~azure.mgmt.iotoperations.models.DataflowOperation] + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". + :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState + """ + + mode: Optional[Union[str, "_models.OperationalMode"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Mode for Dataflow. Optional; defaults to Enabled. 
Known values are: \"Enabled\" and + \"Disabled\".""" + operations: List["_models.DataflowOperation"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """List of operations including source and destination references as well as transformation. + Required.""" + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" + + @overload + def __init__( + self, + *, + operations: List["_models.DataflowOperation"], + mode: Optional[Union[str, "_models.OperationalMode"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowResource(ProxyResource): + """Instance dataflowProfile dataflow resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.iotoperations.models.DataflowProperties + :ivar extended_location: Edge location of the resource. + :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation + """ + + properties: Optional["_models.DataflowProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + extended_location: Optional["_models.ExtendedLocation"] = rest_field( + name="extendedLocation", visibility=["read", "create"] + ) + """Edge location of the resource.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.DataflowProperties"] = None, + extended_location: Optional["_models.ExtendedLocation"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DataflowSourceOperationSettings(_Model): + """Dataflow Source Operation properties. + + :ivar endpoint_ref: Reference to the Dataflow Endpoint resource. Can only be of Broker and + Kafka type. Required. + :vartype endpoint_ref: str + :ivar asset_ref: Reference to the resource in Azure Device Registry where the data in the + endpoint originates from. + :vartype asset_ref: str + :ivar serialization_format: Content is a JSON Schema. Allowed: JSON Schema/draft-7. "Json" + :vartype serialization_format: str or + ~azure.mgmt.iotoperations.models.SourceSerializationFormat + :ivar schema_ref: Schema CR reference. Data will be deserialized according to the schema, and + dropped if it doesn't match. 
+ :vartype schema_ref: str + :ivar data_sources: List of source locations. Can be Broker or Kafka topics. Supports wildcards + # and +. Required. + :vartype data_sources: list[str] + """ + + endpoint_ref: str = rest_field(name="endpointRef", visibility=["read", "create", "update", "delete", "query"]) + """Reference to the Dataflow Endpoint resource. Can only be of Broker and Kafka type. Required.""" + asset_ref: Optional[str] = rest_field(name="assetRef", visibility=["read", "create", "update", "delete", "query"]) + """Reference to the resource in Azure Device Registry where the data in the endpoint originates + from.""" + serialization_format: Optional[Union[str, "_models.SourceSerializationFormat"]] = rest_field( + name="serializationFormat", visibility=["read", "create", "update", "delete", "query"] + ) + """Content is a JSON Schema. Allowed: JSON Schema/draft-7. \"Json\"""" + schema_ref: Optional[str] = rest_field(name="schemaRef", visibility=["read", "create", "update", "delete", "query"]) + """Schema CR reference. Data will be deserialized according to the schema, and dropped if it + doesn't match.""" + data_sources: List[str] = rest_field(name="dataSources", visibility=["read", "create", "update", "delete", "query"]) + """List of source locations. Can be Broker or Kafka topics. Supports wildcards # and +. Required.""" + + @overload + def __init__( + self, + *, + endpoint_ref: str, + data_sources: List[str], + asset_ref: Optional[str] = None, + serialization_format: Optional[Union[str, "_models.SourceSerializationFormat"]] = None, + schema_ref: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DiagnosticsLogs(_Model): + """Diagnostic Log properties. + + :ivar level: The log level. Examples - 'debug', 'info', 'warn', 'error', 'trace'. + :vartype level: str + """ + + level: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The log level. Examples - 'debug', 'info', 'warn', 'error', 'trace'.""" + + @overload + def __init__( + self, + *, + level: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DiskBackedMessageBuffer(_Model): + """DiskBackedMessageBuffer properties. + + :ivar max_size: The max size of the message buffer on disk. If a PVC template is specified + using one of ephemeralVolumeClaimSpec or persistentVolumeClaimSpec, then this size is used as + the request and limit sizes of that template. If neither ephemeralVolumeClaimSpec nor + persistentVolumeClaimSpec are specified, then an emptyDir volume is mounted with this size as + its limit. See `https://kubernetes.io/docs/concepts/storage/volumes/#emptydir + `_ for details. Required. + :vartype max_size: str + :ivar ephemeral_volume_claim_spec: Use the specified persistent volume claim template to mount + a "generic ephemeral volume" for the message buffer. See + `https://kubernetes.io/docs/concepts/storage/ephemeral-volumes/#generic-ephemeral-volumes + `_ + for details. 
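A sketch of the dataflow operation models above, showing only a source operation; the destination and built-in transformation settings models are defined elsewhere in this file, and the endpoint name and topic filter are placeholders:

    # Sketch: a dataflow defined as an ordered list of operations.
    from azure.mgmt.iotoperations import models

    source_op = models.DataflowOperation(
        operation_type="Source",
        name="source1",
        source_settings=models.DataflowSourceOperationSettings(
            endpoint_ref="default",                # Broker or Kafka endpoint, placeholder name
            data_sources=["factory/+/telemetry"],  # topics; wildcards # and + are supported
        ),
    )

    dataflow = models.DataflowResource(
        properties=models.DataflowProperties(
            mode="Enabled",          # optional; defaults to Enabled
            operations=[source_op],  # a destination operation would normally follow
        ),
    )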
+ :vartype ephemeral_volume_claim_spec: ~azure.mgmt.iotoperations.models.VolumeClaimSpec + :ivar persistent_volume_claim_spec: Use the specified persistent volume claim template to mount + a persistent volume for the message buffer. + :vartype persistent_volume_claim_spec: ~azure.mgmt.iotoperations.models.VolumeClaimSpec + """ + + max_size: str = rest_field(name="maxSize", visibility=["read", "create", "update", "delete", "query"]) + """The max size of the message buffer on disk. If a PVC template is specified using one of + ephemeralVolumeClaimSpec or persistentVolumeClaimSpec, then this size is used as the request + and limit sizes of that template. If neither ephemeralVolumeClaimSpec nor + persistentVolumeClaimSpec are specified, then an emptyDir volume is mounted with this size as + its limit. See `https://kubernetes.io/docs/concepts/storage/volumes/#emptydir + `_ for details. Required.""" + ephemeral_volume_claim_spec: Optional["_models.VolumeClaimSpec"] = rest_field( + name="ephemeralVolumeClaimSpec", visibility=["read", "create", "update", "delete", "query"] + ) + """Use the specified persistent volume claim template to mount a \"generic ephemeral volume\" for + the message buffer. See + `https://kubernetes.io/docs/concepts/storage/ephemeral-volumes/#generic-ephemeral-volumes + `_ + for details.""" + persistent_volume_claim_spec: Optional["_models.VolumeClaimSpec"] = rest_field( + name="persistentVolumeClaimSpec", visibility=["read", "create", "update", "delete", "query"] + ) + """Use the specified persistent volume claim template to mount a persistent volume for the message + buffer.""" + + @overload + def __init__( + self, + *, + max_size: str, + ephemeral_volume_claim_spec: Optional["_models.VolumeClaimSpec"] = None, + persistent_volume_claim_spec: Optional["_models.VolumeClaimSpec"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ErrorAdditionalInfo(_Model): + """The resource management error additional info. + + :ivar type: The additional info type. + :vartype type: str + :ivar info: The additional info. + :vartype info: any + """ + + type: Optional[str] = rest_field(visibility=["read"]) + """The additional info type.""" + info: Optional[Any] = rest_field(visibility=["read"]) + """The additional info.""" + + +class ErrorDetail(_Model): + """The error detail. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. + :vartype message: str + :ivar target: The error target. + :vartype target: str + :ivar details: The error details. + :vartype details: list[~azure.mgmt.iotoperations.models.ErrorDetail] + :ivar additional_info: The error additional info. 
+ :vartype additional_info: list[~azure.mgmt.iotoperations.models.ErrorAdditionalInfo] + """ + + code: Optional[str] = rest_field(visibility=["read"]) + """The error code.""" + message: Optional[str] = rest_field(visibility=["read"]) + """The error message.""" + target: Optional[str] = rest_field(visibility=["read"]) + """The error target.""" + details: Optional[List["_models.ErrorDetail"]] = rest_field(visibility=["read"]) + """The error details.""" + additional_info: Optional[List["_models.ErrorAdditionalInfo"]] = rest_field( + name="additionalInfo", visibility=["read"] + ) + """The error additional info.""" + + +class ErrorResponse(_Model): + """Error response. + + :ivar error: The error object. + :vartype error: ~azure.mgmt.iotoperations.models.ErrorDetail + """ + + error: Optional["_models.ErrorDetail"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error object.""" + + @overload + def __init__( + self, + *, + error: Optional["_models.ErrorDetail"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ExtendedLocation(_Model): + """Extended location is an extension of Azure locations. They provide a way to use their Azure ARC + enabled Kubernetes clusters as target locations for deploying Azure services instances. + + :ivar name: The name of the extended location. Required. + :vartype name: str + :ivar type: Type of ExtendedLocation. Required. "CustomLocation" + :vartype type: str or ~azure.mgmt.iotoperations.models.ExtendedLocationType + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the extended location. Required.""" + type: Union[str, "_models.ExtendedLocationType"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Type of ExtendedLocation. Required. \"CustomLocation\"""" + + @overload + def __init__( + self, + *, + name: str, + type: Union[str, "_models.ExtendedLocationType"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Frontend(_Model): + """The desired properties of the frontend instances of the Broker. + + :ivar replicas: The desired number of frontend instances (pods). Required. + :vartype replicas: int + :ivar workers: Number of logical frontend workers per instance (pod). + :vartype workers: int + """ + + replicas: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The desired number of frontend instances (pods). Required.""" + workers: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Number of logical frontend workers per instance (pod).""" + + @overload + def __init__( + self, + *, + replicas: int, + workers: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class GenerateResourceLimits(_Model): + """GenerateResourceLimits properties. + + :ivar cpu: The toggle to enable/disable cpu resource limits. Known values are: "Enabled" and + "Disabled". + :vartype cpu: str or ~azure.mgmt.iotoperations.models.OperationalMode + """ + + cpu: Optional[Union[str, "_models.OperationalMode"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The toggle to enable/disable cpu resource limits. Known values are: \"Enabled\" and + \"Disabled\".""" + + @overload + def __init__( + self, + *, + cpu: Optional[Union[str, "_models.OperationalMode"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class InstanceFeature(_Model): + """The features of the AIO Instance. + + :ivar mode: The state of the feature. Known values are: "Stable", "Preview", and "Disabled". + :vartype mode: str or ~azure.mgmt.iotoperations.models.InstanceFeatureMode + :ivar settings: The settings of the feature. + :vartype settings: dict[str, str or ~azure.mgmt.iotoperations.models.OperationalMode] + """ + + mode: Optional[Union[str, "_models.InstanceFeatureMode"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The state of the feature. Known values are: \"Stable\", \"Preview\", and \"Disabled\".""" + settings: Optional[Dict[str, Union[str, "_models.OperationalMode"]]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The settings of the feature.""" + + @overload + def __init__( + self, + *, + mode: Optional[Union[str, "_models.InstanceFeatureMode"]] = None, + settings: Optional[Dict[str, Union[str, "_models.OperationalMode"]]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class InstancePatchModel(_Model): + """The Instance update model. + + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar identity: The managed service identities assigned to this resource. + :vartype identity: ~azure.mgmt.iotoperations.models.ManagedServiceIdentity + """ + + tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Resource tags.""" + identity: Optional["_models.ManagedServiceIdentity"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The managed service identities assigned to this resource.""" + + @overload + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class InstanceProperties(_Model): + """The properties of the Instance resource. + + :ivar description: Detailed description of the Instance. 
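A sketch of the broker sizing models defined above (Frontend, GenerateResourceLimits, DiskBackedMessageBuffer); how they plug into the broker properties is defined elsewhere in this file, and the sizes shown are placeholders:

    # Sketch: standalone broker sizing knobs. Values are placeholders.
    from azure.mgmt.iotoperations import models

    frontend = models.Frontend(replicas=2, workers=2)         # 2 frontend pods, 2 workers each
    limits = models.GenerateResourceLimits(cpu="Enabled")     # generate CPU resource limits
    buffer = models.DiskBackedMessageBuffer(max_size="500M")  # emptyDir-backed buffer limit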
+ :vartype description: str + :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", + "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". + :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState + :ivar version: The Azure IoT Operations version. + :vartype version: str + :ivar schema_registry_ref: The reference to the Schema Registry for this AIO Instance. + Required. + :vartype schema_registry_ref: ~azure.mgmt.iotoperations.models.SchemaRegistryRef + :ivar default_secret_provider_class_ref: The reference to the AIO Secret provider class. + :vartype default_secret_provider_class_ref: + ~azure.mgmt.iotoperations.models.SecretProviderClassRef + :ivar features: The features of the AIO Instance. + :vartype features: dict[str, ~azure.mgmt.iotoperations.models.InstanceFeature] + :ivar adr_namespace_ref: The Azure Device Registry Namespace used by Assets, Discovered Assets + and devices. + :vartype adr_namespace_ref: ~azure.mgmt.iotoperations.models.AzureDeviceRegistryNamespaceRef + """ + + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Detailed description of the Instance.""" + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" + version: Optional[str] = rest_field(visibility=["read"]) + """The Azure IoT Operations version.""" + schema_registry_ref: "_models.SchemaRegistryRef" = rest_field( + name="schemaRegistryRef", visibility=["read", "create", "update", "delete", "query"] + ) + """The reference to the Schema Registry for this AIO Instance. Required.""" + default_secret_provider_class_ref: Optional["_models.SecretProviderClassRef"] = rest_field( + name="defaultSecretProviderClassRef", visibility=["read", "create", "update", "delete", "query"] + ) + """The reference to the AIO Secret provider class.""" + features: Optional[Dict[str, "_models.InstanceFeature"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The features of the AIO Instance.""" + adr_namespace_ref: Optional["_models.AzureDeviceRegistryNamespaceRef"] = rest_field( + name="adrNamespaceRef", visibility=["read", "create", "update", "delete", "query"] + ) + """The Azure Device Registry Namespace used by Assets, Discovered Assets and devices.""" + + @overload + def __init__( + self, + *, + schema_registry_ref: "_models.SchemaRegistryRef", + description: Optional[str] = None, + default_secret_provider_class_ref: Optional["_models.SecretProviderClassRef"] = None, + features: Optional[Dict[str, "_models.InstanceFeature"]] = None, + adr_namespace_ref: Optional["_models.AzureDeviceRegistryNamespaceRef"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TrackedResource(Resource): + """Tracked Resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. 
+ :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + """ + + tags: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Resource tags.""" + location: str = rest_field(visibility=["read", "create"]) + """The geo-location where the resource lives. Required.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[Dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class InstanceResource(TrackedResource): + """A Instance resource is a logical container for a set of child resources. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.iotoperations.models.InstanceProperties + :ivar extended_location: Edge location of the resource. Required. + :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation + :ivar identity: The managed service identities assigned to this resource. + :vartype identity: ~azure.mgmt.iotoperations.models.ManagedServiceIdentity + """ + + properties: Optional["_models.InstanceProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + extended_location: "_models.ExtendedLocation" = rest_field(name="extendedLocation", visibility=["read", "create"]) + """Edge location of the resource. Required.""" + identity: Optional["_models.ManagedServiceIdentity"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The managed service identities assigned to this resource.""" + + @overload + def __init__( + self, + *, + location: str, + extended_location: "_models.ExtendedLocation", + tags: Optional[Dict[str, str]] = None, + properties: Optional["_models.InstanceProperties"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
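A sketch of the top-level instance resource above; InstanceProperties requires a SchemaRegistryRef, which is defined elsewhere in this file, so properties are omitted here, and the location, custom-location ID, and tags are placeholders:

    # Sketch: an AIO instance resource and a follow-up PATCH model.
    from azure.mgmt.iotoperations import models

    instance = models.InstanceResource(
        location="eastus2",  # placeholder region
        extended_location=models.ExtendedLocation(
            name="/subscriptions/.../customLocations/my-cl",  # placeholder custom location ID
            type="CustomLocation",
        ),
        identity=models.ManagedServiceIdentity(type="SystemAssigned"),
        tags={"env": "test"},
    )

    # Tags and identity can later be updated via the PATCH model:
    patch = models.InstancePatchModel(tags={"env": "prod"})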
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class KubernetesReference(_Model): + """Kubernetes reference. + + :ivar api_group: APIGroup is the group for the resource being referenced. If APIGroup is not + specified, the specified Kind must be in the core API group. For any other third-party types, + APIGroup is required. + :vartype api_group: str + :ivar kind: Kind is the type of resource being referenced. Required. + :vartype kind: str + :ivar name: Name is the name of resource being referenced. Required. + :vartype name: str + :ivar namespace: Namespace is the namespace of the resource being referenced. This field is + required when the resource has a namespace. + :vartype namespace: str + """ + + api_group: Optional[str] = rest_field(name="apiGroup", visibility=["read", "create", "update", "delete", "query"]) + """APIGroup is the group for the resource being referenced. If APIGroup is not specified, the + specified Kind must be in the core API group. For any other third-party types, APIGroup is + required.""" + kind: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Kind is the type of resource being referenced. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name is the name of resource being referenced. Required.""" + namespace: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Namespace is the namespace of the resource being referenced. This field is required when the + resource has a namespace.""" + + @overload + def __init__( + self, + *, + kind: str, + name: str, + api_group: Optional[str] = None, + namespace: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ListenerPort(_Model): + """Defines a TCP port on which a ``BrokerListener`` listens. + + :ivar authentication_ref: Reference to client authentication settings. Omit to disable + authentication. + :vartype authentication_ref: str + :ivar authorization_ref: Reference to client authorization settings. Omit to disable + authorization. + :vartype authorization_ref: str + :ivar node_port: Kubernetes node port. Only relevant when this port is associated with a + ``NodePort`` listener. + :vartype node_port: int + :ivar port: TCP port for accepting client connections. Required. + :vartype port: int + :ivar protocol: Protocol to use for client connections. Known values are: "Mqtt" and + "WebSockets". + :vartype protocol: str or ~azure.mgmt.iotoperations.models.BrokerProtocolType + :ivar tls: TLS server certificate settings for this port. Omit to disable TLS. + :vartype tls: ~azure.mgmt.iotoperations.models.TlsCertMethod + """ + + authentication_ref: Optional[str] = rest_field( + name="authenticationRef", visibility=["read", "create", "update", "delete", "query"] + ) + """Reference to client authentication settings. Omit to disable authentication.""" + authorization_ref: Optional[str] = rest_field( + name="authorizationRef", visibility=["read", "create", "update", "delete", "query"] + ) + """Reference to client authorization settings. 
Omit to disable authorization.""" + node_port: Optional[int] = rest_field(name="nodePort", visibility=["read", "create", "update", "delete", "query"]) + """Kubernetes node port. Only relevant when this port is associated with a ``NodePort`` listener.""" + port: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """TCP port for accepting client connections. Required.""" + protocol: Optional[Union[str, "_models.BrokerProtocolType"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Protocol to use for client connections. Known values are: \"Mqtt\" and \"WebSockets\".""" + tls: Optional["_models.TlsCertMethod"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """TLS server certificate settings for this port. Omit to disable TLS.""" + + @overload + def __init__( + self, + *, + port: int, + authentication_ref: Optional[str] = None, + authorization_ref: Optional[str] = None, + node_port: Optional[int] = None, + protocol: Optional[Union[str, "_models.BrokerProtocolType"]] = None, + tls: Optional["_models.TlsCertMethod"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class LocalKubernetesReference(_Model): + """Kubernetes reference. + + :ivar api_group: APIGroup is the group for the resource being referenced. If APIGroup is not + specified, the specified Kind must be in the core API group. For any other third-party types, + APIGroup is required. + :vartype api_group: str + :ivar kind: Kind is the type of resource being referenced. Required. + :vartype kind: str + :ivar name: Name is the name of resource being referenced. Required. + :vartype name: str + """ + + api_group: Optional[str] = rest_field(name="apiGroup", visibility=["read", "create", "update", "delete", "query"]) + """APIGroup is the group for the resource being referenced. If APIGroup is not specified, the + specified Kind must be in the core API group. For any other third-party types, APIGroup is + required.""" + kind: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Kind is the type of resource being referenced. Required.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name is the name of resource being referenced. Required.""" + + @overload + def __init__( + self, + *, + kind: str, + name: str, + api_group: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ManagedServiceIdentity(_Model): + """Managed service identity (system assigned and/or user assigned identities). + + :ivar principal_id: The service principal ID of the system assigned identity. This property + will only be provided for a system assigned identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of the system assigned identity. This property will only be + provided for a system assigned identity. + :vartype tenant_id: str + :ivar type: The type of managed identity assigned to this resource. Required. 
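A sketch of the ListenerPort model above with TLS and authorization omitted; TLS would be configured through TlsCertMethod, defined elsewhere in this file, and the port and authentication names are placeholders:

    # Sketch: a plain MQTT listener port with client authentication enabled.
    from azure.mgmt.iotoperations import models

    port = models.ListenerPort(
        port=1883,                      # placeholder TCP port
        protocol="Mqtt",                # known values: "Mqtt", "WebSockets"
        authentication_ref="my-authn",  # placeholder BrokerAuthentication name
        # tls and authorization_ref are omitted in this sketch
    )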
Known values are: + "None", "SystemAssigned", "UserAssigned", and "SystemAssigned,UserAssigned". + :vartype type: str or ~azure.mgmt.iotoperations.models.ManagedServiceIdentityType + :ivar user_assigned_identities: The identities assigned to this resource by the user. + :vartype user_assigned_identities: dict[str, + ~azure.mgmt.iotoperations.models.UserAssignedIdentity] + """ + + principal_id: Optional[str] = rest_field(name="principalId", visibility=["read"]) + """The service principal ID of the system assigned identity. This property will only be provided + for a system assigned identity.""" + tenant_id: Optional[str] = rest_field(name="tenantId", visibility=["read"]) + """The tenant ID of the system assigned identity. This property will only be provided for a system + assigned identity.""" + type: Union[str, "_models.ManagedServiceIdentityType"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The type of managed identity assigned to this resource. Required. Known values are: \"None\", + \"SystemAssigned\", \"UserAssigned\", and \"SystemAssigned,UserAssigned\".""" + user_assigned_identities: Optional[Dict[str, "_models.UserAssignedIdentity"]] = rest_field( + name="userAssignedIdentities", visibility=["read", "create", "update", "delete", "query"] + ) + """The identities assigned to this resource by the user.""" + + @overload + def __init__( + self, + *, + type: Union[str, "_models.ManagedServiceIdentityType"], + user_assigned_identities: Optional[Dict[str, "_models.UserAssignedIdentity"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Metrics(_Model): + """Diagnostic Metrics properties. + + :ivar prometheus_port: The prometheus port to expose the metrics. + :vartype prometheus_port: int + """ + + prometheus_port: Optional[int] = rest_field( + name="prometheusPort", visibility=["read", "create", "update", "delete", "query"] + ) + """The prometheus port to expose the metrics.""" + + @overload + def __init__( + self, + *, + prometheus_port: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Operation(_Model): + """REST API Operation. + + :ivar name: The name of the operation, as per Resource-Based Access Control (RBAC). Examples: + "Microsoft.Compute/virtualMachines/write", "Microsoft.Compute/virtualMachines/capture/action". + :vartype name: str + :ivar is_data_action: Whether the operation applies to data-plane. This is "true" for + data-plane operations and "false" for Azure Resource Manager/control-plane operations. + :vartype is_data_action: bool + :ivar display: Localized display information for this particular operation. + :vartype display: ~azure.mgmt.iotoperations.models.OperationDisplay + :ivar origin: The intended executor of the operation; as in Resource Based Access Control + (RBAC) and audit logs UX. Default value is "user,system". Known values are: "user", "system", + and "user,system". + :vartype origin: str or ~azure.mgmt.iotoperations.models.Origin + :ivar action_type: Extensible enum. Indicates the action type. 
"Internal" refers to actions + that are for internal only APIs. "Internal" + :vartype action_type: str or ~azure.mgmt.iotoperations.models.ActionType + """ + + name: Optional[str] = rest_field(visibility=["read"]) + """The name of the operation, as per Resource-Based Access Control (RBAC). Examples: + \"Microsoft.Compute/virtualMachines/write\", + \"Microsoft.Compute/virtualMachines/capture/action\".""" + is_data_action: Optional[bool] = rest_field(name="isDataAction", visibility=["read"]) + """Whether the operation applies to data-plane. This is \"true\" for data-plane operations and + \"false\" for Azure Resource Manager/control-plane operations.""" + display: Optional["_models.OperationDisplay"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Localized display information for this particular operation.""" + origin: Optional[Union[str, "_models.Origin"]] = rest_field(visibility=["read"]) + """The intended executor of the operation; as in Resource Based Access Control (RBAC) and audit + logs UX. Default value is \"user,system\". Known values are: \"user\", \"system\", and + \"user,system\".""" + action_type: Optional[Union[str, "_models.ActionType"]] = rest_field(name="actionType", visibility=["read"]) + """Extensible enum. Indicates the action type. \"Internal\" refers to actions that are for + internal only APIs. \"Internal\"""" + + @overload + def __init__( + self, + *, + display: Optional["_models.OperationDisplay"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class OperationDisplay(_Model): + """Localized display information for and operation. + + :ivar provider: The localized friendly form of the resource provider name, e.g. "Microsoft + Monitoring Insights" or "Microsoft Compute". + :vartype provider: str + :ivar resource: The localized friendly name of the resource type related to this operation. + E.g. "Virtual Machines" or "Job Schedule Collections". + :vartype resource: str + :ivar operation: The concise, localized friendly name for the operation; suitable for + dropdowns. E.g. "Create or Update Virtual Machine", "Restart Virtual Machine". + :vartype operation: str + :ivar description: The short, localized friendly description of the operation; suitable for + tool tips and detailed views. + :vartype description: str """ - code: Optional[str] = rest_field(visibility=["read"]) - """The error code.""" - message: Optional[str] = rest_field(visibility=["read"]) - """The error message.""" - target: Optional[str] = rest_field(visibility=["read"]) - """The error target.""" - details: Optional[List["_models.ErrorDetail"]] = rest_field(visibility=["read"]) - """The error details.""" - additional_info: Optional[List["_models.ErrorAdditionalInfo"]] = rest_field( - name="additionalInfo", visibility=["read"] - ) - """The error additional info.""" + provider: Optional[str] = rest_field(visibility=["read"]) + """The localized friendly form of the resource provider name, e.g. \"Microsoft Monitoring + Insights\" or \"Microsoft Compute\".""" + resource: Optional[str] = rest_field(visibility=["read"]) + """The localized friendly name of the resource type related to this operation. E.g. 
\"Virtual + Machines\" or \"Job Schedule Collections\".""" + operation: Optional[str] = rest_field(visibility=["read"]) + """The concise, localized friendly name for the operation; suitable for dropdowns. E.g. \"Create + or Update Virtual Machine\", \"Restart Virtual Machine\".""" + description: Optional[str] = rest_field(visibility=["read"]) + """The short, localized friendly description of the operation; suitable for tool tips and detailed + views.""" -class ErrorResponse(_model_base.Model): - """Common error response for all Azure Resource Manager APIs to return error details for failed - operations. +class PrincipalDefinition(_Model): + """PrincipalDefinition properties of Rule. - :ivar error: The error object. - :vartype error: ~azure.mgmt.iotoperations.models.ErrorDetail + :ivar attributes: A list of key-value pairs that match the attributes of the clients. The + attributes are case-sensitive and must match the attributes provided by the clients during + authentication. + :vartype attributes: list[dict[str, str]] + :ivar client_ids: A list of client IDs that match the clients. The client IDs are + case-sensitive and must match the client IDs provided by the clients during connection. + :vartype client_ids: list[str] + :ivar usernames: A list of usernames that match the clients. The usernames are case-sensitive + and must match the usernames provided by the clients during authentication. + :vartype usernames: list[str] """ - error: Optional["_models.ErrorDetail"] = rest_field() - """The error object.""" + attributes: Optional[List[Dict[str, str]]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A list of key-value pairs that match the attributes of the clients. The attributes are + case-sensitive and must match the attributes provided by the clients during authentication.""" + client_ids: Optional[List[str]] = rest_field( + name="clientIds", visibility=["read", "create", "update", "delete", "query"] + ) + """A list of client IDs that match the clients. The client IDs are case-sensitive and must match + the client IDs provided by the clients during connection.""" + usernames: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A list of usernames that match the clients. The usernames are case-sensitive and must match the + usernames provided by the clients during authentication.""" @overload def __init__( self, *, - error: Optional["_models.ErrorDetail"] = None, + attributes: Optional[List[Dict[str, str]]] = None, + client_ids: Optional[List[str]] = None, + usernames: Optional[List[str]] = None, ) -> None: ... @overload @@ -3018,28 +7624,26 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ExtendedLocation(_model_base.Model): - """Extended location is an extension of Azure locations. They provide a way to use their Azure ARC - enabled Kubernetes clusters as target locations for deploying Azure services instances. - +class ProfileDiagnostics(_Model): + """DataflowProfile Diagnostics properties. - :ivar name: The name of the extended location. Required. - :vartype name: str - :ivar type: Type of ExtendedLocation. Required. "CustomLocation" - :vartype type: str or ~azure.mgmt.iotoperations.models.ExtendedLocationType + :ivar logs: Diagnostic log settings for the resource. + :vartype logs: ~azure.mgmt.iotoperations.models.DiagnosticsLogs + :ivar metrics: The metrics settings for the resource. 
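# --- Illustrative usage sketch (editor addition, not part of the generated patch) ---
# A minimal example, under the assumption that this package version is installed,
# of how the identity and diagnostics models above can be constructed; the
# user-assigned identity resource ID and the port are placeholders.
from azure.mgmt.iotoperations import models

identity = models.ManagedServiceIdentity(
    type="SystemAssigned,UserAssigned",
    user_assigned_identities={
        # Hypothetical user-assigned identity resource ID; ARM expects an empty
        # object as the value, which UserAssignedIdentity() provides here.
        "/subscriptions/<sub>/resourceGroups/<rg>/providers"
        "/Microsoft.ManagedIdentity/userAssignedIdentities/<name>": models.UserAssignedIdentity(),
    },
)

profile_diagnostics = models.ProfileDiagnostics(
    metrics=models.Metrics(prometheus_port=9600),  # placeholder port
)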
+ :vartype metrics: ~azure.mgmt.iotoperations.models.Metrics """ - name: str = rest_field() - """The name of the extended location. Required.""" - type: Union[str, "_models.ExtendedLocationType"] = rest_field() - """Type of ExtendedLocation. Required. \"CustomLocation\"""" + logs: Optional["_models.DiagnosticsLogs"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Diagnostic log settings for the resource.""" + metrics: Optional["_models.Metrics"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The metrics settings for the resource.""" @overload def __init__( self, *, - name: str, - type: Union[str, "_models.ExtendedLocationType"], + logs: Optional["_models.DiagnosticsLogs"] = None, + metrics: Optional["_models.Metrics"] = None, ) -> None: ... @overload @@ -3053,27 +7657,30 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class Frontend(_model_base.Model): - """The desired properties of the frontend instances of the Broker. +class RegistryEndpointAuthentication(_Model): + """Model for RegistryEndpointAuthentication. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + RegistryEndpointAnonymousAuthentication, RegistryEndpointArtifactPullSecretAuthentication, + RegistryEndpointSystemAssignedIdentityAuthentication, + RegistryEndpointUserAssignedIdentityAuthentication - :ivar replicas: The desired number of frontend instances (pods). Required. - :vartype replicas: int - :ivar workers: Number of logical frontend workers per instance (pod). - :vartype workers: int + :ivar method: The authentication method. Required. Known values are: + "SystemAssignedManagedIdentity", "UserAssignedManagedIdentity", "Anonymous", and + "ArtifactPullSecret". + :vartype method: str or ~azure.mgmt.iotoperations.models.RegistryEndpointAuthenticationMethod """ - replicas: int = rest_field() - """The desired number of frontend instances (pods). Required.""" - workers: Optional[int] = rest_field() - """Number of logical frontend workers per instance (pod).""" + __mapping__: Dict[str, _Model] = {} + method: str = rest_discriminator(name="method", visibility=["read", "create", "update", "delete", "query"]) + """The authentication method. Required. Known values are: \"SystemAssignedManagedIdentity\", + \"UserAssignedManagedIdentity\", \"Anonymous\", and \"ArtifactPullSecret\".""" @overload def __init__( self, *, - replicas: int, - workers: Optional[int] = None, + method: str, ) -> None: ... @overload @@ -3087,23 +7694,27 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class GenerateResourceLimits(_model_base.Model): - """GenerateResourceLimits properties. +class RegistryEndpointAnonymousAuthentication(RegistryEndpointAuthentication, discriminator="Anonymous"): + """Anonymous authentication. - :ivar cpu: The toggle to enable/disable cpu resource limits. Known values are: "Enabled" and - "Disabled". - :vartype cpu: str or ~azure.mgmt.iotoperations.models.OperationalMode + :ivar method: The authentication method. Required. Anonymous Option + :vartype method: str or ~azure.mgmt.iotoperations.models.ANONYMOUS + :ivar anonymous_settings: Anonymous authentication properties. Required. + :vartype anonymous_settings: ~azure.mgmt.iotoperations.models.RegistryEndpointAnonymousSettings """ - cpu: Optional[Union[str, "_models.OperationalMode"]] = rest_field() - """The toggle to enable/disable cpu resource limits. 
Known values are: \"Enabled\" and - \"Disabled\".""" + method: Literal[RegistryEndpointAuthenticationMethod.ANONYMOUS] = rest_discriminator(name="method", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The authentication method. Required. Anonymous Option""" + anonymous_settings: "_models.RegistryEndpointAnonymousSettings" = rest_field( + name="anonymousSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Anonymous authentication properties. Required.""" @overload def __init__( self, *, - cpu: Optional[Union[str, "_models.OperationalMode"]] = None, + anonymous_settings: "_models.RegistryEndpointAnonymousSettings", ) -> None: ... @overload @@ -3114,29 +7725,37 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) + super().__init__(*args, method=RegistryEndpointAuthenticationMethod.ANONYMOUS, **kwargs) -class InstancePatchModel(_model_base.Model): - """The Instance update model. +class RegistryEndpointAnonymousSettings(_Model): + """RegistryEndpoint Anonymous authentication properties.""" - :ivar tags: Resource tags. - :vartype tags: dict[str, str] - :ivar identity: The managed service identities assigned to this resource. - :vartype identity: ~azure.mgmt.iotoperations.models.ManagedServiceIdentity + +class RegistryEndpointArtifactPullSecretAuthentication( + RegistryEndpointAuthentication, discriminator="ArtifactPullSecret" +): # pylint: disable=name-too-long + """Artifact Pull Secret authentication. + + :ivar method: The authentication method. Required. Artifact Pull Secret authentication + :vartype method: str or ~azure.mgmt.iotoperations.models.ARTIFACT_PULL_SECRET + :ivar artifact_pull_secret_settings: Artifact Pull Secret authentication properties. Required. + :vartype artifact_pull_secret_settings: + ~azure.mgmt.iotoperations.models.RegistryEndpointArtifactPullSecretSettings """ - tags: Optional[Dict[str, str]] = rest_field() - """Resource tags.""" - identity: Optional["_models.ManagedServiceIdentity"] = rest_field() - """The managed service identities assigned to this resource.""" + method: Literal[RegistryEndpointAuthenticationMethod.ARTIFACT_PULL_SECRET] = rest_discriminator(name="method", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The authentication method. Required. Artifact Pull Secret authentication""" + artifact_pull_secret_settings: "_models.RegistryEndpointArtifactPullSecretSettings" = rest_field( + name="artifactPullSecretSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Artifact Pull Secret authentication properties. Required.""" @overload def __init__( self, *, - tags: Optional[Dict[str, str]] = None, - identity: Optional["_models.ManagedServiceIdentity"] = None, + artifact_pull_secret_settings: "_models.RegistryEndpointArtifactPullSecretSettings", ) -> None: ... @overload @@ -3147,45 +7766,25 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - + super().__init__(*args, method=RegistryEndpointAuthenticationMethod.ARTIFACT_PULL_SECRET, **kwargs) -class InstanceProperties(_model_base.Model): - """The properties of the Instance resource. - - Readonly variables are only populated by the server, and will be ignored when sending a request. 
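# --- Illustrative usage sketch (editor addition, not part of the generated patch) ---
# The registry endpoint authentication models above form a discriminated union on
# "method": constructing a subclass fills in the discriminator automatically.
from azure.mgmt.iotoperations import models

anonymous_auth = models.RegistryEndpointAnonymousAuthentication(
    anonymous_settings=models.RegistryEndpointAnonymousSettings(),
)
# anonymous_auth.method now carries the "Anonymous" discriminator value.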
+class RegistryEndpointArtifactPullSecretSettings(_Model): # pylint: disable=name-too-long + """RegistryEndpoint Artifact Pull Secret authentication properties. - :ivar description: Detailed description of the Instance. - :vartype description: str - :ivar provisioning_state: The status of the last operation. Known values are: "Succeeded", - "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". - :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState - :ivar version: The Azure IoT Operations version. - :vartype version: str - :ivar schema_registry_ref: The reference to the Schema Registry for this AIO Instance. + :ivar secret_ref: The name of the kubernetes secret that contains the artifact pull secret. Required. - :vartype schema_registry_ref: ~azure.mgmt.iotoperations.models.SchemaRegistryRef + :vartype secret_ref: str """ - description: Optional[str] = rest_field() - """Detailed description of the Instance.""" - provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( - name="provisioningState", visibility=["read"] - ) - """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", - \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" - version: Optional[str] = rest_field(visibility=["read"]) - """The Azure IoT Operations version.""" - schema_registry_ref: "_models.SchemaRegistryRef" = rest_field(name="schemaRegistryRef") - """The reference to the Schema Registry for this AIO Instance. Required.""" + secret_ref: str = rest_field(name="secretRef", visibility=["read", "create", "update", "delete", "query"]) + """The name of the kubernetes secret that contains the artifact pull secret. Required.""" @overload def __init__( self, *, - schema_registry_ref: "_models.SchemaRegistryRef", - description: Optional[str] = None, + secret_ref: str, ) -> None: ... @overload @@ -3199,41 +7798,43 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class TrackedResource(Resource): - """The resource model definition for an Azure Resource Manager tracked top level resource which - has 'tags' and a 'location'. - - Readonly variables are only populated by the server, and will be ignored when sending a request. +class RegistryEndpointProperties(_Model): + """RegistryEndpoint properties. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData - :ivar tags: Resource tags. - :vartype tags: dict[str, str] - :ivar location: The geo-location where the resource lives. Required. - :vartype location: str + :ivar host: The Container Registry endpoint hostname. Required. + :vartype host: str + :ivar authentication: The authentication settings for the Azure Container Registry. Required. + :vartype authentication: ~azure.mgmt.iotoperations.models.RegistryEndpointAuthentication + :ivar provisioning_state: The status of the last operation. 
Known values are: "Succeeded", + "Failed", "Canceled", "Provisioning", "Updating", "Deleting", and "Accepted". + :vartype provisioning_state: str or ~azure.mgmt.iotoperations.models.ProvisioningState + :ivar trust_settings: Trust settings for the registry endpoint. + :vartype trust_settings: ~azure.mgmt.iotoperations.models.RegistryEndpointTrustedSettings """ - tags: Optional[Dict[str, str]] = rest_field() - """Resource tags.""" - location: str = rest_field(visibility=["read", "create"]) - """The geo-location where the resource lives. Required.""" + host: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The Container Registry endpoint hostname. Required.""" + authentication: "_models.RegistryEndpointAuthentication" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The authentication settings for the Azure Container Registry. Required.""" + provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Provisioning\", \"Updating\", \"Deleting\", and \"Accepted\".""" + trust_settings: Optional["_models.RegistryEndpointTrustedSettings"] = rest_field( + name="trustSettings", visibility=["read", "create", "update", "delete", "query"] + ) + """Trust settings for the registry endpoint.""" @overload def __init__( self, *, - location: str, - tags: Optional[Dict[str, str]] = None, + host: str, + authentication: "_models.RegistryEndpointAuthentication", + trust_settings: Optional["_models.RegistryEndpointTrustedSettings"] = None, ) -> None: ... @overload @@ -3247,14 +7848,11 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class InstanceResource(TrackedResource): - """A Instance resource is a logical container for a set of child resources. - - Readonly variables are only populated by the server, and will be ignored when sending a request. - +class RegistryEndpointResource(ProxyResource): + """RegistryEndpoint resource. :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. # pylint: disable=line-too-long + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str :ivar name: The name of the resource. :vartype name: str @@ -3264,34 +7862,27 @@ class InstanceResource(TrackedResource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.iotoperations.models.SystemData - :ivar tags: Resource tags. - :vartype tags: dict[str, str] - :ivar location: The geo-location where the resource lives. Required. - :vartype location: str :ivar properties: The resource-specific properties for this resource. - :vartype properties: ~azure.mgmt.iotoperations.models.InstanceProperties - :ivar extended_location: Edge location of the resource. Required. + :vartype properties: ~azure.mgmt.iotoperations.models.RegistryEndpointProperties + :ivar extended_location: Edge location of the resource. :vartype extended_location: ~azure.mgmt.iotoperations.models.ExtendedLocation - :ivar identity: The managed service identities assigned to this resource. 
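# --- Illustrative usage sketch (editor addition, not part of the generated patch) ---
# A possible way to assemble the RegistryEndpoint resource described above, using
# artifact-pull-secret authentication; the host and secret name are placeholders.
from azure.mgmt.iotoperations import models

registry_endpoint = models.RegistryEndpointResource(
    properties=models.RegistryEndpointProperties(
        host="myregistry.azurecr.io",
        authentication=models.RegistryEndpointArtifactPullSecretAuthentication(
            artifact_pull_secret_settings=models.RegistryEndpointArtifactPullSecretSettings(
                secret_ref="my-pull-secret",  # Kubernetes secret holding the pull secret
            ),
        ),
    ),
)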
- :vartype identity: ~azure.mgmt.iotoperations.models.ManagedServiceIdentity """ - properties: Optional["_models.InstanceProperties"] = rest_field() + properties: Optional["_models.RegistryEndpointProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) """The resource-specific properties for this resource.""" - extended_location: "_models.ExtendedLocation" = rest_field(name="extendedLocation", visibility=["read", "create"]) - """Edge location of the resource. Required.""" - identity: Optional["_models.ManagedServiceIdentity"] = rest_field() - """The managed service identities assigned to this resource.""" + extended_location: Optional["_models.ExtendedLocation"] = rest_field( + name="extendedLocation", visibility=["read", "create"] + ) + """Edge location of the resource.""" @overload def __init__( self, *, - location: str, - extended_location: "_models.ExtendedLocation", - tags: Optional[Dict[str, str]] = None, - properties: Optional["_models.InstanceProperties"] = None, - identity: Optional["_models.ManagedServiceIdentity"] = None, + properties: Optional["_models.RegistryEndpointProperties"] = None, + extended_location: Optional["_models.ExtendedLocation"] = None, ) -> None: ... @overload @@ -3305,43 +7896,33 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class KubernetesReference(_model_base.Model): - """Kubernetes reference. - +class RegistryEndpointSystemAssignedIdentityAuthentication( + RegistryEndpointAuthentication, discriminator="SystemAssignedManagedIdentity" +): # pylint: disable=name-too-long + """System assigned identity authentication. - :ivar api_group: APIGroup is the group for the resource being referenced. If APIGroup is not - specified, the specified Kind must be in the core API group. For any other third-party types, - APIGroup is required. - :vartype api_group: str - :ivar kind: Kind is the type of resource being referenced. Required. - :vartype kind: str - :ivar name: Name is the name of resource being referenced. Required. - :vartype name: str - :ivar namespace: Namespace is the namespace of the resource being referenced. This field is - required when the resource has a namespace. - :vartype namespace: str + :ivar method: The authentication method. Required. SystemAssignedManagedIdentity type + :vartype method: str or ~azure.mgmt.iotoperations.models.SYSTEM_ASSIGNED_MANAGED_IDENTITY + :ivar system_assigned_managed_identity_settings: System assigned managed identity properties. + Required. + :vartype system_assigned_managed_identity_settings: + ~azure.mgmt.iotoperations.models.RegistryEndpointSystemAssignedManagedIdentitySettings """ - api_group: Optional[str] = rest_field(name="apiGroup") - """APIGroup is the group for the resource being referenced. If APIGroup is not specified, the - specified Kind must be in the core API group. For any other third-party types, APIGroup is - required.""" - kind: str = rest_field() - """Kind is the type of resource being referenced. Required.""" - name: str = rest_field() - """Name is the name of resource being referenced. Required.""" - namespace: Optional[str] = rest_field() - """Namespace is the namespace of the resource being referenced. This field is required when the - resource has a namespace.""" + method: Literal[RegistryEndpointAuthenticationMethod.SYSTEM_ASSIGNED_MANAGED_IDENTITY] = rest_discriminator(name="method", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The authentication method. Required. 
SystemAssignedManagedIdentity type""" + system_assigned_managed_identity_settings: "_models.RegistryEndpointSystemAssignedManagedIdentitySettings" = ( + rest_field( + name="systemAssignedManagedIdentitySettings", visibility=["read", "create", "update", "delete", "query"] + ) + ) + """System assigned managed identity properties. Required.""" @overload def __init__( self, *, - kind: str, - name: str, - api_group: Optional[str] = None, - namespace: Optional[str] = None, + system_assigned_managed_identity_settings: "_models.RegistryEndpointSystemAssignedManagedIdentitySettings", ) -> None: ... @overload @@ -3352,54 +7933,26 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) + super().__init__(*args, method=RegistryEndpointAuthenticationMethod.SYSTEM_ASSIGNED_MANAGED_IDENTITY, **kwargs) -class ListenerPort(_model_base.Model): - """Defines a TCP port on which a ``BrokerListener`` listens. - +class RegistryEndpointSystemAssignedManagedIdentitySettings(_Model): # pylint: disable=name-too-long + """System assigned managed identity properties. - :ivar authentication_ref: Reference to client authentication settings. Omit to disable - authentication. - :vartype authentication_ref: str - :ivar authorization_ref: Reference to client authorization settings. Omit to disable - authorization. - :vartype authorization_ref: str - :ivar node_port: Kubernetes node port. Only relevant when this port is associated with a - ``NodePort`` listener. - :vartype node_port: int - :ivar port: TCP port for accepting client connections. Required. - :vartype port: int - :ivar protocol: Protocol to use for client connections. Known values are: "Mqtt" and - "WebSockets". - :vartype protocol: str or ~azure.mgmt.iotoperations.models.BrokerProtocolType - :ivar tls: TLS server certificate settings for this port. Omit to disable TLS. - :vartype tls: ~azure.mgmt.iotoperations.models.TlsCertMethod + :ivar audience: Audience of the service to authenticate against. Optional; defaults to the + audience for Service host configuration. + :vartype audience: str """ - authentication_ref: Optional[str] = rest_field(name="authenticationRef") - """Reference to client authentication settings. Omit to disable authentication.""" - authorization_ref: Optional[str] = rest_field(name="authorizationRef") - """Reference to client authorization settings. Omit to disable authorization.""" - node_port: Optional[int] = rest_field(name="nodePort") - """Kubernetes node port. Only relevant when this port is associated with a ``NodePort`` listener.""" - port: int = rest_field() - """TCP port for accepting client connections. Required.""" - protocol: Optional[Union[str, "_models.BrokerProtocolType"]] = rest_field() - """Protocol to use for client connections. Known values are: \"Mqtt\" and \"WebSockets\".""" - tls: Optional["_models.TlsCertMethod"] = rest_field() - """TLS server certificate settings for this port. Omit to disable TLS.""" - - @overload - def __init__( - self, - *, - port: int, - authentication_ref: Optional[str] = None, - authorization_ref: Optional[str] = None, - node_port: Optional[int] = None, - protocol: Optional[Union[str, "_models.BrokerProtocolType"]] = None, - tls: Optional["_models.TlsCertMethod"] = None, + audience: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Audience of the service to authenticate against. 
Optional; defaults to the audience for Service + host configuration.""" + + @overload + def __init__( + self, + *, + audience: Optional[str] = None, ) -> None: ... @overload @@ -3413,36 +7966,24 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class LocalKubernetesReference(_model_base.Model): - """Kubernetes reference. - +class RegistryEndpointTrustedSettings(_Model): + """RegistryEndpointTrustedSettings properties. - :ivar api_group: APIGroup is the group for the resource being referenced. If APIGroup is not - specified, the specified Kind must be in the core API group. For any other third-party types, - APIGroup is required. - :vartype api_group: str - :ivar kind: Kind is the type of resource being referenced. Required. - :vartype kind: str - :ivar name: Name is the name of resource being referenced. Required. - :vartype name: str + :ivar trusted_signing_keys: The trust properties for the registry endpoint. Required. + :vartype trusted_signing_keys: + ~azure.mgmt.iotoperations.models.RegistryEndpointTrustedSigningKey """ - api_group: Optional[str] = rest_field(name="apiGroup") - """APIGroup is the group for the resource being referenced. If APIGroup is not specified, the - specified Kind must be in the core API group. For any other third-party types, APIGroup is - required.""" - kind: str = rest_field() - """Kind is the type of resource being referenced. Required.""" - name: str = rest_field() - """Name is the name of resource being referenced. Required.""" + trusted_signing_keys: "_models.RegistryEndpointTrustedSigningKey" = rest_field( + name="trustedSigningKeys", visibility=["read", "create", "update", "delete", "query"] + ) + """The trust properties for the registry endpoint. Required.""" @overload def __init__( self, *, - kind: str, - name: str, - api_group: Optional[str] = None, + trusted_signing_keys: "_models.RegistryEndpointTrustedSigningKey", ) -> None: ... @overload @@ -3456,46 +7997,27 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class ManagedServiceIdentity(_model_base.Model): - """Managed service identity (system assigned and/or user assigned identities). - - Readonly variables are only populated by the server, and will be ignored when sending a request. +class RegistryEndpointTrustedSigningKey(_Model): + """RegistryEndpoint Trust properties. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + RegistryEndpointTrustedSigningKeyConfigMap, RegistryEndpointTrustedSigningKeySecret - :ivar principal_id: The service principal ID of the system assigned identity. This property - will only be provided for a system assigned identity. - :vartype principal_id: str - :ivar tenant_id: The tenant ID of the system assigned identity. This property will only be - provided for a system assigned identity. - :vartype tenant_id: str - :ivar type: The type of managed identity assigned to this resource. Required. Known values are: - "None", "SystemAssigned", "UserAssigned", and "SystemAssigned,UserAssigned". - :vartype type: str or ~azure.mgmt.iotoperations.models.ManagedServiceIdentityType - :ivar user_assigned_identities: The identities assigned to this resource by the user. - :vartype user_assigned_identities: dict[str, - ~azure.mgmt.iotoperations.models.UserAssignedIdentity] + :ivar type: The trust type for the registry endpoint. Required. Known values are: "Secret" and + "ConfigMap". 
+ :vartype type: str or ~azure.mgmt.iotoperations.models.RegistryEndpointTrustedSigningKeyType """ - principal_id: Optional[str] = rest_field(name="principalId", visibility=["read"]) - """The service principal ID of the system assigned identity. This property will only be provided - for a system assigned identity.""" - tenant_id: Optional[str] = rest_field(name="tenantId", visibility=["read"]) - """The tenant ID of the system assigned identity. This property will only be provided for a system - assigned identity.""" - type: Union[str, "_models.ManagedServiceIdentityType"] = rest_field() - """The type of managed identity assigned to this resource. Required. Known values are: \"None\", - \"SystemAssigned\", \"UserAssigned\", and \"SystemAssigned,UserAssigned\".""" - user_assigned_identities: Optional[Dict[str, "_models.UserAssignedIdentity"]] = rest_field( - name="userAssignedIdentities" - ) - """The identities assigned to this resource by the user.""" + __mapping__: Dict[str, _Model] = {} + type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) + """The trust type for the registry endpoint. Required. Known values are: \"Secret\" and + \"ConfigMap\".""" @overload def __init__( self, *, - type: Union[str, "_models.ManagedServiceIdentityType"], - user_assigned_identities: Optional[Dict[str, "_models.UserAssignedIdentity"]] = None, + type: str, ) -> None: ... @overload @@ -3509,21 +8031,29 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class Metrics(_model_base.Model): - """Diagnostic Metrics properties. +class RegistryEndpointTrustedSigningKeyConfigMap( + RegistryEndpointTrustedSigningKey, discriminator="ConfigMap" +): # pylint: disable=name-too-long + """Settings for RegistryEndpoint trust provided through a configmap. - :ivar prometheus_port: The prometheus port to expose the metrics. - :vartype prometheus_port: int + :ivar type: The trust type for the registry endpoint. Required. Trust settings stored in a + Kubernetes ConfigMap. + :vartype type: str or ~azure.mgmt.iotoperations.models.CONFIG_MAP + :ivar config_map_ref: The name of the configmap. Required. + :vartype config_map_ref: str """ - prometheus_port: Optional[int] = rest_field(name="prometheusPort") - """The prometheus port to expose the metrics.""" + type: Literal[RegistryEndpointTrustedSigningKeyType.CONFIG_MAP] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The trust type for the registry endpoint. Required. Trust settings stored in a Kubernetes + ConfigMap.""" + config_map_ref: str = rest_field(name="configMapRef", visibility=["read", "create", "update", "delete", "query"]) + """The name of the configmap. Required.""" @overload def __init__( self, *, - prometheus_port: Optional[int] = None, + config_map_ref: str, ) -> None: ... @overload @@ -3534,53 +8064,30 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) + super().__init__(*args, type=RegistryEndpointTrustedSigningKeyType.CONFIG_MAP, **kwargs) -class Operation(_model_base.Model): - """Details of a REST API operation, returned from the Resource Provider Operations API. +class RegistryEndpointTrustedSigningKeySecret(RegistryEndpointTrustedSigningKey, discriminator="Secret"): + """Settings for RegistryEndpoint trust provided through a secret. 
- Readonly variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: The name of the operation, as per Resource-Based Access Control (RBAC). Examples: - "Microsoft.Compute/virtualMachines/write", "Microsoft.Compute/virtualMachines/capture/action". - :vartype name: str - :ivar is_data_action: Whether the operation applies to data-plane. This is "true" for - data-plane operations and "false" for Azure Resource Manager/control-plane operations. - :vartype is_data_action: bool - :ivar display: Localized display information for this particular operation. - :vartype display: ~azure.mgmt.iotoperations.models.OperationDisplay - :ivar origin: The intended executor of the operation; as in Resource Based Access Control - (RBAC) and audit logs UX. Default value is "user,system". Known values are: "user", "system", - and "user,system". - :vartype origin: str or ~azure.mgmt.iotoperations.models.Origin - :ivar action_type: Extensible enum. Indicates the action type. "Internal" refers to actions - that are for internal only APIs. "Internal" - :vartype action_type: str or ~azure.mgmt.iotoperations.models.ActionType + :ivar type: The trust type for the registry endpoint. Required. Trust settings stored in a + Kubernetes Secret. + :vartype type: str or ~azure.mgmt.iotoperations.models.SECRET + :ivar secret_ref: The name of the secret. Required. + :vartype secret_ref: str """ - name: Optional[str] = rest_field(visibility=["read"]) - """The name of the operation, as per Resource-Based Access Control (RBAC). Examples: - \"Microsoft.Compute/virtualMachines/write\", - \"Microsoft.Compute/virtualMachines/capture/action\".""" - is_data_action: Optional[bool] = rest_field(name="isDataAction", visibility=["read"]) - """Whether the operation applies to data-plane. This is \"true\" for data-plane operations and - \"false\" for Azure Resource Manager/control-plane operations.""" - display: Optional["_models.OperationDisplay"] = rest_field(visibility=["read"]) - """Localized display information for this particular operation.""" - origin: Optional[Union[str, "_models.Origin"]] = rest_field(visibility=["read"]) - """The intended executor of the operation; as in Resource Based Access Control (RBAC) and audit - logs UX. Default value is \"user,system\". Known values are: \"user\", \"system\", and - \"user,system\".""" - action_type: Optional[Union[str, "_models.ActionType"]] = rest_field(name="actionType") - """Extensible enum. Indicates the action type. \"Internal\" refers to actions that are for - internal only APIs. \"Internal\"""" + type: Literal[RegistryEndpointTrustedSigningKeyType.SECRET] = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The trust type for the registry endpoint. Required. Trust settings stored in a Kubernetes + Secret.""" + secret_ref: str = rest_field(name="secretRef", visibility=["read", "create", "update", "delete", "query"]) + """The name of the secret. Required.""" @overload def __init__( self, *, - action_type: Optional[Union[str, "_models.ActionType"]] = None, + secret_ref: str, ) -> None: ... @overload @@ -3591,74 +8098,34 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class OperationDisplay(_model_base.Model): - """Localized display information for and operation. - - Readonly variables are only populated by the server, and will be ignored when sending a request. 
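# --- Illustrative usage sketch (editor addition, not part of the generated patch) ---
# Trust settings for a registry endpoint reference signing keys stored in either a
# Kubernetes ConfigMap or a Secret; the chosen subclass sets the "type"
# discriminator. The ConfigMap name below is a placeholder.
from azure.mgmt.iotoperations import models

trust_settings = models.RegistryEndpointTrustedSettings(
    trusted_signing_keys=models.RegistryEndpointTrustedSigningKeyConfigMap(
        config_map_ref="trusted-signing-keys",
    ),
)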
- - :ivar provider: The localized friendly form of the resource provider name, e.g. "Microsoft - Monitoring Insights" or "Microsoft Compute". - :vartype provider: str - :ivar resource: The localized friendly name of the resource type related to this operation. - E.g. "Virtual Machines" or "Job Schedule Collections". - :vartype resource: str - :ivar operation: The concise, localized friendly name for the operation; suitable for - dropdowns. E.g. "Create or Update Virtual Machine", "Restart Virtual Machine". - :vartype operation: str - :ivar description: The short, localized friendly description of the operation; suitable for - tool tips and detailed views. - :vartype description: str - """ - - provider: Optional[str] = rest_field(visibility=["read"]) - """The localized friendly form of the resource provider name, e.g. \"Microsoft Monitoring - Insights\" or \"Microsoft Compute\".""" - resource: Optional[str] = rest_field(visibility=["read"]) - """The localized friendly name of the resource type related to this operation. E.g. \"Virtual - Machines\" or \"Job Schedule Collections\".""" - operation: Optional[str] = rest_field(visibility=["read"]) - """The concise, localized friendly name for the operation; suitable for dropdowns. E.g. \"Create - or Update Virtual Machine\", \"Restart Virtual Machine\".""" - description: Optional[str] = rest_field(visibility=["read"]) - """The short, localized friendly description of the operation; suitable for tool tips and detailed - views.""" + super().__init__(*args, type=RegistryEndpointTrustedSigningKeyType.SECRET, **kwargs) -class PrincipalDefinition(_model_base.Model): - """PrincipalDefinition properties of Rule. +class RegistryEndpointUserAssignedIdentityAuthentication( + RegistryEndpointAuthentication, discriminator="UserAssignedManagedIdentity" +): # pylint: disable=name-too-long + """User assigned identity authentication. - :ivar attributes: A list of key-value pairs that match the attributes of the clients. The - attributes are case-sensitive and must match the attributes provided by the clients during - authentication. - :vartype attributes: list[dict[str, str]] - :ivar client_ids: A list of client IDs that match the clients. The client IDs are - case-sensitive and must match the client IDs provided by the clients during connection. - :vartype client_ids: list[str] - :ivar usernames: A list of usernames that match the clients. The usernames are case-sensitive - and must match the usernames provided by the clients during authentication. - :vartype usernames: list[str] + :ivar method: The authentication method. Required. UserAssignedManagedIdentity type + :vartype method: str or ~azure.mgmt.iotoperations.models.USER_ASSIGNED_MANAGED_IDENTITY + :ivar user_assigned_managed_identity_settings: User assigned managed identity properties. + Required. + :vartype user_assigned_managed_identity_settings: + ~azure.mgmt.iotoperations.models.RegistryEndpointUserAssignedManagedIdentitySettings """ - attributes: Optional[List[Dict[str, str]]] = rest_field() - """A list of key-value pairs that match the attributes of the clients. The attributes are - case-sensitive and must match the attributes provided by the clients during authentication.""" - client_ids: Optional[List[str]] = rest_field(name="clientIds") - """A list of client IDs that match the clients. The client IDs are case-sensitive and must match - the client IDs provided by the clients during connection.""" - usernames: Optional[List[str]] = rest_field() - """A list of usernames that match the clients. 
The usernames are case-sensitive and must match the - usernames provided by the clients during authentication.""" + method: Literal[RegistryEndpointAuthenticationMethod.USER_ASSIGNED_MANAGED_IDENTITY] = rest_discriminator(name="method", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The authentication method. Required. UserAssignedManagedIdentity type""" + user_assigned_managed_identity_settings: "_models.RegistryEndpointUserAssignedManagedIdentitySettings" = rest_field( + name="userAssignedManagedIdentitySettings", visibility=["read", "create", "update", "delete", "query"] + ) + """User assigned managed identity properties. Required.""" @overload def __init__( self, *, - attributes: Optional[List[Dict[str, str]]] = None, - client_ids: Optional[List[str]] = None, - usernames: Optional[List[str]] = None, + user_assigned_managed_identity_settings: "_models.RegistryEndpointUserAssignedManagedIdentitySettings", ) -> None: ... @overload @@ -3669,29 +8136,35 @@ def __init__(self, mapping: Mapping[str, Any]) -> None: """ def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) + super().__init__(*args, method=RegistryEndpointAuthenticationMethod.USER_ASSIGNED_MANAGED_IDENTITY, **kwargs) -class ProfileDiagnostics(_model_base.Model): - """DataflowProfile Diagnostics properties. +class RegistryEndpointUserAssignedManagedIdentitySettings(_Model): # pylint: disable=name-too-long + """User assigned managed identity properties. - :ivar logs: Diagnostic log settings for the resource. - :vartype logs: ~azure.mgmt.iotoperations.models.DiagnosticsLogs - :ivar metrics: The metrics settings for the resource. - :vartype metrics: ~azure.mgmt.iotoperations.models.Metrics + :ivar client_id: Client ID for the user-assigned managed identity. Required. + :vartype client_id: str + :ivar scope: Resource identifier (application ID URI) of the resource, affixed with the + .default suffix. + :vartype scope: str + :ivar tenant_id: Tenant ID. Required. + :vartype tenant_id: str """ - logs: Optional["_models.DiagnosticsLogs"] = rest_field() - """Diagnostic log settings for the resource.""" - metrics: Optional["_models.Metrics"] = rest_field() - """The metrics settings for the resource.""" + client_id: str = rest_field(name="clientId", visibility=["read", "create", "update", "delete", "query"]) + """Client ID for the user-assigned managed identity. Required.""" + scope: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Resource identifier (application ID URI) of the resource, affixed with the .default suffix.""" + tenant_id: str = rest_field(name="tenantId", visibility=["read", "create", "update", "delete", "query"]) + """Tenant ID. Required.""" @overload def __init__( self, *, - logs: Optional["_models.DiagnosticsLogs"] = None, - metrics: Optional["_models.Metrics"] = None, + client_id: str, + tenant_id: str, + scope: Optional[str] = None, ) -> None: ... @overload @@ -3705,19 +8178,18 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class SanForCert(_model_base.Model): +class SanForCert(_Model): """Subject Alternative Names (SANs) for certificate. - :ivar dns: DNS SANs. Required. :vartype dns: list[str] :ivar ip: IP address SANs. Required. :vartype ip: list[str] """ - dns: List[str] = rest_field() + dns: List[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """DNS SANs. 
Required.""" - ip: List[str] = rest_field() + ip: List[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """IP address SANs. Required.""" @overload @@ -3739,15 +8211,14 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class SchemaRegistryRef(_model_base.Model): +class SchemaRegistryRef(_Model): """The reference to the Schema Registry for this AIO Instance. - :ivar resource_id: The resource ID of the Schema Registry. Required. :vartype resource_id: str """ - resource_id: str = rest_field(name="resourceId") + resource_id: str = rest_field(name="resourceId", visibility=["read", "create", "update", "delete", "query"]) """The resource ID of the Schema Registry. Required.""" @overload @@ -3768,7 +8239,35 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class SelfCheck(_model_base.Model): +class SecretProviderClassRef(_Model): + """The reference to the AIO Secret provider class. + + :ivar resource_id: The resource ID of the AIO Secret provider class. Required. + :vartype resource_id: str + """ + + resource_id: str = rest_field(name="resourceId", visibility=["read", "create", "update", "delete", "query"]) + """The resource ID of the AIO Secret provider class. Required.""" + + @overload + def __init__( + self, + *, + resource_id: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SelfCheck(_Model): """Broker Diagnostic Self check properties. :ivar mode: The toggle to enable/disable self check. Known values are: "Enabled" and @@ -3780,11 +8279,17 @@ class SelfCheck(_model_base.Model): :vartype timeout_seconds: int """ - mode: Optional[Union[str, "_models.OperationalMode"]] = rest_field() + mode: Optional[Union[str, "_models.OperationalMode"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) """The toggle to enable/disable self check. Known values are: \"Enabled\" and \"Disabled\".""" - interval_seconds: Optional[int] = rest_field(name="intervalSeconds") + interval_seconds: Optional[int] = rest_field( + name="intervalSeconds", visibility=["read", "create", "update", "delete", "query"] + ) """The self check interval.""" - timeout_seconds: Optional[int] = rest_field(name="timeoutSeconds") + timeout_seconds: Optional[int] = rest_field( + name="timeoutSeconds", visibility=["read", "create", "update", "delete", "query"] + ) """The timeout for self check.""" @overload @@ -3807,7 +8312,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class SelfTracing(_model_base.Model): +class SelfTracing(_Model): """Diagnostic Self tracing properties. :ivar mode: The toggle to enable/disable self tracing. Known values are: "Enabled" and @@ -3817,9 +8322,13 @@ class SelfTracing(_model_base.Model): :vartype interval_seconds: int """ - mode: Optional[Union[str, "_models.OperationalMode"]] = rest_field() + mode: Optional[Union[str, "_models.OperationalMode"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) """The toggle to enable/disable self tracing. 
Known values are: \"Enabled\" and \"Disabled\".""" - interval_seconds: Optional[int] = rest_field(name="intervalSeconds") + interval_seconds: Optional[int] = rest_field( + name="intervalSeconds", visibility=["read", "create", "update", "delete", "query"] + ) """The self tracing interval.""" @overload @@ -3841,32 +8350,35 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class StateStoreResourceRule(_model_base.Model): +class StateStoreResourceRule(_Model): """State Store Resource Rule properties. - :ivar key_type: Allowed keyTypes pattern, string, binary. The key type used for matching, for example pattern tries to match the key to a glob-style pattern and string checks key is equal to value provided in keys. Required. Known values are: "Pattern", "String", and "Binary". :vartype key_type: str or ~azure.mgmt.iotoperations.models.StateStoreResourceKeyTypes :ivar keys_property: Give access to state store keys for the corresponding principals defined. - When key type is pattern set glob-style pattern (e.g., '\\ *', 'clients/*\\ '). Required. + When key type is pattern set glob-style pattern (e.g., '*', 'clients/*'). Required. :vartype keys_property: list[str] - :ivar method: Give access for ``Read``\\ , ``Write`` and ``ReadWrite`` access level. Required. + :ivar method: Give access for ``Read``, ``Write`` and ``ReadWrite`` access level. Required. Known values are: "Read", "Write", and "ReadWrite". :vartype method: str or ~azure.mgmt.iotoperations.models.StateStoreResourceDefinitionMethods """ - key_type: Union[str, "_models.StateStoreResourceKeyTypes"] = rest_field(name="keyType") + key_type: Union[str, "_models.StateStoreResourceKeyTypes"] = rest_field( + name="keyType", visibility=["read", "create", "update", "delete", "query"] + ) """Allowed keyTypes pattern, string, binary. The key type used for matching, for example pattern tries to match the key to a glob-style pattern and string checks key is equal to value provided in keys. Required. Known values are: \"Pattern\", \"String\", and \"Binary\".""" - keys_property: List[str] = rest_field(name="keys") + keys_property: List[str] = rest_field(name="keys", visibility=["read", "create", "update", "delete", "query"]) """Give access to state store keys for the corresponding principals defined. When key type is - pattern set glob-style pattern (e.g., '\ *', 'clients/*\ '). Required.""" - method: Union[str, "_models.StateStoreResourceDefinitionMethods"] = rest_field() - """Give access for ``Read``\ , ``Write`` and ``ReadWrite`` access level. Required. Known values - are: \"Read\", \"Write\", and \"ReadWrite\".""" + pattern set glob-style pattern (e.g., '*', 'clients/*'). Required.""" + method: Union[str, "_models.StateStoreResourceDefinitionMethods"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Give access for ``Read``, ``Write`` and ``ReadWrite`` access level. Required. Known values are: + \"Read\", \"Write\", and \"ReadWrite\".""" @overload def __init__( @@ -3888,7 +8400,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class SubscriberQueueLimit(_model_base.Model): +class SubscriberQueueLimit(_Model): """The settings of Subscriber Queue Limit. :ivar length: The maximum length of the queue before messages start getting dropped. 
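# --- Illustrative usage sketch (editor addition, not part of the generated patch) ---
# A state store rule granting read/write access to keys under "clients/"; keyword
# names follow the attribute names shown above (keys_property maps to the wire
# name "keys").
from azure.mgmt.iotoperations import models

state_store_rule = models.StateStoreResourceRule(
    key_type="Pattern",           # glob-style matching
    keys_property=["clients/*"],  # pattern because key_type is "Pattern"
    method="ReadWrite",
)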
@@ -3898,9 +8410,11 @@ class SubscriberQueueLimit(_model_base.Model): :vartype strategy: str or ~azure.mgmt.iotoperations.models.SubscriberMessageDropStrategy """ - length: Optional[int] = rest_field() + length: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """The maximum length of the queue before messages start getting dropped.""" - strategy: Optional[Union[str, "_models.SubscriberMessageDropStrategy"]] = rest_field() + strategy: Optional[Union[str, "_models.SubscriberMessageDropStrategy"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) """The strategy to use for dropping messages from the queue. Known values are: \"None\" and \"DropOldest\".""" @@ -3923,7 +8437,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class SystemData(_model_base.Model): +class SystemData(_Model): """Metadata pertaining to creation and last modification of the resource. :ivar created_by: The identity that created the resource. @@ -3942,19 +8456,29 @@ class SystemData(_model_base.Model): :vartype last_modified_at: ~datetime.datetime """ - created_by: Optional[str] = rest_field(name="createdBy") + created_by: Optional[str] = rest_field(name="createdBy", visibility=["read", "create", "update", "delete", "query"]) """The identity that created the resource.""" - created_by_type: Optional[Union[str, "_models.CreatedByType"]] = rest_field(name="createdByType") + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = rest_field( + name="createdByType", visibility=["read", "create", "update", "delete", "query"] + ) """The type of identity that created the resource. Known values are: \"User\", \"Application\", \"ManagedIdentity\", and \"Key\".""" - created_at: Optional[datetime.datetime] = rest_field(name="createdAt", format="rfc3339") + created_at: Optional[datetime.datetime] = rest_field( + name="createdAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) """The timestamp of resource creation (UTC).""" - last_modified_by: Optional[str] = rest_field(name="lastModifiedBy") + last_modified_by: Optional[str] = rest_field( + name="lastModifiedBy", visibility=["read", "create", "update", "delete", "query"] + ) """The identity that last modified the resource.""" - last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = rest_field(name="lastModifiedByType") + last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = rest_field( + name="lastModifiedByType", visibility=["read", "create", "update", "delete", "query"] + ) """The type of identity that last modified the resource. Known values are: \"User\", \"Application\", \"ManagedIdentity\", and \"Key\".""" - last_modified_at: Optional[datetime.datetime] = rest_field(name="lastModifiedAt", format="rfc3339") + last_modified_at: Optional[datetime.datetime] = rest_field( + name="lastModifiedAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) """The timestamp of resource last modification (UTC).""" @overload @@ -3980,10 +8504,9 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class TlsCertMethod(_model_base.Model): +class TlsCertMethod(_Model): """Collection of different TLS types, NOTE- Enum at a time only one of them needs to be supported. - :ivar mode: Mode of TLS server certificate management. Required. Known values are: "Automatic" and "Manual". 
:vartype mode: str or ~azure.mgmt.iotoperations.models.TlsCertMethodMode @@ -3995,14 +8518,18 @@ class TlsCertMethod(_model_base.Model): :vartype manual: ~azure.mgmt.iotoperations.models.X509ManualCertificate """ - mode: Union[str, "_models.TlsCertMethodMode"] = rest_field() + mode: Union[str, "_models.TlsCertMethodMode"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) """Mode of TLS server certificate management. Required. Known values are: \"Automatic\" and \"Manual\".""" cert_manager_certificate_spec: Optional["_models.CertManagerCertificateSpec"] = rest_field( - name="certManagerCertificateSpec" + name="certManagerCertificateSpec", visibility=["read", "create", "update", "delete", "query"] ) """Option 1 - Automatic TLS server certificate management with cert-manager.""" - manual: Optional["_models.X509ManualCertificate"] = rest_field() + manual: Optional["_models.X509ManualCertificate"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) """Option 2 - Manual TLS server certificate management through a defined secret.""" @overload @@ -4025,7 +8552,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class TlsProperties(_model_base.Model): +class TlsProperties(_Model): """Tls properties. :ivar mode: Mode for TLS. Known values are: "Enabled" and "Disabled". @@ -4034,9 +8561,13 @@ class TlsProperties(_model_base.Model): :vartype trusted_ca_certificate_config_map_ref: str """ - mode: Optional[Union[str, "_models.OperationalMode"]] = rest_field() + mode: Optional[Union[str, "_models.OperationalMode"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) """Mode for TLS. Known values are: \"Enabled\" and \"Disabled\".""" - trusted_ca_certificate_config_map_ref: Optional[str] = rest_field(name="trustedCaCertificateConfigMapRef") + trusted_ca_certificate_config_map_ref: Optional[str] = rest_field( + name="trustedCaCertificateConfigMapRef", visibility=["read", "create", "update", "delete", "query"] + ) """Trusted CA certificate config map.""" @overload @@ -4058,7 +8589,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class Traces(_model_base.Model): +class Traces(_Model): """Broker Diagnostic Trace properties. :ivar mode: The toggle to enable/disable traces. Known values are: "Enabled" and "Disabled". @@ -4071,13 +8602,21 @@ class Traces(_model_base.Model): :vartype span_channel_capacity: int """ - mode: Optional[Union[str, "_models.OperationalMode"]] = rest_field() + mode: Optional[Union[str, "_models.OperationalMode"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) """The toggle to enable/disable traces. 
Known values are: \"Enabled\" and \"Disabled\".""" - cache_size_megabytes: Optional[int] = rest_field(name="cacheSizeMegabytes") + cache_size_megabytes: Optional[int] = rest_field( + name="cacheSizeMegabytes", visibility=["read", "create", "update", "delete", "query"] + ) """The cache size in megabytes.""" - self_tracing: Optional["_models.SelfTracing"] = rest_field(name="selfTracing") + self_tracing: Optional["_models.SelfTracing"] = rest_field( + name="selfTracing", visibility=["read", "create", "update", "delete", "query"] + ) """The self tracing properties.""" - span_channel_capacity: Optional[int] = rest_field(name="spanChannelCapacity") + span_channel_capacity: Optional[int] = rest_field( + name="spanChannelCapacity", visibility=["read", "create", "update", "delete", "query"] + ) """The span channel capacity.""" @overload @@ -4101,11 +8640,9 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class UserAssignedIdentity(_model_base.Model): +class UserAssignedIdentity(_Model): """User assigned identity properties. - Readonly variables are only populated by the server, and will be ignored when sending a request. - :ivar principal_id: The principal ID of the assigned identity. :vartype principal_id: str :ivar client_id: The client ID of the assigned identity. @@ -4118,27 +8655,47 @@ class UserAssignedIdentity(_model_base.Model): """The client ID of the assigned identity.""" -class VolumeClaimResourceRequirements(_model_base.Model): +class VolumeClaimResourceRequirements(_Model): """VolumeClaimResourceRequirements properties. :ivar limits: Limits describes the maximum amount of compute resources allowed. More info: - https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/. + `https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ + `_. :vartype limits: dict[str, str] :ivar requests: Requests describes the minimum amount of compute resources required. If Requests is omitted for a container, it defaults to Limits if that is explicitly specified, otherwise to an implementation-defined value. More info: - https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/. + `https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ + `_. :vartype requests: dict[str, str] + :ivar claims: Claims lists the names of resources, defined in spec.resourceClaims, that are + used by this container. + + This is an alpha field and requires enabling the DynamicResourceAllocation feature gate. + + This field is immutable. It can only be set for containers. + :vartype claims: list[~azure.mgmt.iotoperations.models.VolumeClaimResourceRequirementsClaims] """ - limits: Optional[Dict[str, str]] = rest_field() + limits: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Limits describes the maximum amount of compute resources allowed. More info: - https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/.""" - requests: Optional[Dict[str, str]] = rest_field() + `https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ + `_.""" + requests: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) """Requests describes the minimum amount of compute resources required. If Requests is omitted for a container, it defaults to Limits if that is explicitly specified, otherwise to an implementation-defined value. 
More info: - https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/.""" + `https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/ + `_.""" + claims: Optional[List["_models.VolumeClaimResourceRequirementsClaims"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Claims lists the names of resources, defined in spec.resourceClaims, that are used by this + container. + + This is an alpha field and requires enabling the DynamicResourceAllocation feature gate. + + This field is immutable. It can only be set for containers.""" @overload def __init__( @@ -4146,6 +8703,36 @@ def __init__( *, limits: Optional[Dict[str, str]] = None, requests: Optional[Dict[str, str]] = None, + claims: Optional[List["_models.VolumeClaimResourceRequirementsClaims"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class VolumeClaimResourceRequirementsClaims(_Model): + """VolumeClaimResourceRequirementsClaims properties. + + :ivar name: Name of the resource. This must match the name of a resource in + spec.resourceClaims. Required. + :vartype name: str + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the resource. This must match the name of a resource in spec.resourceClaims. Required.""" + + @overload + def __init__( + self, + *, + name: str, ) -> None: ... @overload @@ -4159,7 +8746,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class VolumeClaimSpec(_model_base.Model): +class VolumeClaimSpec(_Model): """VolumeClaimSpec properties. :ivar volume_name: VolumeName is the binding reference to the PersistentVolume backing this @@ -4169,10 +8756,12 @@ class VolumeClaimSpec(_model_base.Model): Filesystem is implied when not included in claim spec. This is a beta feature. :vartype volume_mode: str :ivar storage_class_name: Name of the StorageClass required by the claim. More info: - https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1. + `https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1 + `_. :vartype storage_class_name: str :ivar access_modes: AccessModes contains the desired access modes the volume should have. More - info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1. + info: `https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1 + `_. :vartype access_modes: list[str] :ivar data_source: This field can be used to specify either: * An existing VolumeSnapshot object (snapshot.storage.k8s.io/VolumeSnapshot) * An existing PVC (PersistentVolumeClaim) If @@ -4199,31 +8788,46 @@ class VolumeClaimSpec(_model_base.Model): RecoverVolumeExpansionFailure feature is enabled users are allowed to specify resource requirements that are lower than previous value but must still be higher than capacity recorded in the status field of the claim. More info: - https://kubernetes.io/docs/concepts/storage/persistent-volumes#resources. + `https://kubernetes.io/docs/concepts/storage/persistent-volumes#resources + `_. :vartype resources: ~azure.mgmt.iotoperations.models.VolumeClaimResourceRequirements :ivar selector: A label query over volumes to consider for binding. 
:vartype selector: ~azure.mgmt.iotoperations.models.VolumeClaimSpecSelector """ - volume_name: Optional[str] = rest_field(name="volumeName") + volume_name: Optional[str] = rest_field( + name="volumeName", visibility=["read", "create", "update", "delete", "query"] + ) """VolumeName is the binding reference to the PersistentVolume backing this claim.""" - volume_mode: Optional[str] = rest_field(name="volumeMode") + volume_mode: Optional[str] = rest_field( + name="volumeMode", visibility=["read", "create", "update", "delete", "query"] + ) """volumeMode defines what type of volume is required by the claim. Value of Filesystem is implied when not included in claim spec. This is a beta feature.""" - storage_class_name: Optional[str] = rest_field(name="storageClassName") + storage_class_name: Optional[str] = rest_field( + name="storageClassName", visibility=["read", "create", "update", "delete", "query"] + ) """Name of the StorageClass required by the claim. More info: - https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1.""" - access_modes: Optional[List[str]] = rest_field(name="accessModes") + `https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1 + `_.""" + access_modes: Optional[List[str]] = rest_field( + name="accessModes", visibility=["read", "create", "update", "delete", "query"] + ) """AccessModes contains the desired access modes the volume should have. More info: - https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1.""" - data_source: Optional["_models.LocalKubernetesReference"] = rest_field(name="dataSource") + `https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1 + `_.""" + data_source: Optional["_models.LocalKubernetesReference"] = rest_field( + name="dataSource", visibility=["read", "create", "update", "delete", "query"] + ) """This field can be used to specify either: * An existing VolumeSnapshot object (snapshot.storage.k8s.io/VolumeSnapshot) * An existing PVC (PersistentVolumeClaim) If the provisioner or an external controller can support the specified data source, it will create a new volume based on the contents of the specified data source. If the AnyVolumeDataSource feature gate is enabled, this field will always have the same contents as the DataSourceRef field.""" - data_source_ref: Optional["_models.KubernetesReference"] = rest_field(name="dataSourceRef") + data_source_ref: Optional["_models.KubernetesReference"] = rest_field( + name="dataSourceRef", visibility=["read", "create", "update", "delete", "query"] + ) """Specifies the object from which to populate the volume with data, if a non-empty volume is desired. This may be any local object from a non-empty API group (non core object) or a PersistentVolumeClaim object. When this field is specified, volume binding will only succeed if @@ -4237,13 +8841,18 @@ class VolumeClaimSpec(_model_base.Model): DataSource ignores disallowed values (dropping them), DataSourceRef preserves all values, and generates an error if a disallowed value is specified. (Beta) Using this field requires the AnyVolumeDataSource feature gate to be enabled.""" - resources: Optional["_models.VolumeClaimResourceRequirements"] = rest_field() + resources: Optional["_models.VolumeClaimResourceRequirements"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) """Resources represents the minimum resources the volume should have. 
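A hedged sketch, not taken from the generated code or samples: assuming VolumeClaimSpec accepts keyword arguments matching the field names above (storage_class_name, access_modes, resources, selector), a claim spec might be assembled as follows; the storage class, access mode, and label values are assumed examples.

    # Hypothetical sketch of assembling a VolumeClaimSpec (assumed example values)
    from azure.mgmt.iotoperations import models

    volume_claim = models.VolumeClaimSpec(
        storage_class_name="default",    # assumed StorageClass name
        access_modes=["ReadWriteOnce"],  # standard Kubernetes access mode string
        resources=models.VolumeClaimResourceRequirements(requests={"storage": "1Gi"}),
        selector=models.VolumeClaimSpecSelector(match_labels={"app": "example"}),  # assumed labels
    )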
If RecoverVolumeExpansionFailure feature is enabled users are allowed to specify resource requirements that are lower than previous value but must still be higher than capacity recorded in the status field of the claim. More info: - https://kubernetes.io/docs/concepts/storage/persistent-volumes#resources.""" - selector: Optional["_models.VolumeClaimSpecSelector"] = rest_field() + `https://kubernetes.io/docs/concepts/storage/persistent-volumes#resources + `_.""" + selector: Optional["_models.VolumeClaimSpecSelector"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) """A label query over volumes to consider for binding.""" @overload @@ -4271,7 +8880,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class VolumeClaimSpecSelector(_model_base.Model): +class VolumeClaimSpecSelector(_Model): """VolumeClaimSpecSelector properties. :ivar match_expressions: MatchExpressions is a list of label selector requirements. The @@ -4285,10 +8894,12 @@ class VolumeClaimSpecSelector(_model_base.Model): """ match_expressions: Optional[List["_models.VolumeClaimSpecSelectorMatchExpressions"]] = rest_field( - name="matchExpressions" + name="matchExpressions", visibility=["read", "create", "update", "delete", "query"] ) """MatchExpressions is a list of label selector requirements. The requirements are ANDed.""" - match_labels: Optional[Dict[str, str]] = rest_field(name="matchLabels") + match_labels: Optional[Dict[str, str]] = rest_field( + name="matchLabels", visibility=["read", "create", "update", "delete", "query"] + ) """MatchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels map is equivalent to an element of matchExpressions, whose key field is \"key\", the operator is \"In\", and the values array contains only \"value\". The requirements are ANDed.""" @@ -4312,10 +8923,9 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class VolumeClaimSpecSelectorMatchExpressions(_model_base.Model): +class VolumeClaimSpecSelectorMatchExpressions(_Model): """VolumeClaimSpecSelectorMatchExpressions properties. - :ivar key: key is the label key that the selector applies to. Required. :vartype key: str :ivar operator: operator represents a key's relationship to a set of values. Valid operators @@ -4328,13 +8938,17 @@ class VolumeClaimSpecSelectorMatchExpressions(_model_base.Model): :vartype values_property: list[str] """ - key: str = rest_field() + key: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) """key is the label key that the selector applies to. Required.""" - operator: Union[str, "_models.OperatorValues"] = rest_field() + operator: Union[str, "_models.OperatorValues"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) """operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists and DoesNotExist. Required. Known values are: \"In\", \"NotIn\", \"Exists\", and \"DoesNotExist\".""" - values_property: Optional[List[str]] = rest_field(name="values") + values_property: Optional[List[str]] = rest_field( + name="values", visibility=["read", "create", "update", "delete", "query"] + ) """values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. 
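As a non-authoritative illustration of the naming above: the Python attribute is values_property because the wire name "values" is remapped via rest_field(name="values"), so a selector requirement might be built as in the sketch below; the key and values are assumed examples.

    # Hypothetical sketch; key/values are illustrative, operator is one of the known values above
    from azure.mgmt.iotoperations import models

    expression = models.VolumeClaimSpecSelectorMatchExpressions(
        key="app",                    # label key the selector applies to (assumed)
        operator="In",                # known values: "In", "NotIn", "Exists", "DoesNotExist"
        values_property=["example"],  # serialized on the wire as "values"
    )
    selector = models.VolumeClaimSpecSelector(match_expressions=[expression])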
This array is replaced during a strategic merge patch.""" @@ -4359,16 +8973,15 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class X509ManualCertificate(_model_base.Model): +class X509ManualCertificate(_Model): """X509 Certificate Authentication properties. - :ivar secret_ref: Kubernetes secret containing an X.509 client certificate. This is a reference to the secret through an identifying name, not the secret itself. Required. :vartype secret_ref: str """ - secret_ref: str = rest_field(name="secretRef") + secret_ref: str = rest_field(name="secretRef", visibility=["read", "create", "update", "delete", "query"]) """Kubernetes secret containing an X.509 client certificate. This is a reference to the secret through an identifying name, not the secret itself. Required.""" diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/models/_patch.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/models/_patch.py index f7dd32510333..8bcb627aa475 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/models/_patch.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/models/_patch.py @@ -1,7 +1,8 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/operations/__init__.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/operations/__init__.py index ab557d4ab2d6..8285d4d727f7 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/operations/__init__.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/operations/__init__.py @@ -21,6 +21,11 @@ from ._operations import DataflowProfileOperations # type: ignore from ._operations import DataflowOperations # type: ignore from ._operations import DataflowEndpointOperations # type: ignore +from ._operations import DataflowGraphOperations # type: ignore +from ._operations import RegistryEndpointOperations # type: ignore +from ._operations import AkriConnectorTemplateOperations # type: ignore +from ._operations import AkriConnectorOperations # type: ignore +from ._operations import AkriDiscoveryHandlerOperations # type: ignore from ._patch import __all__ as _patch_all from ._patch import * @@ -36,6 +41,11 @@ "DataflowProfileOperations", "DataflowOperations", "DataflowEndpointOperations", + "DataflowGraphOperations", + "RegistryEndpointOperations", + "AkriConnectorTemplateOperations", + "AkriConnectorOperations", + "AkriDiscoveryHandlerOperations", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/operations/_operations.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/operations/_operations.py index fd89734aec3d..75cdd87fff63 100644 --- 
a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/operations/_operations.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/operations/_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -6,12 +6,13 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase import json -import sys from typing import Any, Callable, Dict, IO, Iterable, Iterator, List, Optional, TypeVar, Union, cast, overload import urllib.parse +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -32,16 +33,14 @@ from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize -from .._serialization import Serializer +from .._configuration import IoTOperationsMgmtClientConfiguration +from .._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize +from .._utils.serialization import Deserializer, Serializer +from .._validation import api_version_validation -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object +JSON = MutableMapping[str, Any] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False @@ -51,7 +50,7 @@ def build_operations_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -72,11 +71,11 @@ def build_instance_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -101,11 +100,11 @@ def build_instance_create_or_update_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = 
kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -132,11 +131,11 @@ def build_instance_update_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -162,11 +161,11 @@ def build_instance_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -190,7 +189,7 @@ def build_instance_list_by_resource_group_request( # pylint: disable=name-too-l _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -219,7 +218,7 @@ def build_instance_list_by_subscription_request( # pylint: disable=name-too-lon _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -245,11 +244,11 @@ def build_broker_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -275,11 +274,11 @@ def build_broker_create_or_update_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -306,11 +305,11 @@ def build_broker_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -335,11 +334,11 @@ def build_broker_list_by_resource_group_request( # pylint: disable=name-too-lon _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = 
_headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -368,11 +367,11 @@ def build_broker_listener_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/listeners/{listenerName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/listeners/{listenerName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -404,11 +403,11 @@ def build_broker_listener_create_or_update_request( # pylint: disable=name-too- _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/listeners/{listenerName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/listeners/{listenerName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -441,11 +440,11 @@ def build_broker_listener_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/listeners/{listenerName}" # pylint: disable=line-too-long + _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/listeners/{listenerName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -471,11 +470,11 @@ def build_broker_listener_list_by_resource_group_request( # pylint: disable=nam _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/listeners" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/listeners" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -505,11 +504,11 @@ def build_broker_authentication_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/authentications/{authenticationName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/authentications/{authenticationName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -541,11 +540,11 @@ def build_broker_authentication_create_or_update_request( # pylint: disable=nam _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/authentications/{authenticationName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/authentications/{authenticationName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": 
_SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -578,11 +577,11 @@ def build_broker_authentication_delete_request( # pylint: disable=name-too-long _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/authentications/{authenticationName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/authentications/{authenticationName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -608,11 +607,11 @@ def build_broker_authentication_list_by_resource_group_request( # pylint: disab _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/authentications" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/authentications" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -642,11 +641,11 @@ def build_broker_authorization_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/authorizations/{authorizationName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/authorizations/{authorizationName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -678,11 +677,11 @@ def build_broker_authorization_create_or_update_request( # pylint: disable=name _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - 
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/authorizations/{authorizationName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/authorizations/{authorizationName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -715,11 +714,11 @@ def build_broker_authorization_delete_request( # pylint: disable=name-too-long _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/authorizations/{authorizationName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/authorizations/{authorizationName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -745,11 +744,11 @@ def build_broker_authorization_list_by_resource_group_request( # pylint: disabl _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/authorizations" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/brokers/{brokerName}/authorizations" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -774,11 +773,11 @@ def build_dataflow_profile_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -804,11 +803,11 @@ def build_dataflow_profile_create_or_update_request( # pylint: disable=name-too _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -835,11 +834,11 @@ def build_dataflow_profile_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -864,11 +863,11 @@ def build_dataflow_profile_list_by_resource_group_request( # pylint: disable=na _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles" path_format_arguments = { "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -897,11 +896,11 @@ def build_dataflow_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}/dataflows/{dataflowName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}/dataflows/{dataflowName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -933,11 +932,11 @@ def build_dataflow_create_or_update_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}/dataflows/{dataflowName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}/dataflows/{dataflowName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -970,11 +969,11 @@ def build_dataflow_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}/dataflows/{dataflowName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}/dataflows/{dataflowName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -1000,11 +999,11 @@ def build_dataflow_list_by_resource_group_request( # pylint: disable=name-too-l _headers = case_insensitive_dict(kwargs.pop("headers", {}) or 
{}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}/dataflows" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}/dataflows" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -1029,11 +1028,11 @@ def build_dataflow_endpoint_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowEndpoints/{dataflowEndpointName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowEndpoints/{dataflowEndpointName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -1059,11 +1058,11 @@ def build_dataflow_endpoint_create_or_update_request( # pylint: disable=name-to _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowEndpoints/{dataflowEndpointName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowEndpoints/{dataflowEndpointName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -1090,11 +1089,11 @@ def build_dataflow_endpoint_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowEndpoints/{dataflowEndpointName}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowEndpoints/{dataflowEndpointName}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -1119,11 +1118,653 @@ def build_dataflow_endpoint_list_by_resource_group_request( # pylint: disable=n _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-11-01")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowEndpoints" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowEndpoints" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_dataflow_graph_get_request( + resource_group_name: str, + instance_name: str, + dataflow_profile_name: str, + dataflow_graph_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}/dataflowGraphs/{dataflowGraphName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + "dataflowProfileName": _SERIALIZER.url("dataflow_profile_name", dataflow_profile_name, "str"), + "dataflowGraphName": _SERIALIZER.url("dataflow_graph_name", dataflow_graph_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, 
params=_params, headers=_headers, **kwargs) + + +def build_dataflow_graph_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, + instance_name: str, + dataflow_profile_name: str, + dataflow_graph_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}/dataflowGraphs/{dataflowGraphName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + "dataflowProfileName": _SERIALIZER.url("dataflow_profile_name", dataflow_profile_name, "str"), + "dataflowGraphName": _SERIALIZER.url("dataflow_graph_name", dataflow_graph_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_dataflow_graph_delete_request( + resource_group_name: str, + instance_name: str, + dataflow_profile_name: str, + dataflow_graph_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}/dataflowGraphs/{dataflowGraphName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + "dataflowProfileName": _SERIALIZER.url("dataflow_profile_name", dataflow_profile_name, "str"), + "dataflowGraphName": _SERIALIZER.url("dataflow_graph_name", dataflow_graph_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_dataflow_graph_list_by_dataflow_profile_request( # pylint: disable=name-too-long + resource_group_name: str, instance_name: str, 
dataflow_profile_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/dataflowProfiles/{dataflowProfileName}/dataflowGraphs" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + "dataflowProfileName": _SERIALIZER.url("dataflow_profile_name", dataflow_profile_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_registry_endpoint_get_request( + resource_group_name: str, instance_name: str, registry_endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/registryEndpoints/{registryEndpointName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + "registryEndpointName": _SERIALIZER.url("registry_endpoint_name", registry_endpoint_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_registry_endpoint_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, instance_name: str, registry_endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/registryEndpoints/{registryEndpointName}" + path_format_arguments = { + 
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + "registryEndpointName": _SERIALIZER.url("registry_endpoint_name", registry_endpoint_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_registry_endpoint_delete_request( + resource_group_name: str, instance_name: str, registry_endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/registryEndpoints/{registryEndpointName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + "registryEndpointName": _SERIALIZER.url("registry_endpoint_name", registry_endpoint_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_registry_endpoint_list_by_instance_resource_request( # pylint: disable=name-too-long + resource_group_name: str, instance_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/registryEndpoints" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def 
build_akri_connector_template_get_request( # pylint: disable=name-too-long + resource_group_name: str, instance_name: str, akri_connector_template_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/akriConnectorTemplates/{akriConnectorTemplateName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + "akriConnectorTemplateName": _SERIALIZER.url( + "akri_connector_template_name", akri_connector_template_name, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_akri_connector_template_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, instance_name: str, akri_connector_template_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/akriConnectorTemplates/{akriConnectorTemplateName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + "akriConnectorTemplateName": _SERIALIZER.url( + "akri_connector_template_name", akri_connector_template_name, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_akri_connector_template_delete_request( # pylint: disable=name-too-long + resource_group_name: str, instance_name: str, akri_connector_template_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/akriConnectorTemplates/{akriConnectorTemplateName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + "akriConnectorTemplateName": _SERIALIZER.url( + "akri_connector_template_name", akri_connector_template_name, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_akri_connector_template_list_by_instance_resource_request( # pylint: disable=name-too-long + resource_group_name: str, instance_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/akriConnectorTemplates" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_akri_connector_get_request( + resource_group_name: str, + instance_name: str, + akri_connector_template_name: str, + akri_connector_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/akriConnectorTemplates/{akriConnectorTemplateName}/akriConnectors/{akriConnectorName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + "akriConnectorTemplateName": _SERIALIZER.url( + "akri_connector_template_name", akri_connector_template_name, "str" + ), + 
"akriConnectorName": _SERIALIZER.url("akri_connector_name", akri_connector_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_akri_connector_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, + instance_name: str, + akri_connector_template_name: str, + akri_connector_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/akriConnectorTemplates/{akriConnectorTemplateName}/akriConnectors/{akriConnectorName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + "akriConnectorTemplateName": _SERIALIZER.url( + "akri_connector_template_name", akri_connector_template_name, "str" + ), + "akriConnectorName": _SERIALIZER.url("akri_connector_name", akri_connector_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_akri_connector_delete_request( + resource_group_name: str, + instance_name: str, + akri_connector_template_name: str, + akri_connector_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/akriConnectorTemplates/{akriConnectorTemplateName}/akriConnectors/{akriConnectorName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + "akriConnectorTemplateName": _SERIALIZER.url( + "akri_connector_template_name", akri_connector_template_name, "str" + ), + "akriConnectorName": _SERIALIZER.url("akri_connector_name", akri_connector_name, "str"), + } + + _url: 
str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_akri_connector_list_by_template_request( # pylint: disable=name-too-long + resource_group_name: str, instance_name: str, akri_connector_template_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/akriConnectorTemplates/{akriConnectorTemplateName}/akriConnectors" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + "akriConnectorTemplateName": _SERIALIZER.url( + "akri_connector_template_name", akri_connector_template_name, "str" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_akri_discovery_handler_get_request( + resource_group_name: str, instance_name: str, akri_discovery_handler_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/akriDiscoveryHandlers/{akriDiscoveryHandlerName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + "akriDiscoveryHandlerName": _SERIALIZER.url("akri_discovery_handler_name", akri_discovery_handler_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_akri_discovery_handler_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, instance_name: str, akri_discovery_handler_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/akriDiscoveryHandlers/{akriDiscoveryHandlerName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + "akriDiscoveryHandlerName": _SERIALIZER.url("akri_discovery_handler_name", akri_discovery_handler_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_akri_discovery_handler_delete_request( # pylint: disable=name-too-long + resource_group_name: str, instance_name: str, akri_discovery_handler_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/akriDiscoveryHandlers/{akriDiscoveryHandlerName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "instanceName": _SERIALIZER.url("instance_name", instance_name, "str"), + "akriDiscoveryHandlerName": _SERIALIZER.url("akri_discovery_handler_name", akri_discovery_handler_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_akri_discovery_handler_list_by_instance_resource_request( # pylint: disable=name-too-long + resource_group_name: str, instance_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-07-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.IoTOperations/instances/{instanceName}/akriDiscoveryHandlers" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -1143,33 +1784,3080 @@ def build_dataflow_endpoint_list_by_resource_group_request( # pylint: disable=n class Operations: """ - .. warning:: - **DO NOT** instantiate this class directly. + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.iotoperations.IoTOperationsMgmtClient`'s + :attr:`operations` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list(self, **kwargs: Any) -> Iterable["_models.Operation"]: + """List the operations for the provider. + + :return: An iterator like instance of Operation + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.Operation] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Operation]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_operations_list_request( + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.Operation], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # 
pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class InstanceOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.iotoperations.IoTOperationsMgmtClient`'s + :attr:`instance` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, instance_name: str, **kwargs: Any) -> _models.InstanceResource: + """Get a InstanceResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :return: InstanceResource. The InstanceResource is compatible with MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.InstanceResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.InstanceResource] = kwargs.pop("cls", None) + + _request = build_instance_get_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.InstanceResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + 
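A consumer never calls the request builders or operation classes above directly; they are reached through the generated client's operation-group attributes (operations, instance, broker, and so on). The following is a minimal usage sketch, not part of the patch: the subscription ID, resource group, and instance names are placeholders, and the client constructor is assumed to take the usual mgmt-plane (credential, subscription_id) arguments.

from azure.identity import DefaultAzureCredential
from azure.mgmt.iotoperations import IoTOperationsMgmtClient

# Placeholder identifiers; substitute real values for your environment.
client = IoTOperationsMgmtClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
)

# InstanceOperations.get returns the deserialized InstanceResource model.
instance = client.instance.get(
    resource_group_name="rgiotoperations",
    instance_name="aio-instance",
)
print(instance.name)

# Operations.list returns an ItemPaged iterator; nextLink paging is followed transparently.
for op in client.operations.list():
    print(op.name)

The begin_* methods defined below follow the same pattern but return LROPoller objects; calling .result() on the poller blocks until the ARM long-running operation completes and yields the final resource.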
def _create_or_update_initial( + self, + resource_group_name: str, + instance_name: str, + resource: Union[_models.InstanceResource, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_instance_create_or_update_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + resource: _models.InstanceResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.InstanceResource]: + """Create a InstanceResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.iotoperations.models.InstanceResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns InstanceResource. 
The InstanceResource is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.InstanceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.InstanceResource]: + """Create a InstanceResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns InstanceResource. The InstanceResource is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.InstanceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.InstanceResource]: + """Create a InstanceResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns InstanceResource. The InstanceResource is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.InstanceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + resource: Union[_models.InstanceResource, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.InstanceResource]: + """Create a InstanceResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param resource: Resource create parameters. Is one of the following types: InstanceResource, + JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.iotoperations.models.InstanceResource or JSON or IO[bytes] + :return: An instance of LROPoller that returns InstanceResource. 
The InstanceResource is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.InstanceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.InstanceResource] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.InstanceResource, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.InstanceResource].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.InstanceResource]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @overload + def update( + self, + resource_group_name: str, + instance_name: str, + properties: _models.InstancePatchModel, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.InstanceResource: + """Update a InstanceResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.iotoperations.models.InstancePatchModel + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: InstanceResource. The InstanceResource is compatible with MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.InstanceResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update( + self, + resource_group_name: str, + instance_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.InstanceResource: + """Update a InstanceResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. 
+ :type instance_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: InstanceResource. The InstanceResource is compatible with MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.InstanceResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update( + self, + resource_group_name: str, + instance_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.InstanceResource: + """Update a InstanceResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: InstanceResource. The InstanceResource is compatible with MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.InstanceResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update( + self, + resource_group_name: str, + instance_name: str, + properties: Union[_models.InstancePatchModel, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.InstanceResource: + """Update a InstanceResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param properties: The resource properties to be updated. Is one of the following types: + InstancePatchModel, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.iotoperations.models.InstancePatchModel or JSON or IO[bytes] + :return: InstanceResource. 
The InstanceResource is compatible with MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.InstanceResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.InstanceResource] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_instance_update_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.InstanceResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _delete_initial(self, resource_group_name: str, instance_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_instance_delete_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + 
response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, instance_name: str, **kwargs: Any) -> LROPoller[None]: + """Delete a InstanceResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.InstanceResource"]: + """List InstanceResource resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :return: An iterator like instance of InstanceResource + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.InstanceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.InstanceResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_instance_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.InstanceResource], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.InstanceResource"]: + """List InstanceResource resources by subscription ID. 
+ + :return: An iterator like instance of InstanceResource + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.InstanceResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.InstanceResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_instance_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.InstanceResource], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class BrokerOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.iotoperations.IoTOperationsMgmtClient`'s + :attr:`broker` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, instance_name: str, broker_name: str, **kwargs: Any + ) -> _models.BrokerResource: + """Get a BrokerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :return: BrokerResource. The BrokerResource is compatible with MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.BrokerResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BrokerResource] = kwargs.pop("cls", None) + + _request = build_broker_get_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.BrokerResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + resource: Union[_models.BrokerResource, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", 
None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_broker_create_or_update_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + resource: _models.BrokerResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BrokerResource]: + """Create a BrokerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.iotoperations.models.BrokerResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BrokerResource. The BrokerResource is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BrokerResource]: + """Create a BrokerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. 
+ :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BrokerResource. The BrokerResource is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BrokerResource]: + """Create a BrokerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BrokerResource. The BrokerResource is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + resource: Union[_models.BrokerResource, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.BrokerResource]: + """Create a BrokerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param resource: Resource create parameters. Is one of the following types: BrokerResource, + JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.iotoperations.models.BrokerResource or JSON or IO[bytes] + :return: An instance of LROPoller that returns BrokerResource. 
The BrokerResource is compatible + with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BrokerResource] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.BrokerResource, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.BrokerResource].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.BrokerResource]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, resource_group_name: str, instance_name: str, broker_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_broker_delete_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, instance_name: str, broker_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete a BrokerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group( + self, resource_group_name: str, instance_name: str, **kwargs: Any + ) -> Iterable["_models.BrokerResource"]: + """List BrokerResource resources by InstanceResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. 
+ :type instance_name: str + :return: An iterator like instance of BrokerResource + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.BrokerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.BrokerResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_broker_list_by_resource_group_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.BrokerResource], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class BrokerListenerOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.iotoperations.IoTOperationsMgmtClient`'s + :attr:`broker_listener` attribute. 
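For orientation, a minimal usage sketch of the BrokerOperations methods generated above (get, begin_create_or_update, begin_delete, list_by_resource_group). This is not part of the generated patch: it assumes the client is built with DefaultAzureCredential, and the subscription, resource group, instance, and broker names are placeholders:

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.iotoperations import IoTOperationsMgmtClient

    # Placeholder identifiers; substitute real values.
    client = IoTOperationsMgmtClient(
        credential=DefaultAzureCredential(),
        subscription_id="00000000-0000-0000-0000-000000000000",
    )

    # get() returns the deserialized BrokerResource directly.
    broker = client.broker.get("my-rg", "my-instance", "my-broker")

    # Create/update is a long-running operation; begin_* returns an LROPoller
    # and result() blocks until ARM polling completes.
    poller = client.broker.begin_create_or_update("my-rg", "my-instance", "my-broker", broker)
    broker = poller.result()

    # list_by_resource_group() returns an ItemPaged iterator; pages are fetched lazily.
    for item in client.broker.list_by_resource_group("my-rg", "my-instance"):
        print(item.name)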
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, instance_name: str, broker_name: str, listener_name: str, **kwargs: Any + ) -> _models.BrokerListenerResource: + """Get a BrokerListenerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param listener_name: Name of Instance broker listener resource. Required. + :type listener_name: str + :return: BrokerListenerResource. The BrokerListenerResource is compatible with MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.BrokerListenerResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BrokerListenerResource] = kwargs.pop("cls", None) + + _request = build_broker_listener_get_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + listener_name=listener_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.BrokerListenerResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + listener_name: str, + resource: Union[_models.BrokerListenerResource, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + 
error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_broker_listener_create_or_update_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + listener_name=listener_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + listener_name: str, + resource: _models.BrokerListenerResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BrokerListenerResource]: + """Create a BrokerListenerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param listener_name: Name of Instance broker listener resource. Required. + :type listener_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.iotoperations.models.BrokerListenerResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BrokerListenerResource. 
The + BrokerListenerResource is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerListenerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + listener_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BrokerListenerResource]: + """Create a BrokerListenerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param listener_name: Name of Instance broker listener resource. Required. + :type listener_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BrokerListenerResource. The + BrokerListenerResource is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerListenerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + listener_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BrokerListenerResource]: + """Create a BrokerListenerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param listener_name: Name of Instance broker listener resource. Required. + :type listener_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BrokerListenerResource. The + BrokerListenerResource is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerListenerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + listener_name: str, + resource: Union[_models.BrokerListenerResource, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.BrokerListenerResource]: + """Create a BrokerListenerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param listener_name: Name of Instance broker listener resource. Required. + :type listener_name: str + :param resource: Resource create parameters. 
Is one of the following types: + BrokerListenerResource, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.iotoperations.models.BrokerListenerResource or JSON or IO[bytes] + :return: An instance of LROPoller that returns BrokerListenerResource. The + BrokerListenerResource is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerListenerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BrokerListenerResource] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + listener_name=listener_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.BrokerListenerResource, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.BrokerListenerResource].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.BrokerListenerResource]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, resource_group_name: str, instance_name: str, broker_name: str, listener_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_broker_listener_delete_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + listener_name=listener_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, instance_name: str, broker_name: str, listener_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete a BrokerListenerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param listener_name: Name of Instance broker listener resource. Required. + :type listener_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + listener_name=listener_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group( + self, resource_group_name: str, instance_name: str, broker_name: str, **kwargs: Any + ) -> Iterable["_models.BrokerListenerResource"]: + 
"""List BrokerListenerResource resources by BrokerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :return: An iterator like instance of BrokerListenerResource + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.BrokerListenerResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.BrokerListenerResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_broker_listener_list_by_resource_group_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.BrokerListenerResource], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class BrokerAuthenticationOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.iotoperations.IoTOperationsMgmtClient`'s + :attr:`broker_authentication` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, instance_name: str, broker_name: str, authentication_name: str, **kwargs: Any + ) -> _models.BrokerAuthenticationResource: + """Get a BrokerAuthenticationResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param authentication_name: Name of Instance broker authentication resource. Required. + :type authentication_name: str + :return: BrokerAuthenticationResource. The BrokerAuthenticationResource is compatible with + MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.BrokerAuthenticationResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BrokerAuthenticationResource] = kwargs.pop("cls", None) + + _request = build_broker_authentication_get_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + authentication_name=authentication_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.BrokerAuthenticationResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + authentication_name: str, + resource: Union[_models.BrokerAuthenticationResource, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: 
ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_broker_authentication_create_or_update_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + authentication_name=authentication_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + authentication_name: str, + resource: _models.BrokerAuthenticationResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BrokerAuthenticationResource]: + """Create a BrokerAuthenticationResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param authentication_name: Name of Instance broker authentication resource. Required. + :type authentication_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.iotoperations.models.BrokerAuthenticationResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BrokerAuthenticationResource. 
The + BrokerAuthenticationResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerAuthenticationResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + authentication_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BrokerAuthenticationResource]: + """Create a BrokerAuthenticationResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param authentication_name: Name of Instance broker authentication resource. Required. + :type authentication_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BrokerAuthenticationResource. The + BrokerAuthenticationResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerAuthenticationResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + authentication_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BrokerAuthenticationResource]: + """Create a BrokerAuthenticationResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param authentication_name: Name of Instance broker authentication resource. Required. + :type authentication_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BrokerAuthenticationResource. The + BrokerAuthenticationResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerAuthenticationResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + authentication_name: str, + resource: Union[_models.BrokerAuthenticationResource, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.BrokerAuthenticationResource]: + """Create a BrokerAuthenticationResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. 
+ :type broker_name: str + :param authentication_name: Name of Instance broker authentication resource. Required. + :type authentication_name: str + :param resource: Resource create parameters. Is one of the following types: + BrokerAuthenticationResource, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.iotoperations.models.BrokerAuthenticationResource or JSON or + IO[bytes] + :return: An instance of LROPoller that returns BrokerAuthenticationResource. The + BrokerAuthenticationResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerAuthenticationResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BrokerAuthenticationResource] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + authentication_name=authentication_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.BrokerAuthenticationResource, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.BrokerAuthenticationResource].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.BrokerAuthenticationResource]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, resource_group_name: str, instance_name: str, broker_name: str, authentication_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_broker_authentication_delete_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + authentication_name=authentication_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + 
params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete( + self, resource_group_name: str, instance_name: str, broker_name: str, authentication_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete a BrokerAuthenticationResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param authentication_name: Name of Instance broker authentication resource. Required. 
+ :type authentication_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + authentication_name=authentication_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group( + self, resource_group_name: str, instance_name: str, broker_name: str, **kwargs: Any + ) -> Iterable["_models.BrokerAuthenticationResource"]: + """List BrokerAuthenticationResource resources by BrokerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. 
+ :type broker_name: str + :return: An iterator like instance of BrokerAuthenticationResource + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.BrokerAuthenticationResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.BrokerAuthenticationResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_broker_authentication_list_by_resource_group_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_models.BrokerAuthenticationResource], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class BrokerAuthorizationOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.iotoperations.IoTOperationsMgmtClient`'s + :attr:`broker_authorization` attribute. 
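A hedged sketch of driving the BrokerAuthenticationOperations long-running create without blocking in result(); the JSON body is illustrative only (the real property shape is defined by the BrokerAuthenticationResource model) and all names are placeholders:

    import time

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.iotoperations import IoTOperationsMgmtClient

    client = IoTOperationsMgmtClient(
        credential=DefaultAzureCredential(),
        subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
    )

    body = {"properties": {}}  # illustrative; see the BrokerAuthenticationResource model
    poller = client.broker_authentication.begin_create_or_update(
        "my-rg", "my-instance", "my-broker", "my-authn", body
    )

    # Poll cooperatively instead of blocking; done() reflects the ARM polling state.
    while not poller.done():
        time.sleep(5)
    auth = poller.result()

    # A continuation token lets another process resume the same operation by passing
    # continuation_token=token back to begin_create_or_update().
    token = poller.continuation_token()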
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get( + self, resource_group_name: str, instance_name: str, broker_name: str, authorization_name: str, **kwargs: Any + ) -> _models.BrokerAuthorizationResource: + """Get a BrokerAuthorizationResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param authorization_name: Name of Instance broker authorization resource. Required. + :type authorization_name: str + :return: BrokerAuthorizationResource. The BrokerAuthorizationResource is compatible with + MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.BrokerAuthorizationResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.BrokerAuthorizationResource] = kwargs.pop("cls", None) + + _request = build_broker_authorization_get_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + authorization_name=authorization_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.BrokerAuthorizationResource, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + authorization_name: str, + resource: Union[_models.BrokerAuthorizationResource, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: 
ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_broker_authorization_create_or_update_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + authorization_name=authorization_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + authorization_name: str, + resource: _models.BrokerAuthorizationResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BrokerAuthorizationResource]: + """Create a BrokerAuthorizationResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param authorization_name: Name of Instance broker authorization resource. Required. + :type authorization_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.iotoperations.models.BrokerAuthorizationResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BrokerAuthorizationResource. 
The + BrokerAuthorizationResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerAuthorizationResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + authorization_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BrokerAuthorizationResource]: + """Create a BrokerAuthorizationResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param authorization_name: Name of Instance broker authorization resource. Required. + :type authorization_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BrokerAuthorizationResource. The + BrokerAuthorizationResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerAuthorizationResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + authorization_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BrokerAuthorizationResource]: + """Create a BrokerAuthorizationResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param authorization_name: Name of Instance broker authorization resource. Required. + :type authorization_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns BrokerAuthorizationResource. The + BrokerAuthorizationResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerAuthorizationResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ - Instead, you should access the following operations through - :class:`~azure.mgmt.iotoperations.IoTOperationsMgmtClient`'s - :attr:`operations` attribute. - """ + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + instance_name: str, + broker_name: str, + authorization_name: str, + resource: Union[_models.BrokerAuthorizationResource, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.BrokerAuthorizationResource]: + """Create a BrokerAuthorizationResource. 
- def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param authorization_name: Name of Instance broker authorization resource. Required. + :type authorization_name: str + :param resource: Resource create parameters. Is one of the following types: + BrokerAuthorizationResource, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.iotoperations.models.BrokerAuthorizationResource or JSON or + IO[bytes] + :return: An instance of LROPoller that returns BrokerAuthorizationResource. The + BrokerAuthorizationResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerAuthorizationResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BrokerAuthorizationResource] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + authorization_name=authorization_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.BrokerAuthorizationResource, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.BrokerAuthorizationResource].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.BrokerAuthorizationResource]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial( + self, resource_group_name: str, instance_name: str, broker_name: str, authorization_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: 
ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_broker_authorization_delete_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + authorization_name=authorization_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore @distributed_trace - def list(self, **kwargs: Any) -> Iterable["_models.Operation"]: - """List the operations for the provider. + def begin_delete( + self, resource_group_name: str, instance_name: str, broker_name: str, authorization_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete a BrokerAuthorizationResource. - :return: An iterator like instance of Operation - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.Operation] + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. + :type broker_name: str + :param authorization_name: Name of Instance broker authorization resource. Required. 
+ :type authorization_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.Operation]] = kwargs.pop("cls", None) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + authorization_name=authorization_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_resource_group( + self, resource_group_name: str, instance_name: str, broker_name: str, **kwargs: Any + ) -> Iterable["_models.BrokerAuthorizationResource"]: + """List BrokerAuthorizationResource resources by BrokerResource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param instance_name: Name of instance. Required. + :type instance_name: str + :param broker_name: Name of broker. Required. 
+ :type broker_name: str + :return: An iterator like instance of BrokerAuthorizationResource + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.BrokerAuthorizationResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.BrokerAuthorizationResource]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -1182,7 +4870,11 @@ def list(self, **kwargs: Any) -> Iterable["_models.Operation"]: def prepare_request(next_link=None): if not next_link: - _request = build_operations_list_request( + _request = build_broker_authorization_list_by_resource_group_request( + resource_group_name=resource_group_name, + instance_name=instance_name, + broker_name=broker_name, + subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, params=_params, @@ -1218,7 +4910,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Operation], deserialized["value"]) + list_of_elem = _deserialize(List[_models.BrokerAuthorizationResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -1242,34 +4934,38 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) -class InstanceOperations: +class DataflowProfileOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through :class:`~azure.mgmt.iotoperations.IoTOperationsMgmtClient`'s - :attr:`instance` attribute. + :attr:`dataflow_profile` attribute. """ - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def get(self, resource_group_name: str, instance_name: str, **kwargs: Any) -> _models.InstanceResource: - """Get a InstanceResource. + def get( + self, resource_group_name: str, instance_name: str, dataflow_profile_name: str, **kwargs: Any + ) -> _models.DataflowProfileResource: + """Get a DataflowProfileResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :return: InstanceResource. The InstanceResource is compatible with MutableMapping - :rtype: ~azure.mgmt.iotoperations.models.InstanceResource + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :return: DataflowProfileResource. 
The DataflowProfileResource is compatible with MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.DataflowProfileResource :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -1283,11 +4979,12 @@ def get(self, resource_group_name: str, instance_name: str, **kwargs: Any) -> _m _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.InstanceResource] = kwargs.pop("cls", None) + cls: ClsType[_models.DataflowProfileResource] = kwargs.pop("cls", None) - _request = build_instance_get_request( + _request = build_dataflow_profile_get_request( resource_group_name=resource_group_name, instance_name=instance_name, + dataflow_profile_name=dataflow_profile_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -1318,7 +5015,7 @@ def get(self, resource_group_name: str, instance_name: str, **kwargs: Any) -> _m if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.InstanceResource, response.json()) + deserialized = _deserialize(_models.DataflowProfileResource, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1329,7 +5026,8 @@ def _create_or_update_initial( self, resource_group_name: str, instance_name: str, - resource: Union[_models.InstanceResource, JSON, IO[bytes]], + dataflow_profile_name: str, + resource: Union[_models.DataflowProfileResource, JSON, IO[bytes]], **kwargs: Any ) -> Iterator[bytes]: error_map: MutableMapping = { @@ -1353,9 +5051,10 @@ def _create_or_update_initial( else: _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_instance_create_or_update_request( + _request = build_dataflow_profile_create_or_update_request( resource_group_name=resource_group_name, instance_name=instance_name, + dataflow_profile_name=dataflow_profile_name, subscription_id=self._config.subscription_id, content_type=content_type, api_version=self._config.api_version, @@ -1403,26 +5102,29 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - resource: _models.InstanceResource, + dataflow_profile_name: str, + resource: _models.DataflowProfileResource, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.InstanceResource]: - """Create a InstanceResource. + ) -> LROPoller[_models.DataflowProfileResource]: + """Create a DataflowProfileResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str :param resource: Resource create parameters. Required. - :type resource: ~azure.mgmt.iotoperations.models.InstanceResource + :type resource: ~azure.mgmt.iotoperations.models.DataflowProfileResource :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns InstanceResource. The InstanceResource is - compatible with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.InstanceResource] + :return: An instance of LROPoller that returns DataflowProfileResource. 
The + DataflowProfileResource is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowProfileResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -1431,26 +5133,29 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, + dataflow_profile_name: str, resource: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.InstanceResource]: - """Create a InstanceResource. + ) -> LROPoller[_models.DataflowProfileResource]: + """Create a DataflowProfileResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str :param resource: Resource create parameters. Required. :type resource: JSON :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns InstanceResource. The InstanceResource is - compatible with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.InstanceResource] + :return: An instance of LROPoller that returns DataflowProfileResource. The + DataflowProfileResource is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowProfileResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -1459,26 +5164,29 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, + dataflow_profile_name: str, resource: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.InstanceResource]: - """Create a InstanceResource. + ) -> LROPoller[_models.DataflowProfileResource]: + """Create a DataflowProfileResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str :param resource: Resource create parameters. Required. :type resource: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns InstanceResource. The InstanceResource is - compatible with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.InstanceResource] + :return: An instance of LROPoller that returns DataflowProfileResource. The + DataflowProfileResource is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowProfileResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -1487,29 +5195,32 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - resource: Union[_models.InstanceResource, JSON, IO[bytes]], + dataflow_profile_name: str, + resource: Union[_models.DataflowProfileResource, JSON, IO[bytes]], **kwargs: Any - ) -> LROPoller[_models.InstanceResource]: - """Create a InstanceResource. + ) -> LROPoller[_models.DataflowProfileResource]: + """Create a DataflowProfileResource. 
:param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param resource: Resource create parameters. Is one of the following types: InstanceResource, - JSON, IO[bytes] Required. - :type resource: ~azure.mgmt.iotoperations.models.InstanceResource or JSON or IO[bytes] - :return: An instance of LROPoller that returns InstanceResource. The InstanceResource is - compatible with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.InstanceResource] + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param resource: Resource create parameters. Is one of the following types: + DataflowProfileResource, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.iotoperations.models.DataflowProfileResource or JSON or IO[bytes] + :return: An instance of LROPoller that returns DataflowProfileResource. The + DataflowProfileResource is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowProfileResource] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.InstanceResource] = kwargs.pop("cls", None) + cls: ClsType[_models.DataflowProfileResource] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) @@ -1517,6 +5228,7 @@ def begin_create_or_update( raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, instance_name=instance_name, + dataflow_profile_name=dataflow_profile_name, resource=resource, content_type=content_type, cls=lambda x, y, z: x, @@ -1529,7 +5241,7 @@ def begin_create_or_update( def get_long_running_output(pipeline_response): response = pipeline_response.http_response - deserialized = _deserialize(_models.InstanceResource, response.json()) + deserialized = _deserialize(_models.DataflowProfileResource, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -1547,183 +5259,19 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller[_models.InstanceResource].from_continuation_token( + return LROPoller[_models.DataflowProfileResource].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller[_models.InstanceResource]( + return LROPoller[_models.DataflowProfileResource]( self._client, raw_result, get_long_running_output, polling_method # type: ignore ) - @overload - def update( - self, - resource_group_name: str, - instance_name: str, - properties: _models.InstancePatchModel, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.InstanceResource: - """Update a InstanceResource. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param instance_name: Name of instance. Required. 
- :type instance_name: str - :param properties: The resource properties to be updated. Required. - :type properties: ~azure.mgmt.iotoperations.models.InstancePatchModel - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: InstanceResource. The InstanceResource is compatible with MutableMapping - :rtype: ~azure.mgmt.iotoperations.models.InstanceResource - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def update( - self, - resource_group_name: str, - instance_name: str, - properties: JSON, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.InstanceResource: - """Update a InstanceResource. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param instance_name: Name of instance. Required. - :type instance_name: str - :param properties: The resource properties to be updated. Required. - :type properties: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: InstanceResource. The InstanceResource is compatible with MutableMapping - :rtype: ~azure.mgmt.iotoperations.models.InstanceResource - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def update( - self, - resource_group_name: str, - instance_name: str, - properties: IO[bytes], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.InstanceResource: - """Update a InstanceResource. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param instance_name: Name of instance. Required. - :type instance_name: str - :param properties: The resource properties to be updated. Required. - :type properties: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: InstanceResource. The InstanceResource is compatible with MutableMapping - :rtype: ~azure.mgmt.iotoperations.models.InstanceResource - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def update( - self, - resource_group_name: str, - instance_name: str, - properties: Union[_models.InstancePatchModel, JSON, IO[bytes]], - **kwargs: Any - ) -> _models.InstanceResource: - """Update a InstanceResource. - - :param resource_group_name: The name of the resource group. The name is case insensitive. - Required. - :type resource_group_name: str - :param instance_name: Name of instance. Required. - :type instance_name: str - :param properties: The resource properties to be updated. Is one of the following types: - InstancePatchModel, JSON, IO[bytes] Required. - :type properties: ~azure.mgmt.iotoperations.models.InstancePatchModel or JSON or IO[bytes] - :return: InstanceResource. 
The InstanceResource is compatible with MutableMapping - :rtype: ~azure.mgmt.iotoperations.models.InstanceResource - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.InstanceResource] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _content = None - if isinstance(properties, (IOBase, bytes)): - _content = properties - else: - _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - - _request = build_instance_update_request( - resource_group_name=resource_group_name, - instance_name=instance_name, - subscription_id=self._config.subscription_id, - content_type=content_type, - api_version=self._config.api_version, - content=_content, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - if _stream: - try: - response.read() # Load the body in memory and close the socket - except (StreamConsumedError, StreamClosedError): - pass - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response.json()) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.InstanceResource, response.json()) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - def _delete_initial(self, resource_group_name: str, instance_name: str, **kwargs: Any) -> Iterator[bytes]: + def _delete_initial( + self, resource_group_name: str, instance_name: str, dataflow_profile_name: str, **kwargs: Any + ) -> Iterator[bytes]: error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, @@ -1737,9 +5285,10 @@ def _delete_initial(self, resource_group_name: str, instance_name: str, **kwargs cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - _request = build_instance_delete_request( + _request = build_dataflow_profile_delete_request( resource_group_name=resource_group_name, instance_name=instance_name, + dataflow_profile_name=dataflow_profile_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -1779,14 +5328,18 @@ def _delete_initial(self, resource_group_name: str, instance_name: str, **kwargs return deserialized # type: ignore @distributed_trace - def begin_delete(self, resource_group_name: str, instance_name: str, **kwargs: Any) -> LROPoller[None]: - """Delete a InstanceResource. 
+ def begin_delete( + self, resource_group_name: str, instance_name: str, dataflow_profile_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete a DataflowProfileResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str :return: An instance of LROPoller that returns None :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -1802,6 +5355,7 @@ def begin_delete(self, resource_group_name: str, instance_name: str, **kwargs: A raw_result = self._delete_initial( resource_group_name=resource_group_name, instance_name=instance_name, + dataflow_profile_name=dataflow_profile_name, cls=lambda x, y, z: x, headers=_headers, params=_params, @@ -1836,20 +5390,24 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace - def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.InstanceResource"]: - """List InstanceResource resources by resource group. + def list_by_resource_group( + self, resource_group_name: str, instance_name: str, **kwargs: Any + ) -> Iterable["_models.DataflowProfileResource"]: + """List DataflowProfileResource resources by InstanceResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str - :return: An iterator like instance of InstanceResource - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.InstanceResource] + :param instance_name: Name of instance. Required. 
+ :type instance_name: str + :return: An iterator like instance of DataflowProfileResource + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.DataflowProfileResource] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.InstanceResource]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.DataflowProfileResource]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -1862,8 +5420,9 @@ def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Ite def prepare_request(next_link=None): if not next_link: - _request = build_instance_list_by_resource_group_request( + _request = build_dataflow_profile_list_by_resource_group_request( resource_group_name=resource_group_name, + instance_name=instance_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -1900,91 +5459,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.InstanceResource], deserialized["value"]) - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("nextLink") or None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = _failsafe_deserialize(_models.ErrorResponse, response.json()) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - @distributed_trace - def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.InstanceResource"]: - """List InstanceResource resources by subscription ID. 
- - :return: An iterator like instance of InstanceResource - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.InstanceResource] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[List[_models.InstanceResource]] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_instance_list_by_subscription_request( - subscription_id=self._config.subscription_id, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.base_url", self._config.base_url, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - path_format_arguments = { - "endpoint": self._serialize.url( - "self._config.base_url", self._config.base_url, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - return _request - - def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.InstanceResource], deserialized["value"]) + list_of_elem = _deserialize(List[_models.DataflowProfileResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -2008,38 +5483,45 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) -class BrokerOperations: +class DataflowOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through :class:`~azure.mgmt.iotoperations.IoTOperationsMgmtClient`'s - :attr:`broker` attribute. + :attr:`dataflow` attribute. 
""" - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def get( - self, resource_group_name: str, instance_name: str, broker_name: str, **kwargs: Any - ) -> _models.BrokerResource: - """Get a BrokerResource. + self, + resource_group_name: str, + instance_name: str, + dataflow_profile_name: str, + dataflow_name: str, + **kwargs: Any + ) -> _models.DataflowResource: + """Get a DataflowResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :return: BrokerResource. The BrokerResource is compatible with MutableMapping - :rtype: ~azure.mgmt.iotoperations.models.BrokerResource + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param dataflow_name: Name of Instance dataflowProfile dataflow resource. Required. + :type dataflow_name: str + :return: DataflowResource. 
The DataflowResource is compatible with MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.DataflowResource :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -2053,12 +5535,13 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.BrokerResource] = kwargs.pop("cls", None) + cls: ClsType[_models.DataflowResource] = kwargs.pop("cls", None) - _request = build_broker_get_request( + _request = build_dataflow_get_request( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, + dataflow_profile_name=dataflow_profile_name, + dataflow_name=dataflow_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -2089,7 +5572,7 @@ def get( if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.BrokerResource, response.json()) + deserialized = _deserialize(_models.DataflowResource, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2100,8 +5583,9 @@ def _create_or_update_initial( self, resource_group_name: str, instance_name: str, - broker_name: str, - resource: Union[_models.BrokerResource, JSON, IO[bytes]], + dataflow_profile_name: str, + dataflow_name: str, + resource: Union[_models.DataflowResource, JSON, IO[bytes]], **kwargs: Any ) -> Iterator[bytes]: error_map: MutableMapping = { @@ -2125,10 +5609,11 @@ def _create_or_update_initial( else: _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_broker_create_or_update_request( + _request = build_dataflow_create_or_update_request( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, + dataflow_profile_name=dataflow_profile_name, + dataflow_name=dataflow_name, subscription_id=self._config.subscription_id, content_type=content_type, api_version=self._config.api_version, @@ -2176,29 +5661,32 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - broker_name: str, - resource: _models.BrokerResource, + dataflow_profile_name: str, + dataflow_name: str, + resource: _models.DataflowResource, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.BrokerResource]: - """Create a BrokerResource. + ) -> LROPoller[_models.DataflowResource]: + """Create a DataflowResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param dataflow_name: Name of Instance dataflowProfile dataflow resource. Required. + :type dataflow_name: str :param resource: Resource create parameters. Required. - :type resource: ~azure.mgmt.iotoperations.models.BrokerResource + :type resource: ~azure.mgmt.iotoperations.models.DataflowResource :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns BrokerResource. 
The BrokerResource is compatible - with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerResource] + :return: An instance of LROPoller that returns DataflowResource. The DataflowResource is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -2207,29 +5695,32 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - broker_name: str, + dataflow_profile_name: str, + dataflow_name: str, resource: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.BrokerResource]: - """Create a BrokerResource. + ) -> LROPoller[_models.DataflowResource]: + """Create a DataflowResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param dataflow_name: Name of Instance dataflowProfile dataflow resource. Required. + :type dataflow_name: str :param resource: Resource create parameters. Required. :type resource: JSON :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns BrokerResource. The BrokerResource is compatible - with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerResource] + :return: An instance of LROPoller that returns DataflowResource. The DataflowResource is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -2238,29 +5729,32 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - broker_name: str, + dataflow_profile_name: str, + dataflow_name: str, resource: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.BrokerResource]: - """Create a BrokerResource. + ) -> LROPoller[_models.DataflowResource]: + """Create a DataflowResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param dataflow_name: Name of Instance dataflowProfile dataflow resource. Required. + :type dataflow_name: str :param resource: Resource create parameters. Required. :type resource: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns BrokerResource. The BrokerResource is compatible - with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerResource] + :return: An instance of LROPoller that returns DataflowResource. 
The DataflowResource is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -2269,32 +5763,35 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - broker_name: str, - resource: Union[_models.BrokerResource, JSON, IO[bytes]], + dataflow_profile_name: str, + dataflow_name: str, + resource: Union[_models.DataflowResource, JSON, IO[bytes]], **kwargs: Any - ) -> LROPoller[_models.BrokerResource]: - """Create a BrokerResource. + ) -> LROPoller[_models.DataflowResource]: + """Create a DataflowResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param resource: Resource create parameters. Is one of the following types: BrokerResource, + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param dataflow_name: Name of Instance dataflowProfile dataflow resource. Required. + :type dataflow_name: str + :param resource: Resource create parameters. Is one of the following types: DataflowResource, JSON, IO[bytes] Required. - :type resource: ~azure.mgmt.iotoperations.models.BrokerResource or JSON or IO[bytes] - :return: An instance of LROPoller that returns BrokerResource. The BrokerResource is compatible - with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerResource] + :type resource: ~azure.mgmt.iotoperations.models.DataflowResource or JSON or IO[bytes] + :return: An instance of LROPoller that returns DataflowResource. 
The DataflowResource is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowResource] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.BrokerResource] = kwargs.pop("cls", None) + cls: ClsType[_models.DataflowResource] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) @@ -2302,7 +5799,8 @@ def begin_create_or_update( raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, + dataflow_profile_name=dataflow_profile_name, + dataflow_name=dataflow_name, resource=resource, content_type=content_type, cls=lambda x, y, z: x, @@ -2315,7 +5813,7 @@ def begin_create_or_update( def get_long_running_output(pipeline_response): response = pipeline_response.http_response - deserialized = _deserialize(_models.BrokerResource, response.json()) + deserialized = _deserialize(_models.DataflowResource, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2333,18 +5831,23 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller[_models.BrokerResource].from_continuation_token( + return LROPoller[_models.DataflowResource].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller[_models.BrokerResource]( + return LROPoller[_models.DataflowResource]( self._client, raw_result, get_long_running_output, polling_method # type: ignore ) def _delete_initial( - self, resource_group_name: str, instance_name: str, broker_name: str, **kwargs: Any + self, + resource_group_name: str, + instance_name: str, + dataflow_profile_name: str, + dataflow_name: str, + **kwargs: Any ) -> Iterator[bytes]: error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -2359,10 +5862,11 @@ def _delete_initial( cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - _request = build_broker_delete_request( + _request = build_dataflow_delete_request( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, + dataflow_profile_name=dataflow_profile_name, + dataflow_name=dataflow_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -2403,17 +5907,24 @@ def _delete_initial( @distributed_trace def begin_delete( - self, resource_group_name: str, instance_name: str, broker_name: str, **kwargs: Any + self, + resource_group_name: str, + instance_name: str, + dataflow_profile_name: str, + dataflow_name: str, + **kwargs: Any ) -> LROPoller[None]: - """Delete a BrokerResource. + """Delete a DataflowResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. 
+ :type dataflow_profile_name: str + :param dataflow_name: Name of Instance dataflowProfile dataflow resource. Required. + :type dataflow_name: str :return: An instance of LROPoller that returns None :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -2429,7 +5940,8 @@ def begin_delete( raw_result = self._delete_initial( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, + dataflow_profile_name=dataflow_profile_name, + dataflow_name=dataflow_name, cls=lambda x, y, z: x, headers=_headers, params=_params, @@ -2465,23 +5977,25 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- @distributed_trace def list_by_resource_group( - self, resource_group_name: str, instance_name: str, **kwargs: Any - ) -> Iterable["_models.BrokerResource"]: - """List BrokerResource resources by InstanceResource. + self, resource_group_name: str, instance_name: str, dataflow_profile_name: str, **kwargs: Any + ) -> Iterable["_models.DataflowResource"]: + """List DataflowResource resources by DataflowProfileResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :return: An iterator like instance of BrokerResource - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.BrokerResource] + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :return: An iterator like instance of DataflowResource + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.DataflowResource] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.BrokerResource]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.DataflowResource]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -2494,9 +6008,10 @@ def list_by_resource_group( def prepare_request(next_link=None): if not next_link: - _request = build_broker_list_by_resource_group_request( + _request = build_dataflow_list_by_resource_group_request( resource_group_name=resource_group_name, instance_name=instance_name, + dataflow_profile_name=dataflow_profile_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -2533,7 +6048,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.BrokerResource], deserialized["value"]) + list_of_elem = _deserialize(List[_models.DataflowResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -2557,40 +6072,39 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) -class BrokerListenerOperations: +class DataflowEndpointOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through :class:`~azure.mgmt.iotoperations.IoTOperationsMgmtClient`'s - :attr:`broker_listener` attribute. + :attr:`dataflow_endpoint` attribute. 
""" - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def get( - self, resource_group_name: str, instance_name: str, broker_name: str, listener_name: str, **kwargs: Any - ) -> _models.BrokerListenerResource: - """Get a BrokerListenerResource. + self, resource_group_name: str, instance_name: str, dataflow_endpoint_name: str, **kwargs: Any + ) -> _models.DataflowEndpointResource: + """Get a DataflowEndpointResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param listener_name: Name of Instance broker listener resource. Required. - :type listener_name: str - :return: BrokerListenerResource. The BrokerListenerResource is compatible with MutableMapping - :rtype: ~azure.mgmt.iotoperations.models.BrokerListenerResource + :param dataflow_endpoint_name: Name of Instance dataflowEndpoint resource. Required. + :type dataflow_endpoint_name: str + :return: DataflowEndpointResource. 
The DataflowEndpointResource is compatible with + MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.DataflowEndpointResource :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -2604,13 +6118,12 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.BrokerListenerResource] = kwargs.pop("cls", None) + cls: ClsType[_models.DataflowEndpointResource] = kwargs.pop("cls", None) - _request = build_broker_listener_get_request( + _request = build_dataflow_endpoint_get_request( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, - listener_name=listener_name, + dataflow_endpoint_name=dataflow_endpoint_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -2641,7 +6154,7 @@ def get( if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.BrokerListenerResource, response.json()) + deserialized = _deserialize(_models.DataflowEndpointResource, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -2652,9 +6165,8 @@ def _create_or_update_initial( self, resource_group_name: str, instance_name: str, - broker_name: str, - listener_name: str, - resource: Union[_models.BrokerListenerResource, JSON, IO[bytes]], + dataflow_endpoint_name: str, + resource: Union[_models.DataflowEndpointResource, JSON, IO[bytes]], **kwargs: Any ) -> Iterator[bytes]: error_map: MutableMapping = { @@ -2678,11 +6190,10 @@ def _create_or_update_initial( else: _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_broker_listener_create_or_update_request( + _request = build_dataflow_endpoint_create_or_update_request( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, - listener_name=listener_name, + dataflow_endpoint_name=dataflow_endpoint_name, subscription_id=self._config.subscription_id, content_type=content_type, api_version=self._config.api_version, @@ -2730,32 +6241,30 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - broker_name: str, - listener_name: str, - resource: _models.BrokerListenerResource, + dataflow_endpoint_name: str, + resource: _models.DataflowEndpointResource, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.BrokerListenerResource]: - """Create a BrokerListenerResource. + ) -> LROPoller[_models.DataflowEndpointResource]: + """Create a DataflowEndpointResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param listener_name: Name of Instance broker listener resource. Required. - :type listener_name: str + :param dataflow_endpoint_name: Name of Instance dataflowEndpoint resource. Required. + :type dataflow_endpoint_name: str :param resource: Resource create parameters. Required. - :type resource: ~azure.mgmt.iotoperations.models.BrokerListenerResource + :type resource: ~azure.mgmt.iotoperations.models.DataflowEndpointResource :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". 
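# Usage sketch for DataflowEndpointOperations via the documented `dataflow_endpoint`
# attribute, matching the get and begin_create_or_update signatures above. The endpoint
# body is a placeholder dict; concrete property shapes live in the dataflow_endpoint_*
# generated samples.
from azure.identity import DefaultAzureCredential
from azure.mgmt.iotoperations import IoTOperationsMgmtClient

client = IoTOperationsMgmtClient(DefaultAzureCredential(), subscription_id="<subscription-id>")

# Read an existing endpoint.
endpoint = client.dataflow_endpoint.get(
    resource_group_name="<resource-group>",
    instance_name="<instance>",
    dataflow_endpoint_name="<endpoint>",
)

# Create or update an endpoint; begin_* returns an LROPoller.
poller = client.dataflow_endpoint.begin_create_or_update(
    resource_group_name="<resource-group>",
    instance_name="<instance>",
    dataflow_endpoint_name="<endpoint>",
    resource={"properties": {}},  # placeholder body
)
endpoint = poller.result()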
:paramtype content_type: str - :return: An instance of LROPoller that returns BrokerListenerResource. The - BrokerListenerResource is compatible with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerListenerResource] + :return: An instance of LROPoller that returns DataflowEndpointResource. The + DataflowEndpointResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowEndpointResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -2764,32 +6273,30 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - broker_name: str, - listener_name: str, + dataflow_endpoint_name: str, resource: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.BrokerListenerResource]: - """Create a BrokerListenerResource. + ) -> LROPoller[_models.DataflowEndpointResource]: + """Create a DataflowEndpointResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param listener_name: Name of Instance broker listener resource. Required. - :type listener_name: str + :param dataflow_endpoint_name: Name of Instance dataflowEndpoint resource. Required. + :type dataflow_endpoint_name: str :param resource: Resource create parameters. Required. :type resource: JSON :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns BrokerListenerResource. The - BrokerListenerResource is compatible with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerListenerResource] + :return: An instance of LROPoller that returns DataflowEndpointResource. The + DataflowEndpointResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowEndpointResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -2798,32 +6305,30 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - broker_name: str, - listener_name: str, + dataflow_endpoint_name: str, resource: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.BrokerListenerResource]: - """Create a BrokerListenerResource. + ) -> LROPoller[_models.DataflowEndpointResource]: + """Create a DataflowEndpointResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param listener_name: Name of Instance broker listener resource. Required. - :type listener_name: str + :param dataflow_endpoint_name: Name of Instance dataflowEndpoint resource. Required. + :type dataflow_endpoint_name: str :param resource: Resource create parameters. Required. :type resource: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns BrokerListenerResource. 
The - BrokerListenerResource is compatible with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerListenerResource] + :return: An instance of LROPoller that returns DataflowEndpointResource. The + DataflowEndpointResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowEndpointResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -2832,35 +6337,33 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - broker_name: str, - listener_name: str, - resource: Union[_models.BrokerListenerResource, JSON, IO[bytes]], + dataflow_endpoint_name: str, + resource: Union[_models.DataflowEndpointResource, JSON, IO[bytes]], **kwargs: Any - ) -> LROPoller[_models.BrokerListenerResource]: - """Create a BrokerListenerResource. + ) -> LROPoller[_models.DataflowEndpointResource]: + """Create a DataflowEndpointResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param listener_name: Name of Instance broker listener resource. Required. - :type listener_name: str + :param dataflow_endpoint_name: Name of Instance dataflowEndpoint resource. Required. + :type dataflow_endpoint_name: str :param resource: Resource create parameters. Is one of the following types: - BrokerListenerResource, JSON, IO[bytes] Required. - :type resource: ~azure.mgmt.iotoperations.models.BrokerListenerResource or JSON or IO[bytes] - :return: An instance of LROPoller that returns BrokerListenerResource. The - BrokerListenerResource is compatible with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerListenerResource] + DataflowEndpointResource, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.iotoperations.models.DataflowEndpointResource or JSON or IO[bytes] + :return: An instance of LROPoller that returns DataflowEndpointResource. 
The + DataflowEndpointResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowEndpointResource] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.BrokerListenerResource] = kwargs.pop("cls", None) + cls: ClsType[_models.DataflowEndpointResource] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) @@ -2868,8 +6371,7 @@ def begin_create_or_update( raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, - listener_name=listener_name, + dataflow_endpoint_name=dataflow_endpoint_name, resource=resource, content_type=content_type, cls=lambda x, y, z: x, @@ -2882,7 +6384,7 @@ def begin_create_or_update( def get_long_running_output(pipeline_response): response = pipeline_response.http_response - deserialized = _deserialize(_models.BrokerListenerResource, response.json()) + deserialized = _deserialize(_models.DataflowEndpointResource, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -2900,18 +6402,18 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller[_models.BrokerListenerResource].from_continuation_token( + return LROPoller[_models.DataflowEndpointResource].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller[_models.BrokerListenerResource]( + return LROPoller[_models.DataflowEndpointResource]( self._client, raw_result, get_long_running_output, polling_method # type: ignore ) def _delete_initial( - self, resource_group_name: str, instance_name: str, broker_name: str, listener_name: str, **kwargs: Any + self, resource_group_name: str, instance_name: str, dataflow_endpoint_name: str, **kwargs: Any ) -> Iterator[bytes]: error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -2926,11 +6428,10 @@ def _delete_initial( cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - _request = build_broker_listener_delete_request( + _request = build_dataflow_endpoint_delete_request( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, - listener_name=listener_name, + dataflow_endpoint_name=dataflow_endpoint_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -2971,19 +6472,17 @@ def _delete_initial( @distributed_trace def begin_delete( - self, resource_group_name: str, instance_name: str, broker_name: str, listener_name: str, **kwargs: Any + self, resource_group_name: str, instance_name: str, dataflow_endpoint_name: str, **kwargs: Any ) -> LROPoller[None]: - """Delete a BrokerListenerResource. + """Delete a DataflowEndpointResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. 
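# Sketch of detaching and resuming the create LRO shown above. The implementation pops
# `polling` (True, False, or a custom PollingMethod) and `continuation_token` from kwargs,
# and rebuilds the poller with from_continuation_token when a token is supplied. Names and
# the body are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.iotoperations import IoTOperationsMgmtClient

client = IoTOperationsMgmtClient(DefaultAzureCredential(), subscription_id="<subscription-id>")
poller = client.dataflow_endpoint.begin_create_or_update(
    resource_group_name="<resource-group>",
    instance_name="<instance>",
    dataflow_endpoint_name="<endpoint>",
    resource={"properties": {}},  # placeholder body
)
token = poller.continuation_token()  # persist this string somewhere durable

# Later (possibly in another process): pass the same arguments plus the saved token;
# the poller is rebuilt from the saved state instead of re-issuing the initial request.
resumed = client.dataflow_endpoint.begin_create_or_update(
    resource_group_name="<resource-group>",
    instance_name="<instance>",
    dataflow_endpoint_name="<endpoint>",
    resource={"properties": {}},
    continuation_token=token,
)
endpoint = resumed.result()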
- :type broker_name: str - :param listener_name: Name of Instance broker listener resource. Required. - :type listener_name: str + :param dataflow_endpoint_name: Name of Instance dataflowEndpoint resource. Required. + :type dataflow_endpoint_name: str :return: An instance of LROPoller that returns None :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -2999,8 +6498,7 @@ def begin_delete( raw_result = self._delete_initial( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, - listener_name=listener_name, + dataflow_endpoint_name=dataflow_endpoint_name, cls=lambda x, y, z: x, headers=_headers, params=_params, @@ -3036,25 +6534,23 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- @distributed_trace def list_by_resource_group( - self, resource_group_name: str, instance_name: str, broker_name: str, **kwargs: Any - ) -> Iterable["_models.BrokerListenerResource"]: - """List BrokerListenerResource resources by BrokerResource. + self, resource_group_name: str, instance_name: str, **kwargs: Any + ) -> Iterable["_models.DataflowEndpointResource"]: + """List DataflowEndpointResource resources by InstanceResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :return: An iterator like instance of BrokerListenerResource - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.BrokerListenerResource] + :return: An iterator like instance of DataflowEndpointResource + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.DataflowEndpointResource] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.BrokerListenerResource]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.DataflowEndpointResource]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -3067,10 +6563,9 @@ def list_by_resource_group( def prepare_request(next_link=None): if not next_link: - _request = build_broker_listener_list_by_resource_group_request( + _request = build_dataflow_endpoint_list_by_resource_group_request( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -3107,7 +6602,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.BrokerListenerResource], deserialized["value"]) + list_of_elem = _deserialize(List[_models.DataflowEndpointResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -3131,41 +6626,59 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) -class BrokerAuthenticationOperations: +class DataflowGraphOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through :class:`~azure.mgmt.iotoperations.IoTOperationsMgmtClient`'s - :attr:`broker_authentication` attribute. + :attr:`dataflow_graph` attribute. 
""" - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "dataflow_profile_name", + "dataflow_graph_name", + "accept", + ] + }, + ) def get( - self, resource_group_name: str, instance_name: str, broker_name: str, authentication_name: str, **kwargs: Any - ) -> _models.BrokerAuthenticationResource: - """Get a BrokerAuthenticationResource. + self, + resource_group_name: str, + instance_name: str, + dataflow_profile_name: str, + dataflow_graph_name: str, + **kwargs: Any + ) -> _models.DataflowGraphResource: + """Get a DataflowGraphResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param authentication_name: Name of Instance broker authentication resource. Required. - :type authentication_name: str - :return: BrokerAuthenticationResource. The BrokerAuthenticationResource is compatible with - MutableMapping - :rtype: ~azure.mgmt.iotoperations.models.BrokerAuthenticationResource + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param dataflow_graph_name: Name of Instance dataflowEndpoint resource. Required. + :type dataflow_graph_name: str + :return: DataflowGraphResource. 
The DataflowGraphResource is compatible with MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.DataflowGraphResource :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -3179,13 +6692,13 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.BrokerAuthenticationResource] = kwargs.pop("cls", None) + cls: ClsType[_models.DataflowGraphResource] = kwargs.pop("cls", None) - _request = build_broker_authentication_get_request( + _request = build_dataflow_graph_get_request( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, - authentication_name=authentication_name, + dataflow_profile_name=dataflow_profile_name, + dataflow_graph_name=dataflow_graph_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -3216,20 +6729,35 @@ def get( if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.BrokerAuthenticationResource, response.json()) + deserialized = _deserialize(_models.DataflowGraphResource, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "dataflow_profile_name", + "dataflow_graph_name", + "content_type", + "accept", + ] + }, + ) def _create_or_update_initial( self, resource_group_name: str, instance_name: str, - broker_name: str, - authentication_name: str, - resource: Union[_models.BrokerAuthenticationResource, JSON, IO[bytes]], + dataflow_profile_name: str, + dataflow_graph_name: str, + resource: Union[_models.DataflowGraphResource, JSON, IO[bytes]], **kwargs: Any ) -> Iterator[bytes]: error_map: MutableMapping = { @@ -3253,11 +6781,11 @@ def _create_or_update_initial( else: _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_broker_authentication_create_or_update_request( + _request = build_dataflow_graph_create_or_update_request( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, - authentication_name=authentication_name, + dataflow_profile_name=dataflow_profile_name, + dataflow_graph_name=dataflow_graph_name, subscription_id=self._config.subscription_id, content_type=content_type, api_version=self._config.api_version, @@ -3305,33 +6833,32 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - broker_name: str, - authentication_name: str, - resource: _models.BrokerAuthenticationResource, + dataflow_profile_name: str, + dataflow_graph_name: str, + resource: _models.DataflowGraphResource, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.BrokerAuthenticationResource]: - """Create a BrokerAuthenticationResource. + ) -> LROPoller[_models.DataflowGraphResource]: + """Create a DataflowGraphResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param authentication_name: Name of Instance broker authentication resource. Required. 
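# The @api_version_validation decorators above gate DataflowGraphOperations (and the other
# newly added operation groups) to api-version 2025-07-01-preview. A sketch of opting into
# that version, assuming the client accepts the usual `api_version` keyword that generated
# configurations expose; names are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.iotoperations import IoTOperationsMgmtClient

client = IoTOperationsMgmtClient(
    DefaultAzureCredential(),
    subscription_id="<subscription-id>",
    api_version="2025-07-01-preview",  # older api-versions reject the dataflow_graph methods
)
graph = client.dataflow_graph.get(
    resource_group_name="<resource-group>",
    instance_name="<instance>",
    dataflow_profile_name="<dataflow-profile>",
    dataflow_graph_name="<graph>",
)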
- :type authentication_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param dataflow_graph_name: Name of Instance dataflowEndpoint resource. Required. + :type dataflow_graph_name: str :param resource: Resource create parameters. Required. - :type resource: ~azure.mgmt.iotoperations.models.BrokerAuthenticationResource + :type resource: ~azure.mgmt.iotoperations.models.DataflowGraphResource :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns BrokerAuthenticationResource. The - BrokerAuthenticationResource is compatible with MutableMapping - :rtype: - ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerAuthenticationResource] + :return: An instance of LROPoller that returns DataflowGraphResource. The DataflowGraphResource + is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowGraphResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -3340,33 +6867,32 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - broker_name: str, - authentication_name: str, + dataflow_profile_name: str, + dataflow_graph_name: str, resource: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.BrokerAuthenticationResource]: - """Create a BrokerAuthenticationResource. + ) -> LROPoller[_models.DataflowGraphResource]: + """Create a DataflowGraphResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param authentication_name: Name of Instance broker authentication resource. Required. - :type authentication_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param dataflow_graph_name: Name of Instance dataflowEndpoint resource. Required. + :type dataflow_graph_name: str :param resource: Resource create parameters. Required. :type resource: JSON :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns BrokerAuthenticationResource. The - BrokerAuthenticationResource is compatible with MutableMapping - :rtype: - ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerAuthenticationResource] + :return: An instance of LROPoller that returns DataflowGraphResource. The DataflowGraphResource + is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowGraphResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -3375,72 +6901,84 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - broker_name: str, - authentication_name: str, + dataflow_profile_name: str, + dataflow_graph_name: str, resource: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.BrokerAuthenticationResource]: - """Create a BrokerAuthenticationResource. + ) -> LROPoller[_models.DataflowGraphResource]: + """Create a DataflowGraphResource. 
:param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param authentication_name: Name of Instance broker authentication resource. Required. - :type authentication_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param dataflow_graph_name: Name of Instance dataflowEndpoint resource. Required. + :type dataflow_graph_name: str :param resource: Resource create parameters. Required. :type resource: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns BrokerAuthenticationResource. The - BrokerAuthenticationResource is compatible with MutableMapping - :rtype: - ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerAuthenticationResource] + :return: An instance of LROPoller that returns DataflowGraphResource. The DataflowGraphResource + is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowGraphResource] :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "dataflow_profile_name", + "dataflow_graph_name", + "content_type", + "accept", + ] + }, + ) def begin_create_or_update( self, resource_group_name: str, instance_name: str, - broker_name: str, - authentication_name: str, - resource: Union[_models.BrokerAuthenticationResource, JSON, IO[bytes]], + dataflow_profile_name: str, + dataflow_graph_name: str, + resource: Union[_models.DataflowGraphResource, JSON, IO[bytes]], **kwargs: Any - ) -> LROPoller[_models.BrokerAuthenticationResource]: - """Create a BrokerAuthenticationResource. + ) -> LROPoller[_models.DataflowGraphResource]: + """Create a DataflowGraphResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param authentication_name: Name of Instance broker authentication resource. Required. - :type authentication_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param dataflow_graph_name: Name of Instance dataflowEndpoint resource. Required. + :type dataflow_graph_name: str :param resource: Resource create parameters. Is one of the following types: - BrokerAuthenticationResource, JSON, IO[bytes] Required. - :type resource: ~azure.mgmt.iotoperations.models.BrokerAuthenticationResource or JSON or - IO[bytes] - :return: An instance of LROPoller that returns BrokerAuthenticationResource. The - BrokerAuthenticationResource is compatible with MutableMapping - :rtype: - ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerAuthenticationResource] + DataflowGraphResource, JSON, IO[bytes] Required. 
+ :type resource: ~azure.mgmt.iotoperations.models.DataflowGraphResource or JSON or IO[bytes] + :return: An instance of LROPoller that returns DataflowGraphResource. The DataflowGraphResource + is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowGraphResource] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.BrokerAuthenticationResource] = kwargs.pop("cls", None) + cls: ClsType[_models.DataflowGraphResource] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) @@ -3448,8 +6986,8 @@ def begin_create_or_update( raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, - authentication_name=authentication_name, + dataflow_profile_name=dataflow_profile_name, + dataflow_graph_name=dataflow_graph_name, resource=resource, content_type=content_type, cls=lambda x, y, z: x, @@ -3462,7 +7000,7 @@ def begin_create_or_update( def get_long_running_output(pipeline_response): response = pipeline_response.http_response - deserialized = _deserialize(_models.BrokerAuthenticationResource, response.json()) + deserialized = _deserialize(_models.DataflowGraphResource, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -3480,18 +7018,37 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller[_models.BrokerAuthenticationResource].from_continuation_token( + return LROPoller[_models.DataflowGraphResource].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller[_models.BrokerAuthenticationResource]( + return LROPoller[_models.DataflowGraphResource]( self._client, raw_result, get_long_running_output, polling_method # type: ignore ) + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "dataflow_profile_name", + "dataflow_graph_name", + "accept", + ] + }, + ) def _delete_initial( - self, resource_group_name: str, instance_name: str, broker_name: str, authentication_name: str, **kwargs: Any + self, + resource_group_name: str, + instance_name: str, + dataflow_profile_name: str, + dataflow_graph_name: str, + **kwargs: Any ) -> Iterator[bytes]: error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -3506,11 +7063,11 @@ def _delete_initial( cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - _request = build_broker_authentication_delete_request( + _request = build_dataflow_graph_delete_request( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, - authentication_name=authentication_name, + dataflow_profile_name=dataflow_profile_name, + dataflow_graph_name=dataflow_graph_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -3550,20 +7107,39 @@ def _delete_initial( return deserialized # 
type: ignore @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "dataflow_profile_name", + "dataflow_graph_name", + "accept", + ] + }, + ) def begin_delete( - self, resource_group_name: str, instance_name: str, broker_name: str, authentication_name: str, **kwargs: Any + self, + resource_group_name: str, + instance_name: str, + dataflow_profile_name: str, + dataflow_graph_name: str, + **kwargs: Any ) -> LROPoller[None]: - """Delete a BrokerAuthenticationResource. + """Delete a DataflowGraphResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param authentication_name: Name of Instance broker authentication resource. Required. - :type authentication_name: str + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. + :type dataflow_profile_name: str + :param dataflow_graph_name: Name of Instance dataflowEndpoint resource. Required. + :type dataflow_graph_name: str :return: An instance of LROPoller that returns None :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -3579,8 +7155,8 @@ def begin_delete( raw_result = self._delete_initial( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, - authentication_name=authentication_name, + dataflow_profile_name=dataflow_profile_name, + dataflow_graph_name=dataflow_graph_name, cls=lambda x, y, z: x, headers=_headers, params=_params, @@ -3615,27 +7191,39 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace - def list_by_resource_group( - self, resource_group_name: str, instance_name: str, broker_name: str, **kwargs: Any - ) -> Iterable["_models.BrokerAuthenticationResource"]: - """List BrokerAuthenticationResource resources by BrokerResource. + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "dataflow_profile_name", + "accept", + ] + }, + ) + def list_by_dataflow_profile( + self, resource_group_name: str, instance_name: str, dataflow_profile_name: str, **kwargs: Any + ) -> Iterable["_models.DataflowGraphResource"]: + """List DataflowGraphResource resources by DataflowProfileResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :return: An iterator like instance of BrokerAuthenticationResource - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.BrokerAuthenticationResource] + :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. 
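# Sketch of listing graphs under a dataflow profile and deleting one, matching the
# list_by_dataflow_profile and begin_delete signatures above (see the earlier api-version
# note if you pin an older api-version). Names are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.iotoperations import IoTOperationsMgmtClient

client = IoTOperationsMgmtClient(DefaultAzureCredential(), subscription_id="<subscription-id>")
for graph in client.dataflow_graph.list_by_dataflow_profile(
    resource_group_name="<resource-group>",
    instance_name="<instance>",
    dataflow_profile_name="<dataflow-profile>",
):
    print(graph["name"])

client.dataflow_graph.begin_delete(
    resource_group_name="<resource-group>",
    instance_name="<instance>",
    dataflow_profile_name="<dataflow-profile>",
    dataflow_graph_name="<graph>",
).result()  # returns None once the delete completes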
+ :type dataflow_profile_name: str + :return: An iterator like instance of DataflowGraphResource + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.DataflowGraphResource] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.BrokerAuthenticationResource]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.DataflowGraphResource]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -3648,10 +7236,10 @@ def list_by_resource_group( def prepare_request(next_link=None): if not next_link: - _request = build_broker_authentication_list_by_resource_group_request( + _request = build_dataflow_graph_list_by_dataflow_profile_request( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, + dataflow_profile_name=dataflow_profile_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -3688,7 +7276,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.BrokerAuthenticationResource], deserialized["value"]) + list_of_elem = _deserialize(List[_models.DataflowGraphResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -3712,41 +7300,52 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) -class BrokerAuthorizationOperations: +class RegistryEndpointOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through :class:`~azure.mgmt.iotoperations.IoTOperationsMgmtClient`'s - :attr:`broker_authorization` attribute. + :attr:`registry_endpoint` attribute. """ - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "registry_endpoint_name", + "accept", + ] + }, + ) def get( - self, resource_group_name: str, instance_name: str, broker_name: str, authorization_name: str, **kwargs: Any - ) -> _models.BrokerAuthorizationResource: - """Get a BrokerAuthorizationResource. + self, resource_group_name: str, instance_name: str, registry_endpoint_name: str, **kwargs: Any + ) -> _models.RegistryEndpointResource: + """Get a RegistryEndpointResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. 
:type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param authorization_name: Name of Instance broker authorization resource. Required. - :type authorization_name: str - :return: BrokerAuthorizationResource. The BrokerAuthorizationResource is compatible with + :param registry_endpoint_name: Name of RegistryEndpoint resource. Required. + :type registry_endpoint_name: str + :return: RegistryEndpointResource. The RegistryEndpointResource is compatible with MutableMapping - :rtype: ~azure.mgmt.iotoperations.models.BrokerAuthorizationResource + :rtype: ~azure.mgmt.iotoperations.models.RegistryEndpointResource :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -3760,13 +7359,12 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.BrokerAuthorizationResource] = kwargs.pop("cls", None) + cls: ClsType[_models.RegistryEndpointResource] = kwargs.pop("cls", None) - _request = build_broker_authorization_get_request( + _request = build_registry_endpoint_get_request( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, - authorization_name=authorization_name, + registry_endpoint_name=registry_endpoint_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -3797,20 +7395,33 @@ def get( if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.BrokerAuthorizationResource, response.json()) + deserialized = _deserialize(_models.RegistryEndpointResource, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "registry_endpoint_name", + "content_type", + "accept", + ] + }, + ) def _create_or_update_initial( self, resource_group_name: str, instance_name: str, - broker_name: str, - authorization_name: str, - resource: Union[_models.BrokerAuthorizationResource, JSON, IO[bytes]], + registry_endpoint_name: str, + resource: Union[_models.RegistryEndpointResource, JSON, IO[bytes]], **kwargs: Any ) -> Iterator[bytes]: error_map: MutableMapping = { @@ -3834,11 +7445,10 @@ def _create_or_update_initial( else: _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_broker_authorization_create_or_update_request( + _request = build_registry_endpoint_create_or_update_request( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, - authorization_name=authorization_name, + registry_endpoint_name=registry_endpoint_name, subscription_id=self._config.subscription_id, content_type=content_type, api_version=self._config.api_version, @@ -3886,33 +7496,30 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - broker_name: str, - authorization_name: str, - resource: _models.BrokerAuthorizationResource, + registry_endpoint_name: str, + resource: _models.RegistryEndpointResource, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.BrokerAuthorizationResource]: - """Create a BrokerAuthorizationResource. 
+ ) -> LROPoller[_models.RegistryEndpointResource]: + """Create a RegistryEndpointResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param authorization_name: Name of Instance broker authorization resource. Required. - :type authorization_name: str + :param registry_endpoint_name: Name of RegistryEndpoint resource. Required. + :type registry_endpoint_name: str :param resource: Resource create parameters. Required. - :type resource: ~azure.mgmt.iotoperations.models.BrokerAuthorizationResource + :type resource: ~azure.mgmt.iotoperations.models.RegistryEndpointResource :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns BrokerAuthorizationResource. The - BrokerAuthorizationResource is compatible with MutableMapping + :return: An instance of LROPoller that returns RegistryEndpointResource. The + RegistryEndpointResource is compatible with MutableMapping :rtype: - ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerAuthorizationResource] + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.RegistryEndpointResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -3921,33 +7528,30 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - broker_name: str, - authorization_name: str, + registry_endpoint_name: str, resource: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.BrokerAuthorizationResource]: - """Create a BrokerAuthorizationResource. + ) -> LROPoller[_models.RegistryEndpointResource]: + """Create a RegistryEndpointResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param authorization_name: Name of Instance broker authorization resource. Required. - :type authorization_name: str + :param registry_endpoint_name: Name of RegistryEndpoint resource. Required. + :type registry_endpoint_name: str :param resource: Resource create parameters. Required. :type resource: JSON :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns BrokerAuthorizationResource. The - BrokerAuthorizationResource is compatible with MutableMapping + :return: An instance of LROPoller that returns RegistryEndpointResource. 
The + RegistryEndpointResource is compatible with MutableMapping :rtype: - ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerAuthorizationResource] + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.RegistryEndpointResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -3956,72 +7560,79 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - broker_name: str, - authorization_name: str, + registry_endpoint_name: str, resource: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.BrokerAuthorizationResource]: - """Create a BrokerAuthorizationResource. + ) -> LROPoller[_models.RegistryEndpointResource]: + """Create a RegistryEndpointResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param authorization_name: Name of Instance broker authorization resource. Required. - :type authorization_name: str + :param registry_endpoint_name: Name of RegistryEndpoint resource. Required. + :type registry_endpoint_name: str :param resource: Resource create parameters. Required. :type resource: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns BrokerAuthorizationResource. The - BrokerAuthorizationResource is compatible with MutableMapping + :return: An instance of LROPoller that returns RegistryEndpointResource. The + RegistryEndpointResource is compatible with MutableMapping :rtype: - ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerAuthorizationResource] + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.RegistryEndpointResource] :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "registry_endpoint_name", + "content_type", + "accept", + ] + }, + ) def begin_create_or_update( self, resource_group_name: str, instance_name: str, - broker_name: str, - authorization_name: str, - resource: Union[_models.BrokerAuthorizationResource, JSON, IO[bytes]], + registry_endpoint_name: str, + resource: Union[_models.RegistryEndpointResource, JSON, IO[bytes]], **kwargs: Any - ) -> LROPoller[_models.BrokerAuthorizationResource]: - """Create a BrokerAuthorizationResource. + ) -> LROPoller[_models.RegistryEndpointResource]: + """Create a RegistryEndpointResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param authorization_name: Name of Instance broker authorization resource. Required. - :type authorization_name: str + :param registry_endpoint_name: Name of RegistryEndpoint resource. Required. + :type registry_endpoint_name: str :param resource: Resource create parameters. Is one of the following types: - BrokerAuthorizationResource, JSON, IO[bytes] Required. 
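# Sketch for the RegistryEndpoint create LRO above. begin_create_or_update accepts a
# RegistryEndpointResource model, a plain JSON dict, or an IO[bytes] stream; a dict is the
# lightest option. The body below is a placeholder, not a complete RegistryEndpoint definition.
from azure.identity import DefaultAzureCredential
from azure.mgmt.iotoperations import IoTOperationsMgmtClient

client = IoTOperationsMgmtClient(DefaultAzureCredential(), subscription_id="<subscription-id>")
poller = client.registry_endpoint.begin_create_or_update(
    resource_group_name="<resource-group>",
    instance_name="<instance>",
    registry_endpoint_name="<registry-endpoint>",
    resource={"properties": {}},  # placeholder JSON body
)
registry_endpoint = poller.result()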
- :type resource: ~azure.mgmt.iotoperations.models.BrokerAuthorizationResource or JSON or - IO[bytes] - :return: An instance of LROPoller that returns BrokerAuthorizationResource. The - BrokerAuthorizationResource is compatible with MutableMapping + RegistryEndpointResource, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.iotoperations.models.RegistryEndpointResource or JSON or IO[bytes] + :return: An instance of LROPoller that returns RegistryEndpointResource. The + RegistryEndpointResource is compatible with MutableMapping :rtype: - ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.BrokerAuthorizationResource] + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.RegistryEndpointResource] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.BrokerAuthorizationResource] = kwargs.pop("cls", None) + cls: ClsType[_models.RegistryEndpointResource] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) @@ -4029,8 +7640,7 @@ def begin_create_or_update( raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, - authorization_name=authorization_name, + registry_endpoint_name=registry_endpoint_name, resource=resource, content_type=content_type, cls=lambda x, y, z: x, @@ -4043,7 +7653,7 @@ def begin_create_or_update( def get_long_running_output(pipeline_response): response = pipeline_response.http_response - deserialized = _deserialize(_models.BrokerAuthorizationResource, response.json()) + deserialized = _deserialize(_models.RegistryEndpointResource, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -4061,18 +7671,31 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller[_models.BrokerAuthorizationResource].from_continuation_token( + return LROPoller[_models.RegistryEndpointResource].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller[_models.BrokerAuthorizationResource]( + return LROPoller[_models.RegistryEndpointResource]( self._client, raw_result, get_long_running_output, polling_method # type: ignore ) + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "registry_endpoint_name", + "accept", + ] + }, + ) def _delete_initial( - self, resource_group_name: str, instance_name: str, broker_name: str, authorization_name: str, **kwargs: Any + self, resource_group_name: str, instance_name: str, registry_endpoint_name: str, **kwargs: Any ) -> Iterator[bytes]: error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -4087,11 +7710,10 @@ def _delete_initial( cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - _request = build_broker_authorization_delete_request( + _request = build_registry_endpoint_delete_request( resource_group_name=resource_group_name, instance_name=instance_name, - 
broker_name=broker_name, - authorization_name=authorization_name, + registry_endpoint_name=registry_endpoint_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -4131,20 +7753,31 @@ def _delete_initial( return deserialized # type: ignore @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "registry_endpoint_name", + "accept", + ] + }, + ) def begin_delete( - self, resource_group_name: str, instance_name: str, broker_name: str, authorization_name: str, **kwargs: Any + self, resource_group_name: str, instance_name: str, registry_endpoint_name: str, **kwargs: Any ) -> LROPoller[None]: - """Delete a BrokerAuthorizationResource. + """Delete a RegistryEndpointResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. - :type broker_name: str - :param authorization_name: Name of Instance broker authorization resource. Required. - :type authorization_name: str + :param registry_endpoint_name: Name of RegistryEndpoint resource. Required. + :type registry_endpoint_name: str :return: An instance of LROPoller that returns None :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -4160,8 +7793,7 @@ def begin_delete( raw_result = self._delete_initial( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, - authorization_name=authorization_name, + registry_endpoint_name=registry_endpoint_name, cls=lambda x, y, z: x, headers=_headers, params=_params, @@ -4196,27 +7828,30 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace - def list_by_resource_group( - self, resource_group_name: str, instance_name: str, broker_name: str, **kwargs: Any - ) -> Iterable["_models.BrokerAuthorizationResource"]: - """List BrokerAuthorizationResource resources by BrokerResource. + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": ["api_version", "subscription_id", "resource_group_name", "instance_name", "accept"] + }, + ) + def list_by_instance_resource( + self, resource_group_name: str, instance_name: str, **kwargs: Any + ) -> Iterable["_models.RegistryEndpointResource"]: + """List RegistryEndpointResource resources by InstanceResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param broker_name: Name of broker. Required. 
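# Sketch of enumerating registry endpoints on an instance and deleting one, per the
# list_by_instance_resource and begin_delete signatures above. Names are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.iotoperations import IoTOperationsMgmtClient

client = IoTOperationsMgmtClient(DefaultAzureCredential(), subscription_id="<subscription-id>")
for endpoint in client.registry_endpoint.list_by_instance_resource(
    resource_group_name="<resource-group>", instance_name="<instance>"
):
    print(endpoint["name"])

client.registry_endpoint.begin_delete(
    resource_group_name="<resource-group>",
    instance_name="<instance>",
    registry_endpoint_name="<registry-endpoint>",
).result()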
- :type broker_name: str - :return: An iterator like instance of BrokerAuthorizationResource - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.BrokerAuthorizationResource] + :return: An iterator like instance of RegistryEndpointResource + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.RegistryEndpointResource] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.BrokerAuthorizationResource]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.RegistryEndpointResource]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -4229,10 +7864,9 @@ def list_by_resource_group( def prepare_request(next_link=None): if not next_link: - _request = build_broker_authorization_list_by_resource_group_request( + _request = build_registry_endpoint_list_by_instance_resource_request( resource_group_name=resource_group_name, instance_name=instance_name, - broker_name=broker_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -4269,7 +7903,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.BrokerAuthorizationResource], deserialized["value"]) + list_of_elem = _deserialize(List[_models.RegistryEndpointResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -4293,38 +7927,52 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) -class DataflowProfileOperations: +class AkriConnectorTemplateOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through :class:`~azure.mgmt.iotoperations.IoTOperationsMgmtClient`'s - :attr:`dataflow_profile` attribute. + :attr:`akri_connector_template` attribute. """ - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "accept", + ] + }, + ) def get( - self, resource_group_name: str, instance_name: str, dataflow_profile_name: str, **kwargs: Any - ) -> _models.DataflowProfileResource: - """Get a DataflowProfileResource. + self, resource_group_name: str, instance_name: str, akri_connector_template_name: str, **kwargs: Any + ) -> _models.AkriConnectorTemplateResource: + """Get a AkriConnectorTemplateResource. 
:param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. - :type dataflow_profile_name: str - :return: DataflowProfileResource. The DataflowProfileResource is compatible with MutableMapping - :rtype: ~azure.mgmt.iotoperations.models.DataflowProfileResource + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :return: AkriConnectorTemplateResource. The AkriConnectorTemplateResource is compatible with + MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.AkriConnectorTemplateResource :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -4338,12 +7986,12 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.DataflowProfileResource] = kwargs.pop("cls", None) + cls: ClsType[_models.AkriConnectorTemplateResource] = kwargs.pop("cls", None) - _request = build_dataflow_profile_get_request( + _request = build_akri_connector_template_get_request( resource_group_name=resource_group_name, instance_name=instance_name, - dataflow_profile_name=dataflow_profile_name, + akri_connector_template_name=akri_connector_template_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -4374,19 +8022,33 @@ def get( if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.DataflowProfileResource, response.json()) + deserialized = _deserialize(_models.AkriConnectorTemplateResource, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "content_type", + "accept", + ] + }, + ) def _create_or_update_initial( self, resource_group_name: str, instance_name: str, - dataflow_profile_name: str, - resource: Union[_models.DataflowProfileResource, JSON, IO[bytes]], + akri_connector_template_name: str, + resource: Union[_models.AkriConnectorTemplateResource, JSON, IO[bytes]], **kwargs: Any ) -> Iterator[bytes]: error_map: MutableMapping = { @@ -4410,10 +8072,10 @@ def _create_or_update_initial( else: _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_dataflow_profile_create_or_update_request( + _request = build_akri_connector_template_create_or_update_request( resource_group_name=resource_group_name, instance_name=instance_name, - dataflow_profile_name=dataflow_profile_name, + akri_connector_template_name=akri_connector_template_name, subscription_id=self._config.subscription_id, content_type=content_type, api_version=self._config.api_version, @@ -4461,29 +8123,30 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - dataflow_profile_name: str, - resource: _models.DataflowProfileResource, + akri_connector_template_name: str, + resource: _models.AkriConnectorTemplateResource, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.DataflowProfileResource]: - """Create a DataflowProfileResource. 
+ ) -> LROPoller[_models.AkriConnectorTemplateResource]: + """Create a AkriConnectorTemplateResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. - :type dataflow_profile_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str :param resource: Resource create parameters. Required. - :type resource: ~azure.mgmt.iotoperations.models.DataflowProfileResource + :type resource: ~azure.mgmt.iotoperations.models.AkriConnectorTemplateResource :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns DataflowProfileResource. The - DataflowProfileResource is compatible with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowProfileResource] + :return: An instance of LROPoller that returns AkriConnectorTemplateResource. The + AkriConnectorTemplateResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.AkriConnectorTemplateResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -4492,29 +8155,30 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - dataflow_profile_name: str, + akri_connector_template_name: str, resource: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.DataflowProfileResource]: - """Create a DataflowProfileResource. + ) -> LROPoller[_models.AkriConnectorTemplateResource]: + """Create a AkriConnectorTemplateResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. - :type dataflow_profile_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str :param resource: Resource create parameters. Required. :type resource: JSON :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns DataflowProfileResource. The - DataflowProfileResource is compatible with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowProfileResource] + :return: An instance of LROPoller that returns AkriConnectorTemplateResource. The + AkriConnectorTemplateResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.AkriConnectorTemplateResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -4523,63 +8187,80 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - dataflow_profile_name: str, + akri_connector_template_name: str, resource: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.DataflowProfileResource]: - """Create a DataflowProfileResource. 
+ ) -> LROPoller[_models.AkriConnectorTemplateResource]: + """Create a AkriConnectorTemplateResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. - :type dataflow_profile_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str :param resource: Resource create parameters. Required. :type resource: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns DataflowProfileResource. The - DataflowProfileResource is compatible with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowProfileResource] + :return: An instance of LROPoller that returns AkriConnectorTemplateResource. The + AkriConnectorTemplateResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.AkriConnectorTemplateResource] :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "content_type", + "accept", + ] + }, + ) def begin_create_or_update( self, resource_group_name: str, instance_name: str, - dataflow_profile_name: str, - resource: Union[_models.DataflowProfileResource, JSON, IO[bytes]], + akri_connector_template_name: str, + resource: Union[_models.AkriConnectorTemplateResource, JSON, IO[bytes]], **kwargs: Any - ) -> LROPoller[_models.DataflowProfileResource]: - """Create a DataflowProfileResource. + ) -> LROPoller[_models.AkriConnectorTemplateResource]: + """Create a AkriConnectorTemplateResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. - :type dataflow_profile_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str :param resource: Resource create parameters. Is one of the following types: - DataflowProfileResource, JSON, IO[bytes] Required. - :type resource: ~azure.mgmt.iotoperations.models.DataflowProfileResource or JSON or IO[bytes] - :return: An instance of LROPoller that returns DataflowProfileResource. The - DataflowProfileResource is compatible with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowProfileResource] + AkriConnectorTemplateResource, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.iotoperations.models.AkriConnectorTemplateResource or JSON or + IO[bytes] + :return: An instance of LROPoller that returns AkriConnectorTemplateResource. 
The + AkriConnectorTemplateResource is compatible with MutableMapping + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.AkriConnectorTemplateResource] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DataflowProfileResource] = kwargs.pop("cls", None) + cls: ClsType[_models.AkriConnectorTemplateResource] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) @@ -4587,7 +8268,7 @@ def begin_create_or_update( raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, instance_name=instance_name, - dataflow_profile_name=dataflow_profile_name, + akri_connector_template_name=akri_connector_template_name, resource=resource, content_type=content_type, cls=lambda x, y, z: x, @@ -4600,7 +8281,7 @@ def begin_create_or_update( def get_long_running_output(pipeline_response): response = pipeline_response.http_response - deserialized = _deserialize(_models.DataflowProfileResource, response.json()) + deserialized = _deserialize(_models.AkriConnectorTemplateResource, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -4618,18 +8299,31 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller[_models.DataflowProfileResource].from_continuation_token( + return LROPoller[_models.AkriConnectorTemplateResource].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller[_models.DataflowProfileResource]( + return LROPoller[_models.AkriConnectorTemplateResource]( self._client, raw_result, get_long_running_output, polling_method # type: ignore ) + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "accept", + ] + }, + ) def _delete_initial( - self, resource_group_name: str, instance_name: str, dataflow_profile_name: str, **kwargs: Any + self, resource_group_name: str, instance_name: str, akri_connector_template_name: str, **kwargs: Any ) -> Iterator[bytes]: error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -4644,10 +8338,10 @@ def _delete_initial( cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - _request = build_dataflow_profile_delete_request( + _request = build_akri_connector_template_delete_request( resource_group_name=resource_group_name, instance_name=instance_name, - dataflow_profile_name=dataflow_profile_name, + akri_connector_template_name=akri_connector_template_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -4687,18 +8381,31 @@ def _delete_initial( return deserialized # type: ignore @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "accept", + ] + }, + ) def 
begin_delete( - self, resource_group_name: str, instance_name: str, dataflow_profile_name: str, **kwargs: Any + self, resource_group_name: str, instance_name: str, akri_connector_template_name: str, **kwargs: Any ) -> LROPoller[None]: - """Delete a DataflowProfileResource. + """Delete a AkriConnectorTemplateResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. - :type dataflow_profile_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str :return: An instance of LROPoller that returns None :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -4714,7 +8421,7 @@ def begin_delete( raw_result = self._delete_initial( resource_group_name=resource_group_name, instance_name=instance_name, - dataflow_profile_name=dataflow_profile_name, + akri_connector_template_name=akri_connector_template_name, cls=lambda x, y, z: x, headers=_headers, params=_params, @@ -4749,24 +8456,31 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace - def list_by_resource_group( + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": ["api_version", "subscription_id", "resource_group_name", "instance_name", "accept"] + }, + ) + def list_by_instance_resource( self, resource_group_name: str, instance_name: str, **kwargs: Any - ) -> Iterable["_models.DataflowProfileResource"]: - """List DataflowProfileResource resources by InstanceResource. + ) -> Iterable["_models.AkriConnectorTemplateResource"]: + """List AkriConnectorTemplateResource resources by InstanceResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. 
:type instance_name: str - :return: An iterator like instance of DataflowProfileResource - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.DataflowProfileResource] + :return: An iterator like instance of AkriConnectorTemplateResource + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.AkriConnectorTemplateResource] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.DataflowProfileResource]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.AkriConnectorTemplateResource]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -4779,7 +8493,7 @@ def list_by_resource_group( def prepare_request(next_link=None): if not next_link: - _request = build_dataflow_profile_list_by_resource_group_request( + _request = build_akri_connector_template_list_by_instance_resource_request( resource_group_name=resource_group_name, instance_name=instance_name, subscription_id=self._config.subscription_id, @@ -4818,7 +8532,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.DataflowProfileResource], deserialized["value"]) + list_of_elem = _deserialize(List[_models.AkriConnectorTemplateResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -4842,45 +8556,59 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) -class DataflowOperations: +class AkriConnectorOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through :class:`~azure.mgmt.iotoperations.IoTOperationsMgmtClient`'s - :attr:`dataflow` attribute. + :attr:`akri_connector` attribute. """ - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "akri_connector_name", + "accept", + ] + }, + ) def get( self, resource_group_name: str, instance_name: str, - dataflow_profile_name: str, - dataflow_name: str, + akri_connector_template_name: str, + akri_connector_name: str, **kwargs: Any - ) -> _models.DataflowResource: - """Get a DataflowResource. + ) -> _models.AkriConnectorResource: + """Get a AkriConnectorResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. 
:type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. - :type dataflow_profile_name: str - :param dataflow_name: Name of Instance dataflowProfile dataflow resource. Required. - :type dataflow_name: str - :return: DataflowResource. The DataflowResource is compatible with MutableMapping - :rtype: ~azure.mgmt.iotoperations.models.DataflowResource + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :param akri_connector_name: Name of AkriConnector resource. Required. + :type akri_connector_name: str + :return: AkriConnectorResource. The AkriConnectorResource is compatible with MutableMapping + :rtype: ~azure.mgmt.iotoperations.models.AkriConnectorResource :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -4894,13 +8622,13 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.DataflowResource] = kwargs.pop("cls", None) + cls: ClsType[_models.AkriConnectorResource] = kwargs.pop("cls", None) - _request = build_dataflow_get_request( + _request = build_akri_connector_get_request( resource_group_name=resource_group_name, instance_name=instance_name, - dataflow_profile_name=dataflow_profile_name, - dataflow_name=dataflow_name, + akri_connector_template_name=akri_connector_template_name, + akri_connector_name=akri_connector_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -4931,20 +8659,35 @@ def get( if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.DataflowResource, response.json()) + deserialized = _deserialize(_models.AkriConnectorResource, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "akri_connector_name", + "content_type", + "accept", + ] + }, + ) def _create_or_update_initial( self, resource_group_name: str, instance_name: str, - dataflow_profile_name: str, - dataflow_name: str, - resource: Union[_models.DataflowResource, JSON, IO[bytes]], + akri_connector_template_name: str, + akri_connector_name: str, + resource: Union[_models.AkriConnectorResource, JSON, IO[bytes]], **kwargs: Any ) -> Iterator[bytes]: error_map: MutableMapping = { @@ -4968,11 +8711,11 @@ def _create_or_update_initial( else: _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_dataflow_create_or_update_request( + _request = build_akri_connector_create_or_update_request( resource_group_name=resource_group_name, instance_name=instance_name, - dataflow_profile_name=dataflow_profile_name, - dataflow_name=dataflow_name, + akri_connector_template_name=akri_connector_template_name, + akri_connector_name=akri_connector_name, subscription_id=self._config.subscription_id, content_type=content_type, api_version=self._config.api_version, @@ -5020,32 +8763,32 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - dataflow_profile_name: str, - dataflow_name: str, - resource: _models.DataflowResource, + akri_connector_template_name: 
str, + akri_connector_name: str, + resource: _models.AkriConnectorResource, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.DataflowResource]: - """Create a DataflowResource. + ) -> LROPoller[_models.AkriConnectorResource]: + """Create a AkriConnectorResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. - :type dataflow_profile_name: str - :param dataflow_name: Name of Instance dataflowProfile dataflow resource. Required. - :type dataflow_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :param akri_connector_name: Name of AkriConnector resource. Required. + :type akri_connector_name: str :param resource: Resource create parameters. Required. - :type resource: ~azure.mgmt.iotoperations.models.DataflowResource + :type resource: ~azure.mgmt.iotoperations.models.AkriConnectorResource :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns DataflowResource. The DataflowResource is - compatible with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowResource] + :return: An instance of LROPoller that returns AkriConnectorResource. The AkriConnectorResource + is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.AkriConnectorResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -5054,32 +8797,32 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - dataflow_profile_name: str, - dataflow_name: str, + akri_connector_template_name: str, + akri_connector_name: str, resource: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.DataflowResource]: - """Create a DataflowResource. + ) -> LROPoller[_models.AkriConnectorResource]: + """Create a AkriConnectorResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. - :type dataflow_profile_name: str - :param dataflow_name: Name of Instance dataflowProfile dataflow resource. Required. - :type dataflow_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :param akri_connector_name: Name of AkriConnector resource. Required. + :type akri_connector_name: str :param resource: Resource create parameters. Required. :type resource: JSON :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns DataflowResource. The DataflowResource is - compatible with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowResource] + :return: An instance of LROPoller that returns AkriConnectorResource. 
The AkriConnectorResource + is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.AkriConnectorResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -5088,69 +8831,84 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - dataflow_profile_name: str, - dataflow_name: str, + akri_connector_template_name: str, + akri_connector_name: str, resource: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.DataflowResource]: - """Create a DataflowResource. + ) -> LROPoller[_models.AkriConnectorResource]: + """Create a AkriConnectorResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. - :type dataflow_profile_name: str - :param dataflow_name: Name of Instance dataflowProfile dataflow resource. Required. - :type dataflow_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :param akri_connector_name: Name of AkriConnector resource. Required. + :type akri_connector_name: str :param resource: Resource create parameters. Required. :type resource: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns DataflowResource. The DataflowResource is - compatible with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowResource] + :return: An instance of LROPoller that returns AkriConnectorResource. The AkriConnectorResource + is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.AkriConnectorResource] :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "akri_connector_name", + "content_type", + "accept", + ] + }, + ) def begin_create_or_update( self, resource_group_name: str, instance_name: str, - dataflow_profile_name: str, - dataflow_name: str, - resource: Union[_models.DataflowResource, JSON, IO[bytes]], + akri_connector_template_name: str, + akri_connector_name: str, + resource: Union[_models.AkriConnectorResource, JSON, IO[bytes]], **kwargs: Any - ) -> LROPoller[_models.DataflowResource]: - """Create a DataflowResource. + ) -> LROPoller[_models.AkriConnectorResource]: + """Create a AkriConnectorResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. - :type dataflow_profile_name: str - :param dataflow_name: Name of Instance dataflowProfile dataflow resource. Required. - :type dataflow_name: str - :param resource: Resource create parameters. Is one of the following types: DataflowResource, - JSON, IO[bytes] Required. 
- :type resource: ~azure.mgmt.iotoperations.models.DataflowResource or JSON or IO[bytes] - :return: An instance of LROPoller that returns DataflowResource. The DataflowResource is - compatible with MutableMapping - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowResource] + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :param akri_connector_name: Name of AkriConnector resource. Required. + :type akri_connector_name: str + :param resource: Resource create parameters. Is one of the following types: + AkriConnectorResource, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.iotoperations.models.AkriConnectorResource or JSON or IO[bytes] + :return: An instance of LROPoller that returns AkriConnectorResource. The AkriConnectorResource + is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.AkriConnectorResource] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DataflowResource] = kwargs.pop("cls", None) + cls: ClsType[_models.AkriConnectorResource] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) @@ -5158,8 +8916,8 @@ def begin_create_or_update( raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, instance_name=instance_name, - dataflow_profile_name=dataflow_profile_name, - dataflow_name=dataflow_name, + akri_connector_template_name=akri_connector_template_name, + akri_connector_name=akri_connector_name, resource=resource, content_type=content_type, cls=lambda x, y, z: x, @@ -5172,7 +8930,7 @@ def begin_create_or_update( def get_long_running_output(pipeline_response): response = pipeline_response.http_response - deserialized = _deserialize(_models.DataflowResource, response.json()) + deserialized = _deserialize(_models.AkriConnectorResource, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -5190,22 +8948,36 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller[_models.DataflowResource].from_continuation_token( + return LROPoller[_models.AkriConnectorResource].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller[_models.DataflowResource]( + return LROPoller[_models.AkriConnectorResource]( self._client, raw_result, get_long_running_output, polling_method # type: ignore ) + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "akri_connector_name", + "accept", + ] + }, + ) def _delete_initial( self, resource_group_name: str, instance_name: str, - dataflow_profile_name: str, - dataflow_name: str, + akri_connector_template_name: str, + akri_connector_name: str, **kwargs: Any ) -> Iterator[bytes]: error_map: MutableMapping = { @@ -5221,11 +8993,11 @@ def _delete_initial( cls: 
ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - _request = build_dataflow_delete_request( + _request = build_akri_connector_delete_request( resource_group_name=resource_group_name, instance_name=instance_name, - dataflow_profile_name=dataflow_profile_name, - dataflow_name=dataflow_name, + akri_connector_template_name=akri_connector_template_name, + akri_connector_name=akri_connector_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -5265,25 +9037,39 @@ def _delete_initial( return deserialized # type: ignore @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "akri_connector_name", + "accept", + ] + }, + ) def begin_delete( self, resource_group_name: str, instance_name: str, - dataflow_profile_name: str, - dataflow_name: str, + akri_connector_template_name: str, + akri_connector_name: str, **kwargs: Any ) -> LROPoller[None]: - """Delete a DataflowResource. + """Delete a AkriConnectorResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. - :type dataflow_profile_name: str - :param dataflow_name: Name of Instance dataflowProfile dataflow resource. Required. - :type dataflow_name: str + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :param akri_connector_name: Name of AkriConnector resource. Required. + :type akri_connector_name: str :return: An instance of LROPoller that returns None :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -5299,8 +9085,8 @@ def begin_delete( raw_result = self._delete_initial( resource_group_name=resource_group_name, instance_name=instance_name, - dataflow_profile_name=dataflow_profile_name, - dataflow_name=dataflow_name, + akri_connector_template_name=akri_connector_template_name, + akri_connector_name=akri_connector_name, cls=lambda x, y, z: x, headers=_headers, params=_params, @@ -5335,26 +9121,39 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace - def list_by_resource_group( - self, resource_group_name: str, instance_name: str, dataflow_profile_name: str, **kwargs: Any - ) -> Iterable["_models.DataflowResource"]: - """List DataflowResource resources by DataflowProfileResource. + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_connector_template_name", + "accept", + ] + }, + ) + def list_by_template( + self, resource_group_name: str, instance_name: str, akri_connector_template_name: str, **kwargs: Any + ) -> Iterable["_models.AkriConnectorResource"]: + """List AkriConnectorResource resources by AkriConnectorTemplateResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. 
:type instance_name: str - :param dataflow_profile_name: Name of Instance dataflowProfile resource. Required. - :type dataflow_profile_name: str - :return: An iterator like instance of DataflowResource - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.DataflowResource] + :param akri_connector_template_name: Name of AkriConnectorTemplate resource. Required. + :type akri_connector_template_name: str + :return: An iterator like instance of AkriConnectorResource + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.AkriConnectorResource] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.DataflowResource]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.AkriConnectorResource]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -5367,10 +9166,10 @@ def list_by_resource_group( def prepare_request(next_link=None): if not next_link: - _request = build_dataflow_list_by_resource_group_request( + _request = build_akri_connector_list_by_template_request( resource_group_name=resource_group_name, instance_name=instance_name, - dataflow_profile_name=dataflow_profile_name, + akri_connector_template_name=akri_connector_template_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -5407,7 +9206,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.DataflowResource], deserialized["value"]) + list_of_elem = _deserialize(List[_models.AkriConnectorResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) @@ -5431,39 +9230,52 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) -class DataflowEndpointOperations: +class AkriDiscoveryHandlerOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through :class:`~azure.mgmt.iotoperations.IoTOperationsMgmtClient`'s - :attr:`dataflow_endpoint` attribute. + :attr:`akri_discovery_handler` attribute. 
""" - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: IoTOperationsMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_discovery_handler_name", + "accept", + ] + }, + ) def get( - self, resource_group_name: str, instance_name: str, dataflow_endpoint_name: str, **kwargs: Any - ) -> _models.DataflowEndpointResource: - """Get a DataflowEndpointResource. + self, resource_group_name: str, instance_name: str, akri_discovery_handler_name: str, **kwargs: Any + ) -> _models.AkriDiscoveryHandlerResource: + """Get a AkriDiscoveryHandlerResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_endpoint_name: Name of Instance dataflowEndpoint resource. Required. - :type dataflow_endpoint_name: str - :return: DataflowEndpointResource. The DataflowEndpointResource is compatible with + :param akri_discovery_handler_name: Name of AkriDiscoveryHandler resource. Required. + :type akri_discovery_handler_name: str + :return: AkriDiscoveryHandlerResource. 
The AkriDiscoveryHandlerResource is compatible with MutableMapping - :rtype: ~azure.mgmt.iotoperations.models.DataflowEndpointResource + :rtype: ~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerResource :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -5477,12 +9289,12 @@ def get( _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models.DataflowEndpointResource] = kwargs.pop("cls", None) + cls: ClsType[_models.AkriDiscoveryHandlerResource] = kwargs.pop("cls", None) - _request = build_dataflow_endpoint_get_request( + _request = build_akri_discovery_handler_get_request( resource_group_name=resource_group_name, instance_name=instance_name, - dataflow_endpoint_name=dataflow_endpoint_name, + akri_discovery_handler_name=akri_discovery_handler_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -5513,19 +9325,33 @@ def get( if _stream: deserialized = response.iter_bytes() else: - deserialized = _deserialize(_models.DataflowEndpointResource, response.json()) + deserialized = _deserialize(_models.AkriDiscoveryHandlerResource, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_discovery_handler_name", + "content_type", + "accept", + ] + }, + ) def _create_or_update_initial( self, resource_group_name: str, instance_name: str, - dataflow_endpoint_name: str, - resource: Union[_models.DataflowEndpointResource, JSON, IO[bytes]], + akri_discovery_handler_name: str, + resource: Union[_models.AkriDiscoveryHandlerResource, JSON, IO[bytes]], **kwargs: Any ) -> Iterator[bytes]: error_map: MutableMapping = { @@ -5549,10 +9375,10 @@ def _create_or_update_initial( else: _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_dataflow_endpoint_create_or_update_request( + _request = build_akri_discovery_handler_create_or_update_request( resource_group_name=resource_group_name, instance_name=instance_name, - dataflow_endpoint_name=dataflow_endpoint_name, + akri_discovery_handler_name=akri_discovery_handler_name, subscription_id=self._config.subscription_id, content_type=content_type, api_version=self._config.api_version, @@ -5600,30 +9426,30 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - dataflow_endpoint_name: str, - resource: _models.DataflowEndpointResource, + akri_discovery_handler_name: str, + resource: _models.AkriDiscoveryHandlerResource, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.DataflowEndpointResource]: - """Create a DataflowEndpointResource. + ) -> LROPoller[_models.AkriDiscoveryHandlerResource]: + """Create a AkriDiscoveryHandlerResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_endpoint_name: Name of Instance dataflowEndpoint resource. Required. - :type dataflow_endpoint_name: str + :param akri_discovery_handler_name: Name of AkriDiscoveryHandler resource. Required. + :type akri_discovery_handler_name: str :param resource: Resource create parameters. Required. 
- :type resource: ~azure.mgmt.iotoperations.models.DataflowEndpointResource + :type resource: ~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerResource :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns DataflowEndpointResource. The - DataflowEndpointResource is compatible with MutableMapping + :return: An instance of LROPoller that returns AkriDiscoveryHandlerResource. The + AkriDiscoveryHandlerResource is compatible with MutableMapping :rtype: - ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowEndpointResource] + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -5632,30 +9458,30 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - dataflow_endpoint_name: str, + akri_discovery_handler_name: str, resource: JSON, *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.DataflowEndpointResource]: - """Create a DataflowEndpointResource. + ) -> LROPoller[_models.AkriDiscoveryHandlerResource]: + """Create a AkriDiscoveryHandlerResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_endpoint_name: Name of Instance dataflowEndpoint resource. Required. - :type dataflow_endpoint_name: str + :param akri_discovery_handler_name: Name of AkriDiscoveryHandler resource. Required. + :type akri_discovery_handler_name: str :param resource: Resource create parameters. Required. :type resource: JSON :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns DataflowEndpointResource. The - DataflowEndpointResource is compatible with MutableMapping + :return: An instance of LROPoller that returns AkriDiscoveryHandlerResource. The + AkriDiscoveryHandlerResource is compatible with MutableMapping :rtype: - ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowEndpointResource] + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerResource] :raises ~azure.core.exceptions.HttpResponseError: """ @@ -5664,65 +9490,80 @@ def begin_create_or_update( self, resource_group_name: str, instance_name: str, - dataflow_endpoint_name: str, + akri_discovery_handler_name: str, resource: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> LROPoller[_models.DataflowEndpointResource]: - """Create a DataflowEndpointResource. + ) -> LROPoller[_models.AkriDiscoveryHandlerResource]: + """Create a AkriDiscoveryHandlerResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_endpoint_name: Name of Instance dataflowEndpoint resource. Required. - :type dataflow_endpoint_name: str + :param akri_discovery_handler_name: Name of AkriDiscoveryHandler resource. Required. + :type akri_discovery_handler_name: str :param resource: Resource create parameters. Required. :type resource: IO[bytes] :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: An instance of LROPoller that returns DataflowEndpointResource. The - DataflowEndpointResource is compatible with MutableMapping + :return: An instance of LROPoller that returns AkriDiscoveryHandlerResource. The + AkriDiscoveryHandlerResource is compatible with MutableMapping :rtype: - ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowEndpointResource] + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerResource] :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_discovery_handler_name", + "content_type", + "accept", + ] + }, + ) def begin_create_or_update( self, resource_group_name: str, instance_name: str, - dataflow_endpoint_name: str, - resource: Union[_models.DataflowEndpointResource, JSON, IO[bytes]], + akri_discovery_handler_name: str, + resource: Union[_models.AkriDiscoveryHandlerResource, JSON, IO[bytes]], **kwargs: Any - ) -> LROPoller[_models.DataflowEndpointResource]: - """Create a DataflowEndpointResource. + ) -> LROPoller[_models.AkriDiscoveryHandlerResource]: + """Create a AkriDiscoveryHandlerResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_endpoint_name: Name of Instance dataflowEndpoint resource. Required. - :type dataflow_endpoint_name: str + :param akri_discovery_handler_name: Name of AkriDiscoveryHandler resource. Required. + :type akri_discovery_handler_name: str :param resource: Resource create parameters. Is one of the following types: - DataflowEndpointResource, JSON, IO[bytes] Required. - :type resource: ~azure.mgmt.iotoperations.models.DataflowEndpointResource or JSON or IO[bytes] - :return: An instance of LROPoller that returns DataflowEndpointResource. The - DataflowEndpointResource is compatible with MutableMapping + AkriDiscoveryHandlerResource, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerResource or JSON or + IO[bytes] + :return: An instance of LROPoller that returns AkriDiscoveryHandlerResource. 
The + AkriDiscoveryHandlerResource is compatible with MutableMapping :rtype: - ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.DataflowEndpointResource] + ~azure.core.polling.LROPoller[~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerResource] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.DataflowEndpointResource] = kwargs.pop("cls", None) + cls: ClsType[_models.AkriDiscoveryHandlerResource] = kwargs.pop("cls", None) polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token: Optional[str] = kwargs.pop("continuation_token", None) @@ -5730,7 +9571,7 @@ def begin_create_or_update( raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, instance_name=instance_name, - dataflow_endpoint_name=dataflow_endpoint_name, + akri_discovery_handler_name=akri_discovery_handler_name, resource=resource, content_type=content_type, cls=lambda x, y, z: x, @@ -5743,7 +9584,7 @@ def begin_create_or_update( def get_long_running_output(pipeline_response): response = pipeline_response.http_response - deserialized = _deserialize(_models.DataflowEndpointResource, response.json()) + deserialized = _deserialize(_models.AkriDiscoveryHandlerResource, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized @@ -5761,18 +9602,31 @@ def get_long_running_output(pipeline_response): else: polling_method = polling if cont_token: - return LROPoller[_models.DataflowEndpointResource].from_continuation_token( + return LROPoller[_models.AkriDiscoveryHandlerResource].from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) - return LROPoller[_models.DataflowEndpointResource]( + return LROPoller[_models.AkriDiscoveryHandlerResource]( self._client, raw_result, get_long_running_output, polling_method # type: ignore ) + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "instance_name", + "akri_discovery_handler_name", + "accept", + ] + }, + ) def _delete_initial( - self, resource_group_name: str, instance_name: str, dataflow_endpoint_name: str, **kwargs: Any + self, resource_group_name: str, instance_name: str, akri_discovery_handler_name: str, **kwargs: Any ) -> Iterator[bytes]: error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -5787,10 +9641,10 @@ def _delete_initial( cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) - _request = build_dataflow_endpoint_delete_request( + _request = build_akri_discovery_handler_delete_request( resource_group_name=resource_group_name, instance_name=instance_name, - dataflow_endpoint_name=dataflow_endpoint_name, + akri_discovery_handler_name=akri_discovery_handler_name, subscription_id=self._config.subscription_id, api_version=self._config.api_version, headers=_headers, @@ -5830,18 +9684,31 @@ def _delete_initial( return deserialized # type: ignore @distributed_trace + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + 
"instance_name", + "akri_discovery_handler_name", + "accept", + ] + }, + ) def begin_delete( - self, resource_group_name: str, instance_name: str, dataflow_endpoint_name: str, **kwargs: Any + self, resource_group_name: str, instance_name: str, akri_discovery_handler_name: str, **kwargs: Any ) -> LROPoller[None]: - """Delete a DataflowEndpointResource. + """Delete a AkriDiscoveryHandlerResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. :type instance_name: str - :param dataflow_endpoint_name: Name of Instance dataflowEndpoint resource. Required. - :type dataflow_endpoint_name: str + :param akri_discovery_handler_name: Name of AkriDiscoveryHandler resource. Required. + :type akri_discovery_handler_name: str :return: An instance of LROPoller that returns None :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: @@ -5857,7 +9724,7 @@ def begin_delete( raw_result = self._delete_initial( resource_group_name=resource_group_name, instance_name=instance_name, - dataflow_endpoint_name=dataflow_endpoint_name, + akri_discovery_handler_name=akri_discovery_handler_name, cls=lambda x, y, z: x, headers=_headers, params=_params, @@ -5892,24 +9759,31 @@ def get_long_running_output(pipeline_response): # pylint: disable=inconsistent- return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore @distributed_trace - def list_by_resource_group( + @api_version_validation( + method_added_on="2025-07-01-preview", + params_added_on={ + "2025-07-01-preview": ["api_version", "subscription_id", "resource_group_name", "instance_name", "accept"] + }, + ) + def list_by_instance_resource( self, resource_group_name: str, instance_name: str, **kwargs: Any - ) -> Iterable["_models.DataflowEndpointResource"]: - """List DataflowEndpointResource resources by InstanceResource. + ) -> Iterable["_models.AkriDiscoveryHandlerResource"]: + """List AkriDiscoveryHandlerResource resources by InstanceResource. :param resource_group_name: The name of the resource group. The name is case insensitive. Required. :type resource_group_name: str :param instance_name: Name of instance. Required. 
:type instance_name: str - :return: An iterator like instance of DataflowEndpointResource - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.DataflowEndpointResource] + :return: An iterator like instance of AkriDiscoveryHandlerResource + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.iotoperations.models.AkriDiscoveryHandlerResource] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.DataflowEndpointResource]] = kwargs.pop("cls", None) + cls: ClsType[List[_models.AkriDiscoveryHandlerResource]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -5922,7 +9796,7 @@ def list_by_resource_group( def prepare_request(next_link=None): if not next_link: - _request = build_dataflow_endpoint_list_by_resource_group_request( + _request = build_akri_discovery_handler_list_by_instance_resource_request( resource_group_name=resource_group_name, instance_name=instance_name, subscription_id=self._config.subscription_id, @@ -5961,7 +9835,7 @@ def prepare_request(next_link=None): def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.DataflowEndpointResource], deserialized["value"]) + list_of_elem = _deserialize(List[_models.AkriDiscoveryHandlerResource], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.get("nextLink") or None, iter(list_of_elem) diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/operations/_patch.py b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/operations/_patch.py index f7dd32510333..8bcb627aa475 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/operations/_patch.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/azure/mgmt/iotoperations/operations/_patch.py @@ -1,7 +1,8 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_create_or_update_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..2c9fb070ba34 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_create_or_update_maximum_set_gen.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python akri_connector_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.akri_connector.begin_create_or_update( + resource_group_name="rgiotoperations", + instance_name="resource-name123", + akri_connector_template_name="resource-name123", + akri_connector_name="resource-name123", + resource={"extendedLocation": {"name": "qmbrfwcpwwhggszhrdjv", "type": "CustomLocation"}, "properties": {}}, + ).result() + print(response) + + +# x-ms-original-file: 2025-07-01-preview/AkriConnector_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_delete_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_delete_maximum_set_gen.py new file mode 100644 index 000000000000..a182e0c2bde5 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_delete_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python akri_connector_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.akri_connector.begin_delete( + resource_group_name="rgiotoperations", + instance_name="resource-name123", + akri_connector_template_name="resource-name123", + akri_connector_name="resource-name123", + ).result() + + +# x-ms-original-file: 2025-07-01-preview/AkriConnector_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_get_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_get_maximum_set_gen.py new file mode 100644 index 000000000000..ad017bc56ef3 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_get_maximum_set_gen.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python akri_connector_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.akri_connector.get( + resource_group_name="rgiotoperations", + instance_name="resource-name123", + akri_connector_template_name="resource-name123", + akri_connector_name="resource-name123", + ) + print(response) + + +# x-ms-original-file: 2025-07-01-preview/AkriConnector_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_list_by_template_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_list_by_template_maximum_set_gen.py new file mode 100644 index 000000000000..3dd58b57f6fb --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_list_by_template_maximum_set_gen.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python akri_connector_list_by_template_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.akri_connector.list_by_template( + resource_group_name="rgiotoperations", + instance_name="resource-name123", + akri_connector_template_name="resource-name123", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2025-07-01-preview/AkriConnector_ListByTemplate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_template_create_or_update_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_template_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..c0e4515609ee --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_template_create_or_update_maximum_set_gen.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python akri_connector_template_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.akri_connector_template.begin_create_or_update( + resource_group_name="rgiotoperations", + instance_name="resource-name123", + akri_connector_template_name="resource-name123", + resource={ + "extendedLocation": {"name": "qmbrfwcpwwhggszhrdjv", "type": "CustomLocation"}, + "properties": { + "aioMetadata": {"aioMaxVersion": "qoxwkvfxvqedqtju", "aioMinVersion": "tkiz"}, + "deviceInboundEndpointTypes": [ + { + "configurationSchemaRefs": { + "additionalConfigSchemaRef": "uxvyqeqxbamqwjy", + "defaultDatasetConfigSchemaRef": "nsifbedzo", + "defaultEventsConfigSchemaRef": "tloquokslyfukgwhklpvpybdgykl", + "defaultProcessControlConfigSchemaRef": "mvjycfnxovmiijpwlpbeacryfdo", + "defaultStreamsConfigSchemaRef": "kdaitmohr", + }, + "endpointType": "chkkpymxhp", + "version": "chkkpymxhp", + } + ], + "diagnostics": {"logs": {"level": "pe"}}, + "mqttConnectionConfiguration": { + "authentication": {"method": "AkriConnectorsMqttAuthentication"}, + "host": "kj", + "keepAliveSeconds": 0, + "maxInflightMessages": 0, + "protocol": "Mqtt", + "sessionExpirySeconds": 0, + "tls": {"mode": "Enabled", "trustedCaCertificateConfigMapRef": "tectjjvukvelsreihwadh"}, + }, + "runtimeConfiguration": {"runtimeConfigurationType": "AkriConnectorTemplateRuntimeConfiguration"}, + }, + }, + ).result() + print(response) + + +# x-ms-original-file: 2025-07-01-preview/AkriConnectorTemplate_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_template_delete_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_template_delete_maximum_set_gen.py new file mode 100644 index 000000000000..cb5ba5bcc176 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_template_delete_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python akri_connector_template_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.akri_connector_template.begin_delete( + resource_group_name="rgiotoperations", + instance_name="resource-name123", + akri_connector_template_name="resource-name123", + ).result() + + +# x-ms-original-file: 2025-07-01-preview/AkriConnectorTemplate_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_template_get_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_template_get_maximum_set_gen.py new file mode 100644 index 000000000000..f18d519d5000 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_template_get_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python akri_connector_template_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.akri_connector_template.get( + resource_group_name="rgiotoperations", + instance_name="resource-name123", + akri_connector_template_name="resource-name123", + ) + print(response) + + +# x-ms-original-file: 2025-07-01-preview/AkriConnectorTemplate_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_template_list_by_instance_resource_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_template_list_by_instance_resource_maximum_set_gen.py new file mode 100644 index 000000000000..270838c0ba74 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_connector_template_list_by_instance_resource_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python akri_connector_template_list_by_instance_resource_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.akri_connector_template.list_by_instance_resource( + resource_group_name="rgiotoperations", + instance_name="resource-name123", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2025-07-01-preview/AkriConnectorTemplate_ListByInstanceResource_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_discovery_handler_create_or_update_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_discovery_handler_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..311c5c8c85ea --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_discovery_handler_create_or_update_maximum_set_gen.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python akri_discovery_handler_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.akri_discovery_handler.begin_create_or_update( + resource_group_name="rgiotoperations", + instance_name="resource-name123", + akri_discovery_handler_name="resource-name123", + resource={ + "extendedLocation": {"name": "qmbrfwcpwwhggszhrdjv", "type": "CustomLocation"}, + "properties": { + "additionalConfiguration": {"key2206": "ysosuqsgtnwlyq"}, + "aioMetadata": {"aioMaxVersion": "yrmlixhxsnrgialgwruyk", "aioMinVersion": "nxusafsppaikld"}, + "diagnostics": {"logs": {"level": "inypis"}}, + "discoverableDeviceEndpointTypes": [{"endpointType": "chkkpymxhp", "version": "chkkpymxhp"}], + "imageConfiguration": { + "imageName": "vrglpcosiuiulgwqkjbrdxnyjphp", + "imagePullPolicy": "Always", + "registrySettings": {"registrySettingsType": "AkriConnectorsRegistrySettings"}, + "replicas": 4, + "tagDigestSettings": {"tagDigestType": "AkriConnectorsTagDigestSettings"}, + }, + "mode": "Enabled", + "mqttConnectionConfiguration": { + "authentication": {"method": "AkriConnectorsMqttAuthentication"}, + "host": "mfxhevl", + "keepAliveSeconds": 0, + "maxInflightMessages": 0, + "protocol": "Mqtt", + "sessionExpirySeconds": 0, + "tls": {"mode": "Enabled", "trustedCaCertificateConfigMapRef": "tectjjvukvelsreihwadh"}, + }, + "schedule": {"scheduleType": "AkriDiscoveryHandlerSchedule"}, + "secrets": [ + { + "secretAlias": "daizzyhqznrvmmrg", + "secretKey": "qblyfotyqcojcchpji", + "secretRef": "vqxrhffxxfwqagwjcjimkmzjoxu", + } + ], + }, + }, + ).result() + print(response) + + +# x-ms-original-file: 2025-07-01-preview/AkriDiscoveryHandler_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_discovery_handler_delete_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_discovery_handler_delete_maximum_set_gen.py new file mode 100644 index 000000000000..a09c422ce2c4 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_discovery_handler_delete_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python akri_discovery_handler_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.akri_discovery_handler.begin_delete( + resource_group_name="rgiotoperations", + instance_name="resource-name123", + akri_discovery_handler_name="resource-name123", + ).result() + + +# x-ms-original-file: 2025-07-01-preview/AkriDiscoveryHandler_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_discovery_handler_get_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_discovery_handler_get_maximum_set_gen.py new file mode 100644 index 000000000000..5d61e02cc568 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_discovery_handler_get_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python akri_discovery_handler_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.akri_discovery_handler.get( + resource_group_name="rgiotoperations", + instance_name="resource-name123", + akri_discovery_handler_name="resource-name123", + ) + print(response) + + +# x-ms-original-file: 2025-07-01-preview/AkriDiscoveryHandler_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_discovery_handler_list_by_instance_resource_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_discovery_handler_list_by_instance_resource_maximum_set_gen.py new file mode 100644 index 000000000000..295436fbc52d --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/akri_discovery_handler_list_by_instance_resource_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python akri_discovery_handler_list_by_instance_resource_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.akri_discovery_handler.list_by_instance_resource( + resource_group_name="rgiotoperations", + instance_name="resource-name123", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2025-07-01-preview/AkriDiscoveryHandler_ListByInstanceResource_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_create_or_update_complex.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_create_or_update_complex.py index 8d00e1311ab0..ea56853238cb 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_create_or_update_complex.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_create_or_update_complex.py @@ -64,6 +64,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/BrokerAuthentication_CreateOrUpdate_Complex.json +# x-ms-original-file: 2025-07-01-preview/BrokerAuthentication_CreateOrUpdate_Complex.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_create_or_update_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_create_or_update_maximum_set_gen.py index 0d01c1a02464..99059773c7e7 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_create_or_update_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_create_or_update_maximum_set_gen.py @@ -62,6 +62,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/BrokerAuthentication_CreateOrUpdate_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/BrokerAuthentication_CreateOrUpdate_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_delete_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_delete_maximum_set_gen.py index 7c84afe7a97a..6db4ab90c483 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_delete_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_delete_maximum_set_gen.py @@ -38,6 +38,6 @@ def main(): ).result() -# x-ms-original-file: 2024-11-01/BrokerAuthentication_Delete_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/BrokerAuthentication_Delete_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git 
a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_get_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_get_maximum_set_gen.py index 9d98c9aae0d7..a1dfc97862c7 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_get_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_get_maximum_set_gen.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/BrokerAuthentication_Get_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/BrokerAuthentication_Get_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_list_by_resource_group_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_list_by_resource_group_maximum_set_gen.py index 96f30799edf6..cda5a64f4f76 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_list_by_resource_group_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authentication_list_by_resource_group_maximum_set_gen.py @@ -39,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: 2024-11-01/BrokerAuthentication_ListByResourceGroup_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/BrokerAuthentication_ListByResourceGroup_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_create_or_update_complex.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_create_or_update_complex.py index b383bfe7bf70..3749e1a7e4d0 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_create_or_update_complex.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_create_or_update_complex.py @@ -78,6 +78,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/BrokerAuthorization_CreateOrUpdate_Complex.json +# x-ms-original-file: 2025-07-01-preview/BrokerAuthorization_CreateOrUpdate_Complex.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_create_or_update_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_create_or_update_maximum_set_gen.py index 24b86aaa114a..2b4bb4d43839 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_create_or_update_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_create_or_update_maximum_set_gen.py @@ -60,6 +60,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/BrokerAuthorization_CreateOrUpdate_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/BrokerAuthorization_CreateOrUpdate_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_create_or_update_simple.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_create_or_update_simple.py index 1a4a32a5868f..b7449a8da79b 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_create_or_update_simple.py +++ 
b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_create_or_update_simple.py @@ -60,6 +60,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/BrokerAuthorization_CreateOrUpdate_Simple.json +# x-ms-original-file: 2025-07-01-preview/BrokerAuthorization_CreateOrUpdate_Simple.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_delete_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_delete_maximum_set_gen.py index 2524580a6ec9..707b8b583c9f 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_delete_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_delete_maximum_set_gen.py @@ -38,6 +38,6 @@ def main(): ).result() -# x-ms-original-file: 2024-11-01/BrokerAuthorization_Delete_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/BrokerAuthorization_Delete_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_get_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_get_maximum_set_gen.py index dc714bcef1ec..df213bf7b3a8 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_get_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_get_maximum_set_gen.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/BrokerAuthorization_Get_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/BrokerAuthorization_Get_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_list_by_resource_group_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_list_by_resource_group_maximum_set_gen.py index b77ebf478f7b..ee447f1eec59 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_list_by_resource_group_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_authorization_list_by_resource_group_maximum_set_gen.py @@ -39,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: 2024-11-01/BrokerAuthorization_ListByResourceGroup_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/BrokerAuthorization_ListByResourceGroup_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_create_or_update_complex.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_create_or_update_complex.py index 12d4ce853141..7ea1a98e24e0 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_create_or_update_complex.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_create_or_update_complex.py @@ -50,6 +50,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/Broker_CreateOrUpdate_Complex.json +# x-ms-original-file: 2025-07-01-preview/Broker_CreateOrUpdate_Complex.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_create_or_update_maximum_set_gen.py 
b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_create_or_update_maximum_set_gen.py index 46f2f7a0567b..87e33e9e8af4 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_create_or_update_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_create_or_update_maximum_set_gen.py @@ -119,6 +119,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/Broker_CreateOrUpdate_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/Broker_CreateOrUpdate_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_create_or_update_minimal.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_create_or_update_minimal.py index c28264aae553..94580255b741 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_create_or_update_minimal.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_create_or_update_minimal.py @@ -42,6 +42,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/Broker_CreateOrUpdate_Minimal.json +# x-ms-original-file: 2025-07-01-preview/Broker_CreateOrUpdate_Minimal.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_create_or_update_simple.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_create_or_update_simple.py index 7f0d94ade3bf..2acdde8394bb 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_create_or_update_simple.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_create_or_update_simple.py @@ -49,6 +49,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/Broker_CreateOrUpdate_Simple.json +# x-ms-original-file: 2025-07-01-preview/Broker_CreateOrUpdate_Simple.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_delete_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_delete_maximum_set_gen.py index af30292fa876..4502b9152f37 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_delete_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_delete_maximum_set_gen.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: 2024-11-01/Broker_Delete_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/Broker_Delete_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_get_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_get_maximum_set_gen.py index ebb5028aa3fe..34fa3421a5d4 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_get_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_get_maximum_set_gen.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/Broker_Get_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/Broker_Get_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_list_by_resource_group_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_list_by_resource_group_maximum_set_gen.py index 82fae2584020..fe535f8be781 100644 --- 
a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_list_by_resource_group_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_list_by_resource_group_maximum_set_gen.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: 2024-11-01/Broker_ListByResourceGroup_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/Broker_ListByResourceGroup_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_create_or_update_complex.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_create_or_update_complex.py index e8d3ce286c18..e6378646231d 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_create_or_update_complex.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_create_or_update_complex.py @@ -69,6 +69,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/BrokerListener_CreateOrUpdate_Complex.json +# x-ms-original-file: 2025-07-01-preview/BrokerListener_CreateOrUpdate_Complex.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_create_or_update_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_create_or_update_maximum_set_gen.py index 8625fb83f582..2872a82a00d8 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_create_or_update_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_create_or_update_maximum_set_gen.py @@ -71,6 +71,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/BrokerListener_CreateOrUpdate_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/BrokerListener_CreateOrUpdate_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_create_or_update_simple.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_create_or_update_simple.py index 99d634507188..a2e118d36567 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_create_or_update_simple.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_create_or_update_simple.py @@ -43,6 +43,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/BrokerListener_CreateOrUpdate_Simple.json +# x-ms-original-file: 2025-07-01-preview/BrokerListener_CreateOrUpdate_Simple.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_delete_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_delete_maximum_set_gen.py index 28a9809c8f24..fd95b48d9bfc 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_delete_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_delete_maximum_set_gen.py @@ -38,6 +38,6 @@ def main(): ).result() -# x-ms-original-file: 2024-11-01/BrokerListener_Delete_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/BrokerListener_Delete_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_get_maximum_set_gen.py 
b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_get_maximum_set_gen.py index df05fe7724c4..d13e9eef7d08 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_get_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_get_maximum_set_gen.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/BrokerListener_Get_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/BrokerListener_Get_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_list_by_resource_group_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_list_by_resource_group_maximum_set_gen.py index 912350a0e32f..7de138c0589e 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_list_by_resource_group_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/broker_listener_list_by_resource_group_maximum_set_gen.py @@ -39,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: 2024-11-01/BrokerListener_ListByResourceGroup_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/BrokerListener_ListByResourceGroup_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_complex_contextualization.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_complex_contextualization.py index 7736bbaa99e0..fd9bf571fd10 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_complex_contextualization.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_complex_contextualization.py @@ -77,6 +77,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/Dataflow_CreateOrUpdate_ComplexContextualization.json +# x-ms-original-file: 2025-07-01-preview/Dataflow_CreateOrUpdate_ComplexContextualization.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_complex_event_hub.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_complex_event_hub.py index 77ea9220896b..eb0fb0023fef 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_complex_event_hub.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_complex_event_hub.py @@ -95,6 +95,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/Dataflow_CreateOrUpdate_ComplexEventHub.json +# x-ms-original-file: 2025-07-01-preview/Dataflow_CreateOrUpdate_ComplexEventHub.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_filter_to_topic.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_filter_to_topic.py index 77c08b771813..a58550114615 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_filter_to_topic.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_filter_to_topic.py @@ -78,6 +78,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/Dataflow_CreateOrUpdate_FilterToTopic.json +# 
x-ms-original-file: 2025-07-01-preview/Dataflow_CreateOrUpdate_FilterToTopic.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_maximum_set_gen.py index 2dc23675e8ec..82835e7acc04 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_maximum_set_gen.py @@ -92,6 +92,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/Dataflow_CreateOrUpdate_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/Dataflow_CreateOrUpdate_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_simple_event_grid.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_simple_event_grid.py index 8056558c7fdb..5ee728c2de17 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_simple_event_grid.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_simple_event_grid.py @@ -63,6 +63,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/Dataflow_CreateOrUpdate_SimpleEventGrid.json +# x-ms-original-file: 2025-07-01-preview/Dataflow_CreateOrUpdate_SimpleEventGrid.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_simple_fabric.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_simple_fabric.py index d5c7a973e01a..863b74a0109e 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_simple_fabric.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_create_or_update_simple_fabric.py @@ -67,6 +67,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/Dataflow_CreateOrUpdate_SimpleFabric.json +# x-ms-original-file: 2025-07-01-preview/Dataflow_CreateOrUpdate_SimpleFabric.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_delete_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_delete_maximum_set_gen.py index 4e175b8eb5c4..3bcac9185bfa 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_delete_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_delete_maximum_set_gen.py @@ -38,6 +38,6 @@ def main(): ).result() -# x-ms-original-file: 2024-11-01/Dataflow_Delete_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/Dataflow_Delete_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_adlsv2.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_adlsv2.py index 729785156913..340492099569 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_adlsv2.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_adlsv2.py @@ -48,6 +48,6 @@ def main(): print(response) -# x-ms-original-file: 
2024-11-01/DataflowEndpoint_CreateOrUpdate_ADLSv2.json +# x-ms-original-file: 2025-07-01-preview/DataflowEndpoint_CreateOrUpdate_ADLSv2.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_adx.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_adx.py index ce1582fa111c..487da9ed82ef 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_adx.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_adx.py @@ -53,6 +53,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/DataflowEndpoint_CreateOrUpdate_ADX.json +# x-ms-original-file: 2025-07-01-preview/DataflowEndpoint_CreateOrUpdate_ADX.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_aio.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_aio.py index 87c58fd57447..728ef472f0a0 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_aio.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_aio.py @@ -52,6 +52,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/DataflowEndpoint_CreateOrUpdate_AIO.json +# x-ms-original-file: 2025-07-01-preview/DataflowEndpoint_CreateOrUpdate_AIO.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_event_grid.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_event_grid.py index bd7e795e64cf..a0295d0fb2c5 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_event_grid.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_event_grid.py @@ -52,6 +52,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/DataflowEndpoint_CreateOrUpdate_EventGrid.json +# x-ms-original-file: 2025-07-01-preview/DataflowEndpoint_CreateOrUpdate_EventGrid.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_event_hub.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_event_hub.py index 3776e5819623..f49d0efd395a 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_event_hub.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_event_hub.py @@ -53,6 +53,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/DataflowEndpoint_CreateOrUpdate_EventHub.json +# x-ms-original-file: 2025-07-01-preview/DataflowEndpoint_CreateOrUpdate_EventHub.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_fabric.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_fabric.py index 830258b9f449..a8f9f61e1fee 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_fabric.py +++ 
b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_fabric.py @@ -53,6 +53,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/DataflowEndpoint_CreateOrUpdate_Fabric.json +# x-ms-original-file: 2025-07-01-preview/DataflowEndpoint_CreateOrUpdate_Fabric.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_kafka.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_kafka.py index 520639eee1ae..b60b1770bfb3 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_kafka.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_kafka.py @@ -59,6 +59,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/DataflowEndpoint_CreateOrUpdate_Kafka.json +# x-ms-original-file: 2025-07-01-preview/DataflowEndpoint_CreateOrUpdate_Kafka.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_local_storage.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_local_storage.py index 2712226ed1c2..4132dc562ab0 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_local_storage.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_local_storage.py @@ -45,6 +45,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/DataflowEndpoint_CreateOrUpdate_LocalStorage.json +# x-ms-original-file: 2025-07-01-preview/DataflowEndpoint_CreateOrUpdate_LocalStorage.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_maximum_set_gen.py index 6a7e6b7c439b..adcfc2e19932 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_maximum_set_gen.py @@ -133,6 +133,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/DataflowEndpoint_CreateOrUpdate_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/DataflowEndpoint_CreateOrUpdate_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_mqtt.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_mqtt.py index cb36b10e85bd..19d07123398f 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_mqtt.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_create_or_update_mqtt.py @@ -59,6 +59,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/DataflowEndpoint_CreateOrUpdate_MQTT.json +# x-ms-original-file: 2025-07-01-preview/DataflowEndpoint_CreateOrUpdate_MQTT.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_delete_maximum_set_gen.py 
b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_delete_maximum_set_gen.py index 3c32bf7b3994..6918d9953f2b 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_delete_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_delete_maximum_set_gen.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: 2024-11-01/DataflowEndpoint_Delete_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/DataflowEndpoint_Delete_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_get_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_get_maximum_set_gen.py index e15e574d563d..3791054332af 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_get_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_get_maximum_set_gen.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/DataflowEndpoint_Get_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/DataflowEndpoint_Get_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_list_by_resource_group_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_list_by_resource_group_maximum_set_gen.py index daa472a6e53b..71f74a841560 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_list_by_resource_group_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_endpoint_list_by_resource_group_maximum_set_gen.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: 2024-11-01/DataflowEndpoint_ListByResourceGroup_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/DataflowEndpoint_ListByResourceGroup_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_get_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_get_maximum_set_gen.py index 2a03b8678050..2c9df8e8c52f 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_get_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_get_maximum_set_gen.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/Dataflow_Get_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/Dataflow_Get_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_graph_create_or_update_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_graph_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..615e54cd6afd --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_graph_create_or_update_maximum_set_gen.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python dataflow_graph_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.dataflow_graph.begin_create_or_update( + resource_group_name="rgiotoperations", + instance_name="resource-123", + dataflow_profile_name="resource-123", + dataflow_graph_name="resource-123", + resource={ + "extendedLocation": {"name": "qmbrfwcpwwhggszhrdjv", "type": "CustomLocation"}, + "properties": { + "mode": "Enabled", + "nodeConnections": [ + { + "from": { + "name": "wsqbccd", + "schema": {"schemaRef": "gkyqreiunkvdgggy", "serializationFormat": "Delta"}, + }, + "to": {"name": "dqvkpqshglptwrjfeblfzbofmkfktzbjhidsvqctmbzocbtgtkbgrayg"}, + } + ], + "nodes": [{"name": "gjbjgii", "type": "DataflowGraphNode"}], + "requestDiskPersistence": "Enabled", + }, + }, + ).result() + print(response) + + +# x-ms-original-file: 2025-07-01-preview/DataflowGraph_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_graph_delete_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_graph_delete_maximum_set_gen.py new file mode 100644 index 000000000000..6eec21319672 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_graph_delete_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python dataflow_graph_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.dataflow_graph.begin_delete( + resource_group_name="rgiotoperations", + instance_name="resource-123", + dataflow_profile_name="resource-123", + dataflow_graph_name="resource-123", + ).result() + + +# x-ms-original-file: 2025-07-01-preview/DataflowGraph_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_graph_get_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_graph_get_maximum_set_gen.py new file mode 100644 index 000000000000..db3503e079f4 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_graph_get_maximum_set_gen.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python dataflow_graph_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.dataflow_graph.get( + resource_group_name="rgiotoperations", + instance_name="resource-123", + dataflow_profile_name="resource-123", + dataflow_graph_name="resource-123", + ) + print(response) + + +# x-ms-original-file: 2025-07-01-preview/DataflowGraph_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_graph_list_by_dataflow_profile_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_graph_list_by_dataflow_profile_maximum_set_gen.py new file mode 100644 index 000000000000..d22ed5985b02 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_graph_list_by_dataflow_profile_maximum_set_gen.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python dataflow_graph_list_by_dataflow_profile_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.dataflow_graph.list_by_dataflow_profile( + resource_group_name="rgiotoperations", + instance_name="resource-123", + dataflow_profile_name="resource-123", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2025-07-01-preview/DataflowGraph_ListByDataflowProfile_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_list_by_profile_resource_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_list_by_profile_resource_maximum_set_gen.py index 87f5a5789fbe..7adfc746700f 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_list_by_profile_resource_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_list_by_profile_resource_maximum_set_gen.py @@ -39,6 +39,6 @@ def main(): print(item) -# x-ms-original-file: 2024-11-01/Dataflow_ListByProfileResource_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/Dataflow_ListByProfileResource_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_create_or_update_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_create_or_update_maximum_set_gen.py index 9dd91df19984..a077664702b0 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_create_or_update_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_create_or_update_maximum_set_gen.py @@ -48,6 +48,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/DataflowProfile_CreateOrUpdate_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/DataflowProfile_CreateOrUpdate_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_create_or_update_minimal.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_create_or_update_minimal.py index a85c03fd2fe3..4ff72d5a7c8e 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_create_or_update_minimal.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_create_or_update_minimal.py @@ -42,6 +42,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/DataflowProfile_CreateOrUpdate_Minimal.json +# x-ms-original-file: 2025-07-01-preview/DataflowProfile_CreateOrUpdate_Minimal.json if __name__ == "__main__": main() diff --git 
a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_create_or_update_multi.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_create_or_update_multi.py index 187a3fb8ec11..bf78e5da85e3 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_create_or_update_multi.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_create_or_update_multi.py @@ -42,6 +42,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/DataflowProfile_CreateOrUpdate_Multi.json +# x-ms-original-file: 2025-07-01-preview/DataflowProfile_CreateOrUpdate_Multi.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_delete_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_delete_maximum_set_gen.py index 9465f90e1704..1bc6b4feaecf 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_delete_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_delete_maximum_set_gen.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: 2024-11-01/DataflowProfile_Delete_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/DataflowProfile_Delete_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_get_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_get_maximum_set_gen.py index de3264b061c5..f2e7130e814a 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_get_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_get_maximum_set_gen.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/DataflowProfile_Get_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/DataflowProfile_Get_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_list_by_resource_group_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_list_by_resource_group_maximum_set_gen.py index f4384cb9fd13..139bb28e70e5 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_list_by_resource_group_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/dataflow_profile_list_by_resource_group_maximum_set_gen.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: 2024-11-01/DataflowProfile_ListByResourceGroup_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/DataflowProfile_ListByResourceGroup_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_create_or_update_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_create_or_update_maximum_set_gen.py index caa2a907feea..ea7fb40f133b 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_create_or_update_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_create_or_update_maximum_set_gen.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # 
-------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -49,6 +50,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/Instance_CreateOrUpdate_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/Instance_CreateOrUpdate_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_delete_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_delete_maximum_set_gen.py index 77df8c7a0429..87a9bd43f697 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_delete_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_delete_maximum_set_gen.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: 2024-11-01/Instance_Delete_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/Instance_Delete_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_get_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_get_maximum_set_gen.py index 4719603bce34..99eac5d968ed 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_get_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_get_maximum_set_gen.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/Instance_Get_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/Instance_Get_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_list_by_resource_group_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_list_by_resource_group_maximum_set_gen.py index 644af16ea52c..f1a7b998ace3 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_list_by_resource_group_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_list_by_resource_group_maximum_set_gen.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: 2024-11-01/Instance_ListByResourceGroup_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/Instance_ListByResourceGroup_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_list_by_subscription_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_list_by_subscription_maximum_set_gen.py index 59defe77563f..1aca92bc96dd 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_list_by_subscription_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_list_by_subscription_maximum_set_gen.py @@ -35,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: 2024-11-01/Instance_ListBySubscription_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/Instance_ListBySubscription_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_update_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_update_maximum_set_gen.py index 53ebf23342f4..9ec13be771b9 100644 --- 
a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_update_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/instance_update_maximum_set_gen.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 2024-11-01/Instance_Update_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/Instance_Update_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/operations_list_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/operations_list_maximum_set_gen.py index 670399787c8e..dfe87829f5db 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/operations_list_maximum_set_gen.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/operations_list_maximum_set_gen.py @@ -35,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: 2024-11-01/Operations_List_MaximumSet_Gen.json +# x-ms-original-file: 2025-07-01-preview/Operations_List_MaximumSet_Gen.json if __name__ == "__main__": main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/registry_endpoint_create_or_update_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/registry_endpoint_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..928f8b5cfffc --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/registry_endpoint_create_or_update_maximum_set_gen.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python registry_endpoint_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.registry_endpoint.begin_create_or_update( + resource_group_name="rgiotoperations", + instance_name="resource-123", + registry_endpoint_name="resource-123", + resource={ + "extendedLocation": {"name": "qmbrfwcpwwhggszhrdjv", "type": "CustomLocation"}, + "properties": { + "authentication": {"method": "RegistryEndpointAuthentication"}, + "host": "contoso.azurecr.io", + }, + }, + ).result() + print(response) + + +# x-ms-original-file: 2025-07-01-preview/RegistryEndpoint_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/registry_endpoint_delete_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/registry_endpoint_delete_maximum_set_gen.py new file mode 100644 index 000000000000..8db5c6770e84 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/registry_endpoint_delete_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python registry_endpoint_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.registry_endpoint.begin_delete( + resource_group_name="rgiotoperations", + instance_name="resource-123", + registry_endpoint_name="resource-123", + ).result() + + +# x-ms-original-file: 2025-07-01-preview/RegistryEndpoint_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/registry_endpoint_get_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/registry_endpoint_get_maximum_set_gen.py new file mode 100644 index 000000000000..1fd6e3260c33 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/registry_endpoint_get_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python registry_endpoint_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.registry_endpoint.get( + resource_group_name="rgiotoperations", + instance_name="resource-123", + registry_endpoint_name="resource-123", + ) + print(response) + + +# x-ms-original-file: 2025-07-01-preview/RegistryEndpoint_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/registry_endpoint_list_by_instance_resource_maximum_set_gen.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/registry_endpoint_list_by_instance_resource_maximum_set_gen.py new file mode 100644 index 000000000000..0a5ce435fa0f --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_samples/registry_endpoint_list_by_instance_resource_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-iotoperations +# USAGE + python registry_endpoint_list_by_instance_resource_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = IoTOperationsMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.registry_endpoint.list_by_instance_resource( + resource_group_name="rgiotoperations", + instance_name="resource-123", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2025-07-01-preview/RegistryEndpoint_ListByInstanceResource_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_connector_operations.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_connector_operations.py new file mode 100644 index 000000000000..6c8680454a66 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_connector_operations.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestIoTOperationsMgmtAkriConnectorOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(IoTOperationsMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_akri_connector_get(self, resource_group): + response = self.client.akri_connector.get( + resource_group_name=resource_group.name, + instance_name="str", + akri_connector_template_name="str", + akri_connector_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_akri_connector_begin_create_or_update(self, resource_group): + response = self.client.akri_connector.begin_create_or_update( + resource_group_name=resource_group.name, + instance_name="str", + akri_connector_template_name="str", + akri_connector_name="str", + resource={ + "extendedLocation": {"name": "str", "type": "str"}, + "id": "str", + "name": "str", + "properties": {"provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_akri_connector_begin_delete(self, resource_group): + response = self.client.akri_connector.begin_delete( + resource_group_name=resource_group.name, + instance_name="str", + akri_connector_template_name="str", + akri_connector_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_akri_connector_list_by_template(self, resource_group): + response = self.client.akri_connector.list_by_template( + resource_group_name=resource_group.name, + instance_name="str", + akri_connector_template_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_connector_operations_async.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_connector_operations_async.py new file mode 100644 index 000000000000..2bcf0a58a3bc --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_connector_operations_async.py @@ -0,0 +1,90 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.iotoperations.aio import IoTOperationsMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestIoTOperationsMgmtAkriConnectorOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(IoTOperationsMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_akri_connector_get(self, resource_group): + response = await self.client.akri_connector.get( + resource_group_name=resource_group.name, + instance_name="str", + akri_connector_template_name="str", + akri_connector_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_akri_connector_begin_create_or_update(self, resource_group): + response = await ( + await self.client.akri_connector.begin_create_or_update( + resource_group_name=resource_group.name, + instance_name="str", + akri_connector_template_name="str", + akri_connector_name="str", + resource={ + "extendedLocation": {"name": "str", "type": "str"}, + "id": "str", + "name": "str", + "properties": {"provisioningState": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_akri_connector_begin_delete(self, resource_group): + response = await ( + await self.client.akri_connector.begin_delete( + resource_group_name=resource_group.name, + instance_name="str", + akri_connector_template_name="str", + akri_connector_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_akri_connector_list_by_template(self, resource_group): + response = self.client.akri_connector.list_by_template( + resource_group_name=resource_group.name, + instance_name="str", + akri_connector_template_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_connector_template_operations.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_connector_template_operations.py new file mode 100644 index 000000000000..874968429734 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_connector_template_operations.py @@ -0,0 +1,108 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestIoTOperationsMgmtAkriConnectorTemplateOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(IoTOperationsMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_akri_connector_template_get(self, resource_group): + response = self.client.akri_connector_template.get( + resource_group_name=resource_group.name, + instance_name="str", + akri_connector_template_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_akri_connector_template_begin_create_or_update(self, resource_group): + response = self.client.akri_connector_template.begin_create_or_update( + resource_group_name=resource_group.name, + instance_name="str", + akri_connector_template_name="str", + resource={ + "extendedLocation": {"name": "str", "type": "str"}, + "id": "str", + "name": "str", + "properties": { + "deviceInboundEndpointTypes": [ + { + "endpointType": "str", + "version": "str", + "configurationSchemaRefs": { + "additionalConfigSchemaRef": "str", + "defaultDatasetConfigSchemaRef": "str", + "defaultEventsConfigSchemaRef": "str", + "defaultProcessControlConfigSchemaRef": "str", + "defaultStreamsConfigSchemaRef": "str", + }, + } + ], + "runtimeConfiguration": "akri_connector_template_runtime_configuration", + "aioMetadata": {"aioMaxVersion": "str", "aioMinVersion": "str"}, + "diagnostics": {"logs": {"level": "str"}}, + "mqttConnectionConfiguration": { + "authentication": "akri_connectors_mqtt_authentication", + "host": "str", + "keepAliveSeconds": 0, + "maxInflightMessages": 0, + "protocol": "str", + "sessionExpirySeconds": 0, + "tls": {"mode": "str", "trustedCaCertificateConfigMapRef": "str"}, + }, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_akri_connector_template_begin_delete(self, resource_group): + response = self.client.akri_connector_template.begin_delete( + resource_group_name=resource_group.name, + instance_name="str", + akri_connector_template_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_akri_connector_template_list_by_instance_resource(self, resource_group): + response = self.client.akri_connector_template.list_by_instance_resource( + resource_group_name=resource_group.name, + instance_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
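[Editor's note] The sync test file above drives the connector-template long-running operations with `.begin_*()` followed by a blocking `.result()`, while the async variant that follows awaits the poller twice (`await (await client.x.begin_*(...)).result()`) against the `azure.mgmt.iotoperations.aio` client. A minimal sketch of both call shapes, assuming the same placeholder resource names used in the generated samples and a placeholder subscription ID:

import asyncio

from azure.identity import DefaultAzureCredential
from azure.identity.aio import DefaultAzureCredential as AsyncDefaultAzureCredential
from azure.mgmt.iotoperations import IoTOperationsMgmtClient
from azure.mgmt.iotoperations.aio import IoTOperationsMgmtClient as AsyncIoTOperationsMgmtClient


def delete_template_sync() -> None:
    # Sync pattern used by the generated samples/tests: begin_*() returns a poller,
    # and .result() blocks until the service reports the final state.
    client = IoTOperationsMgmtClient(
        credential=DefaultAzureCredential(),
        subscription_id="SUBSCRIPTION_ID",  # placeholder
    )
    client.akri_connector_template.begin_delete(
        resource_group_name="rgiotoperations",        # placeholder
        instance_name="resource-123",                 # placeholder
        akri_connector_template_name="resource-123",  # placeholder
    ).result()


async def delete_template_async() -> None:
    # Async pattern from the *_async tests: begin_*() is awaited to obtain the poller,
    # then the poller's result() is awaited for the final outcome.
    client = AsyncIoTOperationsMgmtClient(
        credential=AsyncDefaultAzureCredential(),
        subscription_id="SUBSCRIPTION_ID",  # placeholder
    )
    await (
        await client.akri_connector_template.begin_delete(
            resource_group_name="rgiotoperations",
            instance_name="resource-123",
            akri_connector_template_name="resource-123",
        )
    ).result()
    # Client/credential cleanup is omitted here for brevity.


if __name__ == "__main__":
    delete_template_sync()
    asyncio.run(delete_template_async())
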
diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_connector_template_operations_async.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_connector_template_operations_async.py new file mode 100644 index 000000000000..6d387cb5a6e4 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_connector_template_operations_async.py @@ -0,0 +1,113 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.iotoperations.aio import IoTOperationsMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestIoTOperationsMgmtAkriConnectorTemplateOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(IoTOperationsMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_akri_connector_template_get(self, resource_group): + response = await self.client.akri_connector_template.get( + resource_group_name=resource_group.name, + instance_name="str", + akri_connector_template_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_akri_connector_template_begin_create_or_update(self, resource_group): + response = await ( + await self.client.akri_connector_template.begin_create_or_update( + resource_group_name=resource_group.name, + instance_name="str", + akri_connector_template_name="str", + resource={ + "extendedLocation": {"name": "str", "type": "str"}, + "id": "str", + "name": "str", + "properties": { + "deviceInboundEndpointTypes": [ + { + "endpointType": "str", + "version": "str", + "configurationSchemaRefs": { + "additionalConfigSchemaRef": "str", + "defaultDatasetConfigSchemaRef": "str", + "defaultEventsConfigSchemaRef": "str", + "defaultProcessControlConfigSchemaRef": "str", + "defaultStreamsConfigSchemaRef": "str", + }, + } + ], + "runtimeConfiguration": "akri_connector_template_runtime_configuration", + "aioMetadata": {"aioMaxVersion": "str", "aioMinVersion": "str"}, + "diagnostics": {"logs": {"level": "str"}}, + "mqttConnectionConfiguration": { + "authentication": "akri_connectors_mqtt_authentication", + "host": "str", + "keepAliveSeconds": 0, + "maxInflightMessages": 0, + "protocol": "str", + "sessionExpirySeconds": 0, + "tls": {"mode": "str", "trustedCaCertificateConfigMapRef": "str"}, + }, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_akri_connector_template_begin_delete(self, resource_group): + response = await ( + await self.client.akri_connector_template.begin_delete( + resource_group_name=resource_group.name, + instance_name="str", + akri_connector_template_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_akri_connector_template_list_by_instance_resource(self, resource_group): + response = self.client.akri_connector_template.list_by_instance_resource( + resource_group_name=resource_group.name, + instance_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_discovery_handler_operations.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_discovery_handler_operations.py new file mode 100644 index 000000000000..88e3285937f9 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_discovery_handler_operations.py @@ -0,0 +1,106 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestIoTOperationsMgmtAkriDiscoveryHandlerOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(IoTOperationsMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_akri_discovery_handler_get(self, resource_group): + response = self.client.akri_discovery_handler.get( + resource_group_name=resource_group.name, + instance_name="str", + akri_discovery_handler_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_akri_discovery_handler_begin_create_or_update(self, resource_group): + response = self.client.akri_discovery_handler.begin_create_or_update( + resource_group_name=resource_group.name, + instance_name="str", + akri_discovery_handler_name="str", + resource={ + "extendedLocation": {"name": "str", "type": "str"}, + "id": "str", + "name": "str", + "properties": { + "discoverableDeviceEndpointTypes": [{"endpointType": "str", "version": "str"}], + "imageConfiguration": { + "imageName": "str", + "imagePullPolicy": "str", + "registrySettings": "akri_connectors_registry_settings", + "replicas": 0, + "tagDigestSettings": "akri_connectors_tag_digest_settings", + }, + "schedule": "akri_discovery_handler_schedule", + "additionalConfiguration": {"str": "str"}, + "aioMetadata": {"aioMaxVersion": "str", "aioMinVersion": "str"}, + "diagnostics": {"logs": {"level": "str"}}, + "mode": "str", + "mqttConnectionConfiguration": { + "authentication": "akri_connectors_mqtt_authentication", + "host": "str", + "keepAliveSeconds": 0, + "maxInflightMessages": 0, + "protocol": "str", + "sessionExpirySeconds": 0, + "tls": {"mode": "str", "trustedCaCertificateConfigMapRef": "str"}, + }, + "provisioningState": "str", + "secrets": [{"secretAlias": "str", "secretKey": "str", "secretRef": "str"}], + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_akri_discovery_handler_begin_delete(self, resource_group): + response = self.client.akri_discovery_handler.begin_delete( + resource_group_name=resource_group.name, + instance_name="str", + akri_discovery_handler_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_akri_discovery_handler_list_by_instance_resource(self, resource_group): + response = self.client.akri_discovery_handler.list_by_instance_resource( + resource_group_name=resource_group.name, + instance_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
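[Editor's note] The list operations exercised above (`list_by_instance_resource`, `list_by_template`, `list_by_dataflow_profile`) return pageable iterables that the generated tests simply drain into a list. A small sketch of consuming one of them with basic service-error handling; the resource names are the sample placeholders, and `HttpResponseError` is the standard azure-core exception rather than anything specific to this package:

from azure.core.exceptions import HttpResponseError
from azure.identity import DefaultAzureCredential
from azure.mgmt.iotoperations import IoTOperationsMgmtClient


def list_discovery_handlers() -> None:
    client = IoTOperationsMgmtClient(
        credential=DefaultAzureCredential(),
        subscription_id="SUBSCRIPTION_ID",  # placeholder
    )
    try:
        # The pageable handles paging transparently; iterating fetches pages lazily.
        handlers = list(
            client.akri_discovery_handler.list_by_instance_resource(
                resource_group_name="rgiotoperations",  # placeholder
                instance_name="resource-123",           # placeholder
            )
        )
    except HttpResponseError as err:
        print(f"Listing failed: {err}")
        return
    for handler in handlers:
        print(handler)


if __name__ == "__main__":
    list_discovery_handlers()
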
diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_discovery_handler_operations_async.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_discovery_handler_operations_async.py new file mode 100644 index 000000000000..defb457b8996 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_akri_discovery_handler_operations_async.py @@ -0,0 +1,111 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.iotoperations.aio import IoTOperationsMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestIoTOperationsMgmtAkriDiscoveryHandlerOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(IoTOperationsMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_akri_discovery_handler_get(self, resource_group): + response = await self.client.akri_discovery_handler.get( + resource_group_name=resource_group.name, + instance_name="str", + akri_discovery_handler_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_akri_discovery_handler_begin_create_or_update(self, resource_group): + response = await ( + await self.client.akri_discovery_handler.begin_create_or_update( + resource_group_name=resource_group.name, + instance_name="str", + akri_discovery_handler_name="str", + resource={ + "extendedLocation": {"name": "str", "type": "str"}, + "id": "str", + "name": "str", + "properties": { + "discoverableDeviceEndpointTypes": [{"endpointType": "str", "version": "str"}], + "imageConfiguration": { + "imageName": "str", + "imagePullPolicy": "str", + "registrySettings": "akri_connectors_registry_settings", + "replicas": 0, + "tagDigestSettings": "akri_connectors_tag_digest_settings", + }, + "schedule": "akri_discovery_handler_schedule", + "additionalConfiguration": {"str": "str"}, + "aioMetadata": {"aioMaxVersion": "str", "aioMinVersion": "str"}, + "diagnostics": {"logs": {"level": "str"}}, + "mode": "str", + "mqttConnectionConfiguration": { + "authentication": "akri_connectors_mqtt_authentication", + "host": "str", + "keepAliveSeconds": 0, + "maxInflightMessages": 0, + "protocol": "str", + "sessionExpirySeconds": 0, + "tls": {"mode": "str", "trustedCaCertificateConfigMapRef": "str"}, + }, + "provisioningState": "str", + "secrets": [{"secretAlias": "str", "secretKey": "str", "secretRef": "str"}], + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_akri_discovery_handler_begin_delete(self, resource_group): + response = await ( + await self.client.akri_discovery_handler.begin_delete( + resource_group_name=resource_group.name, + instance_name="str", + akri_discovery_handler_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_akri_discovery_handler_list_by_instance_resource(self, resource_group): + response = self.client.akri_discovery_handler.list_by_instance_resource( + resource_group_name=resource_group.name, + instance_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
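[Editor's note] The async tests above consume the pageables with `async for` inside the recorded-test harness; outside it, the same call needs an event loop plus the aio client and credential. A small sketch, assuming `azure.identity.aio.DefaultAzureCredential` and the usual async context-manager support on Azure SDK async clients:

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.iotoperations.aio import IoTOperationsMgmtClient


async def main() -> None:
    # Async credentials and clients should be closed; context managers handle that.
    async with DefaultAzureCredential() as credential:
        async with IoTOperationsMgmtClient(
            credential=credential,
            subscription_id="SUBSCRIPTION_ID",  # placeholder
        ) as client:
            pager = client.akri_discovery_handler.list_by_instance_resource(
                resource_group_name="rgiotoperations",  # placeholder
                instance_name="resource-123",           # placeholder
            )
            async for handler in pager:  # pages are fetched lazily as iteration proceeds
                print(handler)


if __name__ == "__main__":
    asyncio.run(main())
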
diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_broker_authentication_operations.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_broker_authentication_operations.py index 8e5c40963cd3..f5dc7547caf0 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_broker_authentication_operations.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_broker_authentication_operations.py @@ -55,6 +55,7 @@ def test_broker_authentication_begin_create_or_update(self, resource_group): }, "serviceAccountTokenSettings": {"audiences": ["str"]}, "x509Settings": { + "additionalValidation": "str", "authorizationAttributes": {"str": {"attributes": {"str": "str"}, "subject": "str"}}, "trustedClientCaCert": "str", }, diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_broker_authentication_operations_async.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_broker_authentication_operations_async.py index cab43bad5ac7..78804d2c9213 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_broker_authentication_operations_async.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_broker_authentication_operations_async.py @@ -57,6 +57,7 @@ async def test_broker_authentication_begin_create_or_update(self, resource_group }, "serviceAccountTokenSettings": {"audiences": ["str"]}, "x509Settings": { + "additionalValidation": "str", "authorizationAttributes": { "str": {"attributes": {"str": "str"}, "subject": "str"} }, diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_broker_operations.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_broker_operations.py index 1ed9d29e13dd..b0f66cada542 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_broker_operations.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_broker_operations.py @@ -79,7 +79,11 @@ def test_broker_begin_create_or_update(self, resource_group): "accessModes": ["str"], "dataSource": {"kind": "str", "name": "str", "apiGroup": "str"}, "dataSourceRef": {"kind": "str", "name": "str", "apiGroup": "str", "namespace": "str"}, - "resources": {"limits": {"str": "str"}, "requests": {"str": "str"}}, + "resources": { + "claims": [{"name": "str"}], + "limits": {"str": "str"}, + "requests": {"str": "str"}, + }, "selector": { "matchExpressions": [{"key": "str", "operator": "str", "values": ["str"]}], "matchLabels": {"str": "str"}, @@ -92,7 +96,11 @@ def test_broker_begin_create_or_update(self, resource_group): "accessModes": ["str"], "dataSource": {"kind": "str", "name": "str", "apiGroup": "str"}, "dataSourceRef": {"kind": "str", "name": "str", "apiGroup": "str", "namespace": "str"}, - "resources": {"limits": {"str": "str"}, "requests": {"str": "str"}}, + "resources": { + "claims": [{"name": "str"}], + "limits": {"str": "str"}, + "requests": {"str": "str"}, + }, "selector": { "matchExpressions": [{"key": "str", "operator": "str", "values": ["str"]}], "matchLabels": {"str": "str"}, @@ -104,6 +112,31 @@ def test_broker_begin_create_or_update(self, resource_group): }, "generateResourceLimits": {"cpu": "str"}, "memoryProfile": "str", + "persistence": { + "maxSize": "str", + 
"dynamicSettings": {"userPropertyKey": "str", "userPropertyValue": "str"}, + "encryption": {"mode": "str"}, + "persistentVolumeClaimSpec": { + "accessModes": ["str"], + "dataSource": {"kind": "str", "name": "str", "apiGroup": "str"}, + "dataSourceRef": {"kind": "str", "name": "str", "apiGroup": "str", "namespace": "str"}, + "resources": { + "claims": [{"name": "str"}], + "limits": {"str": "str"}, + "requests": {"str": "str"}, + }, + "selector": { + "matchExpressions": [{"key": "str", "operator": "str", "values": ["str"]}], + "matchLabels": {"str": "str"}, + }, + "storageClassName": "str", + "volumeMode": "str", + "volumeName": "str", + }, + "retain": "broker_retain_messages_policy", + "stateStore": "broker_state_store_policy", + "subscriberQueue": "broker_subscriber_queue_policy", + }, "provisioningState": "str", }, "systemData": { diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_broker_operations_async.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_broker_operations_async.py index 85391a0dffbd..d9db82c8e622 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_broker_operations_async.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_broker_operations_async.py @@ -81,7 +81,11 @@ async def test_broker_begin_create_or_update(self, resource_group): "accessModes": ["str"], "dataSource": {"kind": "str", "name": "str", "apiGroup": "str"}, "dataSourceRef": {"kind": "str", "name": "str", "apiGroup": "str", "namespace": "str"}, - "resources": {"limits": {"str": "str"}, "requests": {"str": "str"}}, + "resources": { + "claims": [{"name": "str"}], + "limits": {"str": "str"}, + "requests": {"str": "str"}, + }, "selector": { "matchExpressions": [{"key": "str", "operator": "str", "values": ["str"]}], "matchLabels": {"str": "str"}, @@ -94,7 +98,11 @@ async def test_broker_begin_create_or_update(self, resource_group): "accessModes": ["str"], "dataSource": {"kind": "str", "name": "str", "apiGroup": "str"}, "dataSourceRef": {"kind": "str", "name": "str", "apiGroup": "str", "namespace": "str"}, - "resources": {"limits": {"str": "str"}, "requests": {"str": "str"}}, + "resources": { + "claims": [{"name": "str"}], + "limits": {"str": "str"}, + "requests": {"str": "str"}, + }, "selector": { "matchExpressions": [{"key": "str", "operator": "str", "values": ["str"]}], "matchLabels": {"str": "str"}, @@ -106,6 +114,31 @@ async def test_broker_begin_create_or_update(self, resource_group): }, "generateResourceLimits": {"cpu": "str"}, "memoryProfile": "str", + "persistence": { + "maxSize": "str", + "dynamicSettings": {"userPropertyKey": "str", "userPropertyValue": "str"}, + "encryption": {"mode": "str"}, + "persistentVolumeClaimSpec": { + "accessModes": ["str"], + "dataSource": {"kind": "str", "name": "str", "apiGroup": "str"}, + "dataSourceRef": {"kind": "str", "name": "str", "apiGroup": "str", "namespace": "str"}, + "resources": { + "claims": [{"name": "str"}], + "limits": {"str": "str"}, + "requests": {"str": "str"}, + }, + "selector": { + "matchExpressions": [{"key": "str", "operator": "str", "values": ["str"]}], + "matchLabels": {"str": "str"}, + }, + "storageClassName": "str", + "volumeMode": "str", + "volumeName": "str", + }, + "retain": "broker_retain_messages_policy", + "stateStore": "broker_state_store_policy", + "subscriberQueue": "broker_subscriber_queue_policy", + }, "provisioningState": "str", }, "systemData": { diff --git 
a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_dataflow_endpoint_operations.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_dataflow_endpoint_operations.py index 4227ee4806be..c0c239c76f25 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_dataflow_endpoint_operations.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_dataflow_endpoint_operations.py @@ -86,6 +86,7 @@ def test_dataflow_endpoint_begin_create_or_update(self, resource_group): "oneLakePathType": "str", "batching": {"latencySeconds": 0, "maxMessages": 0}, }, + "hostType": "str", "kafkaSettings": { "authentication": { "method": "str", @@ -132,6 +133,12 @@ def test_dataflow_endpoint_begin_create_or_update(self, resource_group): "sessionExpirySeconds": 0, "tls": {"mode": "str", "trustedCaCertificateConfigMapRef": "str"}, }, + "openTelemetrySettings": { + "authentication": "dataflow_open_telemetry_authentication", + "host": "str", + "batching": {"latencySeconds": 0, "maxMessages": 0}, + "tls": {"mode": "str", "trustedCaCertificateConfigMapRef": "str"}, + }, "provisioningState": "str", }, "systemData": { diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_dataflow_endpoint_operations_async.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_dataflow_endpoint_operations_async.py index fab2a6575817..27ca2b2021e4 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_dataflow_endpoint_operations_async.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_dataflow_endpoint_operations_async.py @@ -88,6 +88,7 @@ async def test_dataflow_endpoint_begin_create_or_update(self, resource_group): "oneLakePathType": "str", "batching": {"latencySeconds": 0, "maxMessages": 0}, }, + "hostType": "str", "kafkaSettings": { "authentication": { "method": "str", @@ -134,6 +135,12 @@ async def test_dataflow_endpoint_begin_create_or_update(self, resource_group): "sessionExpirySeconds": 0, "tls": {"mode": "str", "trustedCaCertificateConfigMapRef": "str"}, }, + "openTelemetrySettings": { + "authentication": "dataflow_open_telemetry_authentication", + "host": "str", + "batching": {"latencySeconds": 0, "maxMessages": 0}, + "tls": {"mode": "str", "trustedCaCertificateConfigMapRef": "str"}, + }, "provisioningState": "str", }, "systemData": { diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_dataflow_graph_operations.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_dataflow_graph_operations.py new file mode 100644 index 000000000000..7c817848bfae --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_dataflow_graph_operations.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestIoTOperationsMgmtDataflowGraphOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(IoTOperationsMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_dataflow_graph_get(self, resource_group): + response = self.client.dataflow_graph.get( + resource_group_name=resource_group.name, + instance_name="str", + dataflow_profile_name="str", + dataflow_graph_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_dataflow_graph_begin_create_or_update(self, resource_group): + response = self.client.dataflow_graph.begin_create_or_update( + resource_group_name=resource_group.name, + instance_name="str", + dataflow_profile_name="str", + dataflow_graph_name="str", + resource={ + "extendedLocation": {"name": "str", "type": "str"}, + "id": "str", + "name": "str", + "properties": { + "nodeConnections": [ + { + "from": {"name": "str", "schema": {"schemaRef": "str", "serializationFormat": "str"}}, + "to": {"name": "str"}, + } + ], + "nodes": ["dataflow_graph_node"], + "mode": "str", + "provisioningState": "str", + "requestDiskPersistence": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_dataflow_graph_begin_delete(self, resource_group): + response = self.client.dataflow_graph.begin_delete( + resource_group_name=resource_group.name, + instance_name="str", + dataflow_profile_name="str", + dataflow_graph_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_dataflow_graph_list_by_dataflow_profile(self, resource_group): + response = self.client.dataflow_graph.list_by_dataflow_profile( + resource_group_name=resource_group.name, + instance_name="str", + dataflow_profile_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_dataflow_graph_operations_async.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_dataflow_graph_operations_async.py new file mode 100644 index 000000000000..ebaec94ca406 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_dataflow_graph_operations_async.py @@ -0,0 +1,101 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.iotoperations.aio import IoTOperationsMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestIoTOperationsMgmtDataflowGraphOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(IoTOperationsMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_dataflow_graph_get(self, resource_group): + response = await self.client.dataflow_graph.get( + resource_group_name=resource_group.name, + instance_name="str", + dataflow_profile_name="str", + dataflow_graph_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_dataflow_graph_begin_create_or_update(self, resource_group): + response = await ( + await self.client.dataflow_graph.begin_create_or_update( + resource_group_name=resource_group.name, + instance_name="str", + dataflow_profile_name="str", + dataflow_graph_name="str", + resource={ + "extendedLocation": {"name": "str", "type": "str"}, + "id": "str", + "name": "str", + "properties": { + "nodeConnections": [ + { + "from": {"name": "str", "schema": {"schemaRef": "str", "serializationFormat": "str"}}, + "to": {"name": "str"}, + } + ], + "nodes": ["dataflow_graph_node"], + "mode": "str", + "provisioningState": "str", + "requestDiskPersistence": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_dataflow_graph_begin_delete(self, resource_group): + response = await ( + await self.client.dataflow_graph.begin_delete( + resource_group_name=resource_group.name, + instance_name="str", + dataflow_profile_name="str", + dataflow_graph_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_dataflow_graph_list_by_dataflow_profile(self, resource_group): + response = self.client.dataflow_graph.list_by_dataflow_profile( + resource_group_name=resource_group.name, + instance_name="str", + dataflow_profile_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
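The DataflowGraph operation group added above exposes the same CRUD/LRO surface in both sync and async form. As a rough illustration of driving the synchronous variant, the sketch below reuses the top-level request shape from the generated test; every name is a placeholder, the `CustomLocation` extended-location type is an assumption not taken from this diff, and a real request would populate the nodes and node connections rather than leaving them empty.

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.iotoperations import IoTOperationsMgmtClient

# Placeholder identifiers; substitute real values before running.
client = IoTOperationsMgmtClient(DefaultAzureCredential(), "<subscription-id>")

poller = client.dataflow_graph.begin_create_or_update(
    resource_group_name="<resource-group>",
    instance_name="<instance-name>",
    dataflow_profile_name="<profile-name>",
    dataflow_graph_name="<graph-name>",
    resource={
        # Same top-level shape as the generated test body; a real graph would
        # define its nodes and the connections between them here.
        "extendedLocation": {"name": "<custom-location-resource-id>", "type": "CustomLocation"},
        "properties": {
            "nodes": [],
            "nodeConnections": [],
        },
    },
)
graph = poller.result()  # blocks until the service reports a terminal state
print(graph.name)

# Enumerate every graph under a dataflow profile.
for item in client.dataflow_graph.list_by_dataflow_profile(
    resource_group_name="<resource-group>",
    instance_name="<instance-name>",
    dataflow_profile_name="<profile-name>",
):
    print(item.name)
```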
diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_instance_operations.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_instance_operations.py index b0c1dd1e3b86..68274053e51f 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_instance_operations.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_instance_operations.py @@ -48,7 +48,10 @@ def test_instance_begin_create_or_update(self, resource_group): "name": "str", "properties": { "schemaRegistryRef": {"resourceId": "str"}, + "adrNamespaceRef": {"resourceId": "str"}, + "defaultSecretProviderClassRef": {"resourceId": "str"}, "description": "str", + "features": {"str": {"mode": "str", "settings": {"str": "str"}}}, "provisioningState": "str", "version": "str", }, diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_instance_operations_async.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_instance_operations_async.py index dab0ce2bb743..cbacf4c2c81a 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_instance_operations_async.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_instance_operations_async.py @@ -50,7 +50,10 @@ async def test_instance_begin_create_or_update(self, resource_group): "name": "str", "properties": { "schemaRegistryRef": {"resourceId": "str"}, + "adrNamespaceRef": {"resourceId": "str"}, + "defaultSecretProviderClassRef": {"resourceId": "str"}, "description": "str", + "features": {"str": {"mode": "str", "settings": {"str": "str"}}}, "provisioningState": "str", "version": "str", }, diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_registry_endpoint_operations.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_registry_endpoint_operations.py new file mode 100644 index 000000000000..d4f7905dd39b --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_registry_endpoint_operations.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.iotoperations import IoTOperationsMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestIoTOperationsMgmtRegistryEndpointOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(IoTOperationsMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_registry_endpoint_get(self, resource_group): + response = self.client.registry_endpoint.get( + resource_group_name=resource_group.name, + instance_name="str", + registry_endpoint_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_registry_endpoint_begin_create_or_update(self, resource_group): + response = self.client.registry_endpoint.begin_create_or_update( + resource_group_name=resource_group.name, + instance_name="str", + registry_endpoint_name="str", + resource={ + "extendedLocation": {"name": "str", "type": "str"}, + "id": "str", + "name": "str", + "properties": { + "authentication": "registry_endpoint_authentication", + "host": "str", + "provisioningState": "str", + "trustSettings": {"trustedSigningKeys": "registry_endpoint_trusted_signing_key"}, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_registry_endpoint_begin_delete(self, resource_group): + response = self.client.registry_endpoint.begin_delete( + resource_group_name=resource_group.name, + instance_name="str", + registry_endpoint_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_registry_endpoint_list_by_instance_resource(self, resource_group): + response = self.client.registry_endpoint.list_by_instance_resource( + resource_group_name=resource_group.name, + instance_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_registry_endpoint_operations_async.py b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_registry_endpoint_operations_async.py new file mode 100644 index 000000000000..0fff7a5222f9 --- /dev/null +++ b/sdk/iotoperations/azure-mgmt-iotoperations/generated_tests/test_io_toperations_mgmt_registry_endpoint_operations_async.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.iotoperations.aio import IoTOperationsMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestIoTOperationsMgmtRegistryEndpointOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(IoTOperationsMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_registry_endpoint_get(self, resource_group): + response = await self.client.registry_endpoint.get( + resource_group_name=resource_group.name, + instance_name="str", + registry_endpoint_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_registry_endpoint_begin_create_or_update(self, resource_group): + response = await ( + await self.client.registry_endpoint.begin_create_or_update( + resource_group_name=resource_group.name, + instance_name="str", + registry_endpoint_name="str", + resource={ + "extendedLocation": {"name": "str", "type": "str"}, + "id": "str", + "name": "str", + "properties": { + "authentication": "registry_endpoint_authentication", + "host": "str", + "provisioningState": "str", + "trustSettings": {"trustedSigningKeys": "registry_endpoint_trusted_signing_key"}, + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_registry_endpoint_begin_delete(self, resource_group): + response = await ( + await self.client.registry_endpoint.begin_delete( + resource_group_name=resource_group.name, + instance_name="str", + registry_endpoint_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_registry_endpoint_list_by_instance_resource(self, resource_group): + response = self.client.registry_endpoint.list_by_instance_resource( + resource_group_name=resource_group.name, + instance_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
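The RegistryEndpoint operation group introduced above mirrors the other instance-scoped groups (get, begin_create_or_update, begin_delete, list_by_instance_resource). A minimal synchronous sketch follows; the names are placeholders, and the `endpoint.properties.host` access assumes the generated model surfaces the `host` field shown in the test payload.

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.iotoperations import IoTOperationsMgmtClient

# Placeholder identifiers; substitute real values before running.
client = IoTOperationsMgmtClient(DefaultAzureCredential(), "<subscription-id>")

# Enumerate every registry endpoint attached to an instance.
for endpoint in client.registry_endpoint.list_by_instance_resource(
    resource_group_name="<resource-group>",
    instance_name="<instance-name>",
):
    print(endpoint.name, endpoint.properties.host)

# Fetch a single endpoint, then start a long-running delete and wait for it.
endpoint = client.registry_endpoint.get(
    resource_group_name="<resource-group>",
    instance_name="<instance-name>",
    registry_endpoint_name="<endpoint-name>",
)
client.registry_endpoint.begin_delete(
    resource_group_name="<resource-group>",
    instance_name="<instance-name>",
    registry_endpoint_name="<endpoint-name>",
).result()
```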
diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/sdk_packaging.toml b/sdk/iotoperations/azure-mgmt-iotoperations/sdk_packaging.toml index ab2e907e35c4..7c9fd2bd4fda 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/sdk_packaging.toml +++ b/sdk/iotoperations/azure-mgmt-iotoperations/sdk_packaging.toml @@ -3,7 +3,7 @@ package_name = "azure-mgmt-iotoperations" package_nspkg = "azure-mgmt-nspkg" package_pprint_name = "Iotoperations Management" package_doc_id = "" -is_stable = true +is_stable = false is_arm = true need_msrestazure = false need_azuremgmtcore = true diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/setup.py b/sdk/iotoperations/azure-mgmt-iotoperations/setup.py index 87e5dc487a7e..5f5c35539c83 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/setup.py +++ b/sdk/iotoperations/azure-mgmt-iotoperations/setup.py @@ -49,11 +49,10 @@ url="https://github.com/Azure/azure-sdk-for-python", keywords="azure, azure sdk", # update with search keywords relevant to the azure service / product classifiers=[ - "Development Status :: 5 - Production/Stable", + "Development Status :: 4 - Beta", "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", @@ -77,7 +76,7 @@ "isodate>=0.6.1", "typing-extensions>=4.6.0", "azure-common>=1.1", - "azure-mgmt-core>=1.3.2", + "azure-mgmt-core>=1.5.0", ], - python_requires=">=3.8", + python_requires=">=3.9", ) diff --git a/sdk/iotoperations/azure-mgmt-iotoperations/tsp-location.yaml b/sdk/iotoperations/azure-mgmt-iotoperations/tsp-location.yaml index 613fd52508c3..fbe5f6dabd37 100644 --- a/sdk/iotoperations/azure-mgmt-iotoperations/tsp-location.yaml +++ b/sdk/iotoperations/azure-mgmt-iotoperations/tsp-location.yaml @@ -1,4 +1,4 @@ directory: specification/iotoperations/IoTOperations.Management -commit: ab67c148ec716a0d0075770742d54468f128c72e +commit: 804678a63f4e15829793b12c133f7a2877c232b4 repo: Azure/azure-rest-api-specs additionalDirectories: