diff --git a/sdk/datafactory/azure-mgmt-datafactory/_meta.json b/sdk/datafactory/azure-mgmt-datafactory/_meta.json index 7a98149e5eed..57f426345755 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/_meta.json +++ b/sdk/datafactory/azure-mgmt-datafactory/_meta.json @@ -1,11 +1,11 @@ { - "commit": "471fbc404548c3c6611833680dbbeefcc010e201", - "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "commit": "6250497796675059acd32fe7e2db39db503d2c0b", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", "autorest": "3.10.2", "use": [ - "@autorest/python@6.17.0", + "@autorest/python@6.19.0", "@autorest/modelerfour@4.27.0" ], - "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --use=@autorest/python@6.17.0 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False", + "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --generate-sample=True --generate-test=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.19.0 --use=@autorest/modelerfour@4.27.0 --version=3.10.2 --version-tolerant=False", "readme": "specification/datafactory/resource-manager/readme.md" } \ No newline at end of file diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py index b77ac9246082..c47f66669f1b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated.
# -------------------------------------------------------------------------- -VERSION = "9.0.0" +VERSION = "10.0.0" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py index 909d7e58b3fc..683df87021a0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/aio/operations/_factories_operations.py @@ -36,7 +36,6 @@ build_get_git_hub_access_token_request, build_get_request, build_list_by_resource_group_request, - build_list_request, build_update_request, ) @@ -67,80 +66,6 @@ def __init__(self, *args, **kwargs) -> None: self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace - def list(self, **kwargs: Any) -> AsyncIterable["_models.Factory"]: - """Lists factories under the specified subscription.
- - :return: An iterator like instance of either Factory or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.Factory] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.FactoryListResponse] = kwargs.pop("cls", None) - - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - async def extract_data(pipeline_response): - deserialized = self._deserialize("FactoryListResponse", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - 
_stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) - @overload async def configure_factory_repo( self, diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 5616a16fc7fb..04c45f5ec6c9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -350,6 +350,9 @@ from ._models_py3 import HubspotLinkedService from ._models_py3 import HubspotObjectDataset from ._models_py3 import HubspotSource +from ._models_py3 import IcebergDataset +from ._models_py3 import IcebergSink +from ._models_py3 import IcebergWriteSettings from ._models_py3 import IfConditionActivity from ._models_py3 import ImpalaLinkedService from ._models_py3 import ImpalaObjectDataset @@ -1271,6 +1274,9 @@ "HubspotLinkedService", "HubspotObjectDataset", "HubspotSource", + "IcebergDataset", + "IcebergSink", + "IcebergWriteSettings", "IfConditionActivity", "ImpalaLinkedService", "ImpalaObjectDataset", diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 6415cfc305b6..66767a0b4824 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -879,7 +879,7 @@ class 
Dataset(_serialization.Model): DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GoogleBigQueryV2ObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, - HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, + HttpDataset, HubspotObjectDataset, IcebergDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, LakeHouseTableDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, @@ -988,6 +988,7 @@ class Dataset(_serialization.Model): "HiveObject": "HiveObjectDataset", "HttpFile": "HttpDataset", "HubspotObject": "HubspotObjectDataset", + "Iceberg": "IcebergDataset", "ImpalaObject": "ImpalaObjectDataset", "InformixTable": "InformixTableDataset", "JiraObject": "JiraObjectDataset", @@ -5180,11 +5181,12 @@ class CopySink(_serialization.Model): AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, - DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, - JsonSink, LakeHouseTableSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, - OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, - SalesforceServiceCloudV2Sink, SalesforceSink, SalesforceV2Sink, SapCloudForCustomerSink, - SnowflakeSink, SnowflakeV2Sink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink, WarehouseSink + DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, IcebergSink, + InformixSink, JsonSink, LakeHouseTableSink, MicrosoftAccessSink, MongoDbAtlasSink, + MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, 
+ SalesforceServiceCloudSink, SalesforceServiceCloudV2Sink, SalesforceSink, SalesforceV2Sink, + SapCloudForCustomerSink, SnowflakeSink, SnowflakeV2Sink, SqlDWSink, SqlMISink, SqlServerSink, + SqlSink, WarehouseSink All required parameters must be populated in order to send to server. @@ -5251,6 +5253,7 @@ class CopySink(_serialization.Model): "DynamicsCrmSink": "DynamicsCrmSink", "DynamicsSink": "DynamicsSink", "FileSystemSink": "FileSystemSink", + "IcebergSink": "IcebergSink", "InformixSink": "InformixSink", "JsonSink": "JsonSink", "LakeHouseTableSink": "LakeHouseTableSink", @@ -5525,8 +5528,8 @@ class FormatWriteSettings(_serialization.Model): """Format write settings. You probably want to use the sub-classes and not this class directly. Known sub-classes are: - AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings, OrcWriteSettings, - ParquetWriteSettings + AvroWriteSettings, DelimitedTextWriteSettings, IcebergWriteSettings, JsonWriteSettings, + OrcWriteSettings, ParquetWriteSettings All required parameters must be populated in order to send to server. @@ -5550,6 +5553,7 @@ class FormatWriteSettings(_serialization.Model): "type": { "AvroWriteSettings": "AvroWriteSettings", "DelimitedTextWriteSettings": "DelimitedTextWriteSettings", + "IcebergWriteSettings": "IcebergWriteSettings", "JsonWriteSettings": "JsonWriteSettings", "OrcWriteSettings": "OrcWriteSettings", "ParquetWriteSettings": "ParquetWriteSettings", @@ -37354,6 +37358,248 @@ def __init__( self.query = query +class IcebergDataset(Dataset): + """Iceberg dataset. + + All required parameters must be populated in order to send to server. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. 
Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :vartype structure: JSON + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[JSON] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar location: The location of the iceberg storage. Setting a file name is not allowed for + iceberg format. + :vartype location: ~azure.mgmt.datafactory.models.DatasetLocation + """ + + _validation = { + "type": {"required": True}, + "linked_service_name": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "location": {"key": "typeProperties.location", "type": "DatasetLocation"}, + } + + def __init__( + self, + *, + linked_service_name: "_models.LinkedServiceReference", + additional_properties: Optional[Dict[str, JSON]] = None, + description: Optional[str] = None, + 
structure: Optional[JSON] = None, + schema: Optional[JSON] = None, + parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, + annotations: Optional[List[JSON]] = None, + folder: Optional["_models.DatasetFolder"] = None, + location: Optional["_models.DatasetLocation"] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: JSON + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[JSON] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword location: The location of the iceberg storage. Setting a file name is not allowed for + iceberg format. 
+ :paramtype location: ~azure.mgmt.datafactory.models.DatasetLocation + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type: str = "Iceberg" + self.location = location + + +class IcebergSink(CopySink): + """A copy activity Iceberg sink. + + All required parameters must be populated in order to send to server. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :vartype write_batch_size: JSON + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\\d+).)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :vartype write_batch_timeout: JSON + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :vartype sink_retry_count: JSON + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\\d+).)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :vartype sink_retry_wait: JSON + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :vartype max_concurrent_connections: JSON + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :vartype disable_metrics_collection: JSON + :ivar store_settings: Iceberg store settings. 
+ :vartype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :ivar format_settings: Iceberg format settings. + :vartype format_settings: ~azure.mgmt.datafactory.models.IcebergWriteSettings + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "store_settings": {"key": "storeSettings", "type": "StoreWriteSettings"}, + "format_settings": {"key": "formatSettings", "type": "IcebergWriteSettings"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + store_settings: Optional["_models.StoreWriteSettings"] = None, + format_settings: Optional["_models.IcebergWriteSettings"] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: JSON + :keyword write_batch_timeout: Write batch timeout. 
Type: string (or Expression with resultType + string), pattern: ((\\d+).)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: JSON + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: JSON + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\\d+).)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: JSON + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: JSON + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: JSON + :keyword store_settings: Iceberg store settings. + :paramtype store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :keyword format_settings: Iceberg format settings. + :paramtype format_settings: ~azure.mgmt.datafactory.models.IcebergWriteSettings + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type: str = "IcebergSink" + self.store_settings = store_settings + self.format_settings = format_settings + + +class IcebergWriteSettings(FormatWriteSettings): + """Iceberg write settings. + + All required parameters must be populated in order to send to server. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :vartype additional_properties: dict[str, JSON] + :ivar type: The write setting type. Required. + :vartype type: str + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + } + + def __init__(self, *, additional_properties: Optional[Dict[str, JSON]] = None, **kwargs: Any) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + """ + super().__init__(additional_properties=additional_properties, **kwargs) + self.type: str = "IcebergWriteSettings" + + class IfConditionActivity(ControlActivity): # pylint: disable=too-many-instance-attributes """This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the @@ -43347,7 +43593,7 @@ class MariaDBLinkedService(LinkedService): # pylint: disable=too-many-instance- :vartype annotations: list[JSON] :ivar driver_version: The version of the MariaDB driver. Type: string. V1 or empty for legacy driver, V2 for new driver. V1 can support connection string and property bag, V2 can only - support connection string. + support connection string. The legacy driver is scheduled for deprecation by October 2024. :vartype driver_version: JSON :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. @@ -43360,6 +43606,15 @@ class MariaDBLinkedService(LinkedService): # pylint: disable=too-many-instance- :vartype username: JSON :ivar database: Database name for connection. Type: string. :vartype database: JSON + :ivar ssl_mode: This option specifies whether the driver uses TLS encryption and verification + when connecting to MariaDB. E.g., SSLMode=<0/1/2/3/4>. 
Options: DISABLED (0) / PREFERRED (1) + (Default) / REQUIRED (2) / VERIFY_CA (3) / VERIFY_IDENTITY (4), REQUIRED (2) is recommended to + only allow connections encrypted with SSL/TLS. + :vartype ssl_mode: JSON + :ivar use_system_trust_store: This option specifies whether to use a CA certificate from the + system trust store, or from a specified PEM file. E.g. UseSystemTrustStore=<0/1>; Options: + Enabled (1) / Disabled (0) (Default). + :vartype use_system_trust_store: JSON :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are @@ -43385,6 +43640,8 @@ class MariaDBLinkedService(LinkedService): # pylint: disable=too-many-instance- "port": {"key": "typeProperties.port", "type": "object"}, "username": {"key": "typeProperties.username", "type": "object"}, "database": {"key": "typeProperties.database", "type": "object"}, + "ssl_mode": {"key": "typeProperties.sslMode", "type": "object"}, + "use_system_trust_store": {"key": "typeProperties.useSystemTrustStore", "type": "object"}, "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } @@ -43404,6 +43661,8 @@ def __init__( port: Optional[JSON] = None, username: Optional[JSON] = None, database: Optional[JSON] = None, + ssl_mode: Optional[JSON] = None, + use_system_trust_store: Optional[JSON] = None, password: Optional["_models.AzureKeyVaultSecretReference"] = None, encrypted_credential: Optional[str] = None, **kwargs: Any @@ -43424,7 +43683,7 @@ def __init__( :paramtype annotations: list[JSON] :keyword driver_version: The version of the MariaDB driver. Type: string. V1 or empty for legacy driver, V2 for new driver. V1 can support connection string and property bag, V2 can - only support connection string. 
+ only support connection string. The legacy driver is scheduled for deprecation by October 2024. :paramtype driver_version: JSON :keyword connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. @@ -43437,6 +43696,15 @@ def __init__( :paramtype username: JSON :keyword database: Database name for connection. Type: string. :paramtype database: JSON + :keyword ssl_mode: This option specifies whether the driver uses TLS encryption and + verification when connecting to MariaDB. E.g., SSLMode=<0/1/2/3/4>. Options: DISABLED (0) / + PREFERRED (1) (Default) / REQUIRED (2) / VERIFY_CA (3) / VERIFY_IDENTITY (4), REQUIRED (2) is + recommended to only allow connections encrypted with SSL/TLS. + :paramtype ssl_mode: JSON + :keyword use_system_trust_store: This option specifies whether to use a CA certificate from the + system trust store, or from a specified PEM file. E.g. UseSystemTrustStore=<0/1>; Options: + Enabled (1) / Disabled (0) (Default). + :paramtype use_system_trust_store: JSON :keyword password: The Azure key vault secret reference of password in connection string. :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials @@ -43459,6 +43727,8 @@ def __init__( self.port = port self.username = username self.database = database + self.ssl_mode = ssl_mode + self.use_system_trust_store = use_system_trust_store self.password = password self.encrypted_credential = encrypted_credential @@ -51912,6 +52182,8 @@ class PostgreSqlV2LinkedService(LinkedService): # pylint: disable=too-many-inst :vartype username: JSON :ivar database: Database name for connection. Type: string. Required. :vartype database: JSON + :ivar authentication_type: The authentication type to use. Type: string. Required. + :vartype authentication_type: JSON :ivar ssl_mode: SSL mode for connection. Type: integer. 
0: disable, 1:allow, 2: prefer, 3: require, 4: verify-ca, 5: verify-full. Type: integer. Required. :vartype ssl_mode: JSON @@ -51960,6 +52232,7 @@ class PostgreSqlV2LinkedService(LinkedService): # pylint: disable=too-many-inst "server": {"required": True}, "username": {"required": True}, "database": {"required": True}, + "authentication_type": {"required": True}, "ssl_mode": {"required": True}, } @@ -51975,6 +52248,7 @@ class PostgreSqlV2LinkedService(LinkedService): # pylint: disable=too-many-inst "port": {"key": "typeProperties.port", "type": "object"}, "username": {"key": "typeProperties.username", "type": "object"}, "database": {"key": "typeProperties.database", "type": "object"}, + "authentication_type": {"key": "typeProperties.authenticationType", "type": "object"}, "ssl_mode": {"key": "typeProperties.sslMode", "type": "object"}, "schema": {"key": "typeProperties.schema", "type": "object"}, "pooling": {"key": "typeProperties.pooling", "type": "object"}, @@ -51998,6 +52272,7 @@ def __init__( # pylint: disable=too-many-locals server: JSON, username: JSON, database: JSON, + authentication_type: JSON, ssl_mode: JSON, additional_properties: Optional[Dict[str, JSON]] = None, version: Optional[str] = None, @@ -52044,6 +52319,8 @@ def __init__( # pylint: disable=too-many-locals :paramtype username: JSON :keyword database: Database name for connection. Type: string. Required. :paramtype database: JSON + :keyword authentication_type: The authentication type to use. Type: string. Required. + :paramtype authentication_type: JSON :keyword ssl_mode: SSL mode for connection. Type: integer. 0: disable, 1:allow, 2: prefer, 3: require, 4: verify-ca, 5: verify-full. Type: integer. Required. 
:paramtype ssl_mode: JSON @@ -52102,6 +52379,7 @@ def __init__( # pylint: disable=too-many-locals self.port = port self.username = username self.database = database + self.authentication_type = authentication_type self.ssl_mode = ssl_mode self.schema = schema self.pooling = pooling @@ -57742,6 +58020,9 @@ class SalesforceV2Source(TabularSource): # pylint: disable=too-many-instance-at :ivar include_deleted_objects: This property control whether query result contains Deleted objects. Default is false. Type: boolean (or Expression with resultType boolean). :vartype include_deleted_objects: JSON + :ivar page_size: Page size for each http request, too large pageSize will caused timeout, + default 300,000. Type: integer (or Expression with resultType integer). + :vartype page_size: JSON """ _validation = { @@ -57760,6 +58041,7 @@ class SalesforceV2Source(TabularSource): # pylint: disable=too-many-instance-at "soql_query": {"key": "SOQLQuery", "type": "object"}, "query": {"key": "query", "type": "object"}, "include_deleted_objects": {"key": "includeDeletedObjects", "type": "object"}, + "page_size": {"key": "pageSize", "type": "object"}, } def __init__( @@ -57775,6 +58057,7 @@ def __init__( soql_query: Optional[JSON] = None, query: Optional[JSON] = None, include_deleted_objects: Optional[JSON] = None, + page_size: Optional[JSON] = None, **kwargs: Any ) -> None: """ @@ -57812,6 +58095,9 @@ def __init__( :keyword include_deleted_objects: This property control whether query result contains Deleted objects. Default is false. Type: boolean (or Expression with resultType boolean). :paramtype include_deleted_objects: JSON + :keyword page_size: Page size for each http request, too large pageSize will caused timeout, + default 300,000. Type: integer (or Expression with resultType integer). 
+ :paramtype page_size: JSON """ super().__init__( additional_properties=additional_properties, @@ -57827,6 +58113,7 @@ def __init__( self.soql_query = soql_query self.query = query self.include_deleted_objects = include_deleted_objects + self.page_size = page_size class SapBwCubeDataset(Dataset): @@ -62363,6 +62650,9 @@ class ServiceNowV2Source(TabularSource): :vartype additional_columns: JSON :ivar expression: Expression to filter data from source. :vartype expression: ~azure.mgmt.datafactory.models.ExpressionV2 + :ivar page_size: Page size of the result. Type: integer (or Expression with resultType + integer). + :vartype page_size: JSON """ _validation = { @@ -62379,6 +62669,7 @@ class ServiceNowV2Source(TabularSource): "query_timeout": {"key": "queryTimeout", "type": "object"}, "additional_columns": {"key": "additionalColumns", "type": "object"}, "expression": {"key": "expression", "type": "ExpressionV2"}, + "page_size": {"key": "pageSize", "type": "object"}, } def __init__( @@ -62392,6 +62683,7 @@ def __init__( query_timeout: Optional[JSON] = None, additional_columns: Optional[JSON] = None, expression: Optional["_models.ExpressionV2"] = None, + page_size: Optional[JSON] = None, **kwargs: Any ) -> None: """ @@ -62418,6 +62710,9 @@ def __init__( :paramtype additional_columns: JSON :keyword expression: Expression to filter data from source. :paramtype expression: ~azure.mgmt.datafactory.models.ExpressionV2 + :keyword page_size: Page size of the result. Type: integer (or Expression with resultType + integer). 
+ :paramtype page_size: JSON """ super().__init__( additional_properties=additional_properties, @@ -62431,6 +62726,7 @@ def __init__( ) self.type: str = "ServiceNowV2Source" self.expression = expression + self.page_size = page_size class ServicePrincipalCredential(Credential): @@ -64633,6 +64929,8 @@ class SnowflakeV2LinkedService(LinkedService): # pylint: disable=too-many-insta :ivar private_key_passphrase: The Azure key vault secret reference of private key password for KeyPair auth with encrypted private key. :vartype private_key_passphrase: ~azure.mgmt.datafactory.models.SecretBase + :ivar host: The host name of the Snowflake account. + :vartype host: JSON :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. :vartype encrypted_credential: str @@ -64665,6 +64963,7 @@ class SnowflakeV2LinkedService(LinkedService): # pylint: disable=too-many-insta "scope": {"key": "typeProperties.scope", "type": "object"}, "private_key": {"key": "typeProperties.privateKey", "type": "SecretBase"}, "private_key_passphrase": {"key": "typeProperties.privateKeyPassphrase", "type": "SecretBase"}, + "host": {"key": "typeProperties.host", "type": "object"}, "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } @@ -64689,6 +64988,7 @@ def __init__( scope: Optional[JSON] = None, private_key: Optional["_models.SecretBase"] = None, private_key_passphrase: Optional["_models.SecretBase"] = None, + host: Optional[JSON] = None, encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: @@ -64738,6 +65038,8 @@ def __init__( :keyword private_key_passphrase: The Azure key vault secret reference of private key password for KeyPair auth with encrypted private key. :paramtype private_key_passphrase: ~azure.mgmt.datafactory.models.SecretBase + :keyword host: The host name of the Snowflake account. 
+ :paramtype host: JSON :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. :paramtype encrypted_credential: str @@ -64764,6 +65066,7 @@ def __init__( self.scope = scope self.private_key = private_key self.private_key_passphrase = private_key_passphrase + self.host = host self.encrypted_credential = encrypted_credential diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py index 8854b1aced51..7e91c6629c12 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_factories_operations.py @@ -40,30 +40,6 @@ _SERIALIZER.client_side_validation = False -def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2018-06-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories") - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - def build_configure_factory_repo_request(location_id: str, subscription_id: 
str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -405,80 +381,6 @@ def __init__(self, *args, **kwargs): self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace - def list(self, **kwargs: Any) -> Iterable["_models.Factory"]: - """Lists factories under the specified subscription. - - :return: An iterator like instance of either Factory or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.Factory] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) - cls: ClsType[_models.FactoryListResponse] = kwargs.pop("cls", None) - - error_map: MutableMapping[int, Type[HttpResponseError]] = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_list_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, 
_parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - _request.method = "GET" - return _request - - def extract_data(pipeline_response): - deserialized = self._deserialize("FactoryListResponse", pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - @overload def configure_factory_repo( self, diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/activity_runs_query_by_pipeline_run.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/activity_runs_query_by_pipeline_run.py index f7210d13bc49..fdaa39cd818f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/activity_runs_query_by_pipeline_run.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/activity_runs_query_by_pipeline_run.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/approve_reject_private_endpoint_connection.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/approve_reject_private_endpoint_connection.py index 0c5e7351c792..76308791c231 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/approve_reject_private_endpoint_connection.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/approve_reject_private_endpoint_connection.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_create.py index 5a95fbdddf49..349a0789c6c2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_create.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_update.py index 1e6c4fb73591..b867cedcea7a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_update.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/change_data_capture_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/credentials_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/credentials_create.py index 549cfaf78c76..a70172475881 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/credentials_create.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/credentials_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_add_data_flow.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_add_data_flow.py index 9666302164e4..3d23c14295d3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_add_data_flow.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_add_data_flow.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_create.py index 5a4b016fe114..4e49e6ac2bd3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_create.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_delete.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_delete.py index 5993ff988b43..9301b76c91cb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_delete.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_delete.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_execute_command.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_execute_command.py index de450293e5cd..45d1dd3421f1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_execute_command.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flow_debug_session_execute_command.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_create.py index 7b1066d40a93..1bcf28e2eac9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_create.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_update.py index a01884eaf599..9be25278b286 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_update.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/data_flows_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_create.py index b0b8d14dc287..a7bd124bf938 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_create.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_update.py index 0a1e9ff1b746..d4e74e1b17e5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_update.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/datasets_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_get_feature_value.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_get_feature_value.py index 2c82e63bca10..d7b2b226a4d8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_get_feature_value.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_get_feature_value.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_get_feature_value_by_factory.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_get_feature_value_by_factory.py index f3482176d35a..6965f1ec6b98 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_get_feature_value_by_factory.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_get_feature_value_by_factory.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_query_feature_values_by_factory.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_query_feature_values_by_factory.py index 7b455f13c50d..fa3a9ed11b31 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_query_feature_values_by_factory.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/exposure_control_query_feature_values_by_factory.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_configure_factory_repo.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_configure_factory_repo.py index 662e787a01d0..594252a37e67 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_configure_factory_repo.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_configure_factory_repo.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_create_or_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_create_or_update.py index 5aac01800329..b4bc70cb173e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_create_or_update.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_create_or_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_get_data_plane_access.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_get_data_plane_access.py index 14650f48bf98..b720d931d55b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_get_data_plane_access.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_get_data_plane_access.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_get_git_hub_access_token.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_get_git_hub_access_token.py index ee93ad533db9..b009174f37b7 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_get_git_hub_access_token.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_get_git_hub_access_token.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_list.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_list.py deleted file mode 100644 index eea9551b7ce0..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_list.py +++ /dev/null @@ -1,40 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from azure.identity import DefaultAzureCredential - -from azure.mgmt.datafactory import DataFactoryManagementClient - -""" -# PREREQUISITES - pip install azure-identity - pip install azure-mgmt-datafactory -# USAGE - python factories_list.py - - Before run the sample, please set the values of the client ID, tenant ID and client secret - of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, - AZURE_CLIENT_SECRET. For more info about how to get the value, please see: - https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal -""" - - -def main(): - client = DataFactoryManagementClient( - credential=DefaultAzureCredential(), - subscription_id="12345678-1234-1234-1234-12345678abc", - ) - - response = client.factories.list() - for item in response: - print(item) - - -# x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_List.json -if __name__ == "__main__": - main() diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_update.py index 3aa6f283e190..bb6131a7a85d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_update.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/factories_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_create.py index bb23777a1d7b..7491d9d3554f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_create.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_update.py index ebecf6c2f69f..d47950c8bef2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_update.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/global_parameters_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtime_nodes_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtime_nodes_update.py index 71e8b2dfa9e1..fb410f48bb70 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtime_nodes_update.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtime_nodes_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_create.py index cc095f5daa0e..687cf840b7fc 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_create.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_create_linked_integration_runtime.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_create_linked_integration_runtime.py index 636ae714e35c..9bd9ea5feb65 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_create_linked_integration_runtime.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_create_linked_integration_runtime.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_regenerate_auth_key.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_regenerate_auth_key.py index 59dc78a72d7f..121efc4938d0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_regenerate_auth_key.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_regenerate_auth_key.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_remove_links.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_remove_links.py index 2b73df707706..25320eb81748 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_remove_links.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_remove_links.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_update.py index 5a6e753895c7..969dfc802366 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_update.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/integration_runtimes_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_create.py index 54721852357a..5acc0528ae9c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_create.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_update.py index f3e42ef14904..eb068c8a9e81 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_update.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/linked_services_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_private_endpoints_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_private_endpoints_create.py index 78819449f547..409d7d263b84 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_private_endpoints_create.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_private_endpoints_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_virtual_networks_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_virtual_networks_create.py index 4a51cc6f510e..9a6a5030c0bb 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_virtual_networks_create.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/managed_virtual_networks_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipeline_runs_query_by_factory.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipeline_runs_query_by_factory.py index 301e27780897..e1e7d62f42e8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipeline_runs_query_by_factory.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipeline_runs_query_by_factory.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_create.py index efe6b9116844..758ae41234ed 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_create.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_update.py index 39c504b227bb..b7c5398a385d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_update.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/pipelines_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/trigger_runs_query_by_factory.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/trigger_runs_query_by_factory.py index 564a942c9aa7..96e750dc81b2 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/trigger_runs_query_by_factory.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/trigger_runs_query_by_factory.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_create.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_create.py index df21f8434926..d80e3503c95c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_create.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_create.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_query_by_factory.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_query_by_factory.py index 9f9ae0dc87e1..8b8c9993de2c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_query_by_factory.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_query_by_factory.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_update.py b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_update.py index 33aeccb309a8..a1f03c5c2bf9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_update.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_samples/triggers_update.py @@ -6,8 +6,6 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, IO, Union - from azure.identity import DefaultAzureCredential from azure.mgmt.datafactory import DataFactoryManagementClient diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations.py index 94dbeb5cf9fb..aff76ec0be42 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations.py @@ -18,16 +18,6 @@ class TestDataFactoryManagementFactoriesOperations(AzureMgmtRecordedTestCase): def setup_method(self, method): self.client = self.create_mgmt_client(DataFactoryManagementClient) - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy - def test_list(self, resource_group): - response = self.client.factories.list( - api_version="2018-06-01", - ) - result = [r for r in response] - # please add some check logic here by yourself - # ... 
- @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy def test_configure_factory_repo(self, resource_group): diff --git a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations_async.py b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations_async.py index fd08741d55f9..cb5d06acc50c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations_async.py +++ b/sdk/datafactory/azure-mgmt-datafactory/generated_tests/test_data_factory_management_factories_operations_async.py @@ -19,16 +19,6 @@ class TestDataFactoryManagementFactoriesOperationsAsync(AzureMgmtRecordedTestCas def setup_method(self, method): self.client = self.create_mgmt_client(DataFactoryManagementClient, is_async=True) - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) - @recorded_by_proxy_async - async def test_list(self, resource_group): - response = self.client.factories.list( - api_version="2018-06-01", - ) - result = [r async for r in response] - # please add some check logic here by yourself - # ... - @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) @recorded_by_proxy_async async def test_configure_factory_repo(self, resource_group):