diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md b/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md
index ebcffa68fe80..6f9b5368c840 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/CHANGELOG.md
@@ -1,14 +1,48 @@
# Release History
-## 1.2.0-beta.1 (Unreleased)
+## 1.2.0-beta.1 (2025-04-30)
+
+- Azure Resource Manager DataFactory client library for Java. This package contains Microsoft Azure SDK for DataFactory Management SDK. The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. Package tag package-2018-06. For documentation on how to use this package, please see [Azure Management Libraries for Java](https://aka.ms/azsdk/java/mgmt).
### Features Added
-### Breaking Changes
+* `models.AmazonRdsForOracleAuthenticationType` was added
+
+#### `models.AmazonRdsForOracleLinkedService` was modified
+
+* `cryptoChecksumClient()` was added
+* `withEncryptionClient(java.lang.Object)` was added
+* `fetchSize()` was added
+* `withInitialLobFetchSize(java.lang.Object)` was added
+* `encryptionTypesClient()` was added
+* `cryptoChecksumTypesClient()` was added
+* `supportV1DataTypes()` was added
+* `withStatementCacheSize(java.lang.Object)` was added
+* `withFetchSize(java.lang.Object)` was added
+* `username()` was added
+* `withCryptoChecksumTypesClient(java.lang.Object)` was added
+* `enableBulkLoad()` was added
+* `server()` was added
+* `withEncryptionTypesClient(java.lang.Object)` was added
+* `withFetchTswtzAsTimestamp(java.lang.Object)` was added
+* `withAuthenticationType(models.AmazonRdsForOracleAuthenticationType)` was added
+* `withServer(java.lang.Object)` was added
+* `encryptionClient()` was added
+* `statementCacheSize()` was added
+* `withEnableBulkLoad(java.lang.Object)` was added
+* `authenticationType()` was added
+* `withSupportV1DataTypes(java.lang.Object)` was added
+* `fetchTswtzAsTimestamp()` was added
+* `withCryptoChecksumClient(java.lang.Object)` was added
+* `withUsername(java.lang.Object)` was added
+* `initializationString()` was added
+* `withInitializationString(java.lang.Object)` was added
+* `initialLobFetchSize()` was added
-### Bugs Fixed
+#### `models.AzureDatabricksLinkedService` was modified
-### Other Changes
+* `withDataSecurityMode(java.lang.Object)` was added
+* `dataSecurityMode()` was added
## 1.1.0 (2025-04-10)
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/README.md b/sdk/datafactory/azure-resourcemanager-datafactory/README.md
index cd3b1148f9fe..a8f3426705b9 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/README.md
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/README.md
@@ -32,7 +32,7 @@ Various documentation is available to help you get started
     <groupId>com.azure.resourcemanager</groupId>
     <artifactId>azure-resourcemanager-datafactory</artifactId>
-    <version>1.1.0</version>
+    <version>1.2.0-beta.1</version>
```
[//]: # ({x-version-update-end})
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/SAMPLE.md b/sdk/datafactory/azure-resourcemanager-datafactory/SAMPLE.md
index 40812bb6077e..270be640be82 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/SAMPLE.md
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/SAMPLE.md
@@ -3,1084 +3,1163 @@
## ActivityRuns
-- [QueryByPipelineRunSync](#activityruns_querybypipelinerunsync)
+- [QueryByPipelineRun](#activityruns_querybypipelinerun)
## ChangeDataCapture
-- [CreateOrUpdateSync](#changedatacapture_createorupdatesync)
-- [DeleteSync](#changedatacapture_deletesync)
-- [GetSync](#changedatacapture_getsync)
-- [ListByFactorySync](#changedatacapture_listbyfactorysync)
-- [StartSync](#changedatacapture_startsync)
-- [StatusSync](#changedatacapture_statussync)
-- [StopSync](#changedatacapture_stopsync)
+- [CreateOrUpdate](#changedatacapture_createorupdate)
+- [Delete](#changedatacapture_delete)
+- [Get](#changedatacapture_get)
+- [ListByFactory](#changedatacapture_listbyfactory)
+- [Start](#changedatacapture_start)
+- [Status](#changedatacapture_status)
+- [Stop](#changedatacapture_stop)
## CredentialOperations
-- [CreateOrUpdateSync](#credentialoperations_createorupdatesync)
-- [DeleteSync](#credentialoperations_deletesync)
-- [GetSync](#credentialoperations_getsync)
-- [ListByFactorySync](#credentialoperations_listbyfactorysync)
+- [CreateOrUpdate](#credentialoperations_createorupdate)
+- [Delete](#credentialoperations_delete)
+- [Get](#credentialoperations_get)
+- [ListByFactory](#credentialoperations_listbyfactory)
## DataFlowDebugSession
-- [AddDataFlowSync](#dataflowdebugsession_adddataflowsync)
+- [AddDataFlow](#dataflowdebugsession_adddataflow)
- [Create](#dataflowdebugsession_create)
-- [DeleteSync](#dataflowdebugsession_deletesync)
+- [Delete](#dataflowdebugsession_delete)
- [ExecuteCommand](#dataflowdebugsession_executecommand)
-- [QueryByFactorySync](#dataflowdebugsession_querybyfactorysync)
+- [QueryByFactory](#dataflowdebugsession_querybyfactory)
## DataFlows
-- [CreateOrUpdateSync](#dataflows_createorupdatesync)
-- [DeleteSync](#dataflows_deletesync)
-- [GetSync](#dataflows_getsync)
-- [ListByFactorySync](#dataflows_listbyfactorysync)
+- [CreateOrUpdate](#dataflows_createorupdate)
+- [Delete](#dataflows_delete)
+- [Get](#dataflows_get)
+- [ListByFactory](#dataflows_listbyfactory)
## Datasets
-- [CreateOrUpdateSync](#datasets_createorupdatesync)
-- [DeleteSync](#datasets_deletesync)
-- [GetSync](#datasets_getsync)
-- [ListByFactorySync](#datasets_listbyfactorysync)
+- [CreateOrUpdate](#datasets_createorupdate)
+- [Delete](#datasets_delete)
+- [Get](#datasets_get)
+- [ListByFactory](#datasets_listbyfactory)
## ExposureControl
-- [GetFeatureValueByFactorySync](#exposurecontrol_getfeaturevaluebyfactorysync)
-- [GetFeatureValueSync](#exposurecontrol_getfeaturevaluesync)
-- [QueryFeatureValuesByFactorySync](#exposurecontrol_queryfeaturevaluesbyfactorysync)
+- [GetFeatureValue](#exposurecontrol_getfeaturevalue)
+- [GetFeatureValueByFactory](#exposurecontrol_getfeaturevaluebyfactory)
+- [QueryFeatureValuesByFactory](#exposurecontrol_queryfeaturevaluesbyfactory)
## Factories
-- [ConfigureFactoryRepoSync](#factories_configurefactoryreposync)
-- [CreateOrUpdateSync](#factories_createorupdatesync)
-- [DeleteSync](#factories_deletesync)
-- [GetByResourceGroupSync](#factories_getbyresourcegroupsync)
-- [GetDataPlaneAccessSync](#factories_getdataplaneaccesssync)
-- [GetGitHubAccessTokenSync](#factories_getgithubaccesstokensync)
-- [ListByResourceGroupSync](#factories_listbyresourcegroupsync)
-- [ListSync](#factories_listsync)
-- [UpdateSync](#factories_updatesync)
+- [ConfigureFactoryRepo](#factories_configurefactoryrepo)
+- [CreateOrUpdate](#factories_createorupdate)
+- [Delete](#factories_delete)
+- [GetByResourceGroup](#factories_getbyresourcegroup)
+- [GetDataPlaneAccess](#factories_getdataplaneaccess)
+- [GetGitHubAccessToken](#factories_getgithubaccesstoken)
+- [List](#factories_list)
+- [ListByResourceGroup](#factories_listbyresourcegroup)
+- [Update](#factories_update)
## GlobalParameters
-- [CreateOrUpdateSync](#globalparameters_createorupdatesync)
-- [DeleteSync](#globalparameters_deletesync)
-- [GetSync](#globalparameters_getsync)
-- [ListByFactorySync](#globalparameters_listbyfactorysync)
+- [CreateOrUpdate](#globalparameters_createorupdate)
+- [Delete](#globalparameters_delete)
+- [Get](#globalparameters_get)
+- [ListByFactory](#globalparameters_listbyfactory)
## IntegrationRuntimeNodes
-- [DeleteSync](#integrationruntimenodes_deletesync)
-- [GetIpAddressSync](#integrationruntimenodes_getipaddresssync)
-- [GetSync](#integrationruntimenodes_getsync)
-- [UpdateSync](#integrationruntimenodes_updatesync)
+- [Delete](#integrationruntimenodes_delete)
+- [Get](#integrationruntimenodes_get)
+- [GetIpAddress](#integrationruntimenodes_getipaddress)
+- [Update](#integrationruntimenodes_update)
## IntegrationRuntimeObjectMetadata
-- [GetSync](#integrationruntimeobjectmetadata_getsync)
+- [Get](#integrationruntimeobjectmetadata_get)
- [Refresh](#integrationruntimeobjectmetadata_refresh)
## IntegrationRuntimes
-- [CreateLinkedIntegrationRuntimeSync](#integrationruntimes_createlinkedintegrationruntimesync)
-- [CreateOrUpdateSync](#integrationruntimes_createorupdatesync)
-- [DeleteSync](#integrationruntimes_deletesync)
-- [GetConnectionInfoSync](#integrationruntimes_getconnectioninfosync)
-- [GetMonitoringDataSync](#integrationruntimes_getmonitoringdatasync)
-- [GetStatusSync](#integrationruntimes_getstatussync)
-- [GetSync](#integrationruntimes_getsync)
-- [ListAuthKeysSync](#integrationruntimes_listauthkeyssync)
-- [ListByFactorySync](#integrationruntimes_listbyfactorysync)
-- [ListOutboundNetworkDependenciesEndpointsSync](#integrationruntimes_listoutboundnetworkdependenciesendpointssync)
-- [RegenerateAuthKeySync](#integrationruntimes_regenerateauthkeysync)
-- [RemoveLinksSync](#integrationruntimes_removelinkssync)
+- [CreateLinkedIntegrationRuntime](#integrationruntimes_createlinkedintegrationruntime)
+- [CreateOrUpdate](#integrationruntimes_createorupdate)
+- [Delete](#integrationruntimes_delete)
+- [Get](#integrationruntimes_get)
+- [GetConnectionInfo](#integrationruntimes_getconnectioninfo)
+- [GetMonitoringData](#integrationruntimes_getmonitoringdata)
+- [GetStatus](#integrationruntimes_getstatus)
+- [ListAuthKeys](#integrationruntimes_listauthkeys)
+- [ListByFactory](#integrationruntimes_listbyfactory)
+- [ListOutboundNetworkDependenciesEndpoints](#integrationruntimes_listoutboundnetworkdependenciesendpoints)
+- [RegenerateAuthKey](#integrationruntimes_regenerateauthkey)
+- [RemoveLinks](#integrationruntimes_removelinks)
- [Start](#integrationruntimes_start)
- [Stop](#integrationruntimes_stop)
-- [SyncCredentialsSync](#integrationruntimes_synccredentialssync)
-- [UpdateSync](#integrationruntimes_updatesync)
-- [UpgradeSync](#integrationruntimes_upgradesync)
+- [SyncCredentials](#integrationruntimes_synccredentials)
+- [Update](#integrationruntimes_update)
+- [Upgrade](#integrationruntimes_upgrade)
## LinkedServices
-- [CreateOrUpdateSync](#linkedservices_createorupdatesync)
-- [DeleteSync](#linkedservices_deletesync)
-- [GetSync](#linkedservices_getsync)
-- [ListByFactorySync](#linkedservices_listbyfactorysync)
+- [CreateOrUpdate](#linkedservices_createorupdate)
+- [Delete](#linkedservices_delete)
+- [Get](#linkedservices_get)
+- [ListByFactory](#linkedservices_listbyfactory)
## ManagedPrivateEndpoints
-- [CreateOrUpdateSync](#managedprivateendpoints_createorupdatesync)
-- [DeleteSync](#managedprivateendpoints_deletesync)
-- [GetSync](#managedprivateendpoints_getsync)
-- [ListByFactorySync](#managedprivateendpoints_listbyfactorysync)
+- [CreateOrUpdate](#managedprivateendpoints_createorupdate)
+- [Delete](#managedprivateendpoints_delete)
+- [Get](#managedprivateendpoints_get)
+- [ListByFactory](#managedprivateendpoints_listbyfactory)
## ManagedVirtualNetworks
-- [CreateOrUpdateSync](#managedvirtualnetworks_createorupdatesync)
-- [GetSync](#managedvirtualnetworks_getsync)
-- [ListByFactorySync](#managedvirtualnetworks_listbyfactorysync)
+- [CreateOrUpdate](#managedvirtualnetworks_createorupdate)
+- [Get](#managedvirtualnetworks_get)
+- [ListByFactory](#managedvirtualnetworks_listbyfactory)
## Operations
-- [ListSync](#operations_listsync)
+- [List](#operations_list)
## PipelineRuns
-- [CancelSync](#pipelineruns_cancelsync)
-- [GetSync](#pipelineruns_getsync)
-- [QueryByFactorySync](#pipelineruns_querybyfactorysync)
+- [Cancel](#pipelineruns_cancel)
+- [Get](#pipelineruns_get)
+- [QueryByFactory](#pipelineruns_querybyfactory)
## Pipelines
-- [CreateOrUpdateSync](#pipelines_createorupdatesync)
-- [CreateRunSync](#pipelines_createrunsync)
-- [DeleteSync](#pipelines_deletesync)
-- [GetSync](#pipelines_getsync)
-- [ListByFactorySync](#pipelines_listbyfactorysync)
+- [CreateOrUpdate](#pipelines_createorupdate)
+- [CreateRun](#pipelines_createrun)
+- [Delete](#pipelines_delete)
+- [Get](#pipelines_get)
+- [ListByFactory](#pipelines_listbyfactory)
## PrivateEndPointConnections
-- [ListByFactorySync](#privateendpointconnections_listbyfactorysync)
+- [ListByFactory](#privateendpointconnections_listbyfactory)
## PrivateEndpointConnectionOperation
-- [CreateOrUpdateSync](#privateendpointconnectionoperation_createorupdatesync)
-- [DeleteSync](#privateendpointconnectionoperation_deletesync)
-- [GetSync](#privateendpointconnectionoperation_getsync)
+- [CreateOrUpdate](#privateendpointconnectionoperation_createorupdate)
+- [Delete](#privateendpointconnectionoperation_delete)
+- [Get](#privateendpointconnectionoperation_get)
## PrivateLinkResources
-- [GetSync](#privatelinkresources_getsync)
+- [Get](#privatelinkresources_get)
## TriggerRuns
-- [CancelSync](#triggerruns_cancelsync)
-- [QueryByFactorySync](#triggerruns_querybyfactorysync)
-- [RerunSync](#triggerruns_rerunsync)
+- [Cancel](#triggerruns_cancel)
+- [QueryByFactory](#triggerruns_querybyfactory)
+- [Rerun](#triggerruns_rerun)
## Triggers
-- [CreateOrUpdateSync](#triggers_createorupdatesync)
-- [DeleteSync](#triggers_deletesync)
-- [GetEventSubscriptionStatusSync](#triggers_geteventsubscriptionstatussync)
-- [GetSync](#triggers_getsync)
-- [ListByFactorySync](#triggers_listbyfactorysync)
-- [QueryByFactorySync](#triggers_querybyfactorysync)
+- [CreateOrUpdate](#triggers_createorupdate)
+- [Delete](#triggers_delete)
+- [Get](#triggers_get)
+- [GetEventSubscriptionStatus](#triggers_geteventsubscriptionstatus)
+- [ListByFactory](#triggers_listbyfactory)
+- [QueryByFactory](#triggers_querybyfactory)
- [Start](#triggers_start)
- [Stop](#triggers_stop)
- [SubscribeToEvents](#triggers_subscribetoevents)
- [UnsubscribeFromEvents](#triggers_unsubscribefromevents)
-### ActivityRuns_QueryByPipelineRunSync
+### ActivityRuns_QueryByPipelineRun
```java
/**
- * Samples for Factories DeleteSync.
+ * Samples for IntegrationRuntimeNodes GetIpAddress.
*/
-public final class FactoriesDeleteSyncSamples {
+public final class IntegrationRuntimeNodesGetIpAddressSamples {
/*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Delete.json
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * IntegrationRuntimeNodes_GetIpAddress.json
*/
/**
- * Sample code: Factories_Delete.
+ * Sample code: IntegrationRuntimeNodes_GetIpAddress.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void factoriesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.factories()
- .deleteByResourceGroupWithResponse("exampleResourceGroup", "exampleFactoryName",
- com.azure.core.util.Context.NONE);
+ public static void
+ integrationRuntimeNodesGetIpAddress(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.integrationRuntimeNodes()
+ .getIpAddressWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
+ "Node_1", com.azure.core.util.Context.NONE);
}
}
```
-### ChangeDataCapture_CreateOrUpdateSync
+### ChangeDataCapture_CreateOrUpdate
```java
/**
- * Samples for IntegrationRuntimes GetSync.
+ * Samples for IntegrationRuntimes GetConnectionInfo.
*/
-public final class IntegrationRuntimesGetSyncSamples {
+public final class IntegrationRuntimesGetConnectionInfoSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimes_Get.json
+ * IntegrationRuntimes_GetConnectionInfo.json
*/
/**
- * Sample code: IntegrationRuntimes_Get.
+ * Sample code: IntegrationRuntimes_GetConnectionInfo.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void integrationRuntimesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ public static void
+ integrationRuntimesGetConnectionInfo(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
manager.integrationRuntimes()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", null,
+ .getConnectionInfoWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
com.azure.core.util.Context.NONE);
}
}
```
-### ChangeDataCapture_DeleteSync
+### ChangeDataCapture_Delete
```java
+import com.azure.core.management.serializer.SerializerFactory;
+import com.azure.core.util.serializer.SerializerEncoding;
+import com.azure.resourcemanager.datafactory.models.AzureBlobStorageLinkedService;
+import com.azure.resourcemanager.datafactory.models.AzureBlobStorageLocation;
+import com.azure.resourcemanager.datafactory.models.DataFlowDebugPackage;
+import com.azure.resourcemanager.datafactory.models.DataFlowDebugPackageDebugSettings;
+import com.azure.resourcemanager.datafactory.models.DataFlowDebugResource;
+import com.azure.resourcemanager.datafactory.models.DataFlowSource;
+import com.azure.resourcemanager.datafactory.models.DataFlowSourceSetting;
+import com.azure.resourcemanager.datafactory.models.DatasetDebugResource;
+import com.azure.resourcemanager.datafactory.models.DatasetReference;
+import com.azure.resourcemanager.datafactory.models.DelimitedTextDataset;
+import com.azure.resourcemanager.datafactory.models.LinkedServiceDebugResource;
+import com.azure.resourcemanager.datafactory.models.LinkedServiceReference;
+import com.azure.resourcemanager.datafactory.models.MappingDataFlow;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+
/**
- * Samples for PrivateEndPointConnections ListByFactorySync.
+ * Samples for DataFlowDebugSession AddDataFlow.
*/
-public final class PrivateEndPointConnectionsListByFactorySyncSamples {
+public final class DataFlowDebugSessionAddDataFlowSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * PrivateEndPointConnections_ListByFactory.json
+ * DataFlowDebugSession_AddDataFlow.json
*/
/**
- * Sample code: privateEndPointConnections_ListByFactory.
+ * Sample code: DataFlowDebugSession_AddDataFlow.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void
- privateEndPointConnectionsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.privateEndPointConnections()
- .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
+ public static void dataFlowDebugSessionAddDataFlow(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
+ throws IOException {
+ manager.dataFlowDebugSessions()
+ .addDataFlowWithResponse("exampleResourceGroup", "exampleFactoryName", new DataFlowDebugPackage()
+ .withSessionId("f06ed247-9d07-49b2-b05e-2cb4a2fc871e")
+ .withDataFlow(new DataFlowDebugResource().withName("dataflow1")
+ .withProperties(new MappingDataFlow()
+ .withSources(Arrays.asList(new DataFlowSource().withName("source1")
+ .withDataset(new DatasetReference().withReferenceName("DelimitedText2"))))
+ .withSinks(Arrays.asList())
+ .withTransformations(Arrays.asList())
+ .withScript(
+ "\n\nsource(output(\n\t\tColumn_1 as string\n\t),\n\tallowSchemaDrift: true,\n\tvalidateSchema: false) ~> source1")))
+ .withDatasets(Arrays.asList(new DatasetDebugResource().withName("dataset1")
+ .withProperties(new DelimitedTextDataset()
+ .withSchema(SerializerFactory.createDefaultManagementSerializerAdapter()
+ .deserialize("[{\"type\":\"String\"}]", Object.class, SerializerEncoding.JSON))
+ .withLinkedServiceName(new LinkedServiceReference().withReferenceName("linkedService5"))
+ .withAnnotations(Arrays.asList())
+ .withLocation(new AzureBlobStorageLocation().withFileName("Ansiencoding.csv")
+ .withContainer("dataflow-sample-data"))
+ .withColumnDelimiter(",")
+ .withQuoteChar("\"")
+ .withEscapeChar("\\")
+ .withFirstRowAsHeader(true))))
+ .withLinkedServices(Arrays.asList(new LinkedServiceDebugResource().withName("linkedService1")
+ .withProperties(new AzureBlobStorageLinkedService().withAnnotations(Arrays.asList())
+ .withConnectionString(
+ "DefaultEndpointsProtocol=https;AccountName=;EndpointSuffix=core.windows.net;")
+ .withEncryptedCredential("fakeTokenPlaceholder"))))
+ .withDebugSettings(new DataFlowDebugPackageDebugSettings()
+ .withSourceSettings(Arrays.asList(
+ new DataFlowSourceSetting().withSourceName("source1")
+ .withRowLimit(1000)
+ .withAdditionalProperties(mapOf()),
+ new DataFlowSourceSetting().withSourceName("source2")
+ .withRowLimit(222)
+ .withAdditionalProperties(mapOf())))
+ .withParameters(mapOf("sourcePath", "Toy"))
+ .withDatasetParameters(SerializerFactory.createDefaultManagementSerializerAdapter()
+ .deserialize("{\"Movies\":{\"path\":\"abc\"},\"Output\":{\"time\":\"def\"}}", Object.class,
+ SerializerEncoding.JSON)))
+ .withAdditionalProperties(mapOf()), com.azure.core.util.Context.NONE);
+ }
+
+ // Use "Map.of" if available
+ @SuppressWarnings("unchecked")
+    private static <T> Map<String, T> mapOf(Object... inputs) {
+        Map<String, T> map = new HashMap<>();
+        for (int i = 0; i < inputs.length; i += 2) {
+            String key = (String) inputs[i];
+            T value = (T) inputs[i + 1];
+ map.put(key, value);
+ }
+ return map;
}
}
```
-### ChangeDataCapture_GetSync
+### ChangeDataCapture_Get
```java
-import com.azure.resourcemanager.datafactory.models.DataFlowResource;
-import com.azure.resourcemanager.datafactory.models.DataFlowSink;
-import com.azure.resourcemanager.datafactory.models.DataFlowSource;
-import com.azure.resourcemanager.datafactory.models.DatasetReference;
-import com.azure.resourcemanager.datafactory.models.MappingDataFlow;
-import java.util.Arrays;
-
/**
- * Samples for DataFlows CreateOrUpdateSync.
+ * Samples for Factories ListByResourceGroup.
*/
-public final class DataFlowsCreateOrUpdateSyncSamples {
+public final class FactoriesListByResourceGroupSamples {
/*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Create.json
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * Factories_ListByResourceGroup.json
*/
/**
- * Sample code: DataFlows_Create.
+ * Sample code: Factories_ListByResourceGroup.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void dataFlowsCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.dataFlows()
- .define("exampleDataFlow")
- .withExistingFactory("exampleResourceGroup", "exampleFactoryName")
- .withProperties(new MappingDataFlow().withDescription(
- "Sample demo data flow to convert currencies showing usage of union, derive and conditional split transformation.")
- .withSources(Arrays.asList(
- new DataFlowSource().withName("USDCurrency")
- .withDataset(new DatasetReference().withReferenceName("CurrencyDatasetUSD")),
- new DataFlowSource().withName("CADSource")
- .withDataset(new DatasetReference().withReferenceName("CurrencyDatasetCAD"))))
- .withSinks(Arrays.asList(
- new DataFlowSink().withName("USDSink")
- .withDataset(new DatasetReference().withReferenceName("USDOutput")),
- new DataFlowSink().withName("CADSink")
- .withDataset(new DatasetReference().withReferenceName("CADOutput"))))
- .withScriptLines(Arrays.asList("source(output(", "PreviousConversionRate as double,",
- "Country as string,", "DateTime1 as string,", "CurrentConversionRate as double", "),",
- "allowSchemaDrift: false,", "validateSchema: false) ~> USDCurrency", "source(output(",
- "PreviousConversionRate as double,", "Country as string,", "DateTime1 as string,",
- "CurrentConversionRate as double", "),", "allowSchemaDrift: true,",
- "validateSchema: false) ~> CADSource", "USDCurrency, CADSource union(byName: true)~> Union",
- "Union derive(NewCurrencyRate = round(CurrentConversionRate*1.25)) ~> NewCurrencyColumn",
- "NewCurrencyColumn split(Country == 'USD',",
- "Country == 'CAD',disjoint: false) ~> ConditionalSplit1@(USD, CAD)",
- "ConditionalSplit1@USD sink(saveMode:'overwrite' ) ~> USDSink",
- "ConditionalSplit1@CAD sink(saveMode:'overwrite' ) ~> CADSink")))
- .create();
+ public static void factoriesListByResourceGroup(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.factories().listByResourceGroup("exampleResourceGroup", com.azure.core.util.Context.NONE);
}
+}
+```
+### ChangeDataCapture_ListByFactory
+
+```java
+/**
+ * Samples for IntegrationRuntimes Start.
+ */
+public final class IntegrationRuntimesStartSamples {
/*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Update.json
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * IntegrationRuntimes_Start.json
*/
/**
- * Sample code: DataFlows_Update.
+ * Sample code: IntegrationRuntimes_Start.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void dataFlowsUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- DataFlowResource resource = manager.dataFlows()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", null,
- com.azure.core.util.Context.NONE)
- .getValue();
- resource.update()
- .withProperties(new MappingDataFlow().withDescription(
- "Sample demo data flow to convert currencies showing usage of union, derive and conditional split transformation.")
- .withSources(Arrays.asList(
- new DataFlowSource().withName("USDCurrency")
- .withDataset(new DatasetReference().withReferenceName("CurrencyDatasetUSD")),
- new DataFlowSource().withName("CADSource")
- .withDataset(new DatasetReference().withReferenceName("CurrencyDatasetCAD"))))
- .withSinks(Arrays.asList(
- new DataFlowSink().withName("USDSink")
- .withDataset(new DatasetReference().withReferenceName("USDOutput")),
- new DataFlowSink().withName("CADSink")
- .withDataset(new DatasetReference().withReferenceName("CADOutput"))))
- .withScriptLines(Arrays.asList("source(output(", "PreviousConversionRate as double,",
- "Country as string,", "DateTime1 as string,", "CurrentConversionRate as double", "),",
- "allowSchemaDrift: false,", "validateSchema: false) ~> USDCurrency", "source(output(",
- "PreviousConversionRate as double,", "Country as string,", "DateTime1 as string,",
- "CurrentConversionRate as double", "),", "allowSchemaDrift: true,",
- "validateSchema: false) ~> CADSource", "USDCurrency, CADSource union(byName: true)~> Union",
- "Union derive(NewCurrencyRate = round(CurrentConversionRate*1.25)) ~> NewCurrencyColumn",
- "NewCurrencyColumn split(Country == 'USD',",
- "Country == 'CAD',disjoint: false) ~> ConditionalSplit1@(USD, CAD)",
- "ConditionalSplit1@USD sink(saveMode:'overwrite' ) ~> USDSink",
- "ConditionalSplit1@CAD sink(saveMode:'overwrite' ) ~> CADSink")))
- .apply();
+ public static void integrationRuntimesStart(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.integrationRuntimes()
+ .start("exampleResourceGroup", "exampleFactoryName", "exampleManagedIntegrationRuntime",
+ com.azure.core.util.Context.NONE);
}
}
```
-### ChangeDataCapture_ListByFactorySync
+### ChangeDataCapture_Start
```java
/**
- * Samples for ManagedPrivateEndpoints GetSync.
+ * Samples for IntegrationRuntimes Stop.
*/
-public final class ManagedPrivateEndpointsGetSyncSamples {
+public final class IntegrationRuntimesStopSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * ManagedPrivateEndpoints_Get.json
+ * IntegrationRuntimes_Stop.json
*/
/**
- * Sample code: ManagedPrivateEndpoints_Get.
+ * Sample code: IntegrationRuntimes_Stop.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void managedPrivateEndpointsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.managedPrivateEndpoints()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName",
- "exampleManagedPrivateEndpointName", null, com.azure.core.util.Context.NONE);
+ public static void integrationRuntimesStop(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.integrationRuntimes()
+ .stop("exampleResourceGroup", "exampleFactoryName", "exampleManagedIntegrationRuntime",
+ com.azure.core.util.Context.NONE);
}
}
```
-### ChangeDataCapture_StartSync
+### ChangeDataCapture_Status
```java
/**
- * Samples for Factories ListByResourceGroupSync.
+ * Samples for IntegrationRuntimes SyncCredentials.
*/
-public final class FactoriesListByResourceGroupSyncSamples {
+public final class IntegrationRuntimesSyncCredentialsSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * Factories_ListByResourceGroup.json
+ * IntegrationRuntimes_SyncCredentials.json
*/
/**
- * Sample code: Factories_ListByResourceGroup.
+ * Sample code: IntegrationRuntimes_SyncCredentials.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void factoriesListByResourceGroup(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.factories().listByResourceGroup("exampleResourceGroup", com.azure.core.util.Context.NONE);
+ public static void
+ integrationRuntimesSyncCredentials(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.integrationRuntimes()
+ .syncCredentialsWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
+ com.azure.core.util.Context.NONE);
}
}
```
-### ChangeDataCapture_StatusSync
+### ChangeDataCapture_Stop
```java
/**
- * Samples for PipelineRuns CancelSync.
+ * Samples for Pipelines Delete.
*/
-public final class PipelineRunsCancelSyncSamples {
+public final class PipelinesDeleteSamples {
/*
* x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_Cancel.
- * json
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Delete.json
*/
/**
- * Sample code: PipelineRuns_Cancel.
+ * Sample code: Pipelines_Delete.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void pipelineRunsCancel(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.pipelineRuns()
- .cancelWithResponse("exampleResourceGroup", "exampleFactoryName", "16ac5348-ff82-4f95-a80d-638c1d47b721",
- null, com.azure.core.util.Context.NONE);
+ public static void pipelinesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.pipelines()
+ .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline",
+ com.azure.core.util.Context.NONE);
}
}
```
-### ChangeDataCapture_StopSync
+### CredentialOperations_CreateOrUpdate
```java
-import com.azure.resourcemanager.datafactory.models.ManagedPrivateEndpoint;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
+import com.azure.resourcemanager.datafactory.models.SelfHostedIntegrationRuntime;
/**
- * Samples for ManagedPrivateEndpoints CreateOrUpdateSync.
+ * Samples for IntegrationRuntimes CreateOrUpdate.
*/
-public final class ManagedPrivateEndpointsCreateOrUpdateSyncSamples {
+public final class IntegrationRuntimesCreateOrUpdateSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * ManagedPrivateEndpoints_Create.json
+ * IntegrationRuntimes_Create.json
*/
/**
- * Sample code: ManagedVirtualNetworks_Create.
+ * Sample code: IntegrationRuntimes_Create.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void managedVirtualNetworksCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.managedPrivateEndpoints()
- .define("exampleManagedPrivateEndpointName")
- .withExistingManagedVirtualNetwork("exampleResourceGroup", "exampleFactoryName",
- "exampleManagedVirtualNetworkName")
- .withProperties(new ManagedPrivateEndpoint().withFqdns(Arrays.asList())
- .withGroupId("blob")
- .withPrivateLinkResourceId(
- "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.Storage/storageAccounts/exampleBlobStorage")
- .withAdditionalProperties(mapOf()))
- .create();
- }
-
- // Use "Map.of" if available
- @SuppressWarnings("unchecked")
- private static <T> Map<String, T> mapOf(Object... inputs) {
- Map<String, T> map = new HashMap<>();
- for (int i = 0; i < inputs.length; i += 2) {
- String key = (String) inputs[i];
- T value = (T) inputs[i + 1];
- map.put(key, value);
- }
- return map;
+ public static void integrationRuntimesCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.integrationRuntimes()
+ .define("exampleIntegrationRuntime")
+ .withExistingFactory("exampleResourceGroup", "exampleFactoryName")
+ .withProperties(new SelfHostedIntegrationRuntime().withDescription("A selfhosted integration runtime"))
+ .create();
}
}
```
-### CredentialOperations_CreateOrUpdateSync
+### CredentialOperations_Delete
```java
/**
- * Samples for ChangeDataCapture StartSync.
+ * Samples for IntegrationRuntimes GetStatus.
*/
-public final class ChangeDataCaptureStartSyncSamples {
+public final class IntegrationRuntimesGetStatusSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * ChangeDataCapture_Start.json
+ * IntegrationRuntimes_GetStatus.json
*/
/**
- * Sample code: ChangeDataCapture_Start.
+ * Sample code: IntegrationRuntimes_GetStatus.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void changeDataCaptureStart(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.changeDataCaptures()
- .startWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture",
+ public static void integrationRuntimesGetStatus(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.integrationRuntimes()
+ .getStatusWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
com.azure.core.util.Context.NONE);
}
}
```
-### CredentialOperations_DeleteSync
+### CredentialOperations_Get
```java
/**
- * Samples for PrivateEndpointConnectionOperation DeleteSync.
+ * Samples for PipelineRuns Get.
*/
-public final class PrivateEndpointConnectionOperationDeleteSyncSamples {
+public final class PipelineRunsGetSamples {
/*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * DeletePrivateEndpointConnection.json
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_Get.json
*/
/**
- * Sample code: Delete a private endpoint connection for a datafactory.
+ * Sample code: PipelineRuns_Get.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void deleteAPrivateEndpointConnectionForADatafactory(
- com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.privateEndpointConnectionOperations()
- .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "connection",
+ public static void pipelineRunsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.pipelineRuns()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b",
com.azure.core.util.Context.NONE);
}
}
```
-### CredentialOperations_GetSync
+### CredentialOperations_ListByFactory
```java
/**
- * Samples for IntegrationRuntimes Start.
+ * Samples for Triggers Delete.
*/
-public final class IntegrationRuntimesStartSamples {
+public final class TriggersDeleteSamples {
/*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimes_Start.json
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Delete.json
*/
/**
- * Sample code: IntegrationRuntimes_Start.
+ * Sample code: Triggers_Delete.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void integrationRuntimesStart(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.integrationRuntimes()
- .start("exampleResourceGroup", "exampleFactoryName", "exampleManagedIntegrationRuntime",
+ public static void triggersDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.triggers()
+ .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger",
com.azure.core.util.Context.NONE);
}
}
```
-### CredentialOperations_ListByFactorySync
+### DataFlowDebugSession_AddDataFlow
```java
/**
- * Samples for IntegrationRuntimes Stop.
+ * Samples for IntegrationRuntimes Upgrade.
*/
-public final class IntegrationRuntimesStopSamples {
+public final class IntegrationRuntimesUpgradeSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimes_Stop.json
+ * IntegrationRuntimes_Upgrade.json
*/
/**
- * Sample code: IntegrationRuntimes_Stop.
+ * Sample code: IntegrationRuntimes_Upgrade.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void integrationRuntimesStop(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ public static void integrationRuntimesUpgrade(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
manager.integrationRuntimes()
- .stop("exampleResourceGroup", "exampleFactoryName", "exampleManagedIntegrationRuntime",
+ .upgradeWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
com.azure.core.util.Context.NONE);
}
}
```
-### DataFlowDebugSession_AddDataFlowSync
+### DataFlowDebugSession_Create
```java
-import com.azure.resourcemanager.datafactory.models.GlobalParameterResource;
-import com.azure.resourcemanager.datafactory.models.GlobalParameterSpecification;
-import java.util.Map;
-
/**
- * Samples for GlobalParameters CreateOrUpdateSync.
+ * Samples for DataFlowDebugSession QueryByFactory.
*/
-public final class GlobalParametersCreateOrUpdateSyncSamples {
- /*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * GlobalParameters_Create.json
- */
- /**
- * Sample code: GlobalParameters_Create.
- *
- * @param manager Entry point to DataFactoryManager.
- */
- public static void globalParametersCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.globalParameters()
- .define("default")
- .withExistingFactory("exampleResourceGroup", "exampleFactoryName")
- .withProperties((Map<String, GlobalParameterSpecification>) null)
- .create();
- }
-
+public final class DataFlowDebugSessionQueryByFactorySamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * GlobalParameters_Update.json
+ * DataFlowDebugSession_QueryByFactory.json
*/
/**
- * Sample code: GlobalParameters_Update.
+ * Sample code: DataFlowDebugSession_QueryByFactory.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void globalParametersUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- GlobalParameterResource resource = manager.globalParameters()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "default", com.azure.core.util.Context.NONE)
- .getValue();
- resource.update().apply();
+ public static void
+ dataFlowDebugSessionQueryByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.dataFlowDebugSessions()
+ .queryByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
}
}
```
-### DataFlowDebugSession_Create
+### DataFlowDebugSession_Delete
```java
/**
- * Samples for Triggers DeleteSync.
+ * Samples for LinkedServices Get.
*/
-public final class TriggersDeleteSyncSamples {
+public final class LinkedServicesGetSamples {
/*
* x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Delete.json
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Get.
+ * json
*/
/**
- * Sample code: Triggers_Delete.
+ * Sample code: LinkedServices_Get.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void triggersDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.triggers()
- .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger",
+ public static void linkedServicesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.linkedServices()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleLinkedService", null,
com.azure.core.util.Context.NONE);
}
}
```
-### DataFlowDebugSession_DeleteSync
+### DataFlowDebugSession_ExecuteCommand
```java
/**
- * Samples for PipelineRuns GetSync.
+ * Samples for Pipelines ListByFactory.
*/
-public final class PipelineRunsGetSyncSamples {
+public final class PipelinesListByFactorySamples {
/*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_Get.json
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * Pipelines_ListByFactory.json
*/
/**
- * Sample code: PipelineRuns_Get.
+ * Sample code: Pipelines_ListByFactory.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void pipelineRunsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.pipelineRuns()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b",
- com.azure.core.util.Context.NONE);
+ public static void pipelinesListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.pipelines()
+ .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
}
}
```
-### DataFlowDebugSession_ExecuteCommand
+### DataFlowDebugSession_QueryByFactory
```java
+import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureResource;
+import com.azure.resourcemanager.datafactory.models.MapperPolicy;
+import com.azure.resourcemanager.datafactory.models.MapperSourceConnectionsInfo;
+import com.azure.resourcemanager.datafactory.models.MapperTargetConnectionsInfo;
+import java.util.List;
+
/**
- * Samples for LinkedServices GetSync.
+ * Samples for ChangeDataCapture CreateOrUpdate.
*/
-public final class LinkedServicesGetSyncSamples {
+public final class ChangeDataCaptureCreateOrUpdateSamples {
/*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Get.
- * json
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * ChangeDataCapture_Create.json
*/
/**
- * Sample code: LinkedServices_Get.
+ * Sample code: ChangeDataCapture_Create.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void linkedServicesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.linkedServices()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleLinkedService", null,
- com.azure.core.util.Context.NONE);
+ public static void changeDataCaptureCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.changeDataCaptures()
+ .define("exampleChangeDataCapture")
+ .withExistingFactory("exampleResourceGroup", "exampleFactoryName")
+ .withSourceConnectionsInfo((List<MapperSourceConnectionsInfo>) null)
+ .withTargetConnectionsInfo((List<MapperTargetConnectionsInfo>) null)
+ .withPolicy((MapperPolicy) null)
+ .withDescription(
+ "Sample demo change data capture to transfer data from delimited (csv) to Azure SQL Database with automapped and non-automapped mappings.")
+ .withAllowVNetOverride(false)
+ .create();
}
-}
-```
-
-### DataFlowDebugSession_QueryByFactorySync
-
-```java
-import com.azure.resourcemanager.datafactory.models.ExposureControlBatchRequest;
-import com.azure.resourcemanager.datafactory.models.ExposureControlRequest;
-import java.util.Arrays;
-/**
- * Samples for ExposureControl QueryFeatureValuesByFactorySync.
- */
-public final class ExposureControlQueryFeatureValuesByFactorySyncSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * ExposureControl_QueryFeatureValuesByFactory.json
+ * ChangeDataCapture_Update.json
*/
/**
- * Sample code: ExposureControl_QueryFeatureValuesByFactory.
+ * Sample code: ChangeDataCapture_Update.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void
- exposureControlQueryFeatureValuesByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.exposureControls()
- .queryFeatureValuesByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName",
- new ExposureControlBatchRequest().withExposureControlRequests(Arrays.asList(
- new ExposureControlRequest().withFeatureName("ADFIntegrationRuntimeSharingRbac")
- .withFeatureType("Feature"),
- new ExposureControlRequest().withFeatureName("ADFSampleFeature").withFeatureType("Feature"))),
- com.azure.core.util.Context.NONE);
+ public static void changeDataCaptureUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ ChangeDataCaptureResource resource = manager.changeDataCaptures()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", null,
+ com.azure.core.util.Context.NONE)
+ .getValue();
+ resource.update()
+ .withDescription(
+ "Sample demo change data capture to transfer data from delimited (csv) to Azure SQL Database. Updating table mappings.")
+ .withAllowVNetOverride(false)
+ .withStatus("Stopped")
+ .apply();
}
}
```
-### DataFlows_CreateOrUpdateSync
+### DataFlows_CreateOrUpdate
```java
-import com.azure.resourcemanager.datafactory.models.GitHubAccessTokenRequest;
-
/**
- * Samples for Factories GetGitHubAccessTokenSync.
+ * Samples for IntegrationRuntimes ListAuthKeys.
*/
-public final class FactoriesGetGitHubAccessTokenSyncSamples {
+public final class IntegrationRuntimesListAuthKeysSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * Factories_GetGitHubAccessToken.json
+ * IntegrationRuntimes_ListAuthKeys.json
*/
/**
- * Sample code: Factories_GetGitHubAccessToken.
+ * Sample code: IntegrationRuntimes_ListAuthKeys.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void factoriesGetGitHubAccessToken(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.factories()
- .getGitHubAccessTokenWithResponse("exampleResourceGroup", "exampleFactoryName",
- new GitHubAccessTokenRequest().withGitHubAccessCode("fakeTokenPlaceholder")
- .withGitHubClientId("some")
- .withGitHubAccessTokenBaseUrl("fakeTokenPlaceholder"),
+ public static void
+ integrationRuntimesListAuthKeys(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.integrationRuntimes()
+ .listAuthKeysWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
com.azure.core.util.Context.NONE);
}
}
```
-### DataFlows_DeleteSync
+### DataFlows_Delete
```java
/**
- * Samples for IntegrationRuntimes GetStatusSync.
+ * Samples for IntegrationRuntimes Get.
*/
-public final class IntegrationRuntimesGetStatusSyncSamples {
+public final class IntegrationRuntimesGetSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimes_GetStatus.json
+ * IntegrationRuntimes_Get.json
*/
/**
- * Sample code: IntegrationRuntimes_GetStatus.
+ * Sample code: IntegrationRuntimes_Get.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void integrationRuntimesGetStatus(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ public static void integrationRuntimesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
manager.integrationRuntimes()
- .getStatusWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", null,
com.azure.core.util.Context.NONE);
}
}
```
-### DataFlows_GetSync
+### DataFlows_Get
```java
+import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeAutoUpdate;
+import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeResource;
+
/**
- * Samples for IntegrationRuntimes ListAuthKeysSync.
+ * Samples for IntegrationRuntimes Update.
*/
-public final class IntegrationRuntimesListAuthKeysSyncSamples {
+public final class IntegrationRuntimesUpdateSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimes_ListAuthKeys.json
+ * IntegrationRuntimes_Update.json
*/
/**
- * Sample code: IntegrationRuntimes_ListAuthKeys.
+ * Sample code: IntegrationRuntimes_Update.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void
- integrationRuntimesListAuthKeys(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.integrationRuntimes()
- .listAuthKeysWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
- com.azure.core.util.Context.NONE);
+ public static void integrationRuntimesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ IntegrationRuntimeResource resource = manager.integrationRuntimes()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", null,
+ com.azure.core.util.Context.NONE)
+ .getValue();
+ resource.update().withAutoUpdate(IntegrationRuntimeAutoUpdate.OFF).withUpdateDelayOffset("\"PT3H\"").apply();
}
}
```
-### DataFlows_ListByFactorySync
+### DataFlows_ListByFactory
```java
/**
- * Samples for DataFlows DeleteSync.
+ * Samples for ManagedVirtualNetworks ListByFactory.
*/
-public final class DataFlowsDeleteSyncSamples {
+public final class ManagedVirtualNetworksListByFactorySamples {
/*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Delete.json
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * ManagedVirtualNetworks_ListByFactory.json
*/
/**
- * Sample code: DataFlows_Delete.
+ * Sample code: ManagedVirtualNetworks_ListByFactory.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void dataFlowsDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.dataFlows()
- .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow",
- com.azure.core.util.Context.NONE);
+ public static void
+ managedVirtualNetworksListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.managedVirtualNetworks()
+ .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
}
}
```
-### Datasets_CreateOrUpdateSync
+### Datasets_CreateOrUpdate
```java
/**
- * Samples for DataFlows GetSync.
+ * Samples for Factories GetByResourceGroup.
*/
-public final class DataFlowsGetSyncSamples {
+public final class FactoriesGetByResourceGroupSamples {
/*
* x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Get.json
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Get.json
*/
/**
- * Sample code: DataFlows_Get.
+ * Sample code: Factories_Get.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void dataFlowsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.dataFlows()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", null,
+ public static void factoriesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.factories()
+ .getByResourceGroupWithResponse("exampleResourceGroup", "exampleFactoryName", null,
com.azure.core.util.Context.NONE);
}
}
```
-### Datasets_DeleteSync
+### Datasets_Delete
```java
-/**
- * Samples for ManagedPrivateEndpoints DeleteSync.
+import com.azure.core.management.serializer.SerializerFactory;
+import com.azure.core.util.serializer.SerializerEncoding;
+import com.azure.resourcemanager.datafactory.models.BlobSink;
+import com.azure.resourcemanager.datafactory.models.BlobSource;
+import com.azure.resourcemanager.datafactory.models.CopyActivity;
+import com.azure.resourcemanager.datafactory.models.DatasetReference;
+import com.azure.resourcemanager.datafactory.models.Expression;
+import com.azure.resourcemanager.datafactory.models.ForEachActivity;
+import com.azure.resourcemanager.datafactory.models.ParameterSpecification;
+import com.azure.resourcemanager.datafactory.models.ParameterType;
+import com.azure.resourcemanager.datafactory.models.PipelineElapsedTimeMetricPolicy;
+import com.azure.resourcemanager.datafactory.models.PipelinePolicy;
+import com.azure.resourcemanager.datafactory.models.PipelineResource;
+import com.azure.resourcemanager.datafactory.models.VariableSpecification;
+import com.azure.resourcemanager.datafactory.models.VariableType;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Samples for Pipelines CreateOrUpdate.
*/
-public final class ManagedPrivateEndpointsDeleteSyncSamples {
+public final class PipelinesCreateOrUpdateSamples {
/*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * ManagedPrivateEndpoints_Delete.json
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Create.json
*/
/**
- * Sample code: ManagedVirtualNetworks_Delete.
+ * Sample code: Pipelines_Create.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void managedVirtualNetworksDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.managedPrivateEndpoints()
- .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName",
- "exampleManagedPrivateEndpointName", com.azure.core.util.Context.NONE);
+ public static void pipelinesCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
+ throws IOException {
+ manager.pipelines()
+ .define("examplePipeline")
+ .withExistingFactory("exampleResourceGroup", "exampleFactoryName")
+ .withActivities(Arrays.asList(new ForEachActivity().withName("ExampleForeachActivity")
+ .withIsSequential(true)
+ .withItems(new Expression().withValue("@pipeline().parameters.OutputBlobNameList"))
+ .withActivities(Arrays.asList(new CopyActivity().withName("ExampleCopyActivity")
+ .withInputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset")
+ .withParameters(
+ mapOf("MyFileName", "examplecontainer.csv", "MyFolderPath", "examplecontainer"))))
+ .withOutputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset")
+ .withParameters(mapOf("MyFileName",
+ SerializerFactory.createDefaultManagementSerializerAdapter()
+ .deserialize("{\"type\":\"Expression\",\"value\":\"@item()\"}", Object.class,
+ SerializerEncoding.JSON),
+ "MyFolderPath", "examplecontainer"))))
+ .withSource(new BlobSource())
+ .withSink(new BlobSink())
+ .withDataIntegrationUnits(32)))))
+ .withParameters(mapOf("JobId", new ParameterSpecification().withType(ParameterType.STRING),
+ "OutputBlobNameList", new ParameterSpecification().withType(ParameterType.ARRAY)))
+ .withVariables(mapOf("TestVariableArray", new VariableSpecification().withType(VariableType.ARRAY)))
+ .withRunDimensions(mapOf("JobId",
+ SerializerFactory.createDefaultManagementSerializerAdapter()
+ .deserialize("{\"type\":\"Expression\",\"value\":\"@pipeline().parameters.JobId\"}", Object.class,
+ SerializerEncoding.JSON)))
+ .withPolicy(new PipelinePolicy()
+ .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("0.00:10:00")))
+ .create();
}
-}
-```
-### Datasets_GetSync
-
-```java
-/**
- * Samples for Operations ListSync.
- */
-public final class OperationsListSyncSamples {
/*
* x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Operations_List.json
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Update.json
*/
/**
- * Sample code: Operations_List.
+ * Sample code: Pipelines_Update.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void operationsList(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.operations().list(com.azure.core.util.Context.NONE);
+ public static void pipelinesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
+ throws IOException {
+ PipelineResource resource = manager.pipelines()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", null,
+ com.azure.core.util.Context.NONE)
+ .getValue();
+ resource.update()
+ .withDescription("Example description")
+ .withActivities(Arrays.asList(new ForEachActivity().withName("ExampleForeachActivity")
+ .withIsSequential(true)
+ .withItems(new Expression().withValue("@pipeline().parameters.OutputBlobNameList"))
+ .withActivities(Arrays.asList(new CopyActivity().withName("ExampleCopyActivity")
+ .withInputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset")
+ .withParameters(
+ mapOf("MyFileName", "examplecontainer.csv", "MyFolderPath", "examplecontainer"))))
+ .withOutputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset")
+ .withParameters(mapOf("MyFileName",
+ SerializerFactory.createDefaultManagementSerializerAdapter()
+ .deserialize("{\"type\":\"Expression\",\"value\":\"@item()\"}", Object.class,
+ SerializerEncoding.JSON),
+ "MyFolderPath", "examplecontainer"))))
+ .withSource(new BlobSource())
+ .withSink(new BlobSink())
+ .withDataIntegrationUnits(32)))))
+ .withParameters(mapOf("OutputBlobNameList", new ParameterSpecification().withType(ParameterType.ARRAY)))
+ .withPolicy(new PipelinePolicy()
+ .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("0.00:10:00")))
+ .apply();
+ }
+
+ // Use "Map.of" if available
+ @SuppressWarnings("unchecked")
+ private static <T> Map<String, T> mapOf(Object... inputs) {
+ Map<String, T> map = new HashMap<>();
+ for (int i = 0; i < inputs.length; i += 2) {
+ String key = (String) inputs[i];
+ T value = (T) inputs[i + 1];
+ map.put(key, value);
+ }
+ return map;
}
}
```
-### Datasets_ListByFactorySync
+### Datasets_Get
```java
/**
- * Samples for ManagedVirtualNetworks GetSync.
+ * Samples for Triggers UnsubscribeFromEvents.
*/
-public final class ManagedVirtualNetworksGetSyncSamples {
+public final class TriggersUnsubscribeFromEventsSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * ManagedVirtualNetworks_Get.json
+ * Triggers_UnsubscribeFromEvents.json
*/
/**
- * Sample code: ManagedVirtualNetworks_Get.
+ * Sample code: Triggers_UnsubscribeFromEvents.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void managedVirtualNetworksGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.managedVirtualNetworks()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName", null,
+ public static void triggersUnsubscribeFromEvents(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.triggers()
+ .unsubscribeFromEvents("exampleResourceGroup", "exampleFactoryName", "exampleTrigger",
com.azure.core.util.Context.NONE);
}
}
```
-### ExposureControl_GetFeatureValueByFactorySync
+### Datasets_ListByFactory
```java
-import com.azure.resourcemanager.datafactory.models.SelfHostedIntegrationRuntime;
+import com.azure.resourcemanager.datafactory.models.DataFlowResource;
+import com.azure.resourcemanager.datafactory.models.DataFlowSink;
+import com.azure.resourcemanager.datafactory.models.DataFlowSource;
+import com.azure.resourcemanager.datafactory.models.DatasetReference;
+import com.azure.resourcemanager.datafactory.models.MappingDataFlow;
+import java.util.Arrays;
/**
- * Samples for IntegrationRuntimes CreateOrUpdateSync.
+ * Samples for DataFlows CreateOrUpdate.
*/
-public final class IntegrationRuntimesCreateOrUpdateSyncSamples {
+public final class DataFlowsCreateOrUpdateSamples {
/*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimes_Create.json
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Create.json
*/
/**
- * Sample code: IntegrationRuntimes_Create.
+ * Sample code: DataFlows_Create.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void integrationRuntimesCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.integrationRuntimes()
- .define("exampleIntegrationRuntime")
+ public static void dataFlowsCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.dataFlows()
+ .define("exampleDataFlow")
.withExistingFactory("exampleResourceGroup", "exampleFactoryName")
- .withProperties(new SelfHostedIntegrationRuntime().withDescription("A selfhosted integration runtime"))
+ .withProperties(new MappingDataFlow().withDescription(
+ "Sample demo data flow to convert currencies showing usage of union, derive and conditional split transformation.")
+ .withSources(Arrays.asList(
+ new DataFlowSource().withName("USDCurrency")
+ .withDataset(new DatasetReference().withReferenceName("CurrencyDatasetUSD")),
+ new DataFlowSource().withName("CADSource")
+ .withDataset(new DatasetReference().withReferenceName("CurrencyDatasetCAD"))))
+ .withSinks(Arrays.asList(
+ new DataFlowSink().withName("USDSink")
+ .withDataset(new DatasetReference().withReferenceName("USDOutput")),
+ new DataFlowSink().withName("CADSink")
+ .withDataset(new DatasetReference().withReferenceName("CADOutput"))))
+ .withScriptLines(Arrays.asList("source(output(", "PreviousConversionRate as double,",
+ "Country as string,", "DateTime1 as string,", "CurrentConversionRate as double", "),",
+ "allowSchemaDrift: false,", "validateSchema: false) ~> USDCurrency", "source(output(",
+ "PreviousConversionRate as double,", "Country as string,", "DateTime1 as string,",
+ "CurrentConversionRate as double", "),", "allowSchemaDrift: true,",
+ "validateSchema: false) ~> CADSource", "USDCurrency, CADSource union(byName: true)~> Union",
+ "Union derive(NewCurrencyRate = round(CurrentConversionRate*1.25)) ~> NewCurrencyColumn",
+ "NewCurrencyColumn split(Country == 'USD',",
+ "Country == 'CAD',disjoint: false) ~> ConditionalSplit1@(USD, CAD)",
+ "ConditionalSplit1@USD sink(saveMode:'overwrite' ) ~> USDSink",
+ "ConditionalSplit1@CAD sink(saveMode:'overwrite' ) ~> CADSink")))
.create();
}
-}
-```
-### ExposureControl_GetFeatureValueSync
-
-```java
-/**
- * Samples for Triggers UnsubscribeFromEvents.
- */
-public final class TriggersUnsubscribeFromEventsSamples {
/*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * Triggers_UnsubscribeFromEvents.json
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Update.json
*/
/**
- * Sample code: Triggers_UnsubscribeFromEvents.
+ * Sample code: DataFlows_Update.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void triggersUnsubscribeFromEvents(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.triggers()
- .unsubscribeFromEvents("exampleResourceGroup", "exampleFactoryName", "exampleTrigger",
- com.azure.core.util.Context.NONE);
+ public static void dataFlowsUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ DataFlowResource resource = manager.dataFlows()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", null,
+ com.azure.core.util.Context.NONE)
+ .getValue();
+ resource.update()
+ .withProperties(new MappingDataFlow().withDescription(
+ "Sample demo data flow to convert currencies showing usage of union, derive and conditional split transformation.")
+ .withSources(Arrays.asList(
+ new DataFlowSource().withName("USDCurrency")
+ .withDataset(new DatasetReference().withReferenceName("CurrencyDatasetUSD")),
+ new DataFlowSource().withName("CADSource")
+ .withDataset(new DatasetReference().withReferenceName("CurrencyDatasetCAD"))))
+ .withSinks(Arrays.asList(
+ new DataFlowSink().withName("USDSink")
+ .withDataset(new DatasetReference().withReferenceName("USDOutput")),
+ new DataFlowSink().withName("CADSink")
+ .withDataset(new DatasetReference().withReferenceName("CADOutput"))))
+ .withScriptLines(Arrays.asList("source(output(", "PreviousConversionRate as double,",
+ "Country as string,", "DateTime1 as string,", "CurrentConversionRate as double", "),",
+ "allowSchemaDrift: false,", "validateSchema: false) ~> USDCurrency", "source(output(",
+ "PreviousConversionRate as double,", "Country as string,", "DateTime1 as string,",
+ "CurrentConversionRate as double", "),", "allowSchemaDrift: true,",
+ "validateSchema: false) ~> CADSource", "USDCurrency, CADSource union(byName: true)~> Union",
+ "Union derive(NewCurrencyRate = round(CurrentConversionRate*1.25)) ~> NewCurrencyColumn",
+ "NewCurrencyColumn split(Country == 'USD',",
+ "Country == 'CAD',disjoint: false) ~> ConditionalSplit1@(USD, CAD)",
+ "ConditionalSplit1@USD sink(saveMode:'overwrite' ) ~> USDSink",
+ "ConditionalSplit1@CAD sink(saveMode:'overwrite' ) ~> CADSink")))
+ .apply();
}
}
```
-### ExposureControl_QueryFeatureValuesByFactorySync
+### ExposureControl_GetFeatureValue
```java
+import com.azure.resourcemanager.datafactory.models.GetSsisObjectMetadataRequest;
+
/**
- * Samples for PrivateLinkResources GetSync.
+ * Samples for IntegrationRuntimeObjectMetadata Get.
*/
-public final class PrivateLinkResourcesGetSyncSamples {
+public final class IntegrationRuntimeObjectMetadataGetSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * GetPrivateLinkResources.json
+ * IntegrationRuntimeObjectMetadata_Get.json
*/
/**
- * Sample code: Get private link resources of a site.
+ * Sample code: IntegrationRuntimeObjectMetadata_Get.
*
* @param manager Entry point to DataFactoryManager.
*/
public static void
- getPrivateLinkResourcesOfASite(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.privateLinkResources()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
+ integrationRuntimeObjectMetadataGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.integrationRuntimeObjectMetadatas()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "testactivityv2",
+ new GetSsisObjectMetadataRequest().withMetadataPath("ssisFolders"), com.azure.core.util.Context.NONE);
}
}
```
-### Factories_ConfigureFactoryRepoSync
+### ExposureControl_GetFeatureValueByFactory
```java
-import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeAuthKeyName;
-import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeRegenerateKeyParameters;
+import com.azure.resourcemanager.datafactory.models.UpdateIntegrationRuntimeNodeRequest;
/**
- * Samples for IntegrationRuntimes RegenerateAuthKeySync.
+ * Samples for IntegrationRuntimeNodes Update.
*/
-public final class IntegrationRuntimesRegenerateAuthKeySyncSamples {
+public final class IntegrationRuntimeNodesUpdateSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimes_RegenerateAuthKey.json
- */
- /**
- * Sample code: IntegrationRuntimes_RegenerateAuthKey.
- *
- * @param manager Entry point to DataFactoryManager.
- */
- public static void
- integrationRuntimesRegenerateAuthKey(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.integrationRuntimes()
- .regenerateAuthKeyWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
- new IntegrationRuntimeRegenerateKeyParameters().withKeyName(IntegrationRuntimeAuthKeyName.AUTH_KEY2),
- com.azure.core.util.Context.NONE);
- }
-}
-```
-
-### Factories_CreateOrUpdateSync
-
-```java
-/**
- * Samples for Factories GetByResourceGroupSync.
- */
-public final class FactoriesGetByResourceGroupSyncSamples {
- /*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Get.json
+ * IntegrationRuntimeNodes_Update.json
*/
/**
- * Sample code: Factories_Get.
+ * Sample code: IntegrationRuntimeNodes_Update.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void factoriesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.factories()
- .getByResourceGroupWithResponse("exampleResourceGroup", "exampleFactoryName", null,
- com.azure.core.util.Context.NONE);
+ public static void integrationRuntimeNodesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.integrationRuntimeNodes()
+ .updateWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", "Node_1",
+ new UpdateIntegrationRuntimeNodeRequest().withConcurrentJobsLimit(2), com.azure.core.util.Context.NONE);
}
}
```
-### Factories_DeleteSync
+### ExposureControl_QueryFeatureValuesByFactory
```java
/**
- * Samples for DataFlows ListByFactorySync.
+ * Samples for ManagedPrivateEndpoints Get.
*/
-public final class DataFlowsListByFactorySyncSamples {
+public final class ManagedPrivateEndpointsGetSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * DataFlows_ListByFactory.json
+ * ManagedPrivateEndpoints_Get.json
*/
/**
- * Sample code: DataFlows_ListByFactory.
+ * Sample code: ManagedPrivateEndpoints_Get.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void dataFlowsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.dataFlows()
- .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
+ public static void managedPrivateEndpointsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.managedPrivateEndpoints()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName",
+ "exampleManagedPrivateEndpointName", null, com.azure.core.util.Context.NONE);
}
}
```
-### Factories_GetByResourceGroupSync
+### Factories_ConfigureFactoryRepo
```java
/**
- * Samples for IntegrationRuntimeNodes DeleteSync.
+ * Samples for GlobalParameters Delete.
*/
-public final class IntegrationRuntimeNodesDeleteSyncSamples {
+public final class GlobalParametersDeleteSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimeNodes_Delete.json
+ * GlobalParameters_Delete.json
*/
/**
- * Sample code: IntegrationRuntimesNodes_Delete.
+ * Sample code: GlobalParameters_Delete.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void
- integrationRuntimesNodesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.integrationRuntimeNodes()
- .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", "Node_1",
+ public static void globalParametersDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.globalParameters()
+ .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "default",
com.azure.core.util.Context.NONE);
}
}
```
-### Factories_GetDataPlaneAccessSync
+### Factories_CreateOrUpdate
```java
import com.azure.resourcemanager.datafactory.models.PrivateEndpoint;
@@ -1088,9 +1167,9 @@ import com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionApprova
import com.azure.resourcemanager.datafactory.models.PrivateLinkConnectionState;
/**
- * Samples for PrivateEndpointConnectionOperation CreateOrUpdateSync.
+ * Samples for PrivateEndpointConnectionOperation CreateOrUpdate.
*/
-public final class PrivateEndpointConnectionOperationCreateOrUpdateSyncSamples {
+public final class PrivateEndpointConnectionOperationCreateOrUpdateSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
* ApproveRejectPrivateEndpointConnection.json
@@ -1116,321 +1195,203 @@ public final class PrivateEndpointConnectionOperationCreateOrUpdateSyncSamples {
}
```
-### Factories_GetGitHubAccessTokenSync
-
-```java
-import com.azure.resourcemanager.datafactory.models.RunFilterParameters;
-import java.time.OffsetDateTime;
-
-/**
- * Samples for ActivityRuns QueryByPipelineRunSync.
- */
-public final class ActivityRunsQueryByPipelineRunSyncSamples {
- /*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * ActivityRuns_QueryByPipelineRun.json
- */
- /**
- * Sample code: ActivityRuns_QueryByPipelineRun.
- *
- * @param manager Entry point to DataFactoryManager.
- */
- public static void
- activityRunsQueryByPipelineRun(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.activityRuns()
- .queryByPipelineRunWithResponse("exampleResourceGroup", "exampleFactoryName",
- "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b",
- new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z"))
- .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z")),
- com.azure.core.util.Context.NONE);
- }
-}
-```
-
-### Factories_ListByResourceGroupSync
-
-```java
-/**
- * Samples for Pipelines ListByFactorySync.
- */
-public final class PipelinesListByFactorySyncSamples {
- /*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * Pipelines_ListByFactory.json
- */
- /**
- * Sample code: Pipelines_ListByFactory.
- *
- * @param manager Entry point to DataFactoryManager.
- */
- public static void pipelinesListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.pipelines()
- .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
- }
-}
-```
-
-### Factories_ListSync
-
-```java
-/**
- * Samples for LinkedServices DeleteSync.
- */
-public final class LinkedServicesDeleteSyncSamples {
- /*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Delete
- * .json
- */
- /**
- * Sample code: LinkedServices_Delete.
- *
- * @param manager Entry point to DataFactoryManager.
- */
- public static void linkedServicesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.linkedServices()
- .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleLinkedService",
- com.azure.core.util.Context.NONE);
- }
-}
-```
-
-### Factories_UpdateSync
+### Factories_Delete
```java
-import com.azure.core.management.serializer.SerializerFactory;
-import com.azure.core.util.serializer.SerializerEncoding;
-import com.azure.resourcemanager.datafactory.models.AzureBlobStorageLinkedService;
-import com.azure.resourcemanager.datafactory.models.AzureBlobStorageLocation;
-import com.azure.resourcemanager.datafactory.models.DataFlowDebugPackage;
-import com.azure.resourcemanager.datafactory.models.DataFlowDebugPackageDebugSettings;
-import com.azure.resourcemanager.datafactory.models.DataFlowDebugResource;
-import com.azure.resourcemanager.datafactory.models.DataFlowSource;
-import com.azure.resourcemanager.datafactory.models.DataFlowSourceSetting;
-import com.azure.resourcemanager.datafactory.models.DatasetDebugResource;
-import com.azure.resourcemanager.datafactory.models.DatasetReference;
-import com.azure.resourcemanager.datafactory.models.DelimitedTextDataset;
-import com.azure.resourcemanager.datafactory.models.LinkedServiceDebugResource;
-import com.azure.resourcemanager.datafactory.models.LinkedServiceReference;
-import com.azure.resourcemanager.datafactory.models.MappingDataFlow;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Samples for DataFlowDebugSession AddDataFlowSync.
- */
-public final class DataFlowDebugSessionAddDataFlowSyncSamples {
- /*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * DataFlowDebugSession_AddDataFlow.json
- */
- /**
- * Sample code: DataFlowDebugSession_AddDataFlow.
- *
- * @param manager Entry point to DataFactoryManager.
- */
- public static void dataFlowDebugSessionAddDataFlow(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
- throws IOException {
- manager.dataFlowDebugSessions()
- .addDataFlowWithResponse("exampleResourceGroup", "exampleFactoryName", new DataFlowDebugPackage()
- .withSessionId("f06ed247-9d07-49b2-b05e-2cb4a2fc871e")
- .withDataFlow(new DataFlowDebugResource().withName("dataflow1")
- .withProperties(new MappingDataFlow()
- .withSources(Arrays.asList(new DataFlowSource().withName("source1")
- .withDataset(new DatasetReference().withReferenceName("DelimitedText2"))))
- .withSinks(Arrays.asList())
- .withTransformations(Arrays.asList())
- .withScript(
- "\n\nsource(output(\n\t\tColumn_1 as string\n\t),\n\tallowSchemaDrift: true,\n\tvalidateSchema: false) ~> source1")))
- .withDatasets(Arrays.asList(new DatasetDebugResource().withName("dataset1")
- .withProperties(new DelimitedTextDataset()
- .withSchema(SerializerFactory.createDefaultManagementSerializerAdapter()
- .deserialize("[{\"type\":\"String\"}]", Object.class, SerializerEncoding.JSON))
- .withLinkedServiceName(new LinkedServiceReference().withReferenceName("linkedService5"))
- .withAnnotations(Arrays.asList())
- .withLocation(new AzureBlobStorageLocation().withFileName("Ansiencoding.csv")
- .withContainer("dataflow-sample-data"))
- .withColumnDelimiter(",")
- .withQuoteChar("\"")
- .withEscapeChar("\\")
- .withFirstRowAsHeader(true))))
- .withLinkedServices(Arrays.asList(new LinkedServiceDebugResource().withName("linkedService1")
- .withProperties(new AzureBlobStorageLinkedService().withAnnotations(Arrays.asList())
- .withConnectionString(
- "DefaultEndpointsProtocol=https;AccountName=;EndpointSuffix=core.windows.net;")
- .withEncryptedCredential("fakeTokenPlaceholder"))))
- .withDebugSettings(new DataFlowDebugPackageDebugSettings()
- .withSourceSettings(Arrays.asList(
- new DataFlowSourceSetting().withSourceName("source1")
- .withRowLimit(1000)
- .withAdditionalProperties(mapOf()),
- new DataFlowSourceSetting().withSourceName("source2")
- .withRowLimit(222)
- .withAdditionalProperties(mapOf())))
- .withParameters(mapOf("sourcePath", "Toy"))
- .withDatasetParameters(SerializerFactory.createDefaultManagementSerializerAdapter()
- .deserialize("{\"Movies\":{\"path\":\"abc\"},\"Output\":{\"time\":\"def\"}}", Object.class,
- SerializerEncoding.JSON)))
- .withAdditionalProperties(mapOf()), com.azure.core.util.Context.NONE);
- }
-
- // Use "Map.of" if available
- @SuppressWarnings("unchecked")
- private static Map mapOf(Object... inputs) {
- Map map = new HashMap<>();
- for (int i = 0; i < inputs.length; i += 2) {
- String key = (String) inputs[i];
- T value = (T) inputs[i + 1];
- map.put(key, value);
- }
- return map;
- }
-}
-```
-
-### GlobalParameters_CreateOrUpdateSync
-
-```java
-import com.azure.resourcemanager.datafactory.models.ManagedVirtualNetwork;
-import java.util.HashMap;
-import java.util.Map;
+import com.azure.resourcemanager.datafactory.models.GitHubAccessTokenRequest;
/**
- * Samples for ManagedVirtualNetworks CreateOrUpdateSync.
+ * Samples for Factories GetGitHubAccessToken.
*/
-public final class ManagedVirtualNetworksCreateOrUpdateSyncSamples {
+public final class FactoriesGetGitHubAccessTokenSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * ManagedVirtualNetworks_Create.json
+ * Factories_GetGitHubAccessToken.json
*/
/**
- * Sample code: ManagedVirtualNetworks_Create.
+ * Sample code: Factories_GetGitHubAccessToken.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void managedVirtualNetworksCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.managedVirtualNetworks()
- .define("exampleManagedVirtualNetworkName")
- .withExistingFactory("exampleResourceGroup", "exampleFactoryName")
- .withProperties(new ManagedVirtualNetwork().withAdditionalProperties(mapOf()))
- .create();
- }
-
- // Use "Map.of" if available
- @SuppressWarnings("unchecked")
- private static Map mapOf(Object... inputs) {
- Map map = new HashMap<>();
- for (int i = 0; i < inputs.length; i += 2) {
- String key = (String) inputs[i];
- T value = (T) inputs[i + 1];
- map.put(key, value);
- }
- return map;
+ public static void factoriesGetGitHubAccessToken(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.factories()
+ .getGitHubAccessTokenWithResponse("exampleResourceGroup", "exampleFactoryName",
+ new GitHubAccessTokenRequest().withGitHubAccessCode("fakeTokenPlaceholder")
+ .withGitHubClientId("some")
+ .withGitHubAccessTokenBaseUrl("fakeTokenPlaceholder"),
+ com.azure.core.util.Context.NONE);
}
}
```
-### GlobalParameters_DeleteSync
+### Factories_GetByResourceGroup
```java
/**
- * Samples for IntegrationRuntimes DeleteSync.
+ * Samples for Triggers ListByFactory.
*/
-public final class IntegrationRuntimesDeleteSyncSamples {
+public final class TriggersListByFactorySamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimes_Delete.json
+ * Triggers_ListByFactory.json
*/
/**
- * Sample code: IntegrationRuntimes_Delete.
+ * Sample code: Triggers_ListByFactory.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void integrationRuntimesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.integrationRuntimes()
- .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
- com.azure.core.util.Context.NONE);
+ public static void triggersListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.triggers()
+ .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
}
}
```
-### GlobalParameters_GetSync
+### Factories_GetDataPlaneAccess
```java
+import com.azure.resourcemanager.datafactory.models.GlobalParameterResource;
+import com.azure.resourcemanager.datafactory.models.GlobalParameterSpecification;
+import java.util.Map;
+
/**
- * Samples for ChangeDataCapture GetSync.
+ * Samples for GlobalParameters CreateOrUpdate.
*/
-public final class ChangeDataCaptureGetSyncSamples {
+public final class GlobalParametersCreateOrUpdateSamples {
/*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Get
- * .json
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * GlobalParameters_Create.json
*/
/**
- * Sample code: ChangeDataCapture_Get.
+ * Sample code: GlobalParameters_Create.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void changeDataCaptureGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.changeDataCaptures()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", null,
- com.azure.core.util.Context.NONE);
+ public static void globalParametersCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.globalParameters()
+ .define("default")
+ .withExistingFactory("exampleResourceGroup", "exampleFactoryName")
+ .withProperties((Map) null)
+ .create();
+ }
+
+ /*
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * GlobalParameters_Update.json
+ */
+ /**
+ * Sample code: GlobalParameters_Update.
+ *
+ * @param manager Entry point to DataFactoryManager.
+ */
+ public static void globalParametersUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ GlobalParameterResource resource = manager.globalParameters()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "default", com.azure.core.util.Context.NONE)
+ .getValue();
+ resource.update().apply();
}
}
```
-### GlobalParameters_ListByFactorySync
+### Factories_GetGitHubAccessToken
```java
/**
- * Samples for CredentialOperations GetSync.
+ * Samples for IntegrationRuntimeNodes Get.
*/
-public final class CredentialOperationsGetSyncSamples {
+public final class IntegrationRuntimeNodesGetSamples {
/*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Get.json
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * IntegrationRuntimeNodes_Get.json
*/
/**
- * Sample code: Credentials_Get.
+ * Sample code: IntegrationRuntimeNodes_Get.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void credentialsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.credentialOperations()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleCredential", null,
+ public static void integrationRuntimeNodesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.integrationRuntimeNodes()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", "Node_1",
com.azure.core.util.Context.NONE);
}
}
```
-### IntegrationRuntimeNodes_DeleteSync
+### Factories_List
```java
-import com.azure.resourcemanager.datafactory.models.Factory;
+import com.azure.core.management.serializer.SerializerFactory;
+import com.azure.core.util.serializer.SerializerEncoding;
+import com.azure.resourcemanager.datafactory.models.AzureBlobDataset;
+import com.azure.resourcemanager.datafactory.models.DatasetResource;
+import com.azure.resourcemanager.datafactory.models.LinkedServiceReference;
+import com.azure.resourcemanager.datafactory.models.ParameterSpecification;
+import com.azure.resourcemanager.datafactory.models.ParameterType;
+import com.azure.resourcemanager.datafactory.models.TextFormat;
+import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
- * Samples for Factories UpdateSync.
+ * Samples for Datasets CreateOrUpdate.
*/
-public final class FactoriesUpdateSyncSamples {
+public final class DatasetsCreateOrUpdateSamples {
/*
* x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Update.json
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Create.json
*/
/**
- * Sample code: Factories_Update.
+ * Sample code: Datasets_Create.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void factoriesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- Factory resource = manager.factories()
- .getByResourceGroupWithResponse("exampleResourceGroup", "exampleFactoryName", null,
+ public static void datasetsCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
+ throws IOException {
+ manager.datasets()
+ .define("exampleDataset")
+ .withExistingFactory("exampleResourceGroup", "exampleFactoryName")
+ .withProperties(new AzureBlobDataset()
+ .withLinkedServiceName(new LinkedServiceReference().withReferenceName("exampleLinkedService"))
+ .withParameters(mapOf("MyFileName", new ParameterSpecification().withType(ParameterType.STRING),
+ "MyFolderPath", new ParameterSpecification().withType(ParameterType.STRING)))
+ .withFolderPath(SerializerFactory.createDefaultManagementSerializerAdapter()
+ .deserialize("{\"type\":\"Expression\",\"value\":\"@dataset().MyFolderPath\"}", Object.class,
+ SerializerEncoding.JSON))
+ .withFileName(SerializerFactory.createDefaultManagementSerializerAdapter()
+ .deserialize("{\"type\":\"Expression\",\"value\":\"@dataset().MyFileName\"}", Object.class,
+ SerializerEncoding.JSON))
+ .withFormat(new TextFormat()))
+ .create();
+ }
+
+ /*
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Update.json
+ */
+ /**
+ * Sample code: Datasets_Update.
+ *
+ * @param manager Entry point to DataFactoryManager.
+ */
+ public static void datasetsUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
+ throws IOException {
+ DatasetResource resource = manager.datasets()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataset", null,
com.azure.core.util.Context.NONE)
.getValue();
- resource.update().withTags(mapOf("exampleTag", "exampleValue")).apply();
+ resource.update()
+ .withProperties(new AzureBlobDataset().withDescription("Example description")
+ .withLinkedServiceName(new LinkedServiceReference().withReferenceName("exampleLinkedService"))
+ .withParameters(mapOf("MyFileName", new ParameterSpecification().withType(ParameterType.STRING),
+ "MyFolderPath", new ParameterSpecification().withType(ParameterType.STRING)))
+ .withFolderPath(SerializerFactory.createDefaultManagementSerializerAdapter()
+ .deserialize("{\"type\":\"Expression\",\"value\":\"@dataset().MyFolderPath\"}", Object.class,
+ SerializerEncoding.JSON))
+ .withFileName(SerializerFactory.createDefaultManagementSerializerAdapter()
+ .deserialize("{\"type\":\"Expression\",\"value\":\"@dataset().MyFileName\"}", Object.class,
+ SerializerEncoding.JSON))
+ .withFormat(new TextFormat()))
+ .apply();
}
// Use "Map.of" if available
@@ -1447,412 +1408,380 @@ public final class FactoriesUpdateSyncSamples {
}
```
-### IntegrationRuntimeNodes_GetIpAddressSync
+### Factories_ListByResourceGroup
```java
/**
- * Samples for CredentialOperations ListByFactorySync.
+ * Samples for IntegrationRuntimeNodes Delete.
*/
-public final class CredentialOperationsListByFactorySyncSamples {
+public final class IntegrationRuntimeNodesDeleteSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * Credentials_ListByFactory.json
- */
- /**
- * Sample code: Credentials_ListByFactory.
- *
- * @param manager Entry point to DataFactoryManager.
- */
- public static void credentialsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.credentialOperations()
- .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
- }
-}
-```
-
-### IntegrationRuntimeNodes_GetSync
-
-```java
-/**
- * Samples for ChangeDataCapture ListByFactorySync.
- */
-public final class ChangeDataCaptureListByFactorySyncSamples {
- /*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * ChangeDataCapture_ListByFactory.json
+ * IntegrationRuntimeNodes_Delete.json
*/
/**
- * Sample code: ChangeDataCapture_ListByFactory.
+ * Sample code: IntegrationRuntimesNodes_Delete.
*
* @param manager Entry point to DataFactoryManager.
*/
public static void
- changeDataCaptureListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.changeDataCaptures()
- .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
+ integrationRuntimesNodesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.integrationRuntimeNodes()
+ .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", "Node_1",
+ com.azure.core.util.Context.NONE);
}
}
```
-### IntegrationRuntimeNodes_UpdateSync
+### Factories_Update
```java
/**
- * Samples for Datasets DeleteSync.
+ * Samples for CredentialOperations Delete.
*/
-public final class DatasetsDeleteSyncSamples {
+public final class CredentialOperationsDeleteSamples {
/*
* x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Delete.json
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Delete.
+ * json
*/
/**
- * Sample code: Datasets_Delete.
+ * Sample code: Credentials_Delete.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void datasetsDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.datasets()
- .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataset",
+ public static void credentialsDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.credentialOperations()
+ .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleCredential",
com.azure.core.util.Context.NONE);
}
}
```
-### IntegrationRuntimeObjectMetadata_GetSync
+### GlobalParameters_CreateOrUpdate
```java
-import com.azure.resourcemanager.datafactory.models.GetSsisObjectMetadataRequest;
-
/**
- * Samples for IntegrationRuntimeObjectMetadata GetSync.
+ * Samples for ManagedPrivateEndpoints ListByFactory.
*/
-public final class IntegrationRuntimeObjectMetadataGetSyncSamples {
+public final class ManagedPrivateEndpointsListByFactorySamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimeObjectMetadata_Get.json
+ * ManagedPrivateEndpoints_ListByFactory.json
*/
/**
- * Sample code: IntegrationRuntimeObjectMetadata_Get.
+ * Sample code: ManagedPrivateEndpoints_ListByFactory.
*
* @param manager Entry point to DataFactoryManager.
*/
public static void
- integrationRuntimeObjectMetadataGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.integrationRuntimeObjectMetadatas()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "testactivityv2",
- new GetSsisObjectMetadataRequest().withMetadataPath("ssisFolders"), com.azure.core.util.Context.NONE);
- }
-}
-```
-
-### IntegrationRuntimeObjectMetadata_Refresh
-
-```java
-/**
- * Samples for ChangeDataCapture DeleteSync.
- */
-public final class ChangeDataCaptureDeleteSyncSamples {
- /*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * ChangeDataCapture_Delete.json
- */
- /**
- * Sample code: ChangeDataCapture_Delete.
- *
- * @param manager Entry point to DataFactoryManager.
- */
- public static void changeDataCaptureDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.changeDataCaptures()
- .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture",
+ managedPrivateEndpointsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.managedPrivateEndpoints()
+ .listByFactory("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName",
com.azure.core.util.Context.NONE);
}
}
```
-### IntegrationRuntimes_CreateLinkedIntegrationRuntimeSync
+### GlobalParameters_Delete
```java
-import com.azure.resourcemanager.datafactory.models.FactoryRepoUpdate;
-import com.azure.resourcemanager.datafactory.models.FactoryVstsConfiguration;
-
/**
- * Samples for Factories ConfigureFactoryRepoSync.
+ * Samples for Datasets Delete.
*/
-public final class FactoriesConfigureFactoryRepoSyncSamples {
+public final class DatasetsDeleteSamples {
/*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * Factories_ConfigureFactoryRepo.json
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Delete.json
*/
/**
- * Sample code: Factories_ConfigureFactoryRepo.
+ * Sample code: Datasets_Delete.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void factoriesConfigureFactoryRepo(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.factories()
- .configureFactoryRepoWithResponse("East US", new FactoryRepoUpdate().withFactoryResourceId(
- "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName")
- .withRepoConfiguration(new FactoryVstsConfiguration().withAccountName("ADF")
- .withRepositoryName("repo")
- .withCollaborationBranch("master")
- .withRootFolder("/")
- .withLastCommitId("")
- .withProjectName("project")
- .withTenantId("")),
+ public static void datasetsDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.datasets()
+ .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataset",
com.azure.core.util.Context.NONE);
}
}
```
-### IntegrationRuntimes_CreateOrUpdateSync
+### GlobalParameters_Get
```java
-import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeAutoUpdate;
-import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeResource;
-
/**
- * Samples for IntegrationRuntimes UpdateSync.
+ * Samples for LinkedServices Delete.
*/
-public final class IntegrationRuntimesUpdateSyncSamples {
+public final class LinkedServicesDeleteSamples {
/*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimes_Update.json
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Delete
+ * .json
*/
/**
- * Sample code: IntegrationRuntimes_Update.
+ * Sample code: LinkedServices_Delete.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void integrationRuntimesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- IntegrationRuntimeResource resource = manager.integrationRuntimes()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", null,
- com.azure.core.util.Context.NONE)
- .getValue();
- resource.update().withAutoUpdate(IntegrationRuntimeAutoUpdate.OFF).withUpdateDelayOffset("\"PT3H\"").apply();
+ public static void linkedServicesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.linkedServices()
+ .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleLinkedService",
+ com.azure.core.util.Context.NONE);
}
}
```
-### IntegrationRuntimes_DeleteSync
+### GlobalParameters_ListByFactory
```java
/**
- * Samples for Pipelines GetSync.
+ * Samples for DataFlows Get.
*/
-public final class PipelinesGetSyncSamples {
+public final class DataFlowsGetSamples {
/*
* x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Get.json
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Get.json
*/
/**
- * Sample code: Pipelines_Get.
+ * Sample code: DataFlows_Get.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void pipelinesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.pipelines()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", null,
+ public static void dataFlowsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.dataFlows()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", null,
com.azure.core.util.Context.NONE);
}
}
```
-### IntegrationRuntimes_GetConnectionInfoSync
+### IntegrationRuntimeNodes_Delete
```java
/**
- * Samples for PrivateEndpointConnectionOperation GetSync.
+ * Samples for ManagedPrivateEndpoints Delete.
*/
-public final class PrivateEndpointConnectionOperationGetSyncSamples {
+public final class ManagedPrivateEndpointsDeleteSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * GetPrivateEndpointConnection.json
+ * ManagedPrivateEndpoints_Delete.json
*/
/**
- * Sample code: Get a private endpoint connection for a datafactory.
+ * Sample code: ManagedVirtualNetworks_Delete.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void
- getAPrivateEndpointConnectionForADatafactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.privateEndpointConnectionOperations()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "connection", null,
- com.azure.core.util.Context.NONE);
+ public static void managedVirtualNetworksDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.managedPrivateEndpoints()
+ .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName",
+ "exampleManagedPrivateEndpointName", com.azure.core.util.Context.NONE);
}
}
```
-### IntegrationRuntimes_GetMonitoringDataSync
+### IntegrationRuntimeNodes_Get
```java
+import com.azure.resourcemanager.datafactory.models.RunFilterParameters;
+import com.azure.resourcemanager.datafactory.models.RunQueryFilter;
+import com.azure.resourcemanager.datafactory.models.RunQueryFilterOperand;
+import com.azure.resourcemanager.datafactory.models.RunQueryFilterOperator;
+import java.time.OffsetDateTime;
+import java.util.Arrays;
+
/**
- * Samples for ChangeDataCapture StatusSync.
+ * Samples for PipelineRuns QueryByFactory.
*/
-public final class ChangeDataCaptureStatusSyncSamples {
+public final class PipelineRunsQueryByFactorySamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * ChangeDataCapture_Status.json
+ * PipelineRuns_QueryByFactory.json
*/
/**
- * Sample code: ChangeDataCapture_Start.
+ * Sample code: PipelineRuns_QueryByFactory.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void changeDataCaptureStart(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.changeDataCaptures()
- .statusWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture",
+ public static void pipelineRunsQueryByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.pipelineRuns()
+ .queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName",
+ new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z"))
+ .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z"))
+ .withFilters(Arrays.asList(new RunQueryFilter().withOperand(RunQueryFilterOperand.PIPELINE_NAME)
+ .withOperator(RunQueryFilterOperator.EQUALS)
+ .withValues(Arrays.asList("examplePipeline")))),
com.azure.core.util.Context.NONE);
}
}
```
-### IntegrationRuntimes_GetStatusSync
+### IntegrationRuntimeNodes_GetIpAddress
```java
-import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureResource;
-import com.azure.resourcemanager.datafactory.models.MapperPolicy;
-import com.azure.resourcemanager.datafactory.models.MapperSourceConnectionsInfo;
-import com.azure.resourcemanager.datafactory.models.MapperTargetConnectionsInfo;
-import java.util.List;
+import com.azure.core.management.serializer.SerializerFactory;
+import com.azure.core.util.serializer.SerializerEncoding;
+import com.azure.resourcemanager.datafactory.models.AzureStorageLinkedService;
+import com.azure.resourcemanager.datafactory.models.LinkedServiceResource;
+import java.io.IOException;
/**
- * Samples for ChangeDataCapture CreateOrUpdateSync.
+ * Samples for LinkedServices CreateOrUpdate.
*/
-public final class ChangeDataCaptureCreateOrUpdateSyncSamples {
+public final class LinkedServicesCreateOrUpdateSamples {
/*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * ChangeDataCapture_Create.json
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Create
+ * .json
*/
/**
- * Sample code: ChangeDataCapture_Create.
+ * Sample code: LinkedServices_Create.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void changeDataCaptureCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.changeDataCaptures()
- .define("exampleChangeDataCapture")
+ public static void linkedServicesCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
+ throws IOException {
+ manager.linkedServices()
+ .define("exampleLinkedService")
.withExistingFactory("exampleResourceGroup", "exampleFactoryName")
- .withSourceConnectionsInfo((List) null)
- .withTargetConnectionsInfo((List) null)
- .withPolicy((MapperPolicy) null)
- .withDescription(
- "Sample demo change data capture to transfer data from delimited (csv) to Azure SQL Database with automapped and non-automapped mappings.")
- .withAllowVNetOverride(false)
+ .withProperties(new AzureStorageLinkedService().withConnectionString(SerializerFactory
+ .createDefaultManagementSerializerAdapter()
+ .deserialize(
+ "{\"type\":\"SecureString\",\"value\":\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey=\"}",
+ Object.class, SerializerEncoding.JSON)))
.create();
}
/*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * ChangeDataCapture_Update.json
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Update
+ * .json
*/
/**
- * Sample code: ChangeDataCapture_Update.
+ * Sample code: LinkedServices_Update.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void changeDataCaptureUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- ChangeDataCaptureResource resource = manager.changeDataCaptures()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", null,
+ public static void linkedServicesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
+ throws IOException {
+ LinkedServiceResource resource = manager.linkedServices()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleLinkedService", null,
com.azure.core.util.Context.NONE)
.getValue();
resource.update()
- .withDescription(
- "Sample demo change data capture to transfer data from delimited (csv) to Azure SQL Database. Updating table mappings.")
- .withAllowVNetOverride(false)
- .withStatus("Stopped")
+ .withProperties(new AzureStorageLinkedService().withDescription("Example description")
+ .withConnectionString(SerializerFactory.createDefaultManagementSerializerAdapter()
+ .deserialize(
+ "{\"type\":\"SecureString\",\"value\":\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey=\"}",
+ Object.class, SerializerEncoding.JSON)))
.apply();
}
}
```
-### IntegrationRuntimes_GetSync
+### IntegrationRuntimeNodes_Update
```java
+import com.azure.resourcemanager.datafactory.models.Factory;
+import java.util.HashMap;
+import java.util.Map;
+
/**
- * Samples for ManagedPrivateEndpoints ListByFactorySync.
+ * Samples for Factories Update.
*/
-public final class ManagedPrivateEndpointsListByFactorySyncSamples {
+public final class FactoriesUpdateSamples {
/*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * ManagedPrivateEndpoints_ListByFactory.json
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Update.json
*/
/**
- * Sample code: ManagedPrivateEndpoints_ListByFactory.
+ * Sample code: Factories_Update.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void
- managedPrivateEndpointsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.managedPrivateEndpoints()
- .listByFactory("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName",
- com.azure.core.util.Context.NONE);
+ public static void factoriesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ Factory resource = manager.factories()
+ .getByResourceGroupWithResponse("exampleResourceGroup", "exampleFactoryName", null,
+ com.azure.core.util.Context.NONE)
+ .getValue();
+ resource.update().withTags(mapOf("exampleTag", "exampleValue")).apply();
+ }
+
+ // Use "Map.of" if available
+ @SuppressWarnings("unchecked")
+ private static <T> Map<String, T> mapOf(Object... inputs) {
+ Map<String, T> map = new HashMap<>();
+ for (int i = 0; i < inputs.length; i += 2) {
+ String key = (String) inputs[i];
+ T value = (T) inputs[i + 1];
+ map.put(key, value);
+ }
+ return map;
}
}
```
-### IntegrationRuntimes_ListAuthKeysSync
+### IntegrationRuntimeObjectMetadata_Get
```java
-import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredential;
+import com.azure.resourcemanager.datafactory.models.LinkedIntegrationRuntimeRequest;
/**
- * Samples for CredentialOperations CreateOrUpdateSync.
+ * Samples for IntegrationRuntimes RemoveLinks.
*/
-public final class CredentialOperationsCreateOrUpdateSyncSamples {
+public final class IntegrationRuntimesRemoveLinksSamples {
/*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Create.
- * json
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * IntegrationRuntimes_RemoveLinks.json
*/
/**
- * Sample code: Credentials_Create.
+ * Sample code: IntegrationRuntimes_Upgrade.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void credentialsCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.credentialOperations()
- .define("exampleCredential")
- .withExistingFactory("exampleResourceGroup", "exampleFactoryName")
- .withProperties(new ManagedIdentityCredential().withResourceId(
- "/subscriptions/12345678-1234-1234-1234-12345678abc/resourcegroups/exampleResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/exampleUami"))
- .create();
+ public static void integrationRuntimesUpgrade(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.integrationRuntimes()
+ .removeLinksWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
+ new LinkedIntegrationRuntimeRequest().withLinkedFactoryName("exampleFactoryName-linked"),
+ com.azure.core.util.Context.NONE);
}
}
```
-### IntegrationRuntimes_ListByFactorySync
+### IntegrationRuntimeObjectMetadata_Refresh
```java
/**
- * Samples for Pipelines DeleteSync.
+ * Samples for ChangeDataCapture Get.
*/
-public final class PipelinesDeleteSyncSamples {
+public final class ChangeDataCaptureGetSamples {
/*
* x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Delete.json
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ChangeDataCapture_Get
+ * .json
*/
/**
- * Sample code: Pipelines_Delete.
+ * Sample code: ChangeDataCapture_Get.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void pipelinesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.pipelines()
- .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline",
+ public static void changeDataCaptureGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.changeDataCaptures()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", null,
com.azure.core.util.Context.NONE);
}
}
```
-### IntegrationRuntimes_ListOutboundNetworkDependenciesEndpointsSync
+### IntegrationRuntimes_CreateLinkedIntegrationRuntime
```java
import com.azure.resourcemanager.datafactory.models.ExposureControlRequest;
/**
- * Samples for ExposureControl GetFeatureValueByFactorySync.
+ * Samples for ExposureControl GetFeatureValueByFactory.
*/
-public final class ExposureControlGetFeatureValueByFactorySyncSamples {
+public final class ExposureControlGetFeatureValueByFactorySamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
* ExposureControl_GetFeatureValueByFactory.json
@@ -1873,517 +1802,456 @@ public final class ExposureControlGetFeatureValueByFactorySyncSamples {
}
```
-### IntegrationRuntimes_RegenerateAuthKeySync
+### IntegrationRuntimes_CreateOrUpdate
```java
+import com.azure.resourcemanager.datafactory.models.TriggerFilterParameters;
+
/**
- * Samples for Triggers SubscribeToEvents.
+ * Samples for Triggers QueryByFactory.
*/
-public final class TriggersSubscribeToEventsSamples {
+public final class TriggersQueryByFactorySamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * Triggers_SubscribeToEvents.json
+ * Triggers_QueryByFactory.json
*/
/**
- * Sample code: Triggers_SubscribeToEvents.
+ * Sample code: Triggers_QueryByFactory.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void triggersSubscribeToEvents(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ public static void triggersQueryByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
manager.triggers()
- .subscribeToEvents("exampleResourceGroup", "exampleFactoryName", "exampleTrigger",
- com.azure.core.util.Context.NONE);
- }
-}
-```
-
-### IntegrationRuntimes_RemoveLinksSync
-
-```java
-/**
- * Samples for CredentialOperations DeleteSync.
- */
-public final class CredentialOperationsDeleteSyncSamples {
- /*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Delete.
- * json
- */
- /**
- * Sample code: Credentials_Delete.
- *
- * @param manager Entry point to DataFactoryManager.
- */
- public static void credentialsDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.credentialOperations()
- .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleCredential",
+ .queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName",
+ new TriggerFilterParameters().withParentTriggerName("exampleTrigger"),
com.azure.core.util.Context.NONE);
}
}
```
-### IntegrationRuntimes_Start
+### IntegrationRuntimes_Delete
```java
/**
- * Samples for Triggers Start.
+ * Samples for Triggers SubscribeToEvents.
*/
-public final class TriggersStartSamples {
+public final class TriggersSubscribeToEventsSamples {
/*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Start.json
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * Triggers_SubscribeToEvents.json
*/
/**
- * Sample code: Triggers_Start.
+ * Sample code: Triggers_SubscribeToEvents.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void triggersStart(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ public static void triggersSubscribeToEvents(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
manager.triggers()
- .start("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", com.azure.core.util.Context.NONE);
+ .subscribeToEvents("exampleResourceGroup", "exampleFactoryName", "exampleTrigger",
+ com.azure.core.util.Context.NONE);
}
}
```
-### IntegrationRuntimes_Stop
+### IntegrationRuntimes_Get
```java
/**
- * Samples for IntegrationRuntimeNodes GetSync.
+ * Samples for IntegrationRuntimes ListOutboundNetworkDependenciesEndpoints.
*/
-public final class IntegrationRuntimeNodesGetSyncSamples {
+public final class IntegrationRuntimesListOutboundNetworkDependenciesEndpointsSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimeNodes_Get.json
+ * IntegrationRuntimes_ListOutboundNetworkDependenciesEndpoints.json
*/
/**
- * Sample code: IntegrationRuntimeNodes_Get.
+ * Sample code: IntegrationRuntimes_OutboundNetworkDependenciesEndpoints.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void integrationRuntimeNodesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.integrationRuntimeNodes()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", "Node_1",
- com.azure.core.util.Context.NONE);
+ public static void integrationRuntimesOutboundNetworkDependenciesEndpoints(
+ com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.integrationRuntimes()
+ .listOutboundNetworkDependenciesEndpointsWithResponse("exampleResourceGroup", "exampleFactoryName",
+ "exampleIntegrationRuntime", com.azure.core.util.Context.NONE);
}
}
```
-### IntegrationRuntimes_SyncCredentialsSync
+### IntegrationRuntimes_GetConnectionInfo
```java
-import com.azure.core.management.serializer.SerializerFactory;
-import com.azure.core.util.serializer.SerializerEncoding;
-import com.azure.resourcemanager.datafactory.models.AzureStorageLinkedService;
-import com.azure.resourcemanager.datafactory.models.LinkedServiceResource;
-import java.io.IOException;
-
/**
- * Samples for LinkedServices CreateOrUpdateSync.
+ * Samples for Triggers Start.
*/
-public final class LinkedServicesCreateOrUpdateSyncSamples {
- /*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Create
- * .json
- */
- /**
- * Sample code: LinkedServices_Create.
- *
- * @param manager Entry point to DataFactoryManager.
- */
- public static void linkedServicesCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
- throws IOException {
- manager.linkedServices()
- .define("exampleLinkedService")
- .withExistingFactory("exampleResourceGroup", "exampleFactoryName")
- .withProperties(new AzureStorageLinkedService().withConnectionString(SerializerFactory
- .createDefaultManagementSerializerAdapter()
- .deserialize(
- "{\"type\":\"SecureString\",\"value\":\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey=\"}",
- Object.class, SerializerEncoding.JSON)))
- .create();
- }
-
+public final class TriggersStartSamples {
/*
* x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Update
- * .json
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Start.json
*/
/**
- * Sample code: LinkedServices_Update.
+ * Sample code: Triggers_Start.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void linkedServicesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
- throws IOException {
- LinkedServiceResource resource = manager.linkedServices()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleLinkedService", null,
- com.azure.core.util.Context.NONE)
- .getValue();
- resource.update()
- .withProperties(new AzureStorageLinkedService().withDescription("Example description")
- .withConnectionString(SerializerFactory.createDefaultManagementSerializerAdapter()
- .deserialize(
- "{\"type\":\"SecureString\",\"value\":\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey=\"}",
- Object.class, SerializerEncoding.JSON)))
- .apply();
+ public static void triggersStart(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.triggers()
+ .start("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", com.azure.core.util.Context.NONE);
}
}
```
-### IntegrationRuntimes_UpdateSync
+### IntegrationRuntimes_GetMonitoringData
```java
-import com.azure.resourcemanager.datafactory.models.ExposureControlRequest;
-
/**
- * Samples for ExposureControl GetFeatureValueSync.
+ * Samples for GlobalParameters ListByFactory.
*/
-public final class ExposureControlGetFeatureValueSyncSamples {
+public final class GlobalParametersListByFactorySamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * ExposureControl_GetFeatureValue.json
+ * GlobalParameters_ListByFactory.json
*/
/**
- * Sample code: ExposureControl_GetFeatureValue.
+ * Sample code: GlobalParameters_ListByFactory.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void
- exposureControlGetFeatureValue(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.exposureControls()
- .getFeatureValueWithResponse("WestEurope",
- new ExposureControlRequest().withFeatureName("ADFIntegrationRuntimeSharingRbac")
- .withFeatureType("Feature"),
- com.azure.core.util.Context.NONE);
+ public static void globalParametersListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.globalParameters()
+ .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
}
}
```
-### IntegrationRuntimes_UpgradeSync
+### IntegrationRuntimes_GetStatus
```java
/**
- * Samples for GlobalParameters GetSync.
+ * Samples for ManagedVirtualNetworks Get.
*/
-public final class GlobalParametersGetSyncSamples {
+public final class ManagedVirtualNetworksGetSamples {
/*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_Get.
- * json
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * ManagedVirtualNetworks_Get.json
*/
/**
- * Sample code: GlobalParameters_Get.
+ * Sample code: ManagedVirtualNetworks_Get.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void globalParametersGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.globalParameters()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "default", com.azure.core.util.Context.NONE);
+ public static void managedVirtualNetworksGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.managedVirtualNetworks()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName", null,
+ com.azure.core.util.Context.NONE);
}
}
```
-### LinkedServices_CreateOrUpdateSync
+### IntegrationRuntimes_ListAuthKeys
```java
-import com.azure.core.management.serializer.SerializerFactory;
-import com.azure.core.util.serializer.SerializerEncoding;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
/**
- * Samples for Pipelines CreateRunSync.
+ * Samples for IntegrationRuntimes ListByFactory.
*/
-public final class PipelinesCreateRunSyncSamples {
+public final class IntegrationRuntimesListByFactorySamples {
/*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_CreateRun.
- * json
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * IntegrationRuntimes_ListByFactory.json
*/
/**
- * Sample code: Pipelines_CreateRun.
+ * Sample code: IntegrationRuntimes_ListByFactory.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void pipelinesCreateRun(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
- throws IOException {
- manager.pipelines()
- .createRunWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", null, null, null,
- null,
- mapOf("OutputBlobNameList",
- SerializerFactory.createDefaultManagementSerializerAdapter()
- .deserialize("[\"exampleoutput.csv\"]", Object.class, SerializerEncoding.JSON)),
- com.azure.core.util.Context.NONE);
+ public static void
+ integrationRuntimesListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.integrationRuntimes()
+ .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
}
+}
+```
- // Use "Map.of" if available
- @SuppressWarnings("unchecked")
- private static <T> Map<String, T> mapOf(Object... inputs) {
- Map<String, T> map = new HashMap<>();
- for (int i = 0; i < inputs.length; i += 2) {
- String key = (String) inputs[i];
- T value = (T) inputs[i + 1];
- map.put(key, value);
- }
- return map;
+### IntegrationRuntimes_ListByFactory
+
+```java
+/**
+ * Samples for Factories CreateOrUpdate.
+ */
+public final class FactoriesCreateOrUpdateSamples {
+ /*
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * Factories_CreateOrUpdate.json
+ */
+ /**
+ * Sample code: Factories_CreateOrUpdate.
+ *
+ * @param manager Entry point to DataFactoryManager.
+ */
+ public static void factoriesCreateOrUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.factories()
+ .define("exampleFactoryName")
+ .withRegion("East US")
+ .withExistingResourceGroup("exampleResourceGroup")
+ .create();
}
}
```
-### LinkedServices_DeleteSync
+### IntegrationRuntimes_ListOutboundNetworkDependenciesEndpoints
```java
+import com.azure.resourcemanager.datafactory.models.CreateLinkedIntegrationRuntimeRequest;
+
/**
- * Samples for ChangeDataCapture StopSync.
+ * Samples for IntegrationRuntimes CreateLinkedIntegrationRuntime.
*/
-public final class ChangeDataCaptureStopSyncSamples {
+public final class IntegrationRuntimesCreateLinkedIntegrationRuntimeSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * ChangeDataCapture_Stop.json
+ * IntegrationRuntimes_CreateLinkedIntegrationRuntime.json
*/
/**
- * Sample code: ChangeDataCapture_Stop.
+ * Sample code: IntegrationRuntimes_CreateLinkedIntegrationRuntime.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void changeDataCaptureStop(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.changeDataCaptures()
- .stopWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture",
+ public static void integrationRuntimesCreateLinkedIntegrationRuntime(
+ com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.integrationRuntimes()
+ .createLinkedIntegrationRuntimeWithResponse("exampleResourceGroup", "exampleFactoryName",
+ "exampleIntegrationRuntime",
+ new CreateLinkedIntegrationRuntimeRequest().withName("bfa92911-9fb6-4fbe-8f23-beae87bc1c83")
+ .withSubscriptionId("061774c7-4b5a-4159-a55b-365581830283")
+ .withDataFactoryName("e9955d6d-56ea-4be3-841c-52a12c1a9981")
+ .withDataFactoryLocation("West US"),
com.azure.core.util.Context.NONE);
}
}
```
-### LinkedServices_GetSync
+### IntegrationRuntimes_RegenerateAuthKey
```java
/**
- * Samples for Datasets GetSync.
+ * Samples for PrivateLinkResources Get.
*/
-public final class DatasetsGetSyncSamples {
+public final class PrivateLinkResourcesGetSamples {
/*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Get.json
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * GetPrivateLinkResources.json
*/
/**
- * Sample code: Datasets_Get.
+ * Sample code: Get private link resources of a site.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void datasetsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.datasets()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataset", null,
- com.azure.core.util.Context.NONE);
+ public static void
+ getPrivateLinkResourcesOfASite(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.privateLinkResources()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
}
}
```
-### LinkedServices_ListByFactorySync
+### IntegrationRuntimes_RemoveLinks
```java
-import com.azure.core.management.serializer.SerializerFactory;
-import com.azure.core.util.serializer.SerializerEncoding;
-import com.azure.resourcemanager.datafactory.models.BlobSink;
-import com.azure.resourcemanager.datafactory.models.BlobSource;
-import com.azure.resourcemanager.datafactory.models.CopyActivity;
-import com.azure.resourcemanager.datafactory.models.DatasetReference;
-import com.azure.resourcemanager.datafactory.models.Expression;
-import com.azure.resourcemanager.datafactory.models.ForEachActivity;
-import com.azure.resourcemanager.datafactory.models.ParameterSpecification;
-import com.azure.resourcemanager.datafactory.models.ParameterType;
-import com.azure.resourcemanager.datafactory.models.PipelineElapsedTimeMetricPolicy;
-import com.azure.resourcemanager.datafactory.models.PipelinePolicy;
-import com.azure.resourcemanager.datafactory.models.PipelineResource;
-import com.azure.resourcemanager.datafactory.models.VariableSpecification;
-import com.azure.resourcemanager.datafactory.models.VariableType;
-import java.io.IOException;
-import java.util.Arrays;
+import com.azure.resourcemanager.datafactory.models.ManagedVirtualNetwork;
import java.util.HashMap;
import java.util.Map;
/**
- * Samples for Pipelines CreateOrUpdateSync.
+ * Samples for ManagedVirtualNetworks CreateOrUpdate.
*/
-public final class PipelinesCreateOrUpdateSyncSamples {
+public final class ManagedVirtualNetworksCreateOrUpdateSamples {
/*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Create.json
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * ManagedVirtualNetworks_Create.json
*/
/**
- * Sample code: Pipelines_Create.
+ * Sample code: ManagedVirtualNetworks_Create.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void pipelinesCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
- throws IOException {
- manager.pipelines()
- .define("examplePipeline")
+ public static void managedVirtualNetworksCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.managedVirtualNetworks()
+ .define("exampleManagedVirtualNetworkName")
.withExistingFactory("exampleResourceGroup", "exampleFactoryName")
- .withActivities(Arrays.asList(new ForEachActivity().withName("ExampleForeachActivity")
- .withIsSequential(true)
- .withItems(new Expression().withValue("@pipeline().parameters.OutputBlobNameList"))
- .withActivities(Arrays.asList(new CopyActivity().withName("ExampleCopyActivity")
- .withInputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset")
- .withParameters(
- mapOf("MyFileName", "examplecontainer.csv", "MyFolderPath", "examplecontainer"))))
- .withOutputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset")
- .withParameters(mapOf("MyFileName",
- SerializerFactory.createDefaultManagementSerializerAdapter()
- .deserialize("{\"type\":\"Expression\",\"value\":\"@item()\"}", Object.class,
- SerializerEncoding.JSON),
- "MyFolderPath", "examplecontainer"))))
- .withSource(new BlobSource())
- .withSink(new BlobSink())
- .withDataIntegrationUnits(32)))))
- .withParameters(mapOf("JobId", new ParameterSpecification().withType(ParameterType.STRING),
- "OutputBlobNameList", new ParameterSpecification().withType(ParameterType.ARRAY)))
- .withVariables(mapOf("TestVariableArray", new VariableSpecification().withType(VariableType.ARRAY)))
- .withRunDimensions(mapOf("JobId",
- SerializerFactory.createDefaultManagementSerializerAdapter()
- .deserialize("{\"type\":\"Expression\",\"value\":\"@pipeline().parameters.JobId\"}", Object.class,
- SerializerEncoding.JSON)))
- .withPolicy(new PipelinePolicy()
- .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("0.00:10:00")))
+ .withProperties(new ManagedVirtualNetwork().withAdditionalProperties(mapOf()))
.create();
}
+ // Use "Map.of" if available
+ @SuppressWarnings("unchecked")
+ private static <T> Map<String, T> mapOf(Object... inputs) {
+ Map<String, T> map = new HashMap<>();
+ for (int i = 0; i < inputs.length; i += 2) {
+ String key = (String) inputs[i];
+ T value = (T) inputs[i + 1];
+ map.put(key, value);
+ }
+ return map;
+ }
+}
+```
+
+### IntegrationRuntimes_Start
+
+```java
+import com.azure.resourcemanager.datafactory.models.ManagedIdentityCredential;
+
+/**
+ * Samples for CredentialOperations CreateOrUpdate.
+ */
+public final class CredentialOperationsCreateOrUpdateSamples {
/*
* x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Update.json
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Create.
+ * json
*/
/**
- * Sample code: Pipelines_Update.
+ * Sample code: Credentials_Create.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void pipelinesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
- throws IOException {
- PipelineResource resource = manager.pipelines()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", null,
- com.azure.core.util.Context.NONE)
- .getValue();
- resource.update()
- .withDescription("Example description")
- .withActivities(Arrays.asList(new ForEachActivity().withName("ExampleForeachActivity")
- .withIsSequential(true)
- .withItems(new Expression().withValue("@pipeline().parameters.OutputBlobNameList"))
- .withActivities(Arrays.asList(new CopyActivity().withName("ExampleCopyActivity")
- .withInputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset")
- .withParameters(
- mapOf("MyFileName", "examplecontainer.csv", "MyFolderPath", "examplecontainer"))))
- .withOutputs(Arrays.asList(new DatasetReference().withReferenceName("exampleDataset")
- .withParameters(mapOf("MyFileName",
- SerializerFactory.createDefaultManagementSerializerAdapter()
- .deserialize("{\"type\":\"Expression\",\"value\":\"@item()\"}", Object.class,
- SerializerEncoding.JSON),
- "MyFolderPath", "examplecontainer"))))
- .withSource(new BlobSource())
- .withSink(new BlobSink())
- .withDataIntegrationUnits(32)))))
- .withParameters(mapOf("OutputBlobNameList", new ParameterSpecification().withType(ParameterType.ARRAY)))
- .withPolicy(new PipelinePolicy()
- .withElapsedTimeMetric(new PipelineElapsedTimeMetricPolicy().withDuration("0.00:10:00")))
- .apply();
+ public static void credentialsCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.credentialOperations()
+ .define("exampleCredential")
+ .withExistingFactory("exampleResourceGroup", "exampleFactoryName")
+ .withProperties(new ManagedIdentityCredential().withResourceId(
+ "/subscriptions/12345678-1234-1234-1234-12345678abc/resourcegroups/exampleResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/exampleUami"))
+ .create();
}
+}
+```
- // Use "Map.of" if available
- @SuppressWarnings("unchecked")
- private static <T> Map<String, T> mapOf(Object... inputs) {
- Map<String, T> map = new HashMap<>();
- for (int i = 0; i < inputs.length; i += 2) {
- String key = (String) inputs[i];
- T value = (T) inputs[i + 1];
- map.put(key, value);
- }
- return map;
+### IntegrationRuntimes_Stop
+
+```java
+import com.azure.resourcemanager.datafactory.models.FactoryRepoUpdate;
+import com.azure.resourcemanager.datafactory.models.FactoryVstsConfiguration;
+
+/**
+ * Samples for Factories ConfigureFactoryRepo.
+ */
+public final class FactoriesConfigureFactoryRepoSamples {
+ /*
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * Factories_ConfigureFactoryRepo.json
+ */
+ /**
+ * Sample code: Factories_ConfigureFactoryRepo.
+ *
+ * @param manager Entry point to DataFactoryManager.
+ */
+ public static void factoriesConfigureFactoryRepo(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.factories()
+ .configureFactoryRepoWithResponse("East US", new FactoryRepoUpdate().withFactoryResourceId(
+ "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName")
+ .withRepoConfiguration(new FactoryVstsConfiguration().withAccountName("ADF")
+ .withRepositoryName("repo")
+ .withCollaborationBranch("master")
+ .withRootFolder("/")
+ .withLastCommitId("")
+ .withProjectName("project")
+ .withTenantId("")),
+ com.azure.core.util.Context.NONE);
}
}
```
-### ManagedPrivateEndpoints_CreateOrUpdateSync
+### IntegrationRuntimes_SyncCredentials
```java
/**
- * Samples for TriggerRuns CancelSync.
+ * Samples for ChangeDataCapture Status.
*/
-public final class TriggerRunsCancelSyncSamples {
+public final class ChangeDataCaptureStatusSamples {
/*
- * x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_Cancel.
- * json
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * ChangeDataCapture_Status.json
*/
/**
- * Sample code: Triggers_Cancel.
+ * Sample code: ChangeDataCapture_Start.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void triggersCancel(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.triggerRuns()
- .cancelWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger",
- "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", com.azure.core.util.Context.NONE);
+ public static void changeDataCaptureStart(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.changeDataCaptures()
+ .statusWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture",
+ com.azure.core.util.Context.NONE);
}
}
```
-### ManagedPrivateEndpoints_DeleteSync
+### IntegrationRuntimes_Update
```java
/**
- * Samples for IntegrationRuntimes SyncCredentialsSync.
+ * Samples for DataFlows Delete.
*/
-public final class IntegrationRuntimesSyncCredentialsSyncSamples {
+public final class DataFlowsDeleteSamples {
/*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimes_SyncCredentials.json
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Delete.json
*/
/**
- * Sample code: IntegrationRuntimes_SyncCredentials.
+ * Sample code: DataFlows_Delete.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void
- integrationRuntimesSyncCredentials(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.integrationRuntimes()
- .syncCredentialsWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
+ public static void dataFlowsDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.dataFlows()
+ .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataFlow",
com.azure.core.util.Context.NONE);
}
}
```
-### ManagedPrivateEndpoints_GetSync
+### IntegrationRuntimes_Upgrade
```java
+import com.azure.resourcemanager.datafactory.models.RunFilterParameters;
+import com.azure.resourcemanager.datafactory.models.RunQueryFilter;
+import com.azure.resourcemanager.datafactory.models.RunQueryFilterOperand;
+import com.azure.resourcemanager.datafactory.models.RunQueryFilterOperator;
+import java.time.OffsetDateTime;
+import java.util.Arrays;
+
/**
- * Samples for IntegrationRuntimes ListByFactorySync.
+ * Samples for TriggerRuns QueryByFactory.
*/
-public final class IntegrationRuntimesListByFactorySyncSamples {
+public final class TriggerRunsQueryByFactorySamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimes_ListByFactory.json
+ * TriggerRuns_QueryByFactory.json
*/
/**
- * Sample code: IntegrationRuntimes_ListByFactory.
+ * Sample code: TriggerRuns_QueryByFactory.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void
- integrationRuntimesListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.integrationRuntimes()
- .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
+ public static void triggerRunsQueryByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.triggerRuns()
+ .queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName",
+ new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z"))
+ .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z"))
+ .withFilters(Arrays.asList(new RunQueryFilter().withOperand(RunQueryFilterOperand.TRIGGER_NAME)
+ .withOperator(RunQueryFilterOperator.EQUALS)
+ .withValues(Arrays.asList("exampleTrigger")))),
+ com.azure.core.util.Context.NONE);
}
}
```
-### ManagedPrivateEndpoints_ListByFactorySync
+### LinkedServices_CreateOrUpdate
```java
/**
- * Samples for Triggers GetEventSubscriptionStatusSync.
+ * Samples for Triggers GetEventSubscriptionStatus.
*/
-public final class TriggersGetEventSubscriptionStatusSyncSamples {
+public final class TriggersGetEventSubscriptionStatusSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
* Triggers_GetEventSubscriptionStatus.json
@@ -2402,106 +2270,106 @@ public final class TriggersGetEventSubscriptionStatusSyncSamples {
}
```
-### ManagedVirtualNetworks_CreateOrUpdateSync
+### LinkedServices_Delete
```java
/**
- * Samples for Factories CreateOrUpdateSync.
+ * Samples for Factories List.
*/
-public final class FactoriesCreateOrUpdateSyncSamples {
+public final class FactoriesListSamples {
/*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * Factories_CreateOrUpdate.json
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_List.json
*/
/**
- * Sample code: Factories_CreateOrUpdate.
+ * Sample code: Factories_List.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void factoriesCreateOrUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.factories()
- .define("exampleFactoryName")
- .withRegion("East US")
- .withExistingResourceGroup("exampleResourceGroup")
- .create();
+ public static void factoriesList(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.factories().list(com.azure.core.util.Context.NONE);
}
}
```
-### ManagedVirtualNetworks_GetSync
+### LinkedServices_Get
```java
import com.azure.core.management.serializer.SerializerFactory;
import com.azure.core.util.serializer.SerializerEncoding;
-import com.azure.resourcemanager.datafactory.models.AzureBlobDataset;
-import com.azure.resourcemanager.datafactory.models.DatasetResource;
-import com.azure.resourcemanager.datafactory.models.LinkedServiceReference;
-import com.azure.resourcemanager.datafactory.models.ParameterSpecification;
-import com.azure.resourcemanager.datafactory.models.ParameterType;
-import com.azure.resourcemanager.datafactory.models.TextFormat;
+import com.azure.resourcemanager.datafactory.models.PipelineReference;
+import com.azure.resourcemanager.datafactory.models.RecurrenceFrequency;
+import com.azure.resourcemanager.datafactory.models.ScheduleTrigger;
+import com.azure.resourcemanager.datafactory.models.ScheduleTriggerRecurrence;
+import com.azure.resourcemanager.datafactory.models.TriggerPipelineReference;
+import com.azure.resourcemanager.datafactory.models.TriggerResource;
import java.io.IOException;
+import java.time.OffsetDateTime;
+import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
/**
- * Samples for Datasets CreateOrUpdateSync.
+ * Samples for Triggers CreateOrUpdate.
*/
-public final class DatasetsCreateOrUpdateSyncSamples {
+public final class TriggersCreateOrUpdateSamples {
/*
* x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Create.json
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Create.json
*/
/**
- * Sample code: Datasets_Create.
+ * Sample code: Triggers_Create.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void datasetsCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
+ public static void triggersCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
throws IOException {
- manager.datasets()
- .define("exampleDataset")
+ manager.triggers()
+ .define("exampleTrigger")
.withExistingFactory("exampleResourceGroup", "exampleFactoryName")
- .withProperties(new AzureBlobDataset()
- .withLinkedServiceName(new LinkedServiceReference().withReferenceName("exampleLinkedService"))
- .withParameters(mapOf("MyFileName", new ParameterSpecification().withType(ParameterType.STRING),
- "MyFolderPath", new ParameterSpecification().withType(ParameterType.STRING)))
- .withFolderPath(SerializerFactory.createDefaultManagementSerializerAdapter()
- .deserialize("{\"type\":\"Expression\",\"value\":\"@dataset().MyFolderPath\"}", Object.class,
- SerializerEncoding.JSON))
- .withFileName(SerializerFactory.createDefaultManagementSerializerAdapter()
- .deserialize("{\"type\":\"Expression\",\"value\":\"@dataset().MyFileName\"}", Object.class,
- SerializerEncoding.JSON))
- .withFormat(new TextFormat()))
+ .withProperties(new ScheduleTrigger()
+ .withPipelines(Arrays.asList(new TriggerPipelineReference()
+ .withPipelineReference(new PipelineReference().withReferenceName("examplePipeline"))
+ .withParameters(mapOf("OutputBlobNameList",
+ SerializerFactory.createDefaultManagementSerializerAdapter()
+ .deserialize("[\"exampleoutput.csv\"]", Object.class, SerializerEncoding.JSON)))))
+ .withRecurrence(new ScheduleTriggerRecurrence().withFrequency(RecurrenceFrequency.MINUTE)
+ .withInterval(4)
+ .withStartTime(OffsetDateTime.parse("2018-06-16T00:39:13.8441801Z"))
+ .withEndTime(OffsetDateTime.parse("2018-06-16T00:55:13.8441801Z"))
+ .withTimeZone("UTC")
+ .withAdditionalProperties(mapOf())))
.create();
}
/*
* x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Update.json
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Update.json
*/
/**
- * Sample code: Datasets_Update.
+ * Sample code: Triggers_Update.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void datasetsUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
+ public static void triggersUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
throws IOException {
- DatasetResource resource = manager.datasets()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataset", null,
+ TriggerResource resource = manager.triggers()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", null,
com.azure.core.util.Context.NONE)
.getValue();
resource.update()
- .withProperties(new AzureBlobDataset().withDescription("Example description")
- .withLinkedServiceName(new LinkedServiceReference().withReferenceName("exampleLinkedService"))
- .withParameters(mapOf("MyFileName", new ParameterSpecification().withType(ParameterType.STRING),
- "MyFolderPath", new ParameterSpecification().withType(ParameterType.STRING)))
- .withFolderPath(SerializerFactory.createDefaultManagementSerializerAdapter()
- .deserialize("{\"type\":\"Expression\",\"value\":\"@dataset().MyFolderPath\"}", Object.class,
- SerializerEncoding.JSON))
- .withFileName(SerializerFactory.createDefaultManagementSerializerAdapter()
- .deserialize("{\"type\":\"Expression\",\"value\":\"@dataset().MyFileName\"}", Object.class,
- SerializerEncoding.JSON))
- .withFormat(new TextFormat()))
+ .withProperties(new ScheduleTrigger().withDescription("Example description")
+ .withPipelines(Arrays.asList(new TriggerPipelineReference()
+ .withPipelineReference(new PipelineReference().withReferenceName("examplePipeline"))
+ .withParameters(mapOf("OutputBlobNameList",
+ SerializerFactory.createDefaultManagementSerializerAdapter()
+ .deserialize("[\"exampleoutput.csv\"]", Object.class, SerializerEncoding.JSON)))))
+ .withRecurrence(new ScheduleTriggerRecurrence().withFrequency(RecurrenceFrequency.MINUTE)
+ .withInterval(4)
+ .withStartTime(OffsetDateTime.parse("2018-06-16T00:39:14.905167Z"))
+ .withEndTime(OffsetDateTime.parse("2018-06-16T00:55:14.905167Z"))
+ .withTimeZone("UTC")
+ .withAdditionalProperties(mapOf())))
.apply();
}
@@ -2519,253 +2387,262 @@ public final class DatasetsCreateOrUpdateSyncSamples {
}
```
-### ManagedVirtualNetworks_ListByFactorySync
+### LinkedServices_ListByFactory
+
+```java
+import com.azure.resourcemanager.datafactory.models.DeleteDataFlowDebugSessionRequest;
+
+/**
+ * Samples for DataFlowDebugSession Delete.
+ */
+public final class DataFlowDebugSessionDeleteSamples {
+ /*
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * DataFlowDebugSession_Delete.json
+ */
+ /**
+ * Sample code: DataFlowDebugSession_Delete.
+ *
+ * @param manager Entry point to DataFactoryManager.
+ */
+ public static void dataFlowDebugSessionDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.dataFlowDebugSessions()
+ .deleteWithResponse("exampleResourceGroup", "exampleFactoryName",
+ new DeleteDataFlowDebugSessionRequest().withSessionId("91fb57e0-8292-47be-89ff-c8f2d2bb2a7e"),
+ com.azure.core.util.Context.NONE);
+ }
+}
+```
+
+### ManagedPrivateEndpoints_CreateOrUpdate
```java
/**
- * Samples for Factories ListSync.
+ * Samples for PipelineRuns Cancel.
*/
-public final class FactoriesListSyncSamples {
+public final class PipelineRunsCancelSamples {
/*
* x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_List.json
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_Cancel.
+ * json
*/
/**
- * Sample code: Factories_List.
+ * Sample code: PipelineRuns_Cancel.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void factoriesList(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.factories().list(com.azure.core.util.Context.NONE);
+ public static void pipelineRunsCancel(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.pipelineRuns()
+ .cancelWithResponse("exampleResourceGroup", "exampleFactoryName", "16ac5348-ff82-4f95-a80d-638c1d47b721",
+ null, com.azure.core.util.Context.NONE);
}
}
```
-### Operations_ListSync
+### ManagedPrivateEndpoints_Delete
```java
/**
- * Samples for Triggers ListByFactorySync.
+ * Samples for Triggers Stop.
*/
-public final class TriggersListByFactorySyncSamples {
+public final class TriggersStopSamples {
/*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * Triggers_ListByFactory.json
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Stop.json
*/
/**
- * Sample code: Triggers_ListByFactory.
+ * Sample code: Triggers_Stop.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void triggersListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ public static void triggersStop(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
manager.triggers()
- .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
+ .stop("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", com.azure.core.util.Context.NONE);
+ }
+}
+```
+
+### ManagedPrivateEndpoints_Get
+
+```java
+import com.azure.resourcemanager.datafactory.models.ExposureControlRequest;
+
+/**
+ * Samples for ExposureControl GetFeatureValue.
+ */
+public final class ExposureControlGetFeatureValueSamples {
+ /*
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * ExposureControl_GetFeatureValue.json
+ */
+ /**
+ * Sample code: ExposureControl_GetFeatureValue.
+ *
+ * @param manager Entry point to DataFactoryManager.
+ */
+ public static void
+ exposureControlGetFeatureValue(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.exposureControls()
+ .getFeatureValueWithResponse("WestEurope",
+ new ExposureControlRequest().withFeatureName("ADFIntegrationRuntimeSharingRbac")
+ .withFeatureType("Feature"),
+ com.azure.core.util.Context.NONE);
}
}
```
-### PipelineRuns_CancelSync
+### ManagedPrivateEndpoints_ListByFactory
```java
/**
- * Samples for Triggers Stop.
+ * Samples for Factories Delete.
*/
-public final class TriggersStopSamples {
+public final class FactoriesDeleteSamples {
/*
* x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Stop.json
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Delete.json
*/
/**
- * Sample code: Triggers_Stop.
+ * Sample code: Factories_Delete.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void triggersStop(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.triggers()
- .stop("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", com.azure.core.util.Context.NONE);
+ public static void factoriesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.factories()
+ .deleteByResourceGroupWithResponse("exampleResourceGroup", "exampleFactoryName",
+ com.azure.core.util.Context.NONE);
}
}
```
-### PipelineRuns_GetSync
+### ManagedVirtualNetworks_CreateOrUpdate
```java
/**
- * Samples for GlobalParameters ListByFactorySync.
+ * Samples for IntegrationRuntimes GetMonitoringData.
*/
-public final class GlobalParametersListByFactorySyncSamples {
+public final class IntegrationRuntimesGetMonitoringDataSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * GlobalParameters_ListByFactory.json
+ * IntegrationRuntimes_GetMonitoringData.json
*/
/**
- * Sample code: GlobalParameters_ListByFactory.
+ * Sample code: IntegrationRuntimes_GetMonitoringData.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void globalParametersListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.globalParameters()
- .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
+ public static void
+ integrationRuntimesGetMonitoringData(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.integrationRuntimes()
+ .getMonitoringDataWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
+ com.azure.core.util.Context.NONE);
}
}
```
-### PipelineRuns_QueryByFactorySync
+### ManagedVirtualNetworks_Get
```java
-import com.azure.resourcemanager.datafactory.models.RunFilterParameters;
-import com.azure.resourcemanager.datafactory.models.RunQueryFilter;
-import com.azure.resourcemanager.datafactory.models.RunQueryFilterOperand;
-import com.azure.resourcemanager.datafactory.models.RunQueryFilterOperator;
-import java.time.OffsetDateTime;
+import com.azure.resourcemanager.datafactory.models.ExposureControlBatchRequest;
+import com.azure.resourcemanager.datafactory.models.ExposureControlRequest;
import java.util.Arrays;
/**
- * Samples for PipelineRuns QueryByFactorySync.
+ * Samples for ExposureControl QueryFeatureValuesByFactory.
*/
-public final class PipelineRunsQueryByFactorySyncSamples {
+public final class ExposureControlQueryFeatureValuesByFactorySamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * PipelineRuns_QueryByFactory.json
+ * ExposureControl_QueryFeatureValuesByFactory.json
*/
/**
- * Sample code: PipelineRuns_QueryByFactory.
+ * Sample code: ExposureControl_QueryFeatureValuesByFactory.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void pipelineRunsQueryByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.pipelineRuns()
- .queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName",
- new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z"))
- .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z"))
- .withFilters(Arrays.asList(new RunQueryFilter().withOperand(RunQueryFilterOperand.PIPELINE_NAME)
- .withOperator(RunQueryFilterOperator.EQUALS)
- .withValues(Arrays.asList("examplePipeline")))),
+ public static void
+ exposureControlQueryFeatureValuesByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.exposureControls()
+ .queryFeatureValuesByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName",
+ new ExposureControlBatchRequest().withExposureControlRequests(Arrays.asList(
+ new ExposureControlRequest().withFeatureName("ADFIntegrationRuntimeSharingRbac")
+ .withFeatureType("Feature"),
+ new ExposureControlRequest().withFeatureName("ADFSampleFeature").withFeatureType("Feature"))),
com.azure.core.util.Context.NONE);
}
}
```
-### Pipelines_CreateOrUpdateSync
+### ManagedVirtualNetworks_ListByFactory
```java
-import com.azure.core.management.serializer.SerializerFactory;
-import com.azure.core.util.serializer.SerializerEncoding;
-import com.azure.resourcemanager.datafactory.models.PipelineReference;
-import com.azure.resourcemanager.datafactory.models.RecurrenceFrequency;
-import com.azure.resourcemanager.datafactory.models.ScheduleTrigger;
-import com.azure.resourcemanager.datafactory.models.ScheduleTriggerRecurrence;
-import com.azure.resourcemanager.datafactory.models.TriggerPipelineReference;
-import com.azure.resourcemanager.datafactory.models.TriggerResource;
-import java.io.IOException;
-import java.time.OffsetDateTime;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-
/**
- * Samples for Triggers CreateOrUpdateSync.
+ * Samples for Operations List.
*/
-public final class TriggersCreateOrUpdateSyncSamples {
+public final class OperationsListSamples {
/*
* x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Create.json
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Operations_List.json
*/
/**
- * Sample code: Triggers_Create.
+ * Sample code: Operations_List.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void triggersCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
- throws IOException {
- manager.triggers()
- .define("exampleTrigger")
- .withExistingFactory("exampleResourceGroup", "exampleFactoryName")
- .withProperties(new ScheduleTrigger()
- .withPipelines(Arrays.asList(new TriggerPipelineReference()
- .withPipelineReference(new PipelineReference().withReferenceName("examplePipeline"))
- .withParameters(mapOf("OutputBlobNameList",
- SerializerFactory.createDefaultManagementSerializerAdapter()
- .deserialize("[\"exampleoutput.csv\"]", Object.class, SerializerEncoding.JSON)))))
- .withRecurrence(new ScheduleTriggerRecurrence().withFrequency(RecurrenceFrequency.MINUTE)
- .withInterval(4)
- .withStartTime(OffsetDateTime.parse("2018-06-16T00:39:13.8441801Z"))
- .withEndTime(OffsetDateTime.parse("2018-06-16T00:55:13.8441801Z"))
- .withTimeZone("UTC")
- .withAdditionalProperties(mapOf())))
- .create();
+ public static void operationsList(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.operations().list(com.azure.core.util.Context.NONE);
}
+}
+```
+### Operations_List
+
+```java
+/**
+ * Samples for Triggers Get.
+ */
+public final class TriggersGetSamples {
/*
* x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Update.json
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Get.json
*/
/**
- * Sample code: Triggers_Update.
+ * Sample code: Triggers_Get.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void triggersUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
- throws IOException {
- TriggerResource resource = manager.triggers()
+ public static void triggersGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.triggers()
.getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", null,
- com.azure.core.util.Context.NONE)
- .getValue();
- resource.update()
- .withProperties(new ScheduleTrigger().withDescription("Example description")
- .withPipelines(Arrays.asList(new TriggerPipelineReference()
- .withPipelineReference(new PipelineReference().withReferenceName("examplePipeline"))
- .withParameters(mapOf("OutputBlobNameList",
- SerializerFactory.createDefaultManagementSerializerAdapter()
- .deserialize("[\"exampleoutput.csv\"]", Object.class, SerializerEncoding.JSON)))))
- .withRecurrence(new ScheduleTriggerRecurrence().withFrequency(RecurrenceFrequency.MINUTE)
- .withInterval(4)
- .withStartTime(OffsetDateTime.parse("2018-06-16T00:39:14.905167Z"))
- .withEndTime(OffsetDateTime.parse("2018-06-16T00:55:14.905167Z"))
- .withTimeZone("UTC")
- .withAdditionalProperties(mapOf())))
- .apply();
- }
-
- // Use "Map.of" if available
- @SuppressWarnings("unchecked")
- private static <T> Map<String, T> mapOf(Object... inputs) {
- Map<String, T> map = new HashMap<>();
- for (int i = 0; i < inputs.length; i += 2) {
- String key = (String) inputs[i];
- T value = (T) inputs[i + 1];
- map.put(key, value);
- }
- return map;
+ com.azure.core.util.Context.NONE);
}
}
```
-### Pipelines_CreateRunSync
+### PipelineRuns_Cancel
```java
/**
- * Samples for ManagedVirtualNetworks ListByFactorySync.
+ * Samples for DataFlows ListByFactory.
*/
-public final class ManagedVirtualNetworksListByFactorySyncSamples {
+public final class DataFlowsListByFactorySamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * ManagedVirtualNetworks_ListByFactory.json
+ * DataFlows_ListByFactory.json
*/
/**
- * Sample code: ManagedVirtualNetworks_ListByFactory.
+ * Sample code: DataFlows_ListByFactory.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void
- managedVirtualNetworksListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.managedVirtualNetworks()
+ public static void dataFlowsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.dataFlows()
.listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
}
}
```
-### Pipelines_DeleteSync
+### PipelineRuns_Get
```java
import com.azure.resourcemanager.datafactory.models.CreateDataFlowDebugSessionRequest;
@@ -2820,32 +2697,31 @@ public final class DataFlowDebugSessionCreateSamples {
}
```
-### Pipelines_GetSync
+### PipelineRuns_QueryByFactory
```java
/**
- * Samples for IntegrationRuntimes GetMonitoringDataSync.
+ * Samples for ChangeDataCapture Delete.
*/
-public final class IntegrationRuntimesGetMonitoringDataSyncSamples {
+public final class ChangeDataCaptureDeleteSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimes_GetMonitoringData.json
+ * ChangeDataCapture_Delete.json
*/
/**
- * Sample code: IntegrationRuntimes_GetMonitoringData.
+ * Sample code: ChangeDataCapture_Delete.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void
- integrationRuntimesGetMonitoringData(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.integrationRuntimes()
- .getMonitoringDataWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
+ public static void changeDataCaptureDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.changeDataCaptures()
+ .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture",
com.azure.core.util.Context.NONE);
}
}
```
-### Pipelines_ListByFactorySync
+### Pipelines_CreateOrUpdate
```java
/**
@@ -2869,283 +2745,373 @@ public final class IntegrationRuntimeObjectMetadataRefreshSamples {
}
```
-### PrivateEndPointConnections_ListByFactorySync
+### Pipelines_CreateRun
```java
/**
- * Samples for GlobalParameters DeleteSync.
+ * Samples for Pipelines Get.
*/
-public final class GlobalParametersDeleteSyncSamples {
+public final class PipelinesGetSamples {
+ /*
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Get.json
+ */
+ /**
+ * Sample code: Pipelines_Get.
+ *
+ * @param manager Entry point to DataFactoryManager.
+ */
+ public static void pipelinesGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.pipelines()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", null,
+ com.azure.core.util.Context.NONE);
+ }
+}
+```
+
+### Pipelines_Delete
+
+```java
+/**
+ * Samples for PrivateEndpointConnectionOperation Delete.
+ */
+public final class PrivateEndpointConnectionOperationDeleteSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * GlobalParameters_Delete.json
+ * DeletePrivateEndpointConnection.json
*/
/**
- * Sample code: GlobalParameters_Delete.
+ * Sample code: Delete a private endpoint connection for a datafactory.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void globalParametersDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.globalParameters()
- .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "default",
+ public static void deleteAPrivateEndpointConnectionForADatafactory(
+ com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.privateEndpointConnectionOperations()
+ .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "connection",
com.azure.core.util.Context.NONE);
}
}
```
-### PrivateEndpointConnectionOperation_CreateOrUpdateSync
+### Pipelines_Get
```java
-import com.azure.resourcemanager.datafactory.models.UpdateIntegrationRuntimeNodeRequest;
+/**
+ * Samples for CredentialOperations Get.
+ */
+public final class CredentialOperationsGetSamples {
+ /*
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Get.json
+ */
+ /**
+ * Sample code: Credentials_Get.
+ *
+ * @param manager Entry point to DataFactoryManager.
+ */
+ public static void credentialsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.credentialOperations()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleCredential", null,
+ com.azure.core.util.Context.NONE);
+ }
+}
+```
+
+### Pipelines_ListByFactory
+```java
/**
- * Samples for IntegrationRuntimeNodes UpdateSync.
+ * Samples for LinkedServices ListByFactory.
*/
-public final class IntegrationRuntimeNodesUpdateSyncSamples {
+public final class LinkedServicesListByFactorySamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimeNodes_Update.json
+ * LinkedServices_ListByFactory.json
*/
/**
- * Sample code: IntegrationRuntimeNodes_Update.
+ * Sample code: LinkedServices_ListByFactory.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void integrationRuntimeNodesUpdate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.integrationRuntimeNodes()
- .updateWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", "Node_1",
- new UpdateIntegrationRuntimeNodeRequest().withConcurrentJobsLimit(2), com.azure.core.util.Context.NONE);
+ public static void linkedServicesListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.linkedServices()
+ .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
}
}
```
-### PrivateEndpointConnectionOperation_DeleteSync
+### PrivateEndPointConnections_ListByFactory
```java
/**
- * Samples for IntegrationRuntimes UpgradeSync.
+ * Samples for IntegrationRuntimes Delete.
*/
-public final class IntegrationRuntimesUpgradeSyncSamples {
+public final class IntegrationRuntimesDeleteSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimes_Upgrade.json
+ * IntegrationRuntimes_Delete.json
*/
/**
- * Sample code: IntegrationRuntimes_Upgrade.
+ * Sample code: IntegrationRuntimes_Delete.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void integrationRuntimesUpgrade(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ public static void integrationRuntimesDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
manager.integrationRuntimes()
- .upgradeWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
+ .deleteWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
com.azure.core.util.Context.NONE);
}
}
```
-### PrivateEndpointConnectionOperation_GetSync
+### PrivateEndpointConnectionOperation_CreateOrUpdate
```java
/**
- * Samples for IntegrationRuntimes ListOutboundNetworkDependenciesEndpointsSync.
+ * Samples for CredentialOperations ListByFactory.
*/
-public final class IntegrationRuntimesListOutboundNetworkDependenciesEndpointsSyncSamples {
+public final class CredentialOperationsListByFactorySamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimes_ListOutboundNetworkDependenciesEndpoints.json
+ * Credentials_ListByFactory.json
*/
/**
- * Sample code: IntegrationRuntimes_OutboundNetworkDependenciesEndpoints.
+ * Sample code: Credentials_ListByFactory.
+ *
+ * @param manager Entry point to DataFactoryManager.
+ */
+ public static void credentialsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.credentialOperations()
+ .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
+ }
+}
+```
+
+### PrivateEndpointConnectionOperation_Delete
+
+```java
+/**
+ * Samples for GlobalParameters Get.
+ */
+public final class GlobalParametersGetSamples {
+ /*
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_Get.
+ * json
+ */
+ /**
+ * Sample code: GlobalParameters_Get.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void integrationRuntimesOutboundNetworkDependenciesEndpoints(
- com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.integrationRuntimes()
- .listOutboundNetworkDependenciesEndpointsWithResponse("exampleResourceGroup", "exampleFactoryName",
- "exampleIntegrationRuntime", com.azure.core.util.Context.NONE);
+ public static void globalParametersGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.globalParameters()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "default", com.azure.core.util.Context.NONE);
}
}
```
-### PrivateLinkResources_GetSync
+### PrivateEndpointConnectionOperation_Get
```java
/**
- * Samples for DataFlowDebugSession QueryByFactorySync.
+ * Samples for PrivateEndPointConnections ListByFactory.
*/
-public final class DataFlowDebugSessionQueryByFactorySyncSamples {
+public final class PrivateEndPointConnectionsListByFactorySamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * DataFlowDebugSession_QueryByFactory.json
+ * PrivateEndPointConnections_ListByFactory.json
*/
/**
- * Sample code: DataFlowDebugSession_QueryByFactory.
+ * Sample code: privateEndPointConnections_ListByFactory.
*
* @param manager Entry point to DataFactoryManager.
*/
public static void
- dataFlowDebugSessionQueryByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.dataFlowDebugSessions()
- .queryByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
+ privateEndPointConnectionsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.privateEndPointConnections()
+ .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
}
}
```
-### TriggerRuns_CancelSync
+### PrivateLinkResources_Get
```java
-import com.azure.resourcemanager.datafactory.models.TriggerFilterParameters;
+import com.azure.resourcemanager.datafactory.models.ManagedPrivateEndpoint;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
/**
- * Samples for Triggers QueryByFactorySync.
+ * Samples for ManagedPrivateEndpoints CreateOrUpdate.
*/
-public final class TriggersQueryByFactorySyncSamples {
+public final class ManagedPrivateEndpointsCreateOrUpdateSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * Triggers_QueryByFactory.json
+ * ManagedPrivateEndpoints_Create.json
*/
/**
- * Sample code: Triggers_QueryByFactory.
+ * Sample code: ManagedVirtualNetworks_Create.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void triggersQueryByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.triggers()
- .queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName",
- new TriggerFilterParameters().withParentTriggerName("exampleTrigger"),
- com.azure.core.util.Context.NONE);
+ public static void managedVirtualNetworksCreate(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.managedPrivateEndpoints()
+ .define("exampleManagedPrivateEndpointName")
+ .withExistingManagedVirtualNetwork("exampleResourceGroup", "exampleFactoryName",
+ "exampleManagedVirtualNetworkName")
+ .withProperties(new ManagedPrivateEndpoint().withFqdns(Arrays.asList())
+ .withGroupId("blob")
+ .withPrivateLinkResourceId(
+ "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.Storage/storageAccounts/exampleBlobStorage")
+ .withAdditionalProperties(mapOf()))
+ .create();
+ }
+
+ // Use "Map.of" if available
+ @SuppressWarnings("unchecked")
+ private static <T> Map<String, T> mapOf(Object... inputs) {
+ Map<String, T> map = new HashMap<>();
+ for (int i = 0; i < inputs.length; i += 2) {
+ String key = (String) inputs[i];
+ T value = (T) inputs[i + 1];
+ map.put(key, value);
+ }
+ return map;
}
}
```
-### TriggerRuns_QueryByFactorySync
+### TriggerRuns_Cancel
```java
-import com.azure.resourcemanager.datafactory.models.DeleteDataFlowDebugSessionRequest;
+import com.azure.resourcemanager.datafactory.models.RunFilterParameters;
+import java.time.OffsetDateTime;
/**
- * Samples for DataFlowDebugSession DeleteSync.
+ * Samples for ActivityRuns QueryByPipelineRun.
*/
-public final class DataFlowDebugSessionDeleteSyncSamples {
+public final class ActivityRunsQueryByPipelineRunSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * DataFlowDebugSession_Delete.json
+ * ActivityRuns_QueryByPipelineRun.json
*/
/**
- * Sample code: DataFlowDebugSession_Delete.
+ * Sample code: ActivityRuns_QueryByPipelineRun.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void dataFlowDebugSessionDelete(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.dataFlowDebugSessions()
- .deleteWithResponse("exampleResourceGroup", "exampleFactoryName",
- new DeleteDataFlowDebugSessionRequest().withSessionId("91fb57e0-8292-47be-89ff-c8f2d2bb2a7e"),
+ public static void
+ activityRunsQueryByPipelineRun(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.activityRuns()
+ .queryByPipelineRunWithResponse("exampleResourceGroup", "exampleFactoryName",
+ "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b",
+ new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z"))
+ .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z")),
com.azure.core.util.Context.NONE);
}
}
```
-### TriggerRuns_RerunSync
+### TriggerRuns_QueryByFactory
```java
/**
- * Samples for Datasets ListByFactorySync.
+ * Samples for ChangeDataCapture Start.
*/
-public final class DatasetsListByFactorySyncSamples {
+public final class ChangeDataCaptureStartSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * Datasets_ListByFactory.json
+ * ChangeDataCapture_Start.json
*/
/**
- * Sample code: Datasets_ListByFactory.
+ * Sample code: ChangeDataCapture_Start.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void datasetsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.datasets()
- .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
+ public static void changeDataCaptureStart(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.changeDataCaptures()
+ .startWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture",
+ com.azure.core.util.Context.NONE);
}
}
```
-### Triggers_CreateOrUpdateSync
+### TriggerRuns_Rerun
```java
-import com.azure.resourcemanager.datafactory.models.LinkedIntegrationRuntimeRequest;
-
/**
- * Samples for IntegrationRuntimes RemoveLinksSync.
+ * Samples for ChangeDataCapture Stop.
*/
-public final class IntegrationRuntimesRemoveLinksSyncSamples {
+public final class ChangeDataCaptureStopSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimes_RemoveLinks.json
+ * ChangeDataCapture_Stop.json
*/
/**
- * Sample code: IntegrationRuntimes_Upgrade.
+ * Sample code: ChangeDataCapture_Stop.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void integrationRuntimesUpgrade(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.integrationRuntimes()
- .removeLinksWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
- new LinkedIntegrationRuntimeRequest().withLinkedFactoryName("exampleFactoryName-linked"),
+ public static void changeDataCaptureStop(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.changeDataCaptures()
+ .stopWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture",
com.azure.core.util.Context.NONE);
}
}
```
-### Triggers_DeleteSync
+### Triggers_CreateOrUpdate
```java
/**
- * Samples for IntegrationRuntimes GetConnectionInfoSync.
+ * Samples for ChangeDataCapture ListByFactory.
*/
-public final class IntegrationRuntimesGetConnectionInfoSyncSamples {
+public final class ChangeDataCaptureListByFactorySamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimes_GetConnectionInfo.json
+ * ChangeDataCapture_ListByFactory.json
*/
/**
- * Sample code: IntegrationRuntimes_GetConnectionInfo.
+ * Sample code: ChangeDataCapture_ListByFactory.
*
* @param manager Entry point to DataFactoryManager.
*/
public static void
- integrationRuntimesGetConnectionInfo(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.integrationRuntimes()
- .getConnectionInfoWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
- com.azure.core.util.Context.NONE);
+ changeDataCaptureListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.changeDataCaptures()
+ .listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
}
}
```
-### Triggers_GetEventSubscriptionStatusSync
+### Triggers_Delete
```java
/**
- * Samples for Triggers GetSync.
+ * Samples for Datasets Get.
*/
-public final class TriggersGetSyncSamples {
+public final class DatasetsGetSamples {
/*
* x-ms-original-file:
- * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Get.json
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Get.json
*/
/**
- * Sample code: Triggers_Get.
+ * Sample code: Datasets_Get.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void triggersGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.triggers()
- .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger", null,
+ public static void datasetsGet(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.datasets()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleDataset", null,
com.azure.core.util.Context.NONE);
}
}
```
-### Triggers_GetSync
+### Triggers_Get
```java
import com.azure.resourcemanager.datafactory.models.DataFlowDebugCommandPayload;
@@ -3177,62 +3143,86 @@ public final class DataFlowDebugSessionExecuteCommandSamples {
}
```
-### Triggers_ListByFactorySync
+### Triggers_GetEventSubscriptionStatus
```java
/**
- * Samples for IntegrationRuntimeNodes GetIpAddressSync.
+ * Samples for PrivateEndpointConnectionOperation Get.
*/
-public final class IntegrationRuntimeNodesGetIpAddressSyncSamples {
+public final class PrivateEndpointConnectionOperationGetSamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimeNodes_GetIpAddress.json
+ * GetPrivateEndpointConnection.json
*/
/**
- * Sample code: IntegrationRuntimeNodes_GetIpAddress.
+ * Sample code: Get a private endpoint connection for a datafactory.
*
* @param manager Entry point to DataFactoryManager.
*/
public static void
- integrationRuntimeNodesGetIpAddress(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.integrationRuntimeNodes()
- .getIpAddressWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
- "Node_1", com.azure.core.util.Context.NONE);
+ getAPrivateEndpointConnectionForADatafactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.privateEndpointConnectionOperations()
+ .getWithResponse("exampleResourceGroup", "exampleFactoryName", "connection", null,
+ com.azure.core.util.Context.NONE);
}
}
```
-### Triggers_QueryByFactorySync
+### Triggers_ListByFactory
```java
-import com.azure.resourcemanager.datafactory.models.RunFilterParameters;
-import com.azure.resourcemanager.datafactory.models.RunQueryFilter;
-import com.azure.resourcemanager.datafactory.models.RunQueryFilterOperand;
-import com.azure.resourcemanager.datafactory.models.RunQueryFilterOperator;
-import java.time.OffsetDateTime;
-import java.util.Arrays;
+import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeAuthKeyName;
+import com.azure.resourcemanager.datafactory.models.IntegrationRuntimeRegenerateKeyParameters;
/**
- * Samples for TriggerRuns QueryByFactorySync.
+ * Samples for IntegrationRuntimes RegenerateAuthKey.
*/
-public final class TriggerRunsQueryByFactorySyncSamples {
+public final class IntegrationRuntimesRegenerateAuthKeySamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * TriggerRuns_QueryByFactory.json
+ * IntegrationRuntimes_RegenerateAuthKey.json
*/
/**
- * Sample code: TriggerRuns_QueryByFactory.
+ * Sample code: IntegrationRuntimes_RegenerateAuthKey.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void triggerRunsQueryByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.triggerRuns()
- .queryByFactoryWithResponse("exampleResourceGroup", "exampleFactoryName",
- new RunFilterParameters().withLastUpdatedAfter(OffsetDateTime.parse("2018-06-16T00:36:44.3345758Z"))
- .withLastUpdatedBefore(OffsetDateTime.parse("2018-06-16T00:49:48.3686473Z"))
- .withFilters(Arrays.asList(new RunQueryFilter().withOperand(RunQueryFilterOperand.TRIGGER_NAME)
- .withOperator(RunQueryFilterOperator.EQUALS)
- .withValues(Arrays.asList("exampleTrigger")))),
+ public static void
+ integrationRuntimesRegenerateAuthKey(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.integrationRuntimes()
+ .regenerateAuthKeyWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
+ new IntegrationRuntimeRegenerateKeyParameters().withKeyName(IntegrationRuntimeAuthKeyName.AUTH_KEY2),
+ com.azure.core.util.Context.NONE);
+ }
+}
+```
+
+### Triggers_QueryByFactory
+
+```java
+import com.azure.resourcemanager.datafactory.models.UserAccessPolicy;
+
+/**
+ * Samples for Factories GetDataPlaneAccess.
+ */
+public final class FactoriesGetDataPlaneAccessSamples {
+ /*
+ * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
+ * Factories_GetDataPlaneAccess.json
+ */
+ /**
+ * Sample code: Factories_GetDataPlaneAccess.
+ *
+ * @param manager Entry point to DataFactoryManager.
+ */
+ public static void factoriesGetDataPlaneAccess(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.factories()
+ .getDataPlaneAccessWithResponse("exampleResourceGroup", "exampleFactoryName",
+ new UserAccessPolicy().withPermissions("r")
+ .withAccessResourcePath("")
+ .withProfileName("DefaultProfile")
+ .withStartTime("2018-11-10T02:46:20.2659347Z")
+ .withExpireTime("2018-11-10T09:46:20.2659347Z"),
com.azure.core.util.Context.NONE);
}
}
@@ -3242,20 +3232,20 @@ public final class TriggerRunsQueryByFactorySyncSamples {
```java
/**
- * Samples for LinkedServices ListByFactorySync.
+ * Samples for Datasets ListByFactory.
*/
-public final class LinkedServicesListByFactorySyncSamples {
+public final class DatasetsListByFactorySamples {
/*
* x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * LinkedServices_ListByFactory.json
+ * Datasets_ListByFactory.json
*/
/**
- * Sample code: LinkedServices_ListByFactory.
+ * Sample code: Datasets_ListByFactory.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void linkedServicesListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.linkedServices()
+ public static void datasetsListByFactory(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.datasets()
.listByFactory("exampleResourceGroup", "exampleFactoryName", com.azure.core.util.Context.NONE);
}
}
@@ -3264,30 +3254,24 @@ public final class LinkedServicesListByFactorySyncSamples {
### Triggers_Stop
```java
-import com.azure.resourcemanager.datafactory.models.UserAccessPolicy;
-
/**
- * Samples for Factories GetDataPlaneAccessSync.
+ * Samples for TriggerRuns Cancel.
*/
-public final class FactoriesGetDataPlaneAccessSyncSamples {
+public final class TriggerRunsCancelSamples {
/*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * Factories_GetDataPlaneAccess.json
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_Cancel.
+ * json
*/
/**
- * Sample code: Factories_GetDataPlaneAccess.
+ * Sample code: Triggers_Cancel.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void factoriesGetDataPlaneAccess(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.factories()
- .getDataPlaneAccessWithResponse("exampleResourceGroup", "exampleFactoryName",
- new UserAccessPolicy().withPermissions("r")
- .withAccessResourcePath("")
- .withProfileName("DefaultProfile")
- .withStartTime("2018-11-10T02:46:20.2659347Z")
- .withExpireTime("2018-11-10T09:46:20.2659347Z"),
- com.azure.core.util.Context.NONE);
+ public static void triggersCancel(com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
+ manager.triggerRuns()
+ .cancelWithResponse("exampleResourceGroup", "exampleFactoryName", "exampleTrigger",
+ "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", com.azure.core.util.Context.NONE);
}
}
```
@@ -3295,32 +3279,48 @@ public final class FactoriesGetDataPlaneAccessSyncSamples {
### Triggers_SubscribeToEvents
```java
-import com.azure.resourcemanager.datafactory.models.CreateLinkedIntegrationRuntimeRequest;
+import com.azure.core.management.serializer.SerializerFactory;
+import com.azure.core.util.serializer.SerializerEncoding;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
/**
- * Samples for IntegrationRuntimes CreateLinkedIntegrationRuntimeSync.
+ * Samples for Pipelines CreateRun.
*/
-public final class IntegrationRuntimesCreateLinkedIntegrationRuntimeSyncSamples {
+public final class PipelinesCreateRunSamples {
/*
- * x-ms-original-file: specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/
- * IntegrationRuntimes_CreateLinkedIntegrationRuntime.json
+ * x-ms-original-file:
+ * specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_CreateRun.
+ * json
*/
/**
- * Sample code: IntegrationRuntimes_CreateLinkedIntegrationRuntime.
+ * Sample code: Pipelines_CreateRun.
*
* @param manager Entry point to DataFactoryManager.
*/
- public static void integrationRuntimesCreateLinkedIntegrationRuntime(
- com.azure.resourcemanager.datafactory.DataFactoryManager manager) {
- manager.integrationRuntimes()
- .createLinkedIntegrationRuntimeWithResponse("exampleResourceGroup", "exampleFactoryName",
- "exampleIntegrationRuntime",
- new CreateLinkedIntegrationRuntimeRequest().withName("bfa92911-9fb6-4fbe-8f23-beae87bc1c83")
- .withSubscriptionId("061774c7-4b5a-4159-a55b-365581830283")
- .withDataFactoryName("e9955d6d-56ea-4be3-841c-52a12c1a9981")
- .withDataFactoryLocation("West US"),
+ public static void pipelinesCreateRun(com.azure.resourcemanager.datafactory.DataFactoryManager manager)
+ throws IOException {
+ manager.pipelines()
+ .createRunWithResponse("exampleResourceGroup", "exampleFactoryName", "examplePipeline", null, null, null,
+ null,
+ mapOf("OutputBlobNameList",
+ SerializerFactory.createDefaultManagementSerializerAdapter()
+ .deserialize("[\"exampleoutput.csv\"]", Object.class, SerializerEncoding.JSON)),
com.azure.core.util.Context.NONE);
}
+
+ // Use "Map.of" if available
+ @SuppressWarnings("unchecked")
+ private static <T> Map<String, T> mapOf(Object... inputs) {
+ Map<String, T> map = new HashMap<>();
+ for (int i = 0; i < inputs.length; i += 2) {
+ String key = (String) inputs[i];
+ T value = (T) inputs[i + 1];
+ map.put(key, value);
+ }
+ return map;
+ }
}
```
@@ -3328,9 +3328,9 @@ public final class IntegrationRuntimesCreateLinkedIntegrationRuntimeSyncSamples
```java
/**
- * Samples for TriggerRuns RerunSync.
+ * Samples for TriggerRuns Rerun.
*/
-public final class TriggerRunsRerunSyncSamples {
+public final class TriggerRunsRerunSamples {
/*
* x-ms-original-file:
* specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_Rerun.
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/pom.xml b/sdk/datafactory/azure-resourcemanager-datafactory/pom.xml
index 3ad4431a3d79..d36ae43a832e 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/pom.xml
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/pom.xml
@@ -45,6 +45,7 @@
UTF-8
0
0
+ true
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForLinkedServiceTypeProperties.java
index b0682f3245e0..f47a32db48c1 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForLinkedServiceTypeProperties.java
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AmazonRdsForLinkedServiceTypeProperties.java
@@ -5,11 +5,11 @@
package com.azure.resourcemanager.datafactory.fluent.models;
import com.azure.core.annotation.Fluent;
-import com.azure.core.util.logging.ClientLogger;
import com.azure.json.JsonReader;
import com.azure.json.JsonSerializable;
import com.azure.json.JsonToken;
import com.azure.json.JsonWriter;
+import com.azure.resourcemanager.datafactory.models.AmazonRdsForOracleAuthenticationType;
import com.azure.resourcemanager.datafactory.models.SecretBase;
import java.io.IOException;
@@ -20,15 +20,100 @@
public final class AmazonRdsForLinkedServiceTypeProperties
implements JsonSerializable<AmazonRdsForLinkedServiceTypeProperties> {
/*
- * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference.
+ * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Only used for Version 1.0.
*/
private Object connectionString;
+ /*
+ * The location of AmazonRdsForOracle database you want to connect to, the supported forms include connector
+ * descriptor, Easy Connect (Plus) Naming and Oracle Net Services Name (Only self-hosted IR). Type: string. Only
+ * used for Version 2.0.
+ */
+ private Object server;
+
+ /*
+ * Authentication type for connecting to the AmazonRdsForOracle database. Only used for Version 2.0.
+ */
+ private AmazonRdsForOracleAuthenticationType authenticationType;
+
+ /*
+ * The AmazonRdsForOracle database username. Type: string. Only used for Version 2.0.
+ */
+ private Object username;
+
/*
* The Azure key vault secret reference of password in connection string.
*/
private SecretBase password;
+ /*
+ * Specifies the encryption client behavior. Supported values are accepted, rejected, requested or required, default
+ * value is required. Type: string. Only used for Version 2.0.
+ */
+ private Object encryptionClient;
+
+ /*
+ * Specifies the encryption algorithms that client can use. Supported values are AES128, AES192, AES256, 3DES112,
+ * 3DES168, default value is (AES256). Type: string. Only used for Version 2.0.
+ */
+ private Object encryptionTypesClient;
+
+ /*
+ * Specifies the desired data integrity behavior when this client connects to a server. Supported values are
+ * accepted, rejected, requested or required, default value is required. Type: string. Only used for Version 2.0.
+ */
+ private Object cryptoChecksumClient;
+
+ /*
+ * Specifies the crypto-checksum algorithms that client can use. Supported values are SHA1, SHA256, SHA384, SHA512,
+ * default value is (SHA512). Type: string. Only used for Version 2.0.
+ */
+ private Object cryptoChecksumTypesClient;
+
+ /*
+ * Specifies the amount that the source initially fetches for LOB columns, default value is 0. Type: integer. Only
+ * used for Version 2.0.
+ */
+ private Object initialLobFetchSize;
+
+ /*
+ * Specifies the number of bytes that the driver allocates to fetch the data in one database round-trip, default
+ * value is 10485760. Type: integer. Only used for Version 2.0.
+ */
+ private Object fetchSize;
+
+ /*
+ * Specifies the number of cursors or statements to be cached for each database connection, default value is 0.
+ * Type: integer. Only used for Version 2.0.
+ */
+ private Object statementCacheSize;
+
+ /*
+ * Specifies a command that is issued immediately after connecting to the database to manage session settings. Type:
+ * string. Only used for Version 2.0.
+ */
+ private Object initializationString;
+
+ /*
+ * Specifies whether to use bulk copy or batch insert when loading data into the database, default value is true.
+ * Type: boolean. Only used for Version 2.0.
+ */
+ private Object enableBulkLoad;
+
+ /*
+ * Specifies whether to use the Version 1.0 data type mappings. Do not set this to true unless you want to keep
+ * backward compatibility with Version 1.0's data type mappings, default value is false. Type: boolean. Only used
+ * for Version 2.0.
+ */
+ private Object supportV1DataTypes;
+
+ /*
+ * Specifies whether the driver returns column value with the TIMESTAMP WITH TIME ZONE data type as DateTime or
+ * string. This setting is ignored if supportV1DataTypes is not true, default value is true. Type: boolean. Only
+ * used for Version 2.0.
+ */
+ private Object fetchTswtzAsTimestamp;
+
/*
* The encrypted credential used for authentication. Credentials are encrypted using the integration runtime
* credential manager. Type: string.
@@ -43,7 +128,7 @@ public AmazonRdsForLinkedServiceTypeProperties() {
/**
* Get the connectionString property: The connection string. Type: string, SecureString or
- * AzureKeyVaultSecretReference.
+ * AzureKeyVaultSecretReference. Only used for Version 1.0.
*
* @return the connectionString value.
*/
@@ -53,7 +138,7 @@ public Object connectionString() {
/**
* Set the connectionString property: The connection string. Type: string, SecureString or
- * AzureKeyVaultSecretReference.
+ * AzureKeyVaultSecretReference. Only used for Version 1.0.
*
* @param connectionString the connectionString value to set.
* @return the AmazonRdsForLinkedServiceTypeProperties object itself.
@@ -63,6 +148,73 @@ public AmazonRdsForLinkedServiceTypeProperties withConnectionString(Object conne
return this;
}
+ /**
+ * Get the server property: The location of AmazonRdsForOracle database you want to connect to, the supported forms
+ * include connect descriptor, Easy Connect (Plus) Naming and Oracle Net Services Name (Only self-hosted IR).
+ * Type: string. Only used for Version 2.0.
+ *
+ * @return the server value.
+ */
+ public Object server() {
+ return this.server;
+ }
+
+ /**
+ * Set the server property: The location of AmazonRdsForOracle database you want to connect to, the supported forms
+ * include connect descriptor, Easy Connect (Plus) Naming and Oracle Net Services Name (Only self-hosted IR).
+ * Type: string. Only used for Version 2.0.
+ *
+ * @param server the server value to set.
+ * @return the AmazonRdsForLinkedServiceTypeProperties object itself.
+ */
+ public AmazonRdsForLinkedServiceTypeProperties withServer(Object server) {
+ this.server = server;
+ return this;
+ }
+
+ /**
+ * Get the authenticationType property: Authentication type for connecting to the AmazonRdsForOracle database. Only
+ * used for Version 2.0.
+ *
+ * @return the authenticationType value.
+ */
+ public AmazonRdsForOracleAuthenticationType authenticationType() {
+ return this.authenticationType;
+ }
+
+ /**
+ * Set the authenticationType property: Authentication type for connecting to the AmazonRdsForOracle database. Only
+ * used for Version 2.0.
+ *
+ * @param authenticationType the authenticationType value to set.
+ * @return the AmazonRdsForLinkedServiceTypeProperties object itself.
+ */
+ public AmazonRdsForLinkedServiceTypeProperties
+ withAuthenticationType(AmazonRdsForOracleAuthenticationType authenticationType) {
+ this.authenticationType = authenticationType;
+ return this;
+ }
+
+ /**
+ * Get the username property: The AmazonRdsForOracle database username. Type: string. Only used for Version 2.0.
+ *
+ * @return the username value.
+ */
+ public Object username() {
+ return this.username;
+ }
+
+ /**
+ * Set the username property: The AmazonRdsForOracle database username. Type: string. Only used for Version 2.0.
+ *
+ * @param username the username value to set.
+ * @return the AmazonRdsForLinkedServiceTypeProperties object itself.
+ */
+ public AmazonRdsForLinkedServiceTypeProperties withUsername(Object username) {
+ this.username = username;
+ return this;
+ }
+
/**
* Get the password property: The Azure key vault secret reference of password in connection string.
*
@@ -83,6 +235,256 @@ public AmazonRdsForLinkedServiceTypeProperties withPassword(SecretBase password)
return this;
}
+ /**
+ * Get the encryptionClient property: Specifies the encryption client behavior. Supported values are accepted,
+ * rejected, requested or required, default value is required. Type: string. Only used for Version 2.0.
+ *
+ * @return the encryptionClient value.
+ */
+ public Object encryptionClient() {
+ return this.encryptionClient;
+ }
+
+ /**
+ * Set the encryptionClient property: Specifies the encryption client behavior. Supported values are accepted,
+ * rejected, requested or required, default value is required. Type: string. Only used for Version 2.0.
+ *
+ * @param encryptionClient the encryptionClient value to set.
+ * @return the AmazonRdsForLinkedServiceTypeProperties object itself.
+ */
+ public AmazonRdsForLinkedServiceTypeProperties withEncryptionClient(Object encryptionClient) {
+ this.encryptionClient = encryptionClient;
+ return this;
+ }
+
+ /**
+ * Get the encryptionTypesClient property: Specifies the encryption algorithms that client can use. Supported values
+ * are AES128, AES192, AES256, 3DES112, 3DES168, default value is (AES256). Type: string. Only used for Version 2.0.
+ *
+ * @return the encryptionTypesClient value.
+ */
+ public Object encryptionTypesClient() {
+ return this.encryptionTypesClient;
+ }
+
+ /**
+ * Set the encryptionTypesClient property: Specifies the encryption algorithms that client can use. Supported values
+ * are AES128, AES192, AES256, 3DES112, 3DES168, default value is (AES256). Type: string. Only used for Version 2.0.
+ *
+ * @param encryptionTypesClient the encryptionTypesClient value to set.
+ * @return the AmazonRdsForLinkedServiceTypeProperties object itself.
+ */
+ public AmazonRdsForLinkedServiceTypeProperties withEncryptionTypesClient(Object encryptionTypesClient) {
+ this.encryptionTypesClient = encryptionTypesClient;
+ return this;
+ }
+
+ /**
+ * Get the cryptoChecksumClient property: Specifies the desired data integrity behavior when this client connects to
+ * a server. Supported values are accepted, rejected, requested or required, default value is required. Type:
+ * string. Only used for Version 2.0.
+ *
+ * @return the cryptoChecksumClient value.
+ */
+ public Object cryptoChecksumClient() {
+ return this.cryptoChecksumClient;
+ }
+
+ /**
+ * Set the cryptoChecksumClient property: Specifies the desired data integrity behavior when this client connects to
+ * a server. Supported values are accepted, rejected, requested or required, default value is required. Type:
+ * string. Only used for Version 2.0.
+ *
+ * @param cryptoChecksumClient the cryptoChecksumClient value to set.
+ * @return the AmazonRdsForLinkedServiceTypeProperties object itself.
+ */
+ public AmazonRdsForLinkedServiceTypeProperties withCryptoChecksumClient(Object cryptoChecksumClient) {
+ this.cryptoChecksumClient = cryptoChecksumClient;
+ return this;
+ }
+
+ /**
+ * Get the cryptoChecksumTypesClient property: Specifies the crypto-checksum algorithms that client can use.
+ * Supported values are SHA1, SHA256, SHA384, SHA512, default value is (SHA512). Type: string. Only used for Version
+ * 2.0.
+ *
+ * @return the cryptoChecksumTypesClient value.
+ */
+ public Object cryptoChecksumTypesClient() {
+ return this.cryptoChecksumTypesClient;
+ }
+
+ /**
+ * Set the cryptoChecksumTypesClient property: Specifies the crypto-checksum algorithms that client can use.
+ * Supported values are SHA1, SHA256, SHA384, SHA512, default value is (SHA512). Type: string. Only used for Version
+ * 2.0.
+ *
+ * @param cryptoChecksumTypesClient the cryptoChecksumTypesClient value to set.
+ * @return the AmazonRdsForLinkedServiceTypeProperties object itself.
+ */
+ public AmazonRdsForLinkedServiceTypeProperties withCryptoChecksumTypesClient(Object cryptoChecksumTypesClient) {
+ this.cryptoChecksumTypesClient = cryptoChecksumTypesClient;
+ return this;
+ }
+
+ /**
+ * Get the initialLobFetchSize property: Specifies the amount that the source initially fetches for LOB columns,
+ * default value is 0. Type: integer. Only used for Version 2.0.
+ *
+ * @return the initialLobFetchSize value.
+ */
+ public Object initialLobFetchSize() {
+ return this.initialLobFetchSize;
+ }
+
+ /**
+ * Set the initialLobFetchSize property: Specifies the amount that the source initially fetches for LOB columns,
+ * default value is 0. Type: integer. Only used for Version 2.0.
+ *
+ * @param initialLobFetchSize the initialLobFetchSize value to set.
+ * @return the AmazonRdsForLinkedServiceTypeProperties object itself.
+ */
+ public AmazonRdsForLinkedServiceTypeProperties withInitialLobFetchSize(Object initialLobFetchSize) {
+ this.initialLobFetchSize = initialLobFetchSize;
+ return this;
+ }
+
+ /**
+ * Get the fetchSize property: Specifies the number of bytes that the driver allocates to fetch the data in one
+ * database round-trip, default value is 10485760. Type: integer. Only used for Version 2.0.
+ *
+ * @return the fetchSize value.
+ */
+ public Object fetchSize() {
+ return this.fetchSize;
+ }
+
+ /**
+ * Set the fetchSize property: Specifies the number of bytes that the driver allocates to fetch the data in one
+ * database round-trip, default value is 10485760. Type: integer. Only used for Version 2.0.
+ *
+ * @param fetchSize the fetchSize value to set.
+ * @return the AmazonRdsForLinkedServiceTypeProperties object itself.
+ */
+ public AmazonRdsForLinkedServiceTypeProperties withFetchSize(Object fetchSize) {
+ this.fetchSize = fetchSize;
+ return this;
+ }
+
+ /**
+ * Get the statementCacheSize property: Specifies the number of cursors or statements to be cached for each database
+ * connection, default value is 0. Type: integer. Only used for Version 2.0.
+ *
+ * @return the statementCacheSize value.
+ */
+ public Object statementCacheSize() {
+ return this.statementCacheSize;
+ }
+
+ /**
+ * Set the statementCacheSize property: Specifies the number of cursors or statements to be cached for each database
+ * connection, default value is 0. Type: integer. Only used for Version 2.0.
+ *
+ * @param statementCacheSize the statementCacheSize value to set.
+ * @return the AmazonRdsForLinkedServiceTypeProperties object itself.
+ */
+ public AmazonRdsForLinkedServiceTypeProperties withStatementCacheSize(Object statementCacheSize) {
+ this.statementCacheSize = statementCacheSize;
+ return this;
+ }
+
+ /**
+ * Get the initializationString property: Specifies a command that is issued immediately after connecting to the
+ * database to manage session settings. Type: string. Only used for Version 2.0.
+ *
+ * @return the initializationString value.
+ */
+ public Object initializationString() {
+ return this.initializationString;
+ }
+
+ /**
+ * Set the initializationString property: Specifies a command that is issued immediately after connecting to the
+ * database to manage session settings. Type: string. Only used for Version 2.0.
+ *
+ * @param initializationString the initializationString value to set.
+ * @return the AmazonRdsForLinkedServiceTypeProperties object itself.
+ */
+ public AmazonRdsForLinkedServiceTypeProperties withInitializationString(Object initializationString) {
+ this.initializationString = initializationString;
+ return this;
+ }
+
+ /**
+ * Get the enableBulkLoad property: Specifies whether to use bulk copy or batch insert when loading data into the
+ * database, default value is true. Type: boolean. Only used for Version 2.0.
+ *
+ * @return the enableBulkLoad value.
+ */
+ public Object enableBulkLoad() {
+ return this.enableBulkLoad;
+ }
+
+ /**
+ * Set the enableBulkLoad property: Specifies whether to use bulk copy or batch insert when loading data into the
+ * database, default value is true. Type: boolean. Only used for Version 2.0.
+ *
+ * @param enableBulkLoad the enableBulkLoad value to set.
+ * @return the AmazonRdsForLinkedServiceTypeProperties object itself.
+ */
+ public AmazonRdsForLinkedServiceTypeProperties withEnableBulkLoad(Object enableBulkLoad) {
+ this.enableBulkLoad = enableBulkLoad;
+ return this;
+ }
+
+ /**
+ * Get the supportV1DataTypes property: Specifies whether to use the Version 1.0 data type mappings. Do not set this
+ * to true unless you want to keep backward compatibility with Version 1.0's data type mappings, default value is
+ * false. Type: boolean. Only used for Version 2.0.
+ *
+ * @return the supportV1DataTypes value.
+ */
+ public Object supportV1DataTypes() {
+ return this.supportV1DataTypes;
+ }
+
+ /**
+ * Set the supportV1DataTypes property: Specifies whether to use the Version 1.0 data type mappings. Do not set this
+ * to true unless you want to keep backward compatibility with Version 1.0's data type mappings, default value is
+ * false. Type: boolean. Only used for Version 2.0.
+ *
+ * @param supportV1DataTypes the supportV1DataTypes value to set.
+ * @return the AmazonRdsForLinkedServiceTypeProperties object itself.
+ */
+ public AmazonRdsForLinkedServiceTypeProperties withSupportV1DataTypes(Object supportV1DataTypes) {
+ this.supportV1DataTypes = supportV1DataTypes;
+ return this;
+ }
+
+ /**
+ * Get the fetchTswtzAsTimestamp property: Specifies whether the driver returns column value with the TIMESTAMP WITH
+ * TIME ZONE data type as DateTime or string. This setting is ignored if supportV1DataTypes is not true, default
+ * value is true. Type: boolean. Only used for Version 2.0.
+ *
+ * @return the fetchTswtzAsTimestamp value.
+ */
+ public Object fetchTswtzAsTimestamp() {
+ return this.fetchTswtzAsTimestamp;
+ }
+
+ /**
+ * Set the fetchTswtzAsTimestamp property: Specifies whether the driver returns column value with the TIMESTAMP WITH
+ * TIME ZONE data type as DateTime or string. This setting is ignored if supportV1DataTypes is not true, default
+ * value is true. Type: boolean. Only used for Version 2.0.
+ *
+ * @param fetchTswtzAsTimestamp the fetchTswtzAsTimestamp value to set.
+ * @return the AmazonRdsForLinkedServiceTypeProperties object itself.
+ */
+ public AmazonRdsForLinkedServiceTypeProperties withFetchTswtzAsTimestamp(Object fetchTswtzAsTimestamp) {
+ this.fetchTswtzAsTimestamp = fetchTswtzAsTimestamp;
+ return this;
+ }
+
/**
* Get the encryptedCredential property: The encrypted credential used for authentication. Credentials are encrypted
* using the integration runtime credential manager. Type: string.
@@ -111,18 +513,11 @@ public AmazonRdsForLinkedServiceTypeProperties withEncryptedCredential(String en
* @throws IllegalArgumentException thrown if the instance is not valid.
*/
public void validate() {
- if (connectionString() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Missing required property connectionString in model AmazonRdsForLinkedServiceTypeProperties"));
- }
if (password() != null) {
password().validate();
}
}
- private static final ClientLogger LOGGER = new ClientLogger(AmazonRdsForLinkedServiceTypeProperties.class);
-
/**
* {@inheritDoc}
*/
@@ -130,7 +525,22 @@ public void validate() {
public JsonWriter toJson(JsonWriter jsonWriter) throws IOException {
jsonWriter.writeStartObject();
jsonWriter.writeUntypedField("connectionString", this.connectionString);
+ jsonWriter.writeUntypedField("server", this.server);
+ jsonWriter.writeStringField("authenticationType",
+ this.authenticationType == null ? null : this.authenticationType.toString());
+ jsonWriter.writeUntypedField("username", this.username);
jsonWriter.writeJsonField("password", this.password);
+ jsonWriter.writeUntypedField("encryptionClient", this.encryptionClient);
+ jsonWriter.writeUntypedField("encryptionTypesClient", this.encryptionTypesClient);
+ jsonWriter.writeUntypedField("cryptoChecksumClient", this.cryptoChecksumClient);
+ jsonWriter.writeUntypedField("cryptoChecksumTypesClient", this.cryptoChecksumTypesClient);
+ jsonWriter.writeUntypedField("initialLobFetchSize", this.initialLobFetchSize);
+ jsonWriter.writeUntypedField("fetchSize", this.fetchSize);
+ jsonWriter.writeUntypedField("statementCacheSize", this.statementCacheSize);
+ jsonWriter.writeUntypedField("initializationString", this.initializationString);
+ jsonWriter.writeUntypedField("enableBulkLoad", this.enableBulkLoad);
+ jsonWriter.writeUntypedField("supportV1DataTypes", this.supportV1DataTypes);
+ jsonWriter.writeUntypedField("fetchTswtzAsTimestamp", this.fetchTswtzAsTimestamp);
jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential);
return jsonWriter.writeEndObject();
}
@@ -141,7 +551,6 @@ public JsonWriter toJson(JsonWriter jsonWriter) throws IOException {
* @param jsonReader The JsonReader being read.
* @return An instance of AmazonRdsForLinkedServiceTypeProperties if the JsonReader was pointing to an instance of
* it, or null if it was pointing to JSON null.
- * @throws IllegalStateException If the deserialized JSON object was missing any required properties.
* @throws IOException If an error occurs while reading the AmazonRdsForLinkedServiceTypeProperties.
*/
public static AmazonRdsForLinkedServiceTypeProperties fromJson(JsonReader jsonReader) throws IOException {
@@ -154,8 +563,38 @@ public static AmazonRdsForLinkedServiceTypeProperties fromJson(JsonReader jsonRe
if ("connectionString".equals(fieldName)) {
deserializedAmazonRdsForLinkedServiceTypeProperties.connectionString = reader.readUntyped();
+ } else if ("server".equals(fieldName)) {
+ deserializedAmazonRdsForLinkedServiceTypeProperties.server = reader.readUntyped();
+ } else if ("authenticationType".equals(fieldName)) {
+ deserializedAmazonRdsForLinkedServiceTypeProperties.authenticationType
+ = AmazonRdsForOracleAuthenticationType.fromString(reader.getString());
+ } else if ("username".equals(fieldName)) {
+ deserializedAmazonRdsForLinkedServiceTypeProperties.username = reader.readUntyped();
} else if ("password".equals(fieldName)) {
deserializedAmazonRdsForLinkedServiceTypeProperties.password = SecretBase.fromJson(reader);
+ } else if ("encryptionClient".equals(fieldName)) {
+ deserializedAmazonRdsForLinkedServiceTypeProperties.encryptionClient = reader.readUntyped();
+ } else if ("encryptionTypesClient".equals(fieldName)) {
+ deserializedAmazonRdsForLinkedServiceTypeProperties.encryptionTypesClient = reader.readUntyped();
+ } else if ("cryptoChecksumClient".equals(fieldName)) {
+ deserializedAmazonRdsForLinkedServiceTypeProperties.cryptoChecksumClient = reader.readUntyped();
+ } else if ("cryptoChecksumTypesClient".equals(fieldName)) {
+ deserializedAmazonRdsForLinkedServiceTypeProperties.cryptoChecksumTypesClient
+ = reader.readUntyped();
+ } else if ("initialLobFetchSize".equals(fieldName)) {
+ deserializedAmazonRdsForLinkedServiceTypeProperties.initialLobFetchSize = reader.readUntyped();
+ } else if ("fetchSize".equals(fieldName)) {
+ deserializedAmazonRdsForLinkedServiceTypeProperties.fetchSize = reader.readUntyped();
+ } else if ("statementCacheSize".equals(fieldName)) {
+ deserializedAmazonRdsForLinkedServiceTypeProperties.statementCacheSize = reader.readUntyped();
+ } else if ("initializationString".equals(fieldName)) {
+ deserializedAmazonRdsForLinkedServiceTypeProperties.initializationString = reader.readUntyped();
+ } else if ("enableBulkLoad".equals(fieldName)) {
+ deserializedAmazonRdsForLinkedServiceTypeProperties.enableBulkLoad = reader.readUntyped();
+ } else if ("supportV1DataTypes".equals(fieldName)) {
+ deserializedAmazonRdsForLinkedServiceTypeProperties.supportV1DataTypes = reader.readUntyped();
+ } else if ("fetchTswtzAsTimestamp".equals(fieldName)) {
+ deserializedAmazonRdsForLinkedServiceTypeProperties.fetchTswtzAsTimestamp = reader.readUntyped();
} else if ("encryptedCredential".equals(fieldName)) {
deserializedAmazonRdsForLinkedServiceTypeProperties.encryptedCredential = reader.getString();
} else {
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksLinkedServiceTypeProperties.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksLinkedServiceTypeProperties.java
index e265b16d8d06..5777137a9c7f 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksLinkedServiceTypeProperties.java
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/AzureDatabricksLinkedServiceTypeProperties.java
@@ -135,6 +135,11 @@ public final class AzureDatabricksLinkedServiceTypeProperties
*/
private CredentialReference credential;
+ /*
+ * The data security mode for the Databricks Cluster. Type: string (or Expression with resultType string).
+ */
+ private Object dataSecurityMode;
+
/**
* Creates an instance of AzureDatabricksLinkedServiceTypeProperties class.
*/
@@ -572,6 +577,28 @@ public AzureDatabricksLinkedServiceTypeProperties withCredential(CredentialRefer
return this;
}
+ /**
+ * Get the dataSecurityMode property: The data security mode for the Databricks Cluster. Type: string (or Expression
+ * with resultType string).
+ *
+ * @return the dataSecurityMode value.
+ */
+ public Object dataSecurityMode() {
+ return this.dataSecurityMode;
+ }
+
+ /**
+ * Set the dataSecurityMode property: The data security mode for the Databricks Cluster. Type: string (or Expression
+ * with resultType string).
+ *
+ * @param dataSecurityMode the dataSecurityMode value to set.
+ * @return the AzureDatabricksLinkedServiceTypeProperties object itself.
+ */
+ public AzureDatabricksLinkedServiceTypeProperties withDataSecurityMode(Object dataSecurityMode) {
+ this.dataSecurityMode = dataSecurityMode;
+ return this;
+ }
+
/**
* Validates the instance.
*
@@ -621,6 +648,7 @@ public JsonWriter toJson(JsonWriter jsonWriter) throws IOException {
jsonWriter.writeStringField("encryptedCredential", this.encryptedCredential);
jsonWriter.writeUntypedField("policyId", this.policyId);
jsonWriter.writeJsonField("credential", this.credential);
+ jsonWriter.writeUntypedField("dataSecurityMode", this.dataSecurityMode);
return jsonWriter.writeEndObject();
}
@@ -687,6 +715,8 @@ public static AzureDatabricksLinkedServiceTypeProperties fromJson(JsonReader jso
} else if ("credential".equals(fieldName)) {
deserializedAzureDatabricksLinkedServiceTypeProperties.credential
= CredentialReference.fromJson(reader);
+ } else if ("dataSecurityMode".equals(fieldName)) {
+ deserializedAzureDatabricksLinkedServiceTypeProperties.dataSecurityMode = reader.readUntyped();
} else {
reader.skipChildren();
}
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ActivityRunsClientImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ActivityRunsClientImpl.java
index 5c26cb6e200a..3b469e33beb0 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ActivityRunsClientImpl.java
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ActivityRunsClientImpl.java
@@ -22,7 +22,6 @@
import com.azure.core.management.exception.ManagementException;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
-import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.datafactory.fluent.ActivityRunsClient;
import com.azure.resourcemanager.datafactory.fluent.models.ActivityRunsQueryResponseInner;
import com.azure.resourcemanager.datafactory.models.RunFilterParameters;
@@ -70,17 +69,6 @@ Mono> queryByPipelineRun(@HostParam("$h
@PathParam("runId") String runId, @QueryParam("api-version") String apiVersion,
@BodyParam("application/json") RunFilterParameters filterParameters, @HeaderParam("Accept") String accept,
Context context);
-
- @Headers({ "Content-Type: application/json" })
- @Post("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns")
- @ExpectedResponses({ 200 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response queryByPipelineRunSync(@HostParam("$host") String endpoint,
- @PathParam("subscriptionId") String subscriptionId,
- @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
- @PathParam("runId") String runId, @QueryParam("api-version") String apiVersion,
- @BodyParam("application/json") RunFilterParameters filterParameters, @HeaderParam("Accept") String accept,
- Context context);
}
/**
@@ -130,6 +118,52 @@ private Mono> queryByPipelineRunWithRes
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
+ /**
+ * Query activity runs based on input filter conditions.
+ *
+ * @param resourceGroupName The resource group name.
+ * @param factoryName The factory name.
+ * @param runId The pipeline run identifier.
+ * @param filterParameters Parameters to filter the activity runs.
+ * @param context The context to associate with this operation.
+ * @throws IllegalArgumentException thrown if parameters fail the validation.
+ * @throws ManagementException thrown if the request is rejected by server.
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+ * @return a list activity runs along with {@link Response} on successful completion of {@link Mono}.
+ */
+ @ServiceMethod(returns = ReturnType.SINGLE)
+ private Mono> queryByPipelineRunWithResponseAsync(String resourceGroupName,
+ String factoryName, String runId, RunFilterParameters filterParameters, Context context) {
+ if (this.client.getEndpoint() == null) {
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+ }
+ if (this.client.getSubscriptionId() == null) {
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ }
+ if (resourceGroupName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ }
+ if (factoryName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
+ }
+ if (runId == null) {
+ return Mono.error(new IllegalArgumentException("Parameter runId is required and cannot be null."));
+ }
+ if (filterParameters == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter filterParameters is required and cannot be null."));
+ } else {
+ filterParameters.validate();
+ }
+ final String accept = "application/json";
+ context = this.client.mergeContext(context);
+ return service.queryByPipelineRun(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
+ factoryName, runId, this.client.getApiVersion(), filterParameters, accept, context);
+ }
+
/**
* Query activity runs based on input filter conditions.
*
@@ -165,36 +199,8 @@ private Mono queryByPipelineRunAsync(String reso
@ServiceMethod(returns = ReturnType.SINGLE)
public Response queryByPipelineRunWithResponse(String resourceGroupName,
String factoryName, String runId, RunFilterParameters filterParameters, Context context) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- if (runId == null) {
- throw LOGGER.atError().log(new IllegalArgumentException("Parameter runId is required and cannot be null."));
- }
- if (filterParameters == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter filterParameters is required and cannot be null."));
- } else {
- filterParameters.validate();
- }
- final String accept = "application/json";
- return service.queryByPipelineRunSync(this.client.getEndpoint(), this.client.getSubscriptionId(),
- resourceGroupName, factoryName, runId, this.client.getApiVersion(), filterParameters, accept, context);
+ return queryByPipelineRunWithResponseAsync(resourceGroupName, factoryName, runId, filterParameters, context)
+ .block();
}
/**
@@ -215,6 +221,4 @@ public ActivityRunsQueryResponseInner queryByPipelineRun(String resourceGroupNam
return queryByPipelineRunWithResponse(resourceGroupName, factoryName, runId, filterParameters, Context.NONE)
.getValue();
}
-
- private static final ClientLogger LOGGER = new ClientLogger(ActivityRunsClientImpl.class);
}
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ChangeDataCapturesClientImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ChangeDataCapturesClientImpl.java
index 0ec6d4bfb969..31914efdfc05 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ChangeDataCapturesClientImpl.java
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/ChangeDataCapturesClientImpl.java
@@ -29,7 +29,6 @@
import com.azure.core.management.exception.ManagementException;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
-import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.datafactory.fluent.ChangeDataCapturesClient;
import com.azure.resourcemanager.datafactory.fluent.models.ChangeDataCaptureResourceInner;
import com.azure.resourcemanager.datafactory.models.ChangeDataCaptureListResponse;
@@ -76,15 +75,6 @@ Mono> listByFactory(@HostParam("$host")
@PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
@QueryParam("api-version") String apiVersion, @HeaderParam("Accept") String accept, Context context);
- @Headers({ "Content-Type: application/json" })
- @Get("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs")
- @ExpectedResponses({ 200 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response listByFactorySync(@HostParam("$host") String endpoint,
- @PathParam("subscriptionId") String subscriptionId,
- @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
- @QueryParam("api-version") String apiVersion, @HeaderParam("Accept") String accept, Context context);
-
@Headers({ "Content-Type: application/json" })
@Put("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}")
@ExpectedResponses({ 200 })
@@ -97,18 +87,6 @@ Mono> createOrUpdate(@HostParam("$host"
@BodyParam("application/json") ChangeDataCaptureResourceInner changeDataCapture,
@HeaderParam("Accept") String accept, Context context);
- @Headers({ "Content-Type: application/json" })
- @Put("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}")
- @ExpectedResponses({ 200 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response createOrUpdateSync(@HostParam("$host") String endpoint,
- @PathParam("subscriptionId") String subscriptionId,
- @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
- @PathParam("changeDataCaptureName") String changeDataCaptureName,
- @QueryParam("api-version") String apiVersion, @HeaderParam("If-Match") String ifMatch,
- @BodyParam("application/json") ChangeDataCaptureResourceInner changeDataCapture,
- @HeaderParam("Accept") String accept, Context context);
-
@Headers({ "Content-Type: application/json" })
@Get("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}")
@ExpectedResponses({ 200 })
@@ -120,17 +98,6 @@ Mono> get(@HostParam("$host") String en
@QueryParam("api-version") String apiVersion, @HeaderParam("If-None-Match") String ifNoneMatch,
@HeaderParam("Accept") String accept, Context context);
- @Headers({ "Content-Type: application/json" })
- @Get("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}")
- @ExpectedResponses({ 200 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response getSync(@HostParam("$host") String endpoint,
- @PathParam("subscriptionId") String subscriptionId,
- @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
- @PathParam("changeDataCaptureName") String changeDataCaptureName,
- @QueryParam("api-version") String apiVersion, @HeaderParam("If-None-Match") String ifNoneMatch,
- @HeaderParam("Accept") String accept, Context context);
-
@Headers({ "Content-Type: application/json" })
@Delete("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}")
@ExpectedResponses({ 200, 204 })
@@ -141,16 +108,6 @@ Mono> delete(@HostParam("$host") String endpoint,
@PathParam("changeDataCaptureName") String changeDataCaptureName,
@QueryParam("api-version") String apiVersion, @HeaderParam("Accept") String accept, Context context);
- @Headers({ "Content-Type: application/json" })
- @Delete("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}")
- @ExpectedResponses({ 200, 204 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response deleteSync(@HostParam("$host") String endpoint,
- @PathParam("subscriptionId") String subscriptionId,
- @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
- @PathParam("changeDataCaptureName") String changeDataCaptureName,
- @QueryParam("api-version") String apiVersion, @HeaderParam("Accept") String accept, Context context);
-
@Headers({ "Content-Type: application/json" })
@Post("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/start")
@ExpectedResponses({ 200 })
@@ -161,16 +118,6 @@ Mono> start(@HostParam("$host") String endpoint,
@PathParam("changeDataCaptureName") String changeDataCaptureName,
@QueryParam("api-version") String apiVersion, @HeaderParam("Accept") String accept, Context context);
- @Headers({ "Content-Type: application/json" })
- @Post("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/start")
- @ExpectedResponses({ 200 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response startSync(@HostParam("$host") String endpoint,
- @PathParam("subscriptionId") String subscriptionId,
- @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
- @PathParam("changeDataCaptureName") String changeDataCaptureName,
- @QueryParam("api-version") String apiVersion, @HeaderParam("Accept") String accept, Context context);
-
@Headers({ "Content-Type: application/json" })
@Post("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/stop")
@ExpectedResponses({ 200 })
@@ -181,15 +128,6 @@ Mono> stop(@HostParam("$host") String endpoint,
@PathParam("changeDataCaptureName") String changeDataCaptureName,
@QueryParam("api-version") String apiVersion, @HeaderParam("Accept") String accept, Context context);
- @Headers({ "Content-Type: application/json" })
- @Post("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/stop")
- @ExpectedResponses({ 200 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response stopSync(@HostParam("$host") String endpoint, @PathParam("subscriptionId") String subscriptionId,
- @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
- @PathParam("changeDataCaptureName") String changeDataCaptureName,
- @QueryParam("api-version") String apiVersion, @HeaderParam("Accept") String accept, Context context);
-
@Headers({ "Content-Type: application/json" })
@Get("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/status")
@ExpectedResponses({ 200 })
@@ -200,16 +138,6 @@ Mono> status(@HostParam("$host") String endpoint,
@PathParam("changeDataCaptureName") String changeDataCaptureName,
@QueryParam("api-version") String apiVersion, @HeaderParam("Accept") String accept, Context context);
- @Headers({ "Content-Type: application/json" })
- @Get("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/status")
- @ExpectedResponses({ 200 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response statusSync(@HostParam("$host") String endpoint,
- @PathParam("subscriptionId") String subscriptionId,
- @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
- @PathParam("changeDataCaptureName") String changeDataCaptureName,
- @QueryParam("api-version") String apiVersion, @HeaderParam("Accept") String accept, Context context);
-
@Headers({ "Content-Type: application/json" })
@Get("{nextLink}")
@ExpectedResponses({ 200 })
@@ -217,14 +145,6 @@ Response statusSync(@HostParam("$host") String endpoint,
Mono> listByFactoryNext(
@PathParam(value = "nextLink", encoded = true) String nextLink, @HostParam("$host") String endpoint,
@HeaderParam("Accept") String accept, Context context);
-
- @Headers({ "Content-Type: application/json" })
- @Get("{nextLink}")
- @ExpectedResponses({ 200 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response listByFactoryNextSync(
- @PathParam(value = "nextLink", encoded = true) String nextLink, @HostParam("$host") String endpoint,
- @HeaderParam("Accept") String accept, Context context);
}
/**
@@ -270,15 +190,38 @@ private Mono> listByFactorySingleP
*
* @param resourceGroupName The resource group name.
* @param factoryName The factory name.
+ * @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
- * @return a list of change data capture resources as paginated response with {@link PagedFlux}.
+ * @return a list of change data capture resources along with {@link PagedResponse} on successful completion of
+ * {@link Mono}.
*/
- @ServiceMethod(returns = ReturnType.COLLECTION)
- private PagedFlux listByFactoryAsync(String resourceGroupName, String factoryName) {
- return new PagedFlux<>(() -> listByFactorySinglePageAsync(resourceGroupName, factoryName),
- nextLink -> listByFactoryNextSinglePageAsync(nextLink));
+ @ServiceMethod(returns = ReturnType.SINGLE)
+ private Mono> listByFactorySinglePageAsync(String resourceGroupName,
+ String factoryName, Context context) {
+ if (this.client.getEndpoint() == null) {
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+ }
+ if (this.client.getSubscriptionId() == null) {
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ }
+ if (resourceGroupName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ }
+ if (factoryName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
+ }
+ final String accept = "application/json";
+ context = this.client.mergeContext(context);
+ return service
+ .listByFactory(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName, factoryName,
+ this.client.getApiVersion(), accept, context)
+ .map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(),
+ res.getValue().value(), res.getValue().nextLink(), null));
}
/**
@@ -289,35 +232,12 @@ private PagedFlux listByFactoryAsync(String reso
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
- * @return a list of change data capture resources along with {@link PagedResponse}.
+ * @return a list of change data capture resources as paginated response with {@link PagedFlux}.
*/
- @ServiceMethod(returns = ReturnType.SINGLE)
- private PagedResponse listByFactorySinglePage(String resourceGroupName,
- String factoryName) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- final String accept = "application/json";
- Response res
- = service.listByFactorySync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, this.client.getApiVersion(), accept, Context.NONE);
- return new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(),
- res.getValue().nextLink(), null);
+ @ServiceMethod(returns = ReturnType.COLLECTION)
+ private PagedFlux listByFactoryAsync(String resourceGroupName, String factoryName) {
+ return new PagedFlux<>(() -> listByFactorySinglePageAsync(resourceGroupName, factoryName),
+ nextLink -> listByFactoryNextSinglePageAsync(nextLink));
}
/**
@@ -329,35 +249,13 @@ private PagedResponse listByFactorySinglePage(St
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
- * @return a list of change data capture resources along with {@link PagedResponse}.
+ * @return a list of change data capture resources as paginated response with {@link PagedFlux}.
*/
- @ServiceMethod(returns = ReturnType.SINGLE)
- private PagedResponse listByFactorySinglePage(String resourceGroupName,
- String factoryName, Context context) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- final String accept = "application/json";
- Response res
- = service.listByFactorySync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, this.client.getApiVersion(), accept, context);
- return new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(),
- res.getValue().nextLink(), null);
+ @ServiceMethod(returns = ReturnType.COLLECTION)
+ private PagedFlux listByFactoryAsync(String resourceGroupName, String factoryName,
+ Context context) {
+ return new PagedFlux<>(() -> listByFactorySinglePageAsync(resourceGroupName, factoryName, context),
+ nextLink -> listByFactoryNextSinglePageAsync(nextLink, context));
}
/**
@@ -372,8 +270,7 @@ private PagedResponse listByFactorySinglePage(St
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable listByFactory(String resourceGroupName, String factoryName) {
- return new PagedIterable<>(() -> listByFactorySinglePage(resourceGroupName, factoryName),
- nextLink -> listByFactoryNextSinglePage(nextLink));
+ return new PagedIterable<>(listByFactoryAsync(resourceGroupName, factoryName));
}
/**
@@ -390,8 +287,7 @@ public PagedIterable listByFactory(String resour
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable listByFactory(String resourceGroupName, String factoryName,
Context context) {
- return new PagedIterable<>(() -> listByFactorySinglePage(resourceGroupName, factoryName, context),
- nextLink -> listByFactoryNextSinglePage(nextLink, context));
+ return new PagedIterable<>(listByFactoryAsync(resourceGroupName, factoryName, context));
}
/**
@@ -445,6 +341,57 @@ private Mono> createOrUpdateWithRespons
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
+ /**
+ * Creates or updates a change data capture resource.
+ *
+ * @param resourceGroupName The resource group name.
+ * @param factoryName The factory name.
+ * @param changeDataCaptureName The change data capture name.
+ * @param changeDataCapture Change data capture resource definition.
+ * @param ifMatch ETag of the change data capture entity. Should only be specified for update, for which it should
+ * match existing entity or can be * for unconditional update.
+ * @param context The context to associate with this operation.
+ * @throws IllegalArgumentException thrown if parameters fail the validation.
+ * @throws ManagementException thrown if the request is rejected by server.
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+ * @return change data capture resource type along with {@link Response} on successful completion of {@link Mono}.
+ */
+ @ServiceMethod(returns = ReturnType.SINGLE)
+ private Mono> createOrUpdateWithResponseAsync(String resourceGroupName,
+ String factoryName, String changeDataCaptureName, ChangeDataCaptureResourceInner changeDataCapture,
+ String ifMatch, Context context) {
+ if (this.client.getEndpoint() == null) {
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+ }
+ if (this.client.getSubscriptionId() == null) {
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ }
+ if (resourceGroupName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ }
+ if (factoryName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
+ }
+ if (changeDataCaptureName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter changeDataCaptureName is required and cannot be null."));
+ }
+ if (changeDataCapture == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter changeDataCapture is required and cannot be null."));
+ } else {
+ changeDataCapture.validate();
+ }
+ final String accept = "application/json";
+ context = this.client.mergeContext(context);
+ return service.createOrUpdate(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
+ factoryName, changeDataCaptureName, this.client.getApiVersion(), ifMatch, changeDataCapture, accept,
+ context);
+ }
+
/**
* Creates or updates a change data capture resource.
*
@@ -484,38 +431,8 @@ private Mono createOrUpdateAsync(String resource
public Response createOrUpdateWithResponse(String resourceGroupName,
String factoryName, String changeDataCaptureName, ChangeDataCaptureResourceInner changeDataCapture,
String ifMatch, Context context) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- if (changeDataCaptureName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter changeDataCaptureName is required and cannot be null."));
- }
- if (changeDataCapture == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter changeDataCapture is required and cannot be null."));
- } else {
- changeDataCapture.validate();
- }
- final String accept = "application/json";
- return service.createOrUpdateSync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, changeDataCaptureName, this.client.getApiVersion(), ifMatch, changeDataCapture, accept,
- context);
+ return createOrUpdateWithResponseAsync(resourceGroupName, factoryName, changeDataCaptureName, changeDataCapture,
+ ifMatch, context).block();
}
/**
@@ -581,6 +498,48 @@ private Mono> getWithResponseAsync(Stri
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
+ /**
+ * Gets a change data capture.
+ *
+ * @param resourceGroupName The resource group name.
+ * @param factoryName The factory name.
+ * @param changeDataCaptureName The change data capture name.
+ * @param ifNoneMatch ETag of the change data capture entity. Should only be specified for get. If the ETag matches
+ * the existing entity tag, or if * was provided, then no content will be returned.
+ * @param context The context to associate with this operation.
+ * @throws IllegalArgumentException thrown if parameters fail the validation.
+ * @throws ManagementException thrown if the request is rejected by server.
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+ * @return a change data capture along with {@link Response} on successful completion of {@link Mono}.
+ */
+ @ServiceMethod(returns = ReturnType.SINGLE)
+ private Mono> getWithResponseAsync(String resourceGroupName,
+ String factoryName, String changeDataCaptureName, String ifNoneMatch, Context context) {
+ if (this.client.getEndpoint() == null) {
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+ }
+ if (this.client.getSubscriptionId() == null) {
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ }
+ if (resourceGroupName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ }
+ if (factoryName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
+ }
+ if (changeDataCaptureName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter changeDataCaptureName is required and cannot be null."));
+ }
+ final String accept = "application/json";
+ context = this.client.mergeContext(context);
+ return service.get(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName, factoryName,
+ changeDataCaptureName, this.client.getApiVersion(), ifNoneMatch, accept, context);
+ }
+
/**
* Gets a change data capture.
*
@@ -617,31 +576,8 @@ private Mono getAsync(String resourceGroupName,
@ServiceMethod(returns = ReturnType.SINGLE)
public Response getWithResponse(String resourceGroupName, String factoryName,
String changeDataCaptureName, String ifNoneMatch, Context context) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- if (changeDataCaptureName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter changeDataCaptureName is required and cannot be null."));
- }
- final String accept = "application/json";
- return service.getSync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, changeDataCaptureName, this.client.getApiVersion(), ifNoneMatch, accept, context);
+ return getWithResponseAsync(resourceGroupName, factoryName, changeDataCaptureName, ifNoneMatch, context)
+ .block();
}
/**
@@ -703,6 +639,46 @@ private Mono> deleteWithResponseAsync(String resourceGroupName, S
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
+ /**
+ * Deletes a change data capture.
+ *
+ * @param resourceGroupName The resource group name.
+ * @param factoryName The factory name.
+ * @param changeDataCaptureName The change data capture name.
+ * @param context The context to associate with this operation.
+ * @throws IllegalArgumentException thrown if parameters fail the validation.
+ * @throws ManagementException thrown if the request is rejected by server.
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+ * @return the {@link Response} on successful completion of {@link Mono}.
+ */
+ @ServiceMethod(returns = ReturnType.SINGLE)
+ private Mono> deleteWithResponseAsync(String resourceGroupName, String factoryName,
+ String changeDataCaptureName, Context context) {
+ if (this.client.getEndpoint() == null) {
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+ }
+ if (this.client.getSubscriptionId() == null) {
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ }
+ if (resourceGroupName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ }
+ if (factoryName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
+ }
+ if (changeDataCaptureName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter changeDataCaptureName is required and cannot be null."));
+ }
+ final String accept = "application/json";
+ context = this.client.mergeContext(context);
+ return service.delete(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
+ factoryName, changeDataCaptureName, this.client.getApiVersion(), accept, context);
+ }
+
/**
* Deletes a change data capture.
*
@@ -735,31 +711,7 @@ private Mono deleteAsync(String resourceGroupName, String factoryName, Str
@ServiceMethod(returns = ReturnType.SINGLE)
public Response deleteWithResponse(String resourceGroupName, String factoryName, String changeDataCaptureName,
Context context) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- if (changeDataCaptureName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter changeDataCaptureName is required and cannot be null."));
- }
- final String accept = "application/json";
- return service.deleteSync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, changeDataCaptureName, this.client.getApiVersion(), accept, context);
+ return deleteWithResponseAsync(resourceGroupName, factoryName, changeDataCaptureName, context).block();
}
/**
@@ -817,6 +769,46 @@ private Mono> startWithResponseAsync(String resourceGroupName, St
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
+ /**
+ * Starts a change data capture.
+ *
+ * @param resourceGroupName The resource group name.
+ * @param factoryName The factory name.
+ * @param changeDataCaptureName The change data capture name.
+ * @param context The context to associate with this operation.
+ * @throws IllegalArgumentException thrown if parameters fail the validation.
+ * @throws ManagementException thrown if the request is rejected by server.
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+ * @return the {@link Response} on successful completion of {@link Mono}.
+ */
+ @ServiceMethod(returns = ReturnType.SINGLE)
+ private Mono> startWithResponseAsync(String resourceGroupName, String factoryName,
+ String changeDataCaptureName, Context context) {
+ if (this.client.getEndpoint() == null) {
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+ }
+ if (this.client.getSubscriptionId() == null) {
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ }
+ if (resourceGroupName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ }
+ if (factoryName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
+ }
+ if (changeDataCaptureName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter changeDataCaptureName is required and cannot be null."));
+ }
+ final String accept = "application/json";
+ context = this.client.mergeContext(context);
+ return service.start(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName, factoryName,
+ changeDataCaptureName, this.client.getApiVersion(), accept, context);
+ }
+
/**
* Starts a change data capture.
*
@@ -849,31 +841,7 @@ private Mono startAsync(String resourceGroupName, String factoryName, Stri
@ServiceMethod(returns = ReturnType.SINGLE)
public Response startWithResponse(String resourceGroupName, String factoryName, String changeDataCaptureName,
Context context) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- if (changeDataCaptureName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter changeDataCaptureName is required and cannot be null."));
- }
- final String accept = "application/json";
- return service.startSync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, changeDataCaptureName, this.client.getApiVersion(), accept, context);
+ return startWithResponseAsync(resourceGroupName, factoryName, changeDataCaptureName, context).block();
}
/**
@@ -931,6 +899,46 @@ private Mono> stopWithResponseAsync(String resourceGroupName, Str
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
+ /**
+ * Stops a change data capture.
+ *
+ * @param resourceGroupName The resource group name.
+ * @param factoryName The factory name.
+ * @param changeDataCaptureName The change data capture name.
+ * @param context The context to associate with this operation.
+ * @throws IllegalArgumentException thrown if parameters fail the validation.
+ * @throws ManagementException thrown if the request is rejected by server.
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+ * @return the {@link Response} on successful completion of {@link Mono}.
+ */
+ @ServiceMethod(returns = ReturnType.SINGLE)
+ private Mono> stopWithResponseAsync(String resourceGroupName, String factoryName,
+ String changeDataCaptureName, Context context) {
+ if (this.client.getEndpoint() == null) {
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+ }
+ if (this.client.getSubscriptionId() == null) {
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ }
+ if (resourceGroupName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ }
+ if (factoryName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
+ }
+ if (changeDataCaptureName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter changeDataCaptureName is required and cannot be null."));
+ }
+ final String accept = "application/json";
+ context = this.client.mergeContext(context);
+ return service.stop(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName, factoryName,
+ changeDataCaptureName, this.client.getApiVersion(), accept, context);
+ }
+
/**
* Stops a change data capture.
*
@@ -963,31 +971,7 @@ private Mono stopAsync(String resourceGroupName, String factoryName, Strin
@ServiceMethod(returns = ReturnType.SINGLE)
public Response stopWithResponse(String resourceGroupName, String factoryName, String changeDataCaptureName,
Context context) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- if (changeDataCaptureName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter changeDataCaptureName is required and cannot be null."));
- }
- final String accept = "application/json";
- return service.stopSync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, changeDataCaptureName, this.client.getApiVersion(), accept, context);
+ return stopWithResponseAsync(resourceGroupName, factoryName, changeDataCaptureName, context).block();
}
/**
@@ -1046,6 +1030,47 @@ private Mono> statusWithResponseAsync(String resourceGroupName,
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
+ /**
+ * Gets the current status for the change data capture resource.
+ *
+ * @param resourceGroupName The resource group name.
+ * @param factoryName The factory name.
+ * @param changeDataCaptureName The change data capture name.
+ * @param context The context to associate with this operation.
+ * @throws IllegalArgumentException thrown if parameters fail the validation.
+ * @throws ManagementException thrown if the request is rejected by server.
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+ * @return the current status for the change data capture resource along with {@link Response} on successful
+ * completion of {@link Mono}.
+ */
+ @ServiceMethod(returns = ReturnType.SINGLE)
+ private Mono> statusWithResponseAsync(String resourceGroupName, String factoryName,
+ String changeDataCaptureName, Context context) {
+ if (this.client.getEndpoint() == null) {
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+ }
+ if (this.client.getSubscriptionId() == null) {
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ }
+ if (resourceGroupName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ }
+ if (factoryName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
+ }
+ if (changeDataCaptureName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter changeDataCaptureName is required and cannot be null."));
+ }
+ final String accept = "application/json";
+ context = this.client.mergeContext(context);
+ return service.status(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
+ factoryName, changeDataCaptureName, this.client.getApiVersion(), accept, context);
+ }
+
/**
* Gets the current status for the change data capture resource.
*
@@ -1078,31 +1103,7 @@ private Mono statusAsync(String resourceGroupName, String factoryName, S
@ServiceMethod(returns = ReturnType.SINGLE)
public Response statusWithResponse(String resourceGroupName, String factoryName,
String changeDataCaptureName, Context context) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- if (changeDataCaptureName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter changeDataCaptureName is required and cannot be null."));
- }
- final String accept = "application/json";
- return service.statusSync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, changeDataCaptureName, this.client.getApiVersion(), accept, context);
+ return statusWithResponseAsync(resourceGroupName, factoryName, changeDataCaptureName, context).block();
}
/**
@@ -1148,33 +1149,6 @@ private Mono> listByFactoryNextSin
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
- /**
- * Get the next page of items.
- *
- * @param nextLink The URL to get the next list of items.
- * @throws IllegalArgumentException thrown if parameters fail the validation.
- * @throws ManagementException thrown if the request is rejected by server.
- * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
- * @return a list of change data capture resources along with {@link PagedResponse}.
- */
- @ServiceMethod(returns = ReturnType.SINGLE)
- private PagedResponse listByFactoryNextSinglePage(String nextLink) {
- if (nextLink == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
- }
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- final String accept = "application/json";
- Response res
- = service.listByFactoryNextSync(nextLink, this.client.getEndpoint(), accept, Context.NONE);
- return new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(),
- res.getValue().nextLink(), null);
- }
-
/**
* Get the next page of items.
*
@@ -1183,26 +1157,23 @@ private PagedResponse listByFactoryNextSinglePag
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
- * @return a list of change data capture resources along with {@link PagedResponse}.
+ * @return a list of change data capture resources along with {@link PagedResponse} on successful completion of
+ * {@link Mono}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
- private PagedResponse listByFactoryNextSinglePage(String nextLink,
+ private Mono> listByFactoryNextSinglePageAsync(String nextLink,
Context context) {
if (nextLink == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
+ return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
}
if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
}
final String accept = "application/json";
- Response res
- = service.listByFactoryNextSync(nextLink, this.client.getEndpoint(), accept, context);
- return new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(),
- res.getValue().nextLink(), null);
+ context = this.client.mergeContext(context);
+ return service.listByFactoryNext(nextLink, this.client.getEndpoint(), accept, context)
+ .map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(),
+ res.getValue().value(), res.getValue().nextLink(), null));
}
-
- private static final ClientLogger LOGGER = new ClientLogger(ChangeDataCapturesClientImpl.class);
}
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/CredentialOperationsClientImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/CredentialOperationsClientImpl.java
index 0ab9f46a6f01..2b1c1c3713d2 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/CredentialOperationsClientImpl.java
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/CredentialOperationsClientImpl.java
@@ -28,7 +28,6 @@
import com.azure.core.management.exception.ManagementException;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
-import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.datafactory.fluent.CredentialOperationsClient;
import com.azure.resourcemanager.datafactory.fluent.models.CredentialResourceInner;
import com.azure.resourcemanager.datafactory.models.CredentialListResponse;
@@ -75,15 +74,6 @@ Mono> listByFactory(@HostParam("$host") String
@PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
@QueryParam("api-version") String apiVersion, @HeaderParam("Accept") String accept, Context context);
- @Headers({ "Content-Type: application/json" })
- @Get("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/credentials")
- @ExpectedResponses({ 200 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response listByFactorySync(@HostParam("$host") String endpoint,
- @PathParam("subscriptionId") String subscriptionId,
- @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
- @QueryParam("api-version") String apiVersion, @HeaderParam("Accept") String accept, Context context);
-
@Headers({ "Content-Type: application/json" })
@Put("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/credentials/{credentialName}")
@ExpectedResponses({ 200 })
@@ -95,17 +85,6 @@ Mono> createOrUpdate(@HostParam("$host") Strin
@HeaderParam("If-Match") String ifMatch, @BodyParam("application/json") CredentialResourceInner credential,
@HeaderParam("Accept") String accept, Context context);
- @Headers({ "Content-Type: application/json" })
- @Put("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/credentials/{credentialName}")
- @ExpectedResponses({ 200 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response createOrUpdateSync(@HostParam("$host") String endpoint,
- @PathParam("subscriptionId") String subscriptionId,
- @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
- @PathParam("credentialName") String credentialName, @QueryParam("api-version") String apiVersion,
- @HeaderParam("If-Match") String ifMatch, @BodyParam("application/json") CredentialResourceInner credential,
- @HeaderParam("Accept") String accept, Context context);
-
@Headers({ "Content-Type: application/json" })
@Get("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/credentials/{credentialName}")
@ExpectedResponses({ 200, 304 })
@@ -116,16 +95,6 @@ Mono> get(@HostParam("$host") String endpoint,
@PathParam("credentialName") String credentialName, @QueryParam("api-version") String apiVersion,
@HeaderParam("If-None-Match") String ifNoneMatch, @HeaderParam("Accept") String accept, Context context);
- @Headers({ "Content-Type: application/json" })
- @Get("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/credentials/{credentialName}")
- @ExpectedResponses({ 200, 304 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response getSync(@HostParam("$host") String endpoint,
- @PathParam("subscriptionId") String subscriptionId,
- @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
- @PathParam("credentialName") String credentialName, @QueryParam("api-version") String apiVersion,
- @HeaderParam("If-None-Match") String ifNoneMatch, @HeaderParam("Accept") String accept, Context context);
-
@Headers({ "Content-Type: application/json" })
@Delete("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/credentials/{credentialName}")
@ExpectedResponses({ 200, 204 })
@@ -136,16 +105,6 @@ Mono> delete(@HostParam("$host") String endpoint,
@PathParam("credentialName") String credentialName, @QueryParam("api-version") String apiVersion,
@HeaderParam("Accept") String accept, Context context);
- @Headers({ "Content-Type: application/json" })
- @Delete("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/credentials/{credentialName}")
- @ExpectedResponses({ 200, 204 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response deleteSync(@HostParam("$host") String endpoint,
- @PathParam("subscriptionId") String subscriptionId,
- @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
- @PathParam("credentialName") String credentialName, @QueryParam("api-version") String apiVersion,
- @HeaderParam("Accept") String accept, Context context);
-
@Headers({ "Content-Type: application/json" })
@Get("{nextLink}")
@ExpectedResponses({ 200 })
@@ -153,14 +112,6 @@ Response deleteSync(@HostParam("$host") String endpoint,
Mono> listByFactoryNext(
@PathParam(value = "nextLink", encoded = true) String nextLink, @HostParam("$host") String endpoint,
@HeaderParam("Accept") String accept, Context context);
-
- @Headers({ "Content-Type: application/json" })
- @Get("{nextLink}")
- @ExpectedResponses({ 200 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response listByFactoryNextSync(
- @PathParam(value = "nextLink", encoded = true) String nextLink, @HostParam("$host") String endpoint,
- @HeaderParam("Accept") String accept, Context context);
}
/**
@@ -205,15 +156,37 @@ private Mono> listByFactorySinglePageAsyn
*
* @param resourceGroupName The resource group name.
* @param factoryName The factory name.
+ * @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
- * @return a list of credential resources as paginated response with {@link PagedFlux}.
+ * @return a list of credential resources along with {@link PagedResponse} on successful completion of {@link Mono}.
*/
- @ServiceMethod(returns = ReturnType.COLLECTION)
- private PagedFlux listByFactoryAsync(String resourceGroupName, String factoryName) {
- return new PagedFlux<>(() -> listByFactorySinglePageAsync(resourceGroupName, factoryName),
- nextLink -> listByFactoryNextSinglePageAsync(nextLink));
+ @ServiceMethod(returns = ReturnType.SINGLE)
+ private Mono> listByFactorySinglePageAsync(String resourceGroupName,
+ String factoryName, Context context) {
+ if (this.client.getEndpoint() == null) {
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+ }
+ if (this.client.getSubscriptionId() == null) {
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ }
+ if (resourceGroupName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ }
+ if (factoryName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
+ }
+ final String accept = "application/json";
+ context = this.client.mergeContext(context);
+ return service
+ .listByFactory(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName, factoryName,
+ this.client.getApiVersion(), accept, context)
+ .map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(),
+ res.getValue().value(), res.getValue().nextLink(), null));
}
/**
@@ -224,35 +197,12 @@ private PagedFlux listByFactoryAsync(String resourceGro
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
- * @return a list of credential resources along with {@link PagedResponse}.
+ * @return a list of credential resources as paginated response with {@link PagedFlux}.
*/
- @ServiceMethod(returns = ReturnType.SINGLE)
- private PagedResponse listByFactorySinglePage(String resourceGroupName,
- String factoryName) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- final String accept = "application/json";
- Response res
- = service.listByFactorySync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, this.client.getApiVersion(), accept, Context.NONE);
- return new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(),
- res.getValue().nextLink(), null);
+ @ServiceMethod(returns = ReturnType.COLLECTION)
+ private PagedFlux listByFactoryAsync(String resourceGroupName, String factoryName) {
+ return new PagedFlux<>(() -> listByFactorySinglePageAsync(resourceGroupName, factoryName),
+ nextLink -> listByFactoryNextSinglePageAsync(nextLink));
}
/**
@@ -264,35 +214,13 @@ private PagedResponse listByFactorySinglePage(String re
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
- * @return a list of credential resources along with {@link PagedResponse}.
+ * @return a list of credential resources as paginated response with {@link PagedFlux}.
*/
- @ServiceMethod(returns = ReturnType.SINGLE)
- private PagedResponse listByFactorySinglePage(String resourceGroupName, String factoryName,
+ @ServiceMethod(returns = ReturnType.COLLECTION)
+ private PagedFlux listByFactoryAsync(String resourceGroupName, String factoryName,
Context context) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- final String accept = "application/json";
- Response res
- = service.listByFactorySync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, this.client.getApiVersion(), accept, context);
- return new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(),
- res.getValue().nextLink(), null);
+ return new PagedFlux<>(() -> listByFactorySinglePageAsync(resourceGroupName, factoryName, context),
+ nextLink -> listByFactoryNextSinglePageAsync(nextLink, context));
}
/**
@@ -307,8 +235,7 @@ private PagedResponse listByFactorySinglePage(String re
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable listByFactory(String resourceGroupName, String factoryName) {
- return new PagedIterable<>(() -> listByFactorySinglePage(resourceGroupName, factoryName),
- nextLink -> listByFactoryNextSinglePage(nextLink));
+ return new PagedIterable<>(listByFactoryAsync(resourceGroupName, factoryName));
}
/**
@@ -325,8 +252,7 @@ public PagedIterable listByFactory(String resourceGroup
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable listByFactory(String resourceGroupName, String factoryName,
Context context) {
- return new PagedIterable<>(() -> listByFactorySinglePage(resourceGroupName, factoryName, context),
- nextLink -> listByFactoryNextSinglePage(nextLink, context));
+ return new PagedIterable<>(listByFactoryAsync(resourceGroupName, factoryName, context));
}
/**
@@ -377,6 +303,54 @@ private Mono> createOrUpdateWithResponseAsync(
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
+ /**
+ * Creates or updates a credential.
+ *
+ * @param resourceGroupName The resource group name.
+ * @param factoryName The factory name.
+ * @param credentialName Credential name.
+ * @param credential Credential resource definition.
+ * @param ifMatch ETag of the credential entity. Should only be specified for update, for which it should match
+ * existing entity or can be * for unconditional update.
+ * @param context The context to associate with this operation.
+ * @throws IllegalArgumentException thrown if parameters fail the validation.
+ * @throws ManagementException thrown if the request is rejected by server.
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+ * @return credential resource type along with {@link Response} on successful completion of {@link Mono}.
+ */
+ @ServiceMethod(returns = ReturnType.SINGLE)
+ private Mono> createOrUpdateWithResponseAsync(String resourceGroupName,
+ String factoryName, String credentialName, CredentialResourceInner credential, String ifMatch,
+ Context context) {
+ if (this.client.getEndpoint() == null) {
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+ }
+ if (this.client.getSubscriptionId() == null) {
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ }
+ if (resourceGroupName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ }
+ if (factoryName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
+ }
+ if (credentialName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter credentialName is required and cannot be null."));
+ }
+ if (credential == null) {
+ return Mono.error(new IllegalArgumentException("Parameter credential is required and cannot be null."));
+ } else {
+ credential.validate();
+ }
+ final String accept = "application/json";
+ context = this.client.mergeContext(context);
+ return service.createOrUpdate(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
+ factoryName, credentialName, this.client.getApiVersion(), ifMatch, credential, accept, context);
+ }
+
/**
* Creates or updates a credential.
*
@@ -415,37 +389,8 @@ private Mono createOrUpdateAsync(String resourceGroupNa
@ServiceMethod(returns = ReturnType.SINGLE)
public Response createOrUpdateWithResponse(String resourceGroupName, String factoryName,
String credentialName, CredentialResourceInner credential, String ifMatch, Context context) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- if (credentialName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter credentialName is required and cannot be null."));
- }
- if (credential == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter credential is required and cannot be null."));
- } else {
- credential.validate();
- }
- final String accept = "application/json";
- return service.createOrUpdateSync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, credentialName, this.client.getApiVersion(), ifMatch, credential, accept, context);
+ return createOrUpdateWithResponseAsync(resourceGroupName, factoryName, credentialName, credential, ifMatch,
+ context).block();
}
/**
@@ -510,6 +455,47 @@ private Mono> getWithResponseAsync(String reso
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
+ /**
+ * Gets a credential.
+ *
+ * @param resourceGroupName The resource group name.
+ * @param factoryName The factory name.
+ * @param credentialName Credential name.
+ * @param ifNoneMatch ETag of the credential entity. Should only be specified for get. If the ETag matches the
+ * existing entity tag, or if * was provided, then no content will be returned.
+ * @param context The context to associate with this operation.
+ * @throws IllegalArgumentException thrown if parameters fail the validation.
+ * @throws ManagementException thrown if the request is rejected by server.
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+ * @return a credential along with {@link Response} on successful completion of {@link Mono}.
+ */
+ @ServiceMethod(returns = ReturnType.SINGLE)
+ private Mono> getWithResponseAsync(String resourceGroupName, String factoryName,
+ String credentialName, String ifNoneMatch, Context context) {
+ if (this.client.getEndpoint() == null) {
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+ }
+ if (this.client.getSubscriptionId() == null) {
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ }
+ if (resourceGroupName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ }
+ if (factoryName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
+ }
+ if (credentialName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter credentialName is required and cannot be null."));
+ }
+ final String accept = "application/json";
+ context = this.client.mergeContext(context);
+ return service.get(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName, factoryName,
+ credentialName, this.client.getApiVersion(), ifNoneMatch, accept, context);
+ }
+
/**
* Gets a credential.
*
@@ -546,31 +532,7 @@ private Mono getAsync(String resourceGroupName, String
@ServiceMethod(returns = ReturnType.SINGLE)
public Response getWithResponse(String resourceGroupName, String factoryName,
String credentialName, String ifNoneMatch, Context context) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- if (credentialName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter credentialName is required and cannot be null."));
- }
- final String accept = "application/json";
- return service.getSync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, credentialName, this.client.getApiVersion(), ifNoneMatch, accept, context);
+ return getWithResponseAsync(resourceGroupName, factoryName, credentialName, ifNoneMatch, context).block();
}
/**
@@ -629,6 +591,45 @@ private Mono> deleteWithResponseAsync(String resourceGroupName, S
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
+ /**
+ * Deletes a credential.
+ *
+ * @param resourceGroupName The resource group name.
+ * @param factoryName The factory name.
+ * @param credentialName Credential name.
+ * @param context The context to associate with this operation.
+ * @throws IllegalArgumentException thrown if parameters fail the validation.
+ * @throws ManagementException thrown if the request is rejected by server.
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+ * @return the {@link Response} on successful completion of {@link Mono}.
+ */
+ @ServiceMethod(returns = ReturnType.SINGLE)
+ private Mono<Response<Void>> deleteWithResponseAsync(String resourceGroupName, String factoryName,
+ String credentialName, Context context) {
+ if (this.client.getEndpoint() == null) {
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+ }
+ if (this.client.getSubscriptionId() == null) {
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ }
+ if (resourceGroupName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ }
+ if (factoryName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
+ }
+ if (credentialName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter credentialName is required and cannot be null."));
+ }
+ final String accept = "application/json";
+ context = this.client.mergeContext(context);
+ return service.delete(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
+ factoryName, credentialName, this.client.getApiVersion(), accept, context);
+ }
+
/**
* Deletes a credential.
*
@@ -660,31 +661,7 @@ private Mono deleteAsync(String resourceGroupName, String factoryName, Str
@ServiceMethod(returns = ReturnType.SINGLE)
public Response deleteWithResponse(String resourceGroupName, String factoryName, String credentialName,
Context context) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- if (credentialName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter credentialName is required and cannot be null."));
- }
- final String accept = "application/json";
- return service.deleteSync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, credentialName, this.client.getApiVersion(), accept, context);
+ return deleteWithResponseAsync(resourceGroupName, factoryName, credentialName, context).block();
}
/**
@@ -728,33 +705,6 @@ private Mono> listByFactoryNextSinglePage
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
- /**
- * Get the next page of items.
- *
- * @param nextLink The URL to get the next list of items.
- * @throws IllegalArgumentException thrown if parameters fail the validation.
- * @throws ManagementException thrown if the request is rejected by server.
- * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
- * @return a list of credential resources along with {@link PagedResponse}.
- */
- @ServiceMethod(returns = ReturnType.SINGLE)
- private PagedResponse<CredentialResourceInner> listByFactoryNextSinglePage(String nextLink) {
- if (nextLink == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
- }
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- final String accept = "application/json";
- Response<CredentialListResponse> res
- = service.listByFactoryNextSync(nextLink, this.client.getEndpoint(), accept, Context.NONE);
- return new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(),
- res.getValue().nextLink(), null);
- }
-
/**
* Get the next page of items.
*
@@ -763,25 +713,22 @@ private PagedResponse listByFactoryNextSinglePage(Strin
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
- * @return a list of credential resources along with {@link PagedResponse}.
+ * @return a list of credential resources along with {@link PagedResponse} on successful completion of {@link Mono}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
- private PagedResponse<CredentialResourceInner> listByFactoryNextSinglePage(String nextLink, Context context) {
+ private Mono<PagedResponse<CredentialResourceInner>> listByFactoryNextSinglePageAsync(String nextLink,
+ Context context) {
if (nextLink == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
+ return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
}
if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
}
final String accept = "application/json";
- Response<CredentialListResponse> res
- = service.listByFactoryNextSync(nextLink, this.client.getEndpoint(), accept, context);
- return new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(),
- res.getValue().nextLink(), null);
+ context = this.client.mergeContext(context);
+ return service.listByFactoryNext(nextLink, this.client.getEndpoint(), accept, context)
+ .map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(),
+ res.getValue().value(), res.getValue().nextLink(), null));
}
-
- private static final ClientLogger LOGGER = new ClientLogger(CredentialOperationsClientImpl.class);
}
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFactoryManagementClientImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFactoryManagementClientImpl.java
index 50ced65b620e..a8b083a34e5f 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFactoryManagementClientImpl.java
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFactoryManagementClientImpl.java
@@ -15,15 +15,12 @@
import com.azure.core.management.exception.ManagementException;
import com.azure.core.management.polling.PollResult;
import com.azure.core.management.polling.PollerFactory;
-import com.azure.core.management.polling.SyncPollerFactory;
-import com.azure.core.util.BinaryData;
import com.azure.core.util.Context;
import com.azure.core.util.CoreUtils;
import com.azure.core.util.logging.ClientLogger;
import com.azure.core.util.polling.AsyncPollResponse;
import com.azure.core.util.polling.LongRunningOperationStatus;
import com.azure.core.util.polling.PollerFlux;
-import com.azure.core.util.polling.SyncPoller;
import com.azure.core.util.serializer.SerializerAdapter;
import com.azure.core.util.serializer.SerializerEncoding;
import com.azure.resourcemanager.datafactory.fluent.ActivityRunsClient;
@@ -550,23 +547,6 @@ public PollerFlux, U> getLroResult(Mono type of poll result.
- * @param <U> type of final result.
- * @return SyncPoller for poll result and final result.
- */
- public <T, U> SyncPoller<PollResult<T>, U> getLroResult(Response<BinaryData> activationResponse,
- Type pollResultType, Type finalResultType, Context context) {
- return SyncPollerFactory.create(serializerAdapter, httpPipeline, pollResultType, finalResultType,
- defaultPollInterval, () -> activationResponse, context);
- }
-
/**
* Gets the final result, or an error, based on last async poll response.
*
diff --git a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFlowDebugSessionsClientImpl.java b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFlowDebugSessionsClientImpl.java
index d97f93791b20..bf97df1977a0 100644
--- a/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFlowDebugSessionsClientImpl.java
+++ b/sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/implementation/DataFlowDebugSessionsClientImpl.java
@@ -26,10 +26,8 @@
import com.azure.core.http.rest.RestProxy;
import com.azure.core.management.exception.ManagementException;
import com.azure.core.management.polling.PollResult;
-import com.azure.core.util.BinaryData;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
-import com.azure.core.util.logging.ClientLogger;
import com.azure.core.util.polling.PollerFlux;
import com.azure.core.util.polling.SyncPoller;
import com.azure.resourcemanager.datafactory.fluent.DataFlowDebugSessionsClient;
@@ -89,17 +87,6 @@ Mono>> create(@HostParam("$host") String endpoint,
@BodyParam("application/json") CreateDataFlowDebugSessionRequest request,
@HeaderParam("Accept") String accept, Context context);
- @Headers({ "Content-Type: application/json" })
- @Post("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession")
- @ExpectedResponses({ 200, 202 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response<BinaryData> createSync(@HostParam("$host") String endpoint,
- @PathParam("subscriptionId") String subscriptionId,
- @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
- @QueryParam("api-version") String apiVersion,
- @BodyParam("application/json") CreateDataFlowDebugSessionRequest request,
- @HeaderParam("Accept") String accept, Context context);
-
@Headers({ "Content-Type: application/json" })
@Post("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions")
@ExpectedResponses({ 200 })
@@ -109,15 +96,6 @@ Mono> queryByFactory(@HostParam("$h
@PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
@QueryParam("api-version") String apiVersion, @HeaderParam("Accept") String accept, Context context);
- @Headers({ "Content-Type: application/json" })
- @Post("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions")
- @ExpectedResponses({ 200 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response<QueryDataFlowDebugSessionsResponse> queryByFactorySync(@HostParam("$host") String endpoint,
- @PathParam("subscriptionId") String subscriptionId,
- @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
- @QueryParam("api-version") String apiVersion, @HeaderParam("Accept") String accept, Context context);
-
@Headers({ "Content-Type: application/json" })
@Post("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession")
@ExpectedResponses({ 200 })
@@ -128,16 +106,6 @@ Mono> addDataFlow(@HostParam("$
@QueryParam("api-version") String apiVersion, @BodyParam("application/json") DataFlowDebugPackage request,
@HeaderParam("Accept") String accept, Context context);
- @Headers({ "Content-Type: application/json" })
- @Post("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession")
- @ExpectedResponses({ 200 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response<AddDataFlowToDebugSessionResponseInner> addDataFlowSync(@HostParam("$host") String endpoint,
- @PathParam("subscriptionId") String subscriptionId,
- @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
- @QueryParam("api-version") String apiVersion, @BodyParam("application/json") DataFlowDebugPackage request,
- @HeaderParam("Accept") String accept, Context context);
-
@Headers({ "Content-Type: application/json" })
@Post("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession")
@ExpectedResponses({ 200 })
@@ -149,17 +117,6 @@ Mono> delete(@HostParam("$host") String endpoint,
@BodyParam("application/json") DeleteDataFlowDebugSessionRequest request,
@HeaderParam("Accept") String accept, Context context);
- @Headers({ "Content-Type: application/json" })
- @Post("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession")
- @ExpectedResponses({ 200 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response<Void> deleteSync(@HostParam("$host") String endpoint,
- @PathParam("subscriptionId") String subscriptionId,
- @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
- @QueryParam("api-version") String apiVersion,
- @BodyParam("application/json") DeleteDataFlowDebugSessionRequest request,
- @HeaderParam("Accept") String accept, Context context);
-
@Headers({ "Content-Type: application/json" })
@Post("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand")
@ExpectedResponses({ 200, 202 })
@@ -171,17 +128,6 @@ Mono>> executeCommand(@HostParam("$host") String endpo
@BodyParam("application/json") DataFlowDebugCommandRequest request, @HeaderParam("Accept") String accept,
Context context);
- @Headers({ "Content-Type: application/json" })
- @Post("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand")
- @ExpectedResponses({ 200, 202 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response<BinaryData> executeCommandSync(@HostParam("$host") String endpoint,
- @PathParam("subscriptionId") String subscriptionId,
- @PathParam("resourceGroupName") String resourceGroupName, @PathParam("factoryName") String factoryName,
- @QueryParam("api-version") String apiVersion,
- @BodyParam("application/json") DataFlowDebugCommandRequest request, @HeaderParam("Accept") String accept,
- Context context);
-
@Headers({ "Content-Type: application/json" })
@Get("{nextLink}")
@ExpectedResponses({ 200 })
@@ -189,14 +135,6 @@ Response executeCommandSync(@HostParam("$host") String endpoint,
Mono<Response<QueryDataFlowDebugSessionsResponse>> queryByFactoryNext(
@PathParam(value = "nextLink", encoded = true) String nextLink, @HostParam("$host") String endpoint,
@HeaderParam("Accept") String accept, Context context);
-
- @Headers({ "Content-Type: application/json" })
- @Get("{nextLink}")
- @ExpectedResponses({ 200 })
- @UnexpectedResponseExceptionType(ManagementException.class)
- Response<QueryDataFlowDebugSessionsResponse> queryByFactoryNextSync(
- @PathParam(value = "nextLink", encoded = true) String nextLink, @HostParam("$host") String endpoint,
- @HeaderParam("Accept") String accept, Context context);
}
/**
@@ -247,41 +185,40 @@ private Mono>> createWithResponseAsync(String resource
* @param resourceGroupName The resource group name.
* @param factoryName The factory name.
* @param request Data flow debug session definition.
+ * @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
- * @return response body structure for creating data flow debug session along with {@link Response}.
+ * @return response body structure for creating data flow debug session along with {@link Response} on successful
+ * completion of {@link Mono}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
- private Response<BinaryData> createWithResponse(String resourceGroupName, String factoryName,
- CreateDataFlowDebugSessionRequest request) {
+ private Mono<Response<Flux<ByteBuffer>>> createWithResponseAsync(String resourceGroupName, String factoryName,
+ CreateDataFlowDebugSessionRequest request, Context context) {
if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
+ return Mono.error(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
}
if (request == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter request is required and cannot be null."));
+ return Mono.error(new IllegalArgumentException("Parameter request is required and cannot be null."));
} else {
request.validate();
}
final String accept = "application/json";
- return service.createSync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, this.client.getApiVersion(), request, accept, Context.NONE);
+ context = this.client.mergeContext(context);
+ return service.create(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
+ factoryName, this.client.getApiVersion(), request, accept, context);
}
/**
@@ -290,42 +227,19 @@ private Response createWithResponse(String resourceGroupName, String
* @param resourceGroupName The resource group name.
* @param factoryName The factory name.
* @param request Data flow debug session definition.
- * @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
- * @return response body structure for creating data flow debug session along with {@link Response}.
+ * @return the {@link PollerFlux} for polling of response body structure for creating data flow debug session.
*/
- @ServiceMethod(returns = ReturnType.SINGLE)
- private Response<BinaryData> createWithResponse(String resourceGroupName, String factoryName,
- CreateDataFlowDebugSessionRequest request, Context context) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- if (request == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter request is required and cannot be null."));
- } else {
- request.validate();
- }
- final String accept = "application/json";
- return service.createSync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, this.client.getApiVersion(), request, accept, context);
+ @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
+ private PollerFlux<PollResult<CreateDataFlowDebugSessionResponseInner>, CreateDataFlowDebugSessionResponseInner>
+ beginCreateAsync(String resourceGroupName, String factoryName, CreateDataFlowDebugSessionRequest request) {
+ Mono<Response<Flux<ByteBuffer>>> mono = createWithResponseAsync(resourceGroupName, factoryName, request);
+ return this.client
+ .<CreateDataFlowDebugSessionResponseInner, CreateDataFlowDebugSessionResponseInner>getLroResult(mono,
+ this.client.getHttpPipeline(), CreateDataFlowDebugSessionResponseInner.class,
+ CreateDataFlowDebugSessionResponseInner.class, this.client.getContext());
}
/**
@@ -334,6 +248,7 @@ private Response createWithResponse(String resourceGroupName, String
* @param resourceGroupName The resource group name.
* @param factoryName The factory name.
* @param request Data flow debug session definition.
+ * @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
@@ -341,12 +256,15 @@ private Response createWithResponse(String resourceGroupName, String
*/
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
private PollerFlux<PollResult<CreateDataFlowDebugSessionResponseInner>, CreateDataFlowDebugSessionResponseInner>
- beginCreateAsync(String resourceGroupName, String factoryName, CreateDataFlowDebugSessionRequest request) {
- Mono<Response<Flux<ByteBuffer>>> mono = createWithResponseAsync(resourceGroupName, factoryName, request);
+ beginCreateAsync(String resourceGroupName, String factoryName, CreateDataFlowDebugSessionRequest request,
+ Context context) {
+ context = this.client.mergeContext(context);
+ Mono<Response<Flux<ByteBuffer>>> mono
+ = createWithResponseAsync(resourceGroupName, factoryName, request, context);
return this.client
.<CreateDataFlowDebugSessionResponseInner, CreateDataFlowDebugSessionResponseInner>getLroResult(mono,
this.client.getHttpPipeline(), CreateDataFlowDebugSessionResponseInner.class,
- CreateDataFlowDebugSessionResponseInner.class, this.client.getContext());
+ CreateDataFlowDebugSessionResponseInner.class, context);
}
/**
@@ -363,11 +281,7 @@ private Response createWithResponse(String resourceGroupName, String
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
public SyncPoller<PollResult<CreateDataFlowDebugSessionResponseInner>, CreateDataFlowDebugSessionResponseInner>
beginCreate(String resourceGroupName, String factoryName, CreateDataFlowDebugSessionRequest request) {
- Response<BinaryData> response = createWithResponse(resourceGroupName, factoryName, request);
- return this.client
- .<CreateDataFlowDebugSessionResponseInner, CreateDataFlowDebugSessionResponseInner>getLroResult(response,
- CreateDataFlowDebugSessionResponseInner.class, CreateDataFlowDebugSessionResponseInner.class,
- Context.NONE);
+ return this.beginCreateAsync(resourceGroupName, factoryName, request).getSyncPoller();
}
/**
@@ -386,10 +300,7 @@ private Response createWithResponse(String resourceGroupName, String
public SyncPoller<PollResult<CreateDataFlowDebugSessionResponseInner>, CreateDataFlowDebugSessionResponseInner>
beginCreate(String resourceGroupName, String factoryName, CreateDataFlowDebugSessionRequest request,
Context context) {
- Response<BinaryData> response = createWithResponse(resourceGroupName, factoryName, request, context);
- return this.client
- .<CreateDataFlowDebugSessionResponseInner, CreateDataFlowDebugSessionResponseInner>getLroResult(response,
- CreateDataFlowDebugSessionResponseInner.class, CreateDataFlowDebugSessionResponseInner.class, context);
+ return this.beginCreateAsync(resourceGroupName, factoryName, request, context).getSyncPoller();
}
/**
@@ -410,6 +321,25 @@ private Mono createAsync(String resourc
.flatMap(this.client::getLroFinalResultOrError);
}
+ /**
+ * Creates a data flow debug session.
+ *
+ * @param resourceGroupName The resource group name.
+ * @param factoryName The factory name.
+ * @param request Data flow debug session definition.
+ * @param context The context to associate with this operation.
+ * @throws IllegalArgumentException thrown if parameters fail the validation.
+ * @throws ManagementException thrown if the request is rejected by server.
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+ * @return response body structure for creating data flow debug session on successful completion of {@link Mono}.
+ */
+ @ServiceMethod(returns = ReturnType.SINGLE)
+ private Mono createAsync(String resourceGroupName, String factoryName,
+ CreateDataFlowDebugSessionRequest request, Context context) {
+ return beginCreateAsync(resourceGroupName, factoryName, request, context).last()
+ .flatMap(this.client::getLroFinalResultOrError);
+ }
+
/**
* Creates a data flow debug session.
*
@@ -424,7 +354,7 @@ private Mono createAsync(String resourc
@ServiceMethod(returns = ReturnType.SINGLE)
public CreateDataFlowDebugSessionResponseInner create(String resourceGroupName, String factoryName,
CreateDataFlowDebugSessionRequest request) {
- return beginCreate(resourceGroupName, factoryName, request).getFinalResult();
+ return createAsync(resourceGroupName, factoryName, request).block();
}
/**
@@ -442,7 +372,7 @@ public CreateDataFlowDebugSessionResponseInner create(String resourceGroupName,
@ServiceMethod(returns = ReturnType.SINGLE)
public CreateDataFlowDebugSessionResponseInner create(String resourceGroupName, String factoryName,
CreateDataFlowDebugSessionRequest request, Context context) {
- return beginCreate(resourceGroupName, factoryName, request, context).getFinalResult();
+ return createAsync(resourceGroupName, factoryName, request, context).block();
}
/**
@@ -488,15 +418,38 @@ private Mono> queryByFactorySingleP
*
* @param resourceGroupName The resource group name.
* @param factoryName The factory name.
+ * @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
- * @return a list of active debug sessions as paginated response with {@link PagedFlux}.
+ * @return a list of active debug sessions along with {@link PagedResponse} on successful completion of
+ * {@link Mono}.
*/
- @ServiceMethod(returns = ReturnType.COLLECTION)
- private PagedFlux<DataFlowDebugSessionInfoInner> queryByFactoryAsync(String resourceGroupName, String factoryName) {
- return new PagedFlux<>(() -> queryByFactorySinglePageAsync(resourceGroupName, factoryName),
- nextLink -> queryByFactoryNextSinglePageAsync(nextLink));
+ @ServiceMethod(returns = ReturnType.SINGLE)
+ private Mono<PagedResponse<DataFlowDebugSessionInfoInner>> queryByFactorySinglePageAsync(String resourceGroupName,
+ String factoryName, Context context) {
+ if (this.client.getEndpoint() == null) {
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+ }
+ if (this.client.getSubscriptionId() == null) {
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ }
+ if (resourceGroupName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ }
+ if (factoryName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
+ }
+ final String accept = "application/json";
+ context = this.client.mergeContext(context);
+ return service
+ .queryByFactory(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName, factoryName,
+ this.client.getApiVersion(), accept, context)
+ .map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(),
+ res.getValue().value(), res.getValue().nextLink(), null));
}
/**
@@ -507,35 +460,12 @@ private PagedFlux queryByFactoryAsync(String reso
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
- * @return a list of active debug sessions along with {@link PagedResponse}.
+ * @return a list of active debug sessions as paginated response with {@link PagedFlux}.
*/
- @ServiceMethod(returns = ReturnType.SINGLE)
- private PagedResponse<DataFlowDebugSessionInfoInner> queryByFactorySinglePage(String resourceGroupName,
- String factoryName) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- final String accept = "application/json";
- Response<QueryDataFlowDebugSessionsResponse> res
- = service.queryByFactorySync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, this.client.getApiVersion(), accept, Context.NONE);
- return new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(),
- res.getValue().nextLink(), null);
+ @ServiceMethod(returns = ReturnType.COLLECTION)
+ private PagedFlux<DataFlowDebugSessionInfoInner> queryByFactoryAsync(String resourceGroupName, String factoryName) {
+ return new PagedFlux<>(() -> queryByFactorySinglePageAsync(resourceGroupName, factoryName),
+ nextLink -> queryByFactoryNextSinglePageAsync(nextLink));
}
/**
@@ -547,35 +477,13 @@ private PagedResponse queryByFactorySinglePage(St
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
- * @return a list of active debug sessions along with {@link PagedResponse}.
+ * @return a list of active debug sessions as paginated response with {@link PagedFlux}.
*/
- @ServiceMethod(returns = ReturnType.SINGLE)
- private PagedResponse<DataFlowDebugSessionInfoInner> queryByFactorySinglePage(String resourceGroupName,
- String factoryName, Context context) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- final String accept = "application/json";
- Response<QueryDataFlowDebugSessionsResponse> res
- = service.queryByFactorySync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, this.client.getApiVersion(), accept, context);
- return new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(),
- res.getValue().nextLink(), null);
+ @ServiceMethod(returns = ReturnType.COLLECTION)
+ private PagedFlux<DataFlowDebugSessionInfoInner> queryByFactoryAsync(String resourceGroupName, String factoryName,
+ Context context) {
+ return new PagedFlux<>(() -> queryByFactorySinglePageAsync(resourceGroupName, factoryName, context),
+ nextLink -> queryByFactoryNextSinglePageAsync(nextLink, context));
}
/**
@@ -590,8 +498,7 @@ private PagedResponse queryByFactorySinglePage(St
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<DataFlowDebugSessionInfoInner> queryByFactory(String resourceGroupName, String factoryName) {
- return new PagedIterable<>(() -> queryByFactorySinglePage(resourceGroupName, factoryName),
- nextLink -> queryByFactoryNextSinglePage(nextLink));
+ return new PagedIterable<>(queryByFactoryAsync(resourceGroupName, factoryName));
}
/**
@@ -608,8 +515,7 @@ public PagedIterable queryByFactory(String resour
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<DataFlowDebugSessionInfoInner> queryByFactory(String resourceGroupName, String factoryName,
Context context) {
- return new PagedIterable<>(() -> queryByFactorySinglePage(resourceGroupName, factoryName, context),
- nextLink -> queryByFactoryNextSinglePage(nextLink, context));
+ return new PagedIterable<>(queryByFactoryAsync(resourceGroupName, factoryName, context));
}
/**
@@ -654,6 +560,48 @@ public PagedIterable queryByFactory(String resour
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
+ /**
+ * Add a data flow into debug session.
+ *
+ * @param resourceGroupName The resource group name.
+ * @param factoryName The factory name.
+ * @param request Data flow debug session definition with debug content.
+ * @param context The context to associate with this operation.
+ * @throws IllegalArgumentException thrown if parameters fail the validation.
+ * @throws ManagementException thrown if the request is rejected by server.
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+ * @return response body structure for starting data flow debug session along with {@link Response} on successful
+ * completion of {@link Mono}.
+ */
+ @ServiceMethod(returns = ReturnType.SINGLE)
+ private Mono<Response<AddDataFlowToDebugSessionResponseInner>> addDataFlowWithResponseAsync(
+ String resourceGroupName, String factoryName, DataFlowDebugPackage request, Context context) {
+ if (this.client.getEndpoint() == null) {
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+ }
+ if (this.client.getSubscriptionId() == null) {
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ }
+ if (resourceGroupName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ }
+ if (factoryName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
+ }
+ if (request == null) {
+ return Mono.error(new IllegalArgumentException("Parameter request is required and cannot be null."));
+ } else {
+ request.validate();
+ }
+ final String accept = "application/json";
+ context = this.client.mergeContext(context);
+ return service.addDataFlow(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
+ factoryName, this.client.getApiVersion(), request, accept, context);
+ }
+
/**
* Add a data flow into debug session.
*
@@ -687,33 +635,7 @@ private Mono addDataFlowAsync(String res
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<AddDataFlowToDebugSessionResponseInner> addDataFlowWithResponse(String resourceGroupName,
String factoryName, DataFlowDebugPackage request, Context context) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- if (request == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter request is required and cannot be null."));
- } else {
- request.validate();
- }
- final String accept = "application/json";
- return service.addDataFlowSync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, this.client.getApiVersion(), request, accept, context);
+ return addDataFlowWithResponseAsync(resourceGroupName, factoryName, request, context).block();
}
/**
@@ -774,6 +696,47 @@ private Mono> deleteWithResponseAsync(String resourceGroupName, S
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
+ /**
+ * Deletes a data flow debug session.
+ *
+ * @param resourceGroupName The resource group name.
+ * @param factoryName The factory name.
+ * @param request Data flow debug session definition for deletion.
+ * @param context The context to associate with this operation.
+ * @throws IllegalArgumentException thrown if parameters fail the validation.
+ * @throws ManagementException thrown if the request is rejected by server.
+ * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
+ * @return the {@link Response} on successful completion of {@link Mono}.
+ */
+ @ServiceMethod(returns = ReturnType.SINGLE)
+ private Mono<Response<Void>> deleteWithResponseAsync(String resourceGroupName, String factoryName,
+ DeleteDataFlowDebugSessionRequest request, Context context) {
+ if (this.client.getEndpoint() == null) {
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
+ }
+ if (this.client.getSubscriptionId() == null) {
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ }
+ if (resourceGroupName == null) {
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ }
+ if (factoryName == null) {
+ return Mono.error(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
+ }
+ if (request == null) {
+ return Mono.error(new IllegalArgumentException("Parameter request is required and cannot be null."));
+ } else {
+ request.validate();
+ }
+ final String accept = "application/json";
+ context = this.client.mergeContext(context);
+ return service.delete(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
+ factoryName, this.client.getApiVersion(), request, accept, context);
+ }
+
/**
* Deletes a data flow debug session.
*
@@ -806,33 +769,7 @@ private Mono deleteAsync(String resourceGroupName, String factoryName,
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<Void> deleteWithResponse(String resourceGroupName, String factoryName,
DeleteDataFlowDebugSessionRequest request, Context context) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- if (request == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter request is required and cannot be null."));
- } else {
- request.validate();
- }
- final String accept = "application/json";
- return service.deleteSync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, this.client.getApiVersion(), request, accept, context);
+ return deleteWithResponseAsync(resourceGroupName, factoryName, request, context).block();
}
/**
@@ -898,42 +835,40 @@ private Mono>> executeCommandWithResponseAsync(String
* @param resourceGroupName The resource group name.
* @param factoryName The factory name.
* @param request Data flow debug command definition.
+ * @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response body structure of data flow result for data preview, statistics or expression preview along with
- * {@link Response}.
+ * {@link Response} on successful completion of {@link Mono}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
- private Response<BinaryData> executeCommandWithResponse(String resourceGroupName, String factoryName,
- DataFlowDebugCommandRequest request) {
+ private Mono<Response<Flux<ByteBuffer>>> executeCommandWithResponseAsync(String resourceGroupName,
+ String factoryName, DataFlowDebugCommandRequest request, Context context) {
if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
+ return Mono.error(
+ new IllegalArgumentException("Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
+ return Mono.error(new IllegalArgumentException(
+ "Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
+ return Mono
+ .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
+ return Mono.error(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
}
if (request == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter request is required and cannot be null."));
+ return Mono.error(new IllegalArgumentException("Parameter request is required and cannot be null."));
} else {
request.validate();
}
final String accept = "application/json";
- return service.executeCommandSync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, this.client.getApiVersion(), request, accept, Context.NONE);
+ context = this.client.mergeContext(context);
+ return service.executeCommand(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
+ factoryName, this.client.getApiVersion(), request, accept, context);
}
/**
@@ -942,43 +877,20 @@ private Response executeCommandWithResponse(String resourceGroupName
* @param resourceGroupName The resource group name.
* @param factoryName The factory name.
* @param request Data flow debug command definition.
- * @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
- * @return response body structure of data flow result for data preview, statistics or expression preview along with
- * {@link Response}.
+ * @return the {@link PollerFlux} for polling of response body structure of data flow result for data preview,
+ * statistics or expression preview.
*/
- @ServiceMethod(returns = ReturnType.SINGLE)
- private Response<BinaryData> executeCommandWithResponse(String resourceGroupName, String factoryName,
- DataFlowDebugCommandRequest request, Context context) {
- if (this.client.getEndpoint() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getEndpoint() is required and cannot be null."));
- }
- if (this.client.getSubscriptionId() == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException(
- "Parameter this.client.getSubscriptionId() is required and cannot be null."));
- }
- if (resourceGroupName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
- }
- if (factoryName == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter factoryName is required and cannot be null."));
- }
- if (request == null) {
- throw LOGGER.atError()
- .log(new IllegalArgumentException("Parameter request is required and cannot be null."));
- } else {
- request.validate();
- }
- final String accept = "application/json";
- return service.executeCommandSync(this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName,
- factoryName, this.client.getApiVersion(), request, accept, context);
+ @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
+ private PollerFlux<PollResult<DataFlowDebugCommandResponseInner>, DataFlowDebugCommandResponseInner>
+ beginExecuteCommandAsync(String resourceGroupName, String factoryName, DataFlowDebugCommandRequest request) {
+ Mono<Response<Flux<ByteBuffer>>> mono
+ = executeCommandWithResponseAsync(resourceGroupName, factoryName, request);
+ return this.client.<DataFlowDebugCommandResponseInner, DataFlowDebugCommandResponseInner>getLroResult(mono,
+ this.client.getHttpPipeline(), DataFlowDebugCommandResponseInner.class,
+ DataFlowDebugCommandResponseInner.class, this.client.getContext());
}
/**
@@ -987,6 +899,7 @@ private Response executeCommandWithResponse(String resourceGroupName
* @param resourceGroupName The resource group name.
* @param factoryName The factory name.
* @param request Data flow debug command definition.
+ * @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
@@ -995,12 +908,14 @@ private Response executeCommandWithResponse(String resourceGroupName
*/
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
private PollerFlux<PollResult<DataFlowDebugCommandResponseInner>, DataFlowDebugCommandResponseInner>
- beginExecuteCommandAsync(String resourceGroupName, String factoryName, DataFlowDebugCommandRequest request) {
+ beginExecuteCommandAsync(String resourceGroupName, String factoryName, DataFlowDebugCommandRequest request,
+ Context context) {
+ context = this.client.mergeContext(context);
Mono<Response<Flux<ByteBuffer>>> mono
- = executeCommandWithResponseAsync(resourceGroupName, factoryName, request);
+ = executeCommandWithResponseAsync(resourceGroupName, factoryName, request, context);
return this.client.<DataFlowDebugCommandResponseInner, DataFlowDebugCommandResponseInner>getLroResult(mono,
this.client.getHttpPipeline(), DataFlowDebugCommandResponseInner.class,
- DataFlowDebugCommandResponseInner.class, this.client.getContext());
+ DataFlowDebugCommandResponseInner.class, context);
}
/**
@@ -1018,9 +933,7 @@ private Response executeCommandWithResponse(String resourceGroupName
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
public SyncPoller<PollResult<DataFlowDebugCommandResponseInner>, DataFlowDebugCommandResponseInner>
beginExecuteCommand(String resourceGroupName, String factoryName, DataFlowDebugCommandRequest request) {
- Response<BinaryData> response = executeCommandWithResponse(resourceGroupName, factoryName, request);
- return this.client.<DataFlowDebugCommandResponseInner, DataFlowDebugCommandResponseInner>getLroResult(response,
- DataFlowDebugCommandResponseInner.class, DataFlowDebugCommandResponseInner.class, Context.NONE);
+ return this.beginExecuteCommandAsync(resourceGroupName, factoryName, request).getSyncPoller();
}
/**
@@ -1040,9 +953,7 @@ private Response executeCommandWithResponse(String resourceGroupName
public SyncPoller<PollResult<DataFlowDebugCommandResponseInner>, DataFlowDebugCommandResponseInner>
beginExecuteCommand(String resourceGroupName, String factoryName, DataFlowDebugCommandRequest request,
Context context) {
- Response<BinaryData> response = executeCommandWithResponse(resourceGroupName, factoryName, request, context);
- return this.client.<DataFlowDebugCommandResponseInner, DataFlowDebugCommandResponseInner>getLroResult(response,
- DataFlowDebugCommandResponseInner.class, DataFlowDebugCommandResponseInner.class, context);
+ return this.beginExecuteCommandAsync(resourceGroupName, factoryName, request, context).getSyncPoller();
}
/**
@@ -1064,6 +975,26 @@ private Mono